Merge branch 'master' of github.com:D4-project/d4-core

pull/23/head
Gerard Wagener 2019-04-05 15:07:46 +02:00
commit 60cfbcb250
40 changed files with 5873 additions and 351 deletions

.gitchangelog.rc (new file, +289 lines)

@ -0,0 +1,289 @@
# -*- coding: utf-8; mode: python -*-
##
## Format
##
## ACTION: [AUDIENCE:] COMMIT_MSG [!TAG ...]
##
## Description
##
## ACTION is one of 'chg', 'fix', 'new'
##
## Is WHAT the change is about.
##
## 'chg' is for refactor, small improvement, cosmetic changes...
## 'fix' is for bug fixes
## 'new' is for new features, big improvement
##
## AUDIENCE is optional and one of 'dev', 'usr', 'pkg', 'test', 'doc'|'docs'
##
## Is WHO is concerned by the change.
##
## 'dev' is for developers (API changes, refactors...)
## 'usr' is for final users (UI changes)
## 'pkg' is for packagers (packaging changes)
## 'test' is for testers (test only related changes)
## 'doc' is for doc guys (doc only changes)
##
## COMMIT_MSG is ... well ... the commit message itself.
##
## TAGs are additional adjectives such as 'refactor', 'minor', 'cosmetic'
##
## They are preceded with a '!' or a '@' (prefer the former, as the
## latter is wrongly interpreted in github.) Commonly used tags are:
##
## 'refactor' is obviously for refactoring code only
## 'minor' is for a trivial change (a typo, adding a comment)
## 'cosmetic' is for cosmetic driven change (re-indentation, 80-col...)
## 'wip' is for partial functionality but complete subfunctionality.
##
## Example:
##
## new: usr: support of bazaar implemented
## chg: re-indented some lines !cosmetic
## new: dev: updated code to be compatible with last version of killer lib.
## fix: pkg: updated year of licence coverage.
## new: test: added a bunch of test around user usability of feature X.
## fix: typo in spelling my name in comment. !minor
##
## Please note that multi-line commit messages are supported; only the
## first line will be considered as the "summary" of the commit message, so
## tags and other rules only apply to the summary. The body of the commit
## message will be displayed in the changelog without reformatting.
##
## ``ignore_regexps`` is a list of regexps
##
## Any commit having its full commit message matching any regexp listed here
## will be ignored and won't be reported in the changelog.
##
ignore_regexps = [
r'@minor', r'!minor',
r'@cosmetic', r'!cosmetic',
r'@refactor', r'!refactor',
r'@wip', r'!wip',
r'^([cC]hg|[fF]ix|[nN]ew)\s*:\s*[pP]kg:',
r'^([cC]hg|[fF]ix|[nN]ew)\s*:\s*[dD]ev:',
r'^(.{3,3}\s*:)?\s*[fF]irst commit.?\s*$',
]
## ``section_regexps`` is a list of 2-tuples associating a string label and a
## list of regexp
##
## Commit messages will be classified in sections thanks to this. Section
## titles are the label, and a commit is classified under this section if any
## of the regexps associated is matching.
##
## Please note that ``section_regexps`` will only classify commits and won't
## make any changes to the contents. So you'll probably want to go check
## ``subject_process`` (or ``body_process``) to do some changes to the subject,
## whenever you are tweaking this variable.
##
section_regexps = [
('New', [
r'^[nN]ew\s*:\s*((dev|use?r|pkg|test|doc|docs)\s*:\s*)?([^\n]*)$',
]),
('Changes', [
r'^[cC]hg\s*:\s*((dev|use?r|pkg|test|doc|docs)\s*:\s*)?([^\n]*)$',
]),
('Fix', [
r'^[fF]ix\s*:\s*((dev|use?r|pkg|test|doc|docs)\s*:\s*)?([^\n]*)$',
]),
('Other', None ## Match all lines
),
]
## ``body_process`` is a callable
##
## This callable will be given the original body and result will
## be used in the changelog.
##
## Available constructs are:
##
## - any Python callable that takes one text argument and returns a text argument.
##
## - ReSub(pattern, replacement): will apply regexp substitution.
##
## - Indent(chars=" "): will indent the text with the prefix
## Please remember that template engines also get to modify the text and
## will usually indent the text themselves if needed.
##
## - Wrap(regexp=r"\n\n"): re-wrap text in separate paragraph to fill 80-Columns
##
## - noop: do nothing
##
## - ucfirst: ensure the first letter is uppercase.
## (usually used in the ``subject_process`` pipeline)
##
## - final_dot: ensure text finishes with a dot
## (usually used in the ``subject_process`` pipeline)
##
## - strip: remove any spaces before or after the content of the string
##
## - SetIfEmpty(msg="No commit message."): will set the text to
## whatever given ``msg`` if the current text is empty.
##
## Additionally, you can `pipe` the provided filters, for instance:
#body_process = Wrap(regexp=r'\n(?=\w+\s*:)') | Indent(chars=" ")
#body_process = Wrap(regexp=r'\n(?=\w+\s*:)')
#body_process = noop
body_process = ReSub(r'((^|\n)[A-Z]\w+(-\w+)*: .*(\n\s+.*)*)+$', r'') | strip
## ``subject_process`` is a callable
##
## This callable will be given the original subject and result will
## be used in the changelog.
##
## Available constructs are those listed in ``body_process`` doc.
subject_process = (strip |
ReSub(r'^([cC]hg|[fF]ix|[nN]ew)\s*:\s*((dev|use?r|pkg|test|doc|docs)\s*:\s*)?([^\n@]*)(@[a-z]+\s+)*$', r'\4') |
SetIfEmpty("No commit message.") | ucfirst | final_dot)
## ``tag_filter_regexp`` is a regexp
##
## Tags that will be used for the changelog must match this regexp.
##
tag_filter_regexp = r'^v[0-9]+\.[0-9]+$'
## ``unreleased_version_label`` is a string or a callable that outputs a string
##
## This label will be used as the changelog Title of the last set of changes
## between last valid tag and HEAD if any.
unreleased_version_label = "%%version%% (unreleased)"
## ``output_engine`` is a callable
##
## This will change the output format of the generated changelog file
##
## Available choices are:
##
## - rest_py
##
## Legacy pure python engine, outputs ReSTructured text.
## This is the default.
##
## - mustache(<template_name>)
##
## Template name could be any of the available templates in
## ``templates/mustache/*.tpl``.
## Requires python package ``pystache``.
## Examples:
## - mustache("markdown")
## - mustache("restructuredtext")
##
## - makotemplate(<template_name>)
##
## Template name could be any of the available templates in
## ``templates/mako/*.tpl``.
## Requires python package ``mako``.
## Examples:
## - makotemplate("restructuredtext")
##
output_engine = rest_py
#output_engine = mustache("restructuredtext")
#output_engine = mustache("markdown")
#output_engine = makotemplate("restructuredtext")
## ``include_merge`` is a boolean
##
## This option tells git-log whether to include merge commits in the log.
## The default is to include them.
include_merge = True
## ``log_encoding`` is a string identifier
##
## This option tells gitchangelog what encoding is output by ``git log``.
## The default is to be clever about it: it checks ``git config`` for
## ``i18n.logOutputEncoding``, and if not found will default to git's own
## default: ``utf-8``.
#log_encoding = 'utf-8'
## ``publish`` is a callable
##
## Sets what ``gitchangelog`` should do with the output generated by
## the output engine. ``publish`` is a callable taking one argument
## that is an iterator over the lines from the output engine.
##
## Some helper callables are provided:
##
## Available choices are:
##
## - stdout
##
## Outputs directly to standard output
## (This is the default)
##
## - FileInsertAtFirstRegexMatch(file, pattern, idx=lambda m: m.start())
##
## Creates a callable that will parse given file for the given
## regex pattern and will insert the output in the file.
## ``idx`` is a callable that receives the match object and
## must return an integer index where to insert the
## output in the file. The default is to return the position of
## the start of the matched string.
##
## - FileRegexSubst(file, pattern, replace, flags)
##
## Apply a replace inplace in the given file. Your regex pattern must
## take care of everything and might be more complex. Check the README
## for a complete copy-pastable example.
##
# publish = FileInsertAtFirstRegexMatch(
# "CHANGELOG.rst",
# r'/(?P<rev>[0-9]+\.[0-9]+(\.[0-9]+)?)\s+\([0-9]+-[0-9]{2}-[0-9]{2}\)\n--+\n/',
# idx=lambda m: m.start(1)
# )
#publish = stdout
## ``revs`` is a list of callable or a list of string
##
## Callables will be called and resolved as strings, allowing dynamic
## computation of these. The result will be used as revisions for
## gitchangelog (as if directly stated on the command line). This allows
## you to filter exactly which commits will be read by gitchangelog.
##
## To get a full documentation on the format of these strings, please
## refer to the ``git rev-list`` arguments. There are many examples.
##
## Using callables is especially useful, for instance, if you
## are using gitchangelog to generate your changelog incrementally.
##
## Some helpers are provided, you can use them::
##
## - FileFirstRegexMatch(file, pattern): will return a callable that will
## return the first string match for the given pattern in the given file.
## If you use named sub-patterns in your regex pattern, it'll output only
## the string matching the sub-pattern named "rev".
##
## - Caret(rev): will return the rev prefixed by a "^", which is a
## way to remove the given revision and all its ancestors.
##
## Please note that if you provide a rev-list on the command line, it'll
## replace this value (which will then be ignored).
##
## If empty, then ``gitchangelog`` will act as if it had to generate a full
## changelog.
##
## The default is to use all commits to make the changelog.
#revs = ["^1.0.3", ]
#revs = [
# Caret(
# FileFirstRegexMatch(
# "CHANGELOG.rst",
# r"(?P<rev>[0-9]+\.[0-9]+(\.[0-9]+)?)\s+\([0-9]+-[0-9]{2}-[0-9]{2}\)\n--+\n")),
# "HEAD"
#]
revs = []
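
As a rough illustration (not part of the configuration file above), the ``subject_process`` pipeline defined earlier behaves approximately like the following Python sketch:

~~~~
import re

def transform_subject(subject):
    # Approximation of: strip | ReSub(...) | SetIfEmpty | ucfirst | final_dot
    s = subject.strip()
    s = re.sub(r'^([cC]hg|[fF]ix|[nN]ew)\s*:\s*((dev|use?r|pkg|test|doc|docs)\s*:\s*)?([^\n@]*)(@[a-z]+\s+)*$',
               r'\4', s)
    if not s:                      # SetIfEmpty("No commit message.")
        s = "No commit message."
    s = s[:1].upper() + s[1:]      # ucfirst
    if not s.endswith('.'):        # final_dot
        s += '.'
    return s

print(transform_subject("new: usr: support of bazaar implemented"))
# Support of bazaar implemented.
~~~~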

.gitignore (new file, +8 lines)

@ -0,0 +1,8 @@
# Temp files
*.swp
*.pyc
*.swo
*.o
# redis data
server/dump6380.rdb


@ -1,17 +1,84 @@
# D4 core
-Software components used for the D4 project
+![](https://www.d4-project.org/assets/images/logo.png)
D4 core is the set of software components used in the D4 project. It includes everything needed to create your own sensor network or connect
to an existing sensor network using simple clients.
![https://github.com/D4-project/d4-core/releases/latest](https://img.shields.io/github/release/D4-project/d4-core/all.svg)
![https://github.com/D4-project/d4-core/blob/master/LICENSE](https://img.shields.io/badge/License-AGPL-yellow.svg)
## D4 core client
[D4 core client](https://github.com/D4-project/d4-core/tree/master/client) is a simple and minimal implementation of the [D4 encapsulation protocol](https://github.com/D4-project/architecture/tree/master/format). There is also a [portable D4 client](https://github.com/D4-project/d4-goclient) in Go, including support for SSL/TLS connectivity.
### Requirements
- Unix-like operating system
- make
- a recent C compiler
### Usage
The D4 client can be used to stream any byte stream towards a D4 server.
As an example, you can stream tcpdump output directly to a D4 server with the following
command:
````
tcpdump -n -s0 -w - | ./d4 -c ./conf | socat - OPENSSL-CONNECT:$D4-SERVER-IP-ADDRESS:$PORT,verify=0
````
~~~~
d4 - d4 client
Read data from the configured <source> and send it to <destination>
Usage: d4 -c config_directory
Configuration
The configuration settings are stored in files in the configuration directory
specified with the -c command line switch.
Files in the configuration directory
key - is the private HMAC-SHA-256-128 key.
The HMAC is computed on the header with a HMAC value set to 0
which is updated later.
snaplen - the length of bytes that is read from the <source>
version - the version of the d4 client
type - the type of data that is send. pcap, netflow, ...
source - the source where the data is read from
destination - the destination where the data is written to
~~~~
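
A minimal configuration directory can be put together before launching the client; the sketch below is illustrative only (the file names come from the list above, the values are placeholders to adapt to your own sensor and data source):

~~~~
import os
import secrets

conf_dir = 'conf'   # passed to the client as: ./d4 -c ./conf
os.makedirs(conf_dir, exist_ok=True)

# Placeholder values -- adjust them for your own sensor
settings = {
    'key': secrets.token_hex(32),   # private HMAC-SHA-256-128 key shared with the server
    'snaplen': '4096',              # number of bytes read from <source> at a time
    'version': '1',                 # d4 client version
    'type': '1',                    # type of data that is sent (e.g. 1 for pcap)
    'source': 'stdin',              # where the data is read from
    'destination': 'stdout',        # where the encapsulated data is written to
}
for name, value in settings.items():
    with open(os.path.join(conf_dir, name), 'w') as f:
        f.write(value + '\n')
~~~~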
### Installation
~~~~
cd client
git submodule init
git submodule update
~~~~
## D4 core server
D4 core server is a complete server to handle clients (sensors) including the decapsulation of the [D4 protocol](https://github.com/D4-project/architecture/tree/master/format), control of
sensor registrations, management of decoding protocols and dispatching to adequate decoders/analysers.
### Requirements
-- uuid-dev
-- make
-- a recent C compiler
+- Python 3.6
+- GNU/Linux distribution
### Installation
- [Install D4 Server](https://github.com/D4-project/d4-core/tree/master/server)
### Screenshots of D4 core server management
![](https://raw.githubusercontent.com/D4-project/d4-core/master/doc/images/main.png)
![](https://raw.githubusercontent.com/D4-project/d4-core/master/doc/images/sensor-mgmt.png)
![](https://raw.githubusercontent.com/D4-project/d4-core/master/doc/images/server-mgmt.png)
![](https://raw.githubusercontent.com/D4-project/d4-core/master/doc/images/analyzer-mgmt.png)
![](https://raw.githubusercontent.com/D4-project/d4-core/master/doc/images/server-mgmt2.png)

Binary file not shown (new image, 64 KiB)

BIN doc/images/main.png (new file, 88 KiB)

BIN doc/images/sensor-mgmt.png (new file, 94 KiB)

BIN doc/images/server-mgmt.png (new file, 136 KiB)

BIN doc/images/server-mgmt2.png (new file, 127 KiB)

server/.gitignore (+1 line)

@ -2,6 +2,7 @@
*.csr
*.pem
*.key
configs/server.conf
data/
logs/
redis/


@ -36,7 +36,7 @@ function helptext {
- D4 Twisted server.
- All wokers manager.
- All Redis in memory servers.
-- Flak server.
+- Flask server.
Usage: LAUNCH.sh
[-l | --launchAuto]
@ -51,7 +51,7 @@ function launching_redis {
screen -dmS "Redis_D4" screen -dmS "Redis_D4"
sleep 0.1 sleep 0.1
echo -e $GREEN"\t* Launching D4 Redis ervers"$DEFAULT echo -e $GREEN"\t* Launching D4 Redis Servers"$DEFAULT
screen -S "Redis_D4" -X screen -t "6379" bash -c $redis_dir'redis-server '$conf_dir'6379.conf ; read x' screen -S "Redis_D4" -X screen -t "6379" bash -c $redis_dir'redis-server '$conf_dir'6379.conf ; read x'
sleep 0.1 sleep 0.1
screen -S "Redis_D4" -X screen -t "6380" bash -c $redis_dir'redis-server '$conf_dir'6380.conf ; read x' screen -S "Redis_D4" -X screen -t "6380" bash -c $redis_dir'redis-server '$conf_dir'6380.conf ; read x'
@ -72,9 +72,13 @@ function launching_workers {
sleep 0.1
echo -e $GREEN"\t* Launching D4 Workers"$DEFAULT
-screen -S "Workers_D4" -X screen -t "1_workers_manager" bash -c "cd ${D4_HOME}/workers/workers_1; ./workers_manager.py; read x"
+screen -S "Workers_D4" -X screen -t "1_workers" bash -c "cd ${D4_HOME}/workers/workers_1; ./workers_manager.py; read x"
sleep 0.1
-screen -S "Workers_D4" -X screen -t "4_workers_manager" bash -c "cd ${D4_HOME}/workers/workers_4; ./workers_manager.py; read x"
+screen -S "Workers_D4" -X screen -t "2_workers" bash -c "cd ${D4_HOME}/workers/workers_2; ./workers_manager.py; read x"
sleep 0.1
screen -S "Workers_D4" -X screen -t "4_workers" bash -c "cd ${D4_HOME}/workers/workers_4; ./workers_manager.py; read x"
sleep 0.1
screen -S "Workers_D4" -X screen -t "8_workers" bash -c "cd ${D4_HOME}/workers/workers_8; ./workers_manager.py; read x"
sleep 0.1
}
@ -159,6 +163,7 @@ function launch_flask {
screen -dmS "Flask_D4" screen -dmS "Flask_D4"
sleep 0.1 sleep 0.1
echo -e $GREEN"\t* Launching Flask server"$DEFAULT echo -e $GREEN"\t* Launching Flask server"$DEFAULT
# screen -S "Flask_D4" -X screen -t "Flask_server" bash -c "cd $flask_dir; export FLASK_DEBUG=1;export FLASK_APP=Flask_server.py; python -m flask run --port 7000; read x"
screen -S "Flask_D4" -X screen -t "Flask_server" bash -c "cd $flask_dir; ls; ./Flask_server.py; read x" screen -S "Flask_D4" -X screen -t "Flask_server" bash -c "cd $flask_dir; ls; ./Flask_server.py; read x"
else else
echo -e $RED"\t* A Flask_D4 screen is already launched"$DEFAULT echo -e $RED"\t* A Flask_D4 screen is already launched"$DEFAULT
@ -200,9 +205,15 @@ function update_web {
fi
}
function update_config {
echo -e $GREEN"\t* Updating Config File"$DEFAULT
bash -c "(cd ${D4_HOME}/configs; ./update_conf.py -v 0)"
}
function launch_all {
helptext;
launch_redis;
update_config;
launch_d4_server;
launch_workers;
launch_flask;

server/README.md (new file, +68 lines)

@ -0,0 +1,68 @@
# D4 core
![](https://www.d4-project.org/assets/images/logo.png)
## D4 core server
D4 core server is a complete server to handle clients (sensors) including the decapsulation of the [D4 protocol](https://github.com/D4-project/architecture/tree/master/format), control of
sensor registrations, management of decoding protocols and dispatching to adequate decoders/analysers.
### Requirements
- Python 3.6
- GNU/Linux distribution
### Installation
###### Install D4 server
~~~~
cd server
./install_server.sh
~~~~
Create or add a PEM certificate in [d4-core/server](https://github.com/D4-project/d4-core/tree/master/server):
~~~~
cd gen_cert
./gen_root.sh
./gen_cert.sh
cd ..
~~~~
###### Launch D4 server
~~~~
./LAUNCH.sh -l
~~~~
The web interface is accessible via `http://127.0.0.1:7000/`
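The JSON endpoints used by the dashboard can also be queried directly. For example, a small sketch (assuming the default port and the Flask server's `_json_daily_uuid_stats` route, whose daily-stats helpers appear to return a list of `{key, value}` objects):
~~~~
import json
import urllib.request

# Daily per-sensor statistics, as consumed by the dashboard charts
with urllib.request.urlopen('http://127.0.0.1:7000/_json_daily_uuid_stats') as resp:
    stats = json.loads(resp.read().decode())

for entry in stats:
    print(entry['key'], entry['value'])
~~~~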
### Updating web assets
To update the JavaScript libraries, run:
~~~~
cd web
./update_web.sh
~~~~
### Notes
- All server logs are located in ``d4-core/server/logs/``
- Close D4 Server: ``./LAUNCH.sh -k``
### Screenshots of D4 core server management
![](https://raw.githubusercontent.com/D4-project/d4-core/master/doc/images/main.png)
![](https://raw.githubusercontent.com/D4-project/d4-core/master/doc/images/sensor-mgmt.png)
![](https://raw.githubusercontent.com/D4-project/d4-core/master/doc/images/server-mgmt.png)
![](https://raw.githubusercontent.com/D4-project/d4-core/master/doc/images/analyzer-mgmt.png)
![](https://raw.githubusercontent.com/D4-project/d4-core/master/doc/images/server-mgmt2.png)
### Troubleshooting
###### Worker 1, tcpdump: Permission denied
Could be related to AppArmor:
~~~~
sudo cat /var/log/syslog | grep denied
~~~~
Run the following command as root:
~~~~
aa-complain /usr/sbin/tcpdump
~~~~

File diff suppressed because it is too large

File diff suppressed because it is too large


@ -0,0 +1,5 @@
[Save_Directories]
# By default all data is saved in $D4_HOME/data/
use_default_save_directory = yes
save_directory = None

server/configs/update_conf.py (new executable file, +77 lines)

@ -0,0 +1,77 @@
#!/usr/bin/env python3
import os
import argparse
import configparser
def print_message(message_to_print, verbose):
if verbose:
print(message_to_print)
if __name__ == "__main__":
# parse parameters
parser = argparse.ArgumentParser()
parser.add_argument('-v', '--verbose',help='Display Info Messages', type=int, default=1, choices=[0, 1])
parser.add_argument('-b', '--backup',help='Create Config Backup', type=int, default=1, choices=[0, 1])
args = parser.parse_args()
if args.verbose == 1:
verbose = True
else:
verbose = False
if args.backup == 1:
backup = True
else:
backup = False
config_file_server = os.path.join(os.environ['D4_HOME'], 'configs/server.conf')
config_file_sample = os.path.join(os.environ['D4_HOME'], 'configs/server.conf.sample')
config_file_backup = os.path.join(os.environ['D4_HOME'], 'configs/server.conf.backup')
# Check if the config file exists
if not os.path.isfile(config_file_server):
# create config file
with open(config_file_server, 'w') as configfile:
with open(config_file_sample, 'r') as config_file_sample:
configfile.write(config_file_sample.read())
print_message('Config File Created', verbose)
else:
config_server = configparser.ConfigParser()
config_server.read(config_file_server)
config_sections = config_server.sections()
config_sample = configparser.ConfigParser()
config_sample.read(config_file_sample)
sample_sections = config_sample.sections()
mew_content_added = False
for section in sample_sections:
new_key_added = False
if section not in config_sections:
# add new section
config_server.add_section(section)
mew_content_added = True
for key in config_sample[section]:
if key not in config_server[section]:
# add new section key
config_server.set(section, key, config_sample[section][key])
if not new_key_added:
print_message('[{}]'.format(section), verbose)
new_key_added = True
mew_content_added = True
print_message(' {} = {}'.format(key, config_sample[section][key]), verbose)
# new keys have been added to config file
if mew_content_added:
# backup config file
if backup:
with open(config_file_backup, 'w') as configfile:
with open(config_file_server, 'r') as configfile_origin:
configfile.write(configfile_origin.read())
print_message('New Backup Created', verbose)
# create new config file
with open(config_file_server, 'w') as configfile:
config_server.write(configfile)
print_message('Config file updated', verbose)
else:
print_message('Nothing to update', verbose)


@ -2,4 +2,4 @@
# Create Root key
openssl genrsa -out rootCA.key 4096
# Create and Sign the Root CA Certificate
-openssl req -x509 -new -nodes -key rootCA.key -sha256 -days 1024 -out rootCA.crt
+openssl req -x509 -new -nodes -key rootCA.key -sha256 -days 1024 -out rootCA.crt -config san.cnf


@ -3,7 +3,7 @@
set -e
set -x
-sudo apt-get install python3-pip virtualenv screen -y
+sudo apt-get install python3-pip virtualenv screen whois unzip libffi-dev gcc -y
if [ -z "$VIRTUAL_ENV" ]; then
virtualenv -p python3 D4ENV


@ -23,7 +23,8 @@ from twisted.protocols.policies import TimeoutMixin
hmac_reset = bytearray(32)
hmac_key = b'private key to change'
-accepted_type = [1, 4]
+accepted_type = [1, 2, 4, 8, 254]
accepted_extended_type = ['ja3-jl']
timeout_time = 30
@ -60,40 +61,86 @@ except redis.exceptions.ConnectionError:
print('Error: Redis server {}:{}, ConnectionError'.format(host_redis_metadata, port_redis_metadata))
sys.exit(1)
# set hmac default key
redis_server_metadata.set('server:hmac_default_key', hmac_key)
# init redis_server_metadata
redis_server_metadata.delete('server:accepted_type')
for type in accepted_type:
redis_server_metadata.sadd('server:accepted_type', type)
redis_server_metadata.delete('server:accepted_extended_type')
for type in accepted_extended_type:
redis_server_metadata.sadd('server:accepted_extended_type', type)
-class Echo(Protocol, TimeoutMixin):
+dict_all_connection = {}
class D4_Server(Protocol, TimeoutMixin):
def __init__(self):
self.buffer = b''
self.setTimeout(timeout_time)
self.session_uuid = str(uuid.uuid4())
self.data_saved = False
self.update_stream_type = True
self.first_connection = True
self.ip = None
self.source_port = None
self.stream_max_size = None
self.hmac_key = None
#self.version = None
self.type = None
self.uuid = None
logger.debug('New session: session_uuid={}'.format(self.session_uuid))
dict_all_connection[self.session_uuid] = self
def dataReceived(self, data):
-self.resetTimeout()
-ip, source_port = self.transport.client
-# check blacklisted_ip
-if redis_server_metadata.sismember('blacklist_ip', ip):
-self.transport.abortConnection()
-logger.warning('Blacklisted IP={}, connection closed'.format(ip))
+# check and kick sensor by uuid
+for client_uuid in redis_server_stream.smembers('server:sensor_to_kick'):
+client_uuid = client_uuid.decode()
+for session_uuid in redis_server_stream.smembers('map:active_connection-uuid-session_uuid:{}'.format(client_uuid)):
+session_uuid = session_uuid.decode()
+logger.warning('Sensor kicked uuid={}, session_uuid={}'.format(client_uuid, session_uuid))
redis_server_stream.set('temp_blacklist_uuid:{}'.format(client_uuid), 'some random string')
redis_server_stream.expire('temp_blacklist_uuid:{}'.format(client_uuid), 30)
dict_all_connection[session_uuid].transport.abortConnection()
redis_server_stream.srem('server:sensor_to_kick', client_uuid)
-self.process_header(data, ip, source_port)
+self.resetTimeout()
if self.first_connection or self.ip is None:
client_info = self.transport.client
self.ip = self.extract_ip(client_info[0])
self.source_port = client_info[1]
logger.debug('New connection, ip={}, port={} session_uuid={}'.format(self.ip, self.source_port, self.session_uuid))
# check blacklisted_ip
if redis_server_metadata.sismember('blacklist_ip', self.ip):
self.transport.abortConnection()
logger.warning('Blacklisted IP={}, connection closed'.format(self.ip))
else:
# process data
self.process_header(data, self.ip, self.source_port)
def timeoutConnection(self):
if self.uuid is None:
# # TODO: ban auto
logger.warning('Timeout, no D4 header send, session_uuid={}, connection closed'.format(self.session_uuid))
self.transport.abortConnection()
else:
self.resetTimeout()
self.buffer = b''
logger.debug('buffer timeout, session_uuid={}'.format(self.session_uuid))
def connectionMade(self):
self.transport.setTcpKeepAlive(1)
def connectionLost(self, reason):
redis_server_stream.sadd('ended_session', self.session_uuid)
self.setTimeout(None)
redis_server_stream.srem('active_connection:{}'.format(self.type), '{}:{}'.format(self.ip, self.uuid))
redis_server_stream.srem('active_connection', '{}'.format(self.uuid))
if self.uuid:
redis_server_stream.srem('map:active_connection-uuid-session_uuid:{}'.format(self.uuid), self.session_uuid)
logger.debug('Connection closed: session_uuid={}'.format(self.session_uuid))
dict_all_connection.pop(self.session_uuid)
def unpack_header(self, data):
data_header = {}
@ -104,26 +151,19 @@ class Echo(Protocol, TimeoutMixin):
data_header['timestamp'] = struct.unpack('Q', data[18:26])[0]
data_header['hmac_header'] = data[26:58]
data_header['size'] = struct.unpack('I', data[58:62])[0]
# uuid blacklist
if redis_server_metadata.sismember('blacklist_uuid', data_header['uuid_header']):
self.transport.abortConnection()
logger.warning('Blacklisted UUID={}, connection closed'.format(data_header['uuid_header']))
# check default size limit
if data_header['size'] > data_default_size_limit:
self.transport.abortConnection()
logger.warning('Incorrect header data size: the server received more data than expected by default, expected={}, received={} , uuid={}, session_uuid={}'.format(data_default_size_limit, data_header['size'] ,data_header['uuid_header'], self.session_uuid))
# Worker: Incorrect type
if redis_server_stream.sismember('Error:IncorrectType:{}'.format(data_header['type']), self.session_uuid):
self.transport.abortConnection()
redis_server_stream.delete(stream_name)
redis_server_stream.srem('Error:IncorrectType:{}'.format(data_header['type']), self.session_uuid)
logger.warning('Incorrect type={} detected by worker, uuid={}, session_uuid={}'.format(data_header['type'] ,data_header['uuid_header'], self.session_uuid))
return data_header
def extract_ip(self, ip_string):
#remove interface
ip_string = ip_string.split('%')[0]
# IPv4
#extract ipv4
if '.' in ip_string:
return ip_string.split(':')[-1]
# IPv6
else:
return ip_string
def is_valid_uuid_v4(self, header_uuid):
try:
uuid_test = uuid.UUID(hex=header_uuid, version=4)
@ -143,14 +183,103 @@ class Echo(Protocol, TimeoutMixin):
logger.info('Invalid Header, uuid={}, session_uuid={}'.format(uuid_to_check, self.session_uuid))
return False
def check_connection_validity(self, data_header):
# blacklist ip by uuid
if redis_server_metadata.sismember('blacklist_ip_by_uuid', data_header['uuid_header']):
redis_server_metadata.sadd('blacklist_ip', self.ip)
self.transport.abortConnection()
logger.warning('Blacklisted IP by UUID={}, connection closed'.format(data_header['uuid_header']))
return False
# uuid blacklist
if redis_server_metadata.sismember('blacklist_uuid', data_header['uuid_header']):
logger.warning('Blacklisted UUID={}, connection closed'.format(data_header['uuid_header']))
self.transport.abortConnection()
return False
# check temp blacklist
if redis_server_stream.exists('temp_blacklist_uuid:{}'.format(data_header['uuid_header'])):
logger.warning('Temporarily Blacklisted UUID={}, connection closed'.format(data_header['uuid_header']))
redis_server_metadata.hset('metadata_uuid:{}'.format(data_header['uuid_header']), 'Error', 'Error: This UUID is temporarily blacklisted')
self.transport.abortConnection()
return False
# check default size limit
if data_header['size'] > data_default_size_limit:
self.transport.abortConnection()
logger.warning('Incorrect header data size: the server received more data than expected by default, expected={}, received={} , uuid={}, session_uuid={}'.format(data_default_size_limit, data_header['size'] ,data_header['uuid_header'], self.session_uuid))
return False
# Worker: Incorrect type
if redis_server_stream.sismember('Error:IncorrectType', self.session_uuid):
self.transport.abortConnection()
redis_server_stream.delete('stream:{}:{}'.format(data_header['type'], self.session_uuid))
redis_server_stream.srem('Error:IncorrectType', self.session_uuid)
logger.warning('Incorrect type={} detected by worker, uuid={}, session_uuid={}'.format(data_header['type'] ,data_header['uuid_header'], self.session_uuid))
return False
return True
def process_header(self, data, ip, source_port):
if not self.buffer:
data_header = self.unpack_header(data)
if data_header:
if not self.check_connection_validity(data_header):
return 1
if self.is_valid_header(data_header['uuid_header'], data_header['type']):
-# auto kill connection
+# TODO: map type
if self.first_connection:
self.first_connection = False
if redis_server_stream.sismember('active_connection:{}'.format(data_header['type']), '{}:{}'.format(ip, data_header['uuid_header'])):
# same IP-type for an UUID
logger.warning('is using the same UUID for one type, ip={} uuid={} type={} session_uuid={}'.format(ip, data_header['uuid_header'], data_header['type'], self.session_uuid))
redis_server_metadata.hset('metadata_uuid:{}'.format(data_header['uuid_header']), 'Error', 'Error: This UUID is using the same UUID for one type={}'.format(data_header['type']))
self.transport.abortConnection()
return 1
else:
#self.version = None
# check if type change
if self.data_saved:
# type change detected
if self.type != data_header['type']:
# Meta types
if self.type == 2 and data_header['type'] == 254:
self.update_stream_type = True
# Type Error
else:
logger.warning('Unexpected type change, type={} new type={}, ip={} uuid={} session_uuid={}'.format(ip, data_header['uuid_header'], data_header['type'], self.session_uuid))
redis_server_metadata.hset('metadata_uuid:{}'.format(data_header['uuid_header']), 'Error', 'Error: Unexpected type change type={}, new type={}'.format(self.type, data_header['type']))
self.transport.abortConnection()
return 1
# type 254, check if previous type 2 saved
elif data_header['type'] == 254:
logger.warning('a type 2 packet must be sent, ip={} uuid={} type={} session_uuid={}'.format(ip, data_header['uuid_header'], data_header['type'], self.session_uuid))
redis_server_metadata.hset('metadata_uuid:{}'.format(data_header['uuid_header']), 'Error', 'Error: a type 2 packet must be sent, type={}'.format(data_header['type']))
self.transport.abortConnection()
return 1
self.type = data_header['type']
self.uuid = data_header['uuid_header']
#active Connection
redis_server_stream.sadd('active_connection:{}'.format(self.type), '{}:{}'.format(ip, self.uuid))
redis_server_stream.sadd('active_connection', '{}'.format(self.uuid))
# map session_uuid/uuid
redis_server_stream.sadd('map:active_connection-uuid-session_uuid:{}'.format(self.uuid), self.session_uuid)
# check if the uuid is the same
if self.uuid != data_header['uuid_header']:
logger.warning('The uuid change during the connection, ip={} uuid={} type={} session_uuid={} new_uuid={}'.format(ip, self.uuid, data_header['type'], self.session_uuid, data_header['uuid_header']))
redis_server_metadata.hset('metadata_uuid:{}'.format(data_header['uuid_header']), 'Error', 'Error: The uuid change, new_uuid={}'.format(data_header['uuid_header']))
self.transport.abortConnection()
return 1
## TODO: ban ?
# check data size
if data_header['size'] == (len(data) - header_size):
-self.process_d4_data(data, data_header, ip)
+res = self.process_d4_data(data, data_header, ip)
# Error detected, kill connection
if res == 1:
return 1
# multiple d4 headers
elif data_header['size'] < (len(data) - header_size):
next_data = data[data_header['size'] + header_size:]
@ -159,7 +288,10 @@ class Echo(Protocol, TimeoutMixin):
#print(data)
#print()
#print(next_data)
-self.process_d4_data(data, data_header, ip)
+res = self.process_d4_data(data, data_header, ip)
# Error detected, kill connection
if res == 1:
return 1
# process next d4 header
self.process_header(next_data, ip, source_port)
# data_header['size'] > (len(data) - header_size)
@ -210,7 +342,12 @@ class Echo(Protocol, TimeoutMixin):
self.buffer = b''
# set hmac_header to 0
data = data.replace(data_header['hmac_header'], hmac_reset, 1)
-HMAC = hmac.new(hmac_key, msg=data, digestmod='sha256')
+if self.hmac_key is None:
self.hmac_key = redis_server_metadata.hget('metadata_uuid:{}'.format(data_header['uuid_header']), 'hmac_key')
if self.hmac_key is None:
self.hmac_key = redis_server_metadata.get('server:hmac_default_key')
HMAC = hmac.new(self.hmac_key, msg=data, digestmod='sha256')
data_header['hmac_header'] = data_header['hmac_header'].hex()
### Debug ###
@ -234,6 +371,8 @@ class Echo(Protocol, TimeoutMixin):
date = datetime.datetime.now().strftime("%Y%m%d")
if redis_server_stream.xlen('stream:{}:{}'.format(data_header['type'], self.session_uuid)) < self.stream_max_size:
# Clean Error Message
redis_server_metadata.hdel('metadata_uuid:{}'.format(data_header['uuid_header']), 'Error')
redis_server_stream.xadd('stream:{}:{}'.format(data_header['type'], self.session_uuid), {'message': data[header_size:], 'uuid': data_header['uuid_header'], 'timestamp': data_header['timestamp'], 'version': data_header['version']})
@ -244,24 +383,44 @@ class Echo(Protocol, TimeoutMixin):
redis_server_metadata.zincrby('daily_ip:{}'.format(date), 1, ip)
redis_server_metadata.zincrby('daily_type:{}'.format(date), 1, data_header['type'])
redis_server_metadata.zincrby('stat_type_uuid:{}:{}'.format(date, data_header['type']), 1, data_header['uuid_header'])
redis_server_metadata.zincrby('stat_uuid_type:{}:{}'.format(date, data_header['uuid_header']), 1, data_header['type'])
#
if not redis_server_metadata.hexists('metadata_uuid:{}'.format(data_header['uuid_header']), 'first_seen'):
redis_server_metadata.hset('metadata_uuid:{}'.format(data_header['uuid_header']), 'first_seen', data_header['timestamp'])
redis_server_metadata.hset('metadata_uuid:{}'.format(data_header['uuid_header']), 'last_seen', data_header['timestamp'])
redis_server_metadata.hset('metadata_type_by_uuid:{}:{}'.format(data_header['uuid_header'], data_header['type']), 'last_seen', data_header['timestamp'])
if not self.data_saved:
#UUID IP: ## TODO: use d4 timestamp ?
redis_server_metadata.lpush('list_uuid_ip:{}'.format(data_header['uuid_header']), '{}-{}'.format(ip, datetime.datetime.now().strftime("%Y%m%d%H%M%S")))
redis_server_metadata.ltrim('list_uuid_ip:{}'.format(data_header['uuid_header']), 0, 15)
self.data_saved = True
if self.update_stream_type:
redis_server_stream.sadd('session_uuid:{}'.format(data_header['type']), self.session_uuid.encode())
redis_server_stream.hset('map-type:session_uuid-uuid:{}'.format(data_header['type']), self.session_uuid, data_header['uuid_header'])
-self.data_saved = True
+redis_server_metadata.sadd('all_types_by_uuid:{}'.format(data_header['uuid_header']), data_header['type'])
if not redis_server_metadata.hexists('metadata_type_by_uuid:{}:{}'.format(data_header['uuid_header'], data_header['type']), 'first_seen'):
redis_server_metadata.hset('metadata_type_by_uuid:{}:{}'.format(data_header['uuid_header'], data_header['type']), 'first_seen', data_header['timestamp'])
self.update_stream_type = False
return 0
else:
logger.warning("stream exceed max entries limit, uuid={}, session_uuid={}, type={}".format(data_header['uuid_header'], self.session_uuid, data_header['type']))
## TODO: FIXME
redis_server_metadata.hset('metadata_uuid:{}'.format(data_header['uuid_header']), 'Error', 'Error: stream exceed max entries limit')
self.transport.abortConnection()
return 1
else:
print('hmac do not match')
print(data)
logger.debug("HMAC don't match, uuid={}, session_uuid={}".format(data_header['uuid_header'], self.session_uuid))
## TODO: FIXME
redis_server_metadata.hset('metadata_uuid:{}'.format(data_header['uuid_header']), 'Error', 'Error: HMAC don\'t match')
self.transport.abortConnection()
return 1
def main(reactor):
@ -273,8 +432,9 @@ def main(reactor):
print(e)
sys.exit(1)
certificate = ssl.PrivateCertificate.loadPEM(certData)
-factory = protocol.Factory.forProtocol(Echo)
+factory = protocol.Factory.forProtocol(D4_Server)
-reactor.listenSSL(4443, factory, certificate.options())
+# use interface to support both IPv4 and IPv6
reactor.listenSSL(4443, factory, certificate.options(), interface='::')
return defer.Deferred()
@ -283,6 +443,9 @@ if __name__ == "__main__":
parser.add_argument('-v', '--verbose',help='dddd' , type=int, default=30)
args = parser.parse_args()
if not redis_server_metadata.exists('first_date'):
redis_server_metadata.set('first_date', datetime.datetime.now().strftime("%Y%m%d"))
logs_dir = 'logs'
if not os.path.isdir(logs_dir):
os.makedirs(logs_dir)
@ -298,4 +461,5 @@ if __name__ == "__main__":
logger.setLevel(args.verbose)
logger.info('Launching Server ...')
task.react(main)
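
For reference, a minimal sketch of how a client-side frame matching the offsets in `unpack_header` above could be assembled. The layout (version and type in the first two bytes, sensor UUID in bytes 2:18) is inferred from the 18:26 / 26:58 / 58:62 offsets and the 62-byte header implied by the code; this is illustrative, not the project's client implementation:

~~~~
import hmac
import struct
import time
import uuid

def build_d4_frame(version, d4_type, sensor_uuid, payload, hmac_key):
    # version (1) + type (1) + uuid (16) + timestamp (8) + hmac (32) + size (4) = 62 bytes
    header = struct.pack('BB', version, d4_type)
    header += sensor_uuid.bytes                       # 16-byte sensor UUID
    header += struct.pack('Q', int(time.time()))      # timestamp, native byte order as in unpack_header
    header += bytes(32)                               # HMAC field zeroed before computing the HMAC
    header += struct.pack('I', len(payload))          # payload size
    # the server computes the HMAC over the whole frame with a zeroed HMAC field
    digest = hmac.new(hmac_key, msg=header + payload, digestmod='sha256').digest()
    return header[:26] + digest + header[58:] + payload

frame = build_d4_frame(1, 1, uuid.uuid4(), b'example payload', b'private key to change')
~~~~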


@ -2,11 +2,17 @@
# -*-coding:UTF-8 -*
import os
import re
import sys
import uuid
import time
import json
import redis
import flask
import datetime
import ipaddress
import subprocess
from flask import Flask, render_template, jsonify, request, Blueprint, redirect, url_for
@ -14,6 +20,22 @@ baseUrl = ''
if baseUrl != '':
baseUrl = '/'+baseUrl
host_redis_stream = "localhost"
port_redis_stream = 6379
default_max_entries_by_stream = 10000
analyzer_list_max_default_size = 10000
default_analyzer_max_line_len = 3000
json_type_description_path = os.path.join(os.environ['D4_HOME'], 'web/static/json/type.json')
redis_server_stream = redis.StrictRedis(
host=host_redis_stream,
port=port_redis_stream,
db=0,
decode_responses=True)
host_redis_metadata = "localhost"
port_redis_metadata= 6380
@ -23,13 +45,89 @@ redis_server_metadata = redis.StrictRedis(
db=0,
decode_responses=True)
redis_server_analyzer = redis.StrictRedis(
host=host_redis_metadata,
port=port_redis_metadata,
db=2,
decode_responses=True)
with open(json_type_description_path, 'r') as f:
json_type = json.loads(f.read())
json_type_description = {}
for type_info in json_type:
json_type_description[type_info['type']] = type_info
app = Flask(__name__, static_url_path=baseUrl+'/static/')
app.config['MAX_CONTENT_LENGTH'] = 900 * 1024 * 1024
# ========== FUNCTIONS ============
def is_valid_uuid_v4(header_uuid):
try:
header_uuid=header_uuid.replace('-', '')
uuid_test = uuid.UUID(hex=header_uuid, version=4)
return uuid_test.hex == header_uuid
except:
return False
def is_valid_ip(ip):
try:
ipaddress.ip_address(ip)
return True
except ValueError:
return False
def is_valid_network(ip_network):
try:
ipaddress.ip_network(ip_network)
return True
except ValueError:
return False
# server_management input handler
def get_server_management_input_handler_value(value):
if value is not None:
if value !="0":
try:
value=int(value)
except:
value=0
else:
value=0
return value
def get_json_type_description():
return json_type_description
def get_whois_ouput(ip):
if is_valid_ip(ip):
process = subprocess.run(["whois", ip], stdout=subprocess.PIPE)
return re.sub(r"#.*\n?", '', process.stdout.decode()).lstrip('\n').rstrip('\n')
else:
return ''
def get_substract_date_range(num_day, date_from=None):
if date_from is None:
date_from = datetime.datetime.now()
else:
date_from = datetime.date(int(date_from[0:4]), int(date_from[4:6]), int(date_from[6:8]))
l_date = []
for i in range(num_day):
date = date_from - datetime.timedelta(days=i)
l_date.append( date.strftime('%Y%m%d') )
return list(reversed(l_date))
# ========== ERRORS ============
@app.errorhandler(404)
def page_not_found(e):
return render_template('404.html'), 404
# ========== ROUTES ============ # ========== ROUTES ============
@app.route('/')
def index():
-return render_template("index.html")
+date = datetime.datetime.now().strftime("%Y/%m/%d")
return render_template("index.html", date=date)
@app.route('/_json_daily_uuid_stats')
def _json_daily_uuid_stats():
@ -42,5 +140,632 @@ def _json_daily_uuid_stats():
return jsonify(data_daily_uuid)
@app.route('/_json_daily_type_stats')
def _json_daily_type_stats():
date = datetime.datetime.now().strftime("%Y%m%d")
daily_uuid = redis_server_metadata.zrange('daily_type:{}'.format(date), 0, -1, withscores=True)
json_type_description = get_json_type_description()
data_daily_uuid = []
for result in daily_uuid:
try:
type_description = json_type_description[int(result[0])]['description']
except:
type_description = 'Please update your web server'
data_daily_uuid.append({"key": '{}: {}'.format(result[0], type_description), "value": int(result[1])})
return jsonify(data_daily_uuid)
@app.route('/sensors_status')
def sensors_status():
active_connection_filter = request.args.get('active_connection_filter')
if active_connection_filter is None:
active_connection_filter = False
else:
if active_connection_filter=='True':
active_connection_filter = True
else:
active_connection_filter = False
date = datetime.datetime.now().strftime("%Y%m%d")
if not active_connection_filter:
daily_uuid = redis_server_metadata.zrange('daily_uuid:{}'.format(date), 0, -1)
else:
daily_uuid = redis_server_stream.smembers('active_connection')
status_daily_uuid = []
for result in daily_uuid:
first_seen = redis_server_metadata.hget('metadata_uuid:{}'.format(result), 'first_seen')
first_seen_gmt = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(int(first_seen)))
last_seen = redis_server_metadata.hget('metadata_uuid:{}'.format(result), 'last_seen')
last_seen_gmt = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(int(last_seen)))
if redis_server_metadata.sismember('blacklist_ip_by_uuid', result):
Error = "All IP using this UUID are Blacklisted"
elif redis_server_metadata.sismember('blacklist_uuid', result):
Error = "Blacklisted UUID"
else:
Error = redis_server_metadata.hget('metadata_uuid:{}'.format(result), 'Error')
if redis_server_stream.sismember('active_connection', result):
active_connection = True
else:
active_connection = False
if first_seen is not None and last_seen is not None:
status_daily_uuid.append({"uuid": result,"first_seen": first_seen, "last_seen": last_seen,
"active_connection": active_connection,
"first_seen_gmt": first_seen_gmt, "last_seen_gmt": last_seen_gmt, "Error": Error})
return render_template("sensors_status.html", status_daily_uuid=status_daily_uuid,
active_connection_filter=active_connection_filter)
@app.route('/show_active_uuid')
def show_active_uuid():
#swap switch value
active_connection_filter = request.args.get('show_active_connection')
if active_connection_filter is None:
active_connection_filter = True
else:
if active_connection_filter=='True':
active_connection_filter = False
else:
active_connection_filter = True
return redirect(url_for('sensors_status', active_connection_filter=active_connection_filter))
@app.route('/server_management')
def server_management():
blacklisted_ip = request.args.get('blacklisted_ip')
unblacklisted_ip = request.args.get('unblacklisted_ip')
blacklisted_uuid = request.args.get('blacklisted_uuid')
unblacklisted_uuid = request.args.get('unblacklisted_uuid')
blacklisted_ip = get_server_management_input_handler_value(blacklisted_ip)
unblacklisted_ip = get_server_management_input_handler_value(unblacklisted_ip)
blacklisted_uuid = get_server_management_input_handler_value(blacklisted_uuid)
unblacklisted_uuid = get_server_management_input_handler_value(unblacklisted_uuid)
json_type_description = get_json_type_description()
list_accepted_types = []
list_analyzer_types = []
for type in redis_server_metadata.smembers('server:accepted_type'):
try:
description = json_type_description[int(type)]['description']
except:
description = 'Please update your web server'
list_analyzer_uuid = []
for analyzer_uuid in redis_server_metadata.smembers('analyzer:{}'.format(type)):
size_limit = redis_server_metadata.hget('analyzer:{}'.format(analyzer_uuid), 'max_size')
if size_limit is None:
size_limit = analyzer_list_max_default_size
last_updated = redis_server_metadata.hget('analyzer:{}'.format(analyzer_uuid), 'last_updated')
if last_updated is None:
last_updated = 'Never'
else:
last_updated = datetime.datetime.fromtimestamp(float(last_updated)).strftime('%Y-%m-%d %H:%M:%S')
description_analyzer = redis_server_metadata.hget('analyzer:{}'.format(analyzer_uuid), 'description')
if description_analyzer is None:
description_analyzer = ''
len_queue = redis_server_analyzer.llen('analyzer:{}:{}'.format(type, analyzer_uuid))
if len_queue is None:
len_queue = 0
list_analyzer_uuid.append({'uuid': analyzer_uuid, 'description': description_analyzer, 'size_limit': size_limit,'last_updated': last_updated, 'length': len_queue})
list_accepted_types.append({"id": int(type), "description": description, 'list_analyzer_uuid': list_analyzer_uuid})
list_accepted_extended_types = []
for extended_type in redis_server_metadata.smembers('server:accepted_extended_type'):
list_analyzer_uuid = []
for analyzer_uuid in redis_server_metadata.smembers('analyzer:254:{}'.format(extended_type)):
size_limit = redis_server_metadata.hget('analyzer:{}'.format(analyzer_uuid), 'max_size')
if size_limit is None:
size_limit = analyzer_list_max_default_size
last_updated = redis_server_metadata.hget('analyzer:{}'.format(analyzer_uuid), 'last_updated')
if last_updated is None:
last_updated = 'Never'
else:
last_updated = datetime.datetime.fromtimestamp(float(last_updated)).strftime('%Y-%m-%d %H:%M:%S')
description_analyzer = redis_server_metadata.hget('analyzer:{}'.format(analyzer_uuid), 'description')
if description_analyzer is None:
description_analyzer = ''
len_queue = redis_server_analyzer.llen('analyzer:{}:{}'.format(extended_type, analyzer_uuid))
if len_queue is None:
len_queue = 0
list_analyzer_uuid.append({'uuid': analyzer_uuid, 'description': description_analyzer, 'size_limit': size_limit,'last_updated': last_updated, 'length': len_queue})
list_accepted_extended_types.append({"name": extended_type, 'list_analyzer_uuid': list_analyzer_uuid})
return render_template("server_management.html", list_accepted_types=list_accepted_types, list_accepted_extended_types=list_accepted_extended_types,
default_analyzer_max_line_len=default_analyzer_max_line_len,
blacklisted_ip=blacklisted_ip, unblacklisted_ip=unblacklisted_ip,
blacklisted_uuid=blacklisted_uuid, unblacklisted_uuid=unblacklisted_uuid)
@app.route('/uuid_management')
def uuid_management():
uuid_sensor = request.args.get('uuid')
if is_valid_uuid_v4(uuid_sensor):
first_seen = redis_server_metadata.hget('metadata_uuid:{}'.format(uuid_sensor), 'first_seen')
first_seen_gmt = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(int(first_seen)))
last_seen = redis_server_metadata.hget('metadata_uuid:{}'.format(uuid_sensor), 'last_seen')
last_seen_gmt = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(int(last_seen)))
Error = redis_server_metadata.hget('metadata_uuid:{}'.format(uuid_sensor), 'Error')
if redis_server_stream.exists('temp_blacklist_uuid:{}'.format(uuid_sensor)):
temp_blacklist_uuid = True
else:
temp_blacklist_uuid = False
if redis_server_metadata.sismember('blacklist_uuid', uuid_sensor):
blacklisted_uuid = True
Error = "Blacklisted UUID"
else:
blacklisted_uuid = False
if redis_server_metadata.sismember('blacklist_ip_by_uuid', uuid_sensor):
blacklisted_ip_by_uuid = True
Error = "All IP using this UUID are Blacklisted"
else:
blacklisted_ip_by_uuid = False
data_uuid= {"first_seen": first_seen, "last_seen": last_seen,
"temp_blacklist_uuid": temp_blacklist_uuid,
"blacklisted_uuid": blacklisted_uuid, "blacklisted_ip_by_uuid": blacklisted_ip_by_uuid,
"first_seen_gmt": first_seen_gmt, "last_seen_gmt": last_seen_gmt, "Error": Error}
if redis_server_stream.sismember('active_connection', uuid_sensor):
active_connection = True
else:
active_connection = False
max_uuid_stream = redis_server_metadata.hget('stream_max_size_by_uuid', uuid_sensor)
if max_uuid_stream is not None:
max_uuid_stream = int(max_uuid_stream)
else:
max_uuid_stream = default_max_entries_by_stream
uuid_key = redis_server_metadata.hget('metadata_uuid:{}'.format(uuid_sensor), 'hmac_key')
if uuid_key is None:
uuid_key = redis_server_metadata.get('server:hmac_default_key')
uuid_all_type_list = []
uuid_all_type = redis_server_metadata.smembers('all_types_by_uuid:{}'.format(uuid_sensor))
for type in uuid_all_type:
type_first_seen = redis_server_metadata.hget('metadata_type_by_uuid:{}:{}'.format(uuid_sensor, type), 'first_seen')
type_last_seen = redis_server_metadata.hget('metadata_type_by_uuid:{}:{}'.format(uuid_sensor, type), 'last_seen')
if type_first_seen:
type_first_seen = datetime.datetime.fromtimestamp(float(type_first_seen)).strftime('%Y-%m-%d %H:%M:%S')
if type_last_seen:
type_last_seen = datetime.datetime.fromtimestamp(float(type_last_seen)).strftime('%Y-%m-%d %H:%M:%S')
uuid_all_type_list.append({'type': type, 'first_seen':type_first_seen, 'last_seen': type_last_seen})
list_ip = redis_server_metadata.lrange('list_uuid_ip:{}'.format(uuid_sensor), 0, -1)
all_ip = []
for elem in list_ip:
ip, d_time = elem.split('-')
all_ip.append({'ip': ip,'datetime': '{}/{}/{} - {}:{}.{}'.format(d_time[0:4], d_time[5:6], d_time[6:8], d_time[8:10], d_time[10:12], d_time[12:14])})
return render_template("uuid_management.html", uuid_sensor=uuid_sensor, active_connection=active_connection,
uuid_key=uuid_key, data_uuid=data_uuid, uuid_all_type=uuid_all_type_list,
max_uuid_stream=max_uuid_stream, all_ip=all_ip)
else:
return 'Invalid uuid'
@app.route('/blacklisted_ip')
def blacklisted_ip():
blacklisted_ip = request.args.get('blacklisted_ip')
unblacklisted_ip = request.args.get('unblacklisted_ip')
try:
page = int(request.args.get('page'))
except:
page = 1
if page <= 0:
page = 1
nb_page_max = redis_server_metadata.scard('blacklist_ip')/(1000*2)
if isinstance(nb_page_max, float):
nb_page_max = int(nb_page_max)+1
if page > nb_page_max:
page = nb_page_max
start = 1000*(page -1)
stop = 1000*page
list_blacklisted_ip = list(redis_server_metadata.smembers('blacklist_ip'))
list_blacklisted_ip_1 = list_blacklisted_ip[start:stop]
list_blacklisted_ip_2 = list_blacklisted_ip[stop:stop+1000]
return render_template("blacklisted_ip.html", list_blacklisted_ip_1=list_blacklisted_ip_1, list_blacklisted_ip_2=list_blacklisted_ip_2,
page=page, nb_page_max=nb_page_max,
unblacklisted_ip=unblacklisted_ip, blacklisted_ip=blacklisted_ip)
@app.route('/blacklisted_uuid')
def blacklisted_uuid():
blacklisted_uuid = request.args.get('blacklisted_uuid')
unblacklisted_uuid = request.args.get('unblacklisted_uuid')
try:
page = int(request.args.get('page'))
except:
page = 1
if page <= 0:
page = 1
nb_page_max = redis_server_metadata.scard('blacklist_uuid')/(1000*2)
if isinstance(nb_page_max, float):
nb_page_max = int(nb_page_max)+1
if page > nb_page_max:
page = nb_page_max
start = 1000*(page -1)
stop = 1000*page
list_blacklisted_uuid = list(redis_server_metadata.smembers('blacklist_uuid'))
list_blacklisted_uuid_1 = list_blacklisted_uuid[start:stop]
list_blacklisted_uuid_2 = list_blacklisted_uuid[stop:stop+1000]
return render_template("blacklisted_uuid.html", list_blacklisted_uuid_1=list_blacklisted_uuid_1, list_blacklisted_uuid_2=list_blacklisted_uuid_2,
page=page, nb_page_max=nb_page_max,
unblacklisted_uuid=unblacklisted_uuid, blacklisted_uuid=blacklisted_uuid)
@app.route('/uuid_change_stream_max_size')
def uuid_change_stream_max_size():
uuid_sensor = request.args.get('uuid')
user = request.args.get('redirect')
max_uuid_stream = request.args.get('max_uuid_stream')
if is_valid_uuid_v4(uuid_sensor):
try:
max_uuid_stream = int(max_uuid_stream)
if max_uuid_stream < 0:
return 'stream max size, Invalid Integer'
except:
return 'stream max size, Invalid Integer'
redis_server_metadata.hset('stream_max_size_by_uuid', uuid_sensor, max_uuid_stream)
if user:
return redirect(url_for('uuid_management', uuid=uuid_sensor))
else:
return 'Invalid uuid'
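# Register a new analyzer queue: the analyzer UUID is added to the Redis set of its
# type (or, for extended type 254, to the set keyed by type and metatype_name) and an
# optional description is stored in the analyzer metadata hash.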
# # TODO: check that the analyzer uuid does not already exist
@app.route('/add_new_analyzer')
def add_new_analyzer():
type = request.args.get('type')
user = request.args.get('redirect')
metatype_name = request.args.get('metatype_name')
analyzer_description = request.args.get('analyzer_description')
analyzer_uuid = request.args.get('analyzer_uuid')
if is_valid_uuid_v4(analyzer_uuid):
try:
type = int(type)
if type < 0:
return 'type, Invalid Integer'
except:
return 'type, Invalid Integer'
if type == 254:
# # TODO: check metatype_name
redis_server_metadata.sadd('analyzer:{}:{}'.format(type, metatype_name), analyzer_uuid)
else:
redis_server_metadata.sadd('analyzer:{}'.format(type), analyzer_uuid)
if redis_server_metadata.exists('analyzer:{}:{}'.format(type, metatype_name)) or redis_server_metadata.exists('analyzer:{}'.format(type)):
redis_server_metadata.hset('analyzer:{}'.format(analyzer_uuid), 'description', analyzer_description)
if user:
return redirect(url_for('server_management'))
else:
return 'Invalid uuid'
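# Drop all queued entries of an analyzer by deleting its list in the analyzer Redis database.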
@app.route('/empty_analyzer_queue')
def empty_analyzer_queue():
analyzer_uuid = request.args.get('analyzer_uuid')
type = request.args.get('type')
metatype_name = request.args.get('metatype_name')
user = request.args.get('redirect')
if is_valid_uuid_v4(analyzer_uuid):
try:
type = int(type)
if type < 0:
return 'type, Invalid Integer'
except:
return 'type, Invalid Integer'
if type == 254:
redis_server_analyzer.delete('analyzer:{}:{}'.format(metatype_name, analyzer_uuid))
else:
redis_server_analyzer.delete('analyzer:{}:{}'.format(type, analyzer_uuid))
if user:
return redirect(url_for('server_management'))
else:
return 'Invalid uuid'
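# Unregister an analyzer: remove it from its type set, delete its queue and its metadata hash.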
@app.route('/remove_analyzer')
def remove_analyzer():
analyzer_uuid = request.args.get('analyzer_uuid')
type = request.args.get('type')
metatype_name = request.args.get('metatype_name')
user = request.args.get('redirect')
if is_valid_uuid_v4(analyzer_uuid):
try:
type = int(type)
if type < 0:
return 'type, Invalid Integer'
except:
return 'type, Invalid Integer'
if type == 254:
redis_server_metadata.srem('analyzer:{}:{}'.format(type, metatype_name), analyzer_uuid)
redis_server_analyzer.delete('analyzer:{}:{}'.format(metatype_name, analyzer_uuid))
else:
redis_server_metadata.srem('analyzer:{}'.format(type), analyzer_uuid)
redis_server_analyzer.delete('analyzer:{}:{}'.format(type, analyzer_uuid))
redis_server_metadata.delete('analyzer:{}'.format(analyzer_uuid))
if user:
return redirect(url_for('server_management'))
else:
return 'Invalid uuid'
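# Store the analyzer's maximum queue size in its metadata hash (presumably used by the
# server workers to cap the analyzer queue).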
@app.route('/analyzer_change_max_size')
def analyzer_change_max_size():
analyzer_uuid = request.args.get('analyzer_uuid')
user = request.args.get('redirect')
max_size_analyzer = request.args.get('max_size_analyzer')
if is_valid_uuid_v4(analyzer_uuid):
try:
max_size_analyzer = int(max_size_analyzer)
if max_size_analyzer < 0:
return 'analyzer max size, Invalid Integer'
except:
return 'analyzer max size, Invalid Integer'
redis_server_metadata.hset('analyzer:{}'.format(analyzer_uuid), 'max_size', max_size_analyzer)
if user:
return redirect(url_for('server_management'))
else:
return 'Invalid uuid'
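# Flag a connected sensor for disconnection; the receiving server is expected to poll
# the 'server:sensor_to_kick' set and close the matching connection.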
@app.route('/kick_uuid')
def kick_uuid():
uuid_sensor = request.args.get('uuid')
if is_valid_uuid_v4(uuid_sensor):
redis_server_stream.sadd('server:sensor_to_kick', uuid_sensor)
return redirect(url_for('uuid_management', uuid=uuid_sensor))
else:
return 'Invalid uuid'
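# Blacklist a sensor UUID. redirect=0 returns to the server management page with a
# status flag (1: added, 2: already blacklisted, 0: invalid UUID); redirect=1 returns
# to the sensor page.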
@app.route('/blacklist_uuid')
def blacklist_uuid():
uuid_sensor = request.args.get('uuid')
user = request.args.get('redirect')
if is_valid_uuid_v4(uuid_sensor):
res = redis_server_metadata.sadd('blacklist_uuid', uuid_sensor)
if user=="0":
if res==0:
return redirect(url_for('server_management', blacklisted_uuid=2))
else:
return redirect(url_for('server_management', blacklisted_uuid=1))
elif user=="1":
return redirect(url_for('uuid_management', uuid=uuid_sensor))
else:
return "404"
else:
if user=="0":
return redirect(url_for('server_management', blacklisted_uuid=0))
return 'Invalid uuid'
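# Remove a sensor UUID from the blacklist. An optional 'page' parameter returns to the
# paginated blacklist view; otherwise 'redirect' behaves as in /blacklist_uuid.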
@app.route('/unblacklist_uuid')
def unblacklist_uuid():
uuid_sensor = request.args.get('uuid')
user = request.args.get('redirect')
page = request.args.get('page')
if is_valid_uuid_v4(uuid_sensor):
res = redis_server_metadata.srem('blacklist_uuid', uuid_sensor)
if page:
return redirect(url_for('blacklisted_uuid', page=page))
if user=="0":
if res==0:
return redirect(url_for('server_management', unblacklisted_uuid=2))
else:
return redirect(url_for('server_management', unblacklisted_uuid=1))
elif user=="1":
return redirect(url_for('uuid_management', uuid=uuid_sensor))
else:
return "404"
else:
if user=="0":
return redirect(url_for('server_management', unblacklisted_uuid=0))
return 'Invalid uuid'
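# Blacklist a single IP address or a whole network: for a CIDR range, every address of
# the network is added to the 'blacklist_ip' set individually.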
@app.route('/blacklist_ip')
def blacklist_ip():
ip = request.args.get('ip')
user = request.args.get('redirect')
if is_valid_ip(ip):
res = redis_server_metadata.sadd('blacklist_ip', ip)
if user:
if res==0:
return redirect(url_for('server_management', blacklisted_ip=2))
else:
return redirect(url_for('server_management', blacklisted_ip=1))
elif is_valid_network(ip):
for addr in ipaddress.ip_network(ip):
res = redis_server_metadata.sadd('blacklist_ip', str(addr))
if user:
if res==0:
return redirect(url_for('server_management', blacklisted_ip=2))
else:
return redirect(url_for('server_management', blacklisted_ip=1))
else:
if user:
return redirect(url_for('server_management', blacklisted_ip=0))
return 'Invalid ip'
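# Remove a single IP address or every address of a CIDR range from the IP blacklist.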
@app.route('/unblacklist_ip')
def unblacklist_ip():
ip = request.args.get('ip')
user = request.args.get('redirect')
page = request.args.get('page')
if is_valid_ip(ip):
res = redis_server_metadata.srem('blacklist_ip', ip)
if page:
return redirect(url_for('blacklisted_ip', page=page))
if user:
if res==0:
return redirect(url_for('server_management', unblacklisted_ip=2))
else:
return redirect(url_for('server_management', unblacklisted_ip=1))
elif is_valid_network(ip):
for addr in ipaddress.ip_network(ip):
res = redis_server_metadata.srem('blacklist_ip', str(addr))
if user:
if res==0:
return redirect(url_for('server_management', unblacklisted_ip=2))
else:
return redirect(url_for('server_management', unblacklisted_ip=1))
else:
if user:
return redirect(url_for('server_management', unblacklisted_ip=0))
return 'Invalid ip'
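# Flag a sensor so that the IP addresses it connects from get blacklisted
# ('blacklist_ip_by_uuid' set, expected to be enforced by the receiving server).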
@app.route('/blacklist_ip_by_uuid')
def blacklist_ip_by_uuid():
uuid_sensor = request.args.get('uuid')
user = request.args.get('redirect')
if is_valid_uuid_v4(uuid_sensor):
redis_server_metadata.sadd('blacklist_ip_by_uuid', uuid_sensor)
if user:
return redirect(url_for('uuid_management', uuid=uuid_sensor))
else:
return 'Invalid uuid'
@app.route('/unblacklist_ip_by_uuid')
def unblacklist_ip_by_uuid():
uuid_sensor = request.args.get('uuid')
user = request.args.get('redirect')
if is_valid_uuid_v4(uuid_sensor):
redis_server_metadata.srem('blacklist_ip_by_uuid', uuid_sensor)
if user:
return redirect(url_for('uuid_management', uuid=uuid_sensor))
else:
return 'Invalid uuid'
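# Add a D4 type to the set of types accepted by the server; type 254 additionally
# registers the given extended type name.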
@app.route('/add_accepted_type')
def add_accepted_type():
type = request.args.get('type')
extended_type_name = request.args.get('extended_type_name')
user = request.args.get('redirect')
json_type_description = get_json_type_description()
try:
type = int(type)
except:
return 'Invalid type'
if json_type_description[int(type)]:
redis_server_metadata.sadd('server:accepted_type', type)
if type == 254:
redis_server_metadata.sadd('server:accepted_extended_type', extended_type_name)
if user:
return redirect(url_for('server_management'))
else:
return 'Invalid type'
@app.route('/remove_accepted_type')
def remove_accepted_type():
type = request.args.get('type')
user = request.args.get('redirect')
json_type_description = get_json_type_description()
try:
type = int(type)
except:
return 'Invalid type'
if json_type_description[type]:
redis_server_metadata.srem('server:accepted_type', type)
if user:
return redirect(url_for('server_management'))
else:
return 'Invalid type'
@app.route('/remove_accepted_extended_type')
def remove_accepted_extended_type():
type_name = request.args.get('type_name')
redis_server_metadata.srem('server:accepted_extended_type', type_name)
return redirect(url_for('server_management'))
# demo function
@app.route('/delete_data')
def delete_data():
date = datetime.datetime.now().strftime("%Y%m%d")
redis_server_metadata.delete('daily_type:{}'.format(date))
redis_server_metadata.delete('daily_uuid:{}'.format(date))
return render_template("index.html")
# demo function
@app.route('/set_uuid_hmac_key')
def set_uuid_hmac_key():
uuid_sensor = request.args.get('uuid')
user = request.args.get('redirect')
key = request.args.get('key')
redis_server_metadata.hset('metadata_uuid:{}'.format(uuid_sensor), 'hmac_key', key)
if user:
return redirect(url_for('uuid_management', uuid=uuid_sensor))
# demo function
@app.route('/whois_data')
def whois_data():
ip = request.args.get('ip')
if is_valid_ip(ip):
return jsonify(get_whois_ouput(ip))
else:
return 'Invalid IP'
@app.route('/generate_uuid')
def generate_uuid():
# cast to str so the UUID is JSON serializable with any encoder
new_uuid = str(uuid.uuid4())
return jsonify({'uuid': new_uuid})
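# Return up to 10 queued entries of an analyzer as a JSON string, each line truncated
# to max_line_len characters.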
@app.route('/get_analyser_sample')
def get_analyser_sample():
type = request.args.get('type')
analyzer_uuid = request.args.get('analyzer_uuid')
max_line_len = request.args.get('max_line_len')
# get max_line_len
if max_line_len is not None and max_line_len != 'undefined':
try:
max_line_len = int(max_line_len)
except:
max_line_len = default_analyzer_max_line_len
if max_line_len < 1:
max_line_len = default_analyzer_max_line_len
else:
max_line_len = default_analyzer_max_line_len
if is_valid_uuid_v4(analyzer_uuid):
list_queue = redis_server_analyzer.lrange('analyzer:{}:{}'.format(type, analyzer_uuid), 0, 10)
list_queue_res = []
for res in list_queue:
#limit line len
if len(res) > max_line_len:
res = '{} [...]'.format(res[:max_line_len])
list_queue_res.append('{}\n'.format(res))
return jsonify(''.join(list_queue_res))
else:
return jsonify('Incorrect UUID')
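# Build the last 7 days of per-type record counts for a sensor, as a list of
# {'date': ..., <type>: count, ...} rows consumed by the stacked bar chart on the
# sensor page.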
@app.route('/get_uuid_type_history_json')
def get_uuid_type_history_json():
uuid_sensor = request.args.get('uuid_sensor')
if is_valid_uuid_v4(uuid_sensor):
num_day_type = 7
date_range = get_substract_date_range(num_day_type)
type_history = []
range_decoder = []
all_type = set()
for date in date_range:
type_day = redis_server_metadata.zrange('stat_uuid_type:{}:{}'.format(date, uuid_sensor), 0, -1, withscores=True)
for type in type_day:
all_type.add(type[0])
range_decoder.append((date, type_day))
default_dict_type = {}
for type in all_type:
default_dict_type[type] = 0
for row in range_decoder:
day_type = default_dict_type.copy()
date = row[0]
day_type['date']= date[0:4] + '-' + date[4:6] + '-' + date[6:8]
for type in row[1]:
day_type[type[0]]= type[1]
type_history.append(day_type)
return jsonify(type_history)
else:
return jsonify('Incorrect UUID')
if __name__ == "__main__":
app.run(host='0.0.0.0', port=7000, threaded=True)


View File

@ -0,0 +1,70 @@
<!DOCTYPE html>
<html>
<head>
<title>D4-Project</title>
<link rel="icon" href="{{ url_for('static', filename='img/d4-logo.png')}}">
<!-- Core CSS -->
<link href="{{ url_for('static', filename='css/bootstrap.min.css') }}" rel="stylesheet">
<link href="{{ url_for('static', filename='font-awesome/css/font-awesome.css') }}" rel="stylesheet">
<link href="{{ url_for('static', filename='css/dataTables.bootstrap.min.css') }}" rel="stylesheet">
<!-- JS -->
<script src="{{ url_for('static', filename='js/jquery.js')}}"></script>
<script src="{{ url_for('static', filename='js/jquery.dataTables.min.js')}}"></script>
<script src="{{ url_for('static', filename='js/dataTables.bootstrap.min.js')}}"></script>
<style>
</style>
</head>
<body>
<nav class="navbar navbar-expand-sm navbar-dark bg-dark">
<a class="navbar-brand" href="{{ url_for('index') }}">
<img src="{{ url_for('static', filename='img/d4-logo.png')}}" alt="D4 Project" style="width:80px;">
</a>
<ul class="navbar-nav">
<li class="nav-item">
<a class="nav-link mr-3" href="{{ url_for('index') }}">Home <span class="sr-only">(current)</span></a>
</li>
<li class="nav-item" mr-3>
<a class="nav-link mr-3" href="{{ url_for('sensors_status') }}">Sensors Status</a>
</li>
<li class="nav-item mr-3">
<a class="nav-link" href="{{ url_for('server_management') }}" tabindex="-1" aria-disabled="true">Server Management</a>
</li>
</ul>
</nav>
<div class="d-flex justify-content-center">
<pre>
__ __ ______ __ __
/ | / | / \ / | / |
$$ | $$ |/$$$$$$ |$$ | $$ |
$$ |__$$ |$$$ \$$ |$$ |__$$ |
$$ $$ |$$$$ $$ |$$ $$ |
$$$$$$$$ |$$ $$ $$ |$$$$$$$$ |
$$ |$$ \$$$$ | $$ |
$$ |$$ $$$/ $$ |
_______ __ __ $$/ $$$$$$/ $$/ __
/ \ / | / | / |
$$$$$$$ |$$ | $$ | ______ ______ ______ __ ______ _______ _$$ |_
$$ | $$ |$$ |__$$ | / \ / \ / \ / | / \ / |/ $$ |
$$ | $$ |$$ $$ | /$$$$$$ |/$$$$$$ |/$$$$$$ | $$/ /$$$$$$ |/$$$$$$$/ $$$$$$/
$$ | $$ |$$$$$$$$ | $$ | $$ |$$ | $$/ $$ | $$ | / |$$ $$ |$$ | $$ | __
$$ |__$$ | $$ | $$ |__$$ |$$ | $$ \__$$ | $$ |$$$$$$$$/ $$ \_____ $$ |/ |
$$ $$/ $$ | $$ $$/ $$ | $$ $$/ $$ |$$ |$$ | $$ $$/
$$$$$$$/ $$/ $$$$$$$/ $$/ $$$$$$/__ $$ | $$$$$$$/ $$$$$$$/ $$$$/
$$ | / \__$$ |
$$ | $$ $$/
$$/ $$$$$$/
</pre>
</div>
{% include 'navfooter.html' %}
</body>

View File

@ -0,0 +1,198 @@
<!DOCTYPE html>
<html>
<head>
<title>D4-Project</title>
<link rel="icon" href="{{ url_for('static', filename='img/d4-logo.png')}}">
<!-- Core CSS -->
<link href="{{ url_for('static', filename='css/bootstrap.min.css') }}" rel="stylesheet">
<link href="{{ url_for('static', filename='font-awesome/css/font-awesome.css') }}" rel="stylesheet">
<link href="{{ url_for('static', filename='css/dataTables.bootstrap.min.css') }}" rel="stylesheet">
<!-- JS -->
<script src="{{ url_for('static', filename='js/jquery.js')}}"></script>
<script src="{{ url_for('static', filename='js/jquery.dataTables.min.js')}}"></script>
<script src="{{ url_for('static', filename='js/dataTables.bootstrap.min.js')}}"></script>
<style>
</style>
</head>
<body>
<nav class="navbar navbar-expand-sm navbar-dark bg-dark">
<a class="navbar-brand" href="{{ url_for('index') }}">
<img src="{{ url_for('static', filename='img/d4-logo.png')}}" alt="D4 Project" style="width:80px;">
</a>
<ul class="navbar-nav">
<li class="nav-item">
<a class="nav-link mr-3" href="{{ url_for('index') }}">Home <span class="sr-only">(current)</span></a>
</li>
<li class="nav-item" mr-3>
<a class="nav-link mr-3" href="{{ url_for('sensors_status') }}">Sensors Status</a>
</li>
<li class="nav-item mr-3">
<a class="nav-link" href="{{ url_for('server_management') }}" tabindex="-1" aria-disabled="true">Server Management</a>
</li>
</ul>
</nav>
<div class="card-deck justify-content-center ml-0 mr-0">
<div class="card border-dark mt-3 ml-4 mr-4">
<div class="card-header bg-dark text-white">
Blacklisted IP
</div>
<div class="card-body text-dark">
<div class="row">
<div class="col-sm-5">
<table class="table table-striped table-bordered table-hover" id="myTable_1">
<thead class="thead-dark">
<tr>
<th style="max-width: 800px;">IP</th>
<th style="max-width: 800px;">Unblacklist IP</th>
</tr>
</thead>
<tbody>
{% for ip in list_blacklisted_ip_1 %}
<tr>
<td>{{ip}}</td>
<td>
<a href="{{ url_for('unblacklist_ip') }}?page={{page}}&ip={{ip}}">
<button type="button" class="btn btn-outline-danger">UnBlacklist IP</button>
</a>
</td>
</tr>
{% endfor %}
</tbody>
</table>
</div>
<div class="col-sm-2">
<div class="card text-center border-danger" style="max-width: 20rem;">
<div class="card-body text-danger">
<h5 class="card-title">Blacklist IP</h5>
<input type="text" class="form-control {%if blacklisted_ip is not none %}{%if blacklisted_ip==1 %}is-valid{% else %}is-invalid{%endif%}{%endif%}" id="blacklist_ip_input" placeholder="IP Address">
<div class="invalid-feedback">
{%if blacklisted_ip==2 %}
This IP is already blacklisted
{% else %}
Incorrect IP address
{% endif %}
</div>
<div class="valid-feedback">
IP Blacklisted
</div>
<button type="button" class="btn btn-danger mt-2" onclick="window.location.href ='{{ url_for('blacklist_ip') }}?redirect=0&ip='+$('#blacklist_ip_input').val();">Blacklist IP</button>
</div>
</div>
<div class="card text-center border-success mt-4" style="max-width: 20rem;">
<div class="card-body">
<h5 class="card-title">Unblacklist IP</h5>
<input type="text" class="form-control {%if unblacklisted_ip is not none %}{%if unblacklisted_ip==1 %}is-valid{% else %}is-invalid{%endif%}{%endif%}" id="unblacklist_ip_input" placeholder="IP Address">
<div class="invalid-feedback">
{%if unblacklisted_ip==2 %}
This IP is not blacklisted
{% else %}
Incorrect IP address
{% endif %}
</div>
<div class="valid-feedback">
IP Unblacklisted
</div>
<button type="button" class="btn btn-outline-secondary mt-2" onclick="window.location.href ='{{ url_for('unblacklist_ip') }}?redirect=0&ip='+$('#unblacklist_ip_input').val();">Unblacklist IP</button>
</div>
</div>
</div>
<div class="col-sm-5">
<table class="table table-striped table-bordered table-hover" id="myTable_2">
<thead class="thead-dark">
<tr>
<th style="max-width: 800px;">IP</th>
<th style="max-width: 800px;">Unblacklist IP</th>
</tr>
</thead>
<tbody>
{% for ip in list_blacklisted_ip_2 %}
<tr>
<td>{{ip}}</td>
<td>
<a href="{{ url_for('unblacklist_ip') }}?page={{page}}&ip={{ip}}">
<button type="button" class="btn btn-outline-danger">UnBlacklist IP</button>
</a>
</td>
</tr>
{% endfor %}
</tbody>
</table>
</div>
</div>
</div>
</div>
</div>
<div class="d-flex justify-content-center">
<nav class="mt-4" aria-label="...">
<ul class="pagination">
<li class="page-item {%if page==1%}disabled{%endif%}">
<a class="page-link" href="{{ url_for('blacklisted_ip') }}?page={{page-1}}">Previous</a>
</li>
{%if page>3%}
<li class="page-item"><a class="page-link" href="{{ url_for('blacklisted_ip') }}?page=1">1</a></li>
<li class="page-item disabled"><a class="page-link" aria-disabled="true" href="#">...</a></li>
<li class="page-item"><a class="page-link" href="{{ url_for('blacklisted_ip') }}?page={{page-1}}">{{page-1}}</a></li>
<li class="page-item active"><a class="page-link" href="{{ url_for('blacklisted_ip') }}?page={{page}}">{{page}}</a></li>
{%else%}
{%if page>2%}<li class="page-item"><a class="page-link" href="{{ url_for('blacklisted_ip') }}?page={{page-2}}">{{page-2}}</a></li>{%endif%}
{%if page>1%}<li class="page-item"><a class="page-link" href="{{ url_for('blacklisted_ip') }}?page={{page-1}}">{{page-1}}</a></li>{%endif%}
<li class="page-item active"><a class="page-link" href="{{ url_for('blacklisted_ip') }}?page={{page}}">{{page}}</a></li>
{%endif%}
{%if nb_page_max-page>3%}
<li class="page-item"><a class="page-link" href="{{ url_for('blacklisted_ip') }}?page={{page+1}}">{{page+1}}</a></li>
<li class="page-item disabled"><a class="page-link" aria-disabled="true" href="#">...</a></li>
<li class="page-item"><a class="page-link" href="{{ url_for('blacklisted_ip') }}?page={{nb_page_max}}">{{nb_page_max}}</a></li>
{%else%}
{%if nb_page_max-page>2%}<li class="page-item"><a class="page-link" href="{{ url_for('blacklisted_ip') }}?page={{nb_page_max-2}}">{{nb_page_max-2}}</a></li>{%endif%}
{%if nb_page_max-page>1%}<li class="page-item"><a class="page-link" href="{{ url_for('blacklisted_ip') }}?page={{nb_page_max-1}}">{{nb_page_max-1}}</a></li>{%endif%}
{%if nb_page_max-page>0%}<li class="page-item"><a class="page-link" href="{{ url_for('blacklisted_ip') }}?page={{nb_page_max}}">{{nb_page_max}}</a></li>{%endif%}
{%endif%}
<li class="page-item {%if page==nb_page_max%}disabled{%endif%}">
<a class="page-link" href="{{ url_for('blacklisted_ip') }}?page={{page+1}}" aria-disabled="true">Next</a>
</li>
</ul>
</nav>
</div>
{% include 'navfooter.html' %}
</body>
<script>
var table
$(document).ready(function(){
table = $('#myTable_1').DataTable(
{
/*"aLengthMenu": [[5, 10, 15, 20, -1], [5, 10, 15, 20, "All"]],
"iDisplayLength": 10,*/
"order": [[ 0, "asc" ]]
}
);
table = $('#myTable_2').DataTable(
{
/*"aLengthMenu": [[5, 10, 15, 20, -1], [5, 10, 15, 20, "All"]],
"iDisplayLength": 10,*/
"order": [[ 0, "asc" ]]
}
);
});
</script>

View File

@ -0,0 +1,198 @@
<!DOCTYPE html>
<html>
<head>
<title>D4-Project</title>
<link rel="icon" href="{{ url_for('static', filename='img/d4-logo.png')}}">
<!-- Core CSS -->
<link href="{{ url_for('static', filename='css/bootstrap.min.css') }}" rel="stylesheet">
<link href="{{ url_for('static', filename='font-awesome/css/font-awesome.css') }}" rel="stylesheet">
<link href="{{ url_for('static', filename='css/dataTables.bootstrap.min.css') }}" rel="stylesheet">
<!-- JS -->
<script src="{{ url_for('static', filename='js/jquery.js')}}"></script>
<script src="{{ url_for('static', filename='js/jquery.dataTables.min.js')}}"></script>
<script src="{{ url_for('static', filename='js/dataTables.bootstrap.min.js')}}"></script>
<style>
</style>
</head>
<body>
<nav class="navbar navbar-expand-sm navbar-dark bg-dark">
<a class="navbar-brand" href="{{ url_for('index') }}">
<img src="{{ url_for('static', filename='img/d4-logo.png')}}" alt="D4 Project" style="width:80px;">
</a>
<ul class="navbar-nav">
<li class="nav-item">
<a class="nav-link mr-3" href="{{ url_for('index') }}">Home <span class="sr-only">(current)</span></a>
</li>
<li class="nav-item" mr-3>
<a class="nav-link mr-3" href="{{ url_for('sensors_status') }}">Sensors Status</a>
</li>
<li class="nav-item mr-3">
<a class="nav-link" href="{{ url_for('server_management') }}" tabindex="-1" aria-disabled="true">Server Management</a>
</li>
</ul>
</nav>
<div class="card-deck justify-content-center ml-0 mr-0">
<div class="card border-dark mt-3 ml-4 mr-4">
<div class="card-header bg-dark text-white">
Blacklisted UUID
</div>
<div class="card-body text-dark">
<div class="row">
<div class="col-sm-5">
<table class="table table-striped table-bordered table-hover" id="myTable_1">
<thead class="thead-dark">
<tr>
<th style="max-width: 800px;">UUID</th>
<th style="max-width: 800px;">Unblacklist UUID</th>
</tr>
</thead>
<tbody>
{% for uuid in list_blacklisted_uuid_1 %}
<tr>
<td>{{uuid}}</td>
<td>
<a href="{{ url_for('unblacklist_uuid') }}?page={{page}}&uuid={{uuid}}">
<button type="button" class="btn btn-outline-danger">UnBlacklist UUID</button>
</a>
</td>
</tr>
{% endfor %}
</tbody>
</table>
</div>
<div class="col-sm-2">
<div class="card text-center border-danger" style="max-width: 20rem;">
<div class="card-body text-danger">
<h5 class="card-title">Blacklist UUID</h5>
<input type="text" class="form-control {%if blacklisted_uuid is not none %}{%if blacklisted_uuid==1 %}is-valid{% else %}is-invalid{%endif%}{%endif%}" id="blacklist_uuid_input" placeholder="UUID Address">
<div class="invalid-feedback">
{%if blacklisted_uuid==2 %}
This UUID is already blacklisted
{% else %}
Incorrect UUID
{% endif %}
</div>
<div class="valid-feedback">
UUID Blacklisted
</div>
<button type="button" class="btn btn-danger mt-2" onclick="window.location.href ='{{ url_for('blacklist_uuid') }}?redirect=0&uuid='+$('#blacklist_uuid_input').val();">Blacklist UUID</button>
</div>
</div>
<div class="card text-center border-success mt-4" style="max-width: 20rem;">
<div class="card-body">
<h5 class="card-title">Unblacklist UUID</h5>
<input type="text" class="form-control {%if unblacklisted_uuid is not none %}{%if unblacklisted_uuid==1 %}is-valid{% else %}is-invalid{%endif%}{%endif%}" id="unblacklist_uuid_input" placeholder="UUID Address">
<div class="invalid-feedback">
{%if unblacklisted_uuid==2 %}
This UUID is not blacklisted
{% else %}
Incorrect UUID
{% endif %}
</div>
<div class="valid-feedback">
UUID Unblacklisted
</div>
<button type="button" class="btn btn-outline-secondary mt-2" onclick="window.location.href ='{{ url_for('unblacklist_uuid') }}?redirect=0&uuid='+$('#unblacklist_uuid_input').val();">Unblacklist UUID</button>
</div>
</div>
</div>
<div class="col-sm-5">
<table class="table table-striped table-bordered table-hover" id="myTable_2">
<thead class="thead-dark">
<tr>
<th style="max-width: 800px;">UUID</th>
<th style="max-width: 800px;">Unblacklist UUID</th>
</tr>
</thead>
<tbody>
{% for uuid in list_blacklisted_uuid_2 %}
<tr>
<td>{{uuid}}</td>
<td>
<a href="{{ url_for('unblacklist_uuid') }}?page={{page}}&uuid={{uuid}}">
<button type="button" class="btn btn-outline-danger">UnBlacklist UUID</button>
</a>
</td>
</tr>
{% endfor %}
</tbody>
</table>
</div>
</div>
</div>
</div>
</div>
<div class="d-flex justify-content-center">
<nav class="mt-4" aria-label="...">
<ul class="pagination">
<li class="page-item {%if page==1%}disabled{%endif%}">
<a class="page-link" href="{{ url_for('blacklisted_uuid') }}?page={{page-1}}">Previous</a>
</li>
{%if page>3%}
<li class="page-item"><a class="page-link" href="{{ url_for('blacklisted_uuid') }}?page=1">1</a></li>
<li class="page-item disabled"><a class="page-link" aria-disabled="true" href="#">...</a></li>
<li class="page-item"><a class="page-link" href="{{ url_for('blacklisted_uuid') }}?page={{page-1}}">{{page-1}}</a></li>
<li class="page-item active"><a class="page-link" href="{{ url_for('blacklisted_uuid') }}?page={{page}}">{{page}}</a></li>
{%else%}
{%if page>2%}<li class="page-item"><a class="page-link" href="{{ url_for('blacklisted_uuid') }}?page={{page-2}}">{{page-2}}</a></li>{%endif%}
{%if page>1%}<li class="page-item"><a class="page-link" href="{{ url_for('blacklisted_uuid') }}?page={{page-1}}">{{page-1}}</a></li>{%endif%}
<li class="page-item active"><a class="page-link" href="{{ url_for('blacklisted_uuid') }}?page={{page}}">{{page}}</a></li>
{%endif%}
{%if nb_page_max-page>3%}
<li class="page-item"><a class="page-link" href="{{ url_for('blacklisted_uuid') }}?page={{page+1}}">{{page+1}}</a></li>
<li class="page-item disabled"><a class="page-link" aria-disabled="true" href="#">...</a></li>
<li class="page-item"><a class="page-link" href="{{ url_for('blacklisted_uuid') }}?page={{nb_page_max}}">{{nb_page_max}}</a></li>
{%else%}
{%if nb_page_max-page>2%}<li class="page-item"><a class="page-link" href="{{ url_for('blacklisted_uuid') }}?page={{nb_page_max-2}}">{{nb_page_max-2}}</a></li>{%endif%}
{%if nb_page_max-page>1%}<li class="page-item"><a class="page-link" href="{{ url_for('blacklisted_uuid') }}?page={{nb_page_max-1}}">{{nb_page_max-1}}</a></li>{%endif%}
{%if nb_page_max-page>0%}<li class="page-item"><a class="page-link" href="{{ url_for('blacklisted_uuid') }}?page={{nb_page_max}}">{{nb_page_max}}</a></li>{%endif%}
{%endif%}
<li class="page-item {%if page==nb_page_max%}disabled{%endif%}">
<a class="page-link" href="{{ url_for('blacklisted_uuid') }}?page={{page+1}}" aria-disabled="true">Next</a>
</li>
</ul>
</nav>
</div>
{% include 'navfooter.html' %}
</body>
<script>
var table
$(document).ready(function(){
table = $('#myTable_1').DataTable(
{
/*"aLengthMenu": [[5, 10, 15, 20, -1], [5, 10, 15, 20, "All"]],
"iDisplayLength": 10,*/
"order": [[ 0, "asc" ]]
}
);
table = $('#myTable_2').DataTable(
{
/*"aLengthMenu": [[5, 10, 15, 20, -1], [5, 10, 15, 20, "All"]],
"iDisplayLength": 10,*/
"order": [[ 0, "asc" ]]
}
);
});
</script>

View File

@ -2,10 +2,15 @@
<html> <html>
<head> <head>
<title>D4-Project</title>
<link rel="icon" href="{{ url_for('static', filename='img/d4-logo.png')}}">
<!-- Core CSS --> <!-- Core CSS -->
<link href="{{ url_for('static', filename='css/bootstrap.min.css') }}" rel="stylesheet"> <link href="{{ url_for('static', filename='css/bootstrap.min.css') }}" rel="stylesheet">
<link href="{{ url_for('static', filename='font-awesome/css/font-awesome.css') }}" rel="stylesheet">
<!-- JS --> <!-- JS -->
<script src="{{ url_for('static', filename='js/jquery.js')}}"></script>
<script src="{{ url_for('static', filename='js/bootstrap.min.js')}}"></script>
<script src="{{ url_for('static', filename='js/d3.min.js')}}"></script> <script src="{{ url_for('static', filename='js/d3.min.js')}}"></script>
<style> <style>
@ -36,7 +41,7 @@
} }
text.category{ text.category{
fill: #666666; fill: #666666;
font-size: 14px; font-size: 18px;
} }
</style> </style>
@ -45,24 +50,65 @@
<body> <body>
<nav class="navbar navbar-expand-sm navbar-dark bg-dark">
<a class="navbar-brand" href="{{ url_for('index') }}">
<img src="{{ url_for('static', filename='img/d4-logo.png')}}" alt="D4 Project" style="width:80px;">
</a>
<ul class="navbar-nav">
<li class="nav-item active">
<a class="nav-link mr-3" href="{{ url_for('index') }}">Home <span class="sr-only">(current)</span></a>
</li>
<li class="nav-item" mr-3>
<a class="nav-link mr-3" href="{{ url_for('sensors_status') }}">Sensors Status</a>
</li>
<li class="nav-item mr-3">
<a class="nav-link" href="{{ url_for('server_management') }}" tabindex="-1" aria-disabled="true">Server Management</a>
</li>
</ul>
</nav>
<div class="row">
<div class="row mr-0">
<div class="col"> <div class="col">
<div id="everything"> <div class="card text-center mt-2 ml-2">
<div id="chart"></div> <div class="card-header bg-dark text-white">
UUID
</div>
<div class="card-body">
<div id="chart_uuid"></div>
</div>
<div class="card-footer text-muted">
{{date}}
</div>
</div> </div>
</div> </div>
<div class="col"> <div class="col">
<div id="everything"> <div class="card text-center mt-2 ml-2">
<div id="charter"></div> <div class="card-header bg-dark text-white">
Types
</div>
<div class="card-body">
<div id="chart_type"></div>
</div>
<div class="card-footer text-muted">
{{date}}
</div>
</div> </div>
</div> </div>
</div> </div>
<div class="d-flex justify-content-center">
<a href="{{ url_for('delete_data') }}">
<button type="button" class="btn btn-primary mt-3 mb-2">Delete All Data (Demo)</button>
</a>
</div>
{% include 'navfooter.html' %}
</body> </body>
<script> <script>
//// ////
//http://bl.ocks.org/charlesdguthrie/11356441, updated and modified //http://bl.ocks.org/charlesdguthrie/11356441, updated and modified
//updating BarChart //updating BarChart
@ -99,8 +145,8 @@ var setup = function(targetID){
var redrawChart = function(targetID, newdata) { var redrawChart = function(targetID, newdata) {
//Import settings //Import settings
var margin=settings.margin, width=settings.width, height=settings.height, categoryIndent=settings.categoryIndent, var margin=targetID.margin, width=targetID.width, height=targetID.height, categoryIndent=targetID.categoryIndent,
svg=settings.svg, x=settings.x, y=settings.y; svg=targetID.svg, x=targetID.x, y=targetID.y;
//Reset domains //Reset domains
y.domain(newdata.sort(function(a,b){ y.domain(newdata.sort(function(a,b){
@ -205,8 +251,8 @@ var redrawChart = function(targetID, newdata) {
.attr("transform", function(d){ return "translate(0," + y(d.key) + ")"; }); .attr("transform", function(d){ return "translate(0," + y(d.key) + ")"; });
}; };
var pullData = function(settings,callback){ var pullData = function(json_url,settings,callback){
d3.json("{{ url_for('_json_daily_uuid_stats') }}", function (err, data){ d3.json(json_url, function (err, data){
if (err) return console.warn(err); if (err) return console.warn(err);
callback(settings,data); callback(settings,data);
}) })
@ -220,17 +266,26 @@ var formatData = function(data){
.slice(0, 15); // linit to 15 items .slice(0, 15); // linit to 15 items
} }
var redraw = function(settings){ var redraw = function(json_url,settings){
pullData(settings,redrawChart) pullData(json_url,settings,redrawChart)
} }
json_url_uuid = "{{ url_for('_json_daily_uuid_stats') }}"
json_url_type = "{{ url_for('_json_daily_type_stats') }}"
//setup //setup
var settings = setup('#chart'); var settings = setup('#chart_uuid');
redraw(settings) redraw(json_url_uuid,settings)
redraw(json_url_uuid,settings)
var settings_type = setup('#chart_type');
redraw(json_url_type,settings_type)
redraw(json_url_type,settings_type)
//Interval //Interval
setInterval(function(){ setInterval(function(){
redraw(settings) redraw(json_url_uuid,settings)
redraw(json_url_type,settings_type)
}, 4000); }, 4000);
//// ////

View File

@ -0,0 +1,19 @@
<hr class="mb-0">
<nav class="navbar bottom navbar-expand-sm navbar-light bg-light">
<a class="navbar-brand" href="https://www.circl.lu/">
<img src="{{ url_for('static', filename='img/circl.png')}}" alt="circl" class="h-100" style="width: 180px;">
</a>
<img src="{{ url_for('static', filename='img/cef.png')}}" alt="cef" class="h-100" style="width: 500px;">
<ul class="navbar-nav ml-auto">
<li class="nav-item">
<a class="navbar-brand" href="https://www.d4-project.org/">
<img src="{{ url_for('static', filename='img/d4-logo.png')}}" alt="D4 Project" style="width:70px;">
</a>
</li>
<li class="nav-item">
<a class="navbar-brand" href="https://github.com/D4-project/d4-core">
<i class="fa fa-github fa-3x mt-1"></i>
</a>
</li>
</ul>
</nav>

View File

@ -0,0 +1,118 @@
<!DOCTYPE html>
<html>
<head>
<title>D4-Project</title>
<link rel="icon" href="{{ url_for('static', filename='img/d4-logo.png')}}">
<!-- Core CSS -->
<link href="{{ url_for('static', filename='css/bootstrap.min.css') }}" rel="stylesheet">
<link href="{{ url_for('static', filename='font-awesome/css/font-awesome.css') }}" rel="stylesheet">
<!-- JS -->
<script src="{{ url_for('static', filename='js/jquery.js')}}"></script>
<script src="{{ url_for('static', filename='js/bootstrap.min.js')}}"></script>
<style>
</style>
</head>
<body>
<nav class="navbar navbar-expand-lg navbar-dark bg-dark">
<a class="navbar-brand" href="{{ url_for('index') }}">
<img src="{{ url_for('static', filename='img/d4-logo.png')}}" alt="D4 Project" style="width:80px;">
</a>
<ul class="navbar-nav">
<li class="nav-item">
<a class="nav-link mr-3" href="{{ url_for('index') }}">Home <span class="sr-only">(current)</span></a>
</li>
<li class="nav-item" mr-3>
<a class="nav-link active mr-3" href="{{ url_for('sensors_status') }}">Sensors Status</a>
</li>
<li class="nav-item mr-3">
<a class="nav-link" href="{{ url_for('server_management') }}" tabindex="-1" aria-disabled="true">Server Management</a>
</li>
</ul>
</nav>
<div class="card mt-2 mb-2">
<div class="card-body bg-dark text-white">
<div class="row">
<div class="col-8">
<div class="custom-control custom-switch mt-2">
<input type="checkbox" class="custom-control-input" id="show_active_connection" {%if active_connection_filter%}checked value="True"{%else%}value=""{%endif%} onclick="window.location.href ='{{ url_for('show_active_uuid') }}?&show_active_connection='+$('#show_active_connection').val();">
<label class="custom-control-label" for="show_active_connection">Active Connection</label>
</div>
</div>
<div class="col-4">
<div class="form-row">
<div class="col-10">
<input type="text" class="form-control mt-1" id="search_uuid" placeholder="Search UUID">
</div>
<div class="col-2">
<button type="button" class="btn btn-outline-light" onclick="window.location.href ='{{ url_for('uuid_management') }}?redirect=0&uuid='+$('#search_uuid').val();">
<i class="fa fa-search fa-2x"></i>
</button>
</div>
</div>
</div>
</div>
</div>
</div>
{% for row_uuid in status_daily_uuid %}
<div class="card text-center mt-3 ml-2 mr-2">
<a class="btn btn-outline-dark px-1 py-1" href="{{ url_for('uuid_management') }}?uuid={{row_uuid['uuid']}}">
<div class="card-header bg-dark text-white">
UUID: {{row_uuid['uuid']}}
</div>
</a>
<div class="card-body">
<div class="card-group">
<div class="card">
<div class="card-header bg-info text-white">
First Seen
</div>
<div class="card-body">
<p class="card-text">{{row_uuid['first_seen_gmt']}} - ({{row_uuid['first_seen']}})</p>
</div>
</div>
<div class="card">
<div class="card-header bg-info text-white">
Last Seen
</div>
<div class="card-body">
<p class="card-text">{{row_uuid['last_seen_gmt']}} - ({{row_uuid['last_seen']}})</p>
</div>
</div>
<div class="card">
{% if not row_uuid['Error'] %}
<div class="card-header bg-success text-white">
Status
</div>
<div class="card-body text-success">
<p class="card-text">OK</p>
{% else %}
<div class="card-header bg-danger text-white">
Status
</div>
<div class="card-body text-danger">
<p class="card-text">{{row_uuid['Error']}}</p>
{% endif %}
{% if row_uuid['active_connection'] %}
<div style="color:Green; display:inline-block">
<i class="fa fa-check-circle"></i> Connected
</div>
{% endif %}
</div>
</div>
</div>
</div>
</div>
{% endfor %}
{% include 'navfooter.html' %}
</body>

View File

@ -0,0 +1,464 @@
<!DOCTYPE html>
<html>
<head>
<title>D4-Project</title>
<link rel="icon" href="{{ url_for('static', filename='img/d4-logo.png')}}">
<!-- Core CSS -->
<link href="{{ url_for('static', filename='css/bootstrap.min.css') }}" rel="stylesheet">
<link href="{{ url_for('static', filename='font-awesome/css/font-awesome.css') }}" rel="stylesheet">
<link href="{{ url_for('static', filename='css/dataTables.bootstrap.min.css') }}" rel="stylesheet">
<!-- JS -->
<script src="{{ url_for('static', filename='js/jquery.js')}}"></script>
<script src="{{ url_for('static', filename='js/bootstrap.min.js')}}"></script>
<script src="{{ url_for('static', filename='js/jquery.dataTables.min.js')}}"></script>
<script src="{{ url_for('static', filename='js/dataTables.bootstrap.min.js')}}"></script>
</head>
<body>
<nav class="navbar navbar-expand-sm navbar-dark bg-dark">
<a class="navbar-brand" href="{{ url_for('index') }}">
<img src="{{ url_for('static', filename='img/d4-logo.png')}}" alt="D4 Project" style="width:80px;">
</a>
<ul class="navbar-nav">
<li class="nav-item">
<a class="nav-link mr-3" href="{{ url_for('index') }}">Home <span class="sr-only">(current)</span></a>
</li>
<li class="nav-item" mr-3>
<a class="nav-link mr-3" href="{{ url_for('sensors_status') }}">Sensors Status</a>
</li>
<li class="nav-item mr-3">
<a class="nav-link active" href="{{ url_for('server_management') }}" tabindex="-1" aria-disabled="true">Server Management</a>
</li>
</ul>
</nav>
<div class="card-deck ml-0 mr-0">
<div class="card text-center mt-3 ml-xl-4">
<div class="card-header bg-danger text-white">
Blacklist IP
</div>
<div class="card-body">
<div class="card-deck">
<div class="card text-center border-danger">
<div class="card-body text-danger">
<h5 class="card-title">Blacklist IP</h5>
<input type="text" class="form-control {%if blacklisted_ip is not none %}{%if blacklisted_ip==1 %}is-valid{% else %}is-invalid{%endif%}{%endif%}" id="blacklist_ip_input" placeholder="IP Address">
<div class="invalid-feedback">
{%if blacklisted_ip==2 %}
This IP is already blacklisted
{% else %}
Incorrect IP address
{% endif %}
</div>
<div class="valid-feedback">
IP Blacklisted
</div>
<button type="button" class="btn btn-danger mt-2" onclick="window.location.href ='{{ url_for('blacklist_ip') }}?redirect=0&ip='+$('#blacklist_ip_input').val();">Blacklist IP</button>
</div>
</div>
<div class="card text-center border-light">
<div class="card-body">
<h5 class="card-title">Manage IP Blacklist</h5>
<a href="{{ url_for('blacklisted_ip') }}">
<button type="button" class="btn btn-outline-primary">Show Blacklisted IP</button>
</a>
</div>
</div>
<div class="card text-center border-success">
<div class="card-body">
<h5 class="card-title">Unblacklist IP</h5>
<input type="text" class="form-control {%if unblacklisted_ip is not none %}{%if unblacklisted_ip==1 %}is-valid{% else %}is-invalid{%endif%}{%endif%}" id="unblacklist_ip_input" placeholder="IP Address">
<div class="invalid-feedback">
{%if unblacklisted_ip==2 %}
This IP is not blacklisted
{% else %}
Incorrect IP address
{% endif %}
</div>
<div class="valid-feedback">
IP Unblacklisted
</div>
<button type="button" class="btn btn-outline-secondary mt-2" onclick="window.location.href ='{{ url_for('unblacklist_ip') }}?redirect=0&ip='+$('#unblacklist_ip_input').val();">Unblacklist IP</button>
</div>
</div>
</div>
</div>
</div>
<div class="w-100 d-none d-sm-block d-xl-none"></div>
<div class="card text-center mt-3 mr-xl-4">
<div class="card-header bg-danger text-white">
Blacklist UUID
</div>
<div class="card-body">
<div class="card-deck">
<div class="card text-center border-danger">
<div class="card-body text-danger">
<h5 class="card-title">Blacklist UUID</h5>
<input type="text" class="form-control {%if blacklisted_uuid is not none %}{%if blacklisted_uuid==1 %}is-valid{% else %}is-invalid{%endif%}{%endif%}" id="blacklist_uuid_input" placeholder="UUID">
<div class="invalid-feedback">
{%if blacklisted_uuid==2 %}
This UUID is already blacklisted
{% else %}
Incorrect UUID
{% endif %}
</div>
<div class="valid-feedback">
UUID Blacklisted
</div>
<button type="button" class="btn btn-danger mt-2" onclick="window.location.href ='{{ url_for('blacklist_uuid') }}?redirect=0&uuid='+$('#blacklist_uuid_input').val();">Blacklist UUID</button>
</div>
</div>
<div class="card text-center border-light">
<div class="card-body">
<h5 class="card-title">Manage UUID Blacklist</h5>
<a href="{{ url_for('blacklisted_uuid') }}">
<button type="button" class="btn btn-outline-primary">Show Blacklisted UUID</button>
</a>
</div>
</div>
<div class="card text-center border-success">
<div class="card-body">
<h5 class="card-title">Unblacklist UUID</h5>
<input type="text" class="form-control {%if unblacklisted_uuid is not none %}{%if unblacklisted_uuid==1 %}is-valid{% else %}is-invalid{%endif%}{%endif%}" id="unblacklist_uuid_input" placeholder="UUID">
<div class="invalid-feedback">
{%if unblacklisted_uuid==2 %}
This UUID is not blacklisted
{% else %}
Incorrect UUID
{% endif %}
</div>
<div class="valid-feedback">
UUID Unblacklisted
</div>
<button type="button" class="btn btn-outline-secondary mt-2" onclick="window.location.href ='{{ url_for('unblacklist_uuid') }}?redirect=0&uuid='+$('#unblacklist_uuid_input').val();">Unblacklist UUID</button>
</div>
</div>
</div>
</div>
</div>
</div>
<div class="card-deck justify-content-center ml-0 mr-0">
<div class="card border-dark mt-3 ml-4 mr-4">
<div class="card-header bg-dark text-white">
Header Accepted Types
</div>
<div class="card-body text-dark">
<div class="row">
<div class="col-md-8">
<table class="table table-striped table-bordered table-hover" id="myTable_">
<thead class="thead-dark">
<tr>
<th>Type</th>
<th style="max-width: 800px;">Description</th>
<th style="max-width: 800px;">Remove Type</th>
</tr>
</thead>
<tbody>
{% for type in list_accepted_types %}
<tr>
<td>{{type['id']}}</td>
<td>{{type['description']}}</td>
<td>
<a href="{{ url_for('remove_accepted_type') }}?redirect=1&type={{type['id']}}">
<button type="button" class="btn btn-outline-danger">Remove Type</button>
</a>
</td>
</tr>
{% endfor %}
</tbody>
</table>
<div class="mt-3">
<table class="table table-striped table-bordered table-hover mt-3" id="table_accepted_extended_type">
<thead class="thead-dark">
<tr>
<th>Type Name</th>
<th>Description</th>
<th>Remove Type</th>
</tr>
</thead>
<tbody id="table_accepted_extended_type_tbody">
{% for type in list_accepted_extended_types %}
<tr>
<td>{{type['name']}}</td>
<td>{{type['description']}}</td>
<td>
<a href="{{ url_for('remove_accepted_extended_type') }}?type_name={{type['name']}}">
<button type="button" class="btn btn-outline-danger">Remove Extended Type</button>
</a>
</td>
</tr>
{% endfor %}
</tbody>
</table>
</div>
</div>
<div class="col-md-4">
<div class="card border-dark mt-3" style="max-width: 18rem;">
<div class="card-body text-dark">
<h5 class="card-title">Add New Types</h5>
<input class="form-control" type="number" id="accepted_type" value="1" min="1" max="254" required>
<input class="form-control" type="text" id="extended_type_name" placeholder="Type Name">
<button type="button" class="btn btn-outline-primary mt-1" onclick="window.location.href ='{{ url_for('add_accepted_type') }}?redirect=1&type='+$('#accepted_type').val()+'&extended_type_name='+$('#extended_type_name').val();">Add New Type</button>
</div>
</div>
</div>
</div>
</div>
</div>
</div>
<div class="card-deck ml-0 mr-0">
<div class="card border-dark mt-3 ml-4 mr-4">
<div class="card-header bg-dark text-white">
Analyzer Management
</div>
<div class="card-body text-dark">
<div class="row">
<div class="col-xl-8">
<table class="table table-striped table-bordered table-hover" id="myTable_1">
<thead class="thead-dark">
<tr>
<th>Type</th>
<th style="max-width: 800px;">uuid</th>
<th style="max-width: 800px;">last updated</th>
<th style="max-width: 800px;">Change max size limit</th>
<th style="max-width: 800px;">Analyzer Queue</th>
</tr>
</thead>
<tbody>
{% for type in list_accepted_types %}
{% if type['list_analyzer_uuid'] %}
{% for analyzer in type['list_analyzer_uuid'] %}
<tr>
<td>{{type['id']}}</td>
<td>
<div class="d-flex">
{{analyzer['uuid']}}
<a href="{{ url_for('remove_analyzer') }}?redirect=1&type={{type['id']}}&analyzer_uuid={{analyzer['uuid']}}" class="ml-auto">
<button type="button" class="btn btn-outline-danger px-2 py-0"><i class="fa fa-trash"></i></button>
</a>
</div>
{%if analyzer['description']%}
<div class="text-info"><small>{{analyzer['description']}}</small></div>
{%endif%}
</td>
<td>{{analyzer['last_updated']}}</td>
<td>
<div class="d-xl-flex justify-content-xl-center">
<input class="form-control mr-lg-1" style="max-width: 100px;" type="number" id="max_size_analyzer_{{analyzer['uuid']}}" value="{{analyzer['size_limit']}}" min="0" required="">
<button type="button" class="btn btn-outline-secondary" onclick="window.location.href ='{{ url_for('analyzer_change_max_size') }}?analyzer_uuid={{analyzer['uuid']}}&redirect=0&max_size_analyzer='+$('#max_size_analyzer_{{analyzer['uuid']}}').val();">Change Max Size</button>
</div>
</td>
<td>
<a href="{{ url_for('empty_analyzer_queue') }}?redirect=1&type={{type['id']}}&analyzer_uuid={{analyzer['uuid']}}">
<button type="button" class="btn btn-outline-danger"><i class="fa fa-eraser"></i></button>
</a>
<button type="button" class="btn btn-outline-info ml-xl-3" onclick="get_analyser_sample('{{type['id']}}', '{{analyzer['uuid']}}');"><i class="fa fa-database"></i> {{analyzer['length']}}</button>
</td>
</tr>
{% endfor %}
{% endif %}
{% endfor %}
</tbody>
</table>
<div class="mt-3">
<table class="table table-striped table-bordered table-hover" id="analyzer_accepted_extended_types">
<thead class="thead-dark">
<tr>
<th>Type Name</th>
<th style="max-width: 800px;">uuid</th>
<th style="max-width: 800px;">last updated</th>
<th style="max-width: 800px;">Change max size limit</th>
<th style="max-width: 800px;">Analyzer Queue</th>
</tr>
</thead>
<tbody id="analyzer_accepted_extended_types_tbody">
{% for type in list_accepted_extended_types %}
{% if type['list_analyzer_uuid'] %}
{% for analyzer in type['list_analyzer_uuid'] %}
<tr>
<td>{{type['name']}}</td>
<td>
<div class="d-flex">
{{analyzer['uuid']}}
<a href="{{ url_for('remove_analyzer') }}?redirect=1&type=254&metatype_name={{type['name']}}&analyzer_uuid={{analyzer['uuid']}}" class="ml-auto">
<button type="button" class="btn btn-outline-danger px-2 py-0"><i class="fa fa-trash"></i></button>
</a>
</div>
{%if analyzer['description']%}
<div class="text-info"><small>{{analyzer['description']}}</small></div>
{%endif%}
</td>
<td>{{analyzer['last_updated']}}</td>
<td>
<div class="d-xl-flex justify-content-xl-center">
<input class="form-control mr-lg-1" style="max-width: 100px;" type="number" id="max_size_analyzer_{{analyzer['uuid']}}" value="{{analyzer['size_limit']}}" min="0" required="">
<button type="button" class="btn btn-outline-secondary" onclick="window.location.href ='{{ url_for('analyzer_change_max_size') }}?analyzer_uuid={{analyzer['uuid']}}&redirect=0&max_size_analyzer='+$('#max_size_analyzer_{{analyzer['uuid']}}').val();">Change Max Size</button>
</div>
</td>
<td>
<a href="{{ url_for('empty_analyzer_queue') }}?redirect=1&type=254&metatype_name={{type['name']}}&analyzer_uuid={{analyzer['uuid']}}">
<button type="button" class="btn btn-outline-danger"><i class="fa fa-eraser"></i></button>
</a>
<button type="button" class="btn btn-outline-info ml-xl-3" onclick="get_analyser_sample('{{type['name']}}', '{{analyzer['uuid']}}');"><i class="fa fa-database"></i> {{analyzer['length']}}</button>
</td>
</tr>
{% endfor %}
{% endif %}
{% endfor %}
</tbody>
</table>
</div>
</div>
<div class="col-xl-4">
<div class="card border-dark mt-3" style="max-width: 18rem;">
<div class="card-body text-dark">
<h5 class="card-title">Add New Analyzer Queue</h5>
<input class="form-control" type="number" id="analyzer_type" value="1" min="1" max="254" required>
<input class="form-control" type="text" id="analyzer_metatype_name" placeholder="Meta Type Name">
<div class="input-group">
<div class="input-group-prepend">
<button class="btn btn-outline-secondary" type="button" onclick="generate_new_uuid();"><i class="fa fa-random"></i></button>
</div>
<input class="form-control" type="text" id="analyzer_uuid" required placeholder="Analyzer uuid">
</div>
<input class="form-control" type="text" id="analyzer_description" required placeholder="Optional Description">
<button type="button" class="btn btn-outline-primary mt-1" onclick="window.location.href ='{{ url_for('add_new_analyzer') }}?redirect=1&type='+$('#analyzer_type').val()+'&analyzer_uuid='+$('#analyzer_uuid').val()+'&metatype_name='+$('#analyzer_metatype_name').val()+'&analyzer_description='+$('#analyzer_description').val();">Add New Analyzer</button>
</div>
</div>
</div>
</div>
</div>
</div>
</div>
<div class="modal fade" id="modal_analyser_sample" tabindex="-1" role="dialog" aria-labelledby="AnalyserModalLabel" aria-hidden="true">
<div class="modal-dialog modal-xl" role="document">
<div class="modal-content">
<div class="modal-header">
<h5 class="modal-title" id="modal_analyser_sample_label"></h5>
<button type="button" class="close" data-dismiss="modal" aria-label="Close">
<span aria-hidden="true">&times;</span>
</button>
</div>
<div class="d-flex modal-body justify-content-center">
<pre id="analyzer_content">
</pre>
</div>
<div class="modal-footer">
<div class="d-sm-flex align-self-sm-start mr-auto">
<input class="form-control w-25 mr-sm-2" type="number" id="max_line_len" value="{{default_analyzer_max_line_len}}" min="1" max="10000">
<button type="button" class="btn btn-primary" onclick="change_analyser_sample_max_len();">
Change Line Size
</button>
</div>
<button type="button" class="btn btn-secondary" data-dismiss="modal">Close</button>
</div>
</div>
</div>
</div>
{% include 'navfooter.html' %}
</body>
<script>
var table
$(document).ready(function(){
$('#extended_type_name').hide()
$('#analyzer_metatype_name').hide()
table = $('#myTable_').DataTable(
{
/*"aLengthMenu": [[5, 10, 15, 20, -1], [5, 10, 15, 20, "All"]],
"iDisplayLength": 10,*/
"order": [[ 0, "asc" ]]
}
);
table = $('#myTable_1').DataTable(
{
/*"aLengthMenu": [[5, 10, 15, 20, -1], [5, 10, 15, 20, "All"]],
"iDisplayLength": 10,*/
"order": [[ 0, "asc" ]]
}
);
});
var tbody = $("#table_accepted_extended_type_tbody");
if (tbody.children().length == 0) {
$("#table_accepted_extended_type").hide();
} else {
table = $('#table_accepted_extended_type').DataTable(
{
"order": [[ 0, "asc" ]]
}
);
}
var tbody = $("#analyzer_accepted_extended_types_tbody");
if (tbody.children().length == 0) {
$("#analyzer_accepted_extended_types").hide();
} else {
table = $('#analyzer_accepted_extended_types').DataTable(
{
"order": [[ 0, "asc" ]]
}
);
}
$('#accepted_type').on('input', function() {
if ($('#accepted_type').val() == 254){
$('#extended_type_name').show()
} else {
$('#extended_type_name').hide()
}
});
$('#analyzer_type').on('input', function() {
if ($('#analyzer_type').val() == 254){
$('#analyzer_metatype_name').show()
} else {
$('#analyzer_metatype_name').hide()
}
});
function get_analyser_sample(type, analyzer_uuid, max_line_len){
$.getJSON( "{{url_for('get_analyser_sample')}}?type="+type+"&analyzer_uuid="+analyzer_uuid+"&max_line_len="+max_line_len, function( data ) {
$( "#modal_analyser_sample_label" ).text("analyzer:"+type+":"+analyzer_uuid);
$( "#analyzer_content" ).text(data);
$( "#modal_analyser_sample" ).modal('show');
});
}
function change_analyser_sample_max_len(){
var analyzer_data_info=$('#modal_analyser_sample_label').text().split(":");
get_analyser_sample(analyzer_data_info[1], analyzer_data_info[2], $('#max_line_len').val());
}
function generate_new_uuid(){
$.getJSON( "{{url_for('generate_uuid')}}", function( data ) {
console.log(data['uuid'])
$( "#analyzer_uuid" ).val(data['uuid']);
});
}
</script>

View File

@ -0,0 +1,403 @@
<!DOCTYPE html>
<html>
<head>
<title>D4-Project</title>
<link rel="icon" href="{{ url_for('static', filename='img/d4-logo.png')}}">
<!-- Core CSS -->
<link href="{{ url_for('static', filename='css/bootstrap.min.css') }}" rel="stylesheet">
<link href="{{ url_for('static', filename='font-awesome/css/font-awesome.css') }}" rel="stylesheet">
<link href="{{ url_for('static', filename='css/dataTables.bootstrap.min.css') }}" rel="stylesheet">
<!-- JS -->
<script src="{{ url_for('static', filename='js/jquery.js')}}"></script>
<script src="{{ url_for('static', filename='js/popper.min.js')}}"></script>
<script src="{{ url_for('static', filename='js/bootstrap.min.js')}}"></script>
<script src="{{ url_for('static', filename='js/jquery.dataTables.min.js')}}"></script>
<script src="{{ url_for('static', filename='js/dataTables.bootstrap.min.js')}}"></script>
<script src="{{ url_for('static', filename='js/d3v5.min.js')}}"></script>
</head>
<body>
<nav class="navbar navbar-expand-sm navbar-dark bg-dark">
<a class="navbar-brand" href="{{ url_for('index') }}">
<img src="{{ url_for('static', filename='img/d4-logo.png')}}" alt="D4 Project" style="width:80px;">
</a>
<ul class="navbar-nav">
<li class="nav-item">
<a class="nav-link mr-3" href="{{ url_for('index') }}">Home <span class="sr-only">(current)</span></a>
</li>
<li class="nav-item" mr-3>
<a class="nav-link mr-3" href="{{ url_for('sensors_status') }}">Sensors Status</a>
</li>
<li class="nav-item mr-3">
<a class="nav-link" href="{{ url_for('server_management') }}" tabindex="-1" aria-disabled="true">Server Management</a>
</li>
</ul>
</nav>
<div class="card text-center mt-3 ml-2 mr-2">
<div class="card-header bg-dark text-white">
UUID: {{uuid_sensor}}
</div>
<div class="card-body">
<div class="card-group">
<div class="card">
<div class="card-header bg-info text-white">
First Seen
</div>
<div class="card-body">
<p class="card-text">{{data_uuid['first_seen_gmt']}} - ({{data_uuid['first_seen']}})</p>
</div>
</div>
<div class="card">
<div class="card-header bg-info text-white">
Last Seen
</div>
<div class="card-body">
<p class="card-text">{{data_uuid['last_seen_gmt']}} - ({{data_uuid['last_seen']}})</p>
</div>
</div>
<div class="card">
{% if not data_uuid['Error'] %}
<div class="card-header bg-success text-white">
Status
</div>
<div class="card-body text-success">
<p class="card-text">OK</p>
{% else %}
<div class="card-header bg-danger text-white">
Status
</div>
<div class="card-body text-danger">
<p class="card-text">{{data_uuid['Error']}}</p>
{% endif %}
{% if active_connection %}
<div style="color:Green; display:inline-block">
<i class="fa fa-check-circle"></i> Connected
</div>
<div>
<a href="{{ url_for('kick_uuid') }}?uuid={{uuid_sensor}}" {% if data_uuid['temp_blacklist_uuid'] %}style="pointer-events: none;"{% endif %}>
<button type="button" class="btn btn-outline-info" {% if data_uuid['temp_blacklist_uuid'] %}disabled{% endif %}>Kick UUID</button>
</a>
</div>
{% endif %}
</div>
</div>
</div>
</div>
</div>
<div class="card-deck justify-content-center ml-0 mr-0">
<div class="card border-dark mt-3" style="max-width: 18rem;">
<div class="card-body text-dark">
<h5 class="card-title">Change Stream Max Size</h5>
{% if not data_uuid['blacklisted_uuid'] and not data_uuid['blacklisted_ip_by_uuid'] %}
<input class="form-control" type="number" id="max_stream_input" value="{{max_uuid_stream}}" min="0" required>
<button type="button" class="btn btn-outline-secondary mt-1" onclick="window.location.href ='{{ url_for('uuid_change_stream_max_size') }}?uuid={{uuid_sensor}}&redirect=1&max_uuid_stream='+$('#max_stream_input').val();">Change Max Size</button>
{% else %}
<input class="form-control" type="number" id="max_stream_input" value="{{max_uuid_stream}}" min="0" required disabled>
<button type="button" class="btn btn-outline-secondary mt-1" disabled>Change Max Size</button>
{% endif %}
</div>
</div>
<div class="card text-center border-danger mt-3" style="max-width: 14rem;">
<div class="card-body text-danger">
<h5 class="card-title">UUID Blacklist</h5>
{% if not data_uuid['blacklisted_uuid'] %}
<a href="{{ url_for('blacklist_uuid') }}?uuid={{uuid_sensor}}&redirect=1" {% if data_uuid['blacklisted_ip_by_uuid'] %}style="pointer-events: none;"{% endif %}>
<button type="button" class="btn btn-danger" {% if data_uuid['blacklisted_ip_by_uuid'] %}disabled{% endif %}>Blacklist UUID</button>
</a>
{% else %}
<a href="{{ url_for('unblacklist_uuid') }}?uuid={{uuid_sensor}}&redirect=1" {% if data_uuid['blacklisted_ip_by_uuid'] %}style="pointer-events: none;"{% endif %}>
<button type="button" class="btn btn-warning" {% if data_uuid['blacklisted_ip_by_uuid'] %}disabled{% endif %}>UnBlacklist UUID</button>
</a>
{% endif %}
</div>
</div>
<div class="card text-center border-danger mt-3" style="max-width: 20rem;">
<div class="card-body text-danger">
<h5 class="card-title">Blacklist IP Using This UUID</h5>
{% if not data_uuid['blacklisted_ip_by_uuid'] %}
<a href="{{ url_for('blacklist_ip_by_uuid') }}?uuid={{uuid_sensor}}&redirect=1">
<button type="button" class="btn btn-danger">Blacklist IP</button>
</a>
{% else %}
<a href="{{ url_for('unblacklist_ip_by_uuid') }}?uuid={{uuid_sensor}}&redirect=1">
<button type="button" class="btn btn-warning">UnBlacklist IP</button>
</a>
{% endif %}
</div>
</div>
<div class="card border-dark mt-3" style="max-width: 18rem;">
<div class="card-body text-dark">
<h5 class="card-title">Change UUID Key</h5>
<input class="form-control" type="text" id="uuid_key" value="{{uuid_key}}" required>
<button type="button" class="btn btn-outline-secondary mt-1" onclick="window.location.href ='{{ url_for('set_uuid_hmac_key') }}?uuid={{uuid_sensor}}&redirect=1&key='+$('#uuid_key').val();">Change UUID Key</button>
</div>
</div>
</div>
<div>
<div class="card text-center mt-3 mx-3">
<div class="card-header bg-dark text-white">
Types Used:
</div>
<div class="row ml-0 mr-0">
<div class="col-lg-4">
<div class="mt-2">
<table class="table table-striped table-bordered table-hover" id="myTable_1">
<thead class="thead-dark">
<tr>
<th>Type</th>
<th style="max-width: 800px;">first seen</th>
<th style="max-width: 800px;">last seen</th>
</tr>
</thead>
<tbody>
{% for type in uuid_all_type %}
<tr>
<td>{{type['type']}}</td>
<td>{{type['first_seen']}}</td>
<td>{{type['last_seen']}}</td>
</tr>
{% endfor %}
</tbody>
</table>
</div>
</div>
<div class="col-lg-8">
<div id="barchart_type">
</div>
</div>
</div>
</div>
</div>
<div class="row ml-0 mr-0">
<div class="col-lg-6">
<div class="card text-center mt-3">
<div class="card-header bg-dark text-white">
Last IP Used:
</div>
<ul class="list-group list-group-flush">
{%for row in all_ip%}
<li class="list-group-item">
{{row['ip']}} - {{row['datetime']}} <button class="fa fa-info-circle btn text-secondary" onclick="get_whois_data('{{row['ip']}}');"></button>
</li>
{%endfor%}
</ul>
</div>
</div>
<div class="col-lg-6">
<div class="d-none card mt-3 mb-3" id="whois_data">
<div class="card-header bg-dark text-center text-white">
Whois Info:
</div>
<pre class="ml-2" id="whois_output">
</pre>
</div>
</div>
</div>
{% include 'navfooter.html' %}
</body>
<script>
var chart = {};
$(document).ready(function(){
table = $('#myTable_1').DataTable(
{
"aLengthMenu": [[5, 10, 15, 20, -1], [5, 10, 15, 20, "All"]],
"iDisplayLength": 10,
"order": [[ 0, "asc" ]]
}
);
chart.stackBarChart =barchart_type_stack("{{ url_for('get_uuid_type_history_json') }}?uuid_sensor={{uuid_sensor}}", 'id');
chart.onResize();
$(window).on("resize", function() {
chart.onResize();
});
$('[data-toggle="popover"]').popover({
placement: 'top',
container: 'body',
html : true,
});
});
function get_whois_data(ip){
$.getJSON( "{{url_for('whois_data')}}?ip="+ip, function( data ) {
$( "#whois_data" ).removeClass( "d-none" );
$( "#whois_output" ).text(data);
});
}
</script>
<script>
var margin = {top: 20, right: 90, bottom: 55, left: 0},
    width = 1000 - margin.left - margin.right,  // fixed drawing width; the viewBox and the onResize handler keep the rendered chart responsive
    height = 500 - margin.top - margin.bottom;
var x = d3.scaleBand().rangeRound([0, width]).padding(0.1);
var y = d3.scaleLinear().rangeRound([height, 0]);
var xAxis = d3.axisBottom(x);
var yAxis = d3.axisLeft(y);
var color = d3.scaleOrdinal(d3.schemeSet3);
var svg = d3.select("#barchart_type").append("svg")
.attr("id", "thesvg")
.attr("viewBox", "0 0 "+width+" 500")
.attr("width", width + margin.left + margin.right)
.attr("height", height + margin.top + margin.bottom)
.append("g")
.attr("transform", "translate(" + margin.left + "," + margin.top + ")");
function barchart_type_stack(url, id) {
d3.json(url)
.then(function(data){
var labelVar = 'date'; //A
var varNames = d3.keys(data[0])
.filter(function (key) { return key !== labelVar;}); //B
data.forEach(function (d) { //D
var y0 = 0;
d.mapping = varNames.map(function (name) {
return {
name: name,
label: d[labelVar],
y0: y0,
y1: y0 += +d[name]
};
});
d.total = d.mapping[d.mapping.length - 1].y1;
});
x.domain(data.map(function (d) { return (d.date); })); //E
y.domain([0, d3.max(data, function (d) { return d.total; })]);
svg.append("g")
.attr("class", "x axis")
.attr("transform", "translate(0," + height + ")")
.call(xAxis)
.selectAll("text")
.attr("class", "bar")
.on("click", function (d) { window.location.href = "#" })
.attr("transform", "rotate(-18)" )
//.attr("transform", "rotate(-40)" )
.style("text-anchor", "end");
svg.append("g")
.attr("class", "y axis")
.call(yAxis)
.append("text")
.attr("transform", "rotate(-90)")
.attr("y", 6)
.attr("dy", ".71em")
.style("text-anchor", "end");
var selection = svg.selectAll(".series")
.data(data)
.enter().append("g")
.attr("class", "series")
.attr("transform", function (d) { return "translate(" + x((d.date)) + ",0)"; });
selection.selectAll("rect")
.data(function (d) { return d.mapping; })
.enter().append("rect")
.attr("class", "bar_stack")
.attr("width", x.bandwidth())
.attr("y", function (d) { return y(d.y1); })
.attr("height", function (d) { return y(d.y0) - y(d.y1); })
.style("fill", function (d) { return color(d.name); })
.style("stroke", "grey")
.on("mouseover", function (d) { showPopover.call(this, d); })
.on("mouseout", function (d) { removePopovers(); })
.on("click", function(d){ window.location.href = "#" });
data.forEach(function(d) {
if(d.total != 0){
svg.append("text")
.attr("class", "bar")
.attr("dy", "-.35em")
.attr('x', x(d.date) + x.bandwidth()/2)
.attr('y', y(d.total))
.on("click", function () {window.location.href = "#" })
.style("text-anchor", "middle")
.text(d.total);
}
});
drawLegend(varNames);
});
}
function drawLegend (varNames) {
var legend = svg.selectAll(".legend")
.data(varNames.slice().reverse())
.enter().append("g")
.attr("class", "legend")
.attr("transform", function (d, i) { return "translate(0," + i * 20 + ")"; });
legend.append("rect")
.attr("x", 943)
.attr("width", 10)
.attr("height", 10)
.style("fill", color)
.style("stroke", "grey");
legend.append("text")
.attr("class", "svgText")
.attr("x", 941)
.attr("y", 6)
.attr("dy", ".35em")
.style("text-anchor", "end")
.text(function (d) { return d; });
}
function removePopovers () {
$('.popover').each(function() {
$(this).remove();
});
}
function showPopover (d) {
$(this).popover({
title: d.name,
placement: 'top',
container: 'body',
trigger: 'manual',
html : true,
content: function() {
return d.label +
"<br/>num: " + d3.format(",")(d.value ? d.value: d.y1 - d.y0); }
});
$(this).popover('show')
}
chart.onResize = function () {
var aspect = width / height, chart = $("#thesvg");
var targetWidth = chart.parent().width();
chart.attr("width", targetWidth);
chart.attr("height", targetWidth / 2);
}
window.chart = chart;
</script>

@@ -5,6 +5,7 @@ set -e
 BOOTSTRAP_VERSION='4.2.1'
 FONT_AWESOME_VERSION='4.7.0'
 D3_JS_VERSION='4.13.0'
+D3_JS_VERSIONv5='5.9.2'

 if [ ! -d static/css ]; then
 mkdir static/css
@@ -12,42 +13,55 @@ fi
 if [ ! -d static/js ]; then
 mkdir static/js
 fi
+if [ ! -d static/json ]; then
+mkdir static/json
+fi

 rm -rf temp
 mkdir temp
+mkdir temp/d3v5/

 wget https://github.com/twbs/bootstrap/releases/download/v${BOOTSTRAP_VERSION}/bootstrap-${BOOTSTRAP_VERSION}-dist.zip -O temp/bootstrap${BOOTSTRAP_VERSION}.zip
-#wget https://github.com/FortAwesome/Font-Awesome/archive/v${FONT_AWESOME_VERSION}.zip -O temp/FONT_AWESOME_${FONT_AWESOME_VERSION}.zip
+wget https://github.com/FortAwesome/Font-Awesome/archive/v${FONT_AWESOME_VERSION}.zip -O temp/FONT_AWESOME_${FONT_AWESOME_VERSION}.zip
 wget https://github.com/d3/d3/releases/download/v${D3_JS_VERSION}/d3.zip -O temp/d3_${D3_JS_VERSION}.zip
+wget https://github.com/d3/d3/releases/download/v${D3_JS_VERSIONv5}/d3.zip -O temp/d3v5/d3_${D3_JS_VERSIONv5}.zip
+wget https://github.com/FezVrasta/popper.js/archive/v1.14.3.zip -O temp/popper.zip

 # dateRangePicker
-#wget https://github.com/moment/moment/archive/2.22.2.zip -O temp/moment_2.22.2.zip
+wget https://github.com/moment/moment/archive/2.22.2.zip -O temp/moment_2.22.2.zip
-#wget https://github.com/longbill/jquery-date-range-picker/archive/v0.18.0.zip -O temp/daterangepicker_v0.18.0.zip
+wget https://github.com/longbill/jquery-date-range-picker/archive/v0.18.0.zip -O temp/daterangepicker_v0.18.0.zip

 unzip temp/bootstrap${BOOTSTRAP_VERSION}.zip -d temp/
-#unzip temp/FONT_AWESOME_${FONT_AWESOME_VERSION}.zip -d temp/
+unzip temp/FONT_AWESOME_${FONT_AWESOME_VERSION}.zip -d temp/
 unzip temp/d3_${D3_JS_VERSION}.zip -d temp/
+unzip temp/d3v5/d3_${D3_JS_VERSIONv5}.zip -d temp/d3v5/
+unzip temp/popper.zip -d temp/
-#unzip temp/moment_2.22.2.zip -d temp/
+unzip temp/moment_2.22.2.zip -d temp/
-#unzip temp/daterangepicker_v0.18.0.zip -d temp/
+unzip temp/daterangepicker_v0.18.0.zip -d temp/

 mv temp/bootstrap-${BOOTSTRAP_VERSION}-dist/js/bootstrap.min.js ./static/js/
 mv temp/bootstrap-${BOOTSTRAP_VERSION}-dist/css/bootstrap.min.css ./static/css/
 mv temp/bootstrap-${BOOTSTRAP_VERSION}-dist/css/bootstrap.min.css.map ./static/css/
+mv temp/popper.js-1.14.3/dist/umd/popper.min.js ./static/js/
+mv temp/popper.js-1.14.3/dist/umd/popper.min.js.map ./static/js/
-#mv temp/Font-Awesome-${FONT_AWESOME_VERSION} temp/font-awesome
+mv temp/Font-Awesome-${FONT_AWESOME_VERSION} temp/font-awesome
-#rm -rf ./static/fonts/ ./static/font-awesome/
+rm -rf ./static/fonts/ ./static/font-awesome/
-#mv temp/font-awesome/ ./static/
+mv temp/font-awesome/ ./static/
-#mv temp/jquery-date-range-picker-0.18.0/dist/daterangepicker.min.css ./static/css/
+mv temp/jquery-date-range-picker-0.18.0/dist/daterangepicker.min.css ./static/css/
 mv temp/d3.min.js ./static/js/
-#mv temp/moment-2.22.2/min/moment.min.js ./static/js/
-#mv temp/jquery-date-range-picker-0.18.0/dist/jquery.daterangepicker.min.js ./static/js/
+cp temp/d3v5/d3.min.js ./static/js/d3v5.min.js
+mv temp/moment-2.22.2/min/moment.min.js ./static/js/
+mv temp/jquery-date-range-picker-0.18.0/dist/jquery.daterangepicker.min.js ./static/js/

 rm -rf temp
@@ -55,7 +69,11 @@ JQVERSION="3.3.1"
 wget http://code.jquery.com/jquery-${JQVERSION}.min.js -O ./static/js/jquery.js

 #Ressources for dataTable
-wget https://cdn.datatables.net/v/bs4/dt-1.10.18/datatables.min.css -O ./static/css/dataTables.bootstrap.css
+wget https://cdn.datatables.net/1.10.18/css/dataTables.bootstrap4.min.css -O ./static/css/dataTables.bootstrap.min.css
-wget https://cdn.datatables.net/v/bs4/dt-1.10.18/datatables.min.js -O ./static/js/dataTables.bootstrap.js
+wget https://cdn.datatables.net/1.10.18/js/dataTables.bootstrap4.min.js -O ./static/js/dataTables.bootstrap.min.js
+wget https://cdn.datatables.net/1.10.18/js/jquery.dataTables.min.js -O ./static/js/jquery.dataTables.min.js

+#Update json
+wget https://raw.githubusercontent.com/D4-project/architecture/master/format/type.json -O ./static/json/type.json

 rm -rf temp

@@ -0,0 +1,159 @@
#!/usr/bin/env python3
import os
import sys
import time
import gzip
import redis
import shutil
import datetime
import signal
class GracefulKiller:
kill_now = False
def __init__(self):
signal.signal(signal.SIGINT, self.exit_gracefully)
signal.signal(signal.SIGTERM, self.exit_gracefully)
def exit_gracefully(self,signum, frame):
self.kill_now = True
def compress_file(file_full_path, session_uuid,i=0):
redis_server_stream.set('data_in_process:{}'.format(session_uuid), file_full_path)
if i==0:
compressed_filename = '{}.gz'.format(file_full_path)
else:
compressed_filename = '{}.{}.gz'.format(file_full_path, i)
if os.path.isfile(compressed_filename):
compress_file(file_full_path, session_uuid, i+1)
else:
with open(file_full_path, 'rb') as f_in:
with gzip.open(compressed_filename, 'wb') as f_out:
shutil.copyfileobj(f_in, f_out)
try:
os.remove(file_full_path)
except FileNotFoundError:
pass
# save full path in analyzer queue
for analyzer_uuid in redis_server_metadata.smembers('analyzer:{}'.format(type)):
analyzer_uuid = analyzer_uuid.decode()
redis_server_analyzer.lpush('analyzer:{}:{}'.format(type, analyzer_uuid), compressed_filename)
redis_server_metadata.hset('analyzer:{}'.format(analyzer_uuid), 'last_updated', time.time())
analyser_queue_max_size = redis_server_metadata.hget('analyzer:{}'.format(analyzer_uuid), 'max_size')
if analyser_queue_max_size is None:
analyser_queue_max_size = analyzer_list_max_default_size
redis_server_analyzer.ltrim('analyzer:{}:{}'.format(type, analyzer_uuid), 0, analyser_queue_max_size)
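# Each compressed capture path is pushed to every analyzer queue registered in the
# 'analyzer:<type>' set. A consumer would typically pop paths from its own queue,
# e.g. (sketch, '<analyzer_uuid>' being whatever UUID was registered):
#   redis_server_analyzer.rpop('analyzer:1:<analyzer_uuid>')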
host_redis_stream = "localhost"
port_redis_stream = 6379
host_redis_metadata = "localhost"
port_redis_metadata = 6380
redis_server_stream = redis.StrictRedis(
host=host_redis_stream,
port=port_redis_stream,
db=0)
redis_server_metadata = redis.StrictRedis(
host=host_redis_metadata,
port=port_redis_metadata,
db=0)
redis_server_analyzer = redis.StrictRedis(
host=host_redis_metadata,
port=port_redis_metadata,
db=2)
type = 1
sleep_time = 300
analyzer_list_max_default_size = 10000
if __name__ == "__main__":
killer = GracefulKiller()
if len(sys.argv) != 4:
print('usage:', 'file_compressor.py', 'session_uuid', 'directory_data_uuid', 'date')
exit(1)
# TODO: sanitize input
session_uuid = sys.argv[1]
directory_data_uuid = sys.argv[2]
date = sys.argv[3]
worker_data_directory = os.path.join(directory_data_uuid, date[0:4], date[4:6], date[6:8])
full_datetime = datetime.datetime.now().strftime("%Y%m%d%H")
current_file = None
time_change = False
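# Main loop: every sleep_time seconds, list the *.cap files of the current day directory,
# gzip everything except the newest file (which tcpdump, started with -G, is presumably
# still writing to), and switch to the new day directory once the date changes.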
while True:
if killer.kill_now:
break
new_date = datetime.datetime.now().strftime("%Y%m%d")
# get all directory files
all_files = os.listdir(worker_data_directory)
not_compressed_file = []
# filter: get all not compressed files
for file in all_files:
if file.endswith('.cap'):
not_compressed_file.append(os.path.join(worker_data_directory, file))
if not_compressed_file:
### check time-change (minus one hour) ###
new_full_datetime = datetime.datetime.now().strftime("%Y%m%d%H")
if new_full_datetime < full_datetime:
# sort list, last modified
not_compressed_file.sort(key=os.path.getctime)
else:
# sort list
not_compressed_file.sort()
### ###
# new day
if date != new_date:
# compress all file
for file in not_compressed_file:
if killer.kill_now:
break
compress_file(file, session_uuid)
# reset file tracker
current_file = None
date = new_date
# update worker_data_directory
worker_data_directory = os.path.join(directory_data_uuid, date[0:4], date[4:6], date[6:8])
# restart
continue
# file used by tcpdump
max_file = not_compressed_file[-1]
full_datetime = new_full_datetime
# Init: set current_file
if not current_file:
current_file = max_file
#print('max_file set: {}'.format(current_file))
# new file created
if max_file != current_file:
# get all previous files
for file in not_compressed_file:
if file != max_file:
if killer.kill_now:
break
#print('new file: {}'.format(file))
compress_file(file, session_uuid)
# update current_file tracker
current_file = max_file
if killer.kill_now:
break
time.sleep(sleep_time)

@@ -3,16 +3,18 @@
 import os
 import sys
 import time
+import gzip
 import redis
-import subprocess
+import shutil
 import datetime
+import subprocess
+import configparser

 def data_incorrect_format(stream_name, session_uuid, uuid):
-    redis_server_stream.sadd('Error:IncorrectType:{}'.format(type), session_uuid)
+    redis_server_stream.sadd('Error:IncorrectType', session_uuid)
     redis_server_metadata.hset('metadata_uuid:{}'.format(uuid), 'Error', 'Error: Type={}, Incorrect file format'.format(type))
     clean_stream(stream_name, session_uuid)
-    print('Incorrect format')
+    print('Incorrect format, uuid={}'.format(uuid))
     sys.exit(1)

 def clean_stream(stream_name, session_uuid):
@@ -22,6 +24,28 @@ def clean_stream(stream_name, session_uuid):
     redis_server_stream.hdel('map-type:session_uuid-uuid:{}'.format(type), session_uuid)
     redis_server_stream.delete(stream_name)

+def compress_file(file_full_path, i=0):
+    if i==0:
+        compressed_filename = '{}.gz'.format(file_full_path)
+    else:
+        compressed_filename = '{}.{}.gz'.format(file_full_path, i)
+    if os.path.isfile(compressed_filename):
+        compress_file(file_full_path, i+1)
+    else:
+        with open(file_full_path, 'rb') as f_in:
+            with gzip.open(compressed_filename, 'wb') as f_out:
+                shutil.copyfileobj(f_in, f_out)
+        os.remove(file_full_path)
+    # save full path in anylyzer queue
+    for analyzer_uuid in redis_server_metadata.smembers('analyzer:{}'.format(type)):
+        analyzer_uuid = analyzer_uuid.decode()
+        redis_server_analyzer.lpush('analyzer:{}:{}'.format(type, analyzer_uuid), compressed_filename)
+        redis_server_metadata.hset('analyzer:{}'.format(analyzer_uuid), 'last_updated', time.time())
+        analyser_queue_max_size = redis_server_metadata.hget('analyzer:{}'.format(analyzer_uuid), 'max_size')
+        if analyser_queue_max_size is None:
+            analyser_queue_max_size = analyzer_list_max_default_size
+        redis_server_analyzer.ltrim('analyzer:{}:{}'.format(type, analyzer_uuid), 0, analyser_queue_max_size)
+
 host_redis_stream = "localhost"
 port_redis_stream = 6379
@@ -38,10 +62,31 @@ redis_server_metadata = redis.StrictRedis(
     port=port_redis_metadata,
     db=0)

+redis_server_analyzer = redis.StrictRedis(
+    host=host_redis_metadata,
+    port=port_redis_metadata,
+    db=2)
+
+# get file config
+config_file_server = os.path.join(os.environ['D4_HOME'], 'configs/server.conf')
+config_server = configparser.ConfigParser()
+config_server.read(config_file_server)
+
+# get data directory
+use_default_save_directory = config_server['Save_Directories'].getboolean('use_default_save_directory')
+# check if field is None
+if use_default_save_directory:
+    data_directory = os.path.join(os.environ['D4_HOME'], 'data')
+else:
+    data_directory = config_server['Save_Directories'].get('save_directory')
+
 type = 1
 tcp_dump_cycle = '300'
 stream_buffer = 100
+analyzer_list_max_default_size = 10000

 id_to_delete = []

 if __name__ == "__main__":
@@ -58,11 +103,12 @@ if __name__ == "__main__":
     if res:
         uuid = res[0][1][0][1][b'uuid'].decode()
         date = datetime.datetime.now().strftime("%Y%m%d")
-        tcpdump_path = os.path.join('../../data', uuid, str(type))
+        tcpdump_path = os.path.join(data_directory, uuid, str(type))
+        full_tcpdump_path = os.path.join(data_directory, uuid, str(type))
         rel_path = os.path.join(tcpdump_path, date[0:4], date[4:6], date[6:8])
         if not os.path.isdir(rel_path):
             os.makedirs(rel_path)
-        print('---- worker launched, uuid={} session_uuid={}'.format(uuid, session_uuid))
+        print('---- worker launched, uuid={} session_uuid={} epoch={}'.format(uuid, session_uuid, time.time()))
     else:
         sys.exit(1)
         print('Incorrect message')
@@ -72,6 +118,8 @@ if __name__ == "__main__":
     process = subprocess.Popen(["tcpdump", '-n', '-r', '-', '-G', tcp_dump_cycle, '-w', '{}/%Y/%m/%d/{}-%Y-%m-%d-%H%M%S.cap'.format(tcpdump_path, uuid)], stdin=subprocess.PIPE, stderr=subprocess.PIPE)
     nb_save = 0

+    process_compressor = subprocess.Popen(['./file_compressor.py', session_uuid, full_tcpdump_path, date])
+
     while True:
         res = redis_server_stream.xread({stream_name: id}, count=1)
@@ -97,6 +145,8 @@ if __name__ == "__main__":
                 Error_message = process.stderr.read()
                 if Error_message == b'tcpdump: unknown file format\n':
                     data_incorrect_format(stream_name, session_uuid, uuid)
+                elif Error_message:
+                    print(Error_message)
                 #print(process.stdout.read())
             nb_save += 1
@@ -108,24 +158,44 @@ if __name__ == "__main__":
                 nb_save = 0
         else:
-            # sucess, all data are saved
+            # success, all data are saved
             if redis_server_stream.sismember('ended_session', session_uuid):
                 out, err = process.communicate(timeout= 0.5)
+                #if out:
                 #    print(out)
                 if err == b'tcpdump: unknown file format\n':
                     data_incorrect_format(stream_name, session_uuid, uuid)
                 elif err:
                     print(err)
+                # close child
+                try:
+                    process_compressor.communicate(timeout= 0.5)
+                except subprocess.TimeoutExpired:
+                    process_compressor.kill()
+                ### compress all files ###
+                date = datetime.datetime.now().strftime("%Y%m%d")
+                worker_data_directory = os.path.join(full_tcpdump_path, date[0:4], date[4:6], date[6:8])
+                all_files = os.listdir(worker_data_directory)
+                all_files.sort()
+                if all_files:
+                    for file in all_files:
+                        if file.endswith('.cap'):
+                            full_path = os.path.join(worker_data_directory, file)
+                            if redis_server_stream.get('data_in_process:{}'.format(session_uuid)) != full_path:
+                                compress_file(full_path)
+                ### ###
                 #print(process.stderr.read())
                 redis_server_stream.srem('ended_session', session_uuid)
                 redis_server_stream.srem('session_uuid:{}'.format(type), session_uuid)
                 redis_server_stream.srem('working_session_uuid:{}'.format(type), session_uuid)
                 redis_server_stream.hdel('map-type:session_uuid-uuid:{}'.format(type), session_uuid)
                 redis_server_stream.delete(stream_name)
+                redis_server_stream.delete('data_in_process:{}'.format(session_uuid))
                 # make sure that tcpdump can save all datas
                 time.sleep(10)
-                print('---- tcpdump DONE, uuid={} session_uuid={}'.format(uuid, session_uuid))
+                print('---- tcpdump DONE, uuid={} session_uuid={} epoch={}'.format(uuid, session_uuid, time.time()))
                 sys.exit(0)
             else:
                 time.sleep(10)

@@ -0,0 +1,159 @@
#!/usr/bin/env python3
import os
import sys
import time
import gzip
import redis
import shutil
import datetime
import signal
class GracefulKiller:
kill_now = False
def __init__(self):
signal.signal(signal.SIGINT, self.exit_gracefully)
signal.signal(signal.SIGTERM, self.exit_gracefully)
def exit_gracefully(self,signum, frame):
self.kill_now = True
def compress_file(file_full_path, session_uuid,i=0):
redis_server_stream.set('data_in_process:{}'.format(session_uuid), file_full_path)
if i==0:
compressed_filename = '{}.gz'.format(file_full_path)
else:
compressed_filename = '{}.{}.gz'.format(file_full_path, i)
if os.path.isfile(compressed_filename):
compress_file(file_full_path, session_uuid, i+1)
else:
with open(file_full_path, 'rb') as f_in:
with gzip.open(compressed_filename, 'wb') as f_out:
shutil.copyfileobj(f_in, f_out)
try:
os.remove(file_full_path)
except FileNotFoundError:
pass
# save full path in analyzer queue
for analyzer_uuid in redis_server_metadata.smembers('analyzer:{}'.format(type)):
analyzer_uuid = analyzer_uuid.decode()
redis_server_analyzer.lpush('analyzer:{}:{}'.format(type, analyzer_uuid), compressed_filename)
redis_server_metadata.hset('analyzer:{}'.format(analyzer_uuid), 'last_updated', time.time())
analyser_queue_max_size = redis_server_metadata.hget('analyzer:{}'.format(analyzer_uuid), 'max_size')
if analyser_queue_max_size is None:
analyser_queue_max_size = analyzer_list_max_default_size
redis_server_analyzer.ltrim('analyzer:{}:{}'.format(type, analyzer_uuid), 0, analyser_queue_max_size)
host_redis_stream = "localhost"
port_redis_stream = 6379
host_redis_metadata = "localhost"
port_redis_metadata = 6380
redis_server_stream = redis.StrictRedis(
host=host_redis_stream,
port=port_redis_stream,
db=0)
redis_server_metadata = redis.StrictRedis(
host=host_redis_metadata,
port=port_redis_metadata,
db=0)
redis_server_analyzer = redis.StrictRedis(
host=host_redis_metadata,
port=port_redis_metadata,
db=2)
type = 1
sleep_time = 300
analyzer_list_max_default_size = 10000
if __name__ == "__main__":
killer = GracefulKiller()
if len(sys.argv) != 4:
print('usage:', 'file_compressor.py', 'session_uuid', 'directory_data_uuid', 'date')
exit(1)
# TODO: sanitize input
session_uuid = sys.argv[1]
directory_data_uuid = sys.argv[2]
date = sys.argv[3]
worker_data_directory = os.path.join(directory_data_uuid, date[0:4], date[4:6], date[6:8])
full_datetime = datetime.datetime.now().strftime("%Y%m%d%H")
current_file = None
time_change = False
while True:
if killer.kill_now:
break
new_date = datetime.datetime.now().strftime("%Y%m%d")
# get all directory files
all_files = os.listdir(worker_data_directory)
not_compressed_file = []
# filter: get all not compressed files
for file in all_files:
if file.endswith('.cap'):
not_compressed_file.append(os.path.join(worker_data_directory, file))
if not_compressed_file:
### check time-change (minus one hour) ###
new_full_datetime = datetime.datetime.now().strftime("%Y%m%d%H")
if new_full_datetime < full_datetime:
# sort list, last modified
not_compressed_file.sort(key=os.path.getctime)
else:
# sort list
not_compressed_file.sort()
### ###
# new day
if date != new_date:
# compress all file
for file in not_compressed_file:
if killer.kill_now:
break
compress_file(file, session_uuid)
# reset file tracker
current_file = None
date = new_date
# update worker_data_directory
worker_data_directory = os.path.join(directory_data_uuid, date[0:4], date[4:6], date[6:8])
# restart
continue
# file used by tcpdump
max_file = not_compressed_file[-1]
full_datetime = new_full_datetime
# Init: set current_file
if not current_file:
current_file = max_file
#print('max_file set: {}'.format(current_file))
# new file created
if max_file != current_file:
# get all previous files
for file in not_compressed_file:
if file != max_file:
if killer.kill_now:
break
#print('new file: {}'.format(file))
compress_file(file, session_uuid)
# update current_file tracker
current_file = max_file
if killer.kill_now:
break
time.sleep(sleep_time)

@@ -0,0 +1,302 @@
#!/usr/bin/env python3
import os
import sys
import time
import json
import gzip
import redis
import shutil
import datetime
import configparser
DEFAULT_FILE_EXTENSION = 'txt'
DEFAULT_FILE_SEPARATOR = b'\n'
ROTATION_SAVE_CYCLE = 300 # seconds
MAX_BUFFER_LENGTH = 100000
TYPE = 254
host_redis_stream = "localhost"
port_redis_stream = 6379
redis_server_stream = redis.StrictRedis(
host=host_redis_stream,
port=port_redis_stream,
db=0)
host_redis_metadata = "localhost"
port_redis_metadata = 6380
redis_server_metadata = redis.StrictRedis(
host=host_redis_metadata,
port=port_redis_metadata,
db=0)
redis_server_analyzer = redis.StrictRedis(
host=host_redis_metadata,
port=port_redis_metadata,
db=2)
analyzer_list_max_default_size = 10000
class MetaTypesDefault:
def __init__(self, uuid, json_file):
self.uuid = uuid
self.type_name = json_file['type']
self.save_path = None
self.buffer = b''
self.file_rotation_mode = True
# get file config
config_file_server = os.path.join(os.environ['D4_HOME'], 'configs/server.conf')
config_server = configparser.ConfigParser()
config_server.read(config_file_server)
# get data directory
use_default_save_directory = config_server['Save_Directories'].getboolean('use_default_save_directory')
# check if field is None
if use_default_save_directory:
data_directory = os.path.join(os.environ['D4_HOME'], 'data')
else:
data_directory = config_server['Save_Directories'].get('save_directory')
self.data_directory = data_directory
self.parse_json(json_file)
def test(self):
print('class: MetaTypesDefault')
######## JSON PARSER ########
def parse_json(self, json_file):
self.file_rotation = False
self.file_separator = b'\n'
self.filename = b''.join([self.type_name.encode(), b'.txt'])
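# Subclasses may override parse_json(), process_data() and handle_reconstructed_data()
# to implement type-specific behaviour (see the ja3-jl TypeHandler later in this commit,
# which disables file rotation and reconstructs JSON objects instead).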
######## PROCESS FUNCTIONS ########
def process_data(self, data):
# save data on disk
self.save_rotate_file(data)
######## CORE FUNCTIONS ########
def check_json_file(self, json_file):
# the json object must contain a type field
if "type" in json_file:
return True
else:
return False
def save_json_file(self, json_file, save_by_uuid=True):
self.set_last_time_saved(time.time()) #time_file
self.set_last_saved_date(datetime.datetime.now().strftime("%Y%m%d%H%M%S")) #date_file
# update save path
self.set_save_path( os.path.join(self.get_save_dir(save_by_uuid=save_by_uuid), self.get_filename(file_extention='json', save_by_uuid=save_by_uuid)) )
# save json
with open(self.get_save_path(), 'w') as f:
f.write(json.dumps(json_file))
# update save path for 254 files type
if self.is_file_rotation_mode():
self.set_save_path( os.path.join(self.get_save_dir(), self.get_filename()) )
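# save_rotate_file (below): appends incoming data to the current file; when the day
# changes or more than ROTATION_SAVE_CYCLE seconds have passed, it waits for the next
# file separator, gzips the finished file and starts writing a new one.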
def save_rotate_file(self, data):
if not self.get_file_rotation():
new_date = datetime.datetime.now().strftime("%Y%m%d%H%M%S")
# check if a new file rotation is needed # # TODO: change ROTATION_SAVE_CYCLE
if ( new_date[0:8] != self.get_last_saved_date()[0:8] ) or ( int(time.time()) - self.get_last_time_saved() > ROTATION_SAVE_CYCLE ):
self.set_rotate_file(True)
# rotate file
if self.get_file_rotation():
# init save path
if self.get_save_path() is None:
self.set_last_time_saved(time.time())
self.set_last_saved_date(datetime.datetime.now().strftime("%Y%m%d%H%M%S"))
# update save path
self.set_save_path( os.path.join(self.get_save_dir(), self.get_filename()) )
# rotate file
if self.get_file_separator() in data:
end_file, start_new_file = data.rsplit(self.get_file_separator(), maxsplit=1)
# save end of file
with open(self.get_save_path(), 'ab') as f:
f.write(end_file)
self.compress_file(self.get_save_path())
# set last saved date/time
self.set_last_time_saved(time.time())
self.set_last_saved_date(datetime.datetime.now().strftime("%Y%m%d%H%M%S"))
# update save path
self.set_save_path( os.path.join(self.get_save_dir(), self.get_filename()) )
# save start of new file
if start_new_file != b'':
with open(self.get_save_path(), 'ab') as f:
f.write(start_new_file)
# end of rotation
self.set_rotate_file(False)
# wait file separator
else:
with open(self.get_save_path(), 'ab') as f:
f.write(data)
else:
# save file
with open(self.get_save_path(), 'ab') as f:
f.write(data)
def reconstruct_data(self, data):
# save data in buffer
self.add_to_buffer(data)
data = self.get_buffer()
# end of element found in data
if self.get_file_separator() in data:
# empty buffer
self.reset_buffer()
all_line = data.split(self.get_file_separator())
for reconstructed_data in all_line[:-1]:
self.handle_reconstructed_data(reconstructed_data)
# save incomplete element in buffer
if all_line[-1] != b'':
self.add_to_buffer(all_line[-1])
# no elements
else:
# force file_separator when max buffer size is reached
if self.get_size_buffer() > MAX_BUFFER_LENGTH:
print('Error, infinite loop, max buffer length reached')
self.add_to_buffer(self.get_file_separator())
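# reconstruct_data buffers incoming chunks until a file separator is seen, then emits
# each complete element via handle_reconstructed_data(). If the buffer grows past
# MAX_BUFFER_LENGTH without a separator, a separator is forced to avoid unbounded growth.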
def handle_reconstructed_data(self, data):
# send data to analyzer
self.send_to_analyzers(data)
def compress_file(self, file_full_path, i=0):
if i==0:
compressed_filename = '{}.gz'.format(file_full_path)
else:
compressed_filename = '{}.{}.gz'.format(file_full_path, i)
if os.path.isfile(compressed_filename):
self.compress_file(file_full_path, i+1)
else:
with open(file_full_path, 'rb') as f_in:
with gzip.open(compressed_filename, 'wb') as f_out:
shutil.copyfileobj(f_in, f_out)
os.remove(file_full_path)
def send_to_analyzers(self, data_to_send):
## save full path in analyzer queue
for analyzer_uuid in redis_server_metadata.smembers('analyzer:{}:{}'.format(TYPE, self.get_type_name())):
analyzer_uuid = analyzer_uuid.decode()
redis_server_analyzer.lpush('analyzer:{}:{}'.format(self.get_type_name(), analyzer_uuid), data_to_send)
redis_server_metadata.hset('analyzer:{}'.format(analyzer_uuid), 'last_updated', time.time())
analyser_queue_max_size = redis_server_metadata.hget('analyzer:{}'.format(analyzer_uuid), 'max_size')
if analyser_queue_max_size is None:
analyser_queue_max_size = analyzer_list_max_default_size
redis_server_analyzer.ltrim('analyzer:{}:{}'.format(self.get_type_name(), analyzer_uuid), 0, analyser_queue_max_size)
######## GET FUNCTIONS ########
def get_type_name(self):
return self.type_name
def get_file_separator(self):
return self.file_separator
def get_uuid(self):
return self.uuid
def get_buffer(self):
return self.buffer
def get_size_buffer(self):
return len(self.buffer)
def get_filename(self, file_extention=None, save_by_uuid=False):
if file_extention is None:
file_extention = DEFAULT_FILE_EXTENSION
# File Rotation, : data/<uuid>/254/<year>/<month>/<day>/
if self.is_file_rotation_mode() or save_by_uuid:
return '{}-{}-{}-{}-{}.{}'.format(self.uuid, self.get_last_saved_year(), self.get_last_saved_month(), self.get_last_saved_day(), self.get_last_saved_hour_minute(), file_extention)
def get_data_save_directory(self):
return self.data_directory
def get_save_dir(self, save_by_uuid=False):
# File Rotation, save data in directory: data/<uuid>/254/<year>/<month>/<day>/
if self.is_file_rotation_mode() or save_by_uuid:
data_directory_uuid_type = os.path.join(self.get_data_save_directory(), self.get_uuid(), str(TYPE))
return os.path.join(data_directory_uuid_type, self.get_last_saved_year(), self.get_last_saved_month(), self.get_last_saved_day() , self.type_name)
# data save in the same directory
else:
save_dir = os.path.join(self.get_data_save_directory(), 'datas', self.get_type_name())
if not os.path.isdir(save_dir):
os.makedirs(save_dir)
return save_dir
def get_save_path(self):
return self.save_path
def is_empty_buffer(self):
if self.buffer==b'':
return True
else:
return False
def is_file_rotation_mode(self):
if self.file_rotation_mode:
return True
else:
return False
def get_file_rotation(self):
return self.file_rotation
def get_last_time_saved(self):
return self.last_time_saved
def get_last_saved_date(self):
return self.last_saved_date
def get_last_saved_year(self):
return self.last_saved_date[0:4]
def get_last_saved_month(self):
return self.last_saved_date[4:6]
def get_last_saved_day(self):
return self.last_saved_date[6:8]
def get_last_saved_hour_minute(self):
return self.last_saved_date[8:14]
######## SET FUNCTIONS ########
def reset_buffer(self):
self.buffer = b''
def set_buffer(self, data):
self.buffer = data
def add_to_buffer(self, data):
self.buffer = b''.join([self.buffer, data])
def set_rotate_file(self, boolean_value):
self.file_rotation = boolean_value
def set_rotate_file_mode(self, boolean_value):
self.file_rotation_mode = boolean_value
def set_last_time_saved(self, value_time):
self.last_time_saved = int(value_time)
def set_last_saved_date(self, date):
self.last_saved_date = date
def set_save_path(self, save_path):
dir_path = os.path.dirname(save_path)
if not os.path.isdir(dir_path):
os.makedirs(dir_path)
self.save_path = save_path

@@ -0,0 +1,59 @@
#!/usr/bin/env python3
import os
import sys
import time
import json
import redis
import datetime
import hashlib
import binascii
from meta_types_modules.MetaTypesDefault import MetaTypesDefault
class TypeHandler(MetaTypesDefault):
def __init__(self, uuid, json_file):
super().__init__(uuid, json_file)
self.set_rotate_file_mode(False)
def process_data(self, data):
self.reconstruct_data(data)
def handle_reconstructed_data(self, data):
self.set_last_time_saved(time.time())
self.set_last_saved_date(datetime.datetime.now().strftime("%Y%m%d%H%M%S"))
# Create folders
cert_save_dir = os.path.join(self.get_save_dir(), 'certs')
jsons_save_dir = os.path.join(self.get_save_dir(), 'jsons')
if not os.path.isdir(cert_save_dir):
os.makedirs(cert_save_dir)
if not os.path.isdir(jsons_save_dir):
os.makedirs(jsons_save_dir)
# Extract certificates from json
mtjson = json.loads(data.decode())
for certificate in mtjson["Certificates"] or []:
cert = binascii.a2b_base64(certificate["Raw"])
# one could also load this cert with
# xcert = x509.load_der_x509_certificate(cert, default_backend())
m = hashlib.sha1()
m.update(cert)
cert_path = os.path.join(cert_save_dir, m.hexdigest()+'.crt')
# write unique certificate der file to disk
with open(cert_path, 'w+b') as c:
c.write(cert)
# write json file to disk
jsons_path = os.path.join(jsons_save_dir, mtjson["Timestamp"]+'.json')
with open(jsons_path, 'w') as j:
j.write(data.decode())
# Send data to Analyzer
self.send_to_analyzers(jsons_path)
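# Note: unlike MetaTypesDefault.handle_reconstructed_data(), which forwards the
# reconstructed data itself, this handler pushes the path of the saved JSON file
# to the analyzer queues.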
def test(self):
print('Class: ja3-jl')

@@ -0,0 +1,200 @@
#!/usr/bin/env python3
import os
import sys
import time
import json
import redis
import datetime
from meta_types_modules import MetaTypesDefault
host_redis_stream = "localhost"
port_redis_stream = 6379
redis_server_stream = redis.StrictRedis(
host=host_redis_stream,
port=port_redis_stream,
db=0)
host_redis_metadata = "localhost"
port_redis_metadata = 6380
redis_server_metadata = redis.StrictRedis(
host=host_redis_metadata,
port=port_redis_metadata,
db=0)
type_meta_header = 2
type_defined = 254
max_buffer_length = 100000
rotation_save_cycle = 10 #seconds
json_file_name = 'meta_json.json'
def get_class( package_class ):
parts = package_class.split('.')
module = ".".join(parts[:-1])
mod = __import__( module )
for comp in parts[1:]:
mod = getattr(mod, comp)
return mod
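# e.g. get_class('meta_types_modules.ja3-jl.ja3-jl.TypeHandler') imports the package
# and walks the dotted path to return the TypeHandler class of that extended type.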
def check_default_json_file(json_file):
# the json object must contain a type field
if "type" in json_file:
return True
else:
return False
def on_error(session_uuid, type_error, message):
redis_server_stream.sadd('Error:IncorrectType', session_uuid)
redis_server_metadata.hset('metadata_uuid:{}'.format(uuid), 'Error', 'Error: Type={}, {}'.format(type_error, message))
clean_db(session_uuid)
print('Incorrect format')
sys.exit(1)
def clean_db(session_uuid):
clean_stream(stream_meta_json, type_meta_header, session_uuid)
clean_stream(stream_defined, type_defined, session_uuid)
redis_server_stream.srem('ended_session', session_uuid)
redis_server_stream.srem('working_session_uuid:{}'.format(type_meta_header), session_uuid)
def clean_stream(stream_name, type, session_uuid):
redis_server_stream.srem('session_uuid:{}'.format(type), session_uuid)
redis_server_stream.hdel('map-type:session_uuid-uuid:{}'.format(type), session_uuid)
redis_server_stream.delete(stream_name)
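# Worker protocol (summary): read the type-2 stream first to reassemble the JSON meta
# header, pick (or default to) a TypeHandler for its extended type, then switch to the
# corresponding type-254 stream and feed every message to that handler.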
if __name__ == "__main__":
###################################################
if len(sys.argv) != 2:
print('usage:', 'Worker.py', 'session_uuid')
exit(1)
session_uuid = sys.argv[1]
stream_meta_json = 'stream:{}:{}'.format(type_meta_header, session_uuid)
stream_defined = 'stream:{}:{}'.format(type_defined, session_uuid)
id = '0'
buffer = b''
stream_name = stream_meta_json
type = type_meta_header
# track launched worker
redis_server_stream.sadd('working_session_uuid:{}'.format(type_meta_header), session_uuid)
# get uuid
res = redis_server_stream.xread({stream_name: id}, count=1)
if res:
uuid = res[0][1][0][1][b'uuid'].decode()
print('---- worker launched, uuid={} session_uuid={} epoch={}'.format(uuid, session_uuid, time.time()))
else:
clean_db(session_uuid)
print('Incorrect Stream, Closing worker: type={} session_uuid={} epoch={}'.format(type, session_uuid, time.time()))
sys.exit(1)
full_json = None
# active session
while full_json is None:
res = redis_server_stream.xread({stream_name: id}, count=1)
if res:
new_id = res[0][1][0][0].decode()
if id != new_id:
id = new_id
data = res[0][1][0][1]
if id and data:
# remove newlines from the json message
data[b'message'] = data[b'message'].replace(b'\n', b'')
# reconstruct data
if buffer != b'':
data[b'message'] = b''.join([buffer, data[b'message']])
buffer = b''
try:
full_json = json.loads(data[b'message'].decode())
except:
buffer += data[b'message']
# # TODO: filter too big json
redis_server_stream.xdel(stream_name, id)
# complete json received
if full_json:
print(full_json)
if check_default_json_file(full_json):
# end type 2 processing
break
# Incorrect Json
else:
on_error(session_uuid, type, 'Incorrect JSON object')
else:
# end session, no json received
if redis_server_stream.sismember('ended_session', session_uuid):
clean_db(session_uuid)
print('---- Incomplete JSON object, DONE, uuid={} session_uuid={}'.format(uuid, session_uuid))
sys.exit(0)
else:
time.sleep(10)
# extract/parse JSON
extended_type = full_json['type']
if not redis_server_metadata.sismember('server:accepted_extended_type', extended_type):
error_mess = 'Unsupported extended_type: {}'.format(extended_type)
on_error(session_uuid, type, error_mess)
clean_db(session_uuid)
sys.exit(1)
#### Handle Specific MetaTypes ####
# Use Specific Handler defined
if os.path.isdir(os.path.join('meta_types_modules', extended_type)):
class_type_handler = get_class('meta_types_modules.{}.{}.TypeHandler'.format(extended_type, extended_type))
type_handler = class_type_handler(uuid, full_json)
# Use Standard Handler
else:
type_handler = MetaTypesDefault.MetaTypesDefault(uuid, full_json)
#file_separator = type_handler.get_file_separator(self)
#extended_type_name = type_handler.get_file_name()
# save json on disk
type_handler.save_json_file(full_json)
# change stream_name/type
stream_name = stream_defined
type = type_defined
id = 0
buffer = b''
type_handler.test()
# handle 254 type
while True:
res = redis_server_stream.xread({stream_name: id}, count=1)
if res:
new_id = res[0][1][0][0].decode()
if id != new_id:
id = new_id
data = res[0][1][0][1]
if id and data:
# process 254 data type
type_handler.process_data(data[b'message'])
# remove data from redis stream
redis_server_stream.xdel(stream_name, id)
else:
# end session, no json received
if redis_server_stream.sismember('ended_session', session_uuid):
clean_db(session_uuid)
print('---- JSON object, DONE, uuid={} session_uuid={} epoch={}'.format(uuid, session_uuid, time.time()))
sys.exit(0)
else:
time.sleep(10)

@@ -0,0 +1,37 @@
#!/usr/bin/env python3
import os
import sys
import time
import redis
import subprocess
host_redis_stream = "localhost"
port_redis_stream = 6379
redis_server_stream = redis.StrictRedis(
host=host_redis_stream,
port=port_redis_stream,
db=0)
type = 2
try:
redis_server_stream.ping()
except redis.exceptions.ConnectionError:
print('Error: Redis server {}:{}, ConnectionError'.format(host_redis_stream, port_redis_stream))
sys.exit(1)
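# Manager pattern: poll the 'session_uuid:<type>' set and spawn one worker.py process
# per session that does not already have an entry in 'working_session_uuid:<type>'.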
if __name__ == "__main__":
stream_name = 'stream:{}'.format(type)
redis_server_stream.delete('working_session_uuid:{}'.format(type))
while True:
for session_uuid in redis_server_stream.smembers('session_uuid:{}'.format(type)):
session_uuid = session_uuid.decode()
if not redis_server_stream.sismember('working_session_uuid:{}'.format(type), session_uuid):
process = subprocess.Popen(['./worker.py', session_uuid])
print('Launching new worker{} ... session_uuid={}'.format(type, session_uuid))
#print('.')
time.sleep(10)

@@ -6,6 +6,7 @@ import time
 import redis
 import datetime
+import configparser

 def data_incorrect_format(session_uuid):
     print('Incorrect format')
@@ -19,6 +20,20 @@ redis_server_stream = redis.StrictRedis(
     port=port_redis_stream,
     db=0)

+# get file config
+config_file_server = os.path.join(os.environ['D4_HOME'], 'configs/server.conf')
+config_server = configparser.ConfigParser()
+config_server.read(config_file_server)
+
+# get data directory
+use_default_save_directory = config_server['Save_Directories'].getboolean('use_default_save_directory')
+# check if field is None
+if use_default_save_directory:
+    data_directory = os.path.join(os.environ['D4_HOME'], 'data')
+else:
+    data_directory = config_server['Save_Directories'].get('save_directory')
+
 type = 4
 rotation_save_cycle = 300 #seconds
@@ -38,13 +53,13 @@ if __name__ == "__main__":
     if res:
         date = datetime.datetime.now().strftime("%Y%m%d%H%M%S")
         uuid = res[0][1][0][1][b'uuid'].decode()
-        data_rel_path = os.path.join('../../data', uuid, str(type))
+        data_rel_path = os.path.join(data_directory, uuid, str(type))
         dir_path = os.path.join(data_rel_path, date[0:4], date[4:6], date[6:8])
         if not os.path.isdir(dir_path):
             os.makedirs(dir_path)
         filename = '{}-{}-{}-{}-{}.dnscap.txt'.format(uuid, date[0:4], date[4:6], date[6:8], date[8:14])
         rel_path = os.path.join(dir_path, filename)
-        print('---- worker launched, uuid={} session_uuid={}'.format(uuid, session_uuid))
+        print('---- worker launched, uuid={} session_uuid={} epoch={}'.format(uuid, session_uuid, time.time()))
     else:
         sys.exit(1)
         print('Incorrect message')
@@ -98,7 +113,7 @@ if __name__ == "__main__":
             redis_server_stream.srem('working_session_uuid:{}'.format(type), session_uuid)
             redis_server_stream.hdel('map-type:session_uuid-uuid:{}'.format(type), session_uuid)
             redis_server_stream.delete(stream_name)
-            print('---- dnscap DONE, uuid={} session_uuid={}'.format(uuid, session_uuid))
+            print('---- dnscap DONE, uuid={} session_uuid={} epoch={}'.format(uuid, session_uuid, time.time()))
             sys.exit(0)
         else:
             time.sleep(10)

@@ -0,0 +1,206 @@
#!/usr/bin/env python3
import os
import sys
import time
import gzip
import redis
import shutil
import datetime
import configparser
def data_incorrect_format(session_uuid):
print('Incorrect format')
sys.exit(1)
host_redis_stream = "localhost"
port_redis_stream = 6379
redis_server_stream = redis.StrictRedis(
host=host_redis_stream,
port=port_redis_stream,
db=0)
host_redis_metadata = "localhost"
port_redis_metadata = 6380
redis_server_metadata = redis.StrictRedis(
host=host_redis_metadata,
port=port_redis_metadata,
db=0)
redis_server_analyzer = redis.StrictRedis(
host=host_redis_metadata,
port=port_redis_metadata,
db=2)
# get file config
config_file_server = os.path.join(os.environ['D4_HOME'], 'configs/server.conf')
config_server = configparser.ConfigParser()
config_server.read(config_file_server)
# get data directory
use_default_save_directory = config_server['Save_Directories'].getboolean('use_default_save_directory')
# check if field is None
if use_default_save_directory:
data_directory = os.path.join(os.environ['D4_HOME'], 'data')
else:
data_directory = config_server['Save_Directories'].get('save_directory')
type = 8
rotation_save_cycle = 300 #seconds
analyzer_list_max_default_size = 10000
max_buffer_length = 10000
save_to_file = True
def compress_file(file_full_path, i=0):
if i==0:
compressed_filename = '{}.gz'.format(file_full_path)
else:
compressed_filename = '{}.{}.gz'.format(file_full_path, i)
if os.path.isfile(compressed_filename):
compress_file(file_full_path, i+1)
else:
with open(file_full_path, 'rb') as f_in:
with gzip.open(compressed_filename, 'wb') as f_out:
shutil.copyfileobj(f_in, f_out)
os.remove(file_full_path)
def get_save_dir(dir_data_uuid, year, month, day):
dir_path = os.path.join(dir_data_uuid, year, month, day)
if not os.path.isdir(dir_path):
os.makedirs(dir_path)
return dir_path
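# Main loop (summary): each received chunk is reassembled into complete lines; every
# line is pushed to the registered analyzer queues, and the raw data is also appended
# to a per-sensor file that is gzipped and rotated daily or every rotation_save_cycle
# seconds.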
if __name__ == "__main__":
if len(sys.argv) != 2:
print('usage:', 'Worker.py', 'session_uuid')
exit(1)
session_uuid = sys.argv[1]
stream_name = 'stream:{}:{}'.format(type, session_uuid)
id = '0'
buffer = b''
# track launched worker
redis_server_stream.sadd('working_session_uuid:{}'.format(type), session_uuid)
# get uuid
res = redis_server_stream.xread({stream_name: id}, count=1)
if res:
uuid = res[0][1][0][1][b'uuid'].decode()
# init file rotation
if save_to_file:
rotate_file = False
time_file = time.time()
date_file = datetime.datetime.now().strftime("%Y%m%d%H%M%S")
dir_data_uuid = os.path.join(data_directory, uuid, str(type))
dir_full_path = get_save_dir(dir_data_uuid, date_file[0:4], date_file[4:6], date_file[6:8])
filename = '{}-{}-{}-{}-{}.passivedns.txt'.format(uuid, date_file[0:4], date_file[4:6], date_file[6:8], date_file[8:14])
save_path = os.path.join(dir_full_path, filename)
print('---- worker launched, uuid={} session_uuid={} epoch={}'.format(uuid, session_uuid, time.time()))
else:
# TODO: clean db on error
print('Incorrect Stream, Closing worker: type={} session_uuid={}'.format(type, session_uuid))
sys.exit(1)
while True:
res = redis_server_stream.xread({stream_name: id}, count=1)
if res:
new_id = res[0][1][0][0].decode()
if id != new_id:
id = new_id
data = res[0][1][0][1]
if id and data:
# reconstruct data
if buffer != b'':
data[b'message'] = b''.join([buffer, data[b'message']])
buffer = b''
# send data to redis
# new line in received data
if b'\n' in data[b'message']:
all_line = data[b'message'].split(b'\n')
for line in all_line[:-1]:
for analyzer_uuid in redis_server_metadata.smembers('analyzer:{}'.format(type)):
analyzer_uuid = analyzer_uuid.decode()
redis_server_analyzer.lpush('analyzer:{}:{}'.format(type, analyzer_uuid), line)
redis_server_metadata.hset('analyzer:{}'.format(analyzer_uuid), 'last_updated', time.time())
analyser_queue_max_size = redis_server_metadata.hget('analyzer:{}'.format(analyzer_uuid), 'max_size')
if analyser_queue_max_size is None:
analyser_queue_max_size = analyzer_list_max_default_size
redis_server_analyzer.ltrim('analyzer:{}:{}'.format(type, analyzer_uuid), 0, analyser_queue_max_size)
# keep incomplete line
if all_line[-1] != b'':
buffer += all_line[-1]
else:
if len(buffer) < max_buffer_length:
buffer += data[b'message']
else:
print('Error, infinite loop, max buffer length reached')
# force new line
buffer += b''.join([ data[b'message'], b'\n' ])
# save data on disk
if save_to_file:
new_date = datetime.datetime.now().strftime("%Y%m%d%H%M%S")
# check if a new rotation is needed
if ( new_date[0:8] != date_file[0:8] ) or ( time.time() - time_file > rotation_save_cycle ):
date_file = new_date
rotate_file = True
# file rotation
if rotate_file and b'\n' in data[b'message']:
end_file, start_new_file = data[b'message'].rsplit(b'\n', maxsplit=1)
# save end of file
with open(save_path, 'ab') as f:
f.write(end_file)
compress_file(save_path)
# get new save_path
dir_full_path = get_save_dir(dir_data_uuid, date_file[0:4], date_file[4:6], date_file[6:8])
filename = '{}-{}-{}-{}-{}.passivedns.txt'.format(uuid, date_file[0:4], date_file[4:6], date_file[6:8], date_file[8:14])
save_path = os.path.join(dir_full_path, filename)
# save start of new file
if start_new_file != b'':
with open(save_path, 'ab') as f:
f.write(start_new_file)
# end of rotation
rotate_file = False
time_file = time.time()
else:
with open(save_path, 'ab') as f:
f.write(data[b'message'])
redis_server_stream.xdel(stream_name, id)
else:
# success, all data are saved
if redis_server_stream.sismember('ended_session', session_uuid):
redis_server_stream.srem('ended_session', session_uuid)
redis_server_stream.srem('session_uuid:{}'.format(type), session_uuid)
redis_server_stream.srem('working_session_uuid:{}'.format(type), session_uuid)
redis_server_stream.hdel('map-type:session_uuid-uuid:{}'.format(type), session_uuid)
redis_server_stream.delete(stream_name)
try:
if os.path.isfile(save_path):
print('save')
compress_file(save_path)
except NameError:
pass
print('---- passivedns DONE, uuid={} session_uuid={} epoch={}'.format(uuid, session_uuid, time.time()))
sys.exit(0)
else:
time.sleep(10)

@@ -0,0 +1,37 @@
#!/usr/bin/env python3
import os
import sys
import time
import redis
import subprocess
host_redis_stream = "localhost"
port_redis_stream = 6379
redis_server_stream = redis.StrictRedis(
host=host_redis_stream,
port=port_redis_stream,
db=0)
type = 8
try:
redis_server_stream.ping()
except redis.exceptions.ConnectionError:
print('Error: Redis server {}:{}, ConnectionError'.format(host_redis_stream, port_redis_stream))
sys.exit(1)
if __name__ == "__main__":
stream_name = 'stream:{}'.format(type)
redis_server_stream.delete('working_session_uuid:{}'.format(type))
while True:
for session_uuid in redis_server_stream.smembers('session_uuid:{}'.format(type)):
session_uuid = session_uuid.decode()
if not redis_server_stream.sismember('working_session_uuid:{}'.format(type), session_uuid):
process = subprocess.Popen(['./worker.py', session_uuid])
print('Launching new worker{} ... session_uuid={}'.format(type, session_uuid))
#print('.')
time.sleep(10)