Compare commits
120 Commits
Author | SHA1 | Date |
---|---|---|
Jean-Louis Huynen | cb3d618ee1 | |
Koen Van Impe | 27aa5b1df9 | |
Alexandre Dulaunoy | 2e8ddd490f | |
DocArmoryTech | 090d0f66bb | |
DocArmoryTech | dfd53c126b | |
DocArmoryTech | 6273a220b2 | |
Jean-Louis Huynen | 81686aa022 | |
Gerard Wagener | 399e659d8f | |
Terrtia | b2f463e8f1 | |
Terrtia | adf0f6008b | |
Terrtia | 39d593364d | |
Terrtia | cbb90c057a | |
Terrtia | b7998d5601 | |
Terrtia | dc3cdcbc1c | |
Jean-Louis Huynen | 6c3c9f9954 | |
Jean-Louis Huynen | a6d5a3d22c | |
Jean-Louis Huynen | 36a771ea2d | |
Jean-Louis Huynen | ef6e87f3c5 | |
Jean-Louis Huynen | 5a3e299332 | |
Jean-Louis Huynen | d74d2fb71a | |
Jean-Louis Huynen | cf64529929 | |
Terrtia | 893631e003 | |
Terrtia | ac301b5360 | |
Terrtia | 04fab82f5e | |
Terrtia | a297cef179 | |
Terrtia | 3edf227cc1 | |
Terrtia | 2d358918c9 | |
Terrtia | 47f82c8879 | |
Terrtia | 6fee7df9fe | |
Terrtia | 4b30072880 | |
Terrtia | 7ce265e477 | |
Terrtia | 168c31a5bf | |
Terrtia | adda78faad | |
Terrtia | df482d6ee3 | |
Terrtia | 609402ebf2 | |
Thirion Aurélien | 98e562bd47 | |
Terrtia | f17f80b21c | |
Jean-Louis Huynen | 00e3ce3437 | |
Terrtia | 82b2944119 | |
Terrtia | 7078f341ae | |
Terrtia | cb1c8c4d65 | |
Terrtia | 091994a34d | |
Terrtia | 209cd0500f | |
Terrtia | 99656658f2 | |
Terrtia | cdc72e7998 | |
Terrtia | 8a792fe4ba | |
Terrtia | ab261a6bd2 | |
Terrtia | 14d3a650e5 | |
Terrtia | b48ad52845 | |
Terrtia | 10430135d1 | |
Terrtia | 4d55d601a1 | |
Terrtia | aabf74f2f3 | |
Terrtia | d3087662a7 | |
Terrtia | 8fa83dd248 | |
Terrtia | 56e7657253 | |
Terrtia | bb3c1b2676 | |
Terrtia | 1c61e1d1fe | |
Terrtia | f5770b6e60 | |
Terrtia | e39ef2c551 | |
Terrtia | d01f686514 | |
Terrtia | a800e8c8f1 | |
Terrtia | 4dc2d1abef | |
Terrtia | 5b0b5a6f68 | |
Terrtia | 8f5a084d32 | |
Terrtia | 8bf0fe4590 | |
Terrtia | 6f58e862cc | |
Terrtia | 8db01c389b | |
Terrtia | 9a71a7a892 | |
Terrtia | d870819080 | |
Terrtia | 0bd02f21d6 | |
Terrtia | 3ce8557cff | |
Terrtia | 336fc7655a | |
Terrtia | b530c67825 | |
Terrtia | 16d9eb2561 | |
Terrtia | c54575ae77 | |
Terrtia | 5b320a9470 | |
Terrtia | 648e406c54 | |
Jean-Louis Huynen | f5af770516 | |
Thirion Aurélien | 61043d81aa | |
Terrtia | 2bc20333a9 | |
Thirion Aurélien | e9ef2d529f | |
Terrtia | 4ce9888f5d | |
Alexandre Dulaunoy | ff256984a3 | |
Terrtia | 96cfebd0ea | |
Thirion Aurélien | f6b6137937 | |
Terrtia | eb6ff228e8 | |
Terrtia | 450f5860e4 | |
Terrtia | 3630ec0460 | |
Terrtia | e5720087de | |
Terrtia | 15bb67a086 | |
Terrtia | d722390f89 | |
Terrtia | c8d2b8cb95 | |
Thirion Aurélien | 113159f820 | |
Terrtia | 67bf0c3cf0 | |
Alexandre Dulaunoy | 8bf6bdc1fe | |
Terrtia | 85f2964c6c | |
Terrtia | c19e43c931 | |
Terrtia | 489ce2c955 | |
Alexandre Dulaunoy | 868777eba5 | |
ljaqueme | acb20a769b | |
ljaqueme | 91500ba460 | |
ljaqueme | c6f21f0b5f | |
ljaqueme | 6b5ec52e28 | |
Terrtia | 3650637ce8 | |
Terrtia | b6df534a72 | |
Terrtia | fb15487773 | |
Terrtia | bfc75e0db8 | |
Terrtia | e6d98d2dbc | |
Terrtia | c26e95ce50 | |
Terrtia | bf2fce284f | |
Terrtia | 1dd57366c2 | |
Terrtia | 3a22c250ee | |
Terrtia | ae2adfe4d6 | |
Terrtia | 40ff019e2f | |
Terrtia | 0816a93efe | |
Terrtia | e4e4d8d57e | |
Terrtia | 7d96e76690 | |
Terrtia | 87a68494c1 | |
Terrtia | c0e441ee6b | |
Terrtia | 4086b462b7 |
50
README.md
@@ -12,6 +12,10 @@ to an existing sensor network using simple clients.

[D4 core client](https://github.com/D4-project/d4-core/tree/master/client) is a simple and minimal implementation of the [D4 encapsulation protocol](https://github.com/D4-project/architecture/tree/master/format). There is also a [portable D4 client](https://github.com/D4-project/d4-goclient) written in Go, including support for SSL/TLS connectivity.
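As a quick orientation, the sketch below shows how a D4 packet could be assembled in Python. It is illustrative only: the field layout (version, type, sensor UUID, timestamp, HMAC, payload size) and the byte order are assumptions taken from the format specification, and the HMAC-field-zeroed-during-computation behaviour mirrors the comment in the C client further down; the real implementations are the C and Go clients.

```python
# Minimal sketch of D4 packet assembly (assumed header layout and little-endian
# byte order; not the reference implementation - see the C and Go clients).
import hashlib, hmac, struct, time, uuid

def build_d4_packet(payload: bytes, sensor_uuid: uuid.UUID, hmac_key: bytes, d4_type: int = 1) -> bytes:
    # assumed header: version (1B), type (1B), uuid (16B), timestamp (8B), hmac (32B), size (4B)
    header = struct.pack('<BB16sQ32sI', 1, d4_type, sensor_uuid.bytes,
                         int(time.time()), b'\x00' * 32, len(payload))
    # HMAC-SHA-256 over header + payload, with the HMAC field zeroed during computation
    digest = hmac.new(hmac_key, header + payload, hashlib.sha256).digest()
    return header[:26] + digest + header[58:] + payload

pkt = build_d4_packet(b'hello', uuid.uuid4(), b'private key to change')
print(len(pkt))  # 62-byte header + payload
```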
<p align="center">
<img alt="d4-cclient" src="https://raw.githubusercontent.com/D4-project/d4-core/master/client/media/d4c-client.png" height="140" />
</p>

### Requirements

- Unix-like operating system

@@ -60,10 +64,31 @@ git submodule init
git submodule update
~~~~

Build the d4 client. This will create the `d4` binary.

~~~~
make
~~~~

Then register the sensor with the server. Replace `API_TOKEN`, `VALID_UUID4` (create a random UUID via [UUIDgenerator](https://www.uuidgenerator.net/)) and `VALID_HMAC_KEY`.

~~~~
curl -k https://127.0.0.1:7000/api/v1/add/sensor/register --header "Authorization: API_TOKEN" -H "Content-Type: application/json" --data '{"uuid":"VALID_UUID4","hmac_key":"VALID_HMAC_KEY"}' -X POST
~~~~

If the registration went correctly, the UUID is returned. Do not forget to approve the registration in the D4 server web interface.

Update the configuration file:

~~~~
cp -r conf.sample conf
echo VALID_UUID4 > conf/uuid
echo VALID_HMAC_KEY > conf/key
~~~~

## D4 core server

D4 core server is a complete server to handle clients (sensors) including the decapsulation of the [D4 protocol](https://github.com/D4-project/architecture/tree/master/format), control of sensor registrations, management of decoding protocols and dispatching to adequate decoders/analysers.

### Requirements

@@ -72,13 +97,26 @@ sensor registrations, management of decoding protocols and dispatching to adequate

### Installation

- [Install D4 Server](https://github.com/D4-project/d4-core/tree/master/server)

### Screenshots of D4 core server management
### D4 core server Screenshots

#### Dashboard:
![](https://raw.githubusercontent.com/D4-project/d4-core/master/doc/images/main.png)

#### Connected Sensors:
![](https://raw.githubusercontent.com/D4-project/d4-core/master/doc/images/sensor-mgmt.png)
![](https://raw.githubusercontent.com/D4-project/d4-core/master/doc/images/server-mgmt.png)

#### Sensors Status:
![](https://raw.githubusercontent.com/D4-project/d4-core/master/doc/images/sensor_status.png)
![](https://raw.githubusercontent.com/D4-project/d4-core/master/doc/images/sensor_stat_types.png)
![](https://raw.githubusercontent.com/D4-project/d4-core/master/doc/images/sensor_stat_files.png)

#### Server Management:
![](https://raw.githubusercontent.com/D4-project/d4-core/master/doc/images/server-management.png)
![](https://raw.githubusercontent.com/D4-project/d4-core/master/doc/images/server-management-types.png)

#### Analyzer Queues:
![](https://raw.githubusercontent.com/D4-project/d4-core/master/doc/images/analyzer-queues.png)
![](https://raw.githubusercontent.com/D4-project/d4-core/master/doc/images/create_analyzer_queue.png)
![](https://raw.githubusercontent.com/D4-project/d4-core/master/doc/images/analyzer-mgmt.png)
![](https://raw.githubusercontent.com/D4-project/d4-core/master/doc/images/server-mgmt2.png)

@@ -32,7 +32,7 @@ clean:
	- rm -rf *.o hmac

d4: d4.o sha2.o hmac.o unpack.o unparse.o pack.o gen_uuid.o randutils.o parse.o
	gcc -Wall -o d4 d4.o hmac.o sha2.o unpack.o pack.o unparse.o gen_uuid.o randutils.o parse.o
	$(CC) -Wall -o d4 d4.o hmac.o sha2.o unpack.o pack.o unparse.o gen_uuid.o randutils.o parse.o

d4.o: d4.c
	gcc -Wall -c d4.c
	$(CC) -Wall -c d4.c

@ -210,7 +210,7 @@ void d4_transfert(d4_t* d4)
|
|||
//In case of errors see block of 0 bytes
|
||||
bzero(buf, d4->snaplen);
|
||||
nread = read(d4->source.fd, buf, d4->snaplen);
|
||||
if ( nread > 0 ) {
|
||||
if ( nread >= 0 ) {
|
||||
d4_update_header(d4, nread);
|
||||
//Do HMAC on header and payload. HMAC field is 0 during computation
|
||||
if (d4->ctx) {
|
||||
|
@ -238,6 +238,11 @@ void d4_transfert(d4_t* d4)
|
|||
fprintf(stderr,"Incomplete header written. abort to let consumer known that the packet is corrupted\n");
|
||||
abort();
|
||||
}
|
||||
// no data - create empty D4 packet
|
||||
if ( nread == 0 ) {
|
||||
//FIXME no data available, sleep, abort, retry
|
||||
break;
|
||||
}
|
||||
} else{
|
||||
//FIXME no data available, sleep, abort, retry
|
||||
break;
|
||||
|
|
@@ -0,0 +1,15 @@
FROM python:3

WORKDIR /usr/src/
RUN git clone https://github.com/D4-project/analyzer-d4-passivedns.git
# RUN git clone https://github.com/trolldbois/analyzer-d4-passivedns.git
WORKDIR /usr/src/analyzer-d4-passivedns

# FIXME typo in requirements.txt filename
RUN pip install --no-cache-dir -r requirements
WORKDIR /usr/src/analyzer-d4-passivedns/bin

# should be a config
# RUN cat /usr/src/analyzer-d4-passivedns/etc/analyzer.conf.sample | sed "s/127.0.0.1/redis-metadata/g" > /usr/src/analyzer-d4-passivedns/etc/analyzer.conf
# ignore the config and use ENV variables.
RUN cp ../etc/analyzer.conf.sample ../etc/analyzer.conf

@@ -0,0 +1,37 @@
FROM python:3

# that doesn't work on Windows Docker due to linefeeds
# WORKDIR /usr/src/d4-server
# COPY . .

## alternate solution
WORKDIR /usr/src/tmp
# RUN git clone https://github.com/trolldbois/d4-core.git
RUN git clone https://github.com/D4-project/d4-core.git
RUN mv d4-core/server/ /usr/src/d4-server
WORKDIR /usr/src/d4-server

ENV D4_HOME=/usr/src/d4-server
RUN pip install --no-cache-dir -r requirement.txt

# move to tls proxy ?
WORKDIR /usr/src/d4-server/gen_cert
RUN ./gen_root.sh
RUN ./gen_cert.sh

# setup a lot of files
WORKDIR /usr/src/d4-server/web
RUN ./update_web.sh

WORKDIR /usr/src/d4-server

# Should be using configs instead, but not supported until docker 17.06+
RUN cp configs/server.conf.sample configs/server.conf

# workers need tcpdump
RUN apt-get update && apt-get install -y tcpdump

ENTRYPOINT ["python", "server.py", "-v", "10"]

# CMD bash -l

@@ -11,10 +11,10 @@ CYAN="\\033[1;36m"

. ./D4ENV/bin/activate

isredis=`screen -ls | egrep '[0-9]+.Redis_D4' | cut -d. -f1`
isd4server=`screen -ls | egrep '[0-9]+.Server_D4' | cut -d. -f1`
isworker=`screen -ls | egrep '[0-9]+.Workers_D4' | cut -d. -f1`
isflask=`screen -ls | egrep '[0-9]+.Flask_D4' | cut -d. -f1`
isredis=`screen -ls | egrep '[0-9]+.Redis_D4 ' | cut -d. -f1`
isd4server=`screen -ls | egrep '[0-9]+.Server_D4 ' | cut -d. -f1`
isworker=`screen -ls | egrep '[0-9]+.Workers_D4 ' | cut -d. -f1`
isflask=`screen -ls | egrep '[0-9]+.Flask_D4 ' | cut -d. -f1`

function helptext {
    echo -e $YELLOW"

@@ -45,6 +45,10 @@ function helptext {
    "
}

CONFIG=$D4_HOME/configs/server.conf
redis_stream=`sed -nr '/\[Redis_STREAM\]/,/\[/{/port/p}' ${CONFIG} | awk -F= '/port/{print $2}' | sed 's/ //g'`
redis_metadata=`sed -nr '/\[Redis_METADATA\]/,/\[/{/port/p}' ${CONFIG} | awk -F= '/port/{print $2}' | sed 's/ //g'`

function launching_redis {
    conf_dir="${D4_HOME}/configs/"
    redis_dir="${D4_HOME}/redis/src/"

@@ -65,6 +69,8 @@ function launching_d4_server {

    screen -S "Server_D4" -X screen -t "Server_D4" bash -c "cd ${D4_HOME}; ./server.py -v 10; read x"
    sleep 0.1
    screen -S "Server_D4" -X screen -t "sensors_manager" bash -c "cd ${D4_HOME}; ./sensors_manager.py; read x"
    sleep 0.1
}

function launching_workers {

@@ -76,6 +82,8 @@ function launching_workers {
    sleep 0.1
    screen -S "Workers_D4" -X screen -t "2_workers" bash -c "cd ${D4_HOME}/workers/workers_2; ./workers_manager.py; read x"
    sleep 0.1
    screen -S "Workers_D4" -X screen -t "3_workers" bash -c "cd ${D4_HOME}/workers/workers_3; ./workers_manager.py; read x"
    sleep 0.1
    screen -S "Workers_D4" -X screen -t "4_workers" bash -c "cd ${D4_HOME}/workers/workers_4; ./workers_manager.py; read x"
    sleep 0.1
    screen -S "Workers_D4" -X screen -t "8_workers" bash -c "cd ${D4_HOME}/workers/workers_8; ./workers_manager.py; read x"

@@ -84,22 +92,22 @@ function launching_workers {

function shutting_down_redis {
    redis_dir=${D4_HOME}/redis/src/
    bash -c $redis_dir'redis-cli -p 6379 SHUTDOWN'
    bash -c $redis_dir'redis-cli -p '$redis_stream' SHUTDOWN'
    sleep 0.1
    bash -c $redis_dir'redis-cli -p 6380 SHUTDOWN'
    bash -c $redis_dir'redis-cli -p '$redis_metadata' SHUTDOWN'
    sleep 0.1
}

function checking_redis {
    flag_redis=0
    redis_dir=${D4_HOME}/redis/src/
    bash -c $redis_dir'redis-cli -p 6379 PING | grep "PONG" &> /dev/null'
    bash -c $redis_dir'redis-cli -p '$redis_stream' PING | grep "PONG" &> /dev/null'
    if [ ! $? == 0 ]; then
        echo -e $RED"\t6379 not ready"$DEFAULT
        flag_redis=1
    fi
    sleep 0.1
    bash -c $redis_dir'redis-cli -p 6380 PING | grep "PONG" &> /dev/null'
    bash -c $redis_dir'redis-cli -p '$redis_metadata' PING | grep "PONG" &> /dev/null'
    if [ ! $? == 0 ]; then
        echo -e $RED"\t6380 not ready"$DEFAULT
        flag_redis=1

@@ -109,6 +117,18 @@ function checking_redis {
    return $flag_redis;
}

function wait_until_redis_is_ready {
    redis_not_ready=true
    while $redis_not_ready; do
        if checking_redis; then
            redis_not_ready=false;
        else
            sleep 1
        fi
    done
    echo -e $YELLOW"\t* Redis Launched"$DEFAULT
}

function launch_redis {
    if [[ ! $isredis ]]; then
        launching_redis;

@@ -280,6 +300,9 @@ while [ "$1" != "" ]; do
    -k | --killAll ) helptext;
                     killall;
                     ;;
    -lrv | --launchRedisVerify ) launch_redis;
                                 wait_until_redis_is_ready;
                                 ;;
    -h | --help ) helptext;
                  exit
                  ;;

@@ -15,11 +15,24 @@ sensor registrations, management of decoding protocols and dispatching to adequate

### Installation

###### Install D4 server

Clone the repository and install necessary packages. Installation requires *sudo* permissions.

~~~~
git clone https://github.com/D4-project/d4-core.git
cd d4-core
cd server
./install_server.sh
~~~~
Create or add a pem in [d4-core/server](https://github.com/D4-project/d4-core/tree/master/server) :

When the installation is finished, scroll back to where `+ ./create_default_user.py` is displayed. The next lines contain the default generated user and should resemble the snippet below. Take a temporary note of the password; you are required to **change the password** on first login.
~~~~
new user created: admin@admin.test
password: <redacted>
token: <redacted>
~~~~

Then create or add a pem in [d4-core/server](https://github.com/D4-project/d4-core/tree/master/server) :
~~~~
cd gen_cert
./gen_root.sh

@@ -27,7 +40,6 @@ cd gen_cert
cd ..
~~~~

###### Launch D4 server
~~~~
./LAUNCH.sh -l

@@ -35,6 +47,14 @@ cd ..

The web interface is accessible via `http://127.0.0.1:7000/`

If you cannot access the web interface on localhost (for example because the system is running on a remote host), then stop the server, change the listening host IP and restart the server. In the example below it is changed to `0.0.0.0` (all interfaces). Make sure that the IP is not unintentionally exposed publicly.

~~~~
./LAUNCH.sh -k
sed -i '/\[Flask_Server\]/{:a;N;/host = 127\.0\.0\.1/!ba;s/host = 127\.0\.0\.1/host = 0.0.0.0/}' configs/server.conf
./LAUNCH.sh -l
~~~~

### Updating web assets
To update javascript libs run:
~~~~

@@ -42,18 +62,36 @@ cd web
./update_web.sh
~~~~

### API

[API Documentation](https://github.com/D4-project/d4-core/tree/master/server/documentation/README.md)

### Notes

- All server logs are located in ``d4-core/server/logs/``
- Close D4 Server: ``./LAUNCH.sh -k``

### Screenshots of D4 core server management
### D4 core server

#### Dashboard:
![](https://raw.githubusercontent.com/D4-project/d4-core/master/doc/images/main.png)

#### Connected Sensors:
![](https://raw.githubusercontent.com/D4-project/d4-core/master/doc/images/sensor-mgmt.png)
![](https://raw.githubusercontent.com/D4-project/d4-core/master/doc/images/server-mgmt.png)

#### Sensors Status:
![](https://raw.githubusercontent.com/D4-project/d4-core/master/doc/images/sensor_status.png)
![](https://raw.githubusercontent.com/D4-project/d4-core/master/doc/images/sensor_stat_types.png)
![](https://raw.githubusercontent.com/D4-project/d4-core/master/doc/images/sensor_stat_files.png)

#### Server Management:
![](https://raw.githubusercontent.com/D4-project/d4-core/master/doc/images/server-management.png)
![](https://raw.githubusercontent.com/D4-project/d4-core/master/doc/images/server-management-types.png)

#### Analyzer Queues:
![](https://raw.githubusercontent.com/D4-project/d4-core/master/doc/images/analyzer-queues.png)
![](https://raw.githubusercontent.com/D4-project/d4-core/master/doc/images/create_analyzer_queue.png)
![](https://raw.githubusercontent.com/D4-project/d4-core/master/doc/images/analyzer-mgmt.png)
![](https://raw.githubusercontent.com/D4-project/d4-core/master/doc/images/server-mgmt2.png)

### Troubleshooting

@@ -66,3 +104,7 @@ Run the following command as root:
~~~~
aa-complain /usr/sbin/tcpdump
~~~~

###### WARNING - Not registered UUID=UUID4, connection closed

This happens after you have registered a new sensor but have not yet approved the registration. To approve the sensor, open **Server Management** in the web interface and click **Pending Sensors**.

@@ -0,0 +1,75 @@
#!/usr/bin/env python3

import os
import sys
import time
import redis
import socket
import argparse

import logging
import logging.handlers

log_level = {'DEBUG': 10, 'INFO': 20, 'WARNING': 30, 'ERROR': 40, 'CRITICAL': 50}

if __name__ == "__main__":
    parser = argparse.ArgumentParser(description='Export d4 data to syslog')
    parser.add_argument('-t', '--type', help='d4 type or extended type', type=str, dest='type', required=True)
    parser.add_argument('-u', '--uuid', help='queue uuid', type=str, dest='uuid', required=True)
    parser.add_argument('-i', '--ip', help='server ip', type=str, default='127.0.0.1', dest='target_ip')
    parser.add_argument('-p', '--port', help='server port', type=int, default=514, dest='target_port')
    parser.add_argument('-l', '--log_level', help='log level: DEBUG, INFO, WARNING, ERROR, CRITICAL', type=str, default='INFO', dest='req_level')
    parser.add_argument('-n', '--newline', help='add new lines', action="store_true")
    parser.add_argument('-ri', '--redis_ip', help='redis host', type=str, default='127.0.0.1', dest='host_redis')
    parser.add_argument('-rp', '--redis_port', help='redis port', type=int, default=6380, dest='port_redis')
    args = parser.parse_args()

    if not args.uuid or not args.type or not args.target_port:
        parser.print_help()
        sys.exit(0)

    host_redis = args.host_redis
    port_redis = args.port_redis
    newLines = args.newline
    req_level = args.req_level

    if req_level not in log_level:
        print('ERROR: incorrect log level')
        sys.exit(0)

    redis_d4 = redis.StrictRedis(
        host=host_redis,
        port=port_redis,
        db=2)
    try:
        redis_d4.ping()
    except redis.exceptions.ConnectionError:
        print('Error: Redis server {}:{}, ConnectionError'.format(host_redis, port_redis))
        sys.exit(1)

    d4_uuid = args.uuid
    d4_type = args.type
    data_queue = 'analyzer:{}:{}'.format(d4_type, d4_uuid)

    target_ip = args.target_ip
    target_port = args.target_port
    addr = (target_ip, target_port)

    syslog_logger = logging.getLogger('D4-SYSLOGOUT')
    syslog_logger.setLevel(logging.DEBUG)
    client_socket = logging.handlers.SysLogHandler(address=addr)
    syslog_logger.addHandler(client_socket)

    while True:

        d4_data = redis_d4.rpop(data_queue)
        if d4_data is None:
            time.sleep(1)
            continue

        if newLines:
            d4_data = d4_data + b'\n'

        syslog_logger.log(log_level[req_level], d4_data.decode())

    client_socket.close()

@@ -0,0 +1,86 @@
#!/usr/bin/env python3

import os
import sys

import redis
import time
import datetime

import argparse
import logging
import logging.handlers

import socket

if __name__ == "__main__":
    parser = argparse.ArgumentParser(description='Export d4 data to a TCP server')
    parser.add_argument('-t', '--type', help='d4 type or extended type', type=str, dest='type', required=True)
    parser.add_argument('-u', '--uuid', help='queue uuid', type=str, dest='uuid', required=True)
    parser.add_argument('-i', '--ip', help='server ip', type=str, default='127.0.0.1', dest='target_ip')
    parser.add_argument('-p', '--port', help='server port', type=int, dest='target_port', required=True)
    parser.add_argument('-k', '--Keepalive', help='Keepalive in seconds', type=int, default='15', dest='ka_sec')
    parser.add_argument('-n', '--newline', help='add new lines', action="store_true")
    parser.add_argument('-ri', '--redis_ip', help='redis ip', type=str, default='127.0.0.1', dest='host_redis')
    parser.add_argument('-rp', '--redis_port', help='redis port', type=int, default=6380, dest='port_redis')
    args = parser.parse_args()

    if not args.uuid or not args.type or not args.target_port:
        parser.print_help()
        sys.exit(0)

    host_redis = args.host_redis
    port_redis = args.port_redis
    newLines = args.newline

    redis_d4 = redis.StrictRedis(
        host=host_redis,
        port=port_redis,
        db=2)
    try:
        redis_d4.ping()
    except redis.exceptions.ConnectionError:
        print('Error: Redis server {}:{}, ConnectionError'.format(host_redis, port_redis))
        sys.exit(1)

    d4_uuid = args.uuid
    d4_type = args.type
    data_queue = 'analyzer:{}:{}'.format(d4_type, d4_uuid)

    target_ip = args.target_ip
    target_port = args.target_port
    addr = (target_ip, target_port)

    # default keep alive: 15
    ka_sec = args.ka_sec

    # Create a TCP socket
    client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)

    # TCP Keepalive
    client_socket.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
    client_socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPCNT, 1)
    client_socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPIDLE, ka_sec)
    client_socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPINTVL, ka_sec)

    # TCP connect
    client_socket.connect(addr)

    newLines = True
    while True:

        d4_data = redis_d4.rpop(data_queue)
        if d4_data is None:
            time.sleep(1)
            continue

        if newLines:
            d4_data = d4_data + b'\n'

        print(d4_data)
        client_socket.sendall(d4_data)

    client_socket.shutdown(socket.SHUT_RDWR)

@@ -0,0 +1,101 @@
#!/usr/bin/env python3

import os
import sys

import redis
import time
import datetime

import argparse
import logging
import logging.handlers

import socket
import ssl

if __name__ == "__main__":
    parser = argparse.ArgumentParser(description='Export d4 data to a TCP server over TLS')
    parser.add_argument('-t', '--type', help='d4 type or extended type', type=str, dest='type', required=True)
    parser.add_argument('-u', '--uuid', help='queue uuid', type=str, dest='uuid', required=True)
    parser.add_argument('-i', '--ip', help='server ip', type=str, default='127.0.0.1', dest='target_ip')
    parser.add_argument('-p', '--port', help='server port', type=int, dest='target_port', required=True)
    parser.add_argument('-k', '--Keepalive', help='Keepalive in seconds', type=int, default='15', dest='ka_sec')
    parser.add_argument('-n', '--newline', help='add new lines', action="store_true")
    parser.add_argument('-ri', '--redis_ip', help='redis ip', type=str, default='127.0.0.1', dest='host_redis')
    parser.add_argument('-rp', '--redis_port', help='redis port', type=int, default=6380, dest='port_redis')
    parser.add_argument('-v', '--verify_certificate', help='verify server certificate', type=str, default='True', dest='verify_certificate')
    parser.add_argument('-c', '--ca_certs', help='cert filename', type=str, default=None, dest='ca_certs')
    args = parser.parse_args()

    if not args.uuid or not args.type or not args.target_port:
        parser.print_help()
        sys.exit(0)

    host_redis = args.host_redis
    port_redis = args.port_redis
    newLines = args.newline
    verify_certificate = args.verify_certificate
    ca_certs = args.ca_certs

    redis_d4 = redis.StrictRedis(
        host=host_redis,
        port=port_redis,
        db=2)
    try:
        redis_d4.ping()
    except redis.exceptions.ConnectionError:
        print('Error: Redis server {}:{}, ConnectionError'.format(host_redis, port_redis))
        sys.exit(1)

    d4_uuid = args.uuid
    d4_type = args.type
    data_queue = 'analyzer:{}:{}'.format(d4_type, d4_uuid)

    target_ip = args.target_ip
    target_port = args.target_port
    addr = (target_ip, target_port)

    # default keep alive: 15
    ka_sec = args.ka_sec

    # Create a TCP socket
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)

    # TCP Keepalive
    s.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
    s.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPCNT, 1)
    s.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPIDLE, ka_sec)
    s.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPINTVL, ka_sec)

    # SSL
    if verify_certificate in ['False', 'false', 'f']:
        cert_reqs_option = ssl.CERT_NONE
    else:
        cert_reqs_option = ssl.CERT_REQUIRED

    if not ca_certs:
        ca_certs = None  # keep the default when no CA bundle was provided

    client_socket = ssl.wrap_socket(s, cert_reqs=cert_reqs_option, ca_certs=ca_certs, ssl_version=ssl.PROTOCOL_TLS)

    # TCP connect
    client_socket.connect(addr)

    newLines = True
    while True:

        d4_data = redis_d4.rpop(data_queue)
        if d4_data is None:
            time.sleep(1)
            continue

        if newLines:
            d4_data = d4_data + b'\n'

        print(d4_data)
        client_socket.send(d4_data)

    client_socket.shutdown(socket.SHUT_RDWR)

@@ -0,0 +1,73 @@
#!/usr/bin/env python3

import os
import sys

import redis
import time
import datetime

import argparse
import logging
import logging.handlers

import socket

if __name__ == "__main__":
    parser = argparse.ArgumentParser(description='Export d4 data to a UDP server')
    parser.add_argument('-t', '--type', help='d4 type or extended type', type=str, dest='type', required=True)
    parser.add_argument('-u', '--uuid', help='queue uuid', type=str, dest='uuid', required=True)
    parser.add_argument('-i', '--ip', help='server ip', type=str, default='127.0.0.1', dest='target_ip')
    parser.add_argument('-p', '--port', help='server port', type=int, dest='target_port', required=True)
    parser.add_argument('-n', '--newline', help='add new lines', action="store_true")
    parser.add_argument('-ri', '--redis_ip', help='redis host', type=str, default='127.0.0.1', dest='host_redis')
    parser.add_argument('-rp', '--redis_port', help='redis port', type=int, default=6380, dest='port_redis')
    args = parser.parse_args()

    if not args.uuid or not args.type or not args.target_port:
        parser.print_help()
        sys.exit(0)

    host_redis = args.host_redis
    port_redis = args.port_redis
    newLines = args.newline

    redis_d4 = redis.StrictRedis(
        host=host_redis,
        port=port_redis,
        db=2)
    try:
        redis_d4.ping()
    except redis.exceptions.ConnectionError:
        print('Error: Redis server {}:{}, ConnectionError'.format(host_redis, port_redis))
        sys.exit(1)

    d4_uuid = args.uuid
    d4_type = args.type
    data_queue = 'analyzer:{}:{}'.format(d4_type, d4_uuid)

    target_ip = args.target_ip
    target_port = args.target_port
    addr = (target_ip, target_port)

    # Create a UDP socket
    client_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)

    newLines = True
    while True:

        d4_data = redis_d4.rpop(data_queue)
        if d4_data is None:
            time.sleep(1)
            continue

        if newLines:
            d4_data = d4_data + b'\n'

        print(d4_data)
        client_socket.sendto(d4_data, addr)

    client_socket.close()

@@ -0,0 +1,80 @@
#!/usr/bin/env python3

import os
import sys

import redis
import time
import datetime

import argparse
import logging
import logging.handlers

import socket

if __name__ == "__main__":
    parser = argparse.ArgumentParser(description='Export d4 data to a UNIX socket')
    parser.add_argument('-t', '--type', help='d4 type or extended type', type=str, dest='type', required=True)
    parser.add_argument('-u', '--uuid', help='queue uuid', type=str, dest='uuid', required=True)
    parser.add_argument('-s', '--socket', help='socket file', type=str, dest='socket_file', required=True)
    parser.add_argument('-n', '--newline', help='add new lines', action="store_true")
    parser.add_argument('-ri', '--redis_ip', help='redis host', type=str, default='127.0.0.1', dest='host_redis')
    parser.add_argument('-rp', '--redis_port', help='redis port', type=int, default=6380, dest='port_redis')
    args = parser.parse_args()

    if not args.uuid or not args.type or not args.socket_file:
        parser.print_help()
        sys.exit(0)

    host_redis = args.host_redis
    port_redis = args.port_redis
    newLines = args.newline

    redis_d4 = redis.StrictRedis(
        host=host_redis,
        port=port_redis,
        db=2)
    try:
        redis_d4.ping()
    except redis.exceptions.ConnectionError:
        print('Error: Redis server {}:{}, ConnectionError'.format(host_redis, port_redis))
        sys.exit(1)

    d4_uuid = args.uuid
    d4_type = args.type
    data_queue = 'analyzer:{}:{}'.format(d4_type, d4_uuid)

    socket_file = args.socket_file
    print("UNIX SOCKET: Connecting...")
    if os.path.exists(socket_file):
        client = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM)
        client.connect(socket_file)
        print("Connected")
    else:
        print("Couldn't Connect!")
        print("ERROR: socket file not found")
        sys.exit(1)
    print("Done")

    newLines = False
    while True:

        d4_data = redis_d4.rpop(data_queue)
        if d4_data is None:
            time.sleep(1)
            continue

        if newLines:
            d4_data = d4_data + b'\n'

        print(d4_data)

        client.send(d4_data)

    client.close()

@@ -0,0 +1,81 @@
#!/usr/bin/env python3

import os
import sys

import redis
import time
import datetime

import argparse
import logging
import logging.handlers

if __name__ == "__main__":
    parser = argparse.ArgumentParser(description='Export d4 data to stdout')
    parser.add_argument('-t', '--type', help='d4 type', type=int, dest='type', required=True)
    parser.add_argument('-u', '--uuid', help='queue uuid', type=str, dest='uuid', required=True)
    parser.add_argument('-f', '--files', help='read data from files. Append file to stdin', action="store_true")
    parser.add_argument('-n', '--newline', help='add new lines', action="store_true")
    parser.add_argument('-i', '--ip', help='redis host', type=str, default='127.0.0.1', dest='host_redis')
    parser.add_argument('-p', '--port', help='redis port', type=int, default=6380, dest='port_redis')
    args = parser.parse_args()

    if not args.uuid or not args.type:
        parser.print_help()
        sys.exit(0)

    host_redis = args.host_redis
    port_redis = args.port_redis
    newLines = args.newline
    read_files = args.files

    redis_d4 = redis.StrictRedis(
        host=host_redis,
        port=port_redis,
        db=2)
    try:
        redis_d4.ping()
    except redis.exceptions.ConnectionError:
        print('Error: Redis server {}:{}, ConnectionError'.format(host_redis, port_redis))
        sys.exit(1)

    # logs_dir = 'logs'
    # if not os.path.isdir(logs_dir):
    #     os.makedirs(logs_dir)
    #
    # log_filename = 'logs/d4-stdout.log'
    # logger = logging.getLogger()
    # formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
    # handler_log = logging.handlers.TimedRotatingFileHandler(log_filename, when="midnight", interval=1)
    # handler_log.suffix = '%Y-%m-%d.log'
    # handler_log.setFormatter(formatter)
    # logger.addHandler(handler_log)
    # logger.setLevel(args.verbose)
    #
    # logger.info('Launching stdout Analyzer ...')

    d4_uuid = args.uuid
    d4_type = args.type

    data_queue = 'analyzer:{}:{}'.format(d4_type, d4_uuid)

    while True:
        d4_data = redis_d4.rpop(data_queue)
        if d4_data is None:
            time.sleep(1)
            continue
        if read_files:
            try:
                with open(d4_data, 'rb') as f:
                    sys.stdout.buffer.write(f.read())
                sys.exit(0)
            except FileNotFoundError:
                ## TODO: write logs file
                continue

        else:
            if newLines:
                sys.stdout.buffer.write(d4_data + b'\n')
            else:
                sys.stdout.buffer.write(d4_data)

@@ -155,7 +155,7 @@ supervised no
#
# Creating a pid file is best effort: if Redis is not able to create it
# nothing bad happens, the server will start and run normally.
pidfile /var/run/redis_6379.pid
pidfile /var/run/redis_6380.pid

# Specify the server verbosity level.
# This can be one of:

@@ -843,7 +843,7 @@ lua-time-limit 5000
# Make sure that instances running in the same system do not have
# overlapping cluster configuration file names.
#
# cluster-config-file nodes-6379.conf
# cluster-config-file nodes-6380.conf

# Cluster node timeout is the amount of milliseconds a node must be unreachable
# for it to be considered in failure state.

@@ -971,7 +971,7 @@ lua-time-limit 5000
# Example:
#
# cluster-announce-ip 10.1.1.5
# cluster-announce-port 6379
# cluster-announce-port 6380
# cluster-announce-bus-port 6380

################################## SLOW LOG ###################################

@@ -3,3 +3,39 @@
use_default_save_directory = yes
save_directory = None

[D4_Server]
server_port=4443
# registration or shared-secret
server_mode = registration
default_hmac_key = private key to change
analyzer_queues_max_size = 100000000

[Flask_Server]
# UI port number
host = 127.0.0.1
port = 7000

[Redis_STREAM]
host = localhost
port = 6379
db = 0

[Redis_METADATA]
host = localhost
port = 6380
db = 0

[Redis_SERV]
host = localhost
port = 6380
db = 1

[Redis_ANALYZER]
host = localhost
port = 6380
db = 2

[Redis_CACHE]
host = localhost
port = 6380
db = 3

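As a quick orientation to this configuration, the sketch below shows one way to read the Redis sections with Python's standard configparser. It is illustrative only: the section and option names come from the sample above, while the file path and helper name are assumptions and not part of the repository.

```python
# Illustrative only: reads the [Redis_*] sections of the sample configuration above.
# The path and function name are assumptions, not part of the repository.
import configparser

def load_redis_settings(path='configs/server.conf'):
    cfg = configparser.ConfigParser()
    cfg.read(path)
    # e.g. 'Redis_METADATA' -> ('localhost', 6380, 0)
    return {section: (cfg.get(section, 'host'),
                      cfg.getint(section, 'port'),
                      cfg.getint(section, 'db'))
            for section in cfg.sections() if section.startswith('Redis_')}

if __name__ == '__main__':
    print(load_redis_settings())
```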
@@ -0,0 +1,156 @@
# Should be using configs but not supported until docker 17.06+
# https://www.d4-project.org/2019/05/28/passive-dns-tutorial.html

version: "3"
services:
  redis-stream:
    image: redis
    command: redis-server --port 6379

  redis-metadata:
    image: redis
    command: redis-server --port 6380

  redis-analyzer:
    image: redis
    command: redis-server --port 6400

  d4-server:
    build:
      context: .
      dockerfile: Dockerfile.d4-server
    image: d4-server:latest
    depends_on:
      - redis-stream
      - redis-metadata
    environment:
      - D4_REDIS_STREAM_HOST=redis-stream
      - D4_REDIS_STREAM_PORT=6379
      - D4_REDIS_METADATA_HOST=redis-metadata
      - D4_REDIS_METADATA_PORT=6380
    ports:
      - "4443:4443"

  d4-worker_1:
    build:
      context: .
      dockerfile: Dockerfile.d4-server
    image: d4-server:latest
    depends_on:
      - redis-stream
      - redis-metadata
    environment:
      - D4_REDIS_STREAM_HOST=redis-stream
      - D4_REDIS_STREAM_PORT=6379
      - D4_REDIS_METADATA_HOST=redis-metadata
      - D4_REDIS_METADATA_PORT=6380
    entrypoint: bash -c "cd workers/workers_1; ./workers_manager.py; read x"
    volumes:
      - d4-data:/usr/src/d4-server/data

  d4-worker_2:
    build:
      context: .
      dockerfile: Dockerfile.d4-server
    image: d4-server:latest
    depends_on:
      - redis-stream
      - redis-metadata
    environment:
      - D4_REDIS_STREAM_HOST=redis-stream
      - D4_REDIS_STREAM_PORT=6379
      - D4_REDIS_METADATA_HOST=redis-metadata
      - D4_REDIS_METADATA_PORT=6380
    entrypoint: bash -c "cd workers/workers_2; ./workers_manager.py; read x"
    volumes:
      - d4-data:/usr/src/d4-server/data

  d4-worker_4:
    build:
      context: .
      dockerfile: Dockerfile.d4-server
    image: d4-server:latest
    depends_on:
      - redis-stream
      - redis-metadata
    environment:
      - D4_REDIS_STREAM_HOST=redis-stream
      - D4_REDIS_STREAM_PORT=6379
      - D4_REDIS_METADATA_HOST=redis-metadata
      - D4_REDIS_METADATA_PORT=6380
    entrypoint: bash -c "cd workers/workers_4; ./workers_manager.py; read x"
    volumes:
      - d4-data:/usr/src/d4-server/data

  d4-worker_8:
    build:
      context: .
      dockerfile: Dockerfile.d4-server
    image: d4-server:latest
    depends_on:
      - redis-stream
      - redis-metadata
    environment:
      - D4_REDIS_STREAM_HOST=redis-stream
      - D4_REDIS_STREAM_PORT=6379
      - D4_REDIS_METADATA_HOST=redis-metadata
      - D4_REDIS_METADATA_PORT=6380
    entrypoint: bash -c "cd workers/workers_8; ./workers_manager.py; read x"
    volumes:
      - d4-data:/usr/src/d4-server/data

  d4-web:
    build:
      context: .
      dockerfile: Dockerfile.d4-server
    image: d4-server:latest
    depends_on:
      - redis-stream
      - redis-metadata
    environment:
      - D4_REDIS_STREAM_HOST=redis-stream
      - D4_REDIS_STREAM_PORT=6379
      - D4_REDIS_METADATA_HOST=redis-metadata
      - D4_REDIS_METADATA_PORT=6380
    entrypoint: bash -c "cd web; ./Flask_server.py; read x"
    ports:
      - "7000:7000"
    volumes:
      - d4-data:/usr/src/d4-server/data

  d4-analyzer-passivedns-cof:
    build:
      context: .
      dockerfile: Dockerfile.analyzer-d4-passivedns
    image: analyzer-d4-passivedns:latest
    depends_on:
      - redis-metadata
      - redis-analyzer
    environment:
      - D4_ANALYZER_REDIS_HOST=redis-analyzer
      - D4_ANALYZER_REDIS_PORT=6400
      - D4_REDIS_METADATA_HOST=redis-metadata
      - D4_REDIS_METADATA_PORT=6380
      - DEBUG=true
    entrypoint: bash -c "python ./pdns-cof-server.py; read x"
    ports:
      - "8400:8400"

  d4-analyzer-passivedns-ingestion:
    build:
      context: .
      dockerfile: Dockerfile.analyzer-d4-passivedns
    image: analyzer-d4-passivedns:latest
    depends_on:
      - redis-metadata
      - redis-analyzer
    environment:
      - D4_ANALYZER_REDIS_HOST=redis-analyzer
      - D4_ANALYZER_REDIS_PORT=6400
      - D4_REDIS_METADATA_HOST=redis-metadata
      - D4_REDIS_METADATA_PORT=6380
      - DEBUG=true
    entrypoint: bash -c "python ./pdns-ingestion.py; read x"

volumes:
  d4-data:

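The compose file above injects `D4_REDIS_*` variables into each container instead of relying on a mounted configuration file. The snippet below sketches how a process could pick those variables up with Python; it is an assumption about container wiring, and whether the current server code honours these exact names is not shown here.

```python
# Illustrative only: build a Redis connection from the D4_REDIS_METADATA_* variables
# exported by the docker-compose services above. Fallbacks mirror server.conf.sample.
import os
import redis

metadata = redis.StrictRedis(
    host=os.environ.get('D4_REDIS_METADATA_HOST', 'localhost'),
    port=int(os.environ.get('D4_REDIS_METADATA_PORT', 6380)),
    db=0)
print(metadata.ping())
```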
@@ -0,0 +1,130 @@
# D4 core

![](https://www.d4-project.org/assets/images/logo.png)

## D4 core server

D4 core server is a complete server to handle clients (sensors) including the decapsulation of the [D4 protocol](https://github.com/D4-project/architecture/tree/master/format), control of
sensor registrations, management of decoding protocols and dispatching to adequate decoders/analysers.

## Database map - Metadata

```
DB 0 - Stats + sensor configs
DB 1 - Users
DB 2 - Analyzer queue
DB 3 - Flask Cache
```

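DB 2 holds the per-analyzer queues that the export scripts above drain. As a minimal sketch (the `analyzer:<type>:<queue uuid>` naming mirrors those scripts; host, port and the UUID below are placeholders), an analyzer can consume its queue like this:

```python
# Minimal sketch of an analyzer draining its queue from the metadata Redis (DB 2).
# Host, port and the queue UUID are placeholders.
import time
import redis

r = redis.StrictRedis(host='127.0.0.1', port=6380, db=2)
queue = 'analyzer:1:ff7ba400-e76c-4053-982d-feec42bdef38'

while True:
    item = r.rpop(queue)      # queues are Redis lists; the export scripts also use RPOP
    if item is None:
        time.sleep(1)         # nothing queued, poll again later
        continue
    print(item[:64])          # stand-in for real handling of the D4 payload
```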
### DB 1

##### User Management:

| Hset Key | Field | Value |
| ------ | ------ | ------ |
| user:all | **user id** | **password hash** |
| | | |
| user:tokens | **token** | **user id** |
| | | |
| user_metadata:**user id** | token | **token** |
| | change_passwd | **boolean** |
| | role | **role** |

| Set Key | Value |
| ------ | ------ |
| user_role:**role** | **user id** |

| Zrank Key | Field | Value |
| ------ | ------ | ------ |
| ail:all_role | **role** | **int, role priority (1=admin)** |

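Resolving an API token back to its user and role therefore only takes two lookups against these keys. The snippet below is a sketch against DB 1; connection parameters are placeholders, and the token value simply reuses the example key from the API documentation further down.

```python
# Sketch: resolve an API token to a user id and role using the DB 1 keys above.
# Connection parameters and the token value are placeholders.
import redis

r = redis.StrictRedis(host='127.0.0.1', port=6380, db=1, decode_responses=True)

token = 'iHc1_ChZxj1aXmiFiF1mkxxQkzawwriEaZpPqyTQj'   # example token from the API docs
user_id = r.hget('user:tokens', token)
if user_id is None:
    print('unknown token')
else:
    role = r.hget('user_metadata:{}'.format(user_id), 'role')
    print(user_id, role)
```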
### Server
| Key | Value |
| --- | --- |
| server:hmac_default_key | **hmac_default_key** |

| Set Key | Value |
| --- | --- |
| server:accepted_type | **accepted type** |
| server:accepted_extended_type | **accepted extended type** |

###### Server Mode
| Set Key | Value |
| --- | --- |
| blacklist_ip | **blacklisted ip** |
| blacklist_ip_by_uuid | **uuidv4** |
| blacklist_uuid | **uuidv4** |

###### Connection Manager
| Set Key | Value |
| --- | --- |
| active_connection | **uuid** |
| | |
| active_connection:**type** | **uuid** |
| active_connection_extended_type:**uuid** | **extended type** |
| | |
| active_uuid_type2:**uuid** | **session uuid** |
| | |
| map:active_connection-uuid-session_uuid:**uuid** | **session uuid** |

| Set Key | Field | Value |
| --- | --- | --- |
| map:session-uuid_active_extended_type | **session_uuid** | **extended_type** |

### Stats
| Zset Key | Field | Value |
| --- | --- | --- |
| stat_uuid_ip:**date**:**uuid** | **IP** | **number D4 Packets** |
| | | |
| stat_uuid_type:**date**:**uuid** | **type** | **number D4 Packets** |
| | | |
| stat_type_uuid:**date**:**type** | **uuid** | **number D4 Packets** |
| | | |
| stat_ip_uuid:20190519:158.64.14.86 | **uuid** | **number D4 Packets** |
| | | |
| daily_uuid:**date** | **uuid** | **number D4 Packets** |
| | | |
| daily_type:**date** | **type** | **number D4 Packets** |
| | | |
| daily_ip:**date** | **IP** | **number D4 Packets** |

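These per-day ZSETs make simple reporting queries one-liners. Below is a sketch that lists, for one day, how many D4 packets each sensor UUID sent; the stats live in DB 0 according to the database map above, while the connection details and the date are placeholders.

```python
# Sketch: read one day's per-sensor packet counts from the stats ZSETs (DB 0).
# Host, port and the date are placeholders.
import redis

r = redis.StrictRedis(host='127.0.0.1', port=6380, db=0, decode_responses=True)

date = '20190519'
for sensor_uuid, nb_packets in r.zrevrange('daily_uuid:{}'.format(date), 0, -1, withscores=True):
    print(sensor_uuid, int(nb_packets))
```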
### metadata sensors
| Hset Key | Field | Value |
| --- | --- | --- |
| metadata_uuid:**uuid** | first_seen | **epoch** |
| | last_seen | **epoch** |
| | description | **description** (optional) |
| | Error | **error message** (optional) |
| | hmac_key | **hmac_key** (optional) |
| | user_id | **user_id** (optional) |

###### Last IP
| List Key | Value |
| --- | --- |
| list_uuid_ip:**uuid** | **IP** |

### metadata types by sensors
| Hset Key | Field | Value |
| --- | --- | --- |
| metadata_uuid:**uuid** | first_seen | **epoch** |
| | last_seen | **epoch** |

| Set Key | Value |
| --- | --- |
| all_types_by_uuid:**uuid** | **type** |
| all_extended_types_by_uuid:**uuid** | **type** |

### analyzers
###### metadata
| Hset Key | Field | Value |
| --- | --- | --- |
| analyzer:**uuid** | last_updated | **epoch** |
| | description | **description** |
| | max_size | **queue max size** |

###### all analyzers by type
| Set Key | Value |
| --- | --- |
| analyzer:**type** | **uuid** |
| analyzer:254:**extended type** | **uuid** |

@@ -0,0 +1,94 @@
# API DOCUMENTATION

## General

### Automation key

The authentication of the automation is performed via a secure key available in the D4 UI. Make sure you keep that key secret. It gives access to the entire database! The API key is available in the ``Settings`` menu under ``My Profile``.

The authorization is performed by using the following header:

~~~~
Authorization: YOUR_API_KEY
~~~~
### Accept and Content-Type headers

When submitting data in a POST, PUT or DELETE operation you need to specify in what content-type you encoded the payload. This is done by setting the below Content-Type headers:

~~~~
Content-Type: application/json
~~~~

Example:

~~~~
curl --header "Authorization: YOUR_API_KEY" --header "Content-Type: application/json" https://D4_URL/
~~~~

## Sensor Registration

### Register a sensor: `api/v1/add/sensor/register`<a name="add_sensor_register"></a>

#### Description
Register a sensor.

**Method** : `POST`

#### Parameters
- `uuid`
  - sensor uuid
  - *uuid4*
  - mandatory

- `hmac_key`
  - sensor secret key
  - *binary*
  - mandatory

- `description`
  - sensor description
  - *str*

- `mail`
  - user mail
  - *str*

#### JSON response
- `uuid`
  - sensor uuid
  - *uuid4*

#### Example
```
curl https://127.0.0.1:7000/api/v1/add/sensor/register --header "Authorization: iHc1_ChZxj1aXmiFiF1mkxxQkzawwriEaZpPqyTQj" -H "Content-Type: application/json" --data @input.json -X POST
```

#### input.json Example
```json
{
  "uuid": "ff7ba400-e76c-4053-982d-feec42bdef38",
  "hmac_key": "...HMAC_KEY..."
}
```

#### Expected Success Response
**HTTP Status Code** : `200`

```json
{
  "uuid": "ff7ba400-e76c-4053-982d-feec42bdef38"
}
```

#### Expected Fail Response

**HTTP Status Code** : `400`
```json
{"status": "error", "reason": "Mandatory parameter(s) not provided"}
{"status": "error", "reason": "Invalid uuid"}
```

**HTTP Status Code** : `409`
```json
{"status": "error", "reason": "Sensor already registred"}
```

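The same registration call as a short Python sketch, mirroring the curl example above. The endpoint, payload fields and response shapes come from this documentation; the API key, UUID and HMAC key are placeholders, and `verify=False` simply mirrors `curl -k` against the self-signed certificate.

```python
# Sketch of the sensor registration call documented above, using the requests library.
# API key, UUID and HMAC key are placeholders; verify=False mirrors `curl -k` and
# should not be used against a properly signed server.
import requests

url = 'https://127.0.0.1:7000/api/v1/add/sensor/register'
headers = {'Authorization': 'YOUR_API_KEY', 'Content-Type': 'application/json'}
payload = {'uuid': 'ff7ba400-e76c-4053-982d-feec42bdef38', 'hmac_key': '...HMAC_KEY...'}

resp = requests.post(url, json=payload, headers=headers, verify=False)
if resp.status_code == 200:
    print('registered sensor', resp.json()['uuid'])   # remember to approve it in the web UI
else:
    print('registration failed:', resp.status_code, resp.json().get('reason'))
```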
@@ -7,3 +7,6 @@ openssl req -sha256 -new -key server.key -out server.csr -config san.cnf
openssl x509 -req -in server.csr -CA rootCA.crt -CAkey rootCA.key -CAcreateserial -out server.crt -days 500 -sha256 -extfile ext3.cnf
# Concat in pem
cat server.crt server.key > ../server.pem
# Copy certs for Flask https
cp server.key ../web/server.key
cp server.crt ../web/server.crt

@@ -12,6 +12,10 @@ if [ -z "$VIRTUAL_ENV" ]; then
fi
python3 -m pip install -r requirement.txt

pushd configs/
cp server.conf.sample server.conf
popd

pushd web/
./update_web.sh
popd

@@ -25,3 +29,17 @@ pushd redis/
git checkout 5.0
make
popd

# LAUNCH
bash LAUNCH.sh -l &
wait
echo ""

# create default users
pushd web/
./create_default_user.py
popd

bash LAUNCH.sh -k &
wait
echo ""

@ -0,0 +1,370 @@
|
|||
#!/usr/bin/env python3
|
||||
# -*-coding:UTF-8 -*
|
||||
|
||||
import os
|
||||
import sys
|
||||
import datetime
|
||||
import time
|
||||
import uuid
|
||||
import redis
|
||||
|
||||
sys.path.append(os.path.join(os.environ['D4_HOME'], 'lib/'))
|
||||
import ConfigLoader
|
||||
import d4_type
|
||||
|
||||
### Config ###
|
||||
config_loader = ConfigLoader.ConfigLoader()
|
||||
r_serv_metadata = config_loader.get_redis_conn("Redis_METADATA")
|
||||
r_serv_analyzer = config_loader.get_redis_conn("Redis_ANALYZER")
|
||||
LIST_DEFAULT_SIZE = config_loader.get_config_int('D4_Server', 'analyzer_queues_max_size')
|
||||
config_loader = None
|
||||
### ###
|
||||
|
||||
def is_valid_uuid_v4(uuid_v4):
|
||||
if uuid_v4:
|
||||
uuid_v4 = uuid_v4.replace('-', '')
|
||||
else:
|
||||
return False
|
||||
|
||||
try:
|
||||
uuid_test = uuid.UUID(hex=uuid_v4, version=4)
|
||||
return uuid_test.hex == uuid_v4
|
||||
except:
|
||||
return False
|
||||
|
||||
def sanitize_uuid(uuid_v4, not_exist=False):
|
||||
if not is_valid_uuid_v4(uuid_v4):
|
||||
uuid_v4 = str(uuid.uuid4())
|
||||
if not_exist:
|
||||
if exist_queue(uuid_v4):
|
||||
uuid_v4 = str(uuid.uuid4())
|
||||
return uuid_v4
|
||||
|
||||
def sanitize_queue_type(format_type):
|
||||
try:
|
||||
format_type = int(format_type)
|
||||
except:
|
||||
format_type = 1
|
||||
if format_type == 2:
|
||||
format_type = 254
|
||||
return format_type
|
||||
|
||||
def exist_queue(queue_uuid):
|
||||
return r_serv_metadata.exists('analyzer:{}'.format(queue_uuid))
|
||||
|
||||
def get_all_queues(r_list=None):
|
||||
res = r_serv_metadata.smembers('all_analyzer_queues')
|
||||
if r_list:
|
||||
return list(res)
|
||||
return res
|
||||
|
||||
def get_all_queues_format_type(r_list=None):
|
||||
res = r_serv_metadata.smembers('all:analyzer:format_type')
|
||||
if r_list:
|
||||
return list(res)
|
||||
return res
|
||||
|
||||
def get_all_queues_extended_type(r_list=None):
|
||||
res = r_serv_metadata.smembers('all:analyzer:extended_type')
|
||||
if r_list:
|
||||
return list(res)
|
||||
return res
|
||||
|
||||
# GLOBAL
|
||||
def get_all_queues_uuid_by_type(format_type, r_list=None):
|
||||
res = r_serv_metadata.smembers('all:analyzer:by:format_type:{}'.format(format_type))
|
||||
if r_list:
|
||||
return list(res)
|
||||
return res
|
||||
|
||||
# GLOBAL
|
||||
def get_all_queues_uuid_by_extended_type(extended_type, r_list=None):
|
||||
res = r_serv_metadata.smembers('all:analyzer:by:extended_type:{}'.format(extended_type))
|
||||
if r_list:
|
||||
return list(res)
|
||||
return res
|
||||
|
||||
def get_queues_list_by_type(queue_type):
|
||||
if isinstance(queue_type ,int):
|
||||
return get_all_queues_by_type(queue_type)
|
||||
else:
|
||||
return get_all_queues_by_extended_type(queue_type)
|
||||
|
||||
# ONLY NON GROUP
|
||||
def get_all_queues_by_type(format_type, r_list=None):
|
||||
'''
|
||||
Get all analyzer Queues by type
|
||||
|
||||
:param format_type: data type
|
||||
:type domain_type: int
|
||||
:param r_list: return list
|
||||
:type r_list: boolean
|
||||
|
||||
:return: list or set of queus (uuid)
|
||||
:rtype: list or set
|
||||
'''
|
||||
# 'all_analyzer_queues_by_type'
|
||||
res = r_serv_metadata.smembers('analyzer:{}'.format(format_type))
|
||||
if r_list:
|
||||
return list(res)
|
||||
return res
|
||||
|
||||
# ONLY NON GROUP
|
||||
def get_all_queues_by_extended_type(extended_type, r_list=None):
|
||||
res = r_serv_metadata.smembers('analyzer:254:{}'.format(extended_type))
|
||||
if r_list:
|
||||
return list(res)
|
||||
return res
|
||||
|
||||
def get_all_queues_group_by_type(format_type, r_list=None):
|
||||
res = r_serv_metadata.smembers('analyzer_uuid_group:{}'.format(format_type))
|
||||
if r_list:
|
||||
return list(res)
|
||||
return res
|
||||
|
||||
def get_all_queues_group_by_extended_type(extended_type, r_list=None):
|
||||
res = r_serv_metadata.smembers('analyzer_uuid_group:254:{}'.format(extended_type))
|
||||
if r_list:
|
||||
return list(res)
|
||||
return res
|
||||
|
||||
def get_all_queues_by_sensor_group(queue_type, sensor_uuid, r_list=None):
|
||||
res = r_serv_metadata.smembers('sensor:queues:{}:{}'.format(queue_type, sensor_uuid))
|
||||
if r_list:
|
||||
return list(res)
|
||||
return res
|
||||
|
||||
def get_queue_group_all_sensors(queue_uuid, r_list=None):
|
||||
res = r_serv_metadata.smembers('analyzer_sensor_group:{}'.format(queue_uuid))
|
||||
if r_list:
|
||||
return list(res)
|
||||
return res
|
||||
|
||||
def get_queue_last_seen(queue_uuid, f_date='str_time'):
|
||||
res = r_serv_metadata.hget('analyzer:{}'.format(queue_uuid), 'last_updated')
|
||||
if f_date == 'str_date':
|
||||
if res is None:
|
||||
res = 'Never'
|
||||
else:
|
||||
res = datetime.datetime.fromtimestamp(float(res)).strftime('%Y-%m-%d %H:%M:%S')
|
||||
return res
|
||||
|
||||
def get_queue_max_size(queue_uuid):
|
||||
max_size = r_serv_metadata.hget('analyzer:{}'.format(queue_uuid), 'max_size')
|
||||
if max_size is None:
|
||||
max_size = LIST_DEFAULT_SIZE
|
||||
return max_size
|
||||
|
||||
def get_queue_size(queue_uuid, format_type, extended_type=None):
|
||||
if format_type==254:
|
||||
if not extended_type:
|
||||
extended_type = get_queue_extended_type(queue_uuid)
|
||||
length = r_serv_analyzer.llen('analyzer:{}:{}'.format(extended_type, queue_uuid))
|
||||
else:
|
||||
length = r_serv_analyzer.llen('analyzer:{}:{}'.format(format_type, queue_uuid))
|
||||
if length is None:
|
||||
length = 0
|
||||
return length
|
||||
|
||||
def get_queue_format_type(queue_uuid):
|
||||
return int(r_serv_metadata.hget('analyzer:{}'.format(queue_uuid), 'type'))
|
||||
|
||||
def get_queue_extended_type(queue_uuid):
|
||||
return r_serv_metadata.hget('analyzer:{}'.format(queue_uuid), 'metatype')
|
||||
|
||||
def is_queue_group_of_sensors(queue_uuid):
|
||||
return r_serv_metadata.exists('analyzer_sensor_group:{}'.format(queue_uuid))
|
||||
|
||||
def get_queue_metadata(queue_uuid, format_type=None, extended_type=None, f_date='str_date', is_group=None, force_is_group_queue=False):
|
||||
dict_queue_meta = {}
|
||||
dict_queue_meta['uuid'] = queue_uuid
|
||||
dict_queue_meta['size_limit'] = get_queue_max_size(queue_uuid)
|
||||
dict_queue_meta['last_updated'] = get_queue_last_seen(queue_uuid, f_date=f_date)
|
||||
|
||||
dict_queue_meta['description'] = r_serv_metadata.hget('analyzer:{}'.format(queue_uuid), 'description')
|
||||
if dict_queue_meta['description'] is None:
|
||||
dict_queue_meta['description'] = ''
|
||||
|
||||
if not format_type:
|
||||
format_type = get_queue_format_type(queue_uuid)
|
||||
dict_queue_meta['format_type'] = format_type
|
||||
if format_type==254:
|
||||
if not extended_type:
|
||||
extended_type = get_queue_extended_type(queue_uuid)
|
||||
dict_queue_meta['extended_type'] = extended_type
|
||||
|
||||
dict_queue_meta['length'] = get_queue_size(queue_uuid, format_type, extended_type=extended_type)
|
||||
|
||||
if is_group and not force_is_group_queue:
|
||||
dict_queue_meta['is_group_queue'] = is_queue_group_of_sensors(queue_uuid)
|
||||
else:
|
||||
if force_is_group_queue:
|
||||
dict_queue_meta['is_group_queue'] = True
|
||||
else:
|
||||
dict_queue_meta['is_group_queue'] = False
|
||||
|
||||
return dict_queue_meta
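With the default arguments (f_date='str_date', no group flags), the returned structure for a hypothetical extended-type queue looks roughly like this; every value below is illustrative:

```python
{
    'uuid': 'a2e6f95c-1efe-4d2b-a0f5-d8e205d85670',  # illustrative queue uuid
    'size_limit': 10000,          # LIST_DEFAULT_SIZE unless 'max_size' is set
    'last_updated': 'Never',      # or 'YYYY-MM-DD HH:MM:SS' once data arrived
    'description': '',
    'format_type': 254,
    'extended_type': 'ja3-jl',    # only present for format type 254
    'length': 0,
    'is_group_queue': False,
}
```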
|
||||
|
||||
def edit_queue_description(queue_uuid, description):
|
||||
if r_serv_metadata.exists('analyzer:{}'.format(queue_uuid)) and description:
|
||||
r_serv_metadata.hset('analyzer:{}'.format(queue_uuid), 'description', description)
|
||||
|
||||
def edit_queue_max_size(queue_uuid, max_size):
|
||||
try:
|
||||
max_size = int(max_size)
|
||||
except Exception:
|
||||
return 'Invalid analyzer max size: not an integer'
|
||||
|
||||
if r_serv_metadata.exists('analyzer:{}'.format(queue_uuid)) and max_size > 0:
|
||||
r_serv_metadata.hset('analyzer:{}'.format(queue_uuid), 'max_size', max_size)
|
||||
|
||||
def edit_queue_sensors_set(queue_uuid, l_sensors_uuid):
|
||||
format_type = get_queue_format_type(queue_uuid)
|
||||
set_current_sensors = get_queue_group_all_sensors(queue_uuid)
|
||||
l_new_sensors_uuid = []
|
||||
for sensor_uuid in l_sensors_uuid:
|
||||
l_new_sensors_uuid.append(sensor_uuid.replace('-', ''))
|
||||
|
||||
sensors_to_add = set(l_new_sensors_uuid).difference(set_current_sensors)
|
||||
sensors_to_remove = set_current_sensors.difference(l_new_sensors_uuid)
|
||||
|
||||
for sensor_uuid in sensors_to_add:
|
||||
r_serv_metadata.sadd('analyzer_sensor_group:{}'.format(queue_uuid), sensor_uuid)
|
||||
r_serv_metadata.sadd('sensor:queues:{}:{}'.format(format_type, sensor_uuid), queue_uuid)
|
||||
|
||||
for sensor_uuid in sensors_to_remove:
|
||||
r_serv_metadata.srem('analyzer_sensor_group:{}'.format(queue_uuid), sensor_uuid)
|
||||
r_serv_metadata.srem('sensor:queues:{}:{}'.format(format_type, sensor_uuid), queue_uuid)
|
||||
|
||||
|
||||
# create queue by type or by group of uuid
|
||||
# # TODO: add size limit
|
||||
def create_queues(format_type, queue_uuid=None, l_uuid=[], queue_type='list', metatype_name=None, description=None):
|
||||
format_type = sanitize_queue_type(format_type)
|
||||
|
||||
if not d4_type.is_accepted_format_type(format_type):
|
||||
return {'error': 'Invalid type'}
|
||||
|
||||
if format_type == 254 and not d4_type.is_accepted_extended_type(metatype_name):
|
||||
return {'error': 'Invalid extended type'}
|
||||
|
||||
queue_uuid = sanitize_uuid(queue_uuid, not_exist=True)
|
||||
r_serv_metadata.hset('analyzer:{}'.format(queue_uuid), 'type', format_type)
|
||||
edit_queue_description(queue_uuid, description)
|
||||
|
||||
# # TODO: check l_uuid is valid
|
||||
if l_uuid:
|
||||
analyzer_key_name = 'analyzer_uuid_group'
|
||||
else:
|
||||
analyzer_key_name = 'analyzer'
|
||||
|
||||
r_serv_metadata.sadd('all:analyzer:format_type', format_type)
|
||||
r_serv_metadata.sadd('all:analyzer:by:format_type:{}'.format(format_type), queue_uuid)
|
||||
|
||||
|
||||
if format_type == 254:
|
||||
# TODO: check metatype_name
|
||||
r_serv_metadata.sadd('{}:{}:{}'.format(analyzer_key_name, format_type, metatype_name), queue_uuid)
|
||||
r_serv_metadata.hset('analyzer:{}'.format(queue_uuid), 'metatype', metatype_name)
|
||||
|
||||
r_serv_metadata.sadd('all:analyzer:by:extended_type:{}'.format(metatype_name), queue_uuid)
|
||||
r_serv_metadata.sadd('all:analyzer:extended_type', metatype_name)
|
||||
else:
|
||||
r_serv_metadata.sadd('{}:{}'.format(analyzer_key_name, format_type), queue_uuid)
|
||||
|
||||
# Group by UUID
|
||||
if l_uuid:
|
||||
# # TODO: check sensor_uuid is valid
|
||||
if format_type == 254:
|
||||
queue_type = metatype_name
|
||||
for sensor_uuid in l_uuid:
|
||||
sensor_uuid = sensor_uuid.replace('-', '')
|
||||
r_serv_metadata.sadd('analyzer_sensor_group:{}'.format(queue_uuid), sensor_uuid)
|
||||
r_serv_metadata.sadd('sensor:queues:{}:{}'.format(queue_type, sensor_uuid), queue_uuid)
|
||||
# ALL
|
||||
r_serv_metadata.sadd('all_analyzer_queues', queue_uuid)
|
||||
return queue_uuid
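A hedged usage sketch of `create_queues`, using the format types and extended type accepted by the server in this changeset; the sensor UUID is the one used in this module's own `__main__` example:

```python
# Queue fed by every sensor emitting format type 1
queue_uuid = create_queues(1, description='all type-1 data')

# Queue restricted to a group of sensor UUIDs
group_queue_uuid = create_queues(1, l_uuid=['03c00bcf-fe53-46a1-85bb-ee6084cb5bb2'])

# Extended-type queue (format type 254 plus a metatype name)
ja3_queue_uuid = create_queues(254, metatype_name='ja3-jl')
```

On success each call returns the queue UUID; invalid types return an `{'error': ...}` dict instead.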
|
||||
|
||||
# format_type int or str (extended type)
|
||||
def add_data_to_queue(sensor_uuid, queue_type, data):
|
||||
if data:
|
||||
# by data type
|
||||
for queue_uuid in get_queues_list_by_type(queue_type):
|
||||
r_serv_analyzer.lpush('analyzer:{}:{}'.format(queue_type, queue_uuid), data)
|
||||
r_serv_metadata.hset('analyzer:{}'.format(queue_uuid), 'last_updated', time.time())
|
||||
analyser_queue_max_size = get_queue_max_size(queue_uuid)
|
||||
r_serv_analyzer.ltrim('analyzer:{}:{}'.format(queue_type, queue_uuid), 0, analyser_queue_max_size)
|
||||
|
||||
# by data type
|
||||
for queue_uuid in get_all_queues_by_sensor_group(queue_type, sensor_uuid):
|
||||
r_serv_analyzer.lpush('analyzer:{}:{}'.format(queue_type, queue_uuid), data)
|
||||
r_serv_metadata.hset('analyzer:{}'.format(queue_uuid), 'last_updated', time.time())
|
||||
analyser_queue_max_size = get_queue_max_size(queue_uuid)
|
||||
r_serv_analyzer.ltrim('analyzer:{}:{}'.format(queue_type, queue_uuid), 0, analyser_queue_max_size)
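Both loops above implement a capped Redis list: LPUSH adds the new record at the head and LTRIM drops everything beyond the queue's `max_size`. A standalone sketch of the same pattern, with an illustrative key name and size:

```python
import redis

r = redis.StrictRedis(decode_responses=True)
key = 'analyzer:1:a2e6f95c-1efe-4d2b-a0f5-d8e205d85670'  # illustrative queue key
r.lpush(key, 'new-record')   # newest entry goes to the head of the list
r.ltrim(key, 0, 9999)        # keep only the 10,000 most recent entries
```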
|
||||
|
||||
|
||||
def flush_queue(queue_uuid, queue_type):
|
||||
r_serv_analyzer.delete('analyzer:{}:{}'.format(queue_type, queue_uuid))
|
||||
|
||||
def remove_queues(queue_uuid, queue_type, metatype_name=None):
|
||||
try:
|
||||
queue_type = int(queue_type)
|
||||
except Exception:
|
||||
print('error: Invalid format type')
|
||||
return {'error': 'Invalid format type'}
|
||||
|
||||
if not is_valid_uuid_v4(queue_uuid):
|
||||
print('error: Invalid uuid')
|
||||
return {'error': 'Invalid uuid'}
|
||||
|
||||
if not exist_queue(queue_uuid):
|
||||
print('error: unknown queue uuid')
|
||||
return {'error': 'unknown queue uuid'}
|
||||
|
||||
if queue_type==254 and not metatype_name:
|
||||
metatype_name = get_queue_extended_type(queue_uuid)
|
||||
|
||||
# delete metadata
|
||||
r_serv_metadata.delete('analyzer:{}'.format(queue_uuid))
|
||||
|
||||
# delete queue group of sensors uuid
|
||||
l_sensors_uuid = get_queue_group_all_sensors(queue_uuid)
|
||||
if l_sensors_uuid:
|
||||
r_serv_metadata.delete('analyzer_sensor_group:{}'.format(queue_uuid))
|
||||
|
||||
if queue_type == 254:
|
||||
queue_type = metatype_name
|
||||
for sensor_uuid in l_sensors_uuid:
|
||||
r_serv_metadata.srem('sensor:queues:{}:{}'.format(queue_type, sensor_uuid), queue_uuid)
|
||||
|
||||
if l_sensors_uuid:
|
||||
analyzer_key_name = 'analyzer_uuid_group'
|
||||
else:
|
||||
analyzer_key_name = 'analyzer'
|
||||
|
||||
r_serv_metadata.srem('all:analyzer:by:format_type:{}'.format(queue_type), queue_uuid)
|
||||
if queue_type == 254:
|
||||
r_serv_metadata.srem('{}:254:{}'.format(analyzer_key_name, metatype_name), queue_uuid)
|
||||
r_serv_metadata.srem('all:analyzer:by:extended_type:{}'.format(metatype_name), queue_uuid)
|
||||
else:
|
||||
r_serv_metadata.srem('{}:{}'.format(analyzer_key_name, queue_type), queue_uuid)
|
||||
|
||||
r_serv_metadata.srem('all_analyzer_queues', queue_uuid)
|
||||
|
||||
## delete global queue ##
|
||||
if not r_serv_metadata.exists('all:analyzer:by:format_type:{}'.format(queue_type)):
|
||||
r_serv_metadata.srem('all:analyzer:format_type', queue_type)
|
||||
if queue_type ==254:
|
||||
if not r_serv_metadata.exists('all:analyzer:by:extended_type:{}'.format(metatype_name)):
|
||||
r_serv_metadata.srem('all:analyzer:extended_type', metatype_name)
|
||||
## --- ##
|
||||
|
||||
# delete queue
|
||||
r_serv_analyzer.delete('analyzer:{}:{}'.format(queue_type, queue_uuid))
|
||||
|
||||
def get_sensor_queues(sensor_uuid):
|
||||
pass
|
||||
|
||||
if __name__ == '__main__':
|
||||
#create_queues(3, l_uuid=['03c00bcf-fe53-46a1-85bb-ee6084cb5bb2'])
|
||||
remove_queues('a2e6f95c-1efe-4d2b-a0f5-d8e205d85670', 3)
|
|
@ -0,0 +1,54 @@
|
|||
#!/usr/bin/python3
|
||||
|
||||
"""
|
||||
The ``ConfigLoader``
|
||||
====================
|
||||
|
||||
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
import redis
|
||||
import configparser
|
||||
|
||||
# Get Config file
|
||||
config_dir = os.path.join(os.environ['D4_HOME'], 'configs')
|
||||
config_file = os.path.join(config_dir, 'server.conf')
|
||||
if not os.path.exists(config_file):
|
||||
raise Exception('Unable to find the configuration file. \
|
||||
Did you set the D4_HOME environment variable \
|
||||
and activate the virtualenv?')
|
||||
|
||||
# # TODO: create sphinx doc
|
||||
|
||||
# # TODO: add config_field to reload
|
||||
|
||||
class ConfigLoader(object):
|
||||
"""docstring for Config_Loader."""
|
||||
|
||||
def __init__(self):
|
||||
self.cfg = configparser.ConfigParser()
|
||||
self.cfg.read(config_file)
|
||||
|
||||
def get_redis_conn(self, redis_name, decode_responses=True): ## TODO: verify redis name
|
||||
return redis.StrictRedis( host=self.cfg.get(redis_name, "host"),
|
||||
port=self.cfg.getint(redis_name, "port"),
|
||||
db=self.cfg.getint(redis_name, "db"),
|
||||
decode_responses=decode_responses )
|
||||
|
||||
def get_config_str(self, section, key_name):
|
||||
return self.cfg.get(section, key_name)
|
||||
|
||||
def get_config_int(self, section, key_name):
|
||||
return self.cfg.getint(section, key_name)
|
||||
|
||||
def get_config_boolean(self, section, key_name):
|
||||
return self.cfg.getboolean(section, key_name)
|
||||
|
||||
def has_option(self, section, key_name):
|
||||
return self.cfg.has_option(section, key_name)
|
||||
|
||||
def has_section(self, section):
|
||||
return self.cfg.has_section(section)
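A minimal usage sketch; the section and key names follow the ones used elsewhere in this changeset (`Redis_METADATA`, `D4_Server`/`server_port`):

```python
import ConfigLoader

config_loader = ConfigLoader.ConfigLoader()
r_serv_metadata = config_loader.get_redis_conn("Redis_METADATA")
if config_loader.has_option("D4_Server", "server_port"):
    server_port = config_loader.get_config_int("D4_Server", "server_port")
```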
|
|
@ -0,0 +1,275 @@
|
|||
#!/usr/bin/env python3
|
||||
# -*-coding:UTF-8 -*
|
||||
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
import datetime
|
||||
import uuid
|
||||
import redis
|
||||
|
||||
from flask import escape
|
||||
|
||||
sys.path.append(os.path.join(os.environ['D4_HOME'], 'lib/'))
|
||||
import ConfigLoader
|
||||
import d4_server
|
||||
|
||||
### Config ###
|
||||
config_loader = ConfigLoader.ConfigLoader()
|
||||
r_serv_db = config_loader.get_redis_conn("Redis_METADATA")
|
||||
config_loader = None
|
||||
### ###
|
||||
|
||||
def is_valid_uuid_v4(UUID):
|
||||
UUID = UUID.replace('-', '')
|
||||
try:
|
||||
uuid_test = uuid.UUID(hex=UUID, version=4)
|
||||
return uuid_test.hex == UUID
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
def get_time_sensor_last_seen(sensor_uuid):
|
||||
res = r_serv_db.hget('metadata_uuid:{}'.format(sensor_uuid), 'last_seen')
|
||||
if res:
|
||||
return int(res)
|
||||
else:
|
||||
return 0
|
||||
|
||||
def _get_sensor_type(sensor_uuid, first_seen=True, last_seen=True, time_format='default'):
|
||||
uuid_type = []
|
||||
uuid_all_type = r_serv_db.smembers('all_types_by_uuid:{}'.format(sensor_uuid))
|
||||
for type in uuid_all_type:
|
||||
type_meta = {}
|
||||
type_meta['type'] = type
|
||||
if first_seen:
|
||||
type_meta['first_seen'] = r_serv_db.hget('metadata_type_by_uuid:{}:{}'.format(sensor_uuid, type), 'first_seen')
|
||||
if last_seen:
|
||||
type_meta['last_seen'] = r_serv_db.hget('metadata_type_by_uuid:{}:{}'.format(sensor_uuid, type), 'last_seen')
|
||||
# time format
|
||||
if time_format=='gmt':
|
||||
if type_meta.get('first_seen'):
|
||||
type_meta['first_seen'] = datetime.datetime.fromtimestamp(float(type_meta['first_seen'])).strftime('%Y-%m-%d %H:%M:%S')
|
||||
if type_meta.get('last_seen'):
|
||||
type_meta['last_seen'] = datetime.datetime.fromtimestamp(float(type_meta['last_seen'])).strftime('%Y-%m-%d %H:%M:%S')
|
||||
uuid_type.append(type_meta)
|
||||
return uuid_type
|
||||
|
||||
def _get_sensor_metadata(sensor_uuid, first_seen=True, last_seen=True, time_format='default', sensor_types=False, mail=True, description=True):
|
||||
|
||||
meta_sensor = {}
|
||||
meta_sensor['uuid'] = sensor_uuid
|
||||
if first_seen:
|
||||
meta_sensor['first_seen'] = r_serv_db.hget('metadata_uuid:{}'.format(sensor_uuid), 'first_seen')
|
||||
if last_seen:
|
||||
meta_sensor['last_seen'] = r_serv_db.hget('metadata_uuid:{}'.format(sensor_uuid), 'last_seen')
|
||||
# time format
|
||||
if time_format=='gmt':
|
||||
if meta_sensor.get('first_seen'):
|
||||
meta_sensor['first_seen'] = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(int(meta_sensor['first_seen'])))
|
||||
if meta_sensor.get('last_seen'):
|
||||
meta_sensor['last_seen'] = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(int(meta_sensor['last_seen'])))
|
||||
|
||||
if sensor_types:
|
||||
meta_sensor['types'] = _get_sensor_type(sensor_uuid, first_seen=False, last_seen=False)
|
||||
if description:
|
||||
meta_sensor['description'] = r_serv_db.hget('metadata_uuid:{}'.format(sensor_uuid), 'description')
|
||||
if mail:
|
||||
meta_sensor['mail'] = r_serv_db.hget('metadata_uuid:{}'.format(sensor_uuid), 'user_mail')
|
||||
return meta_sensor
|
||||
|
||||
### BEGIN - SENSOR REGISTRATION ###
|
||||
|
||||
## TODO: add description
|
||||
def register_sensor(req_dict):
|
||||
sensor_uuid = req_dict.get('uuid', None)
|
||||
hmac_key = req_dict.get('hmac_key', None)
|
||||
user_id = req_dict.get('mail', None)
|
||||
third_party = req_dict.get('third_party', None)
|
||||
# verify uuid
|
||||
if not is_valid_uuid_v4(sensor_uuid):
|
||||
return ({"status": "error", "reason": "Invalid uuid"}, 400)
|
||||
sensor_uuid = sensor_uuid.replace('-', '')
|
||||
# sensor already exist
|
||||
if r_serv_db.exists('metadata_uuid:{}'.format(sensor_uuid)):
|
||||
return ({"status": "error", "reason": "Sensor already registered"}, 409)
|
||||
|
||||
# hmac key
|
||||
if not hmac_key:
|
||||
return ({"status": "error", "reason": "Mandatory parameter(s) not provided"}, 400)
|
||||
else:
|
||||
hmac_key = escape(hmac_key)
|
||||
if len(hmac_key)>100:
|
||||
hmac_key=hmac_key[:100]
|
||||
if third_party:
|
||||
third_party = True
|
||||
|
||||
res = _register_sensor(sensor_uuid, hmac_key, user_id=user_id, third_party=third_party, description=None)
|
||||
return res
|
||||
|
||||
|
||||
def _register_sensor(sensor_uuid, secret_key, user_id=None, third_party=False, description=None):
|
||||
r_serv_db.hset('metadata_uuid:{}'.format(sensor_uuid), 'hmac_key', secret_key)
|
||||
if user_id:
|
||||
r_serv_db.hset('metadata_uuid:{}'.format(sensor_uuid), 'user_mail', user_id)
|
||||
if description:
|
||||
r_serv_db.hset('metadata_uuid:{}'.format(sensor_uuid), 'description', description)
|
||||
if third_party:
|
||||
r_serv_db.hset('metadata_uuid:{}'.format(sensor_uuid), 'third_party', True)
|
||||
r_serv_db.sadd('sensor_pending_registration', sensor_uuid)
|
||||
return ({'uuid': sensor_uuid}, 200)
|
||||
|
||||
def get_pending_sensor():
|
||||
return list(r_serv_db.smembers('sensor_pending_registration'))
|
||||
|
||||
def get_nb_pending_sensor():
|
||||
return r_serv_db.scard('sensor_pending_registration')
|
||||
|
||||
def get_nb_registered_sensors():
|
||||
return r_serv_db.scard('registered_uuid')
|
||||
|
||||
def get_registered_sensors():
|
||||
return list(r_serv_db.smembers('registered_uuid'))
|
||||
|
||||
def approve_sensor(req_dict):
|
||||
sensor_uuid = req_dict.get('uuid', None)
|
||||
if not is_valid_uuid_v4(sensor_uuid):
|
||||
return ({"status": "error", "reason": "Invalid uuid"}, 400)
|
||||
sensor_uuid = sensor_uuid.replace('-', '')
|
||||
# sensor not registered
|
||||
#if r_serv_db.sismember('sensor_pending_registration', sensor_uuid):
|
||||
# return ({"status": "error", "reason": "Sensor not registered"}, 404)
|
||||
# sensor already approved
|
||||
if r_serv_db.sismember('registered_uuid', sensor_uuid):
|
||||
return ({"status": "error", "reason": "Sensor already approved"}, 409)
|
||||
return _approve_sensor(sensor_uuid)
|
||||
|
||||
def _approve_sensor(sensor_uuid):
|
||||
r_serv_db.sadd('registered_uuid', sensor_uuid)
|
||||
r_serv_db.srem('sensor_pending_registration', sensor_uuid)
|
||||
return ({'uuid': sensor_uuid}, 200)
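A hedged sketch of the registration flow built from the functions above; the UUID reuses the illustrative sensor UUID from this changeset, and the HMAC key and mail address are placeholders:

```python
req = {
    'uuid': '03c00bcf-fe53-46a1-85bb-ee6084cb5bb2',
    'hmac_key': 'per-sensor-secret',     # placeholder
    'mail': 'operator@example.org',      # placeholder
}
payload, status = register_sensor(req)   # -> ({'uuid': ...}, 200) when accepted

# later, an operator approves the pending sensor
payload, status = approve_sensor({'uuid': req['uuid']})
```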
|
||||
|
||||
def delete_pending_sensor(req_dict):
|
||||
sensor_uuid = req_dict.get('uuid', None)
|
||||
if not is_valid_uuid_v4(sensor_uuid):
|
||||
return ({"status": "error", "reason": "Invalid uuid"}, 400)
|
||||
sensor_uuid = sensor_uuid.replace('-', '')
|
||||
# sensor not registered
|
||||
#if r_serv_db.sismember('sensor_pending_registration', sensor_uuid):
|
||||
# return ({"status": "error", "reason": "Sensor not registered"}, 404)
|
||||
# sensor not pending
|
||||
if not r_serv_db.sismember('sensor_pending_registration', sensor_uuid):
|
||||
return ({"status": "error", "reason": "Not Pending Sensor"}, 409)
|
||||
return _delete_pending_sensor(sensor_uuid)
|
||||
|
||||
def _delete_pending_sensor(sensor_uuid):
|
||||
r_serv_db.srem('sensor_pending_registration', sensor_uuid)
|
||||
return ({'uuid': sensor_uuid}, 200)
|
||||
|
||||
def delete_registered_sensor(req_dict):
|
||||
sensor_uuid = req_dict.get('uuid', None)
|
||||
if not is_valid_uuid_v4(sensor_uuid):
|
||||
return ({"status": "error", "reason": "Invalid uuid"}, 400)
|
||||
sensor_uuid = sensor_uuid.replace('-', '')
|
||||
# sensor not registered
|
||||
if not r_serv_db.sismember('registered_uuid', sensor_uuid):
|
||||
return ({"status": "error", "reason": "Sensor not registered"}, 404)
|
||||
return _delete_registered_sensor(sensor_uuid)
|
||||
|
||||
def _delete_registered_sensor(sensor_uuid):
|
||||
r_serv_db.srem('registered_uuid', sensor_uuid)
|
||||
return ({'uuid': sensor_uuid}, 200)
|
||||
|
||||
### --- END - SENSOR REGISTRATION --- ###
|
||||
|
||||
|
||||
### BEGIN - SENSOR MONITORING ###
|
||||
def get_sensors_monitoring_last_updated():
|
||||
res = r_serv_db.get('sensors_monitoring:last_updated')
|
||||
if res:
|
||||
return int(res)
|
||||
else:
|
||||
return 0
|
||||
|
||||
def get_all_sensors_to_monitor():
|
||||
return r_serv_db.smembers('to_monitor:sensors')
|
||||
|
||||
def get_to_monitor_delta_time_by_uuid(sensor_uuid):
|
||||
return int(r_serv_db.hget('to_monitor:sensor:{}'.format(sensor_uuid), 'delta_time'))
|
||||
|
||||
def get_all_sensors_to_monitor_dict():
|
||||
dict_to_monitor = {}
|
||||
for sensor_uuid in get_all_sensors_to_monitor():
|
||||
dict_to_monitor[sensor_uuid] = get_to_monitor_delta_time_by_uuid(sensor_uuid)
|
||||
return dict_to_monitor
|
||||
|
||||
def _check_sensor_delta(sensor_uuid, sensor_delta):
|
||||
last_d4_packet = get_time_sensor_last_seen(sensor_uuid)
|
||||
|
||||
# check sensor delta time between two D4 packets + check sensor connection
|
||||
if int(time.time()) - last_d4_packet > sensor_delta or not d4_server.is_sensor_connected(sensor_uuid):
|
||||
r_serv_db.sadd('sensors_monitoring:sensors_error', sensor_uuid)
|
||||
handle_sensor_monitoring_error(sensor_uuid)
|
||||
else:
|
||||
r_serv_db.srem('sensors_monitoring:sensors_error', sensor_uuid)
|
||||
|
||||
def handle_sensor_monitoring_error(sensor_uuid):
|
||||
print('sensor monitoring error: {}'.format(sensor_uuid))
|
||||
## TODO: ##
|
||||
# MAILS
|
||||
# UI Notifications
|
||||
# SNMP
|
||||
# Syslog message
|
||||
## ## ## ##
|
||||
return None
|
||||
|
||||
def is_sensor_monitored(sensor_uuid):
|
||||
return r_serv_db.sismember('to_monitor:sensors', sensor_uuid)
|
||||
|
||||
def get_all_sensors_connection_errors():
|
||||
return r_serv_db.smembers('sensors_monitoring:sensors_error')
|
||||
|
||||
def api_get_all_sensors_connection_errors():
|
||||
return list(get_all_sensors_connection_errors()), 200
|
||||
|
||||
def add_sensor_to_monitor(sensor_uuid, delta_time):
|
||||
r_serv_db.sadd('to_monitor:sensors', sensor_uuid)
|
||||
r_serv_db.hset('to_monitor:sensor:{}'.format(sensor_uuid), 'delta_time', delta_time)
|
||||
r_serv_db.set('sensors_monitoring:last_updated', int(time.time()))
|
||||
r_serv_db.srem('sensors_monitoring:sensors_error', sensor_uuid)
|
||||
|
||||
def delete_sensor_to_monitor(sensor_uuid):
|
||||
r_serv_db.srem('to_monitor:sensors', sensor_uuid)
|
||||
r_serv_db.delete('to_monitor:sensor:{}'.format(sensor_uuid))
|
||||
r_serv_db.set('sensors_monitoring:last_updated', int(time.time()))
|
||||
r_serv_db.srem('sensors_monitoring:sensors_error', sensor_uuid)
|
||||
|
||||
def api_add_sensor_to_monitor(data_dict):
|
||||
sensor_uuid = data_dict.get('uuid', None)
|
||||
delta_time = data_dict.get('delta_time', None)
|
||||
|
||||
if not is_valid_uuid_v4(sensor_uuid):
|
||||
return ({"status": "error", "reason": "Invalid uuid"}, 400)
|
||||
sensor_uuid = sensor_uuid.replace('-', '')
|
||||
|
||||
# delta time
|
||||
if not delta_time:
|
||||
return ({"status": "error", "reason": "Mandatory parameter(s) not provided"}, 400)
|
||||
else:
|
||||
try:
|
||||
delta_time = int(delta_time)
|
||||
if delta_time < 1:
|
||||
return ({"status": "error", "reason": "Invalid delta_time"}, 400)
|
||||
except Exception:
|
||||
return ({"status": "error", "reason": "Invalid delta_time"}, 400)
|
||||
add_sensor_to_monitor(sensor_uuid, delta_time)
|
||||
|
||||
def api_delete_sensor_to_monitor(data_dict):
|
||||
sensor_uuid = data_dict.get('uuid', None)
|
||||
if not is_valid_uuid_v4(sensor_uuid):
|
||||
return ({"status": "error", "reason": "Invalid uuid"}, 400)
|
||||
sensor_uuid = sensor_uuid.replace('-', '')
|
||||
if not is_sensor_monitored(sensor_uuid):
|
||||
return ({"status": "error", "reason": "Sensor not monitored"}, 400)
|
||||
delete_sensor_to_monitor(sensor_uuid)
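A hedged sketch of driving the monitoring API above; the UUID is the illustrative one used elsewhere in this changeset and `delta_time` is an arbitrary example value:

```python
# expect at least one D4 packet (and an open connection) every 10 minutes
api_add_sensor_to_monitor({'uuid': '03c00bcf-fe53-46a1-85bb-ee6084cb5bb2',
                           'delta_time': 600})

# sensors currently flagged by the monitor
errors, status = api_get_all_sensors_connection_errors()
```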
|
||||
|
||||
|
||||
### --- END - SENSOR MONITORING --- ###
|
|
@ -0,0 +1,74 @@
|
|||
#!/usr/bin/env python3
|
||||
# -*-coding:UTF-8 -*
|
||||
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
import redis
|
||||
import bcrypt
|
||||
import random
|
||||
|
||||
from flask_login import UserMixin
|
||||
|
||||
sys.path.append(os.path.join(os.environ['D4_HOME'], 'lib/'))
|
||||
import ConfigLoader
|
||||
|
||||
config_loader = ConfigLoader.ConfigLoader()
|
||||
r_serv_db = config_loader.get_redis_conn("Redis_SERV")
|
||||
config_loader = None
|
||||
|
||||
# CONFIG #
|
||||
config_loader = ConfigLoader.ConfigLoader()
|
||||
|
||||
class User(UserMixin):
|
||||
|
||||
def __init__(self, id):
|
||||
|
||||
self.r_serv_db = r_serv_db
|
||||
|
||||
if self.r_serv_db.hexists('user:all', id):
|
||||
self.id = id
|
||||
else:
|
||||
self.id = "__anonymous__"
|
||||
|
||||
# return True or False
|
||||
#def is_authenticated():
|
||||
|
||||
# return True or False
|
||||
#def is_anonymous():
|
||||
|
||||
@classmethod
|
||||
def get(cls, id):
|
||||
return cls(id)
|
||||
|
||||
def user_is_anonymous(self):
|
||||
if self.id == "__anonymous__":
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
def check_password(self, password):
|
||||
if self.user_is_anonymous():
|
||||
return False
|
||||
|
||||
rand_sleep = random.randint(1,300)/1000
|
||||
time.sleep(rand_sleep)
|
||||
|
||||
password = password.encode()
|
||||
hashed_password = self.r_serv_db.hget('user:all', self.id).encode()
|
||||
if bcrypt.checkpw(password, hashed_password):
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
def request_password_change(self):
|
||||
if self.r_serv_db.hget('user_metadata:{}'.format(self.id), 'change_passwd') == 'True':
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
def is_in_role(self, role):
|
||||
if self.r_serv_db.sismember('user_role:{}'.format(role), self.id):
|
||||
return True
|
||||
else:
|
||||
return False
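`check_password` compares against a bcrypt hash stored in the `user:all` hash (and roles live in `user_role:<role>` sets). A hedged sketch of creating such an entry by hand; the user id, password and role are illustrative, and `Redis_SERV` is the same section used above:

```python
import bcrypt
import ConfigLoader

r_serv_db = ConfigLoader.ConfigLoader().get_redis_conn("Redis_SERV")
password_hash = bcrypt.hashpw(b'change-me', bcrypt.gensalt())
r_serv_db.hset('user:all', 'operator@example.org', password_hash.decode())
r_serv_db.sadd('user_role:admin', 'operator@example.org')
```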
|
|
@ -0,0 +1,44 @@
|
|||
#!/usr/bin/env python3
|
||||
# -*-coding:UTF-8 -*
|
||||
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
import uuid
|
||||
import redis
|
||||
|
||||
from flask import escape
|
||||
|
||||
sys.path.append(os.path.join(os.environ['D4_HOME'], 'lib/'))
|
||||
import ConfigLoader
|
||||
|
||||
### Config ###
|
||||
config_loader = ConfigLoader.ConfigLoader()
|
||||
r_stream = config_loader.get_redis_conn("Redis_STREAM")
|
||||
config_loader = None
|
||||
### ###
|
||||
|
||||
### BEGIN - SENSOR CONNECTION ###
|
||||
|
||||
def get_all_connected_sensors(r_list=False):
|
||||
res = r_stream.smembers('active_connection')
|
||||
if r_list:
|
||||
if res:
|
||||
return list(res)
|
||||
else:
|
||||
return []
|
||||
else:
|
||||
return res
|
||||
|
||||
def get_all_connected_sensors_by_type(d4_type, d4_extended_type=None):
|
||||
# D4 extended type
|
||||
if d4_type == 254 and d4_extended_type:
|
||||
return r_stream.smembers('active_connection_extended_type:{}'.format(d4_extended_type))
|
||||
# type 1-253
|
||||
else:
|
||||
return r_stream.smembers('active_connection:{}'.format(d4_type))
|
||||
|
||||
def is_sensor_connected(sensor_uuid):
|
||||
return r_stream.sismember('active_connection', sensor_uuid)
|
||||
|
||||
### --- END - SENSOR CONNECTION --- ###
|
|
@ -0,0 +1,42 @@
|
|||
#!/usr/bin/env python3
|
||||
# -*-coding:UTF-8 -*
|
||||
|
||||
import os
|
||||
import sys
|
||||
import datetime
|
||||
import time
|
||||
import uuid
|
||||
import redis
|
||||
|
||||
sys.path.append(os.path.join(os.environ['D4_HOME'], 'lib/'))
|
||||
import ConfigLoader
|
||||
|
||||
### Config ###
|
||||
config_loader = ConfigLoader.ConfigLoader()
|
||||
r_serv_metadata = config_loader.get_redis_conn("Redis_METADATA")
|
||||
config_loader = None
|
||||
### ###
|
||||
|
||||
def get_all_accepted_format_type(r_list=False):
|
||||
res = r_serv_metadata.smembers('server:accepted_type')
|
||||
if r_list:
|
||||
if res:
|
||||
return list(res)
|
||||
else:
|
||||
return []
|
||||
return res
|
||||
|
||||
def get_all_accepted_extended_type(r_list=False):
|
||||
res = r_serv_metadata.smembers('server:accepted_extended_type')
|
||||
if r_list:
|
||||
if res:
|
||||
return list(res)
|
||||
else:
|
||||
return []
|
||||
return res
|
||||
|
||||
def is_accepted_format_type(format_type):
|
||||
return r_serv_metadata.sismember('server:accepted_type', format_type)
|
||||
|
||||
def is_accepted_extended_type(extended_type):
|
||||
return r_serv_metadata.sismember('server:accepted_extended_type', extended_type)
|
|
@ -1,6 +1,9 @@
|
|||
twisted[tls]
|
||||
redis
|
||||
flask
|
||||
flask==2.2.2
|
||||
flask-login
|
||||
bcrypt
|
||||
Werkzeug==2.2.2
|
||||
|
||||
#sudo python3 -m pip install --upgrade service_identity
|
||||
|
||||
|
|
|
@ -0,0 +1,54 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
import redis
|
||||
|
||||
sys.path.append(os.path.join(os.environ['D4_HOME'], 'lib/'))
|
||||
import ConfigLoader
|
||||
import Sensor
|
||||
|
||||
### Config ###
|
||||
config_loader = ConfigLoader.ConfigLoader()
|
||||
#redis_server_stream = config_loader.get_redis_conn("Redis_STREAM", decode_responses=False)
|
||||
redis_server_metadata = config_loader.get_redis_conn("Redis_METADATA")
|
||||
config_loader = None
|
||||
### ###
|
||||
|
||||
try:
|
||||
redis_server_metadata.ping()
|
||||
except redis.exceptions.ConnectionError:
|
||||
print('Error: Redis server: Redis_METADATA, ConnectionError')
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
def reload_all_sensors_to_monitor_dict(dict_to_monitor, last_updated):
|
||||
if not dict_to_monitor:
|
||||
dict_to_monitor = Sensor.get_all_sensors_to_monitor_dict()
|
||||
else:
|
||||
monitoring_last_updated = Sensor.get_sensors_monitoring_last_updated()
|
||||
if monitoring_last_updated > last_updated:
|
||||
dict_to_monitor = Sensor.get_all_sensors_to_monitor_dict()
|
||||
last_updated = int(time.time())
|
||||
print('updated: List of sensors to monitor')
|
||||
return dict_to_monitor
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
||||
time_refresh = int(time.time())
|
||||
last_updated = time_refresh
|
||||
all_sensors_to_monitor = Sensor.get_all_sensors_to_monitor_dict()
|
||||
|
||||
while True:
|
||||
|
||||
for sensor_uuid in all_sensors_to_monitor:
|
||||
Sensor._check_sensor_delta(sensor_uuid, all_sensors_to_monitor[sensor_uuid])
|
||||
time.sleep(10)
|
||||
|
||||
## reload dict_to_monitor ##
|
||||
curr_time = int(time.time())
|
||||
if curr_time - time_refresh >= 60:
|
||||
time_refresh = curr_time
|
||||
all_sensors_to_monitor = reload_all_sensors_to_monitor_dict(all_sensors_to_monitor, last_updated)
|
||||
##-- --##
|
335
server/server.py
|
@ -13,6 +13,8 @@ import argparse
|
|||
import logging
|
||||
import logging.handlers
|
||||
|
||||
import configparser
|
||||
|
||||
from twisted.internet import ssl, task, protocol, endpoints, defer
|
||||
from twisted.python import log
|
||||
from twisted.python.modules import getModule
|
||||
|
@ -20,12 +22,16 @@ from twisted.python.modules import getModule
|
|||
from twisted.internet.protocol import Protocol
|
||||
from twisted.protocols.policies import TimeoutMixin
|
||||
|
||||
sys.path.append(os.path.join(os.environ['D4_HOME'], 'lib/'))
|
||||
import ConfigLoader
|
||||
|
||||
hmac_reset = bytearray(32)
|
||||
hmac_key = b'private key to change'
|
||||
|
||||
accepted_type = [1, 2, 4, 8, 254]
|
||||
accepted_extended_type = ['ja3-jl']
|
||||
|
||||
all_server_modes = ('registration', 'shared-secret')
|
||||
|
||||
timeout_time = 30
|
||||
|
||||
header_size = 62
|
||||
|
@ -33,47 +39,145 @@ header_size = 62
|
|||
data_default_size_limit = 1000000
|
||||
default_max_entries_by_stream = 10000
|
||||
|
||||
host_redis_stream = "localhost"
|
||||
port_redis_stream = 6379
|
||||
### Config ###
|
||||
config_loader = ConfigLoader.ConfigLoader()
|
||||
|
||||
host_redis_metadata = "localhost"
|
||||
port_redis_metadata= 6380
|
||||
# REDIS #
|
||||
redis_server_stream = config_loader.get_redis_conn("Redis_STREAM", decode_responses=False)
|
||||
redis_server_metadata = config_loader.get_redis_conn("Redis_METADATA", decode_responses=False)
|
||||
|
||||
redis_server_stream = redis.StrictRedis(
|
||||
host=host_redis_stream,
|
||||
port=port_redis_stream,
|
||||
db=0)
|
||||
# get server_mode
|
||||
try:
|
||||
D4server_port = config_loader.get_config_int("D4_Server", "server_port")
|
||||
except configparser.NoOptionError:
|
||||
D4server_port = 4443
|
||||
|
||||
redis_server_metadata = redis.StrictRedis(
|
||||
host=host_redis_metadata,
|
||||
port=port_redis_metadata,
|
||||
db=0)
|
||||
server_mode = config_loader.get_config_str("D4_Server", "server_mode")
|
||||
try:
|
||||
hmac_key = config_loader.get_config_str("D4_Server", "default_hmac_key")
|
||||
except configparser.NoOptionError:
|
||||
hmac_key = 'private key to change'
|
||||
|
||||
config_loader = None
|
||||
### ###
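The options read above suggest a `configs/server.conf` along these lines; this is a sketch, not the shipped file, and the values simply restate the defaults visible in the code:

```ini
[D4_Server]
server_port = 4443
server_mode = registration
default_hmac_key = private key to change

[Redis_STREAM]
host = localhost
port = 6379
db = 0

[Redis_METADATA]
host = localhost
port = 6380
db = 0
```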
|
||||
|
||||
try:
|
||||
redis_server_stream.ping()
|
||||
except redis.exceptions.ConnectionError:
|
||||
print('Error: Redis server {}:{}, ConnectionError'.format(host_redis_stream, port_redis_stream))
|
||||
print('Error: Redis server Redis_STREAM, ConnectionError')
|
||||
sys.exit(1)
|
||||
|
||||
try:
|
||||
redis_server_metadata.ping()
|
||||
except redis.exceptions.ConnectionError:
|
||||
print('Error: Redis server {}:{}, ConnectionError'.format(host_redis_metadata, port_redis_metadata))
|
||||
print('Error: Redis server Redis_METADATA, ConnectionError')
|
||||
sys.exit(1)
|
||||
|
||||
### REDIS ###
|
||||
|
||||
# set hmac default key
|
||||
redis_server_metadata.set('server:hmac_default_key', hmac_key)
|
||||
|
||||
# init redis_server_metadata
|
||||
redis_server_metadata.delete('server:accepted_type')
|
||||
for type in accepted_type:
|
||||
redis_server_metadata.sadd('server:accepted_type', type)
|
||||
redis_server_metadata.delete('server:accepted_extended_type')
|
||||
for type in accepted_extended_type:
|
||||
redis_server_metadata.sadd('server:accepted_extended_type', type)
|
||||
|
||||
dict_all_connection = {}
|
||||
|
||||
### FUNCTIONS ###
|
||||
|
||||
# kick sensors
|
||||
def kick_sensors():
|
||||
for client_uuid in redis_server_stream.smembers('server:sensor_to_kick'):
|
||||
client_uuid = client_uuid.decode()
|
||||
for session_uuid in redis_server_stream.smembers('map:active_connection-uuid-session_uuid:{}'.format(client_uuid)):
|
||||
session_uuid = session_uuid.decode()
|
||||
logger.warning('Sensor kicked uuid={}, session_uuid={}'.format(client_uuid, session_uuid))
|
||||
redis_server_stream.set('temp_blacklist_uuid:{}'.format(client_uuid), 'some random string')
|
||||
redis_server_stream.expire('temp_blacklist_uuid:{}'.format(client_uuid), 30)
|
||||
dict_all_connection[session_uuid].transport.abortConnection()
|
||||
redis_server_stream.srem('server:sensor_to_kick', client_uuid)
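`kick_sensors` only consumes the `server:sensor_to_kick` set; something else (presumably the web interface) has to populate it. A hedged sketch of requesting a kick by hand, with the illustrative sensor UUID used elsewhere in this changeset:

```python
# ask the server to abort every active session of this sensor (uuid stored without dashes)
redis_server_stream.sadd('server:sensor_to_kick',
                         '03c00bcf-fe53-46a1-85bb-ee6084cb5bb2'.replace('-', ''))
```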
|
||||
|
||||
# Unpack D4 Header
|
||||
#def unpack_header(data):
|
||||
# data_header = {}
|
||||
# if len(data) >= header_size:
|
||||
# data_header['version'] = struct.unpack('B', data[0:1])[0]
|
||||
# data_header['type'] = struct.unpack('B', data[1:2])[0]
|
||||
# data_header['uuid_header'] = data[2:18].hex()
|
||||
# data_header['timestamp'] = struct.unpack('Q', data[18:26])[0]
|
||||
# data_header['hmac_header'] = data[26:58]
|
||||
# data_header['size'] = struct.unpack('I', data[58:62])[0]
|
||||
# return data_header
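The commented-out helper above matches the fixed 62-byte D4 header (`header_size = 62`): 1-byte version, 1-byte type, 16-byte sensor UUID, 8-byte timestamp, 32-byte HMAC and 4-byte payload size. A standalone sketch derived from those offsets (an assumption reconstructed from this file, not a separate specification):

```python
import struct

def unpack_d4_header(data):
    """Parse a 62-byte D4 header; returns None when too little data is buffered."""
    if len(data) < 62:
        return None
    return {
        'version': struct.unpack('B', data[0:1])[0],
        'type': struct.unpack('B', data[1:2])[0],
        'uuid_header': data[2:18].hex(),
        'timestamp': struct.unpack('Q', data[18:26])[0],
        'hmac_header': data[26:58],
        'size': struct.unpack('I', data[58:62])[0],
    }
```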
|
||||
|
||||
def is_valid_uuid_v4(header_uuid):
|
||||
try:
|
||||
uuid_test = uuid.UUID(hex=header_uuid, version=4)
|
||||
return uuid_test.hex == header_uuid
|
||||
except Exception:
|
||||
logger.info('Not UUID v4: uuid={}'.format(header_uuid))
|
||||
return False
|
||||
|
||||
# # TODO: check timestamp
|
||||
def is_valid_header(uuid_to_check, type):
|
||||
if is_valid_uuid_v4(uuid_to_check):
|
||||
if redis_server_metadata.sismember('server:accepted_type', type):
|
||||
return True
|
||||
else:
|
||||
logger.warning('Invalid type, the server does not accept this type: {}, uuid={}'.format(type, uuid_to_check))
|
||||
return False
|
||||
else:
|
||||
logger.info('Invalid Header, uuid={}'.format(uuid_to_check))
|
||||
return False
|
||||
|
||||
def extract_ip(ip_string):
|
||||
#remove interface
|
||||
ip_string = ip_string.split('%')[0]
|
||||
# IPv4
|
||||
#extract ipv4
|
||||
if '.' in ip_string:
|
||||
return ip_string.split(':')[-1]
|
||||
# IPv6
|
||||
else:
|
||||
return ip_string
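For example (addresses are illustrative), `extract_ip` drops a trailing interface suffix and, for IPv4-mapped strings, keeps only the part after the last colon:

```python
extract_ip('::ffff:192.168.1.10%eth0')   # -> '192.168.1.10'
extract_ip('2001:db8::1')                # -> '2001:db8::1' (IPv6 returned as-is)
```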
|
||||
|
||||
def server_mode_registration(header_uuid):
|
||||
# only accept registered uuid
|
||||
if server_mode == 'registration':
|
||||
if not redis_server_metadata.sismember('registered_uuid', header_uuid):
|
||||
error_msg = 'Not registered UUID={}, connection closed'.format(header_uuid)
|
||||
print(error_msg)
|
||||
logger.warning(error_msg)
|
||||
#redis_server_metadata.hset('metadata_uuid:{}'.format(data_header['uuid_header']), 'Error', 'Error: This UUID is temporarily blacklisted')
|
||||
return False
|
||||
else:
|
||||
return True
|
||||
else:
|
||||
return True
|
||||
|
||||
def is_client_ip_blacklisted():
|
||||
pass
|
||||
|
||||
def is_uuid_blacklisted(uuid):
|
||||
return redis_server_metadata.sismember('blacklist_uuid', uuid)
|
||||
|
||||
|
||||
# return True if not blocked
|
||||
# False if blacklisted
|
||||
def check_blacklist():
|
||||
pass
|
||||
|
||||
# Kill Connection + create log
|
||||
#def manual_abort_connection(self, message, log_level='WARNING'):
|
||||
# logger.log(message)
|
||||
# self.transport.abortConnection()
|
||||
# return 1
|
||||
|
||||
### ###
|
||||
|
||||
|
||||
class D4_Server(Protocol, TimeoutMixin):
|
||||
|
||||
def __init__(self):
|
||||
|
@ -83,6 +187,7 @@ class D4_Server(Protocol, TimeoutMixin):
|
|||
self.data_saved = False
|
||||
self.update_stream_type = True
|
||||
self.first_connection = True
|
||||
self.duplicate = False
|
||||
self.ip = None
|
||||
self.source_port = None
|
||||
self.stream_max_size = None
|
||||
|
@ -95,20 +200,12 @@ class D4_Server(Protocol, TimeoutMixin):
|
|||
|
||||
def dataReceived(self, data):
|
||||
# check and kick sensor by uuid
|
||||
for client_uuid in redis_server_stream.smembers('server:sensor_to_kick'):
|
||||
client_uuid = client_uuid.decode()
|
||||
for session_uuid in redis_server_stream.smembers('map:active_connection-uuid-session_uuid:{}'.format(client_uuid)):
|
||||
session_uuid = session_uuid.decode()
|
||||
logger.warning('Sensor kicked uuid={}, session_uuid={}'.format(client_uuid, session_uuid))
|
||||
redis_server_stream.set('temp_blacklist_uuid:{}'.format(client_uuid), 'some random string')
|
||||
redis_server_stream.expire('temp_blacklist_uuid:{}'.format(client_uuid), 30)
|
||||
dict_all_connection[session_uuid].transport.abortConnection()
|
||||
redis_server_stream.srem('server:sensor_to_kick', client_uuid)
|
||||
kick_sensors()
|
||||
|
||||
self.resetTimeout()
|
||||
if self.first_connection or self.ip is None:
|
||||
client_info = self.transport.client
|
||||
self.ip = self.extract_ip(client_info[0])
|
||||
self.ip = extract_ip(client_info[0])
|
||||
self.source_port = client_info[1]
|
||||
logger.debug('New connection, ip={}, port={} session_uuid={}'.format(self.ip, self.source_port, self.session_uuid))
|
||||
# check blacklisted_ip
|
||||
|
@ -135,10 +232,29 @@ class D4_Server(Protocol, TimeoutMixin):
|
|||
def connectionLost(self, reason):
|
||||
redis_server_stream.sadd('ended_session', self.session_uuid)
|
||||
self.setTimeout(None)
|
||||
redis_server_stream.srem('active_connection:{}'.format(self.type), '{}:{}'.format(self.ip, self.uuid))
|
||||
redis_server_stream.srem('active_connection', '{}'.format(self.uuid))
|
||||
|
||||
if not self.duplicate:
|
||||
if self.type == 254 or self.type == 2:
|
||||
redis_server_stream.srem('active_uuid_type{}:{}'.format(self.type, self.uuid), self.session_uuid)
|
||||
if not redis_server_stream.exists('active_uuid_type{}:{}'.format(self.type, self.uuid)):
|
||||
redis_server_stream.srem('active_connection:{}'.format(self.type), self.uuid)
|
||||
redis_server_stream.srem('active_connection_by_uuid:{}'.format(self.uuid), self.type)
|
||||
# clean extended type
|
||||
current_extended_type = redis_server_stream.hget('map:session-uuid_active_extended_type', self.session_uuid)
|
||||
if current_extended_type:
|
||||
redis_server_stream.hdel('map:session-uuid_active_extended_type', self.session_uuid)
|
||||
redis_server_stream.srem('active_connection_extended_type:{}'.format(self.uuid), current_extended_type)
|
||||
|
||||
else:
|
||||
if self.uuid:
|
||||
redis_server_stream.srem('active_connection:{}'.format(self.type), self.uuid)
|
||||
redis_server_stream.srem('active_connection_by_uuid:{}'.format(self.uuid), self.type)
|
||||
|
||||
if self.uuid:
|
||||
redis_server_stream.srem('map:active_connection-uuid-session_uuid:{}'.format(self.uuid), self.session_uuid)
|
||||
if not redis_server_stream.exists('active_connection_by_uuid:{}'.format(self.uuid)):
|
||||
redis_server_stream.srem('active_connection', self.uuid)
|
||||
|
||||
logger.debug('Connection closed: session_uuid={}'.format(self.session_uuid))
|
||||
dict_all_connection.pop(self.session_uuid)
|
||||
|
||||
|
@ -153,35 +269,19 @@ class D4_Server(Protocol, TimeoutMixin):
|
|||
data_header['size'] = struct.unpack('I', data[58:62])[0]
|
||||
return data_header
|
||||
|
||||
def extract_ip(self, ip_string):
|
||||
#remove interface
|
||||
ip_string = ip_string.split('%')[0]
|
||||
# IPv4
|
||||
#extract ipv4
|
||||
if '.' in ip_string:
|
||||
return ip_string.split(':')[-1]
|
||||
# IPv6
|
||||
else:
|
||||
return ip_string
|
||||
def check_hmac_key(self, hmac_header, data):
|
||||
if self.hmac_key is None:
|
||||
self.hmac_key = redis_server_metadata.hget('metadata_uuid:{}'.format(self.uuid), 'hmac_key')
|
||||
if self.hmac_key is None:
|
||||
self.hmac_key = redis_server_metadata.get('server:hmac_default_key')
|
||||
|
||||
def is_valid_uuid_v4(self, header_uuid):
|
||||
try:
|
||||
uuid_test = uuid.UUID(hex=header_uuid, version=4)
|
||||
return uuid_test.hex == header_uuid
|
||||
except:
|
||||
logger.info('Not UUID v4: uuid={}, session_uuid={}'.format(header_uuid, self.session_uuid))
|
||||
return False
|
||||
# set hmac_header to 0
|
||||
data = data.replace(hmac_header, hmac_reset, 1)
|
||||
|
||||
# # TODO: check timestamp
|
||||
def is_valid_header(self, uuid_to_check, type):
|
||||
if self.is_valid_uuid_v4(uuid_to_check):
|
||||
if redis_server_metadata.sismember('server:accepted_type', type):
|
||||
return True
|
||||
else:
|
||||
logger.warning('Invalid type, the server don\'t accept this type: {}, uuid={}, session_uuid={}'.format(type, uuid_to_check, self.session_uuid))
|
||||
else:
|
||||
logger.info('Invalid Header, uuid={}, session_uuid={}'.format(uuid_to_check, self.session_uuid))
|
||||
return False
|
||||
HMAC = hmac.new(self.hmac_key, msg=data, digestmod='sha256')
|
||||
hmac_header = hmac_header.hex()
|
||||
# hmac match
|
||||
return hmac_header == HMAC.hexdigest()
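Mirroring `check_hmac_key`, a sender computes the HMAC-SHA256 over the whole packet with the 32-byte HMAC field zeroed, then writes the digest into bytes 26..58 of the header. A hedged client-side sketch (the key is a placeholder and the offsets follow the header layout used in this file):

```python
import hmac

def sign_d4_packet(packet, key):
    # packet: full D4 packet (62-byte header + payload) with bytes 26..58 already zeroed
    digest = hmac.new(key, msg=packet, digestmod='sha256').digest()  # 32 bytes
    return packet[:26] + digest + packet[58:]
```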
|
||||
|
||||
def check_connection_validity(self, data_header):
|
||||
# blacklist ip by uuid
|
||||
|
@ -197,6 +297,11 @@ class D4_Server(Protocol, TimeoutMixin):
|
|||
self.transport.abortConnection()
|
||||
return False
|
||||
|
||||
# Check server mode
|
||||
if not server_mode_registration(data_header['uuid_header']):
|
||||
self.transport.abortConnection()
|
||||
return False
|
||||
|
||||
# check temp blacklist
|
||||
if redis_server_stream.exists('temp_blacklist_uuid:{}'.format(data_header['uuid_header'])):
|
||||
logger.warning('Temporarily Blacklisted UUID={}, connection closed'.format(data_header['uuid_header']))
|
||||
|
@ -226,19 +331,55 @@ class D4_Server(Protocol, TimeoutMixin):
|
|||
if data_header:
|
||||
if not self.check_connection_validity(data_header):
|
||||
return 1
|
||||
if self.is_valid_header(data_header['uuid_header'], data_header['type']):
|
||||
if is_valid_header(data_header['uuid_header'], data_header['type']):
|
||||
|
||||
# auto kill connection # TODO: map type
|
||||
if self.first_connection:
|
||||
self.first_connection = False
|
||||
if redis_server_stream.sismember('active_connection:{}'.format(data_header['type']), '{}:{}'.format(ip, data_header['uuid_header'])):
|
||||
if data_header['type'] == 2:
|
||||
redis_server_stream.sadd('active_uuid_type2:{}'.format(data_header['uuid_header']), self.session_uuid)
|
||||
|
||||
# type 254, check if previous type 2 saved
|
||||
elif data_header['type'] == 254:
|
||||
logger.warning('a type 2 packet must be sent, ip={} uuid={} type={} session_uuid={}'.format(ip, data_header['uuid_header'], data_header['type'], self.session_uuid))
|
||||
redis_server_metadata.hset('metadata_uuid:{}'.format(data_header['uuid_header']), 'Error', 'Error: a type 2 packet must be sent, type={}'.format(data_header['type']))
|
||||
self.duplicate = True
|
||||
self.transport.abortConnection()
|
||||
return 1
|
||||
|
||||
# accept only one type/by uuid (except for type 2/254)
|
||||
elif redis_server_stream.sismember('active_connection:{}'.format(data_header['type']), '{}'.format(data_header['uuid_header'])):
|
||||
# same IP-type for an UUID
|
||||
logger.warning('UUID already in use for this type, ip={} uuid={} type={} session_uuid={}'.format(ip, data_header['uuid_header'], data_header['type'], self.session_uuid))
|
||||
redis_server_metadata.hset('metadata_uuid:{}'.format(data_header['uuid_header']), 'Error', 'Error: this UUID is already in use for type={}'.format(data_header['type']))
|
||||
self.duplicate = True
|
||||
self.transport.abortConnection()
|
||||
return 1
|
||||
else:
|
||||
#self.version = None
|
||||
|
||||
self.type = data_header['type']
|
||||
self.uuid = data_header['uuid_header']
|
||||
|
||||
# # check HMAC /!\ incomplete data
|
||||
# if not self.check_hmac_key(data_header['hmac_header'], data):
|
||||
# print('hmac do not match')
|
||||
# print(data)
|
||||
# logger.debug("HMAC don't match, uuid={}, session_uuid={}".format(self.uuid, self.session_uuid))
|
||||
# redis_server_metadata.hset('metadata_uuid:{}'.format(data_header['uuid_header']), 'Error', 'Error: HMAC don\'t match')
|
||||
# self.transport.abortConnection()
|
||||
# return 1
|
||||
|
||||
## save active connection ##
|
||||
#active Connection
|
||||
redis_server_stream.sadd('active_connection:{}'.format(self.type), self.uuid)
|
||||
redis_server_stream.sadd('active_connection_by_uuid:{}'.format(self.uuid), self.type)
|
||||
redis_server_stream.sadd('active_connection', self.uuid)
|
||||
# map session_uuid/uuid
|
||||
redis_server_stream.sadd('map:active_connection-uuid-session_uuid:{}'.format(self.uuid), self.session_uuid)
|
||||
|
||||
# map all type by uuid ## TODO: # FIXME: put me in workers ??????
|
||||
redis_server_metadata.sadd('all_types_by_uuid:{}'.format(data_header['uuid_header']), data_header['type'])
|
||||
## ##
|
||||
|
||||
# check if type change
|
||||
if self.data_saved:
|
||||
# type change detected
|
||||
|
@ -246,25 +387,37 @@ class D4_Server(Protocol, TimeoutMixin):
|
|||
# Meta types
|
||||
if self.type == 2 and data_header['type'] == 254:
|
||||
self.update_stream_type = True
|
||||
self.type = data_header['type']
|
||||
#redis_server_stream.hdel('map-type:session_uuid-uuid:2', self.session_uuid) # # TODO: to remove / refractor
|
||||
redis_server_stream.srem('active_uuid_type2:{}'.format(self.uuid), self.session_uuid)
|
||||
|
||||
# remove type 2 connection
|
||||
if not redis_server_stream.exists('active_uuid_type2:{}'.format(self.uuid)):
|
||||
redis_server_stream.srem('active_connection:2', self.uuid)
|
||||
redis_server_stream.srem('active_connection_by_uuid:{}'.format(self.uuid), 2)
|
||||
|
||||
## save active connection ##
|
||||
#active Connection
|
||||
redis_server_stream.sadd('active_connection:{}'.format(self.type), self.uuid)
|
||||
redis_server_stream.sadd('active_connection_by_uuid:{}'.format(self.uuid), self.type)
|
||||
redis_server_stream.sadd('active_connection', self.uuid)
|
||||
|
||||
redis_server_stream.sadd('active_uuid_type254:{}'.format(self.uuid), self.session_uuid)
|
||||
|
||||
# map all type by uuid ## TODO: # FIXME: put me in workers ??????
|
||||
redis_server_metadata.sadd('all_types_by_uuid:{}'.format(data_header['uuid_header']), data_header['type'])
|
||||
## ##
|
||||
|
||||
|
||||
#redis_server_stream.hset('map-type:session_uuid-uuid:{}'.format(data_header['type']), self.session_uuid, data_header['uuid_header'])
|
||||
|
||||
|
||||
# Type Error
|
||||
else:
|
||||
logger.warning('Unexpected type change, type={} new type={}, ip={} uuid={} session_uuid={}'.format(self.type, data_header['type'], ip, data_header['uuid_header'], self.session_uuid))
|
||||
redis_server_metadata.hset('metadata_uuid:{}'.format(data_header['uuid_header']), 'Error', 'Error: Unexpected type change type={}, new type={}'.format(self.type, data_header['type']))
|
||||
self.transport.abortConnection()
|
||||
return 1
|
||||
# type 254, check if previous type 2 saved
|
||||
elif data_header['type'] == 254:
|
||||
logger.warning('a type 2 packet must be sent, ip={} uuid={} type={} session_uuid={}'.format(ip, data_header['uuid_header'], data_header['type'], self.session_uuid))
|
||||
redis_server_metadata.hset('metadata_uuid:{}'.format(data_header['uuid_header']), 'Error', 'Error: a type 2 packet must be sent, type={}'.format(data_header['type']))
|
||||
self.transport.abortConnection()
|
||||
return 1
|
||||
self.type = data_header['type']
|
||||
self.uuid = data_header['uuid_header']
|
||||
#active Connection
|
||||
redis_server_stream.sadd('active_connection:{}'.format(self.type), '{}:{}'.format(ip, self.uuid))
|
||||
redis_server_stream.sadd('active_connection', '{}'.format(self.uuid))
|
||||
# map session_uuid/uuid
|
||||
redis_server_stream.sadd('map:active_connection-uuid-session_uuid:{}'.format(self.uuid), self.session_uuid)
|
||||
|
||||
# check if the uuid is the same
|
||||
if self.uuid != data_header['uuid_header']:
|
||||
|
@ -340,15 +493,6 @@ class D4_Server(Protocol, TimeoutMixin):
|
|||
def process_d4_data(self, data, data_header, ip):
|
||||
# empty buffer
|
||||
self.buffer = b''
|
||||
# set hmac_header to 0
|
||||
data = data.replace(data_header['hmac_header'], hmac_reset, 1)
|
||||
if self.hmac_key is None:
|
||||
self.hmac_key = redis_server_metadata.hget('metadata_uuid:{}'.format(data_header['uuid_header']), 'hmac_key')
|
||||
if self.hmac_key is None:
|
||||
self.hmac_key = redis_server_metadata.get('server:hmac_default_key')
|
||||
|
||||
HMAC = hmac.new(self.hmac_key, msg=data, digestmod='sha256')
|
||||
data_header['hmac_header'] = data_header['hmac_header'].hex()
|
||||
|
||||
### Debug ###
|
||||
#print('hexdigest: {}'.format( HMAC.hexdigest() ))
|
||||
|
@ -361,7 +505,7 @@ class D4_Server(Protocol, TimeoutMixin):
|
|||
### ###
|
||||
|
||||
# hmac match
|
||||
if data_header['hmac_header'] == HMAC.hexdigest():
|
||||
if self.check_hmac_key(data_header['hmac_header'], data):
|
||||
if not self.stream_max_size:
|
||||
temp = redis_server_metadata.hget('stream_max_size_by_uuid', data_header['uuid_header'])
|
||||
if temp is not None:
|
||||
|
@ -386,24 +530,25 @@ class D4_Server(Protocol, TimeoutMixin):
|
|||
redis_server_metadata.zincrby('stat_uuid_type:{}:{}'.format(date, data_header['uuid_header']), 1, data_header['type'])
|
||||
|
||||
#
|
||||
d4_packet_rcv_time = int(time.time())
|
||||
if not redis_server_metadata.hexists('metadata_uuid:{}'.format(data_header['uuid_header']), 'first_seen'):
|
||||
redis_server_metadata.hset('metadata_uuid:{}'.format(data_header['uuid_header']), 'first_seen', data_header['timestamp'])
|
||||
redis_server_metadata.hset('metadata_uuid:{}'.format(data_header['uuid_header']), 'last_seen', data_header['timestamp'])
|
||||
redis_server_metadata.hset('metadata_type_by_uuid:{}:{}'.format(data_header['uuid_header'], data_header['type']), 'last_seen', data_header['timestamp'])
|
||||
redis_server_metadata.hset('metadata_uuid:{}'.format(data_header['uuid_header']), 'first_seen', d4_packet_rcv_time)
|
||||
redis_server_metadata.hset('metadata_uuid:{}'.format(data_header['uuid_header']), 'last_seen', d4_packet_rcv_time)
|
||||
redis_server_metadata.hset('metadata_type_by_uuid:{}:{}'.format(data_header['uuid_header'], data_header['type']), 'last_seen', d4_packet_rcv_time)
|
||||
|
||||
if not self.data_saved:
|
||||
# worker entry point: map type:session_uuid
|
||||
redis_server_stream.sadd('session_uuid:{}'.format(data_header['type']), self.session_uuid.encode())
|
||||
|
||||
#UUID IP: ## TODO: use d4 timestamp ?
|
||||
redis_server_metadata.lpush('list_uuid_ip:{}'.format(data_header['uuid_header']), '{}-{}'.format(ip, datetime.datetime.now().strftime("%Y%m%d%H%M%S")))
|
||||
redis_server_metadata.ltrim('list_uuid_ip:{}'.format(data_header['uuid_header']), 0, 15)
|
||||
|
||||
self.data_saved = True
|
||||
if self.update_stream_type:
|
||||
redis_server_stream.sadd('session_uuid:{}'.format(data_header['type']), self.session_uuid.encode())
|
||||
redis_server_stream.hset('map-type:session_uuid-uuid:{}'.format(data_header['type']), self.session_uuid, data_header['uuid_header'])
|
||||
redis_server_metadata.sadd('all_types_by_uuid:{}'.format(data_header['uuid_header']), data_header['type'])
|
||||
|
||||
if not redis_server_metadata.hexists('metadata_type_by_uuid:{}:{}'.format(data_header['uuid_header'], data_header['type']), 'first_seen'):
|
||||
redis_server_metadata.hset('metadata_type_by_uuid:{}:{}'.format(data_header['uuid_header'], data_header['type']), 'first_seen', data_header['timestamp'])
|
||||
redis_server_metadata.hset('metadata_type_by_uuid:{}:{}'.format(data_header['uuid_header'], data_header['type']), 'first_seen', d4_packet_rcv_time)
|
||||
self.update_stream_type = False
|
||||
return 0
|
||||
else:
|
||||
|
@ -434,7 +579,7 @@ def main(reactor):
|
|||
certificate = ssl.PrivateCertificate.loadPEM(certData)
|
||||
factory = protocol.Factory.forProtocol(D4_Server)
|
||||
# use interface to support both IPv4 and IPv6
|
||||
reactor.listenSSL(4443, factory, certificate.options(), interface='::')
|
||||
reactor.listenSSL(D4server_port, factory, certificate.options(), interface='::')
|
||||
return defer.Deferred()
|
||||
|
||||
|
||||
|
@ -460,6 +605,14 @@ if __name__ == "__main__":
|
|||
logger.addHandler(handler_log)
|
||||
logger.setLevel(args.verbose)
|
||||
|
||||
# get server_mode
|
||||
if server_mode not in all_server_modes:
|
||||
print('Error: incorrect server_mode')
|
||||
logger.critical('Error: incorrect server_mode')
|
||||
sys.exit(1)
|
||||
logger.info('Server mode: {}'.format(server_mode))
|
||||
|
||||
|
||||
logger.info('Launching Server ...')
|
||||
|
||||
task.react(main)
|
||||
|
|
|
@ -0,0 +1,38 @@
|
|||
#!/usr/bin/env python3
|
||||
# -*-coding:UTF-8 -*
|
||||
|
||||
import os
|
||||
import sys
|
||||
import datetime
|
||||
import time
|
||||
import uuid
|
||||
import redis
|
||||
|
||||
sys.path.append(os.path.join(os.environ['D4_HOME'], 'lib/'))
|
||||
import ConfigLoader
|
||||
import Analyzer_Queue
|
||||
import d4_type
|
||||
|
||||
### Config ###
|
||||
config_loader = ConfigLoader.ConfigLoader()
|
||||
r_serv_metadata = config_loader.get_redis_conn("Redis_METADATA")
|
||||
config_loader = None
|
||||
### ###
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
||||
for format_type in d4_type.get_all_accepted_format_type():
|
||||
format_type = int(format_type)
|
||||
for queue_uuid in Analyzer_Queue.get_all_queues_by_type(format_type):
|
||||
r_serv_metadata.hset('analyzer:{}'.format(queue_uuid), 'type', format_type)
|
||||
r_serv_metadata.sadd('all:analyzer:format_type', format_type)
|
||||
r_serv_metadata.sadd('all:analyzer:by:format_type:{}'.format(format_type), queue_uuid)
|
||||
|
||||
for extended_type in d4_type.get_all_accepted_extended_type():
|
||||
for queue_uuid in Analyzer_Queue.get_all_queues_by_extended_type(extended_type):
|
||||
r_serv_metadata.hset('analyzer:{}'.format(queue_uuid), 'type', 254)
|
||||
r_serv_metadata.hset('analyzer:{}'.format(queue_uuid), 'metatype', extended_type)
|
||||
r_serv_metadata.sadd('all:analyzer:extended_type', extended_type)
|
||||
r_serv_metadata.sadd('all:analyzer:format_type', 254)
|
||||
r_serv_metadata.sadd('all:analyzer:by:extended_type:{}'.format(extended_type), queue_uuid)
|
||||
r_serv_metadata.sadd('all:analyzer:by:format_type:254', queue_uuid)
|
|
@ -3,25 +3,45 @@
|
|||
|
||||
import os
|
||||
import re
|
||||
import ssl
|
||||
import sys
|
||||
import uuid
|
||||
import time
|
||||
import json
|
||||
import redis
|
||||
import time
|
||||
import uuid
|
||||
import flask
|
||||
import redis
|
||||
import random
|
||||
import datetime
|
||||
import ipaddress
|
||||
|
||||
import subprocess
|
||||
|
||||
from flask import Flask, render_template, jsonify, request, Blueprint, redirect, url_for
|
||||
from flask import Flask, render_template, jsonify, request, Blueprint, redirect, url_for, Response, escape
|
||||
from flask_login import LoginManager, current_user, login_user, logout_user, login_required
|
||||
|
||||
import bcrypt
|
||||
|
||||
# Import Role_Manager
|
||||
from Role_Manager import create_user_db, check_password_strength, check_user_role_integrity
|
||||
from Role_Manager import login_user_basic, login_admin
|
||||
|
||||
sys.path.append(os.path.join(os.environ['D4_HOME'], 'lib'))
|
||||
from User import User
|
||||
import Sensor
|
||||
import ConfigLoader
|
||||
import Analyzer_Queue
|
||||
|
||||
# Import Blueprint
|
||||
from blueprints.restApi import restApi
|
||||
from blueprints.settings import settings
|
||||
from blueprints.analyzer_queue import analyzer_queue
|
||||
from blueprints.D4_sensors import D4_sensors
|
||||
|
||||
baseUrl = ''
|
||||
if baseUrl != '':
|
||||
baseUrl = '/'+baseUrl
|
||||
|
||||
host_redis_stream = "localhost"
|
||||
port_redis_stream = 6379
|
||||
all_server_modes = ('registration', 'shared-secret')
|
||||
|
||||
default_max_entries_by_stream = 10000
|
||||
analyzer_list_max_default_size = 10000
|
||||
|
@ -30,26 +50,40 @@ default_analyzer_max_line_len = 3000
|
|||
|
||||
json_type_description_path = os.path.join(os.environ['D4_HOME'], 'web/static/json/type.json')
|
||||
|
||||
redis_server_stream = redis.StrictRedis(
|
||||
host=host_redis_stream,
|
||||
port=port_redis_stream,
|
||||
db=0,
|
||||
decode_responses=True)
|
||||
### Config ###
|
||||
config_loader = ConfigLoader.ConfigLoader()
|
||||
|
||||
host_redis_metadata = "localhost"
|
||||
port_redis_metadata= 6380
|
||||
# get data directory
|
||||
use_default_save_directory = config_loader.get_config_boolean("Save_Directories", "use_default_save_directory")
|
||||
# check if field is None
|
||||
if use_default_save_directory:
|
||||
data_directory = os.path.join(os.environ['D4_HOME'], 'data')
|
||||
else:
|
||||
data_directory = config_loader.get_config_str("Save_Directories", "save_directory")
|
||||
|
||||
redis_server_metadata = redis.StrictRedis(
|
||||
host=host_redis_metadata,
|
||||
port=port_redis_metadata,
|
||||
db=0,
|
||||
decode_responses=True)
|
||||
server_mode = config_loader.get_config_str("D4_Server", "server_mode")
|
||||
if server_mode not in all_server_modes:
|
||||
print('Error: incorrect server_mode')
|
||||
|
||||
redis_server_analyzer = redis.StrictRedis(
|
||||
host=host_redis_metadata,
|
||||
port=port_redis_metadata,
|
||||
db=2,
|
||||
decode_responses=True)
|
||||
try:
|
||||
FLASK_HOST = config_loader.get_config_str("Flask_Server", "host")
|
||||
except Exception as e:
|
||||
print(e)
|
||||
FLASK_HOST = '127.0.0.1'
|
||||
|
||||
try:
|
||||
FLASK_PORT = config_loader.get_config_int("Flask_Server", "port")
|
||||
except Exception:
|
||||
FLASK_PORT = 7000
|
||||
|
||||
redis_server_stream = config_loader.get_redis_conn("Redis_STREAM")
|
||||
redis_server_metadata = config_loader.get_redis_conn("Redis_METADATA")
|
||||
redis_users = config_loader.get_redis_conn("Redis_SERV")
|
||||
redis_server_analyzer = config_loader.get_redis_conn("Redis_ANALYZER")
|
||||
r_cache = config_loader.get_redis_conn("Redis_CACHE")
|
||||
|
||||
config_loader = None
|
||||
### ###
|
||||
|
||||
with open(json_type_description_path, 'r') as f:
|
||||
json_type = json.loads(f.read())
|
||||
|
@ -57,9 +91,41 @@ json_type_description = {}
|
|||
for type_info in json_type:
|
||||
json_type_description[type_info['type']] = type_info
|
||||
|
||||
Flask_dir = os.path.join(os.environ['D4_HOME'], 'web')
|
||||
|
||||
# ========= TLS =========#
|
||||
ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1_2)
|
||||
ssl_context.load_cert_chain(certfile=os.path.join(Flask_dir, 'server.crt'), keyfile=os.path.join(Flask_dir, 'server.key'))
|
||||
#print(ssl_context.get_ciphers())
|
||||
# ========= =========#
|
||||
|
||||
app = Flask(__name__, static_url_path=baseUrl+'/static/')
|
||||
app.config['MAX_CONTENT_LENGTH'] = 900 * 1024 * 1024
|
||||
|
||||
# ========= Cookie name ========
|
||||
app.config.update(SESSION_COOKIE_NAME='d4_project_server{}'.format(uuid.uuid4().int))
|
||||
|
||||
# ========= session ========
|
||||
app.secret_key = str(random.getrandbits(256))
|
||||
login_manager = LoginManager()
|
||||
login_manager.login_view = 'login'
|
||||
login_manager.init_app(app)
|
||||
# ========= =========#
|
||||
|
||||
# ========= BLUEPRINT =========#
|
||||
app.register_blueprint(restApi)
|
||||
app.register_blueprint(settings)
|
||||
app.register_blueprint(analyzer_queue)
|
||||
app.register_blueprint(D4_sensors)
|
||||
# ========= =========#
|
||||
|
||||
# ========= LOGIN MANAGER ========
|
||||
|
||||
@login_manager.user_loader
|
||||
def load_user(user_id):
|
||||
return User.get(user_id)
|
||||
# ========= =========#
|
||||
|
||||
# ========== FUNCTIONS ============
|
||||
def is_valid_uuid_v4(header_uuid):
|
||||
try:
|
||||
|
@ -117,19 +183,202 @@ def get_substract_date_range(num_day, date_from=None):
|
|||
l_date.append( date.strftime('%Y%m%d') )
|
||||
return list(reversed(l_date))
|
||||
|
||||
def get_uuid_all_types_disk(uuid_name):
|
||||
uuid_data_directory = os.path.join(data_directory, uuid_name)
|
||||
all_types_on_disk = []
|
||||
# Get all types save on disk
|
||||
for file in os.listdir(uuid_data_directory):
|
||||
uuid_type_path = os.path.join(uuid_data_directory, file)
|
||||
if os.path.isdir(uuid_type_path):
|
||||
all_types_on_disk.append(file)
|
||||
return all_types_on_disk
|
||||
|
||||
def get_uuid_disk_statistics(uuid_name, date_day='', type='', all_types_on_disk=[], all_stats=True):
|
||||
# # TODO: escape uuid_name
|
||||
|
||||
stat_disk_uuid = {}
|
||||
uuid_data_directory = os.path.join(data_directory, uuid_name)
|
||||
if date_day:
|
||||
directory_date = os.path.join(date_day[0:4], date_day[4:6], date_day[6:8])
|
||||
all_types_on_disk = {}
|
||||
|
||||
if all_types_on_disk:
|
||||
for type in all_types_on_disk:
|
||||
if date_day:
|
||||
uuid_type_path = os.path.join(uuid_data_directory, type, directory_date)
|
||||
else:
|
||||
uuid_type_path = os.path.join(uuid_data_directory, type)
|
||||
all_types_on_disk[type] = uuid_type_path
|
||||
else:
|
||||
# Get all types save on disk
|
||||
if os.path.isdir(uuid_data_directory):
|
||||
for file in os.listdir(uuid_data_directory):
|
||||
if date_day:
|
||||
uuid_type_path = os.path.join(uuid_data_directory, file, directory_date)
|
||||
else:
|
||||
uuid_type_path = os.path.join(uuid_data_directory, file)
|
||||
if os.path.isdir(uuid_type_path):
|
||||
all_types_on_disk[file] = uuid_type_path
|
||||
|
||||
nb_file = 0
|
||||
total_size = 0
|
||||
|
||||
for uuid_type in all_types_on_disk:
|
||||
nb_file_type = 0
|
||||
total_size_type = 0
|
||||
for dirpath, dirnames, filenames in os.walk(all_types_on_disk[uuid_type]):
|
||||
stat_disk_uuid[uuid_type] = {}
|
||||
for f in filenames:
|
||||
fp = os.path.join(dirpath, f)
|
||||
file_size = os.path.getsize(fp)
|
||||
total_size_type += file_size
|
||||
total_size += file_size
|
||||
nb_file_type += 1
|
||||
nb_file += 1
|
||||
stat_disk_uuid[uuid_type]['nb_files'] = nb_file_type
|
||||
stat_disk_uuid[uuid_type]['total_size'] = total_size_type
|
||||
if all_stats:
|
||||
stat_all = {}
|
||||
stat_all['nb_files'] = nb_file
|
||||
stat_all['total_size'] = total_size
|
||||
stat_disk_uuid['All'] = stat_all
|
||||
return stat_disk_uuid
|
||||
|
||||
# ========== ERRORS ============
|
||||
|
||||
@app.errorhandler(404)
|
||||
def page_not_found(e):
|
||||
# API - JSON
|
||||
if request.path.startswith('/api/'):
|
||||
return Response(json.dumps({"status": "error", "reason": "404 Not Found"}, indent=2, sort_keys=True), mimetype='application/json'), 404
|
||||
# UI - HTML Template
|
||||
else:
|
||||
return render_template('404.html'), 404
|
||||
|
||||
@app.errorhandler(405)
|
||||
def _handle_client_error(e):
|
||||
if request.path.startswith('/api/'):
|
||||
res_dict = {"status": "error", "reason": "Method Not Allowed: The method is not allowed for the requested URL"}
|
||||
anchor_id = request.path[8:]
|
||||
anchor_id = anchor_id.replace('/', '_')
|
||||
api_doc_url = 'https://d4-project.org#{}'.format(anchor_id)
|
||||
res_dict['documentation'] = api_doc_url
|
||||
return Response(json.dumps(res_dict, indent=2, sort_keys=True), mimetype='application/json'), 405
|
||||
else:
|
||||
return
|
||||
|
||||
# ========== ROUTES ============
|
||||
@app.route('/login', methods=['POST', 'GET'])
|
||||
def login():
|
||||
|
||||
current_ip = request.remote_addr
|
||||
login_failed_ip = r_cache.get('failed_login_ip:{}'.format(current_ip))
|
||||
|
||||
# brute force by ip
|
||||
if login_failed_ip:
|
||||
login_failed_ip = int(login_failed_ip)
|
||||
if login_failed_ip >= 5:
|
||||
error = 'Max Connection Attempts reached, Please wait {}s'.format(r_cache.ttl('failed_login_ip:{}'.format(current_ip)))
|
||||
return render_template("login.html", error=error)
|
||||
|
||||
if request.method == 'POST':
|
||||
username = request.form.get('username')
|
||||
password = request.form.get('password')
|
||||
next_page = request.form.get('next_page')
|
||||
|
||||
if username is not None:
|
||||
user = User.get(username)
|
||||
|
||||
login_failed_user_id = r_cache.get('failed_login_user_id:{}'.format(username))
|
||||
# brute force by user_id
|
||||
if login_failed_user_id:
|
||||
login_failed_user_id = int(login_failed_user_id)
|
||||
if login_failed_user_id >= 5:
|
||||
error = 'Max Connection Attempts reached, Please wait {}s'.format(r_cache.ttl('failed_login_user_id:{}'.format(username)))
|
||||
return render_template("login.html", error=error)
|
||||
|
||||
if user and user.check_password(password):
|
||||
#if not check_user_role_integrity(user.get_id()):
|
||||
# error = 'Incorrect User ACL, Please contact your administrator'
|
||||
# return render_template("login.html", error=error)
|
||||
if not user.is_in_role('user'):
|
||||
return render_template("403.html"), 403
|
||||
login_user(user) ## TODO: use remember me ?
|
||||
if user.request_password_change():
|
||||
return redirect(url_for('change_password'))
|
||||
else:
|
||||
if next_page and next_page!='None':
|
||||
return redirect(next_page)
|
||||
else:
|
||||
return redirect(url_for('index'))
|
||||
# login failed
|
||||
else:
|
||||
# set brute force protection
|
||||
#logger.warning("Login failed, ip={}, username={}".format(current_ip, username))
|
||||
r_cache.incr('failed_login_ip:{}'.format(current_ip))
|
||||
r_cache.expire('failed_login_ip:{}'.format(current_ip), 300)
|
||||
r_cache.incr('failed_login_user_id:{}'.format(username))
|
||||
r_cache.expire('failed_login_user_id:{}'.format(username), 300)
|
||||
|
||||
error = 'Password Incorrect'
|
||||
return render_template("login.html", error=error)
|
||||
|
||||
return 'please provide a valid username'
|
||||
|
||||
else:
|
||||
next_page = request.args.get('next')
|
||||
error = request.args.get('error')
|
||||
return render_template("login.html" , error=error, next_page=next_page)
|
||||
|
||||
@app.route('/change_password', methods=['POST', 'GET'])
|
||||
@login_required
|
||||
@login_user_basic
|
||||
def change_password():
|
||||
password1 = request.form.get('password1')
|
||||
password2 = request.form.get('password2')
|
||||
error = request.args.get('error')
|
||||
|
||||
if error:
|
||||
return render_template("change_password.html", error=error)
|
||||
|
||||
if current_user.is_authenticated and password1!=None:
|
||||
if password1==password2:
|
||||
if check_password_strength(password1):
|
||||
user_id = current_user.get_id()
|
||||
create_user_db(user_id , password1, update=True)
|
||||
return redirect(url_for('index'))
|
||||
else:
|
||||
error = 'Incorrect password'
|
||||
return render_template("change_password.html", error=error)
|
||||
else:
|
||||
error = "Passwords don't match"
|
||||
return render_template("change_password.html", error=error)
|
||||
else:
|
||||
error = 'Please choose a new password'
|
||||
return render_template("change_password.html", error=error)
|
||||
|
||||
@app.route('/logout')
|
||||
@login_required
|
||||
def logout():
|
||||
logout_user()
|
||||
return redirect(url_for('login'))
|
||||
|
||||
# role error template
|
||||
@app.route('/role', methods=['POST', 'GET'])
|
||||
@login_required
|
||||
def role():
|
||||
return render_template("403.html"), 403
|
||||
|
||||
@app.route('/')
|
||||
@login_required
|
||||
@login_user_basic
|
||||
def index():
|
||||
date = datetime.datetime.now().strftime("%Y/%m/%d")
|
||||
return render_template("index.html", date=date)
|
||||
|
||||
@app.route('/_json_daily_uuid_stats')
|
||||
@login_required
|
||||
@login_user_basic
|
||||
def _json_daily_uuid_stats():
|
||||
date = datetime.datetime.now().strftime("%Y%m%d")
|
||||
daily_uuid = redis_server_metadata.zrange('daily_uuid:{}'.format(date), 0, -1, withscores=True)
|
||||
|
@ -141,6 +390,8 @@ def _json_daily_uuid_stats():
|
|||
return jsonify(data_daily_uuid)
|
||||
|
||||
@app.route('/_json_daily_type_stats')
|
||||
@login_required
|
||||
@login_user_basic
|
||||
def _json_daily_type_stats():
|
||||
date = datetime.datetime.now().strftime("%Y%m%d")
|
||||
daily_uuid = redis_server_metadata.zrange('daily_type:{}'.format(date), 0, -1, withscores=True)
|
||||
|
@ -157,6 +408,8 @@ def _json_daily_type_stats():
|
|||
return jsonify(data_daily_uuid)
|
||||
|
||||
@app.route('/sensors_status')
|
||||
@login_required
|
||||
@login_user_basic
|
||||
def sensors_status():
|
||||
active_connection_filter = request.args.get('active_connection_filter')
|
||||
if active_connection_filter is None:
|
||||
|
@ -174,12 +427,44 @@ def sensors_status():
|
|||
else:
|
||||
daily_uuid = redis_server_stream.smembers('active_connection')
|
||||
|
||||
type_description_json = get_json_type_description()
|
||||
|
||||
status_daily_uuid = []
|
||||
types_description = {}
|
||||
for result in daily_uuid:
|
||||
first_seen = redis_server_metadata.hget('metadata_uuid:{}'.format(result), 'first_seen')
|
||||
first_seen_gmt = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(int(first_seen)))
|
||||
last_seen = redis_server_metadata.hget('metadata_uuid:{}'.format(result), 'last_seen')
|
||||
last_seen_gmt = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(int(last_seen)))
|
||||
description = redis_server_metadata.hget('metadata_uuid:{}'.format(result), 'description')
|
||||
if not description:
|
||||
description = ''
|
||||
type_connection_status = {}
|
||||
l_uuid_types = []
|
||||
l_uuid_typ = redis_server_metadata.smembers('all_types_by_uuid:{}'.format(result))
|
||||
for type in l_uuid_typ:
|
||||
type = int(type)
|
||||
if redis_server_stream.sismember('active_connection:{}'.format(type), result):
|
||||
type_connection_status[type] = True
|
||||
else:
|
||||
type_connection_status[type] = False
|
||||
l_uuid_types.append(type)
|
||||
if type not in types_description:
|
||||
types_description[type] = type_description_json[type]['description']
|
||||
if not types_description[type]:
|
||||
types_description[type] = 'please update your web server'
|
||||
|
||||
l_uuid_types.sort()
|
||||
if 254 in l_uuid_types:
|
||||
extended_type = list(redis_server_metadata.smembers('all_extended_types_by_uuid:{}'.format(result)))
|
||||
extended_type.sort()
|
||||
for extended in extended_type:
|
||||
if redis_server_stream.sismember('active_connection_extended_type:{}'.format(result), extended):
|
||||
type_connection_status[extended] = True
|
||||
else:
|
||||
type_connection_status[extended] = False
|
||||
types_description[extended] = ''
|
||||
l_uuid_types.extend(extended_type)
|
||||
if redis_server_metadata.sismember('blacklist_ip_by_uuid', result):
|
||||
Error = "All IP using this UUID are Blacklisted"
|
||||
elif redis_server_metadata.sismember('blacklist_uuid', result):
|
||||
|
@ -192,14 +477,20 @@ def sensors_status():
|
|||
active_connection = False
|
||||
|
||||
if first_seen is not None and last_seen is not None:
|
||||
status_daily_uuid.append({"uuid": result,"first_seen": first_seen, "last_seen": last_seen,
|
||||
status_daily_uuid.append({"uuid": result,
|
||||
"active_connection": active_connection,
|
||||
"first_seen_gmt": first_seen_gmt, "last_seen_gmt": last_seen_gmt, "Error": Error})
|
||||
"type_connection_status": type_connection_status,
|
||||
"description": description,
|
||||
"first_seen_gmt": first_seen_gmt, "last_seen_gmt": last_seen_gmt,
|
||||
"l_uuid_types": l_uuid_types, "Error": Error})
|
||||
|
||||
return render_template("sensors_status.html", status_daily_uuid=status_daily_uuid,
|
||||
types_description=types_description,
|
||||
active_connection_filter=active_connection_filter)
|
||||
|
||||
@app.route('/show_active_uuid')
|
||||
@login_required
|
||||
@login_user_basic
|
||||
def show_active_uuid():
|
||||
#swap switch value
|
||||
active_connection_filter = request.args.get('show_active_connection')
|
||||
|
@ -214,7 +505,12 @@ def show_active_uuid():
|
|||
return redirect(url_for('sensors_status', active_connection_filter=active_connection_filter))
|
||||
|
||||
@app.route('/server_management')
|
||||
@login_required
|
||||
@login_user_basic
|
||||
def server_management():
|
||||
nb_sensors_registered = Sensor.get_nb_registered_sensors()
|
||||
nb_sensors_pending = Sensor.get_nb_pending_sensor()
|
||||
|
||||
blacklisted_ip = request.args.get('blacklisted_ip')
|
||||
unblacklisted_ip = request.args.get('unblacklisted_ip')
|
||||
blacklisted_uuid = request.args.get('blacklisted_uuid')
|
||||
|
@ -236,62 +532,56 @@ def server_management():
|
|||
description = 'Please update your web server'
|
||||
|
||||
list_analyzer_uuid = []
|
||||
for analyzer_uuid in redis_server_metadata.smembers('analyzer:{}'.format(type)):
|
||||
size_limit = redis_server_metadata.hget('analyzer:{}'.format(analyzer_uuid), 'max_size')
|
||||
if size_limit is None:
|
||||
size_limit = analyzer_list_max_default_size
|
||||
last_updated = redis_server_metadata.hget('analyzer:{}'.format(analyzer_uuid), 'last_updated')
|
||||
if last_updated is None:
|
||||
last_updated = 'Never'
|
||||
else:
|
||||
last_updated = datetime.datetime.fromtimestamp(float(last_updated)).strftime('%Y-%m-%d %H:%M:%S')
|
||||
description_analyzer = redis_server_metadata.hget('analyzer:{}'.format(analyzer_uuid), 'description')
|
||||
if description_analyzer is None:
|
||||
description_analyzer = ''
|
||||
len_queue = redis_server_analyzer.llen('analyzer:{}:{}'.format(type, analyzer_uuid))
|
||||
if len_queue is None:
|
||||
len_queue = 0
|
||||
list_analyzer_uuid.append({'uuid': analyzer_uuid, 'description': description_analyzer, 'size_limit': size_limit,'last_updated': last_updated, 'length': len_queue})
|
||||
for analyzer_uuid in Analyzer_Queue.get_all_queues_by_type(type):
|
||||
list_analyzer_uuid.append(Analyzer_Queue.get_queue_metadata(analyzer_uuid, format_type=type))
|
||||
|
||||
for analyzer_uuid in Analyzer_Queue.get_all_queues_group_by_type(type):
|
||||
list_analyzer_uuid.append(Analyzer_Queue.get_queue_metadata(analyzer_uuid, format_type=type, force_is_group_queue=True))
|
||||
|
||||
list_accepted_types.append({"id": int(type), "description": description, 'list_analyzer_uuid': list_analyzer_uuid})
|
||||
|
||||
list_accepted_extended_types = []
|
||||
l_queue_extended_type = []
|
||||
for extended_type in redis_server_metadata.smembers('server:accepted_extended_type'):
|
||||
list_accepted_extended_types.append({"name": extended_type, 'list_analyzer_uuid': []})
|
||||
|
||||
list_analyzer_uuid = []
|
||||
for analyzer_uuid in redis_server_metadata.smembers('analyzer:254:{}'.format(extended_type)):
|
||||
size_limit = redis_server_metadata.hget('analyzer:{}'.format(analyzer_uuid), 'max_size')
|
||||
if size_limit is None:
|
||||
size_limit = analyzer_list_max_default_size
|
||||
last_updated = redis_server_metadata.hget('analyzer:{}'.format(analyzer_uuid), 'last_updated')
|
||||
if last_updated is None:
|
||||
last_updated = 'Never'
|
||||
else:
|
||||
last_updated = datetime.datetime.fromtimestamp(float(last_updated)).strftime('%Y-%m-%d %H:%M:%S')
|
||||
description_analyzer = redis_server_metadata.hget('analyzer:{}'.format(analyzer_uuid), 'description')
|
||||
if description_analyzer is None:
|
||||
description_analyzer = ''
|
||||
len_queue = redis_server_analyzer.llen('analyzer:{}:{}'.format(extended_type, analyzer_uuid))
|
||||
if len_queue is None:
|
||||
len_queue = 0
|
||||
list_analyzer_uuid.append({'uuid': analyzer_uuid, 'description': description_analyzer, 'size_limit': size_limit,'last_updated': last_updated, 'length': len_queue})
|
||||
for extended_queue_uuid in Analyzer_Queue.get_all_queues_by_extended_type(extended_type):
|
||||
l_queue_extended_type.append(Analyzer_Queue.get_queue_metadata(extended_queue_uuid, format_type=254, extended_type=extended_type))
|
||||
|
||||
for extended_queue_uuid in Analyzer_Queue.get_all_queues_group_by_extended_type(extended_type):
|
||||
l_queue_extended_type.append(Analyzer_Queue.get_queue_metadata(extended_queue_uuid, format_type=254, extended_type=extended_type, force_is_group_queue=True))
|
||||
|
||||
list_accepted_extended_types.append({"name": extended_type, 'list_analyzer_uuid': list_analyzer_uuid})
|
||||
|
||||
return render_template("server_management.html", list_accepted_types=list_accepted_types, list_accepted_extended_types=list_accepted_extended_types,
|
||||
server_mode=server_mode,
|
||||
l_queue_extended_type=l_queue_extended_type,
|
||||
nb_sensors_registered=nb_sensors_registered, nb_sensors_pending=nb_sensors_pending,
|
||||
default_analyzer_max_line_len=default_analyzer_max_line_len,
|
||||
blacklisted_ip=blacklisted_ip, unblacklisted_ip=unblacklisted_ip,
|
||||
blacklisted_uuid=blacklisted_uuid, unblacklisted_uuid=unblacklisted_uuid)
|
||||
|
||||
@app.route('/uuid_management')
|
||||
@login_required
|
||||
@login_user_basic
|
||||
def uuid_management():
|
||||
uuid_sensor = request.args.get('uuid')
|
||||
if is_valid_uuid_v4(uuid_sensor):
|
||||
uuid_sensor = uuid_sensor.replace('-', '')
|
||||
|
||||
disk_stats = get_uuid_disk_statistics(uuid_sensor)
|
||||
first_seen = redis_server_metadata.hget('metadata_uuid:{}'.format(uuid_sensor), 'first_seen')
|
||||
if first_seen:
|
||||
first_seen_gmt = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(int(first_seen)))
|
||||
else:
|
||||
first_seen_gmt = '-'
|
||||
last_seen = redis_server_metadata.hget('metadata_uuid:{}'.format(uuid_sensor), 'last_seen')
|
||||
if last_seen:
|
||||
last_seen_gmt = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(int(last_seen)))
|
||||
else:
|
||||
last_seen_gmt = '-'
|
||||
description = redis_server_metadata.hget('metadata_uuid:{}'.format(uuid_sensor), 'description')
|
||||
if not description:
|
||||
description = ''
|
||||
Error = redis_server_metadata.hget('metadata_uuid:{}'.format(uuid_sensor), 'Error')
|
||||
if redis_server_stream.exists('temp_blacklist_uuid:{}'.format(uuid_sensor)):
|
||||
temp_blacklist_uuid = True
|
||||
|
@ -307,11 +597,13 @@ def uuid_management():
|
|||
Error = "All IP using this UUID are Blacklisted"
|
||||
else:
|
||||
blacklisted_ip_by_uuid = False
|
||||
data_uuid= {"first_seen": first_seen, "last_seen": last_seen,
|
||||
data_uuid= {"description": description,
|
||||
"temp_blacklist_uuid": temp_blacklist_uuid,
|
||||
"blacklisted_uuid": blacklisted_uuid, "blacklisted_ip_by_uuid": blacklisted_ip_by_uuid,
|
||||
"first_seen_gmt": first_seen_gmt, "last_seen_gmt": last_seen_gmt, "Error": Error}
|
||||
|
||||
data_uuid['is_monitored'] = Sensor.is_sensor_monitored(uuid_sensor)
|
||||
|
||||
if redis_server_stream.sismember('active_connection', uuid_sensor):
|
||||
active_connection = True
|
||||
else:
|
||||
|
@ -346,11 +638,14 @@ def uuid_management():
|
|||
|
||||
return render_template("uuid_management.html", uuid_sensor=uuid_sensor, active_connection=active_connection,
|
||||
uuid_key=uuid_key, data_uuid=data_uuid, uuid_all_type=uuid_all_type_list,
|
||||
disk_stats=disk_stats,
|
||||
max_uuid_stream=max_uuid_stream, all_ip=all_ip)
|
||||
else:
|
||||
return 'Invalid uuid'
|
||||
|
||||
@app.route('/blacklisted_ip')
|
||||
@login_required
|
||||
@login_user_basic
|
||||
def blacklisted_ip():
|
||||
blacklisted_ip = request.args.get('blacklisted_ip')
|
||||
unblacklisted_ip = request.args.get('unblacklisted_ip')
|
||||
|
@ -376,6 +671,8 @@ def blacklisted_ip():
|
|||
unblacklisted_ip=unblacklisted_ip, blacklisted_ip=blacklisted_ip)
|
||||
|
||||
@app.route('/blacklisted_uuid')
|
||||
@login_required
|
||||
@login_user_basic
|
||||
def blacklisted_uuid():
|
||||
blacklisted_uuid = request.args.get('blacklisted_uuid')
|
||||
unblacklisted_uuid = request.args.get('unblacklisted_uuid')
|
||||
|
@ -400,8 +697,62 @@ def blacklisted_uuid():
|
|||
page=page, nb_page_max=nb_page_max,
|
||||
unblacklisted_uuid=unblacklisted_uuid, blacklisted_uuid=blacklisted_uuid)
|
||||
|
||||
@app.route('/server/registered_sensor')
|
||||
@login_required
|
||||
@login_admin
|
||||
def registered_sensor():
|
||||
sensors = Sensor.get_registered_sensors()
|
||||
all_sensors = []
|
||||
for sensor_uuid in sensors:
|
||||
all_sensors.append(Sensor._get_sensor_metadata(sensor_uuid, time_format='gmt', sensor_types=True))
|
||||
return render_template("registered_sensors.html", all_sensors=all_sensors)
|
||||
|
||||
@app.route('/server/pending_sensor')
|
||||
@login_required
|
||||
@login_admin
|
||||
def pending_sensors():
|
||||
sensors = Sensor.get_pending_sensor()
|
||||
all_pending = []
|
||||
for sensor_uuid in sensors:
|
||||
all_pending.append(Sensor._get_sensor_metadata(sensor_uuid, first_seen=False, last_seen=False))
|
||||
return render_template("pending_sensor.html", all_pending=all_pending)
|
||||
|
||||
@app.route('/server/approve_sensor')
|
||||
@login_required
|
||||
@login_admin
|
||||
def approve_sensor():
|
||||
uuid_sensor = request.args.get('uuid')
|
||||
res = Sensor.approve_sensor({'uuid': uuid_sensor})
|
||||
if res[1] == 200:
|
||||
return redirect(url_for('pending_sensors'))
|
||||
else:
|
||||
return jsonify(res[0])
|
||||
|
||||
@app.route('/server/delete_pending_sensor')
|
||||
@login_required
|
||||
@login_admin
|
||||
def delete_pending_sensor():
|
||||
uuid_sensor = request.args.get('uuid')
|
||||
res = Sensor.delete_pending_sensor({'uuid': uuid_sensor})
|
||||
if res[1] == 200:
|
||||
return redirect(url_for('pending_sensors'))
|
||||
else:
|
||||
return jsonify(res[0])
|
||||
|
||||
@app.route('/server/delete_registered_sensor')
|
||||
@login_required
|
||||
@login_admin
|
||||
def delete_registered_sensor():
|
||||
uuid_sensor = request.args.get('uuid')
|
||||
res = Sensor.delete_registered_sensor({'uuid': uuid_sensor})
|
||||
if res[1] == 200:
|
||||
return redirect(url_for('registered_sensor'))
|
||||
else:
|
||||
return jsonify(res[0])
|
||||
|
||||
@app.route('/uuid_change_stream_max_size')
|
||||
@login_required
|
||||
@login_user_basic
|
||||
def uuid_change_stream_max_size():
|
||||
uuid_sensor = request.args.get('uuid')
|
||||
user = request.args.get('redirect')
|
||||
|
@ -419,99 +770,67 @@ def uuid_change_stream_max_size():
|
|||
else:
|
||||
return 'Invalid uuid'
|
||||
|
||||
# # TODO: check analyser uuid dont exist
|
||||
@app.route('/add_new_analyzer')
|
||||
def add_new_analyzer():
|
||||
type = request.args.get('type')
|
||||
user = request.args.get('redirect')
|
||||
metatype_name = request.args.get('metatype_name')
|
||||
analyzer_description = request.args.get('analyzer_description')
|
||||
analyzer_uuid = request.args.get('analyzer_uuid')
|
||||
if is_valid_uuid_v4(analyzer_uuid):
|
||||
try:
|
||||
type = int(type)
|
||||
if type < 0:
|
||||
return 'type, Invalid Integer'
|
||||
except:
|
||||
return 'type, Invalid Integer'
|
||||
if type == 254:
|
||||
# # TODO: check metatype_name
|
||||
redis_server_metadata.sadd('analyzer:{}:{}'.format(type, metatype_name), analyzer_uuid)
|
||||
@app.route('/uuid_change_description')
|
||||
@login_required
|
||||
@login_user_basic
|
||||
def uuid_change_description():
|
||||
uuid_sensor = request.args.get('uuid')
|
||||
description = request.args.get('description')
|
||||
if is_valid_uuid_v4(uuid_sensor):
|
||||
redis_server_metadata.hset('metadata_uuid:{}'.format(uuid_sensor), 'description', description)
|
||||
return jsonify()
|
||||
else:
|
||||
redis_server_metadata.sadd('analyzer:{}'.format(type), analyzer_uuid)
|
||||
if redis_server_metadata.exists('analyzer:{}:{}'.format(type, metatype_name)) or redis_server_metadata.exists('analyzer:{}'.format(type)):
|
||||
redis_server_metadata.hset('analyzer:{}'.format(analyzer_uuid), 'description', analyzer_description)
|
||||
if user:
|
||||
return redirect(url_for('server_management'))
|
||||
else:
|
||||
return 'Invalid uuid'
|
||||
return jsonify({'error':'invalid uuid'}), 400
|
||||
|
||||
@app.route('/empty_analyzer_queue')
|
||||
@login_required
|
||||
@login_user_basic
|
||||
def empty_analyzer_queue():
|
||||
analyzer_uuid = request.args.get('analyzer_uuid')
|
||||
type = request.args.get('type')
|
||||
format_type = request.args.get('type')
|
||||
metatype_name = request.args.get('metatype_name')
|
||||
user = request.args.get('redirect')
|
||||
if is_valid_uuid_v4(analyzer_uuid):
|
||||
try:
|
||||
type = int(type)
|
||||
if type < 0:
|
||||
return 'type, Invalid Integer'
|
||||
except:
|
||||
return 'type, Invalid Integer'
|
||||
if type == 254:
|
||||
redis_server_analyzer.delete('analyzer:{}:{}'.format(metatype_name, analyzer_uuid))
|
||||
else:
|
||||
redis_server_analyzer.delete('analyzer:{}:{}'.format(type, analyzer_uuid))
|
||||
if format_type == 254:
|
||||
format_type = metatype_name
|
||||
Analyzer_Queue.flush_queue(analyzer_uuid, format_type)
|
||||
if user:
|
||||
return redirect(url_for('server_management'))
|
||||
else:
|
||||
return 'Invalid uuid'
|
||||
|
||||
@app.route('/remove_analyzer')
|
||||
@login_required
|
||||
@login_user_basic
|
||||
def remove_analyzer():
|
||||
analyzer_uuid = request.args.get('analyzer_uuid')
|
||||
type = request.args.get('type')
|
||||
format_type = request.args.get('type')
|
||||
metatype_name = request.args.get('metatype_name')
|
||||
user = request.args.get('redirect')
|
||||
if is_valid_uuid_v4(analyzer_uuid):
|
||||
try:
|
||||
type = int(type)
|
||||
if type < 0:
|
||||
return 'type, Invalid Integer'
|
||||
except:
|
||||
return 'type, Invalid Integer'
|
||||
if type == 254:
|
||||
redis_server_metadata.srem('analyzer:{}:{}'.format(type, metatype_name), analyzer_uuid)
|
||||
redis_server_analyzer.delete('analyzer:{}:{}'.format(metatype_name, analyzer_uuid))
|
||||
else:
|
||||
redis_server_metadata.srem('analyzer:{}'.format(type), analyzer_uuid)
|
||||
redis_server_analyzer.delete('analyzer:{}:{}'.format(type, analyzer_uuid))
|
||||
redis_server_metadata.delete('analyzer:{}'.format(analyzer_uuid))
|
||||
Analyzer_Queue.remove_queues(analyzer_uuid, format_type)
|
||||
if user:
|
||||
return redirect(url_for('server_management'))
|
||||
else:
|
||||
return 'Invalid uuid'
|
||||
|
||||
@app.route('/analyzer_change_max_size')
|
||||
@login_required
|
||||
@login_user_basic
|
||||
def analyzer_change_max_size():
|
||||
analyzer_uuid = request.args.get('analyzer_uuid')
|
||||
user = request.args.get('redirect')
|
||||
max_size_analyzer = request.args.get('max_size_analyzer')
|
||||
if is_valid_uuid_v4(analyzer_uuid):
|
||||
try:
|
||||
max_size_analyzer = int(max_size_analyzer)
|
||||
if max_size_analyzer < 0:
|
||||
return 'analyzer max size, Invalid Integer'
|
||||
except:
|
||||
return 'analyzer max size, Invalid Integer'
|
||||
redis_server_metadata.hset('analyzer:{}'.format(analyzer_uuid), 'max_size', max_size_analyzer)
|
||||
Analyzer_Queue.edit_queue_max_size(analyzer_uuid, max_size_analyzer)
|
||||
if user:
|
||||
return redirect(url_for('server_management'))
|
||||
else:
|
||||
return 'Invalid uuid'
|
||||
|
||||
@app.route('/kick_uuid')
|
||||
@login_required
|
||||
@login_user_basic
|
||||
def kick_uuid():
|
||||
uuid_sensor = request.args.get('uuid')
|
||||
if is_valid_uuid_v4(uuid_sensor):
|
||||
|
@ -521,6 +840,8 @@ def kick_uuid():
|
|||
return 'Invalid uuid'
|
||||
|
||||
@app.route('/blacklist_uuid')
|
||||
@login_required
|
||||
@login_user_basic
|
||||
def blacklist_uuid():
|
||||
uuid_sensor = request.args.get('uuid')
|
||||
user = request.args.get('redirect')
|
||||
|
@ -541,6 +862,8 @@ def blacklist_uuid():
|
|||
return 'Invalid uuid'
|
||||
|
||||
@app.route('/unblacklist_uuid')
|
||||
@login_required
|
||||
@login_user_basic
|
||||
def unblacklist_uuid():
|
||||
uuid_sensor = request.args.get('uuid')
|
||||
user = request.args.get('redirect')
|
||||
|
@ -564,6 +887,8 @@ def unblacklist_uuid():
|
|||
return 'Invalid uuid'
|
||||
|
||||
@app.route('/blacklist_ip')
|
||||
@login_required
|
||||
@login_user_basic
|
||||
def blacklist_ip():
|
||||
ip = request.args.get('ip')
|
||||
user = request.args.get('redirect')
|
||||
|
@ -589,6 +914,8 @@ def blacklist_ip():
|
|||
return 'Invalid ip'
|
||||
|
||||
@app.route('/unblacklist_ip')
|
||||
@login_required
|
||||
@login_user_basic
|
||||
def unblacklist_ip():
|
||||
ip = request.args.get('ip')
|
||||
user = request.args.get('redirect')
|
||||
|
@ -616,6 +943,8 @@ def unblacklist_ip():
|
|||
return 'Invalid ip'
|
||||
|
||||
@app.route('/blacklist_ip_by_uuid')
|
||||
@login_required
|
||||
@login_user_basic
|
||||
def blacklist_ip_by_uuid():
|
||||
uuid_sensor = request.args.get('uuid')
|
||||
user = request.args.get('redirect')
|
||||
|
@ -627,6 +956,8 @@ def blacklist_ip_by_uuid():
|
|||
return 'Invalid uuid'
|
||||
|
||||
@app.route('/unblacklist_ip_by_uuid')
|
||||
@login_required
|
||||
@login_user_basic
|
||||
def unblacklist_ip_by_uuid():
|
||||
uuid_sensor = request.args.get('uuid')
|
||||
user = request.args.get('redirect')
|
||||
|
@ -638,6 +969,8 @@ def unblacklist_ip_by_uuid():
|
|||
return 'Invalid uuid'
|
||||
|
||||
@app.route('/add_accepted_type')
|
||||
@login_required
|
||||
@login_user_basic
|
||||
def add_accepted_type():
|
||||
type = request.args.get('type')
|
||||
extended_type_name = request.args.get('extended_type_name')
|
||||
|
@ -657,6 +990,8 @@ def add_accepted_type():
|
|||
return 'Invalid type'
|
||||
|
||||
@app.route('/remove_accepted_type')
|
||||
@login_required
|
||||
@login_user_basic
|
||||
def remove_accepted_type():
|
||||
type = request.args.get('type')
|
||||
user = request.args.get('redirect')
|
||||
|
@ -669,6 +1004,8 @@ def remove_accepted_type():
|
|||
return 'Invalid type'
|
||||
|
||||
@app.route('/remove_accepted_extended_type')
|
||||
@login_required
|
||||
@login_user_basic
|
||||
def remove_accepted_extended_type():
|
||||
type_name = request.args.get('type_name')
|
||||
redis_server_metadata.srem('server:accepted_extended_type', type_name)
|
||||
|
@ -676,6 +1013,8 @@ def remove_accepted_extended_type():
|
|||
|
||||
# demo function
|
||||
@app.route('/delete_data')
|
||||
@login_required
|
||||
@login_user_basic
|
||||
def delete_data():
|
||||
date = datetime.datetime.now().strftime("%Y%m%d")
|
||||
redis_server_metadata.delete('daily_type:{}'.format(date))
|
||||
|
@ -684,17 +1023,24 @@ def delete_data():
|
|||
|
||||
# demo function
|
||||
@app.route('/set_uuid_hmac_key')
|
||||
@login_required
|
||||
@login_user_basic
|
||||
def set_uuid_hmac_key():
|
||||
uuid_sensor = request.args.get('uuid')
|
||||
user = request.args.get('redirect')
|
||||
key = request.args.get('key')
|
||||
redis_server_metadata.hset('metadata_uuid:{}'.format(uuid_sensor), 'hmac_key', key)
|
||||
hmac_key = escape(key)
|
||||
if len(hmac_key)>100:
|
||||
hmac_key=hmac_key[:100]
|
||||
redis_server_metadata.hset('metadata_uuid:{}'.format(uuid_sensor), 'hmac_key', hmac_key)
|
||||
if user:
|
||||
return redirect(url_for('uuid_management', uuid=uuid_sensor))
|
||||
|
||||
|
||||
# demo function
|
||||
@app.route('/whois_data')
|
||||
@login_required
|
||||
@login_user_basic
|
||||
def whois_data():
|
||||
ip = request.args.get('ip')
|
||||
if is_valid_ip:
|
||||
|
@ -703,11 +1049,15 @@ def whois_data():
|
|||
return 'Invalid IP'
|
||||
|
||||
@app.route('/generate_uuid')
|
||||
@login_required
|
||||
@login_user_basic
|
||||
def generate_uuid():
|
||||
new_uuid = uuid.uuid4()
|
||||
return jsonify({'uuid': new_uuid})
|
||||
|
||||
@app.route('/get_analyser_sample')
|
||||
@login_required
|
||||
@login_user_basic
|
||||
def get_analyser_sample():
|
||||
type = request.args.get('type')
|
||||
analyzer_uuid = request.args.get('analyzer_uuid')
|
||||
|
@ -735,6 +1085,8 @@ def get_analyser_sample():
|
|||
return jsonify('Incorrect UUID')
|
||||
|
||||
@app.route('/get_uuid_type_history_json')
|
||||
@login_required
|
||||
@login_user_basic
|
||||
def get_uuid_type_history_json():
|
||||
uuid_sensor = request.args.get('uuid_sensor')
|
||||
if is_valid_uuid_v4(uuid_sensor):
|
||||
|
@ -764,8 +1116,38 @@ def get_uuid_type_history_json():
|
|||
else:
|
||||
return jsonify('Incorrect UUID')
|
||||
|
||||
@app.route('/get_uuid_stats_history_json')
|
||||
@login_required
|
||||
@login_user_basic
|
||||
def get_uuid_stats_history_json():
|
||||
uuid_sensor = request.args.get('uuid_sensor')
|
||||
stats = request.args.get('stats')
|
||||
if is_valid_uuid_v4(uuid_sensor):
|
||||
if stats not in ['nb_files', 'total_size']:
|
||||
stats = 'nb_files'
|
||||
|
||||
num_day_type = 7
|
||||
date_range = get_substract_date_range(num_day_type)
|
||||
stat_type_history = []
|
||||
range_decoder = []
|
||||
all_type = get_uuid_all_types_disk(uuid_sensor)
|
||||
|
||||
default_dict_type = {}
|
||||
for type in all_type:
|
||||
default_dict_type[type] = 0
|
||||
|
||||
for date in date_range:
|
||||
day_type = default_dict_type.copy()
|
||||
daily_stat = get_uuid_disk_statistics(uuid_sensor, date, all_types_on_disk=all_type, all_stats=False)
|
||||
day_type['date']= date[0:4] + '-' + date[4:6] + '-' + date[6:8]
|
||||
for type_key in daily_stat:
|
||||
day_type[type_key] += daily_stat[type_key][stats]
|
||||
stat_type_history.append(day_type)
|
||||
|
||||
return jsonify(stat_type_history)
|
||||
else:
|
||||
return jsonify('Incorrect UUID')
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
app.run(host='0.0.0.0', port=7000, threaded=True)
|
||||
app.run(host=FLASK_HOST, port=FLASK_PORT, threaded=True, ssl_context=ssl_context)
|
||||
|
|
|
@ -0,0 +1,184 @@
|
|||
#!/usr/bin/env python3
|
||||
# -*-coding:UTF-8 -*
|
||||
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import redis
|
||||
import bcrypt
|
||||
|
||||
from functools import wraps
|
||||
from flask_login import LoginManager, current_user, login_user, logout_user, login_required
|
||||
|
||||
from flask import request, current_app
|
||||
|
||||
sys.path.append(os.path.join(os.environ['D4_HOME'], 'lib/'))
|
||||
import ConfigLoader
|
||||
|
||||
login_manager = LoginManager()
|
||||
login_manager.login_view = 'role'
|
||||
|
||||
### Config ###
|
||||
config_loader = ConfigLoader.ConfigLoader()
|
||||
r_serv_db = config_loader.get_redis_conn("Redis_SERV")
|
||||
config_loader = None
|
||||
### ###
|
||||
|
||||
default_passwd_file = os.path.join(os.environ['D4_HOME'], 'DEFAULT_PASSWORD')
|
||||
|
||||
regex_password = r'^(?=(.*\d){2})(?=.*[a-z])(?=.*[A-Z]).{10,100}$'
|
||||
regex_password = re.compile(regex_password)
|
||||
|
||||
###############################################################
|
||||
############### CHECK ROLE ACCESS ##################
|
||||
###############################################################
|
||||
|
||||
def login_admin(func):
|
||||
@wraps(func)
|
||||
def decorated_view(*args, **kwargs):
|
||||
if not current_user.is_authenticated:
|
||||
return login_manager.unauthorized()
|
||||
elif (not current_user.is_in_role('admin')):
|
||||
return login_manager.unauthorized()
|
||||
return func(*args, **kwargs)
|
||||
return decorated_view
|
||||
|
||||
def login_user_basic(func):
|
||||
@wraps(func)
|
||||
def decorated_view(*args, **kwargs):
|
||||
if not current_user.is_authenticated:
|
||||
return login_manager.unauthorized()
|
||||
elif (not current_user.is_in_role('user')):
|
||||
return login_manager.unauthorized()
|
||||
return func(*args, **kwargs)
|
||||
return decorated_view
|
||||
|
||||
|
||||
|
||||
###############################################################
|
||||
###############################################################
|
||||
###############################################################
|
||||
|
||||
def gen_password(length=30, charset="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_!@#$%^&*()"):
|
||||
random_bytes = os.urandom(length)
|
||||
len_charset = len(charset)
|
||||
indices = [int(len_charset * (byte / 256.0)) for byte in random_bytes]
|
||||
return "".join([charset[index] for index in indices])
|
||||
|
||||
def gen_token(length=41, charset="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_"):
|
||||
random_bytes = os.urandom(length)
|
||||
len_charset = len(charset)
|
||||
indices = [int(len_charset * (byte / 256.0)) for byte in random_bytes]
|
||||
return "".join([charset[index] for index in indices])
|
||||
|
||||
def generate_new_token(user_id):
|
||||
# create user token
|
||||
current_token = r_serv_db.hget('user_metadata:{}'.format(user_id), 'token')
|
||||
if current_token:
|
||||
r_serv_db.hdel('user:tokens', current_token)
|
||||
token = gen_token(41)
|
||||
r_serv_db.hset('user:tokens', token, user_id)
|
||||
r_serv_db.hset('user_metadata:{}'.format(user_id), 'token', token)
|
||||
|
||||
def get_default_admin_token():
|
||||
if r_serv_db.exists('user_metadata:admin@admin.test'):
|
||||
return r_serv_db.hget('user_metadata:admin@admin.test', 'token')
|
||||
else:
|
||||
return ''
|
||||
|
||||
def create_user_db(username_id , password, default=False, role=None, update=False):
|
||||
password = password.encode()
|
||||
password_hash = hashing_password(password)
|
||||
|
||||
# create user token
|
||||
generate_new_token(username_id)
|
||||
|
||||
if update:
|
||||
r_serv_db.hdel('user_metadata:{}'.format(username_id), 'change_passwd')
|
||||
# remove default user password file
|
||||
if username_id=='admin@admin.test':
|
||||
os.remove(default_passwd_file)
|
||||
else:
|
||||
if default:
|
||||
r_serv_db.hset('user_metadata:{}'.format(username_id), 'change_passwd', 'True')
|
||||
if role:
|
||||
if role in get_all_role():
|
||||
for role_to_add in get_all_user_role(role):
|
||||
r_serv_db.sadd('user_role:{}'.format(role_to_add), username_id)
|
||||
r_serv_db.hset('user_metadata:{}'.format(username_id), 'role', role)
|
||||
|
||||
r_serv_db.hset('user:all', username_id, password_hash)
|
||||
|
||||
def edit_user_db(user_id, password=None, role=None):
|
||||
if password:
|
||||
password_hash = hashing_password(password.encode())
|
||||
r_serv_db.hset('user:all', user_id, password_hash)
|
||||
|
||||
current_role = r_serv_db.hget('user_metadata:{}'.format(user_id), 'role')
|
||||
if role != current_role:
|
||||
request_level = get_role_level(role)
|
||||
current_role = get_role_level(current_role)
|
||||
|
||||
if current_role < request_level:
|
||||
role_to_remove = get_user_role_by_range(current_role -1, request_level - 2)
|
||||
for role_id in role_to_remove:
|
||||
r_serv_db.srem('user_role:{}'.format(role_id), user_id)
|
||||
r_serv_db.hset('user_metadata:{}'.format(user_id), 'role', role)
|
||||
else:
|
||||
role_to_add = get_user_role_by_range(request_level -1, current_role)
|
||||
for role_id in role_to_add:
|
||||
r_serv_db.sadd('user_role:{}'.format(role_id), user_id)
|
||||
r_serv_db.hset('user_metadata:{}'.format(user_id), 'role', role)
|
||||
|
||||
def delete_user_db(user_id):
|
||||
if r_serv_db.exists('user_metadata:{}'.format(user_id)):
|
||||
role_to_remove =get_all_role()
|
||||
for role_id in role_to_remove:
|
||||
r_serv_db.srem('user_role:{}'.format(role_id), user_id)
|
||||
user_token = r_serv_db.hget('user_metadata:{}'.format(user_id), 'token')
|
||||
r_serv_db.hdel('user:tokens', user_token)
|
||||
r_serv_db.delete('user_metadata:{}'.format(user_id))
|
||||
r_serv_db.hdel('user:all', user_id)
|
||||
|
||||
def hashing_password(bytes_password):
|
||||
hashed = bcrypt.hashpw(bytes_password, bcrypt.gensalt())
|
||||
return hashed
|
||||
|
||||
def check_password_strength(password):
|
||||
result = regex_password.match(password)
|
||||
if result:
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
def get_all_role():
|
||||
return r_serv_db.zrange('d4:all_role', 0, -1)
|
||||
|
||||
def get_role_level(role):
|
||||
return int(r_serv_db.zscore('d4:all_role', role))
|
||||
|
||||
def get_all_user_role(user_role):
|
||||
current_role_val = get_role_level(user_role)
|
||||
return r_serv_db.zrangebyscore('d4:all_role', current_role_val, 50)
|
||||
|
||||
def get_all_user_upper_role(user_role):
|
||||
current_role_val = get_role_level(user_role)
|
||||
# remove one rank
|
||||
if current_role_val > 1:
|
||||
return r_serv_db.zrange('d4:all_role', 0, current_role_val -2)
|
||||
else:
|
||||
return []
|
||||
|
||||
def get_user_role_by_range(inf, sup):
|
||||
return r_serv_db.zrange('d4:all_role', inf, sup)
|
||||
|
||||
def get_user_role(user_id):
|
||||
return r_serv_db.hget('user_metadata:{}'.format(user_id), 'role')
|
||||
|
||||
def check_user_role_integrity(user_id):
|
||||
user_role = get_user_role(user_id)
|
||||
all_user_role = get_all_user_role(user_role)
|
||||
res = True
|
||||
if user_role not in all_user_role:
|
||||
return False
|
||||
return res
|
|
@ -0,0 +1,76 @@
|
|||
#!/usr/bin/env python3
|
||||
# -*-coding:UTF-8 -*
|
||||
|
||||
'''
|
||||
Flask functions and routes for all D4 sensors
|
||||
'''
|
||||
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import json
|
||||
import redis
|
||||
|
||||
sys.path.append(os.path.join(os.environ['D4_HOME'], 'lib'))
|
||||
import ConfigLoader
|
||||
import Sensor
|
||||
|
||||
from flask import Flask, render_template, jsonify, request, Blueprint, redirect, url_for, Response
|
||||
from flask_login import login_required, current_user
|
||||
|
||||
from Role_Manager import login_admin, login_user_basic
|
||||
|
||||
# ============ BLUEPRINT ============
|
||||
|
||||
D4_sensors = Blueprint('D4_sensors', __name__, template_folder='templates')
|
||||
|
||||
# ============ VARIABLES ============
|
||||
|
||||
### Config ###
|
||||
config_loader = ConfigLoader.ConfigLoader()
|
||||
r_serv_metadata = config_loader.get_redis_conn("Redis_METADATA")
|
||||
r_serv_db = config_loader.get_redis_conn("Redis_SERV")
|
||||
config_loader = None
|
||||
### ###
|
||||
|
||||
# ============ FUNCTIONS ============
|
||||
|
||||
|
||||
# ============= ROUTES ==============
|
||||
|
||||
@D4_sensors.route("/sensors/monitoring/add", methods=['GET'])
|
||||
@login_required
|
||||
@login_user_basic
|
||||
def add_sensor_to_monitor():
|
||||
sensor_uuid = request.args.get("uuid")
|
||||
return render_template("sensors/add_sensor_to_monitor.html",
|
||||
sensor_uuid=sensor_uuid)
|
||||
|
||||
@D4_sensors.route("/sensors/monitoring/add_post", methods=['POST'])
|
||||
@login_required
|
||||
@login_user_basic
|
||||
def add_sensor_to_monitor_post():
|
||||
sensor_uuid = request.form.get("uuid")
|
||||
delta_time = request.form.get("delta_time")
|
||||
res = Sensor.api_add_sensor_to_monitor({'uuid':sensor_uuid, 'delta_time': delta_time})
|
||||
if res:
|
||||
Response(json.dumps(res[0], indent=2, sort_keys=True), mimetype='application/json'), res[1]
|
||||
return redirect(url_for('uuid_management', uuid=sensor_uuid))
|
||||
|
||||
@D4_sensors.route("/sensors/monitoring/delete", methods=['GET'])
|
||||
@login_required
|
||||
@login_user_basic
|
||||
def delete_sensor_to_monitor():
|
||||
sensor_uuid = request.args.get("uuid")
|
||||
res = Sensor.api_delete_sensor_to_monitor({'uuid':sensor_uuid})
|
||||
if res:
|
||||
Response(json.dumps(res[0], indent=2, sort_keys=True), mimetype='application/json'), res[1]
|
||||
return redirect(url_for('uuid_management', uuid=sensor_uuid))
|
||||
|
||||
|
||||
@D4_sensors.route("/sensors/monitoring/errors", methods=['GET'])
|
||||
@login_required
|
||||
@login_user_basic
|
||||
def get_all_sensors_connection_errors():
|
||||
res = Sensor.api_get_all_sensors_connection_errors()
|
||||
return Response(json.dumps(res[0], indent=2, sort_keys=True), mimetype='application/json'), res[1]
|
|
@ -0,0 +1,122 @@
|
|||
#!/usr/bin/env python3
|
||||
# -*-coding:UTF-8 -*
|
||||
|
||||
'''
|
||||
Flask functions and routes for the rest api
|
||||
'''
|
||||
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import redis
|
||||
|
||||
sys.path.append(os.path.join(os.environ['D4_HOME'], 'lib'))
|
||||
import ConfigLoader
|
||||
import Analyzer_Queue
|
||||
|
||||
from flask import Flask, render_template, jsonify, request, Blueprint, redirect, url_for, Response
|
||||
from flask_login import login_required, current_user
|
||||
|
||||
from Role_Manager import login_admin, login_user_basic
|
||||
|
||||
# ============ BLUEPRINT ============
|
||||
|
||||
analyzer_queue = Blueprint('analyzer_queue', __name__, template_folder='templates')
|
||||
|
||||
# ============ VARIABLES ============
|
||||
|
||||
### Config ###
|
||||
config_loader = ConfigLoader.ConfigLoader()
|
||||
r_serv_metadata = config_loader.get_redis_conn("Redis_METADATA")
|
||||
r_serv_db = config_loader.get_redis_conn("Redis_SERV")
|
||||
config_loader = None
|
||||
### ###
|
||||
|
||||
# ============ FUNCTIONS ============
|
||||
|
||||
|
||||
# ============= ROUTES ==============
|
||||
|
||||
@analyzer_queue.route("/analyzer_queue/create_queue", methods=['GET'])
|
||||
@login_required
|
||||
@login_user_basic
|
||||
def create_analyzer_queue():
|
||||
return render_template("analyzer_queue/queue_creator.html")
|
||||
|
||||
@analyzer_queue.route("/analyzer_queue/create_queue_post", methods=['POST'])
|
||||
@login_required
|
||||
@login_user_basic
|
||||
def create_analyzer_queue_post():
|
||||
l_queue_meta = ['analyzer_type', 'analyzer_metatype', 'description', 'analyzer_uuid']
|
||||
queue_type = request.form.get("analyzer_type")
|
||||
queue_metatype = request.form.get("analyzer_metatype")
|
||||
queue_description = request.form.get("description")
|
||||
queue_uuid = request.form.get("analyzer_uuid")
|
||||
|
||||
queue_type = Analyzer_Queue.sanitize_queue_type(queue_type)
|
||||
|
||||
# unpack uuid group
|
||||
l_uuid = set()
|
||||
l_invalid_uuid = set()
|
||||
for obj_tuple in list(request.form):
|
||||
if obj_tuple not in l_queue_meta:
|
||||
sensor_uuid = request.form.get(obj_tuple)
|
||||
if Analyzer_Queue.is_valid_uuid_v4(sensor_uuid):
|
||||
l_uuid.add(sensor_uuid)
|
||||
else:
|
||||
if sensor_uuid:
|
||||
l_invalid_uuid.add(sensor_uuid)
|
||||
|
||||
l_uuid = list(l_uuid)
|
||||
l_invalid_uuid = list(l_invalid_uuid)
|
||||
if l_invalid_uuid:
|
||||
return render_template("analyzer_queue/queue_creator.html", queue_uuid=queue_uuid, queue_type=queue_type, metatype_name=queue_metatype,
|
||||
description=queue_description, l_uuid=l_uuid, l_invalid_uuid=l_invalid_uuid)
|
||||
|
||||
res = Analyzer_Queue.create_queues(queue_type, queue_uuid=queue_uuid, l_uuid=l_uuid, metatype_name=queue_metatype, description=queue_description)
|
||||
if isinstance(res,dict):
|
||||
return jsonify(res)
|
||||
if res:
|
||||
return redirect(url_for('server_management', _anchor=res))
|
||||
|
||||
@analyzer_queue.route("/analyzer_queue/edit_queue", methods=['GET'])
|
||||
@login_required
|
||||
@login_user_basic
|
||||
def edit_queue_analyzer_queue():
|
||||
queue_uuid = request.args.get("queue_uuid")
|
||||
queue_metadata = Analyzer_Queue.get_queue_metadata(queue_uuid, is_group=True)
|
||||
if 'is_group_queue' in queue_metadata:
|
||||
l_sensors_uuid = Analyzer_Queue.get_queue_group_all_sensors(queue_uuid)
|
||||
else:
|
||||
l_sensors_uuid = None
|
||||
return render_template("analyzer_queue/queue_editor.html", queue_metadata=queue_metadata, l_sensors_uuid=l_sensors_uuid)
|
||||
|
||||
@analyzer_queue.route("/analyzer_queue/edit_queue_post", methods=['POST'])
|
||||
@login_required
|
||||
@login_user_basic
|
||||
def edit_queue_analyzer_queue_post():
|
||||
l_queue_meta = ['queue_uuid', 'description']
|
||||
queue_uuid = request.form.get("queue_uuid")
|
||||
queue_description = request.form.get("description")
|
||||
|
||||
l_uuid = set()
|
||||
l_invalid_uuid = set()
|
||||
for obj_tuple in list(request.form):
|
||||
if obj_tuple not in l_queue_meta:
|
||||
sensor_uuid = request.form.get(obj_tuple)
|
||||
if Analyzer_Queue.is_valid_uuid_v4(sensor_uuid):
|
||||
l_uuid.add(sensor_uuid)
|
||||
else:
|
||||
if sensor_uuid:
|
||||
l_invalid_uuid.add(sensor_uuid)
|
||||
|
||||
if l_invalid_uuid:
|
||||
queue_metadata = Analyzer_Queue.get_queue_metadata(queue_uuid, is_group=True)
|
||||
if queue_description:
|
||||
queue_metadata['description'] = queue_description
|
||||
return render_template("analyzer_queue/queue_editor.html", queue_metadata=queue_metadata, l_sensors_uuid=l_uuid, l_invalid_uuid=l_invalid_uuid)
|
||||
|
||||
Analyzer_Queue.edit_queue_description(queue_uuid, queue_description)
|
||||
Analyzer_Queue.edit_queue_sensors_set(queue_uuid, l_uuid)
|
||||
|
||||
return redirect(url_for('analyzer_queue.edit_queue_analyzer_queue', queue_uuid=queue_uuid))
|
|
@ -0,0 +1,162 @@
|
|||
#!/usr/bin/env python3
|
||||
# -*-coding:UTF-8 -*
|
||||
|
||||
'''
|
||||
Flask functions and routes for the rest api
|
||||
'''
|
||||
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import time
|
||||
import uuid
|
||||
import json
|
||||
import redis
|
||||
import random
|
||||
import datetime
|
||||
|
||||
from flask import Flask, render_template, jsonify, request, Blueprint, redirect, url_for, Response
|
||||
from flask_login import login_required
|
||||
|
||||
from functools import wraps
|
||||
|
||||
sys.path.append(os.path.join(os.environ['D4_HOME'], 'lib'))
|
||||
import Sensor
|
||||
import ConfigLoader
|
||||
|
||||
# ============ BLUEPRINT ============
|
||||
|
||||
restApi = Blueprint('restApi', __name__, template_folder='templates')
|
||||
|
||||
# ============ VARIABLES ============
|
||||
|
||||
### Config ###
|
||||
config_loader = ConfigLoader.ConfigLoader()
|
||||
r_serv_metadata = config_loader.get_redis_conn("Redis_METADATA")
|
||||
r_serv_db = config_loader.get_redis_conn("Redis_SERV")
|
||||
r_cache = config_loader.get_redis_conn("Redis_CACHE")
|
||||
config_loader = None
|
||||
### ###
|
||||
|
||||
# ============ AUTH FUNCTIONS ============
|
||||
|
||||
def check_token_format(strg, search=re.compile(r'[^a-zA-Z0-9_-]').search):
|
||||
return not bool(search(strg))
|
||||
|
||||
def verify_token(token):
|
||||
if len(token) != 41:
|
||||
return False
|
||||
|
||||
if not check_token_format(token):
|
||||
return False
|
||||
|
||||
rand_sleep = random.randint(1,300)/1000
|
||||
time.sleep(rand_sleep)
|
||||
if r_serv_db.hexists('user:tokens', token):
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
def get_user_from_token(token):
|
||||
return r_serv_db.hget('user:tokens', token)
|
||||
|
||||
def verify_user_role(role, token):
|
||||
user_id = get_user_from_token(token)
|
||||
if user_id:
|
||||
if is_in_role(user_id, role):
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
else:
|
||||
return False
|
||||
|
||||
def is_in_role(user_id, role):
|
||||
if r_serv_db.sismember('user_role:{}'.format(role), user_id):
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
# ============ DECORATOR ============
|
||||
|
||||
def token_required(user_role):
|
||||
def actual_decorator(funct):
|
||||
@wraps(funct)
|
||||
def api_token(*args, **kwargs):
|
||||
data = authErrors(user_role)
|
||||
if data:
|
||||
return Response(json.dumps(data[0], indent=2, sort_keys=True), mimetype='application/json'), data[1]
|
||||
else:
|
||||
return funct(*args, **kwargs)
|
||||
return api_token
|
||||
return actual_decorator
|
||||
|
||||
def get_auth_from_header():
|
||||
token = request.headers.get('Authorization').replace(' ', '') # remove space
|
||||
return token
|
||||
|
||||
def authErrors(user_role):
|
||||
# Check auth
|
||||
if not request.headers.get('Authorization'):
|
||||
return ({'status': 'error', 'reason': 'Authentication needed'}, 401)
|
||||
token = get_auth_from_header()
|
||||
data = None
|
||||
# verify token format
|
||||
|
||||
# brute force protection
|
||||
current_ip = request.remote_addr
|
||||
login_failed_ip = r_cache.get('failed_login_ip_api:{}'.format(current_ip))
|
||||
# brute force by ip
|
||||
if login_failed_ip:
|
||||
login_failed_ip = int(login_failed_ip)
|
||||
if login_failed_ip >= 5:
|
||||
return ({'status': 'error', 'reason': 'Max Connection Attempts reached, Please wait {}s'.format(r_cache.ttl('failed_login_ip_api:{}'.format(current_ip)))}, 401)
|
||||
|
||||
try:
|
||||
authenticated = False
|
||||
if verify_token(token):
|
||||
authenticated = True
|
||||
|
||||
# check user role
|
||||
if not verify_user_role(user_role, token):
|
||||
data = ({'status': 'error', 'reason': 'Access Forbidden'}, 403)
|
||||
|
||||
if not authenticated:
|
||||
r_cache.incr('failed_login_ip_api:{}'.format(current_ip))
|
||||
r_cache.expire('failed_login_ip_api:{}'.format(current_ip), 300)
|
||||
data = ({'status': 'error', 'reason': 'Authentication failed'}, 401)
|
||||
except Exception as e:
|
||||
print(e)
|
||||
data = ({'status': 'error', 'reason': 'Malformed Authentication String'}, 400)
|
||||
if data:
|
||||
return data
|
||||
else:
|
||||
return None
|
||||
|
||||
# ============ FUNCTIONS ============
|
||||
|
||||
def is_valid_uuid_v4(header_uuid):
|
||||
try:
|
||||
header_uuid=header_uuid.replace('-', '')
|
||||
uuid_test = uuid.UUID(hex=header_uuid, version=4)
|
||||
return uuid_test.hex == header_uuid
|
||||
except:
|
||||
return False
|
||||
|
||||
def build_json_response(resp_data, resp_code):
|
||||
return Response(json.dumps(resp_data, indent=2, sort_keys=True), mimetype='application/json'), resp_code
|
||||
|
||||
# ============= ROUTES ==============
|
||||
|
||||
|
||||
@restApi.route("/api/v1/add/sensor/register", methods=['POST'])
|
||||
@token_required('sensor_register')
|
||||
def add_sensor_register():
|
||||
data = request.get_json()
|
||||
res = Sensor.register_sensor(data)
|
||||
return Response(json.dumps(res[0], indent=2, sort_keys=True), mimetype='application/json'), res[1]
|
||||
|
||||
@restApi.route("/api/v1/sensors/monitoring/errors", methods=['GET'])
|
||||
@token_required('user')
|
||||
def get_all_sensors_connection_errors():
|
||||
res = Sensor.api_get_all_sensors_connection_errors()
|
||||
return build_json_response(res[0], res[1])
|
|
@@ -0,0 +1,184 @@
#!/usr/bin/env python3
# -*-coding:UTF-8 -*

'''
Flask functions and routes for the settings pages
'''

import os
import re
import sys
import redis

sys.path.append(os.path.join(os.environ['D4_HOME'], 'lib'))
import ConfigLoader

from flask import Flask, render_template, jsonify, request, Blueprint, redirect, url_for, Response
from flask_login import login_required, current_user

from Role_Manager import login_admin, login_user_basic
from Role_Manager import create_user_db, edit_user_db, delete_user_db, check_password_strength, generate_new_token, gen_password, get_all_role

# ============ BLUEPRINT ============

settings = Blueprint('settings', __name__, template_folder='templates')

# ============ VARIABLES ============

email_regex = r'[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,6}'
email_regex = re.compile(email_regex)

### Config ###
config_loader = ConfigLoader.ConfigLoader()
r_serv_metadata = config_loader.get_redis_conn("Redis_METADATA")
r_serv_db = config_loader.get_redis_conn("Redis_SERV")
config_loader = None
### ###

# ============ FUNCTIONS ============

def one():
    return 1

def check_email(email):
    return email_regex.match(email)

def get_user_metadata(user_id):
    user_metadata = {}
    user_metadata['email'] = user_id
    user_metadata['role'] = r_serv_db.hget('user_metadata:{}'.format(user_id), 'role')
    user_metadata['api_key'] = r_serv_db.hget('user_metadata:{}'.format(user_id), 'token')
    return user_metadata

def get_users_metadata(list_users):
    users = []
    for user in list_users:
        users.append(get_user_metadata(user))
    return users

def get_all_users():
    return r_serv_db.hkeys('user:all')

# ============= ROUTES ==============

@settings.route("/settings/", methods=['GET'])
@login_required
@login_user_basic
def settings_page():
    return redirect(url_for('settings.edit_profile'))

@settings.route("/settings/edit_profile", methods=['GET'])
@login_required
@login_user_basic
def edit_profile():
    user_metadata = get_user_metadata(current_user.get_id())
    admin_level = current_user.is_in_role('admin')
    return render_template("edit_profile.html", user_metadata=user_metadata,
                           admin_level=admin_level)

@settings.route("/settings/new_token", methods=['GET'])
@login_required
@login_user_basic
def new_token():
    generate_new_token(current_user.get_id())
    return redirect(url_for('settings.edit_profile'))

@settings.route("/settings/new_token_user", methods=['GET'])
@login_required
@login_admin
def new_token_user():
    user_id = request.args.get('user_id')
    if r_serv_db.exists('user_metadata:{}'.format(user_id)):
        generate_new_token(user_id)
    return redirect(url_for('settings.users_list'))

@settings.route("/settings/create_user", methods=['GET'])
@login_required
@login_admin
def create_user():
    user_id = request.args.get('user_id')
    error = request.args.get('error')
    error_mail = request.args.get('error_mail')
    role = None
    if r_serv_db.exists('user_metadata:{}'.format(user_id)):
        role = r_serv_db.hget('user_metadata:{}'.format(user_id), 'role')
    else:
        user_id = None
    all_roles = get_all_role()
    return render_template("create_user.html", all_roles=all_roles, user_id=user_id, user_role=role,
                           error=error, error_mail=error_mail,
                           admin_level=True)

@settings.route("/settings/create_user_post", methods=['POST'])
@login_required
@login_admin
def create_user_post():
    email = request.form.get('username')
    role = request.form.get('user_role')
    password1 = request.form.get('password1')
    password2 = request.form.get('password2')

    all_roles = get_all_role()

    if email and len(email) < 300 and check_email(email) and role:
        if role in all_roles:
            # password set
            if password1 and password2:
                if password1 == password2:
                    if check_password_strength(password1):
                        password = password1
                    else:
                        return render_template("create_user.html", all_roles=all_roles, error="Incorrect Password", admin_level=True)
                else:
                    return render_template("create_user.html", all_roles=all_roles, error="Passwords don't match", admin_level=True)
            # generate password
            else:
                password = gen_password()

            if current_user.is_in_role('admin'):
                # edit user
                if r_serv_db.exists('user_metadata:{}'.format(email)):
                    if password1 and password2:
                        edit_user_db(email, password=password, role=role)
                        return redirect(url_for('settings.users_list', new_user=email, new_user_password=password, new_user_edited=True))
                    else:
                        edit_user_db(email, role=role)
                        return redirect(url_for('settings.users_list', new_user=email, new_user_password='Password not changed', new_user_edited=True))
                # create user
                else:
                    create_user_db(email, password, default=True, role=role)
                    return redirect(url_for('settings.users_list', new_user=email, new_user_password=password, new_user_edited=False))

        else:
            return render_template("create_user.html", all_roles=all_roles, admin_level=True)
    else:
        return render_template("create_user.html", all_roles=all_roles, error_mail=True, admin_level=True)

@settings.route("/settings/users_list", methods=['GET'])
@login_required
@login_admin
def users_list():
    all_users = get_users_metadata(get_all_users())
    new_user = request.args.get('new_user')
    new_user_dict = {}
    if new_user:
        new_user_dict['email'] = new_user
        new_user_dict['edited'] = request.args.get('new_user_edited')
        new_user_dict['password'] = request.args.get('new_user_password')
    return render_template("users_list.html", all_users=all_users, new_user=new_user_dict, admin_level=True)

@settings.route("/settings/edit_user", methods=['GET'])
@login_required
@login_admin
def edit_user():
    user_id = request.args.get('user_id')
    return redirect(url_for('settings.create_user', user_id=user_id))

@settings.route("/settings/delete_user", methods=['GET'])
@login_required
@login_admin
def delete_user():
    user_id = request.args.get('user_id')
    delete_user_db(user_id)
    return redirect(url_for('settings.users_list'))
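For reference, a small sketch of how the helpers above compose. The e-mail address and the printed output are made-up examples; create_user_db and gen_password are the Role_Manager helpers imported in this file, and the output keys mirror what get_user_metadata() builds.

from Role_Manager import create_user_db, gen_password

new_email = 'analyst@example.org'          # example value, not from the repository
if check_email(new_email):
    password = gen_password()
    create_user_db(new_email, password, default=True, role='user')

# get_users_metadata() returns one dict per user, built by get_user_metadata():
# [{'email': 'analyst@example.org', 'role': 'user', 'api_key': '<token>'}, ...]
print(get_users_metadata(get_all_users()))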
@@ -0,0 +1,52 @@
#!/usr/bin/env python3
# -*-coding:UTF-8 -*

import os
import sys
import redis

sys.path.append(os.path.join(os.environ['D4_HOME'], 'lib'))
import ConfigLoader

from Role_Manager import create_user_db, edit_user_db, get_default_admin_token, gen_password

### Config ###
config_loader = ConfigLoader.ConfigLoader()
r_serv = config_loader.get_redis_conn("Redis_SERV")
config_loader = None
### ###

if __name__ == "__main__":

    # create role_list
    if not r_serv.exists('d4:all_role'):
        role_dict = {'admin': 1, 'user': 2, 'sensor_register': 20}
        r_serv.zadd('d4:all_role', role_dict)

    username = 'admin@admin.test'
    password = gen_password()
    if r_serv.exists('user_metadata:{}'.format(username)):
        edit_user_db(username, password=password, role='admin')
    else:
        create_user_db(username, password, role='admin', default=True)

    username2 = 'config_generator@register.test'
    password2 = gen_password()
    if r_serv.exists('user_metadata:config_generator@register.test'):
        edit_user_db(username2, password=password2, role='sensor_register')
    else:
        create_user_db(username2, password2, role='sensor_register', default=True)

    token = get_default_admin_token()

    default_passwd_file = os.path.join(os.environ['D4_HOME'], 'DEFAULT_PASSWORD')
    to_write_str = '# Password Generated by default\n# This file is deleted after the first login\n#\nemail=admin@admin.test\npassword='
    to_write_str = to_write_str + password + '\nAPI_Key=' + token
    with open(default_passwd_file, 'w') as f:
        f.write(to_write_str)

    print('new user created: {}'.format(username))
    print('password: {}'.format(password))
    print('token: {}'.format(token))
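A small sketch of reading back the DEFAULT_PASSWORD file written above. The path and the key=value layout mirror the to_write_str built by the script; the parsing helper itself is not part of the repository and is shown only as an assumption-free convenience.

import os

creds = {}
with open(os.path.join(os.environ['D4_HOME'], 'DEFAULT_PASSWORD')) as f:
    for line in f:
        line = line.strip()
        # skip the comment header lines starting with '#'
        if line and not line.startswith('#') and '=' in line:
            key, value = line.split('=', 1)
            creds[key] = value

# e.g. creds == {'email': 'admin@admin.test', 'password': '<generated>', 'API_Key': '<token>'}
print(creds)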
@ -0,0 +1,58 @@
|
|||
<!DOCTYPE html>
|
||||
<html>
|
||||
|
||||
<head>
|
||||
<title>403 - D4-Project</title>
|
||||
<link rel="icon" href="{{ url_for('static', filename='image/d4-logo.png') }}">
|
||||
|
||||
<!-- Core CSS -->
|
||||
<link href="{{ url_for('static', filename='css/bootstrap.min.css') }}" rel="stylesheet">
|
||||
<link href="{{ url_for('static', filename='font-awesome/css/font-awesome.css') }}" rel="stylesheet">
|
||||
</head>
|
||||
|
||||
<body>
|
||||
|
||||
{% include 'navbar.html' %}
|
||||
|
||||
<div>
|
||||
<br>
|
||||
<br>
|
||||
<h1 class="text-center">403 Forbidden</h1>
|
||||
</div>
|
||||
<br>
|
||||
<br>
|
||||
<br>
|
||||
<br>
|
||||
<div class="d-flex justify-content-center">
|
||||
<pre>
|
||||
,d8 ,a8888a, ad888888b,
|
||||
,d888 ,8P"' `"Y8, d8" "88
|
||||
,d8" 88 ,8P Y8, a8P
|
||||
,d8" 88 88 88 aad8"
|
||||
,d8" 88 88 88 ""Y8,
|
||||
8888888888888 `8b d8' "8b
|
||||
88 `8ba, ,ad8' Y8, a88
|
||||
88 "Y8888P" "Y888888P'
|
||||
|
||||
88888888888 88 88 88 88
|
||||
88 88 "" 88 88
|
||||
88 88 88 88
|
||||
88aaaaa ,adPPYba, 8b,dPPYba, 88,dPPYba, 88 ,adPPYb,88 ,adPPYb,88 ,adPPYba, 8b,dPPYba,
|
||||
88""""" a8" "8a 88P' "Y8 88P' "8a 88 a8" `Y88 a8" `Y88 a8P_____88 88P' `"8a
|
||||
88 8b d8 88 88 d8 88 8b 88 8b 88 8PP""""""" 88 88
|
||||
88 "8a, ,a8" 88 88b, ,a8" 88 "8a, ,d88 "8a, ,d88 "8b, ,aa 88 88
|
||||
88 `"YbbdP"' 88 8Y"Ybbd8"' 88 `"8bbdP"Y8 `"8bbdP"Y8 `"Ybbd8"' 88 88
|
||||
</pre>
|
||||
</div>
|
||||
|
||||
{% include 'navfooter.html' %}
|
||||
|
||||
</body>
|
||||
|
||||
<script>
|
||||
$(document).ready(function(){
|
||||
$("#nav-home").addClass("active");
|
||||
} );
|
||||
</script>
|
||||
|
||||
</html>
|
|
@ -23,22 +23,7 @@
|
|||
|
||||
<body>
|
||||
|
||||
<nav class="navbar navbar-expand-sm navbar-dark bg-dark">
|
||||
<a class="navbar-brand" href="{{ url_for('index') }}">
|
||||
<img src="{{ url_for('static', filename='img/d4-logo.png')}}" alt="D4 Project" style="width:80px;">
|
||||
</a>
|
||||
<ul class="navbar-nav">
|
||||
<li class="nav-item">
|
||||
<a class="nav-link mr-3" href="{{ url_for('index') }}">Home <span class="sr-only">(current)</span></a>
|
||||
</li>
|
||||
<li class="nav-item" mr-3>
|
||||
<a class="nav-link mr-3" href="{{ url_for('sensors_status') }}">Sensors Status</a>
|
||||
</li>
|
||||
<li class="nav-item mr-3">
|
||||
<a class="nav-link" href="{{ url_for('server_management') }}" tabindex="-1" aria-disabled="true">Server Management</a>
|
||||
</li>
|
||||
</ul>
|
||||
</nav>
|
||||
{% include 'navbar.html' %}
|
||||
|
||||
<div class="d-flex justify-content-center">
|
||||
<pre>
|
||||
|
@ -68,3 +53,11 @@
|
|||
|
||||
{% include 'navfooter.html' %}
|
||||
</body>
|
||||
|
||||
<script>
|
||||
$(document).ready(function(){
|
||||
$("#nav-home").addClass("active");
|
||||
} );
|
||||
</script>
|
||||
|
||||
</html>
|
||||
|
|
|
@@ -0,0 +1,7 @@
<div class="input-group mb-1">
  <input type="text" class="form-control col-10 {%if error%}is-invalid{%else%}is-valid{%endif%}" name="{{sensor_uuid}}" value="{{sensor_uuid}}">
  <span class="btn btn-danger input-group-addon delete-field col-2"><i class="fa fa-trash"></i></span>
  <div class="invalid-feedback">
    Please provide a valid UUID v4.
  </div>
</div>
@ -0,0 +1,157 @@
|
|||
<!DOCTYPE html>
|
||||
|
||||
<html>
|
||||
<head>
|
||||
<title>D4-Project</title>
|
||||
<link rel="icon" href="{{ url_for('static', filename='img/d4-logo.png')}}">
|
||||
<!-- Core CSS -->
|
||||
<link href="{{ url_for('static', filename='css/bootstrap.min.css') }}" rel="stylesheet">
|
||||
<link href="{{ url_for('static', filename='font-awesome/css/font-awesome.css') }}" rel="stylesheet">
|
||||
<link href="{{ url_for('static', filename='css/dataTables.bootstrap.min.css') }}" rel="stylesheet">
|
||||
|
||||
<!-- JS -->
|
||||
<script src="{{ url_for('static', filename='js/jquery.js')}}"></script>
|
||||
<script src="{{ url_for('static', filename='js/popper.min.js')}}"></script>
|
||||
<script src="{{ url_for('static', filename='js/bootstrap.min.js')}}"></script>
|
||||
<script src="{{ url_for('static', filename='js/jquery.dataTables.min.js')}}"></script>
|
||||
<script src="{{ url_for('static', filename='js/dataTables.bootstrap.min.js')}}"></script>
|
||||
|
||||
<style>
|
||||
.popover{
|
||||
max-width: 100%;
|
||||
}
|
||||
</style>
|
||||
|
||||
|
||||
</head>
|
||||
|
||||
<body>
|
||||
|
||||
{% include 'navbar.html' %}
|
||||
|
||||
|
||||
|
||||
<div class="card mb-3 mt-1">
|
||||
<div class="card-header text-white bg-dark">
|
||||
<h5 class="card-title">Create Analyzer Queue</h5>
|
||||
</div>
|
||||
<div class="card-body">
|
||||
|
||||
<form action="{{ url_for('analyzer_queue.create_analyzer_queue_post') }}" method="post" enctype=multipart/form-data onsubmit="submitPaste()">
|
||||
|
||||
<div class="form-group mb-4">
|
||||
<label for="analyzer_type"><b>Analyzer Type</b></label>
|
||||
<input class="form-control col-md-4" type="number" name="analyzer_type" id="analyzer_type" value="{%if queue_type%}{{queue_type}}{%else%}1{%endif%}" min="1" max="254" required>
|
||||
<input class="form-control" type="text" name="analyzer_metatype" id="analyzer_metatype_name" placeholder="Meta Type Name" {%if metatype_name%}value="{{metatype_name}}"{%endif%}>
|
||||
</div>
|
||||
|
||||
<div class="input-group my-2">
|
||||
<div class="input-group-prepend">
|
||||
<button class="btn btn-outline-secondary" type="button" onclick="generate_new_uuid();"><i class="fa fa-random"></i></button>
|
||||
</div>
|
||||
<input class="form-control col-md-4" type="text" name="analyzer_uuid" id="analyzer_uuid" {%if queue_uuid%}value="{{queue_uuid}}"{%endif%} placeholder="Analyzer uuid - (Optional)">
|
||||
</div>
|
||||
|
||||
<div class="form-group my-2">
|
||||
<input class="form-control" type="text" name="description" id="analyzer_description" {%if description%}value="{{description}}"{%endif%} placeholder="Description - (Optional)">
|
||||
</div>
|
||||
|
||||
<div id="container-id-to-import">
|
||||
|
||||
<p>Create Queue by Group of UUID</p>
|
||||
|
||||
<div for="first_sensor_uuid"><b>Sensor UUID</b></div>
|
||||
|
||||
<div class="form-horizontal">
|
||||
<div class="form-body">
|
||||
<div class="form-group">
|
||||
<div class="fields">
|
||||
|
||||
{% if l_uuid %}
|
||||
{% for sensor_uuid in l_uuid %}
|
||||
{% with sensor_uuid=sensor_uuid, error=False%}
|
||||
{% include 'analyzer_queue/block_add_sensor_to_group_block.html' %}
|
||||
{% endwith %}
|
||||
{% endfor %}
|
||||
<br>
|
||||
{% endif %}
|
||||
{% if l_invalid_uuid %}
|
||||
{% for sensor_uuid in l_invalid_uuid %}
|
||||
{% with sensor_uuid=sensor_uuid, error=True%}
|
||||
{% include 'analyzer_queue/block_add_sensor_to_group_block.html' %}
|
||||
{% endwith %}
|
||||
{% endfor %}
|
||||
<br>
|
||||
{% endif %}
|
||||
<div class="input-group mb-1">
|
||||
<input type="text" class="form-control col-10" name="first_sensor_uuid" id="first_sensor_uuid">
|
||||
<span class="btn btn-info input-group-addon add-field col-2"><i class="fa fa-plus"></i></span>
|
||||
</div>
|
||||
<span class="help-block" hidden>Export Objects</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
</div>
|
||||
|
||||
<div class="form-group">
|
||||
<button class="btn btn-info" type="submit">Create Queue</button>
|
||||
</div>
|
||||
|
||||
</form>
|
||||
|
||||
</div>
|
||||
</div>
|
||||
|
||||
|
||||
{% include 'navfooter.html' %}
|
||||
</body>
|
||||
|
||||
<script>
|
||||
$(document).ready(function(){
|
||||
{%if queue_type!=2 and queue_type!=254%}
|
||||
$('#analyzer_metatype_name').hide();
|
||||
{%endif%}
|
||||
});
|
||||
|
||||
|
||||
var input_part_1 = '<div class="input-group mb-1"><input type="text" class="form-control col-10" name="'
|
||||
var input_part_2 = '"></div>'
|
||||
var minusButton = '<span class="btn btn-danger input-group-addon delete-field col-2"><i class="fa fa-trash"></i></span>'
|
||||
|
||||
$('.add-field').click(function() {
|
||||
var new_uuid = uuidv4();
|
||||
var template = input_part_1 + new_uuid + input_part_2;
|
||||
var temp = $(template).insertBefore('.help-block');
|
||||
temp.append(minusButton);
|
||||
});
|
||||
|
||||
$('.fields').on('click', '.delete-field', function(){
|
||||
console.log($(this).parent());
|
||||
$(this).parent().remove();
|
||||
//$.get( "#")
|
||||
});
|
||||
|
||||
function uuidv4() {
|
||||
return ([1e7]+-1e3+-4e3+-8e3+-1e11).replace(/[018]/g, c =>
|
||||
(c ^ crypto.getRandomValues(new Uint8Array(1))[0] & 15 >> c / 4).toString(16)
|
||||
);
|
||||
}
|
||||
|
||||
$('#analyzer_type').on('input', function() {
|
||||
if ($('#analyzer_type').val() == 2 || $('#analyzer_type').val() == 254){
|
||||
$('#analyzer_metatype_name').show()
|
||||
} else {
|
||||
$('#analyzer_metatype_name').hide()
|
||||
}
|
||||
});
|
||||
|
||||
function generate_new_uuid(){
|
||||
$.getJSON( "{{url_for('generate_uuid')}}", function( data ) {
|
||||
console.log(data['uuid'])
|
||||
$( "#analyzer_uuid" ).val(data['uuid']);
|
||||
});
|
||||
}
|
||||
|
||||
</script>
|
|
@ -0,0 +1,168 @@
|
|||
<!DOCTYPE html>
|
||||
|
||||
<html>
|
||||
<head>
|
||||
<title>D4-Project</title>
|
||||
<link rel="icon" href="{{ url_for('static', filename='img/d4-logo.png')}}">
|
||||
<!-- Core CSS -->
|
||||
<link href="{{ url_for('static', filename='css/bootstrap.min.css') }}" rel="stylesheet">
|
||||
<link href="{{ url_for('static', filename='font-awesome/css/font-awesome.css') }}" rel="stylesheet">
|
||||
<link href="{{ url_for('static', filename='css/dataTables.bootstrap.min.css') }}" rel="stylesheet">
|
||||
|
||||
<!-- JS -->
|
||||
<script src="{{ url_for('static', filename='js/jquery.js')}}"></script>
|
||||
<script src="{{ url_for('static', filename='js/popper.min.js')}}"></script>
|
||||
<script src="{{ url_for('static', filename='js/bootstrap.min.js')}}"></script>
|
||||
<script src="{{ url_for('static', filename='js/jquery.dataTables.min.js')}}"></script>
|
||||
<script src="{{ url_for('static', filename='js/dataTables.bootstrap.min.js')}}"></script>
|
||||
|
||||
<style>
|
||||
.popover{
|
||||
max-width: 100%;
|
||||
}
|
||||
</style>
|
||||
|
||||
|
||||
</head>
|
||||
|
||||
<body>
|
||||
|
||||
{% include 'navbar.html' %}
|
||||
|
||||
|
||||
|
||||
<div class="card mb-3 mt-1">
|
||||
<div class="card-header text-white bg-dark">
|
||||
<h5 class="card-title">Analyzer Queue: <b>{{queue_metadata['uuid']}}</b></h5>
|
||||
</div>
|
||||
<div class="card-body">
|
||||
|
||||
<table class="table table-striped table-bordered">
|
||||
<thead class="thead-dark">
|
||||
<tr>
|
||||
<th>Type Name</th>
|
||||
<th>Group</th>
|
||||
<th style="max-width: 800px;">Name</th>
|
||||
<th style="max-width: 800px;">Last updated</th>
|
||||
<th style="max-width: 800px;">Change max size limit</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
<tr>
|
||||
<td>
|
||||
{%if queue_metadata['format_type'] == 254%}
|
||||
{{queue_metadata['extended_type']}}
|
||||
{%else%}
|
||||
{{queue_metadata['format_type']}}
|
||||
{%endif%}
|
||||
</td>
|
||||
{%if queue_metadata['is_group_queue']%}
|
||||
<td class="text-center"><i class="fa fa-group"></i></td>
|
||||
{%else%}
|
||||
<td></td>
|
||||
{%endif%}
|
||||
<td>
|
||||
<div class="d-flex">
|
||||
<b>{{queue_metadata['uuid']}}:{{queue_metadata['format_type']}}{%if queue_metadata['format_type'] == 254%}:{{queue_metadata['extended_type']}}{%endif%}</b>
|
||||
</div>
|
||||
</td>
|
||||
<td>{{queue_metadata['last_updated']}}</td>
|
||||
<td>
|
||||
<div class="d-xl-flex justify-content-xl-center">
|
||||
<input class="form-control mr-lg-1" style="max-width: 100px;" type="number" id="max_size_analyzer_{{queue_metadata['uuid']}}" value="{{queue_metadata['size_limit']}}" min="0" required="">
|
||||
<button type="button" class="btn btn-outline-secondary" onclick="window.location.href ='{{ url_for('analyzer_change_max_size') }}?analyzer_uuid={{queue_metadata['uuid']}}&redirect=0&max_size_analyzer='+$('#max_size_analyzer_{{queue_metadata['uuid']}}').val();">Change Max Size</button>
|
||||
</div>
|
||||
</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
|
||||
<form action="{{ url_for('analyzer_queue.edit_queue_analyzer_queue_post') }}" method="post" enctype=multipart/form-data>
|
||||
|
||||
<input class="form-control" type="text" name="queue_uuid" id="queue_uuid" value="{{queue_metadata['uuid']}}" hidden>
|
||||
|
||||
<div class="form-group my-2">
|
||||
<input class="form-control" type="text" name="description" id="analyzer_description" {%if 'description' in queue_metadata%}value="{{queue_metadata['description']}}"{%endif%} placeholder="Description - (Optional)">
|
||||
</div>
|
||||
|
||||
<div>
|
||||
|
||||
<br>
|
||||
|
||||
<div for="first_sensor_uuid"><b>Sensor UUID</b></div>
|
||||
|
||||
<div class="form-horizontal">
|
||||
<div class="form-body">
|
||||
<div class="form-group">
|
||||
<div class="fields">
|
||||
|
||||
{% if l_sensors_uuid %}
|
||||
{% for sensor_uuid in l_sensors_uuid %}
|
||||
{% with sensor_uuid=sensor_uuid, error=False%}
|
||||
{% include 'analyzer_queue/block_add_sensor_to_group_block.html' %}
|
||||
{% endwith %}
|
||||
{% endfor %}
|
||||
<br>
|
||||
{% endif %}
|
||||
{% if l_invalid_uuid %}
|
||||
{% for sensor_uuid in l_invalid_uuid %}
|
||||
{% with sensor_uuid=sensor_uuid, error=True%}
|
||||
{% include 'analyzer_queue/block_add_sensor_to_group_block.html' %}
|
||||
{% endwith %}
|
||||
{% endfor %}
|
||||
<br>
|
||||
{% endif %}
|
||||
<div class="input-group mb-1">
|
||||
<input type="text" class="form-control col-10" name="first_sensor_uuid" id="first_sensor_uuid">
|
||||
<span class="btn btn-info input-group-addon add-field col-2"><i class="fa fa-plus"></i></span>
|
||||
</div>
|
||||
<span class="help-block" hidden>Sensor UUID</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
</div>
|
||||
|
||||
<div class="form-group">
|
||||
<button class="btn btn-info" type="submit">Edit Queue</button>
|
||||
</div>
|
||||
|
||||
</form>
|
||||
|
||||
</div>
|
||||
</div>
|
||||
|
||||
|
||||
{% include 'navfooter.html' %}
|
||||
</body>
|
||||
|
||||
<script>
|
||||
$(document).ready(function(){
|
||||
});
|
||||
|
||||
|
||||
var input_part_1 = '<div class="input-group mb-1"><input type="text" class="form-control col-10" name="'
|
||||
var input_part_2 = '"></div>'
|
||||
var minusButton = '<span class="btn btn-danger input-group-addon delete-field col-2"><i class="fa fa-trash"></i></span>'
|
||||
|
||||
$('.add-field').click(function() {
|
||||
var new_uuid = uuidv4();
|
||||
var template = input_part_1 + new_uuid + input_part_2;
|
||||
var temp = $(template).insertBefore('.help-block');
|
||||
temp.append(minusButton);
|
||||
});
|
||||
|
||||
$('.fields').on('click', '.delete-field', function(){
|
||||
console.log($(this).parent());
|
||||
$(this).parent().remove();
|
||||
//$.get( "#")
|
||||
});
|
||||
|
||||
function uuidv4() {
|
||||
return ([1e7]+-1e3+-4e3+-8e3+-1e11).replace(/[018]/g, c =>
|
||||
(c ^ crypto.getRandomValues(new Uint8Array(1))[0] & 15 >> c / 4).toString(16)
|
||||
);
|
||||
}
|
||||
|
||||
</script>
|
|
@ -23,22 +23,7 @@
|
|||
|
||||
<body>
|
||||
|
||||
<nav class="navbar navbar-expand-sm navbar-dark bg-dark">
|
||||
<a class="navbar-brand" href="{{ url_for('index') }}">
|
||||
<img src="{{ url_for('static', filename='img/d4-logo.png')}}" alt="D4 Project" style="width:80px;">
|
||||
</a>
|
||||
<ul class="navbar-nav">
|
||||
<li class="nav-item">
|
||||
<a class="nav-link mr-3" href="{{ url_for('index') }}">Home <span class="sr-only">(current)</span></a>
|
||||
</li>
|
||||
<li class="nav-item" mr-3>
|
||||
<a class="nav-link mr-3" href="{{ url_for('sensors_status') }}">Sensors Status</a>
|
||||
</li>
|
||||
<li class="nav-item mr-3">
|
||||
<a class="nav-link" href="{{ url_for('server_management') }}" tabindex="-1" aria-disabled="true">Server Management</a>
|
||||
</li>
|
||||
</ul>
|
||||
</nav>
|
||||
{% include 'navbar.html' %}
|
||||
|
||||
<div class="card-deck justify-content-center ml-0 mr-0">
|
||||
<div class="card border-dark mt-3 ml-4 mr-4">
|
||||
|
|
|
@ -23,22 +23,7 @@
|
|||
|
||||
<body>
|
||||
|
||||
<nav class="navbar navbar-expand-sm navbar-dark bg-dark">
|
||||
<a class="navbar-brand" href="{{ url_for('index') }}">
|
||||
<img src="{{ url_for('static', filename='img/d4-logo.png')}}" alt="D4 Project" style="width:80px;">
|
||||
</a>
|
||||
<ul class="navbar-nav">
|
||||
<li class="nav-item">
|
||||
<a class="nav-link mr-3" href="{{ url_for('index') }}">Home <span class="sr-only">(current)</span></a>
|
||||
</li>
|
||||
<li class="nav-item" mr-3>
|
||||
<a class="nav-link mr-3" href="{{ url_for('sensors_status') }}">Sensors Status</a>
|
||||
</li>
|
||||
<li class="nav-item mr-3">
|
||||
<a class="nav-link" href="{{ url_for('server_management') }}" tabindex="-1" aria-disabled="true">Server Management</a>
|
||||
</li>
|
||||
</ul>
|
||||
</nav>
|
||||
{% include 'navbar.html' %}
|
||||
|
||||
<div class="card-deck justify-content-center ml-0 mr-0">
|
||||
<div class="card border-dark mt-3 ml-4 mr-4">
|
||||
|
|
|
@ -0,0 +1,108 @@
|
|||
<!DOCTYPE html>
|
||||
|
||||
<html>
|
||||
<head>
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no">
|
||||
<title>D4-Project</title>
|
||||
<link rel="icon" href="{{ url_for('static', filename='img/d4-logo.png')}}">
|
||||
<!-- Core CSS -->
|
||||
<link href="{{ url_for('static', filename='css/bootstrap.min.css') }}" rel="stylesheet">
|
||||
<link href="{{ url_for('static', filename='font-awesome/css/font-awesome.css') }}" rel="stylesheet">
|
||||
|
||||
<!-- JS -->
|
||||
<script src="{{ url_for('static', filename='js/jquery.js')}}"></script>
|
||||
<script src="{{ url_for('static', filename='js/bootstrap.min.js')}}"></script>
|
||||
|
||||
|
||||
<style>
|
||||
html,
|
||||
body {
|
||||
height: 100%;
|
||||
}
|
||||
|
||||
body {
|
||||
display: -ms-flexbox;
|
||||
display: flex;
|
||||
-ms-flex-align: center;
|
||||
align-items: center;
|
||||
padding-top: 40px;
|
||||
padding-bottom: 40px;
|
||||
background-color: #f5f5f5;
|
||||
}
|
||||
|
||||
.form-signin {
|
||||
width: 100%;
|
||||
max-width: 330px;
|
||||
padding: 15px;
|
||||
margin: auto;
|
||||
}
|
||||
.form-signin .checkbox {
|
||||
font-weight: 400;
|
||||
}
|
||||
.form-signin .form-control {
|
||||
position: relative;
|
||||
box-sizing: border-box;
|
||||
height: auto;
|
||||
padding: 10px;
|
||||
font-size: 16px;
|
||||
}
|
||||
.form-signin .form-control:focus {
|
||||
z-index: 2;
|
||||
}
|
||||
.form-signin input[type="password"] {
|
||||
margin-bottom: 10px;
|
||||
border-top-left-radius: 0;
|
||||
border-top-right-radius: 0;
|
||||
}
|
||||
</style>
|
||||
|
||||
</head>
|
||||
|
||||
<body class="text-center">
|
||||
|
||||
|
||||
<form class="form-signin" action="{{ url_for('change_password')}}" autocomplete="off" method="post">
|
||||
<img class="mb-4" src="{{ url_for('static', filename='img/d4-logo.png')}}" width="300">
|
||||
<h1 class="h3 mb-3 text-secondary">Change Password</h1>
|
||||
<label for="inputPassword1" class="sr-only">Password</label>
|
||||
<input type="password" id="inputPassword1" name="password1" class="form-control {% if error %}is-invalid{% endif %}" placeholder="Password" autocomplete="new-password" required autofocus>
|
||||
<label for="inputPassword2" class="sr-only">Confirm Password</label>
|
||||
<input type="password" id="inputPassword2" name="password2" class="form-control {% if error %}is-invalid{% endif %}" placeholder="Confirm Password" value="" autocomplete="new-password" required>
|
||||
{% if error %}
|
||||
<div class="invalid-feedback">
|
||||
{{error}}
|
||||
</div>
|
||||
{% endif %}
|
||||
<button class="btn btn-lg btn-primary btn-block" type="submit">Submit</button>
|
||||
|
||||
<br>
|
||||
<br>
|
||||
<br>
|
||||
<h5 class="h3 mb-3 text-secondary">Password Requirements</h5>
|
||||
<ul class="list-group">
|
||||
<li class="list-group-item d-flex justify-content-between align-items-center">
|
||||
Minimal length
|
||||
<span class="badge badge-primary badge-pill">10</span>
|
||||
</li>
|
||||
<li class="list-group-item d-flex justify-content-between align-items-center">
|
||||
Upper characters: A-Z
|
||||
<span class="badge badge-primary badge-pill">1</span>
|
||||
</li>
|
||||
<li class="list-group-item d-flex justify-content-between align-items-center">
|
||||
Lower characters: a-z
|
||||
<span class="badge badge-primary badge-pill">1</span>
|
||||
</li>
|
||||
<li class="list-group-item d-flex justify-content-between align-items-center">
|
||||
Digits: 0-9
|
||||
<span class="badge badge-primary badge-pill">2</span>
|
||||
</li>
|
||||
<li class="list-group-item d-flex justify-content-between align-items-center">
|
||||
Maximum length
|
||||
<span class="badge badge-primary badge-pill">100</span>
|
||||
</li>
|
||||
</ul>
|
||||
|
||||
</form>
|
||||
|
||||
|
||||
</body>
|
|
@ -0,0 +1,156 @@
|
|||
<!DOCTYPE html>
|
||||
<html>
|
||||
|
||||
<head>
|
||||
<title>D4-Project</title>
|
||||
<link rel="icon" href="{{ url_for('static', filename='img/d4-logo.png') }}">
|
||||
|
||||
<!-- Core CSS -->
|
||||
<link href="{{ url_for('static', filename='css/bootstrap.min.css') }}" rel="stylesheet">
|
||||
<link href="{{ url_for('static', filename='font-awesome/css/font-awesome.css') }}" rel="stylesheet">
|
||||
<link href="{{ url_for('static', filename='css/dataTables.bootstrap.min.css') }}" rel="stylesheet">
|
||||
|
||||
<!-- JS -->
|
||||
<script src="{{ url_for('static', filename='js/jquery.js')}}"></script>
|
||||
<script src="{{ url_for('static', filename='js/popper.min.js')}}"></script>
|
||||
<script src="{{ url_for('static', filename='js/bootstrap.min.js')}}"></script>
|
||||
<script src="{{ url_for('static', filename='js/jquery.dataTables.min.js')}}"></script>
|
||||
<script src="{{ url_for('static', filename='js/dataTables.bootstrap.min.js')}}"></script>
|
||||
|
||||
</head>
|
||||
<body>
|
||||
|
||||
{% include 'navbar.html' %}
|
||||
|
||||
<div class="container-fluid">
|
||||
<div class="row">
|
||||
|
||||
{% include 'sidebar_settings.html' %}
|
||||
|
||||
<div class="col-12 col-lg-10" id="core_content">
|
||||
|
||||
<form class="form-signin" action="{{ url_for('settings.create_user_post')}}" autocomplete="off" method="post">
|
||||
|
||||
<h1 class="h3 mt-1 mb-3 text-center text-secondary">Create User</h1>
|
||||
<label for="inputEmail" class="sr-only">Email address</label>
|
||||
<input type="email" id="inputEmail" name="username" class="form-control {% if error_mail %}is-invalid{% endif %}" placeholder="Email address" autocomplete="off" required {% if user_id %}value="{{user_id}}"{% else %}{% endif %}>
|
||||
{% if error_mail %}
|
||||
<div class="invalid-feedback">
|
||||
Please provide a valid email address
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
<label class="mt-3" for="role_selector">User Role</label>
|
||||
<select class="custom-select" id="role_selector" name="user_role">
|
||||
{% for role in all_roles %}
|
||||
{% if role == user_role %}
|
||||
<option value="{{role}}" selected>{{role}}</option>
|
||||
{% else %}
|
||||
<option value="{{role}}">{{role}}</option>
|
||||
{% endif %}
|
||||
{% endfor %}
|
||||
</select>
|
||||
|
||||
<div class="custom-control custom-switch mt-4 mb-3">
|
||||
<input type="checkbox" class="custom-control-input" id="set_manual_password" value="" onclick="toggle_password_fields();">
|
||||
<label class="custom-control-label" for="set_manual_password">Set Password</label>
|
||||
</div>
|
||||
|
||||
<div id="password-section">
|
||||
<h1 class="h3 mb-3 text-center text-secondary">Create Password</h1>
|
||||
<label for="inputPassword1" class="sr-only">Password</label>
|
||||
<input type="password" id="inputPassword1" name="password1" class="form-control {% if error %}is-invalid{% endif %}" placeholder="Password" autocomplete="new-password">
|
||||
<label for="inputPassword2" class="sr-only">Confirm Password</label>
|
||||
<input type="password" id="inputPassword2" name="password2" class="form-control {% if error %}is-invalid{% endif %}" placeholder="Confirm Password" value="" autocomplete="new-password">
|
||||
{% if error %}
|
||||
<div class="invalid-feedback">
|
||||
{{error}}
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
</div>
|
||||
<button class="btn btn-lg btn-primary btn-block mt-3" type="submit">Submit</button>
|
||||
|
||||
<div id="password-section-info">
|
||||
<br>
|
||||
<br>
|
||||
<br>
|
||||
<h5 class="h3 mb-3 text-center text-secondary">Password Requirements</h5>
|
||||
<ul class="list-group">
|
||||
<li class="list-group-item d-flex justify-content-between align-items-center">
|
||||
Minimal length
|
||||
<span class="badge badge-primary badge-pill">10</span>
|
||||
</li>
|
||||
<li class="list-group-item d-flex justify-content-between align-items-center">
|
||||
Upper characters: A-Z
|
||||
<span class="badge badge-primary badge-pill">1</span>
|
||||
</li>
|
||||
<li class="list-group-item d-flex justify-content-between align-items-center">
|
||||
Lower characters: a-z
|
||||
<span class="badge badge-primary badge-pill">1</span>
|
||||
</li>
|
||||
<li class="list-group-item d-flex justify-content-between align-items-center">
|
||||
Digits: 0-9
|
||||
<span class="badge badge-primary badge-pill">2</span>
|
||||
</li>
|
||||
<li class="list-group-item d-flex justify-content-between align-items-center">
|
||||
Maximum length
|
||||
<span class="badge badge-primary badge-pill">100</span>
|
||||
</li>
|
||||
</ul>
|
||||
</div>
|
||||
|
||||
</form>
|
||||
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{% include 'navfooter.html' %}
|
||||
|
||||
</body>
|
||||
|
||||
<script>
|
||||
$(document).ready(function(){
|
||||
$("#password-section").hide();
|
||||
$("#password-section-info").hide();
|
||||
$("#nav-settings").addClass("active");
|
||||
$("#nav_create_user").addClass("active");
|
||||
$("#nav_user_management").removeClass("text-muted");
|
||||
|
||||
{% if error %}
|
||||
toggle_password_fields();
|
||||
{% endif %}
|
||||
} );
|
||||
|
||||
function toggle_sidebar(){
|
||||
if($('#nav_menu').is(':visible')){
|
||||
$('#nav_menu').hide();
|
||||
$('#side_menu').removeClass('border-right')
|
||||
$('#side_menu').removeClass('col-lg-2')
|
||||
$('#core_content').removeClass('col-lg-10')
|
||||
}else{
|
||||
$('#nav_menu').show();
|
||||
$('#side_menu').addClass('border-right')
|
||||
$('#side_menu').addClass('col-lg-2')
|
||||
$('#core_content').addClass('col-lg-10')
|
||||
}
|
||||
}
|
||||
|
||||
function toggle_password_fields() {
|
||||
var password_div = $("#password-section");
|
||||
if(password_div.is(":visible")){
|
||||
$("#password-section").hide();
|
||||
$("#password-section-info").hide();
|
||||
$("#inputPassword1").prop('required',false);
|
||||
$("#inputPassword2").prop('required',false);
|
||||
} else {
|
||||
$("#password-section").show();
|
||||
$("#password-section-info").show();
|
||||
$("#inputPassword1").prop('required',true);
|
||||
$("#inputPassword2").prop('required',true);
|
||||
}
|
||||
}
|
||||
</script>
|
||||
|
||||
</html>
|
|
@ -0,0 +1,99 @@
|
|||
<!DOCTYPE html>
|
||||
<html>
|
||||
|
||||
<head>
|
||||
<title>D4-Project</title>
|
||||
<link rel="icon" href="{{ url_for('static', filename='img/d4-logo.png') }}">
|
||||
|
||||
<!-- Core CSS -->
|
||||
<link href="{{ url_for('static', filename='css/bootstrap.min.css') }}" rel="stylesheet">
|
||||
<link href="{{ url_for('static', filename='font-awesome/css/font-awesome.css') }}" rel="stylesheet">
|
||||
<link href="{{ url_for('static', filename='css/dataTables.bootstrap.min.css') }}" rel="stylesheet">
|
||||
|
||||
<!-- JS -->
|
||||
<script src="{{ url_for('static', filename='js/jquery.js')}}"></script>
|
||||
<script src="{{ url_for('static', filename='js/popper.min.js')}}"></script>
|
||||
<script src="{{ url_for('static', filename='js/bootstrap.min.js')}}"></script>
|
||||
<script src="{{ url_for('static', filename='js/jquery.dataTables.min.js')}}"></script>
|
||||
<script src="{{ url_for('static', filename='js/dataTables.bootstrap.min.js')}}"></script>
|
||||
|
||||
</head>
|
||||
<body>
|
||||
|
||||
{% include 'navbar.html' %}
|
||||
|
||||
<div class="container-fluid">
|
||||
<div class="row">
|
||||
|
||||
{% include 'sidebar_settings.html' %}
|
||||
|
||||
<div class="col-12 col-lg-10" id="core_content">
|
||||
|
||||
<div class="card mb-3 mt-1">
|
||||
<div class="card-header text-white bg-dark pb-1">
|
||||
<h5 class="card-title">My Profile :</h5>
|
||||
</div>
|
||||
<div class="card-body">
|
||||
|
||||
<div class="row">
|
||||
<div class="col-xl-6">
|
||||
<div class="card text-center border-secondary">
|
||||
<div class="card-body px-1 py-0">
|
||||
<table class="table table-sm">
|
||||
<tbody>
|
||||
<tr>
|
||||
<td>Email</td>
|
||||
<td>{{user_metadata['email']}}</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Role</td>
|
||||
<td>{{user_metadata['role']}}</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>API Key</td>
|
||||
<td>
|
||||
{{user_metadata['api_key']}}
|
||||
<a class="ml-3" href="{{url_for('settings.new_token')}}"><i class="fa fa-random"></i></a>
|
||||
</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
</div>
|
||||
</div>
|
||||
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{% include 'navfooter.html' %}
|
||||
|
||||
</body>
|
||||
|
||||
<script>
|
||||
$(document).ready(function(){
|
||||
$("#nav-settings").addClass("active");
|
||||
$("#nav_edit_profile").addClass("active");
|
||||
$("#nav_my_profile").removeClass("text-muted");
|
||||
} );
|
||||
|
||||
function toggle_sidebar(){
|
||||
if($('#nav_menu').is(':visible')){
|
||||
$('#nav_menu').hide();
|
||||
$('#side_menu').removeClass('border-right')
|
||||
$('#side_menu').removeClass('col-lg-2')
|
||||
$('#core_content').removeClass('col-lg-10')
|
||||
}else{
|
||||
$('#nav_menu').show();
|
||||
$('#side_menu').addClass('border-right')
|
||||
$('#side_menu').addClass('col-lg-2')
|
||||
$('#core_content').addClass('col-lg-10')
|
||||
}
|
||||
}
|
||||
</script>
|
||||
|
||||
</html>
|
|
@ -32,6 +32,10 @@
|
|||
}
|
||||
.bar{
|
||||
fill:#eaeaea;
|
||||
}
|
||||
.bars:hover{
|
||||
fill: aqua;
|
||||
cursor: pointer;
|
||||
}
|
||||
text.label{
|
||||
fill: #777777;
|
||||
|
@ -42,6 +46,10 @@
|
|||
text.category{
|
||||
fill: #666666;
|
||||
font-size: 18px;
|
||||
}
|
||||
text.categorys:hover{
|
||||
fill: black;
|
||||
cursor: pointer;
|
||||
}
|
||||
</style>
|
||||
|
||||
|
@ -50,22 +58,7 @@
|
|||
|
||||
<body>
|
||||
|
||||
<nav class="navbar navbar-expand-sm navbar-dark bg-dark">
|
||||
<a class="navbar-brand" href="{{ url_for('index') }}">
|
||||
<img src="{{ url_for('static', filename='img/d4-logo.png')}}" alt="D4 Project" style="width:80px;">
|
||||
</a>
|
||||
<ul class="navbar-nav">
|
||||
<li class="nav-item active">
|
||||
<a class="nav-link mr-3" href="{{ url_for('index') }}">Home <span class="sr-only">(current)</span></a>
|
||||
</li>
|
||||
<li class="nav-item" mr-3>
|
||||
<a class="nav-link mr-3" href="{{ url_for('sensors_status') }}">Sensors Status</a>
|
||||
</li>
|
||||
<li class="nav-item mr-3">
|
||||
<a class="nav-link" href="{{ url_for('server_management') }}" tabindex="-1" aria-disabled="true">Server Management</a>
|
||||
</li>
|
||||
</ul>
|
||||
</nav>
|
||||
{% include 'navbar.html' %}
|
||||
|
||||
|
||||
<div class="row mr-0">
|
||||
|
@ -97,17 +90,13 @@
|
|||
</div>
|
||||
</div>
|
||||
|
||||
<div class="d-flex justify-content-center">
|
||||
<a href="{{ url_for('delete_data') }}">
|
||||
<button type="button" class="btn btn-primary mt-3 mb-2">Delete All Data (Demo)</button>
|
||||
</a>
|
||||
</div>
|
||||
|
||||
|
||||
{% include 'navfooter.html' %}
|
||||
</body>
|
||||
|
||||
<script>
|
||||
$(document).ready(function(){
|
||||
$("#nav-home").addClass("active");
|
||||
} );
|
||||
|
||||
////
|
||||
//http://bl.ocks.org/charlesdguthrie/11356441, updated and modified
|
||||
|
@ -137,12 +126,12 @@ var setup = function(targetID){
|
|||
|
||||
var settings = {
|
||||
margin:margin, width:width, height:height, categoryIndent:categoryIndent,
|
||||
svg:svg, x:x, y:y
|
||||
svg:svg, x:x, y:y, myid:targetID
|
||||
}
|
||||
return settings;
|
||||
}
|
||||
|
||||
var redrawChart = function(targetID, newdata) {
|
||||
var redrawChart = function(div_id, targetID, newdata) {
|
||||
|
||||
//Import settings
|
||||
var margin=targetID.margin, width=targetID.width, height=targetID.height, categoryIndent=targetID.categoryIndent,
|
||||
|
@ -171,6 +160,16 @@ var redrawChart = function(targetID, newdata) {
|
|||
.attr("class", "chartRow")
|
||||
.attr("transform", "translate(0," + height + margin.top + margin.bottom + ")");
|
||||
|
||||
if (div_id=='chart_uuid'){
|
||||
//bars
|
||||
newRow.insert("rect")
|
||||
.on("click", function (d) { window.location.href = "{{ url_for('uuid_management') }}?uuid="+d.key })
|
||||
.attr("class","bar bars")
|
||||
.attr("x", 0)
|
||||
.attr("opacity",0)
|
||||
.attr("height", y.bandwidth())
|
||||
.attr("width", function(d) { return x(d.value);})
|
||||
} else {
|
||||
//bars
|
||||
newRow.insert("rect")
|
||||
.attr("class","bar")
|
||||
|
@ -178,6 +177,7 @@ var redrawChart = function(targetID, newdata) {
|
|||
.attr("opacity",0)
|
||||
.attr("height", y.bandwidth())
|
||||
.attr("width", function(d) { return x(d.value);})
|
||||
}
|
||||
|
||||
//labels
|
||||
newRow.append("text")
|
||||
|
@ -189,6 +189,19 @@ var redrawChart = function(targetID, newdata) {
|
|||
.attr("dx","0.5em")
|
||||
.text(function(d){return d.value;});
|
||||
|
||||
if (div_id=='chart_uuid'){
|
||||
//text
|
||||
newRow.append("text")
|
||||
.on("click", function (d) { window.location.href = "{{ url_for('uuid_management') }}?uuid="+d.key })
|
||||
.attr("class","category categorys")
|
||||
.attr("text-overflow","ellipsis")
|
||||
.attr("y", y.bandwidth()/2)
|
||||
.attr("x",categoryIndent)
|
||||
.attr("opacity",0)
|
||||
.attr("dy",".35em")
|
||||
.attr("dx","5em")
|
||||
.text(function(d){return d.key});
|
||||
} else {
|
||||
//text
|
||||
newRow.append("text")
|
||||
.attr("class","category")
|
||||
|
@ -199,7 +212,7 @@ var redrawChart = function(targetID, newdata) {
|
|||
.attr("dy",".35em")
|
||||
.attr("dx","5em")
|
||||
.text(function(d){return d.key});
|
||||
|
||||
}
|
||||
|
||||
//////////
|
||||
//UPDATE//
|
||||
|
@ -251,10 +264,10 @@ var redrawChart = function(targetID, newdata) {
|
|||
.attr("transform", function(d){ return "translate(0," + y(d.key) + ")"; });
|
||||
};
|
||||
|
||||
var pullData = function(json_url,settings,callback){
|
||||
var pullData = function(div_id,json_url,settings,callback){
|
||||
d3.json(json_url, function (err, data){
|
||||
if (err) return console.warn(err);
|
||||
callback(settings,data);
|
||||
callback(div_id,settings,data);
|
||||
})
|
||||
}
|
||||
|
||||
|
@ -266,8 +279,8 @@ var formatData = function(data){
|
|||
.slice(0, 15); // limit to 15 items
|
||||
}
|
||||
|
||||
var redraw = function(json_url,settings){
|
||||
pullData(json_url,settings,redrawChart)
|
||||
var redraw = function(div_id,json_url,settings){
|
||||
pullData(div_id,json_url,settings,redrawChart)
|
||||
}
|
||||
|
||||
json_url_uuid = "{{ url_for('_json_daily_uuid_stats') }}"
|
||||
|
@ -275,17 +288,17 @@ json_url_type = "{{ url_for('_json_daily_type_stats') }}"
|
|||
|
||||
//setup
|
||||
var settings = setup('#chart_uuid');
|
||||
redraw(json_url_uuid,settings)
|
||||
redraw(json_url_uuid,settings)
|
||||
redraw('chart_uuid',json_url_uuid,settings)
|
||||
redraw('chart_uuid',json_url_uuid,settings)
|
||||
|
||||
var settings_type = setup('#chart_type');
|
||||
redraw(json_url_type,settings_type)
|
||||
redraw(json_url_type,settings_type)
|
||||
redraw('chart_type',json_url_type,settings_type)
|
||||
redraw('chart_type',json_url_type,settings_type)
|
||||
|
||||
//Interval
|
||||
setInterval(function(){
|
||||
redraw(json_url_uuid,settings)
|
||||
redraw(json_url_type,settings_type)
|
||||
redraw('chart_uuid',json_url_uuid,settings)
|
||||
redraw('chart_type',json_url_type,settings_type)
|
||||
}, 4000);
|
||||
////
|
||||
|
||||
|
|
|
@ -0,0 +1,86 @@
|
|||
<!DOCTYPE html>
|
||||
|
||||
<html>
|
||||
<head>
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no">
|
||||
<title>D4-Project</title>
|
||||
<link rel="icon" href="{{ url_for('static', filename='img/d4-logo.png')}}">
|
||||
<!-- Core CSS -->
|
||||
<link href="{{ url_for('static', filename='css/bootstrap.min.css') }}" rel="stylesheet">
|
||||
<link href="{{ url_for('static', filename='font-awesome/css/font-awesome.css') }}" rel="stylesheet">
|
||||
|
||||
<!-- JS -->
|
||||
<script src="{{ url_for('static', filename='js/jquery.js')}}"></script>
|
||||
<script src="{{ url_for('static', filename='js/bootstrap.min.js')}}"></script>
|
||||
|
||||
|
||||
<style>
|
||||
html,
|
||||
body {
|
||||
height: 100%;
|
||||
}
|
||||
|
||||
body {
|
||||
display: -ms-flexbox;
|
||||
display: flex;
|
||||
-ms-flex-align: center;
|
||||
align-items: center;
|
||||
padding-top: 40px;
|
||||
padding-bottom: 40px;
|
||||
background-color: #f5f5f5;
|
||||
}
|
||||
|
||||
.form-signin {
|
||||
width: 100%;
|
||||
max-width: 330px;
|
||||
padding: 15px;
|
||||
margin: auto;
|
||||
}
|
||||
.form-signin .checkbox {
|
||||
font-weight: 400;
|
||||
}
|
||||
.form-signin .form-control {
|
||||
position: relative;
|
||||
box-sizing: border-box;
|
||||
height: auto;
|
||||
padding: 10px;
|
||||
font-size: 16px;
|
||||
}
|
||||
.form-signin .form-control:focus {
|
||||
z-index: 2;
|
||||
}
|
||||
.form-signin input[type="email"] {
|
||||
margin-bottom: -1px;
|
||||
border-bottom-right-radius: 0;
|
||||
border-bottom-left-radius: 0;
|
||||
}
|
||||
.form-signin input[type="password"] {
|
||||
margin-bottom: 10px;
|
||||
border-top-left-radius: 0;
|
||||
border-top-right-radius: 0;
|
||||
}
|
||||
</style>
|
||||
|
||||
</head>
|
||||
|
||||
<body class="text-center">
|
||||
|
||||
|
||||
<form class="form-signin" action="{{ url_for('login')}}" method="post">
|
||||
<img class="mb-4" src="{{ url_for('static', filename='img/d4-logo.png')}}" width="300">
|
||||
<h1 class="h3 mb-3 text-secondary">Please sign in</h1>
|
||||
<input type="text" id="next_page" name="next_page" value="{{next_page}}" hidden>
|
||||
<label for="inputEmail" class="sr-only">Email address</label>
|
||||
<input type="email" id="inputEmail" name="username" class="form-control" placeholder="Email address" required autofocus>
|
||||
<label for="inputPassword" class="sr-only">Password</label>
|
||||
<input type="password" id="inputPassword" name="password" class="form-control {% if error %}is-invalid{% endif %}" placeholder="Password" required>
|
||||
{% if error %}
|
||||
<div class="invalid-feedback">
|
||||
{{error}}
|
||||
</div>
|
||||
{% endif %}
|
||||
<button class="btn btn-lg btn-primary btn-block" type="submit">Sign in</button>
|
||||
</form>
|
||||
|
||||
|
||||
</body>
|
|
@ -0,0 +1,22 @@
|
|||
<nav class="navbar navbar-expand-sm navbar-dark bg-dark">
|
||||
<a class="navbar-brand" href="{{ url_for('index') }}">
|
||||
<img src="{{ url_for('static', filename='img/d4-logo.png')}}" alt="D4 Project" style="width:80px;">
|
||||
</a>
|
||||
<ul class="navbar-nav">
|
||||
<li class="nav-item">
|
||||
<a class="nav-link mr-3" id="nav-home" href="{{ url_for('index') }}">Home <span class="sr-only">(current)</span></a>
|
||||
</li>
|
||||
<li class="nav-item" mr-3>
|
||||
<a class="nav-link mr-3" id="nav-sensor" href="{{ url_for('sensors_status') }}">Sensors Status</a>
|
||||
</li>
|
||||
<li class="nav-item mr-3">
|
||||
<a class="nav-link" id="nav-server" href="{{ url_for('server_management') }}" tabindex="-1" aria-disabled="true">Server Management</a>
|
||||
</li>
|
||||
<li class="nav-item mr-3">
|
||||
<a class="nav-link" id="nav-settings" href="{{ url_for('settings.settings_page') }}" tabindex="-1" aria-disabled="true">Settings</a>
|
||||
</li>
|
||||
<li class="nav-item mr-3">
|
||||
<a class="nav-link" href="{{ url_for('logout') }}" tabindex="-1" aria-disabled="true"><i class="fa fa-sign-out"></i>Log Out</a>
|
||||
</li>
|
||||
</ul>
|
||||
</nav>
|
|
@ -0,0 +1,96 @@
|
|||
<!DOCTYPE html>
|
||||
|
||||
<html>
|
||||
<head>
|
||||
<title>D4-Project</title>
|
||||
<link rel="icon" href="{{ url_for('static', filename='img/d4-logo.png')}}">
|
||||
<!-- Core CSS -->
|
||||
<link href="{{ url_for('static', filename='css/bootstrap.min.css') }}" rel="stylesheet">
|
||||
<link href="{{ url_for('static', filename='font-awesome/css/font-awesome.css') }}" rel="stylesheet">
|
||||
<link href="{{ url_for('static', filename='css/dataTables.bootstrap.min.css') }}" rel="stylesheet">
|
||||
|
||||
<!-- JS -->
|
||||
<script src="{{ url_for('static', filename='js/jquery.js')}}"></script>
|
||||
<script src="{{ url_for('static', filename='js/popper.min.js')}}"></script>
|
||||
<script src="{{ url_for('static', filename='js/bootstrap.min.js')}}"></script>
|
||||
<script src="{{ url_for('static', filename='js/jquery.dataTables.min.js')}}"></script>
|
||||
<script src="{{ url_for('static', filename='js/dataTables.bootstrap.min.js')}}"></script>
|
||||
|
||||
<style>
|
||||
.popover{
|
||||
max-width: 100%;
|
||||
}
|
||||
</style>
|
||||
|
||||
|
||||
</head>
|
||||
|
||||
<body>
|
||||
|
||||
{% include 'navbar.html' %}
|
||||
|
||||
<div class="d-flex justify-content-center">
|
||||
<div class="card border-dark mt-3 text-center" style="max-width: 30rem;">
|
||||
<div class="card-body text-dark">
|
||||
<h5 class="card-title">Approve New Sensor UUID</h5>
|
||||
<input class="form-control" type="text" id="uuid" value="" required>
|
||||
<button type="button" class="btn btn-outline-secondary mt-1" onclick="window.location.href ='{{ url_for('approve_sensor') }}?uuid='+$('#uuid').val();">Approve UUID</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="py-3 mx-2">
|
||||
<table class="table table-striped table-bordered table-hover text-center" id="myTable_1">
|
||||
<thead>
|
||||
<tr>
|
||||
<th class="bg-info text-white">UUID</th>
|
||||
<th class="bg-info text-white">description</th>
|
||||
<th class="bg-info text-white">mail</th>
|
||||
<th class="bg-info text-white"></th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
{% for row_uuid in all_pending %}
|
||||
<tr data-trigger="hover" title="" data-content="test content" data-original-title="test title">
|
||||
<td>
|
||||
<a class="" href="{{ url_for('uuid_management') }}?uuid={{row_uuid['uuid']}}">
|
||||
{{row_uuid['uuid']}}
|
||||
</a>
|
||||
</td>
|
||||
<td>{{row_uuid['description']}}</td>
|
||||
<td>{{row_uuid['mail']}}</td>
|
||||
<td>
|
||||
<a href="{{ url_for('approve_sensor') }}?uuid={{row_uuid['uuid']}}">
|
||||
<button type="button" class="btn btn-outline-info"><i class="fa fa-plus"></i></button>
|
||||
</a>
|
||||
<a href="{{ url_for('delete_pending_sensor') }}?uuid={{row_uuid['uuid']}}">
|
||||
<button type="button" class="btn btn-outline-danger"><i class="fa fa-trash"></i></button>
|
||||
</a>
|
||||
</td>
|
||||
</tr>
|
||||
{% endfor %}
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
|
||||
|
||||
{% include 'navfooter.html' %}
|
||||
</body>
|
||||
|
||||
<script>
|
||||
$(document).ready(function(){
|
||||
table = $('#myTable_1').DataTable(
|
||||
{
|
||||
"aLengthMenu": [[5, 10, 15, 20, -1], [5, 10, 15, 20, "All"]],
|
||||
"iDisplayLength": 10,
|
||||
"order": [[ 0, "asc" ]]
|
||||
}
|
||||
);
|
||||
$('[data-toggle="popover"]').popover({
|
||||
placement: 'top',
|
||||
container: 'body',
|
||||
html : false,
|
||||
})
|
||||
});
|
||||
|
||||
</script>
|
|
@ -0,0 +1,116 @@
|
|||
<!DOCTYPE html>
|
||||
|
||||
<html>
|
||||
<head>
|
||||
<title>D4-Project</title>
|
||||
<link rel="icon" href="{{ url_for('static', filename='img/d4-logo.png')}}">
|
||||
<!-- Core CSS -->
|
||||
<link href="{{ url_for('static', filename='css/bootstrap.min.css') }}" rel="stylesheet">
|
||||
<link href="{{ url_for('static', filename='font-awesome/css/font-awesome.css') }}" rel="stylesheet">
|
||||
<link href="{{ url_for('static', filename='css/dataTables.bootstrap.min.css') }}" rel="stylesheet">
|
||||
|
||||
<!-- JS -->
|
||||
<script src="{{ url_for('static', filename='js/jquery.js')}}"></script>
|
||||
<script src="{{ url_for('static', filename='js/popper.min.js')}}"></script>
|
||||
<script src="{{ url_for('static', filename='js/bootstrap.min.js')}}"></script>
|
||||
<script src="{{ url_for('static', filename='js/jquery.dataTables.min.js')}}"></script>
|
||||
<script src="{{ url_for('static', filename='js/dataTables.bootstrap.min.js')}}"></script>
|
||||
|
||||
<style>
|
||||
.popover{
|
||||
max-width: 100%;
|
||||
}
|
||||
</style>
|
||||
|
||||
|
||||
</head>
|
||||
|
||||
<body>
|
||||
|
||||
{% include 'navbar.html' %}
|
||||
|
||||
<div class="mx-2 py-3">
|
||||
<table class="table table-striped table-bordered table-hover text-center" id="myTable_1">
|
||||
<thead>
|
||||
<tr>
|
||||
<th class="bg-info text-white">UUID</th>
|
||||
<th class="bg-info text-white">first seen</th>
|
||||
<th class="bg-info text-white">last seen</th>
|
||||
<th class="bg-info text-white">types</th>
|
||||
<th class="bg-secondary text-white">Status</th>
|
||||
<th class="bg-secondary text-white"></th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
{% for row_uuid in all_sensors %}
|
||||
<tr data-trigger="hover" title="" data-content="test content" data-original-title="test title">
|
||||
<td>
|
||||
<a class="" href="{{ url_for('uuid_management') }}?uuid={{row_uuid['uuid']}}">
|
||||
{{row_uuid['uuid']}}
|
||||
</a>
|
||||
{% if row_uuid['description'] %}
|
||||
<div class="text-info"><small>{{row_uuid['description']}}</small></div>
|
||||
{% endif %}
|
||||
</td>
|
||||
<td>
|
||||
{% if row_uuid['first_seen'] %}
|
||||
{{row_uuid['first_seen']}}
|
||||
{% else %}
|
||||
{{'-'}}
|
||||
{% endif %}
|
||||
</td>
|
||||
<td>
  {% if row_uuid['last_seen'] %}
    {{row_uuid['last_seen']}}
  {% else %}
    {{'-'}}
  {% endif %}
</td>
|
||||
<td>
|
||||
{{type_description}}
|
||||
{% for uuid_type in row_uuid['types'] %}
|
||||
<span class="badge badge-dark">
|
||||
{{uuid_type['type']}}
|
||||
</span>
|
||||
{% endfor %}
|
||||
</td>
|
||||
<td>
  {% if not row_uuid['Error'] %}
    <div class="text-success">
      OK -
  {% else %}
    <div class="text-danger">
      <i class="fa fa-times-circle"></i> {{row_uuid['Error']}}
  {% endif %}
  {% if row_uuid['active_connection'] %}
    <i class="fa fa-check-circle"></i> Connected
  {% endif %}
  </div>
</td>
|
||||
<td>
|
||||
<a href="{{ url_for('delete_registered_sensor') }}?uuid={{row_uuid['uuid']}}">
|
||||
<button type="button" class="btn btn-outline-danger"><i class="fa fa-trash"></i></button>
|
||||
</a>
|
||||
</td>
|
||||
</tr>
|
||||
{% endfor %}
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
|
||||
|
||||
{% include 'navfooter.html' %}
|
||||
</body>
|
||||
|
||||
<script>
|
||||
$(document).ready(function(){
|
||||
$("#nav-sensor").addClass("active");
|
||||
table = $('#myTable_1').DataTable(
|
||||
{
|
||||
"aLengthMenu": [[5, 10, 15, 20, -1], [5, 10, 15, 20, "All"]],
|
||||
"iDisplayLength": 10,
|
||||
"order": [[ 0, "asc" ]]
|
||||
}
|
||||
);
|
||||
});
|
||||
|
||||
</script>
|
|
@ -0,0 +1,57 @@
|
|||
<!DOCTYPE html>

<html>
<head>
    <title>D4-Project</title>
    <link rel="icon" href="{{ url_for('static', filename='img/d4-logo.png')}}">
    <!-- Core CSS -->
    <link href="{{ url_for('static', filename='css/bootstrap.min.css') }}" rel="stylesheet">
    <link href="{{ url_for('static', filename='font-awesome/css/font-awesome.css') }}" rel="stylesheet">

    <!-- JS -->
    <script src="{{ url_for('static', filename='js/jquery.js')}}"></script>
    <script src="{{ url_for('static', filename='js/popper.min.js')}}"></script>
    <script src="{{ url_for('static', filename='js/bootstrap.min.js')}}"></script>
</head>

<body>

    {% include 'navbar.html' %}

    <form action="{{ url_for('D4_sensors.add_sensor_to_monitor_post') }}" method="post" enctype="multipart/form-data">

        <div class="d-flex justify-content-center">
            <div class="col-sm-6">
                <h4 class="my-3">Monitor a Sensor</h4>
                <div class="form-group">
                    <input class="form-control text-center bg-dark text-white" type="text" value="{{sensor_uuid}}" disabled>
                    <input type="text" name="uuid" id="uuid" value="{{sensor_uuid}}" hidden>
                </div>

                <div class="input-group mt-2 mb-2">
                    <div class="input-group-prepend">
                        <span class="input-group-text bg-light"><i class="fa fa-clock-o"></i> </span>
                    </div>
                    <input class="form-control" type="number" id="delta_time" value="3600" min="30" name="delta_time" required>
                    <div class="input-group-append">
                        <span class="input-group-text">Maximum Time (seconds) between two D4 packets</span>
                    </div>
                </div>

                <div class="form-group">
                    <button class="btn btn-primary" type="submit">Monitor Sensor</button>
                </div>
            </div>
        </div>

    </form>

    {% include 'navfooter.html' %}
</body>

<script>
    $(document).ready(function(){
        $("#nav-sensor").addClass("active");
    });
</script>
|
|
@ -7,13 +7,19 @@
|
|||
<!-- Core CSS -->
|
||||
<link href="{{ url_for('static', filename='css/bootstrap.min.css') }}" rel="stylesheet">
|
||||
<link href="{{ url_for('static', filename='font-awesome/css/font-awesome.css') }}" rel="stylesheet">
|
||||
<link href="{{ url_for('static', filename='css/dataTables.bootstrap.min.css') }}" rel="stylesheet">
|
||||
|
||||
<!-- JS -->
|
||||
<script src="{{ url_for('static', filename='js/jquery.js')}}"></script>
|
||||
<script src="{{ url_for('static', filename='js/popper.min.js')}}"></script>
|
||||
<script src="{{ url_for('static', filename='js/bootstrap.min.js')}}"></script>
|
||||
<script src="{{ url_for('static', filename='js/jquery.dataTables.min.js')}}"></script>
|
||||
<script src="{{ url_for('static', filename='js/dataTables.bootstrap.min.js')}}"></script>
|
||||
|
||||
<style>
|
||||
|
||||
.popover{
|
||||
max-width: 100%;
|
||||
}
|
||||
</style>
|
||||
|
||||
|
||||
|
@ -21,22 +27,7 @@
|
|||
|
||||
<body>
|
||||
|
||||
<nav class="navbar navbar-expand-lg navbar-dark bg-dark">
|
||||
<a class="navbar-brand" href="{{ url_for('index') }}">
|
||||
<img src="{{ url_for('static', filename='img/d4-logo.png')}}" alt="D4 Project" style="width:80px;">
|
||||
</a>
|
||||
<ul class="navbar-nav">
|
||||
<li class="nav-item">
|
||||
<a class="nav-link mr-3" href="{{ url_for('index') }}">Home <span class="sr-only">(current)</span></a>
|
||||
</li>
|
||||
<li class="nav-item" mr-3>
|
||||
<a class="nav-link active mr-3" href="{{ url_for('sensors_status') }}">Sensors Status</a>
|
||||
</li>
|
||||
<li class="nav-item mr-3">
|
||||
<a class="nav-link" href="{{ url_for('server_management') }}" tabindex="-1" aria-disabled="true">Server Management</a>
|
||||
</li>
|
||||
</ul>
|
||||
</nav>
|
||||
{% include 'navbar.html' %}
|
||||
|
||||
<div class="card mt-2 mb-2">
|
||||
<div class="card-body bg-dark text-white">
|
||||
|
@ -63,56 +54,79 @@
|
|||
</div>
|
||||
</div>
|
||||
|
||||
<div class="mx-2">
|
||||
<table class="table table-striped table-bordered table-hover text-center" id="myTable_1">
|
||||
<thead>
|
||||
<tr>
|
||||
<th class="bg-info text-white">UUID</th>
|
||||
<th class="bg-info text-white">first seen</th>
|
||||
<th class="bg-info text-white">last seen</th>
|
||||
<th class="bg-info text-white">types</th>
|
||||
<th class="bg-secondary text-white">Status</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
{% for row_uuid in status_daily_uuid %}
|
||||
<div class="card text-center mt-3 ml-2 mr-2">
|
||||
<a class="btn btn-outline-dark px-1 py-1" href="{{ url_for('uuid_management') }}?uuid={{row_uuid['uuid']}}">
|
||||
<div class="card-header bg-dark text-white">
|
||||
UUID: {{row_uuid['uuid']}}
|
||||
</div>
|
||||
<tr data-trigger="hover" title="" data-content="test content" data-original-title="test title">
|
||||
<td>
|
||||
<a class="" href="{{ url_for('uuid_management') }}?uuid={{row_uuid['uuid']}}">
|
||||
{{row_uuid['uuid']}}
|
||||
</a>
|
||||
<div class="card-body">
|
||||
<div class="card-group">
|
||||
<div class="card">
|
||||
<div class="card-header bg-info text-white">
|
||||
First Seen
|
||||
</div>
|
||||
<div class="card-body">
|
||||
<p class="card-text">{{row_uuid['first_seen_gmt']}} - ({{row_uuid['first_seen']}})</p>
|
||||
</div>
|
||||
</div>
|
||||
<div class="card">
|
||||
<div class="card-header bg-info text-white">
|
||||
Last Seen
|
||||
</div>
|
||||
<div class="card-body">
|
||||
<p class="card-text">{{row_uuid['last_seen_gmt']}} - ({{row_uuid['last_seen']}})</p>
|
||||
</div>
|
||||
</div>
|
||||
<div class="card">
|
||||
{% if not row_uuid['Error'] %}
|
||||
<div class="card-header bg-success text-white">
|
||||
Status
|
||||
</div>
|
||||
<div class="card-body text-success">
|
||||
<p class="card-text">OK</p>
|
||||
<div class="text-info"><small>{{row_uuid['description']}}</small></div>
|
||||
</td>
|
||||
<td>{{row_uuid['first_seen_gmt']}}</td>
|
||||
<td>{{row_uuid['last_seen_gmt']}}</td>
|
||||
<td>
|
||||
{{type_description}}
|
||||
{% for uuid_type in row_uuid['l_uuid_types'] %}
|
||||
{% if row_uuid['type_connection_status'][uuid_type] %}
|
||||
<span class="badge badge-success" data-toggle="popover" data-trigger="hover" title="" data-content="{{types_description[uuid_type]}}" data-original-title="{{uuid_type}}">
|
||||
{{uuid_type}}
|
||||
</span>
|
||||
{% else %}
|
||||
<div class="card-header bg-danger text-white">
|
||||
Status
|
||||
</div>
|
||||
<div class="card-body text-danger">
|
||||
<p class="card-text">{{row_uuid['Error']}}</p>
|
||||
<span class="badge badge-dark" data-toggle="popover" data-trigger="hover" title="" data-content="{{types_description[uuid_type]}}" data-original-title="{{uuid_type}}">
|
||||
{{uuid_type}}
|
||||
</span>
|
||||
{% endif %}
|
||||
{% endfor %}
|
||||
</td>
|
||||
<td>
|
||||
{% if not row_uuid['Error'] %}
|
||||
<div class="text-success">
|
||||
OK -
|
||||
{% else %}
|
||||
<div class="text-danger">
|
||||
<i class="fa fa-times-circle"></i> {{row_uuid['Error']}}
|
||||
{% endif %}
|
||||
{% if row_uuid['active_connection'] %}
|
||||
<div style="color:Green; display:inline-block">
|
||||
<i class="fa fa-check-circle"></i> Connected
|
||||
</div>
|
||||
{% endif %}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</td>
|
||||
</tr>
|
||||
{% endfor %}
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
|
||||
|
||||
{% include 'navfooter.html' %}
|
||||
</body>
|
||||
|
||||
<script>
|
||||
$(document).ready(function(){
|
||||
$("#nav-sensor").addClass("active");
|
||||
table = $('#myTable_1').DataTable(
|
||||
{
|
||||
"aLengthMenu": [[5, 10, 15, 20, -1], [5, 10, 15, 20, "All"]],
|
||||
"iDisplayLength": 10,
|
||||
"order": [[ 0, "asc" ]]
|
||||
}
|
||||
);
|
||||
$('[data-toggle="popover"]').popover({
|
||||
placement: 'top',
|
||||
container: 'body',
|
||||
html : false,
|
||||
})
|
||||
});
|
||||
|
||||
</script>
|
||||
|
|
|
@ -19,22 +19,25 @@
|
|||
|
||||
<body>
|
||||
|
||||
<nav class="navbar navbar-expand-sm navbar-dark bg-dark">
|
||||
<a class="navbar-brand" href="{{ url_for('index') }}">
|
||||
<img src="{{ url_for('static', filename='img/d4-logo.png')}}" alt="D4 Project" style="width:80px;">
|
||||
{% include 'navbar.html' %}
|
||||
|
||||
<div class="d-flex justify-content-center">
|
||||
<div class="card border-secondary mt-3 text-center" style="max-width: 30rem;">
|
||||
<div class="card-body text-dark">
|
||||
<h5 class="card-title">D4 Server mode:
|
||||
<span class="badge badge-dark">
|
||||
{{server_mode}}
|
||||
</span>
|
||||
</h5>
|
||||
<a href="{{ url_for('registered_sensor') }}">
|
||||
<button type="button" class="btn btn-info">Registered Sensors <span class="badge badge-light">{{nb_sensors_registered}}</span></button>
|
||||
</a>
|
||||
<ul class="navbar-nav">
|
||||
<li class="nav-item">
|
||||
<a class="nav-link mr-3" href="{{ url_for('index') }}">Home <span class="sr-only">(current)</span></a>
|
||||
</li>
|
||||
<li class="nav-item" mr-3>
|
||||
<a class="nav-link mr-3" href="{{ url_for('sensors_status') }}">Sensors Status</a>
|
||||
</li>
|
||||
<li class="nav-item mr-3">
|
||||
<a class="nav-link active" href="{{ url_for('server_management') }}" tabindex="-1" aria-disabled="true">Server Management</a>
|
||||
</li>
|
||||
</ul>
|
||||
</nav>
|
||||
<a href="{{ url_for('pending_sensors') }}">
|
||||
<button type="button" class="btn btn-outline-secondary">Pending Sensors <span class="badge badge-danger">{{nb_sensors_pending}}</span></button>
|
||||
</a>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="card-deck ml-0 mr-0">
|
||||
<div class="card text-center mt-3 ml-xl-4">
|
||||
|
@ -228,11 +231,12 @@
|
|||
<div class="card-body text-dark">
|
||||
|
||||
<div class="row">
|
||||
<div class="col-xl-8">
|
||||
<div class="col-xl-10">
|
||||
<table class="table table-striped table-bordered table-hover" id="myTable_1">
|
||||
<thead class="thead-dark">
|
||||
<tr>
|
||||
<th>Type</th>
|
||||
<th>Group</th>
|
||||
<th style="max-width: 800px;">uuid</th>
|
||||
<th style="max-width: 800px;">last updated</th>
|
||||
<th style="max-width: 800px;">Change max size limit</th>
|
||||
|
@ -243,11 +247,18 @@
|
|||
{% for type in list_accepted_types %}
|
||||
{% if type['list_analyzer_uuid'] %}
|
||||
{% for analyzer in type['list_analyzer_uuid'] %}
|
||||
<tr>
|
||||
<tr id="{{analyzer['uuid']}}">
|
||||
<td>{{type['id']}}</td>
|
||||
{%if analyzer['is_group_queue']%}
|
||||
<td class="text-center"><i class="fa fa-group"></i></td>
|
||||
{%else%}
|
||||
<td></td>
|
||||
{%endif%}
|
||||
<td>
|
||||
<div class="d-flex">
|
||||
<a href="{{ url_for('analyzer_queue.edit_queue_analyzer_queue') }}?queue_uuid={{analyzer['uuid']}}">
|
||||
{{analyzer['uuid']}}
|
||||
</a>
|
||||
<a href="{{ url_for('remove_analyzer') }}?redirect=1&type={{type['id']}}&analyzer_uuid={{analyzer['uuid']}}" class="ml-auto">
|
||||
<button type="button" class="btn btn-outline-danger px-2 py-0"><i class="fa fa-trash"></i></button>
|
||||
</a>
|
||||
|
@ -281,6 +292,7 @@
|
|||
<thead class="thead-dark">
|
||||
<tr>
|
||||
<th>Type Name</th>
|
||||
<th>Group</th>
|
||||
<th style="max-width: 800px;">uuid</th>
|
||||
<th style="max-width: 800px;">last updated</th>
|
||||
<th style="max-width: 800px;">Change max size limit</th>
|
||||
|
@ -288,60 +300,51 @@
|
|||
</tr>
|
||||
</thead>
|
||||
<tbody id="analyzer_accepted_extended_types_tbody">
|
||||
{% for type in list_accepted_extended_types %}
|
||||
{% if type['list_analyzer_uuid'] %}
|
||||
{% for analyzer in type['list_analyzer_uuid'] %}
|
||||
{% for dict_queue in l_queue_extended_type %}
|
||||
<tr>
|
||||
<td>{{type['name']}}</td>
|
||||
<td>{{dict_queue['extended_type']}}</td>
|
||||
{%if dict_queue['is_group_queue']%}
|
||||
<td class="text-center"><i class="fa fa-group"></i></td>
|
||||
{%else%}
|
||||
<td></td>
|
||||
{%endif%}
|
||||
<td>
|
||||
<div class="d-flex">
|
||||
{{analyzer['uuid']}}
|
||||
<a href="{{ url_for('remove_analyzer') }}?redirect=1&type=254&metatype_name={{type['name']}}&analyzer_uuid={{analyzer['uuid']}}" class="ml-auto">
|
||||
<a href="{{ url_for('analyzer_queue.edit_queue_analyzer_queue') }}?queue_uuid={{dict_queue['uuid']}}">
|
||||
{{dict_queue['uuid']}}
|
||||
</a>
|
||||
<a href="{{ url_for('remove_analyzer') }}?redirect=1&type=254&metatype_name={{dict_queue['extended_type']}}&analyzer_uuid={{dict_queue['uuid']}}" class="ml-auto">
|
||||
<button type="button" class="btn btn-outline-danger px-2 py-0"><i class="fa fa-trash"></i></button>
|
||||
</a>
|
||||
</div>
|
||||
{%if analyzer['description']%}
|
||||
<div class="text-info"><small>{{analyzer['description']}}</small></div>
|
||||
{%if dict_queue['description']%}
|
||||
<div class="text-info"><small>{{dict_queue['description']}}</small></div>
|
||||
{%endif%}
|
||||
</td>
|
||||
<td>{{analyzer['last_updated']}}</td>
|
||||
<td>{{dict_queue['last_updated']}}</td>
|
||||
<td>
|
||||
<div class="d-xl-flex justify-content-xl-center">
|
||||
<input class="form-control mr-lg-1" style="max-width: 100px;" type="number" id="max_size_analyzer_{{analyzer['uuid']}}" value="{{analyzer['size_limit']}}" min="0" required="">
|
||||
<button type="button" class="btn btn-outline-secondary" onclick="window.location.href ='{{ url_for('analyzer_change_max_size') }}?analyzer_uuid={{analyzer['uuid']}}&redirect=0&max_size_analyzer='+$('#max_size_analyzer_{{analyzer['uuid']}}').val();">Change Max Size</button>
|
||||
<input class="form-control mr-lg-1" style="max-width: 100px;" type="number" id="max_size_analyzer_{{dict_queue['uuid']}}" value="{{dict_queue['size_limit']}}" min="0" required="">
|
||||
<button type="button" class="btn btn-outline-secondary" onclick="window.location.href ='{{ url_for('analyzer_change_max_size') }}?analyzer_uuid={{dict_queue['uuid']}}&redirect=0&max_size_analyzer='+$('#max_size_analyzer_{{dict_queue['uuid']}}').val();">Change Max Size</button>
|
||||
</div>
|
||||
</td>
|
||||
<td>
|
||||
<a href="{{ url_for('empty_analyzer_queue') }}?redirect=1&type=254&metatype_name={{type['name']}}&analyzer_uuid={{analyzer['uuid']}}">
|
||||
<a href="{{ url_for('empty_analyzer_queue') }}?redirect=1&type=254&metatype_name={{dict_queue['extended_type']}}&analyzer_uuid={{dict_queue['uuid']}}">
|
||||
<button type="button" class="btn btn-outline-danger"><i class="fa fa-eraser"></i></button>
|
||||
</a>
|
||||
<button type="button" class="btn btn-outline-info ml-xl-3" onclick="get_analyser_sample('{{type['name']}}', '{{analyzer['uuid']}}');"><i class="fa fa-database"></i> {{analyzer['length']}}</button>
|
||||
<button type="button" class="btn btn-outline-info ml-xl-3" onclick="get_analyser_sample('{{dict_queue['extended_type']}}', '{{dict_queue['uuid']}}');"><i class="fa fa-database"></i> {{dict_queue['length']}}</button>
|
||||
</td>
|
||||
</tr>
|
||||
{% endfor %}
|
||||
{% endif %}
|
||||
{% endfor %}
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
|
||||
</div>
|
||||
<div class="col-xl-4">
|
||||
<div class="card border-dark mt-3" style="max-width: 18rem;">
|
||||
<div class="card-body text-dark">
|
||||
<h5 class="card-title">Add New Analyzer Queue</h5>
|
||||
<input class="form-control" type="number" id="analyzer_type" value="1" min="1" max="254" required>
|
||||
<input class="form-control" type="text" id="analyzer_metatype_name" placeholder="Meta Type Name">
|
||||
<div class="input-group">
|
||||
<div class="input-group-prepend">
|
||||
<button class="btn btn-outline-secondary" type="button" onclick="generate_new_uuid();"><i class="fa fa-random"></i></button>
|
||||
</div>
|
||||
<input class="form-control" type="text" id="analyzer_uuid" required placeholder="Analyzer uuid">
|
||||
</div>
|
||||
<input class="form-control" type="text" id="analyzer_description" required placeholder="Optional Description">
|
||||
<button type="button" class="btn btn-outline-primary mt-1" onclick="window.location.href ='{{ url_for('add_new_analyzer') }}?redirect=1&type='+$('#analyzer_type').val()+'&analyzer_uuid='+$('#analyzer_uuid').val()+'&metatype_name='+$('#analyzer_metatype_name').val()+'&analyzer_description='+$('#analyzer_description').val();">Add New Analyzer</button>
|
||||
</div>
|
||||
</div>
|
||||
<div class="col-xl-2">
|
||||
<a href="{{ url_for('analyzer_queue.create_analyzer_queue') }}" class="ml-auto">
|
||||
<button type="button" class="btn btn-primary"><i class="fa fa-plus"></i> Add New Analyzer Queue</button>
|
||||
</a>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
|
@ -384,6 +387,7 @@ var table
|
|||
$(document).ready(function(){
|
||||
$('#extended_type_name').hide()
|
||||
$('#analyzer_metatype_name').hide()
|
||||
$("#nav-server").addClass("active");
|
||||
|
||||
table = $('#myTable_').DataTable(
|
||||
{
|
||||
|
@ -426,7 +430,7 @@ if (tbody.children().length == 0) {
|
|||
}
|
||||
|
||||
$('#accepted_type').on('input', function() {
|
||||
if ($('#accepted_type').val() == 254){
|
||||
if ($('#analyzer_type').val() == 2 || $('#accepted_type').val() == 254){
|
||||
$('#extended_type_name').show()
|
||||
} else {
|
||||
$('#extended_type_name').hide()
|
||||
|
@ -434,7 +438,7 @@ $('#accepted_type').on('input', function() {
|
|||
});
|
||||
|
||||
$('#analyzer_type').on('input', function() {
|
||||
if ($('#analyzer_type').val() == 254){
|
||||
if ($('#analyzer_type').val() == 2 || $('#analyzer_type').val() == 254){
|
||||
$('#analyzer_metatype_name').show()
|
||||
} else {
|
||||
$('#analyzer_metatype_name').hide()
|
||||
|
|
|
@ -0,0 +1,47 @@
|
|||
<div class="col-12 col-lg-2 p-0 bg-light border-right" id="side_menu">
|
||||
|
||||
<button type="button" class="btn btn-outline-secondary mt-1 ml-3" onclick="toggle_sidebar();">
|
||||
<i class="fa fa-align-left"></i>
|
||||
<span>Toggle Sidebar</span>
|
||||
</button>
|
||||
|
||||
<nav class="navbar navbar-expand navbar-light bg-light flex-md-column flex-row align-items-start py-2" id="nav_menu">
|
||||
<h5 class="d-flex text-muted w-100 py-2" id="nav_my_profile">
|
||||
<span>My Profile</span>
|
||||
</h5>
|
||||
<ul class="nav flex-md-column flex-row navbar-nav justify-content-between w-100"> <!--nav-pills-->
|
||||
<li class="nav-item">
|
||||
<a class="nav-link" href="{{url_for('settings.edit_profile')}}" id="nav_edit_profile">
|
||||
<i class="fa fa-user"></i>
|
||||
<span>My Profile</span>
|
||||
</a>
|
||||
</li>
|
||||
<li class="nav-item">
|
||||
<a class="nav-link" href="{{url_for('change_password')}}" id="nav_dashboard">
|
||||
<i class="fa fa-key"></i>
|
||||
<span>Change Password</span>
|
||||
</a>
|
||||
</li>
|
||||
</ul>
|
||||
{% if admin_level %}
|
||||
<h5 class="d-flex text-muted w-100 py-2" id="nav_user_management">
|
||||
<span>User Management</span>
|
||||
</h5>
|
||||
<ul class="nav flex-md-column flex-row navbar-nav justify-content-between w-100"> <!--nav-pills-->
|
||||
<li class="nav-item">
|
||||
<a class="nav-link" href="{{url_for('settings.create_user')}}" id="nav_create_user">
|
||||
<i class="fa fa-user-plus"></i>
|
||||
<span>Create User</span>
|
||||
</a>
|
||||
</li>
|
||||
<li class="nav-item">
|
||||
<a class="nav-link" href="{{url_for('settings.users_list')}}" id="nav_users_list">
|
||||
<i class="fa fa-users"></i>
|
||||
<span>Users List</span>
|
||||
</a>
|
||||
</li>
|
||||
</ul>
|
||||
{% endif %}
|
||||
|
||||
</nav>
|
||||
</div>
|
|
@ -0,0 +1,125 @@
|
|||
<!DOCTYPE html>
|
||||
<html>
|
||||
|
||||
<head>
|
||||
<title>D4-Project</title>
|
||||
<link rel="icon" href="{{ url_for('static', filename='img/d4-logo.png')}}">
|
||||
|
||||
<!-- Core CSS -->
|
||||
<link href="{{ url_for('static', filename='css/bootstrap.min.css') }}" rel="stylesheet">
|
||||
<link href="{{ url_for('static', filename='font-awesome/css/font-awesome.css') }}" rel="stylesheet">
|
||||
<link href="{{ url_for('static', filename='css/dataTables.bootstrap.min.css') }}" rel="stylesheet">
|
||||
|
||||
<!-- JS -->
|
||||
<script src="{{ url_for('static', filename='js/jquery.js')}}"></script>
|
||||
<script src="{{ url_for('static', filename='js/popper.min.js')}}"></script>
|
||||
<script src="{{ url_for('static', filename='js/bootstrap.min.js')}}"></script>
|
||||
<script src="{{ url_for('static', filename='js/jquery.dataTables.min.js')}}"></script>
|
||||
<script src="{{ url_for('static', filename='js/dataTables.bootstrap.min.js')}}"></script>
|
||||
|
||||
<style>
|
||||
.edit_icon:hover{
|
||||
cursor: pointer;
|
||||
color: #17a2b8;
|
||||
}
|
||||
.trash_icon:hover{
|
||||
cursor: pointer;
|
||||
color: #c82333;
|
||||
}
|
||||
</style>
|
||||
|
||||
</head>
|
||||
<body>
|
||||
|
||||
{% include 'navbar.html' %}
|
||||
|
||||
<div class="container-fluid">
|
||||
<div class="row">
|
||||
|
||||
{% include 'sidebar_settings.html' %}
|
||||
|
||||
<div class="col-12 col-lg-10" id="core_content">
|
||||
|
||||
{% if new_user %}
|
||||
<div class="text-center my-3 ">
|
||||
<div class="card">
|
||||
<div class="card-header">
|
||||
{% if new_user['edited']=='True' %}
|
||||
<h5 class="card-title">User Edited</h5>
|
||||
{% else %}
|
||||
<h5 class="card-title">User Created</h5>
|
||||
{% endif %}
|
||||
</div>
|
||||
<div class="card-body">
|
||||
<p>User: {{new_user['email']}}</p>
|
||||
<p>Password: {{new_user['password']}}</p>
|
||||
<a href="{{url_for('settings.users_list')}}" class="btn btn-primary"><i class="fa fa-eye-slash"></i> Hide</a>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
<div class="table-responsive mt-1 table-hover table-borderless table-striped">
|
||||
<table class="table">
|
||||
<thead class="thead-dark">
|
||||
<tr>
|
||||
<th>Email</th>
|
||||
<th>Role</th>
|
||||
<th>Api Key</th>
|
||||
<th>Actions</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody id="tbody_last_crawled">
|
||||
{% for user in all_users %}
|
||||
<tr>
|
||||
<td>{{user['email']}}</td>
|
||||
<td>{{user['role']}}</td>
|
||||
<td>
|
||||
{{user['api_key']}}
|
||||
<a class="ml-3" href="{{url_for('settings.new_token_user')}}?user_id={{user['email']}}"><i class="fa fa-random"></i></a>
|
||||
</td>
|
||||
<td>
|
||||
<a href="{{ url_for('settings.edit_user')}}?user_id={{user['email']}}">
|
||||
<i class="fa fa-pencil edit_icon"></i>
|
||||
</a>
|
||||
<a href="{{ url_for('settings.delete_user')}}?user_id={{user['email']}}" class="ml-4">
|
||||
<i class="fa fa-trash trash_icon"></i>
|
||||
</a>
|
||||
</td>
|
||||
</tr>
|
||||
{% endfor %}
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{% include 'navfooter.html' %}
|
||||
|
||||
</body>
|
||||
|
||||
<script>
|
||||
$(document).ready(function(){
|
||||
$("#nav-settings").addClass("active");
|
||||
$("#nav_users_list").addClass("active");
|
||||
$("#nav_user_management").removeClass("text-muted");
|
||||
} );
|
||||
|
||||
function toggle_sidebar(){
|
||||
if($('#nav_menu').is(':visible')){
|
||||
$('#nav_menu').hide();
|
||||
$('#side_menu').removeClass('border-right')
|
||||
$('#side_menu').removeClass('col-lg-2')
|
||||
$('#core_content').removeClass('col-lg-10')
|
||||
}else{
|
||||
$('#nav_menu').show();
|
||||
$('#side_menu').addClass('border-right')
|
||||
$('#side_menu').addClass('col-lg-2')
|
||||
$('#core_content').addClass('col-lg-10')
|
||||
}
|
||||
}
|
||||
</script>
|
||||
|
||||
</html>
|
|
@ -17,39 +17,49 @@
|
|||
<script src="{{ url_for('static', filename='js/dataTables.bootstrap.min.js')}}"></script>
|
||||
<script src="{{ url_for('static', filename='js/d3v5.min.js')}}"></script>
|
||||
|
||||
<style>
|
||||
.edit_icon:hover{
|
||||
cursor: pointer;
|
||||
color: #17a2b8;
|
||||
}
|
||||
</style>
|
||||
|
||||
</head>
|
||||
|
||||
<body>
|
||||
|
||||
<nav class="navbar navbar-expand-sm navbar-dark bg-dark">
|
||||
<a class="navbar-brand" href="{{ url_for('index') }}">
|
||||
<img src="{{ url_for('static', filename='img/d4-logo.png')}}" alt="D4 Project" style="width:80px;">
|
||||
</a>
|
||||
<ul class="navbar-nav">
|
||||
<li class="nav-item">
|
||||
<a class="nav-link mr-3" href="{{ url_for('index') }}">Home <span class="sr-only">(current)</span></a>
|
||||
</li>
|
||||
<li class="nav-item" mr-3>
|
||||
<a class="nav-link mr-3" href="{{ url_for('sensors_status') }}">Sensors Status</a>
|
||||
</li>
|
||||
<li class="nav-item mr-3">
|
||||
<a class="nav-link" href="{{ url_for('server_management') }}" tabindex="-1" aria-disabled="true">Server Management</a>
|
||||
</li>
|
||||
</ul>
|
||||
</nav>
|
||||
{% include 'navbar.html' %}
|
||||
|
||||
<div class="card text-center mt-3 ml-2 mr-2">
|
||||
<div class="card-header bg-dark text-white">
|
||||
UUID: {{uuid_sensor}}
|
||||
</div>
|
||||
<div class="card-body">
|
||||
<div class="mb-2">
|
||||
<span id="description-text-block">
|
||||
<span id="description-text">{{data_uuid['description']}}</span>
|
||||
<span onclick="show_edit_description();">
|
||||
<i class="fa fa-pencil edit_icon"></i>
|
||||
</span>
|
||||
</span>
|
||||
<span id="description-edit-block" hidden>
|
||||
<div class="input-group">
|
||||
<input class="form-control" type="text" id="input-description" value="{{data_uuid['description']}}"></input>
|
||||
<div class="input-group-append">
|
||||
<button class="btn btn-info" onclick="edit_description();">
|
||||
<i class="fa fa-pencil edit_icon"></i>
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
</span>
|
||||
</div>
|
||||
<div class="card-group">
|
||||
<div class="card">
|
||||
<div class="card-header bg-info text-white">
|
||||
First Seen
|
||||
</div>
|
||||
<div class="card-body">
|
||||
<p class="card-text">{{data_uuid['first_seen_gmt']}} - ({{data_uuid['first_seen']}})</p>
|
||||
<p class="card-text">{{data_uuid['first_seen_gmt']}}</p>
|
||||
</div>
|
||||
</div>
|
||||
<div class="card">
|
||||
|
@ -57,7 +67,7 @@
|
|||
Last Seen
|
||||
</div>
|
||||
<div class="card-body">
|
||||
<p class="card-text">{{data_uuid['last_seen_gmt']}} - ({{data_uuid['last_seen']}})</p>
|
||||
<p class="card-text">{{data_uuid['last_seen_gmt']}}</p>
|
||||
</div>
|
||||
</div>
|
||||
<div class="card">
|
||||
|
@ -91,6 +101,18 @@
|
|||
</div>
|
||||
</div>
|
||||
|
||||
<div class="d-flex justify-content-center mt-2">
|
||||
{% if not data_uuid.get('is_monitored', False) %}
|
||||
<a href="{{ url_for('D4_sensors.add_sensor_to_monitor') }}?uuid={{uuid_sensor}}">
|
||||
<button type="button" class="btn btn-primary">Monitor Sensor</button>
|
||||
</a>
|
||||
{% else %}
|
||||
<a href="{{ url_for('D4_sensors.delete_sensor_to_monitor') }}?uuid={{uuid_sensor}}">
|
||||
<button type="button" class="btn btn-danger">Remove Sensor from monitoring</button>
|
||||
</a>
|
||||
{% endif %}
|
||||
</div>
|
||||
|
||||
<div class="card-deck justify-content-center ml-0 mr-0">
|
||||
<div class="card border-dark mt-3" style="max-width: 18rem;">
|
||||
<div class="card-body text-dark">
|
||||
|
@ -147,7 +169,7 @@
|
|||
Types Used:
|
||||
</div>
|
||||
<div class="row ml-0 mr-0">
|
||||
<div class="col-lg-4">
|
||||
<div class="col-xl-4">
|
||||
<div class="mt-2">
|
||||
<table class="table table-striped table-bordered table-hover" id="myTable_1">
|
||||
<thead class="thead-dark">
|
||||
|
@ -169,7 +191,7 @@
|
|||
</table>
|
||||
</div>
|
||||
</div>
|
||||
<div class="col-lg-8">
|
||||
<div class="col-xl-8">
|
||||
<div id="barchart_type">
|
||||
</div>
|
||||
</div>
|
||||
|
@ -177,6 +199,47 @@
|
|||
</div>
|
||||
</div>
|
||||
|
||||
<div>
|
||||
<div class="card text-center mt-3 mx-3">
|
||||
<div class="card-header bg-dark text-white">
|
||||
Data Saved:
|
||||
</div>
|
||||
<div class="row ml-0 mr-0">
|
||||
<div class="col-xl-4">
|
||||
<div class="mt-2">
|
||||
<table class="table table-striped table-bordered table-hover" id="myTable_2">
|
||||
<thead class="thead-dark">
|
||||
<tr>
|
||||
<th>Type</th>
|
||||
<th style="max-width: 800px;">Size (Kb)</th>
|
||||
<th style="max-width: 800px;">Nb Files</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
{% for type_stats in disk_stats %}
|
||||
<tr>
|
||||
<td>{{type_stats}}</td>
|
||||
<td>{{disk_stats[type_stats]['total_size']}}</td>
|
||||
<td>{{disk_stats[type_stats]['nb_files']}}</td>
|
||||
</tr>
|
||||
{% endfor %}
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
</div>
|
||||
<div class="col-xl-8">
|
||||
<input value="nb_files" id="type_stats_disk" hidden></input>
|
||||
<h4 id="stats_disk_title">Number of files saved on disk:</h4>
|
||||
<div id="barchart_type_disk">
|
||||
</div>
|
||||
<button type="button" id="stats_disk_btn" class="btn btn-outline-secondary mt-1" onclick="swap_stats_type();">
|
||||
Show Size Chart
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="row ml-0 mr-0">
|
||||
<div class="col-lg-6">
|
||||
<div class="card text-center mt-3">
|
||||
|
@ -209,6 +272,9 @@
|
|||
<script>
|
||||
var chart = {};
|
||||
$(document).ready(function(){
|
||||
$('#description-edit-block').hide();
|
||||
$('#description-edit-block').removeAttr("hidden")
|
||||
|
||||
table = $('#myTable_1').DataTable(
|
||||
{
|
||||
"aLengthMenu": [[5, 10, 15, 20, -1], [5, 10, 15, 20, "All"]],
|
||||
|
@ -216,7 +282,15 @@ $(document).ready(function(){
|
|||
"order": [[ 0, "asc" ]]
|
||||
}
|
||||
);
|
||||
chart.stackBarChart =barchart_type_stack("{{ url_for('get_uuid_type_history_json') }}?uuid_sensor={{uuid_sensor}}", 'id');
|
||||
table = $('#myTable_2').DataTable(
|
||||
{
|
||||
"aLengthMenu": [[5, 10, 15, 20, -1], [5, 10, 15, 20, "All"]],
|
||||
"iDisplayLength": 10,
|
||||
"order": [[ 0, "asc" ]]
|
||||
}
|
||||
);
|
||||
chart.stackBarChart1 =barchart_type_stack("{{ url_for('get_uuid_type_history_json') }}?uuid_sensor={{uuid_sensor}}", '#barchart_type');
|
||||
chart.stackBarChart2 =barchart_type_stack("{{ url_for('get_uuid_stats_history_json') }}?uuid_sensor={{uuid_sensor}}", '#barchart_type_disk');
|
||||
|
||||
chart.onResize();
|
||||
$(window).on("resize", function() {
|
||||
|
@ -231,14 +305,53 @@ $(document).ready(function(){
|
|||
|
||||
});
|
||||
|
||||
function get_whois_data(ip){
|
||||
function get_whois_data(ip){
|
||||
|
||||
$.getJSON( "{{url_for('whois_data')}}?ip="+ip, function( data ) {
|
||||
$( "#whois_data" ).removeClass( "d-none" );
|
||||
$( "#whois_output" ).text(data);
|
||||
|
||||
});
|
||||
}
|
||||
|
||||
function swap_stats_type(){
|
||||
var stats_value = $('#type_stats_disk').val();
|
||||
if(stats_value==='nb_files'){
|
||||
$('#type_stats_disk').val('total_size');
|
||||
$('#stats_disk_title').text('Size of files saved on disk:');
|
||||
$('#stats_disk_btn').text('Show # Files Chart');
|
||||
stats_value = 'total_size';
|
||||
} else {
|
||||
$('#type_stats_disk').val('nb_files');
|
||||
$('#stats_disk_title').text('Number of files saved on disk:');
|
||||
$('#stats_disk_btn').text('Show Size Chart');
|
||||
stats_value = 'nb_files';
|
||||
}
|
||||
|
||||
$('#barchart_type_disk').children().remove();
|
||||
url_json_stats = "{{ url_for('get_uuid_stats_history_json') }}?uuid_sensor={{uuid_sensor}}&stats=" + stats_value;
|
||||
chart.stackBarChart2 =barchart_type_stack(url_json_stats, '#barchart_type_disk');
|
||||
chart.onResize();
|
||||
}
|
||||
|
||||
function show_edit_description(){
|
||||
$('#description-text-block').hide();
|
||||
$('#description-edit-block').show();
|
||||
}
|
||||
|
||||
function edit_description(){
|
||||
var new_description = $('#input-description').val()
|
||||
var data_to_send = { uuid: "{{uuid_sensor}}", "description": new_description}
|
||||
|
||||
$.get("{{ url_for('uuid_change_description') }}", data_to_send, function(data, status){
|
||||
if(status == "success") {
|
||||
$('#description-text').text(new_description)
|
||||
$('#description-edit-block').hide();
|
||||
$('#description-text-block').show();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
</script>
|
||||
|
||||
<script>
|
||||
|
@ -246,17 +359,20 @@ var margin = {top: 20, right: 90, bottom: 55, left: 0},
|
|||
width = parseInt(d3.select('#barchart_type').style('width'), 10);
|
||||
width = 1000 - margin.left - margin.right,
|
||||
height = 500 - margin.top - margin.bottom;
|
||||
var x = d3.scaleBand().rangeRound([0, width]).padding(0.1);
|
||||
|
||||
var y = d3.scaleLinear().rangeRound([height, 0]);
|
||||
function barchart_type_stack(url, id) {
|
||||
|
||||
var xAxis = d3.axisBottom(x);
|
||||
var x = d3.scaleBand().rangeRound([0, width]).padding(0.1);
|
||||
|
||||
var yAxis = d3.axisLeft(y);
|
||||
var y = d3.scaleLinear().rangeRound([height, 0]);
|
||||
|
||||
var color = d3.scaleOrdinal(d3.schemeSet3);
|
||||
var xAxis = d3.axisBottom(x);
|
||||
|
||||
var svg = d3.select("#barchart_type").append("svg")
|
||||
var yAxis = d3.axisLeft(y);
|
||||
|
||||
var color = d3.scaleOrdinal(d3.schemeSet3);
|
||||
|
||||
var svg = d3.select(id).append("svg")
|
||||
.attr("id", "thesvg")
|
||||
.attr("viewBox", "0 0 "+width+" 500")
|
||||
.attr("width", width + margin.left + margin.right)
|
||||
|
@ -264,9 +380,6 @@ var svg = d3.select("#barchart_type").append("svg")
|
|||
.append("g")
|
||||
.attr("transform", "translate(" + margin.left + "," + margin.top + ")");
|
||||
|
||||
|
||||
function barchart_type_stack(url, id) {
|
||||
|
||||
d3.json(url)
|
||||
.then(function(data){
|
||||
|
||||
|
@ -346,9 +459,7 @@ function barchart_type_stack(url, id) {
|
|||
drawLegend(varNames);
|
||||
});
|
||||
|
||||
}
|
||||
|
||||
function drawLegend (varNames) {
|
||||
function drawLegend (varNames) {
|
||||
var legend = svg.selectAll(".legend")
|
||||
.data(varNames.slice().reverse())
|
||||
.enter().append("g")
|
||||
|
@ -369,15 +480,9 @@ function drawLegend (varNames) {
|
|||
.attr("dy", ".35em")
|
||||
.style("text-anchor", "end")
|
||||
.text(function (d) { return d; });
|
||||
}
|
||||
|
||||
function removePopovers () {
|
||||
$('.popover').each(function() {
|
||||
$(this).remove();
|
||||
});
|
||||
}
|
||||
|
||||
function showPopover (d) {
|
||||
function showPopover (d) {
|
||||
$(this).popover({
|
||||
title: d.name,
|
||||
placement: 'top',
|
||||
|
@ -389,15 +494,28 @@ function showPopover (d) {
|
|||
"<br/>num: " + d3.format(",")(d.value ? d.value: d.y1 - d.y0); }
|
||||
});
|
||||
$(this).popover('show')
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
chart.onResize = function () {
|
||||
var aspect = width / height, chart = $("#thesvg");
|
||||
function removePopovers () {
|
||||
$('.popover').each(function() {
|
||||
$(this).remove();
|
||||
});
|
||||
}
|
||||
|
||||
function resize_chart_by_id(id_chart) {
|
||||
var aspect = width / height, chart = $(id_chart).children();
|
||||
var targetWidth = chart.parent().width();
|
||||
chart.attr("width", targetWidth);
|
||||
chart.attr("height", targetWidth / 2);
|
||||
}
|
||||
|
||||
chart.onResize = function () {
|
||||
resize_chart_by_id("#barchart_type");
|
||||
resize_chart_by_id("#barchart_type_disk");
|
||||
}
|
||||
|
||||
window.chart = chart;
|
||||
|
||||
</script>
|
||||
|
|
|
@ -47,8 +47,8 @@ mv temp/bootstrap-${BOOTSTRAP_VERSION}-dist/js/bootstrap.min.js ./static/js/
|
|||
mv temp/bootstrap-${BOOTSTRAP_VERSION}-dist/css/bootstrap.min.css ./static/css/
|
||||
mv temp/bootstrap-${BOOTSTRAP_VERSION}-dist/css/bootstrap.min.css.map ./static/css/
|
||||
|
||||
mv temp/popper.js-1.14.3/dist/umd/popper.min.js ./static/js/
|
||||
mv temp/popper.js-1.14.3/dist/umd/popper.min.js.map ./static/js/
|
||||
mv temp/floating-ui-1.14.3/dist/umd/popper.min.js ./static/js/
|
||||
mv temp/floating-ui-1.14.3/dist/umd/popper.min.js.map ./static/js/
|
||||
|
||||
mv temp/Font-Awesome-${FONT_AWESOME_VERSION} temp/font-awesome
|
||||
|
||||
|
|
|
@ -10,6 +10,9 @@ import datetime
|
|||
|
||||
import signal
|
||||
|
||||
sys.path.append(os.path.join(os.environ['D4_HOME'], 'lib/'))
|
||||
import ConfigLoader
|
||||
|
||||
class GracefulKiller:
|
||||
kill_now = False
|
||||
def __init__(self):
|
||||
|
@ -45,27 +48,12 @@ def compress_file(file_full_path, session_uuid,i=0):
|
|||
analyser_queue_max_size = analyzer_list_max_default_size
|
||||
redis_server_analyzer.ltrim('analyzer:{}:{}'.format(type, analyzer_uuid), 0, analyser_queue_max_size)
|
||||
|
||||
|
||||
host_redis_stream = "localhost"
|
||||
port_redis_stream = 6379
|
||||
|
||||
host_redis_metadata = "localhost"
|
||||
port_redis_metadata = 6380
|
||||
|
||||
redis_server_stream = redis.StrictRedis(
|
||||
host=host_redis_stream,
|
||||
port=port_redis_stream,
|
||||
db=0)
|
||||
|
||||
redis_server_metadata = redis.StrictRedis(
|
||||
host=host_redis_metadata,
|
||||
port=port_redis_metadata,
|
||||
db=0)
|
||||
|
||||
redis_server_analyzer = redis.StrictRedis(
|
||||
host=host_redis_metadata,
|
||||
port=port_redis_metadata,
|
||||
db=2)
|
||||
### Config ###
|
||||
config_loader = ConfigLoader.ConfigLoader()
|
||||
redis_server_stream = config_loader.get_redis_conn("Redis_STREAM", decode_responses=False)
|
||||
redis_server_metadata = config_loader.get_redis_conn("Redis_METADATA", decode_responses=False)
|
||||
redis_server_analyzer = config_loader.get_redis_conn("Redis_ANALYZER", decode_responses=False)
|
||||
config_loader = None
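A recurring change in these workers is the removal of hard-coded `redis.StrictRedis(...)` blocks and ad-hoc `configparser` reads in favour of the shared `ConfigLoader` in `lib/`. As a rough orientation, a minimal helper exposing the calls used above could look like the sketch below; only the module path, the method names (`get_redis_conn`, `get_config_str`, `get_config_boolean`) and the `decode_responses` flag are taken from this diff, while the `configs/server.conf` layout (one section per Redis instance with `host`, `port` and `db` keys) is an assumption.

```python
# Hypothetical sketch of lib/ConfigLoader.py -- not the project's actual code.
# Assumed: configs/server.conf has sections such as [Redis_STREAM] with
# host/port/db keys, plus plain sections like [Save_Directories].
import os
import configparser
import redis

class ConfigLoader:
    def __init__(self):
        config_file = os.path.join(os.environ['D4_HOME'], 'configs/server.conf')
        self.config_server = configparser.ConfigParser()
        self.config_server.read(config_file)

    def get_redis_conn(self, redis_name, decode_responses=True):
        # build a connection from the matching [Redis_*] section
        return redis.StrictRedis(host=self.config_server.get(redis_name, 'host'),
                                 port=self.config_server.getint(redis_name, 'port'),
                                 db=self.config_server.getint(redis_name, 'db'),
                                 decode_responses=decode_responses)

    def get_config_str(self, section, key):
        return self.config_server.get(section, key)

    def get_config_boolean(self, section, key):
        return self.config_server.getboolean(section, key)
```

Setting `config_loader = None` after the connections are created, as the workers do, simply drops the loader once it is no longer needed.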
|
||||
|
||||
type = 1
|
||||
sleep_time = 300
|
||||
|
@ -96,10 +84,10 @@ if __name__ == "__main__":
|
|||
|
||||
new_date = datetime.datetime.now().strftime("%Y%m%d")
|
||||
|
||||
# get all directory files
|
||||
all_files = os.listdir(worker_data_directory)
|
||||
not_compressed_file = []
|
||||
# filter: get all not compressed files
|
||||
if os.path.isdir(worker_data_directory):
|
||||
all_files = os.listdir(worker_data_directory)
|
||||
for file in all_files:
|
||||
if file.endswith('.cap'):
|
||||
not_compressed_file.append(os.path.join(worker_data_directory, file))
|
||||
|
|
|
@ -8,7 +8,10 @@ import redis
|
|||
import shutil
|
||||
import datetime
|
||||
import subprocess
|
||||
import configparser
|
||||
|
||||
sys.path.append(os.path.join(os.environ['D4_HOME'], 'lib/'))
|
||||
import ConfigLoader
|
||||
import Analyzer_Queue
|
||||
|
||||
def data_incorrect_format(stream_name, session_uuid, uuid):
|
||||
redis_server_stream.sadd('Error:IncorrectType', session_uuid)
|
||||
|
@ -37,49 +40,21 @@ def compress_file(file_full_path, i=0):
|
|||
shutil.copyfileobj(f_in, f_out)
|
||||
os.remove(file_full_path)
|
||||
# save full path in analyzer queue
|
||||
for analyzer_uuid in redis_server_metadata.smembers('analyzer:{}'.format(type)):
|
||||
analyzer_uuid = analyzer_uuid.decode()
|
||||
redis_server_analyzer.lpush('analyzer:{}:{}'.format(type, analyzer_uuid), compressed_filename)
|
||||
redis_server_metadata.hset('analyzer:{}'.format(analyzer_uuid), 'last_updated', time.time())
|
||||
analyser_queue_max_size = redis_server_metadata.hget('analyzer:{}'.format(analyzer_uuid), 'max_size')
|
||||
if analyser_queue_max_size is None:
|
||||
analyser_queue_max_size = analyzer_list_max_default_size
|
||||
redis_server_analyzer.ltrim('analyzer:{}:{}'.format(type, analyzer_uuid), 0, analyser_queue_max_size)
|
||||
Analyzer_Queue.add_data_to_queue(uuid, type, compressed_filename)
|
||||
|
||||
host_redis_stream = "localhost"
|
||||
port_redis_stream = 6379
|
||||
|
||||
host_redis_metadata = "localhost"
|
||||
port_redis_metadata = 6380
|
||||
|
||||
redis_server_stream = redis.StrictRedis(
|
||||
host=host_redis_stream,
|
||||
port=port_redis_stream,
|
||||
db=0)
|
||||
|
||||
redis_server_metadata = redis.StrictRedis(
|
||||
host=host_redis_metadata,
|
||||
port=port_redis_metadata,
|
||||
db=0)
|
||||
|
||||
redis_server_analyzer = redis.StrictRedis(
|
||||
host=host_redis_metadata,
|
||||
port=port_redis_metadata,
|
||||
db=2)
|
||||
|
||||
# get file config
|
||||
config_file_server = os.path.join(os.environ['D4_HOME'], 'configs/server.conf')
|
||||
config_server = configparser.ConfigParser()
|
||||
config_server.read(config_file_server)
|
||||
config_loader = ConfigLoader.ConfigLoader()
|
||||
redis_server_stream = config_loader.get_redis_conn("Redis_STREAM", decode_responses=False)
|
||||
redis_server_metadata = config_loader.get_redis_conn("Redis_METADATA", decode_responses=False)
|
||||
redis_server_analyzer = config_loader.get_redis_conn("Redis_ANALYZER", decode_responses=False)
|
||||
|
||||
# get data directory
|
||||
use_default_save_directory = config_server['Save_Directories'].getboolean('use_default_save_directory')
|
||||
use_default_save_directory = config_loader.get_config_boolean("Save_Directories", "use_default_save_directory")
|
||||
# check if field is None
|
||||
if use_default_save_directory:
|
||||
data_directory = os.path.join(os.environ['D4_HOME'], 'data')
|
||||
else:
|
||||
data_directory = config_server['Save_Directories'].get('save_directory')
|
||||
|
||||
data_directory = config_loader.get_config_str("Save_Directories", "save_directory")
|
||||
config_loader = None
|
||||
|
||||
type = 1
|
||||
tcp_dump_cycle = '300'
|
||||
|
@ -110,8 +85,8 @@ if __name__ == "__main__":
|
|||
os.makedirs(rel_path)
|
||||
print('---- worker launched, uuid={} session_uuid={} epoch={}'.format(uuid, session_uuid, time.time()))
|
||||
else:
|
||||
print('Incorrect Stream, Closing worker: type={} session_uuid={}'.format(type, session_uuid))
|
||||
sys.exit(1)
|
||||
print('Incorrect message')
|
||||
redis_server_stream.sadd('working_session_uuid:{}'.format(type), session_uuid)
|
||||
|
||||
#LAUNCH a tcpdump
|
||||
|
@ -174,8 +149,8 @@ if __name__ == "__main__":
|
|||
except subprocess.TimeoutExpired:
|
||||
process_compressor.kill()
|
||||
### compress all files ###
|
||||
date = datetime.datetime.now().strftime("%Y%m%d")
|
||||
worker_data_directory = os.path.join(full_tcpdump_path, date[0:4], date[4:6], date[6:8])
|
||||
if os.path.isdir(worker_data_directory):
|
||||
all_files = os.listdir(worker_data_directory)
|
||||
all_files.sort()
|
||||
if all_files:
|
||||
|
|
|
@ -6,19 +6,19 @@ import time
|
|||
import redis
|
||||
import subprocess
|
||||
|
||||
host_redis_stream = "localhost"
|
||||
port_redis_stream = 6379
|
||||
sys.path.append(os.path.join(os.environ['D4_HOME'], 'lib/'))
|
||||
import ConfigLoader
|
||||
|
||||
config_loader = ConfigLoader.ConfigLoader()
|
||||
redis_server_stream = config_loader.get_redis_conn("Redis_STREAM", decode_responses=False)
|
||||
config_loader = None
|
||||
|
||||
redis_server_stream = redis.StrictRedis(
|
||||
host=host_redis_stream,
|
||||
port=port_redis_stream,
|
||||
db=0)
|
||||
type = 1
|
||||
|
||||
try:
|
||||
redis_server_stream.ping()
|
||||
except redis.exceptions.ConnectionError:
|
||||
print('Error: Redis server {}:{}, ConnectionError'.format(host_redis, port_redis))
|
||||
print('Error: Redis server {}, ConnectionError'.format("Redis_STREAM"))
|
||||
sys.exit(1)
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
|
|
@ -10,6 +10,9 @@ import datetime
|
|||
|
||||
import signal
|
||||
|
||||
sys.path.append(os.path.join(os.environ['D4_HOME'], 'lib/'))
|
||||
import ConfigLoader
|
||||
|
||||
class GracefulKiller:
|
||||
kill_now = False
|
||||
def __init__(self):
|
||||
|
@ -45,27 +48,14 @@ def compress_file(file_full_path, session_uuid,i=0):
|
|||
analyser_queue_max_size = analyzer_list_max_default_size
|
||||
redis_server_analyzer.ltrim('analyzer:{}:{}'.format(type, analyzer_uuid), 0, analyser_queue_max_size)
|
||||
|
||||
### Config ###
|
||||
config_loader = ConfigLoader.ConfigLoader()
|
||||
|
||||
host_redis_stream = "localhost"
|
||||
port_redis_stream = 6379
|
||||
|
||||
host_redis_metadata = "localhost"
|
||||
port_redis_metadata = 6380
|
||||
|
||||
redis_server_stream = redis.StrictRedis(
|
||||
host=host_redis_stream,
|
||||
port=port_redis_stream,
|
||||
db=0)
|
||||
|
||||
redis_server_metadata = redis.StrictRedis(
|
||||
host=host_redis_metadata,
|
||||
port=port_redis_metadata,
|
||||
db=0)
|
||||
|
||||
redis_server_analyzer = redis.StrictRedis(
|
||||
host=host_redis_metadata,
|
||||
port=port_redis_metadata,
|
||||
db=2)
|
||||
redis_server_stream = config_loader.get_redis_conn("Redis_STREAM", decode_responses=False)
|
||||
redis_server_metadata = config_loader.get_redis_conn("Redis_METADATA", decode_responses=False)
|
||||
redis_server_analyzer = config_loader.get_redis_conn("Redis_ANALYZER", decode_responses=False)
|
||||
config_loader = None
|
||||
### ###
|
||||
|
||||
type = 1
|
||||
sleep_time = 300
|
||||
|
|
|
@ -8,7 +8,11 @@ import gzip
|
|||
import redis
|
||||
import shutil
|
||||
import datetime
|
||||
import configparser
|
||||
|
||||
sys.path.append(os.path.join(os.environ['D4_HOME'], 'lib/'))
|
||||
import ConfigLoader
|
||||
import Analyzer_Queue
|
||||
|
||||
|
||||
DEFAULT_FILE_EXTENSION = 'txt'
|
||||
DEFAULT_FILE_SEPARATOR = b'\n'
|
||||
|
@ -16,26 +20,12 @@ ROTATION_SAVE_CYCLE = 300 # seconds
|
|||
MAX_BUFFER_LENGTH = 100000
|
||||
TYPE = 254
|
||||
|
||||
host_redis_stream = "localhost"
|
||||
port_redis_stream = 6379
|
||||
# CONFIG #
|
||||
config_loader = ConfigLoader.ConfigLoader()
|
||||
|
||||
redis_server_stream = redis.StrictRedis(
|
||||
host=host_redis_stream,
|
||||
port=port_redis_stream,
|
||||
db=0)
|
||||
|
||||
host_redis_metadata = "localhost"
|
||||
port_redis_metadata = 6380
|
||||
|
||||
redis_server_metadata = redis.StrictRedis(
|
||||
host=host_redis_metadata,
|
||||
port=port_redis_metadata,
|
||||
db=0)
|
||||
|
||||
redis_server_analyzer = redis.StrictRedis(
|
||||
host=host_redis_metadata,
|
||||
port=port_redis_metadata,
|
||||
db=2)
|
||||
redis_server_stream = config_loader.get_redis_conn("Redis_STREAM", decode_responses=False)
|
||||
redis_server_metadata = config_loader.get_redis_conn("Redis_METADATA", decode_responses=False)
|
||||
redis_server_analyzer = config_loader.get_redis_conn("Redis_ANALYZER", decode_responses=False)
|
||||
|
||||
analyzer_list_max_default_size = 10000
|
||||
|
||||
|
@ -48,17 +38,13 @@ class MetaTypesDefault:
|
|||
self.buffer = b''
|
||||
self.file_rotation_mode = True
|
||||
|
||||
# get file config
|
||||
config_file_server = os.path.join(os.environ['D4_HOME'], 'configs/server.conf')
|
||||
config_server = configparser.ConfigParser()
|
||||
config_server.read(config_file_server)
|
||||
# get data directory
|
||||
use_default_save_directory = config_server['Save_Directories'].getboolean('use_default_save_directory')
|
||||
use_default_save_directory = config_loader.get_config_boolean("Save_Directories", "use_default_save_directory")
|
||||
# check if field is None
|
||||
if use_default_save_directory:
|
||||
data_directory = os.path.join(os.environ['D4_HOME'], 'data')
|
||||
else:
|
||||
data_directory = config_server['Save_Directories'].get('save_directory')
|
||||
data_directory = config_loader.get_config_str("Save_Directories", "save_directory")
|
||||
self.data_directory = data_directory
|
||||
|
||||
self.parse_json(json_file)
|
||||
|
@ -76,6 +62,8 @@ class MetaTypesDefault:
|
|||
def process_data(self, data):
|
||||
# save data on disk
|
||||
self.save_rotate_file(data)
|
||||
# do something with the data (send to analyzer queue by default)
|
||||
self.reconstruct_data(data)
|
||||
|
||||
######## CORE FUNCTIONS ########
|
||||
|
||||
|
@ -156,6 +144,7 @@ class MetaTypesDefault:
|
|||
self.reset_buffer()
|
||||
all_line = data.split(self.get_file_separator())
|
||||
for reconstructed_data in all_line[:-1]:
|
||||
if reconstructed_data != b'':
|
||||
self.handle_reconstructed_data(reconstructed_data)
|
||||
|
||||
# save incomplete element in buffer
|
||||
|
@ -186,15 +175,7 @@ class MetaTypesDefault:
|
|||
os.remove(file_full_path)
|
||||
|
||||
def send_to_analyzers(self, data_to_send):
|
||||
## save full path in analyzer queue
|
||||
for analyzer_uuid in redis_server_metadata.smembers('analyzer:{}:{}'.format(TYPE, self.get_type_name())):
|
||||
analyzer_uuid = analyzer_uuid.decode()
|
||||
redis_server_analyzer.lpush('analyzer:{}:{}'.format(self.get_type_name(), analyzer_uuid), data_to_send)
|
||||
redis_server_metadata.hset('analyzer:{}'.format(analyzer_uuid), 'last_updated', time.time())
|
||||
analyser_queue_max_size = redis_server_metadata.hget('analyzer:{}'.format(analyzer_uuid), 'max_size')
|
||||
if analyser_queue_max_size is None:
|
||||
analyser_queue_max_size = analyzer_list_max_default_size
|
||||
redis_server_analyzer.ltrim('analyzer:{}:{}'.format(self.get_type_name(), analyzer_uuid), 0, analyser_queue_max_size)
|
||||
Analyzer_Queue.add_data_to_queue(self.uuid, self.get_type_name(), data_to_send)
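Here and in the per-type workers, the inline "push to every registered analyzer, update `last_updated`, trim to `max_size`" loop is replaced by a single call to `Analyzer_Queue.add_data_to_queue()`. A hedged reconstruction of that helper, built only from the loop it replaces in this diff, might look like the sketch below; the Redis key layout and the 10000-entry default come from the removed code, while how the real module uses the sensor uuid (for example for the group queues added elsewhere in this change) is not shown here and is left out.

```python
# Hedged sketch of lib/Analyzer_Queue.add_data_to_queue(), mirroring the inline
# logic removed in this diff; not necessarily the real implementation.
import os
import sys
import time

sys.path.append(os.path.join(os.environ['D4_HOME'], 'lib/'))
import ConfigLoader

config_loader = ConfigLoader.ConfigLoader()
redis_server_metadata = config_loader.get_redis_conn("Redis_METADATA", decode_responses=False)
redis_server_analyzer = config_loader.get_redis_conn("Redis_ANALYZER", decode_responses=False)
config_loader = None

ANALYZER_LIST_MAX_DEFAULT_SIZE = 10000  # same default as analyzer_list_max_default_size

def add_data_to_queue(sensor_uuid, data_type, data_to_send):
    # push the data (usually a file path) to every analyzer registered for this
    # type, refresh its last_updated timestamp and trim the queue to its max size
    for analyzer_uuid in redis_server_metadata.smembers('analyzer:{}'.format(data_type)):
        analyzer_uuid = analyzer_uuid.decode()
        redis_server_analyzer.lpush('analyzer:{}:{}'.format(data_type, analyzer_uuid), data_to_send)
        redis_server_metadata.hset('analyzer:{}'.format(analyzer_uuid), 'last_updated', time.time())
        max_size = redis_server_metadata.hget('analyzer:{}'.format(analyzer_uuid), 'max_size')
        if max_size is None:
            max_size = ANALYZER_LIST_MAX_DEFAULT_SIZE
        redis_server_analyzer.ltrim('analyzer:{}:{}'.format(data_type, analyzer_uuid), 0, max_size)
```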
|
||||
|
||||
######## GET FUNCTIONS ########
|
||||
|
||||
|
|
|
@ -0,0 +1,80 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
from meta_types_modules.MetaTypesDefault import MetaTypesDefault
|
||||
import hashlib
|
||||
import time
|
||||
import os
|
||||
import datetime
|
||||
import base64
|
||||
import shutil
|
||||
import gzip
|
||||
|
||||
class TypeHandler(MetaTypesDefault):
|
||||
|
||||
def __init__(self, uuid, json_file):
|
||||
super().__init__(uuid, json_file)
|
||||
self.compress = False
|
||||
self.extension = ''
|
||||
self.segregate = True
|
||||
if "compress" in json_file:
|
||||
self.compress = json_file['compress']
|
||||
if "extension" in json_file:
|
||||
self.extension = json_file['extension']
|
||||
if "segregate" in json_file:
|
||||
self.segregate = json_file['segregate']
|
||||
self.set_rotate_file_mode(False)
|
||||
self.saved_dir = ''
|
||||
|
||||
def process_data(self, data):
|
||||
# reconstruct complete elements from the incoming stream data
|
||||
self.reconstruct_data(data)
|
||||
|
||||
# pushing the filepath instead of the file content to the analyzer
|
||||
def handle_reconstructed_data(self, data):
|
||||
m = hashlib.sha256()
|
||||
self.set_last_time_saved(time.time())
|
||||
self.set_last_saved_date(datetime.datetime.now().strftime("%Y%m%d%H%M%S"))
|
||||
|
||||
# Create folder
|
||||
save_dir = os.path.join(self.get_save_dir(save_by_uuid=self.segregate), 'files')
|
||||
if not os.path.isdir(save_dir):
|
||||
os.makedirs(save_dir)
|
||||
# write file to disk
|
||||
decodeddata = base64.b64decode(data)
|
||||
|
||||
m.update(decodeddata)
|
||||
path = os.path.join(save_dir, m.hexdigest())
|
||||
path = '{}.{}'.format(path, self.extension)
|
||||
with open(path, 'wb') as p:
|
||||
p.write(decodeddata)
|
||||
if self.compress:
|
||||
compressed_filename = '{}.gz'.format(path)
|
||||
with open(path, 'rb') as f_in:
|
||||
with gzip.open(compressed_filename, 'wb') as f_out:
|
||||
shutil.copyfileobj(f_in, f_out)
|
||||
os.remove(path)
|
||||
self.send_to_analyzers(compressed_filename)
|
||||
else:
|
||||
self.send_to_analyzers(path)
|
||||
|
||||
def reconstruct_data(self, data):
|
||||
# save data in buffer
|
||||
self.add_to_buffer(data)
|
||||
data = self.get_buffer()
|
||||
|
||||
# end of element found in data
|
||||
if self.get_file_separator() in data:
|
||||
# empty buffer
|
||||
self.reset_buffer()
|
||||
all_line = data.split(self.get_file_separator())
|
||||
for reconstructed_data in all_line[:-1]:
|
||||
if reconstructed_data != b'':
|
||||
self.handle_reconstructed_data(reconstructed_data)
|
||||
|
||||
# save incomplete element in buffer
|
||||
if all_line[-1] != b'':
|
||||
self.add_to_buffer(all_line[-1])
|
||||
|
||||
|
||||
def test(self):
|
||||
print('Class: filewatcher')
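The filewatcher handler above takes three optional knobs from the `json_file` it is constructed with: `compress`, `extension` and `segregate`. A purely illustrative configuration (key names from the constructor above, values invented) could be:

```python
# Illustration only: the key names come from TypeHandler.__init__ above,
# the values and the instantiation are invented for the example.
example_json_file = {
    "compress": True,     # gzip each reconstructed file after writing it
    "extension": "png",   # extension appended to the sha256-named file
    "segregate": True,    # keep files under a per-sensor-uuid directory
}

# handler = TypeHandler('<sensor-uuid>', example_json_file)  # sketch only
```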
|
|
@ -0,0 +1,38 @@
|
|||
#!/usr/bin/env python3

from meta_types_modules.MetaTypesDefault import MetaTypesDefault
import hashlib
import time
import os
import datetime

class TypeHandler(MetaTypesDefault):

    def __init__(self, uuid, json_file):
        super().__init__(uuid, json_file)
        self.set_rotate_file_mode(False)
        self.saved_dir = ''

    def process_data(self, data):
        self.reconstruct_data(data)

    # push the file path instead of the file content to the analyzer
    def handle_reconstructed_data(self, data):
        m = hashlib.sha256()
        self.set_last_time_saved(time.time())
        self.set_last_saved_date(datetime.datetime.now().strftime("%Y%m%d%H%M%S"))

        # create folder
        jsons_save_dir = os.path.join(self.get_save_dir(save_by_uuid=True), 'files')
        if not os.path.isdir(jsons_save_dir):
            os.makedirs(jsons_save_dir)
        # write json file to disk
        m.update(data)
        jsons_path = os.path.join(jsons_save_dir, m.hexdigest()+'.json')
        with open(jsons_path, 'wb') as j:
            j.write(data)
        # send data to the analyzer queues
        self.send_to_analyzers(jsons_path)

    def test(self):
        print('Class: filewatcherjson')
|
|
@ -23,6 +23,7 @@ class TypeHandler(MetaTypesDefault):
|
|||
self.reconstruct_data(data)
|
||||
|
||||
def handle_reconstructed_data(self, data):
|
||||
decoded_data = data.decode()
|
||||
self.set_last_time_saved(time.time())
|
||||
self.set_last_saved_date(datetime.datetime.now().strftime("%Y%m%d%H%M%S"))
|
||||
|
||||
|
@ -35,7 +36,14 @@ class TypeHandler(MetaTypesDefault):
|
|||
os.makedirs(jsons_save_dir)
|
||||
|
||||
# Extract certificates from json
|
||||
mtjson = json.loads(data.decode())
|
||||
try:
|
||||
mtjson = json.loads(decoded_data)
|
||||
res = True
|
||||
except Exception as e:
|
||||
print(decoded_data)
|
||||
res = False
|
||||
if res:
|
||||
#mtjson = json.loads(decoded_data)
|
||||
for certificate in mtjson["Certificates"] or []:
|
||||
cert = binascii.a2b_base64(certificate["Raw"])
|
||||
# one could also load this cert with
|
||||
|
@ -50,7 +58,7 @@ class TypeHandler(MetaTypesDefault):
|
|||
# write json file to disk
|
||||
jsons_path = os.path.join(jsons_save_dir, mtjson["Timestamp"]+'.json')
|
||||
with open(jsons_path, 'w') as j:
|
||||
j.write(data.decode())
|
||||
j.write(decoded_data)
|
||||
# Send data to Analyzer
|
||||
self.send_to_analyzers(jsons_path)
|
||||
|
||||
|
|
|
@ -0,0 +1,16 @@
|
|||
#!/usr/bin/env python3

from meta_types_modules.MetaTypesDefault import MetaTypesDefault

class TypeHandler(MetaTypesDefault):

    def __init__(self, uuid, json_file):
        super().__init__(uuid, json_file)
        self.set_rotate_file_mode(False)
        self.saved_dir = ''

    def process_data(self, data):
        self.reconstruct_data(data)

    def test(self):
        print('Class: maltrail')
|
|
@ -10,21 +10,15 @@ import datetime
|
|||
|
||||
from meta_types_modules import MetaTypesDefault
|
||||
|
||||
host_redis_stream = "localhost"
|
||||
port_redis_stream = 6379
|
||||
sys.path.append(os.path.join(os.environ['D4_HOME'], 'lib/'))
|
||||
import ConfigLoader
|
||||
|
||||
redis_server_stream = redis.StrictRedis(
|
||||
host=host_redis_stream,
|
||||
port=port_redis_stream,
|
||||
db=0)
|
||||
|
||||
host_redis_metadata = "localhost"
|
||||
port_redis_metadata = 6380
|
||||
|
||||
redis_server_metadata = redis.StrictRedis(
|
||||
host=host_redis_metadata,
|
||||
port=port_redis_metadata,
|
||||
db=0)
|
||||
### Config ###
|
||||
config_loader = ConfigLoader.ConfigLoader()
|
||||
redis_server_stream = config_loader.get_redis_conn("Redis_STREAM", decode_responses=False)
|
||||
redis_server_metadata = config_loader.get_redis_conn("Redis_METADATA", decode_responses=False)
|
||||
config_loader = None
|
||||
### ###
|
||||
|
||||
type_meta_header = 2
|
||||
type_defined = 254
|
||||
|
@ -60,10 +54,16 @@ def clean_db(session_uuid):
|
|||
clean_stream(stream_defined, type_defined, session_uuid)
|
||||
redis_server_stream.srem('ended_session', session_uuid)
|
||||
redis_server_stream.srem('working_session_uuid:{}'.format(type_meta_header), session_uuid)
|
||||
# clean extended type (used)
|
||||
redis_server_stream.hdel('map:session-uuid_active_extended_type', session_uuid)
|
||||
try:
|
||||
redis_server_stream.srem('active_connection_extended_type:{}'.format(uuid), extended_type)
|
||||
except Exception as e:
|
||||
print(e)
|
||||
|
||||
def clean_stream(stream_name, type, session_uuid):
|
||||
redis_server_stream.srem('session_uuid:{}'.format(type), session_uuid)
|
||||
redis_server_stream.hdel('map-type:session_uuid-uuid:{}'.format(type), session_uuid)
|
||||
#redis_server_stream.hdel('map-type:session_uuid-uuid:{}'.format(type), session_uuid)
|
||||
redis_server_stream.delete(stream_name)
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
@ -151,6 +151,10 @@ if __name__ == "__main__":
|
|||
clean_db(session_uuid)
|
||||
sys.exit(1)
|
||||
|
||||
# create active_connection for extended type
|
||||
redis_server_stream.sadd('active_connection_extended_type:{}'.format(uuid), extended_type)
|
||||
|
||||
redis_server_stream.hset('map:session-uuid_active_extended_type', session_uuid, extended_type)
|
||||
|
||||
#### Handle Specific MetaTypes ####
|
||||
# Use Specific Handler defined
|
||||
|
@ -172,9 +176,17 @@ if __name__ == "__main__":
|
|||
type = type_defined
|
||||
id = 0
|
||||
buffer = b''
|
||||
|
||||
type_handler.test()
|
||||
|
||||
# update uuid: extended type list
|
||||
redis_server_metadata.sadd('all_extended_types_by_uuid:{}'.format(uuid), extended_type)
|
||||
|
||||
# update metadata extended type
|
||||
time_val = int(time.time())
|
||||
if not redis_server_metadata.hexists('metadata_extended_type_by_uuid:{}:{}'.format(uuid, extended_type), 'first_seen'):
|
||||
redis_server_metadata.hset('metadata_extended_type_by_uuid:{}:{}'.format(uuid, extended_type), 'first_seen', time_val)
|
||||
redis_server_metadata.hset('metadata_extended_type_by_uuid:{}:{}'.format(uuid, extended_type), 'last_seen', time_val)
|
||||
|
||||
# handle 254 type
|
||||
while True:
|
||||
res = redis_server_stream.xread({stream_name: id}, count=1)
|
||||
|
@ -185,6 +197,8 @@ if __name__ == "__main__":
|
|||
data = res[0][1][0][1]
|
||||
|
||||
if id and data:
|
||||
# update metadata extended type
|
||||
redis_server_metadata.hset('metadata_extended_type_by_uuid:{}:{}'.format(uuid, extended_type), 'last_seen', int(time.time()) )
|
||||
# process 254 data type
|
||||
type_handler.process_data(data[b'message'])
|
||||
# remove data from redis stream
|
||||
|
|
|
@@ -6,19 +6,21 @@ import time
import redis
import subprocess

host_redis_stream = "localhost"
port_redis_stream = 6379
sys.path.append(os.path.join(os.environ['D4_HOME'], 'lib/'))
import ConfigLoader

### Config ###
config_loader = ConfigLoader.ConfigLoader()
redis_server_stream = config_loader.get_redis_conn("Redis_STREAM", decode_responses=False)
config_loader = None
### ###

redis_server_stream = redis.StrictRedis(
host=host_redis_stream,
port=port_redis_stream,
db=0)
type = 2

try:
redis_server_stream.ping()
except redis.exceptions.ConnectionError:
print('Error: Redis server {}:{}, ConnectionError'.format(host_redis, port_redis))
print('Error: Redis server: Redis_STREAM, ConnectionError')
sys.exit(1)

if __name__ == "__main__":
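All of these scripts now build their Redis connections through the project's `ConfigLoader` helper in `lib/` instead of hardcoded host/port constants. The helper's real implementation and the exact option names in `configs/server.conf` are not shown in this diff; the following is only a rough sketch of such a loader, assuming per-section `host`, `port` and `db` keys.

```python
# Rough sketch of a ConfigLoader-style helper (assumed option names; the real
# lib/ConfigLoader.py may differ).
import os
import configparser
import redis

class ConfigLoader:
    def __init__(self, config_file=None):
        if config_file is None:
            config_file = os.path.join(os.environ['D4_HOME'], 'configs/server.conf')
        self.config = configparser.ConfigParser()
        self.config.read(config_file)

    def get_config_boolean(self, section, key):
        return self.config.getboolean(section, key)

    def get_config_str(self, section, key):
        return self.config.get(section, key)

    def get_redis_conn(self, section, decode_responses=True):
        # assumed keys: host, port, db under e.g. [Redis_STREAM]
        return redis.StrictRedis(host=self.config.get(section, 'host'),
                                 port=self.config.getint(section, 'port'),
                                 db=self.config.getint(section, 'db'),
                                 decode_responses=decode_responses)
```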
@@ -0,0 +1,180 @@
#!/usr/bin/env python3

import os
import sys
import time
import gzip
import redis

import shutil
import datetime

sys.path.append(os.path.join(os.environ['D4_HOME'], 'lib/'))
import ConfigLoader
import Analyzer_Queue

def data_incorrect_format(session_uuid):
    print('Incorrect format')
    sys.exit(1)

config_loader = ConfigLoader.ConfigLoader()
redis_server_stream = config_loader.get_redis_conn("Redis_STREAM", decode_responses=False)
redis_server_analyzer = config_loader.get_redis_conn("Redis_ANALYZER", decode_responses=False)
redis_server_metadata = config_loader.get_redis_conn("Redis_METADATA", decode_responses=False)

# get data directory
use_default_save_directory = config_loader.get_config_boolean("Save_Directories", "use_default_save_directory")
# check if field is None
if use_default_save_directory:
    data_directory = os.path.join(os.environ['D4_HOME'], 'data')
else:
    data_directory = config_loader.get_config_str("Save_Directories", "save_directory")
config_loader = None

type = 3
rotation_save_cycle = 300 #seconds

max_buffer_length = 10000

save_to_file = True

def compress_file(file_full_path, i=0):
    if i==0:
        compressed_filename = '{}.gz'.format(file_full_path)
    else:
        compressed_filename = '{}.{}.gz'.format(file_full_path, i)
    if os.path.isfile(compressed_filename):
        compress_file(file_full_path, i+1)
    else:
        with open(file_full_path, 'rb') as f_in:
            with gzip.open(compressed_filename, 'wb') as f_out:
                shutil.copyfileobj(f_in, f_out)
        os.remove(file_full_path)

def get_save_dir(dir_data_uuid, year, month, day):
    dir_path = os.path.join(dir_data_uuid, year, month, day)
    if not os.path.isdir(dir_path):
        os.makedirs(dir_path)
    return dir_path

if __name__ == "__main__":

    if len(sys.argv) != 2:
        print('usage:', 'Worker.py', 'session_uuid')
        exit(1)

    session_uuid = sys.argv[1]
    stream_name = 'stream:{}:{}'.format(type, session_uuid)
    id = '0'
    buffer = b''

    # track launched worker
    redis_server_stream.sadd('working_session_uuid:{}'.format(type), session_uuid)

    # get uuid
    res = redis_server_stream.xread({stream_name: id}, count=1)
    if res:
        uuid = res[0][1][0][1][b'uuid'].decode()
        # init file rotation
        if save_to_file:
            rotate_file = False
            time_file = time.time()
            date_file = datetime.datetime.now().strftime("%Y%m%d%H%M%S")
            dir_data_uuid = os.path.join(data_directory, uuid, str(type))
            dir_full_path = get_save_dir(dir_data_uuid, date_file[0:4], date_file[4:6], date_file[6:8])
            filename = '{}-{}-{}-{}-{}.syslog.txt'.format(uuid, date_file[0:4], date_file[4:6], date_file[6:8], date_file[8:14])
            save_path = os.path.join(dir_full_path, filename)

        print('---- worker launched, uuid={} session_uuid={} epoch={}'.format(uuid, session_uuid, time.time()))
    else:
        ########################### # TODO: clean db on error
        print('Incorrect Stream, Closing worker: type={} session_uuid={}'.format(type, session_uuid))
        sys.exit(1)

    while True:

        res = redis_server_stream.xread({stream_name: id}, count=1)
        if res:
            new_id = res[0][1][0][0].decode()
            if id != new_id:
                id = new_id
                data = res[0][1][0][1]

                if id and data:
                    # reconstruct data
                    if buffer != b'':
                        data[b'message'] = b''.join([buffer, data[b'message']])
                        buffer = b''

                    # send data to redis
                    # new line in received data
                    if b'\n' in data[b'message']:
                        all_line = data[b'message'].split(b'\n')
                        for line in all_line[:-1]:
                            Analyzer_Queue.add_data_to_queue(uuid, type, line)
                            # analyzer_uuid = analyzer_uuid.decode()
                        # keep incomplete line
                        if all_line[-1] != b'':
                            buffer += all_line[-1]
                    else:
                        if len(buffer) < max_buffer_length:
                            buffer += data[b'message']
                        else:
                            print('Error, infinite loop, max buffer length reached')
                            # force new line
                            buffer += b''.join([ data[b'message'], b'\n' ])


                    # save data on disk
                    if save_to_file and b'\n' in data[b'message']:
                        new_date = datetime.datetime.now().strftime("%Y%m%d%H%M%S")
                        # check if a new rotation is needed
                        if ( new_date[0:8] != date_file[0:8] ) or ( time.time() - time_file > rotation_save_cycle ):
                            date_file = new_date
                            rotate_file = True

                        # file rotation
                        if rotate_file:
                            end_file, start_new_file = data[b'message'].rsplit(b'\n', maxsplit=1)
                            # save end of file
                            with open(save_path, 'ab') as f:
                                f.write(end_file)
                            compress_file(save_path)

                            # get new save_path
                            dir_full_path = get_save_dir(dir_data_uuid, date_file[0:4], date_file[4:6], date_file[6:8])
                            filename = '{}-{}-{}-{}-{}.syslog.txt'.format(uuid, date_file[0:4], date_file[4:6], date_file[6:8], date_file[8:14])
                            save_path = os.path.join(dir_full_path, filename)

                            # save start of new file
                            if start_new_file != b'':
                                with open(save_path, 'ab') as f:
                                    f.write(start_new_file)
                            # end of rotation
                            rotate_file = False
                            time_file = time.time()

                        else:
                            with open(save_path, 'ab') as f:
                                f.write(data[b'message'])

                    redis_server_stream.xdel(stream_name, id)

        else:
            # success, all data are saved
            if redis_server_stream.sismember('ended_session', session_uuid):
                redis_server_stream.srem('ended_session', session_uuid)
                redis_server_stream.srem('session_uuid:{}'.format(type), session_uuid)
                redis_server_stream.srem('working_session_uuid:{}'.format(type), session_uuid)
                redis_server_stream.hdel('map-type:session_uuid-uuid:{}'.format(type), session_uuid)
                redis_server_stream.delete(stream_name)
                try:
                    if os.path.isfile(save_path):
                        #print('save')
                        compress_file(save_path)
                except NameError:
                    pass
                print('---- syslog DONE, uuid={} session_uuid={} epoch={}'.format(uuid, session_uuid, time.time()))
                sys.exit(0)
            else:
                time.sleep(10)
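The worker above rotates its output file when the day changes (the `%Y%m%d` prefix of the timestamp differs) or when `rotation_save_cycle` seconds (300 by default) have passed since the last rotation, and gzips the finished file with `compress_file`. A small self-contained illustration of that rotation condition, with the Redis and file I/O stripped out:

```python
# Same rotation test as the worker above, isolated for illustration.
import datetime
import time

rotation_save_cycle = 300  # seconds, the default used above

def need_rotation(date_file, time_file, now=None):
    """Return True when the worker above would rotate its output file."""
    now = time.time() if now is None else now
    new_date = datetime.datetime.fromtimestamp(now).strftime("%Y%m%d%H%M%S")
    # rotate on day change (YYYYMMDD prefix) or after rotation_save_cycle seconds
    return (new_date[0:8] != date_file[0:8]) or (now - time_file > rotation_save_cycle)

# example: a file started 10 minutes ago (same day) is overdue for rotation
started = time.time() - 600
date_file = datetime.datetime.fromtimestamp(started).strftime("%Y%m%d%H%M%S")
print(need_rotation(date_file, started))  # True
```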
@@ -0,0 +1,37 @@
#!/usr/bin/env python3

import os
import sys
import time
import redis
import subprocess

sys.path.append(os.path.join(os.environ['D4_HOME'], 'lib/'))
import ConfigLoader

config_loader = ConfigLoader.ConfigLoader()
redis_server_stream = config_loader.get_redis_conn("Redis_STREAM", decode_responses=False)
config_loader = None

type = 3

try:
    redis_server_stream.ping()
except redis.exceptions.ConnectionError:
    print('Error: Redis server {}, ConnectionError'.format("Redis_STREAM"))
    sys.exit(1)

if __name__ == "__main__":
    stream_name = 'stream:{}'.format(type)
    redis_server_stream.delete('working_session_uuid:{}'.format(type))

    while True:
        for session_uuid in redis_server_stream.smembers('session_uuid:{}'.format(type)):
            session_uuid = session_uuid.decode()
            if not redis_server_stream.sismember('working_session_uuid:{}'.format(type), session_uuid):

                process = subprocess.Popen(['./worker.py', session_uuid])
                print('Launching new worker{} ... session_uuid={}'.format(type, session_uuid))

        #print('.')
        time.sleep(10)
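The manager polls `session_uuid:3` every 10 seconds and spawns `./worker.py <session_uuid>` for any session not yet present in `working_session_uuid:3` (each worker adds itself to that set on startup). A hedged sketch of how a type-3 session could be registered so the manager picks it up, assuming the same Redis instance and the key/field layout visible in the worker above:

```python
# Illustrative only: in d4-core the server process creates these entries when a
# sensor opens a type-3 session; the key and field names are taken from the
# manager and worker shown above, the Redis location is an assumption.
import uuid
import redis

r = redis.StrictRedis(host='localhost', port=6379, db=0)

session_uuid = uuid.uuid4().hex
sensor_uuid = str(uuid.uuid4())
stream_name = 'stream:3:{}'.format(session_uuid)

# the first stream entry carries the sensor uuid, as expected by the worker
r.xadd(stream_name, {'uuid': sensor_uuid, 'message': b'<13>Oct 11 22:14:15 host app: test line\n'})
# registering the session makes the manager launch ./worker.py for it
r.sadd('session_uuid:3', session_uuid)
```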
@@ -6,19 +6,18 @@ import time
import redis

import datetime
import configparser

sys.path.append(os.path.join(os.environ['D4_HOME'], 'lib/'))
import ConfigLoader
import Analyzer_Queue

def data_incorrect_format(session_uuid):
print('Incorrect format')
sys.exit(1)

host_redis_stream = "localhost"
port_redis_stream = 6379

redis_server_stream = redis.StrictRedis(
host=host_redis_stream,
port=port_redis_stream,
db=0)
config_loader = ConfigLoader.ConfigLoader()
redis_server_stream = config_loader.get_redis_conn("Redis_STREAM", decode_responses=False)
config_loader = None

# get file config
config_file_server = os.path.join(os.environ['D4_HOME'], 'configs/server.conf')

@@ -26,13 +25,13 @@ config_server = configparser.ConfigParser()
config_server.read(config_file_server)

# get data directory
use_default_save_directory = config_server['Save_Directories'].getboolean('use_default_save_directory')
use_default_save_directory = config_loader.get_config_boolean("Save_Directories", "use_default_save_directory")
# check if field is None
if use_default_save_directory:
data_directory = os.path.join(os.environ['D4_HOME'], 'data')
else:
data_directory = config_server['Save_Directories'].get('save_directory')

data_directory = config_loader.get_config_str("Save_Directories", "save_directory")
config_loader = None

type = 4
rotation_save_cycle = 300 #seconds

@@ -61,8 +60,8 @@ if __name__ == "__main__":
rel_path = os.path.join(dir_path, filename)
print('---- worker launched, uuid={} session_uuid={} epoch={}'.format(uuid, session_uuid, time.time()))
else:
print('Incorrect Stream, Closing worker: type={} session_uuid={}'.format(type, session_uuid))
sys.exit(1)
print('Incorrect message')

time_file = time.time()
rotate_file = False
@@ -6,19 +6,19 @@ import time
import redis
import subprocess

host_redis_stream = "localhost"
port_redis_stream = 6379
sys.path.append(os.path.join(os.environ['D4_HOME'], 'lib/'))
import ConfigLoader

config_loader = ConfigLoader.ConfigLoader()
redis_server_stream = config_loader.get_redis_conn("Redis_STREAM", decode_responses=False)
config_loader = None

redis_server_stream = redis.StrictRedis(
host=host_redis_stream,
port=port_redis_stream,
db=0)
type = 4

try:
redis_server_stream.ping()
except redis.exceptions.ConnectionError:
print('Error: Redis server {}:{}, ConnectionError'.format(host_redis, port_redis))
print('Error: Redis server {}, ConnectionError'.format("Redis_STREAM"))
sys.exit(1)

if __name__ == "__main__":
@@ -8,46 +8,28 @@ import redis

import shutil
import datetime
import configparser

sys.path.append(os.path.join(os.environ['D4_HOME'], 'lib/'))
import ConfigLoader
import Analyzer_Queue

def data_incorrect_format(session_uuid):
print('Incorrect format')
sys.exit(1)

host_redis_stream = "localhost"
port_redis_stream = 6379

redis_server_stream = redis.StrictRedis(
host=host_redis_stream,
port=port_redis_stream,
db=0)

host_redis_metadata = "localhost"
port_redis_metadata = 6380

redis_server_metadata = redis.StrictRedis(
host=host_redis_metadata,
port=port_redis_metadata,
db=0)

redis_server_analyzer = redis.StrictRedis(
host=host_redis_metadata,
port=port_redis_metadata,
db=2)

# get file config
config_file_server = os.path.join(os.environ['D4_HOME'], 'configs/server.conf')
config_server = configparser.ConfigParser()
config_server.read(config_file_server)
config_loader = ConfigLoader.ConfigLoader()
redis_server_stream = config_loader.get_redis_conn("Redis_STREAM", decode_responses=False)
redis_server_analyzer = config_loader.get_redis_conn("Redis_ANALYZER", decode_responses=False)
redis_server_metadata = config_loader.get_redis_conn("Redis_METADATA", decode_responses=False)

# get data directory
use_default_save_directory = config_server['Save_Directories'].getboolean('use_default_save_directory')
use_default_save_directory = config_loader.get_config_boolean("Save_Directories", "use_default_save_directory")
# check if field is None
if use_default_save_directory:
data_directory = os.path.join(os.environ['D4_HOME'], 'data')
else:
data_directory = config_server['Save_Directories'].get('save_directory')

data_directory = config_loader.get_config_str("Save_Directories", "save_directory")
config_loader = None

type = 8
rotation_save_cycle = 300 #seconds

@@ -131,14 +113,7 @@ if __name__ == "__main__":
if b'\n' in data[b'message']:
all_line = data[b'message'].split(b'\n')
for line in all_line[:-1]:
for analyzer_uuid in redis_server_metadata.smembers('analyzer:{}'.format(type)):
analyzer_uuid = analyzer_uuid.decode()
redis_server_analyzer.lpush('analyzer:{}:{}'.format(type, analyzer_uuid), line)
redis_server_metadata.hset('analyzer:{}'.format(analyzer_uuid), 'last_updated', time.time())
analyser_queue_max_size = redis_server_metadata.hget('analyzer:{}'.format(analyzer_uuid), 'max_size')
if analyser_queue_max_size is None:
analyser_queue_max_size = analyzer_list_max_default_size
redis_server_analyzer.ltrim('analyzer:{}:{}'.format(type, analyzer_uuid), 0, analyser_queue_max_size)
Analyzer_Queue.add_data_to_queue(uuid, type, line)
# keep incomplete line
if all_line[-1] != b'':
buffer += all_line[-1]

@@ -152,7 +127,7 @@ if __name__ == "__main__":


# save data on disk
if save_to_file:
if save_to_file and b'\n' in data[b'message']:
new_date = datetime.datetime.now().strftime("%Y%m%d%H%M%S")
# check if a new rotation is needed
if ( new_date[0:8] != date_file[0:8] ) or ( time.time() - time_file > rotation_save_cycle ):

@@ -160,7 +135,7 @@ if __name__ == "__main__":
rotate_file = True

# file rotation
if rotate_file and b'\n' in data[b'message']:
if rotate_file:
end_file, start_new_file = data[b'message'].rsplit(b'\n', maxsplit=1)
# save end of file
with open(save_path, 'ab') as f:
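The per-analyzer fan-out that each worker used to inline (an `lpush` to `analyzer:<type>:<analyzer_uuid>`, a `last_updated` timestamp, and an `ltrim` to the queue's `max_size`) is now centralised in `lib/Analyzer_Queue`. Its real implementation is not part of this excerpt; the sketch below merely restates the removed inline logic as a function, so the connection setup and the default queue size are assumptions.

```python
# Sketch of what Analyzer_Queue.add_data_to_queue presumably covers, rebuilt
# from the inline code removed above; the actual lib/Analyzer_Queue.py may differ.
import time
import redis

# assumed connections (the old inline code used localhost:6380, db 0 and db 2)
redis_server_metadata = redis.StrictRedis(host='localhost', port=6380, db=0)
redis_server_analyzer = redis.StrictRedis(host='localhost', port=6380, db=2)

analyzer_list_max_default_size = 10000  # assumed default, not visible in this diff

def add_data_to_queue(sensor_uuid, data_type, line):
    # sensor_uuid matches the call sites above but is unused in this sketch
    for analyzer_uuid in redis_server_metadata.smembers('analyzer:{}'.format(data_type)):
        analyzer_uuid = analyzer_uuid.decode()
        # push the line to this analyzer's queue and record when it was last fed
        redis_server_analyzer.lpush('analyzer:{}:{}'.format(data_type, analyzer_uuid), line)
        redis_server_metadata.hset('analyzer:{}'.format(analyzer_uuid), 'last_updated', time.time())
        # cap the queue at the analyzer's configured max_size (or the default)
        max_size = redis_server_metadata.hget('analyzer:{}'.format(analyzer_uuid), 'max_size')
        max_size = int(max_size) if max_size is not None else analyzer_list_max_default_size
        redis_server_analyzer.ltrim('analyzer:{}:{}'.format(data_type, analyzer_uuid), 0, max_size)
```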
@@ -6,19 +6,19 @@ import time
import redis
import subprocess

host_redis_stream = "localhost"
port_redis_stream = 6379
sys.path.append(os.path.join(os.environ['D4_HOME'], 'lib/'))
import ConfigLoader

config_loader = ConfigLoader.ConfigLoader()
redis_server_stream = config_loader.get_redis_conn("Redis_STREAM", decode_responses=False)
config_loader = None

redis_server_stream = redis.StrictRedis(
host=host_redis_stream,
port=port_redis_stream,
db=0)
type = 8

try:
redis_server_stream.ping()
except redis.exceptions.ConnectionError:
print('Error: Redis server {}:{}, ConnectionError'.format(host_redis, port_redis))
print('Error: Redis server {}, ConnectionError'.format("Redis_STREAM"))
sys.exit(1)

if __name__ == "__main__":