fix: [analyzer] Local analyzer works

parent a0934b116f
commit 3844908794
Pipfile

@@ -10,7 +10,7 @@ redis = "*"
 hiredis = "*"
 pyshark = "*"
 ipa = {editable = true,path = "."}
-markdown = "*"
+markdown-strings = "*"
 
 [requires]
 python_version = "3.6"
Pipfile.lock

@@ -1,7 +1,7 @@
 {
     "_meta": {
         "hash": {
-            "sha256": "5b257321cbb7c926fc129a7e5195588ed28b9a84037556db11573e3e67442438"
+            "sha256": "9313ed688a2fb9ba22b23a7dbec3e0978f1e6d1f5ff8bb7dc00651bc11b50ade"
         },
         "pipfile-spec": 6,
         "requires": {
@@ -81,10 +81,10 @@
             ],
             "version": "==4.4.1"
         },
-        "markdown": {
+        "markdown-strings": {
             "hashes": [
-                "sha256:2e50876bcdd74517e7b71f3e7a76102050edec255b3983403f1a63e7c8a41e7a",
-                "sha256:56a46ac655704b91e5b7e6326ce43d5ef72411376588afa1dd90e881b83c7e8c"
+                "sha256:141e55d48742d6c19b09500bb71760f3aa04b6532c5d7725b9a030ce90663708",
+                "sha256:21bfc2ff1e6eec1209bb5835f090f9576cec46de38fb1991f57a422bb5efcf6f"
             ],
             "index": "pypi",
             "version": "==3.1.1"
README.md

@@ -31,7 +31,7 @@ pipenv install
 
 ## Usage
 #### Start the redis server
-Don't forget to set the DB directory in the redis.conf configuration. By default, the redis for Passive DNS is running on TCP port 6400
+Don't forget to set the DB directory in the redis.conf configuration. By default, the redis for IPA is running on TCP port 6405.
 ```shell script
 ../redis/src/redis-server ./etc/redis.conf
 ```
@@ -41,8 +41,7 @@ Don't forget to set the DB directory in the redis.conf configuration. By default
 cd ./etc
 cp analyzer.conf.sample analyzer.conf
 ```
-
-Edit the analyzer.conf to match the UUID of the analyzer queue from your D4 server.
+Edit analyzer.conf to match the UUID of the analyzer queue from your D4 server.
 ```shell script
 [global]
 my-uuid = 6072e072-bfaa-4395-9bb1-cdb3b470d715
@@ -51,7 +50,7 @@ d4-server = 127.0.0.1:6380
 logging-level = INFO
 ```
 
-Then you can start the analyzer.
+#### Start the analyzer
 ```shell script
 cd ../bin
 python3 run_ipa.py
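The `[global]` block shown above is plain INI syntax, so it can be loaded with Python's standard `configparser` (which the analyzer already imports, per the hunks further down). A minimal sketch, assuming the file sits at `etc/analyzer.conf`; the variable names are illustrative only:

```python
# Minimal sketch (illustration, not repo code): load the [global] section of analyzer.conf.
import configparser

config = configparser.ConfigParser()
config.read('etc/analyzer.conf')   # assumed location, relative to the repo root

my_uuid = config.get('global', 'my-uuid')
d4_server = config.get('global', 'd4-server')
logging_level = config.get('global', 'logging-level')
print(my_uuid, d4_server, logging_level)
```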
bin/export.py

@@ -0,0 +1,37 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+#
+# Markdown export module
+#
+# Copyright (C) 2019 Romain Kieffer
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as
+# published by the Free Software Foundation, either version 3 of the
+# License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+import markdown_strings as mds
+import redis
+import os
+
+
+analyzer_redis_host = os.getenv('D4_ANALYZER_REDIS_HOST', '127.0.0.1')
+analyzer_redis_port = int(os.getenv('D4_ANALYZER_REDIS_PORT', 6405))
+r = redis.Redis(host=analyzer_redis_host, port=analyzer_redis_port)
+
+
+def export_icmp_types():
+    res = mds.table_row(['ICMP Type', 'Count'], [10, 10]) + '\n'
+    res += '| :----- | -----: |\n'
+    redis_dict = r.hgetall('icmp')
+    for key in redis_dict:
+        res += mds.table_row([key.decode(), redis_dict[key].decode()], [10, 10]) + '\n'
+    return res
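To try the new module, one can seed the `icmp` hash it reads and print the resulting table. A hedged sketch, not part of this commit; it assumes `bin/` is on `sys.path` and a Redis instance listening on 127.0.0.1:6405 as configured in `etc/redis.conf`, and the counts pushed are made up:

```python
# Illustration only: seed the 'icmp' hash and render it with the new export module.
import export   # assumes bin/ is on sys.path

export.r.hincrby('icmp', '8', 42)   # echo request, made-up count
export.r.hincrby('icmp', '0', 40)   # echo reply, made-up count

print(export.export_icmp_types())   # prints a two-column Markdown table
```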
bin/run_ipa.py

@@ -26,7 +26,7 @@ import argparse
 if __name__ == "__main__":
 
     parser = argparse.ArgumentParser(description='D4-IPA')
-    parser.add_argument('-p', '--path', type=int, nargs=1, help='Path of local dataset.')
+    parser.add_argument('-p', '--path', type=str, nargs=1, help='Path of local dataset.')
 
     dataset = None
 
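The `type=int` → `type=str` fix matters because argparse was coercing the dataset path to an integer and would reject any real filesystem path; note also that `nargs=1` wraps the parsed value in a one-element list. A standalone illustration of the corrected behaviour (the path is invented, and this is not the project's code):

```python
# Standalone illustration of the corrected argument handling.
import argparse

parser = argparse.ArgumentParser(description='D4-IPA')
parser.add_argument('-p', '--path', type=str, nargs=1, help='Path of local dataset.')

args = parser.parse_args(['-p', '/data/ipa-dataset'])   # example path
print(args.path)      # ['/data/ipa-dataset']  -- nargs=1 yields a one-element list
print(args.path[0])   # '/data/ipa-dataset'
```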
etc/redis.conf

@@ -89,7 +89,7 @@ protected-mode yes
 
 # Accept connections on the specified port, default is 6379 (IANA #815344).
 # If port 0 is specified Redis will not listen on a TCP socket.
-port 6400
+port 6405
 
 # TCP listen() backlog.
 #
@@ -25,7 +25,7 @@ import configparser
 import logging
 
 from lib.inspection import get_cap, get_protocol, check_icmp_checksum, get_icmp_payload, get_icmp_ip, \
-    unassigned_icmp_types, deprecated_icmp_types, get_src_port, get_dst_port, list_caps
+    unassigned_icmp_types, deprecated_icmp_types, get_src_port, get_dst_port, list_caps, init_cap_list
 
 
 class Analyzer:
@@ -51,7 +51,7 @@ class Analyzer:
         self.logger.addHandler(self.ch)
 
         analyzer_redis_host = os.getenv('D4_ANALYZER_REDIS_HOST', '127.0.0.1')
-        analyzer_redis_port = int(os.getenv('D4_ANALYZER_REDIS_PORT', 6400))
+        analyzer_redis_port = int(os.getenv('D4_ANALYZER_REDIS_PORT', 6405))
         self.r = redis.Redis(host=analyzer_redis_host, port=analyzer_redis_port)
 
         self.dataset = dataset_path
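Because the port is read through `os.getenv`, a deployment can still override the new 6405 default without touching the code. A small illustration; the override value is just an example:

```python
# Illustration: the default is now 6405, but the environment variable still wins.
import os
import redis

os.environ['D4_ANALYZER_REDIS_PORT'] = '6500'   # example override, not a project default

analyzer_redis_host = os.getenv('D4_ANALYZER_REDIS_HOST', '127.0.0.1')
analyzer_redis_port = int(os.getenv('D4_ANALYZER_REDIS_PORT', 6405))
r = redis.Redis(host=analyzer_redis_host, port=analyzer_redis_port)
print(analyzer_redis_port)   # 6500
```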
@@ -65,9 +65,15 @@ class Analyzer:
             self.r_d4 = redis.Redis(host=host_redis_metadata, port=port_redis_metadata, db=2)
         else:
             self.logger.info("Starting local analyzer")
-            self.update_queue()
             self.queue = "to_scan"
+            self.cap_list = []
+            self.logger.info("Adding dataset caps to local queue")
+            self.cap_list = init_cap_list(self.dataset)
+            self.logger.info(len(self.cap_list))
+            self.update_queue()
+            self.logger.info("Processing...")
             self.process_local()
+            self.logger.info("Done.")
             time.sleep(15)
             c = self.update_queue()
             if c == 0:
@@ -83,27 +89,23 @@ class Analyzer:
             p.execute()
 
     def update_queue(self):
-        """
-        Each parser instance is given a list of days, and thus a list of caps to parse.
-        This method lets the parser confront his list of caps with the caps in his queue.
-        """
         remaining_caps = list_caps(self.queue, self.r)
         current_caps = list_caps('scanning', self.r)
         parsed_caps = list_caps('scanned', self.r)
         caps_to_add = []
         if remaining_caps:
-            print('[*] Queue already populated.')
+            self.logger.info('Queue already populated.')
             if self.cap_list:
                 for cap in self.cap_list:
                     if cap not in remaining_caps and cap not in parsed_caps and cap not in current_caps:
                         caps_to_add.append(cap)
                 if not caps_to_add:
-                    print('[*] Already up to date.')
+                    self.logger.info('Already up to date.')
                     return 1
-                print('[o] Queue updated.')
+                self.logger.info('Queue updated.')
         else:
             if self.cap_list:
-                print('[*] No caps, initializing...')
+                self.logger.info('No caps enqueued, initializing...')
                 caps_to_add = self.cap_list
             elif current_caps:
                 return 0
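The reconciliation this method performs boils down to a set difference between the caps found on disk and the caps Redis already tracks in any state. A simplified standalone sketch of that idea, using plain lists rather than the project's Redis-backed `list_caps`:

```python
# Simplified sketch of the queue-update idea (not the repo's implementation).
def caps_missing_from_queue(cap_list, remaining, scanning, scanned):
    """Return the on-disk caps that Redis does not yet know about in any state."""
    known = set(remaining) | set(scanning) | set(scanned)
    return [cap for cap in cap_list if cap not in known]

print(caps_missing_from_queue(
    cap_list=['a.gz', 'b.gz', 'c.gz'],
    remaining=['a.gz'], scanning=[], scanned=['b.gz']))   # ['c.gz']
```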
@@ -115,10 +117,10 @@ class Analyzer:
         Dissects the cap file to extract info.
         """
         if cap is None:
-            print('[X] No caps to parse!')
+            self.logger.info('[X] No caps to parse!')
             return 0
 
-        print('[*] Started parsing...')
+        self.logger.info('Parsing cap ' + cap.input_filename[-15:])
 
         pipeline = self.r.pipeline()
         for packet in cap:
@@ -167,9 +169,9 @@ class Analyzer:
 
     def pop_cap(self):
         if not self.dataset:
-            absolute_path = self.r_d4.rpop(self.queue)
+            absolute_path = self.r_d4.rpop(self.queue).decode()
         else:
-            absolute_path = self.r.rpop('to_scan')
+            absolute_path = self.r.lpop('to_scan').decode()
         return get_cap(absolute_path)
 
     def process_d4(self):
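Two details in this hunk are easy to miss: redis-py returns `bytes`, so the popped path must be decoded before it can be used as a filename, and switching the local branch from `rpop` to `lpop` changes which end of the `to_scan` list is consumed. A standalone illustration against a local Redis (the key name comes from the diff, the paths are made up):

```python
# Illustration of the two behaviours this hunk relies on (not repo code).
import redis

r = redis.Redis()                                     # assumes a local Redis for the demo
r.delete('to_scan')
r.rpush('to_scan', '/caps/one.gz', '/caps/two.gz')    # invented paths

raw = r.lpop('to_scan')
print(raw)            # b'/caps/one.gz' -- redis-py returns bytes
print(raw.decode())   # '/caps/one.gz'  -- a usable filename

print(r.rpop('to_scan').decode())   # '/caps/two.gz' -- rpop consumes the opposite end
```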
lib/inspection.py

@@ -129,16 +129,15 @@ def get_files(path) -> list:
     return caps
 
 
-def init_cap_list(dataset_path: str, daylist: list) -> list:
-    cap_list = []
-    if not daylist:
-        return []
-    for day in daylist:
-        cap_path = dataset_path + str(day) + '/*.gz'
+def init_cap_list(dataset_path: str) -> list:
+    if dataset_path[-1] == '/':
+        extension = '*.gz'
+    else:
+        extension = '/*.gz'
+    cap_path = dataset_path + extension
     caps = get_files(cap_path)
     caps.sort()
-    cap_list += caps
-    return cap_list
+    return caps
 
 
 def list_caps(state: str, redis):
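The rewritten helper now just normalises the trailing slash before building the `*.gz` pattern handed to `get_files`, so both spellings of a dataset path behave the same. For example (run from the repository root, with an invented dataset path):

```python
# Illustration (invented path): both spellings resolve to the same '*.gz' pattern.
from lib.inspection import init_cap_list

caps = init_cap_list('/data/ipa-dataset')    # pattern: '/data/ipa-dataset/*.gz'
same = init_cap_list('/data/ipa-dataset/')   # pattern: '/data/ipa-dataset/*.gz'
assert caps == same
print(caps)   # sorted list of gzipped capture files found in the dataset directory
```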
setup.py

@@ -12,7 +12,7 @@ setup(
     url='https://github.com/D4-project/analyzer-d4-ipa',
     description='Pcap icmp parser focused on DDoS detection',
     packages=['lib'],
-    scripts=[],
+    scripts=['bin/run_ipa.py', 'bin/export.py'],
     include_package_data=True,
     classifiers=[
         'License :: OSI Approved :: BSD License',