fix: [analyzer] Local analyzer works

pull/1/head
airkeyp 2019-09-24 16:24:24 +02:00
parent a0934b116f
commit 3844908794
10 changed files with 74 additions and 37 deletions

@@ -10,7 +10,7 @@ redis = "*"
 hiredis = "*"
 pyshark = "*"
 ipa = {editable = true,path = "."}
-markdown = "*"
+markdown-strings = "*"

 [requires]
 python_version = "3.6"

Pipfile.lock generated

@@ -1,7 +1,7 @@
 {
     "_meta": {
         "hash": {
-            "sha256": "5b257321cbb7c926fc129a7e5195588ed28b9a84037556db11573e3e67442438"
+            "sha256": "9313ed688a2fb9ba22b23a7dbec3e0978f1e6d1f5ff8bb7dc00651bc11b50ade"
         },
         "pipfile-spec": 6,
         "requires": {
@@ -81,10 +81,10 @@
             ],
             "version": "==4.4.1"
         },
-        "markdown": {
+        "markdown-strings": {
             "hashes": [
-                "sha256:2e50876bcdd74517e7b71f3e7a76102050edec255b3983403f1a63e7c8a41e7a",
-                "sha256:56a46ac655704b91e5b7e6326ce43d5ef72411376588afa1dd90e881b83c7e8c"
+                "sha256:141e55d48742d6c19b09500bb71760f3aa04b6532c5d7725b9a030ce90663708",
+                "sha256:21bfc2ff1e6eec1209bb5835f090f9576cec46de38fb1991f57a422bb5efcf6f"
             ],
             "index": "pypi",
             "version": "==3.1.1"

@@ -31,7 +31,7 @@ pipenv install
 ## Usage

 #### Start the redis server
-Don't forget to set the DB directory in the redis.conf configuration. By default, the redis for Passive DNS is running on TCP port 6400
+Don't forget to set the DB directory in the redis.conf configuration. By default, the redis for IPA is running on TCP port 6405.
 ```shell script
 ../redis/src/redis-server ./etc/redis.conf
 ```
@@ -41,8 +41,7 @@ Don't forget to set the DB directory in the redis.conf configuration. By default
 cd ./etc
 cp analyzer.conf.sample analyzer.conf
 ```
-
-Edit the analyzer.conf to match the UUID of the analyzer queue from your D4 server.
+Edit analyzer.conf to match the UUID of the analyzer queue from your D4 server.
 ```shell script
 [global]
 my-uuid = 6072e072-bfaa-4395-9bb1-cdb3b470d715
@@ -51,7 +50,7 @@ d4-server = 127.0.0.1:6380
 logging-level = INFO
 ```

-Then you can start the analyzer.
+#### Start the analyzer
 ```shell script
 cd ../bin
 python3 run_ipa.py

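Note: the README hunk above still only shows the D4-server workflow. For the local mode this commit fixes, the analyzer should be started with the new `-p`/`--path` option pointing at a directory of gzipped caps (the dataset path below is only an example):

```shell script
cd ../bin
# local mode: any directory containing *.gz caps should work, the path is illustrative
python3 run_ipa.py -p /path/to/dataset
```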
bin/export.py Normal file

@@ -0,0 +1,37 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Markdown export module
#
# Copyright (C) 2019 Romain Kieffer
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

import markdown_strings as mds
import redis
import os

analyzer_redis_host = os.getenv('D4_ANALYZER_REDIS_HOST', '127.0.0.1')
analyzer_redis_port = int(os.getenv('D4_ANALYZER_REDIS_PORT', 6405))

r = redis.Redis(host=analyzer_redis_host, port=analyzer_redis_port)


def export_icmp_types():
    res = mds.table_row(['ICMP Type', 'Count'], [10, 10]) + '\n'
    res += '| :----- | -----: |\n'
    redis_dict = r.hgetall('icmp')
    for key in redis_dict:
        res += mds.table_row([key.decode(), redis_dict[key].decode()], [10, 10]) + '\n'
    return res

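A minimal usage sketch for the new export module; the import path and the output file name are illustrative, only `export_icmp_types()` and the `icmp` redis hash come from the file above:

```python
# Illustrative only: dump the ICMP type counts collected in redis as a markdown table.
from export import export_icmp_types

table = export_icmp_types()          # header row, separator, one row per ICMP type
with open('icmp_types.md', 'w') as f:
    f.write(table)
```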

@@ -26,7 +26,7 @@ import argparse

 if __name__ == "__main__":
     parser = argparse.ArgumentParser(description='D4-IPA')
-    parser.add_argument('-p', '--path', type=int, nargs=1, help='Path of local dataset.')
+    parser.add_argument('-p', '--path', type=str, nargs=1, help='Path of local dataset.')

     dataset = None

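Besides the `type=int` to `type=str` fix, note that `nargs=1` makes argparse return a one-element list. A hedged sketch of how the value would be unwrapped (the unwrapping line is an assumption, the rest of run_ipa.py is not shown in this hunk):

```python
import argparse

parser = argparse.ArgumentParser(description='D4-IPA')
parser.add_argument('-p', '--path', type=str, nargs=1, help='Path of local dataset.')

args = parser.parse_args()
# nargs=1 yields e.g. ['/path/to/dataset'], so the single element is taken out
dataset = args.path[0] if args.path else None
```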

@@ -89,7 +89,7 @@ protected-mode yes
 # Accept connections on the specified port, default is 6379 (IANA #815344).
 # If port 0 is specified Redis will not listen on a TCP socket.
-port 6400
+port 6405

 # TCP listen() backlog.
 #

@@ -25,7 +25,7 @@ import configparser
 import logging

 from lib.inspection import get_cap, get_protocol, check_icmp_checksum, get_icmp_payload, get_icmp_ip, \
-    unassigned_icmp_types, deprecated_icmp_types, get_src_port, get_dst_port, list_caps
+    unassigned_icmp_types, deprecated_icmp_types, get_src_port, get_dst_port, list_caps, init_cap_list


 class Analyzer:
@@ -51,7 +51,7 @@ class Analyzer:
         self.logger.addHandler(self.ch)

         analyzer_redis_host = os.getenv('D4_ANALYZER_REDIS_HOST', '127.0.0.1')
-        analyzer_redis_port = int(os.getenv('D4_ANALYZER_REDIS_PORT', 6400))
+        analyzer_redis_port = int(os.getenv('D4_ANALYZER_REDIS_PORT', 6405))
         self.r = redis.Redis(host=analyzer_redis_host, port=analyzer_redis_port)

         self.dataset = dataset_path
@@ -65,9 +65,15 @@
             self.r_d4 = redis.Redis(host=host_redis_metadata, port=port_redis_metadata, db=2)
         else:
             self.logger.info("Starting local analyzer")
-            self.update_queue()
+            self.queue = "to_scan"
             self.cap_list = []
+            self.logger.info("Adding dataset caps to local queue")
+            self.cap_list = init_cap_list(self.dataset)
+            self.logger.info(len(self.cap_list))
+            self.update_queue()
+            self.logger.info("Processing...")
             self.process_local()
+            self.logger.info("Done.")
             time.sleep(15)
             c = self.update_queue()
             if c == 0:
@@ -83,27 +89,23 @@
         p.execute()

     def update_queue(self):
-        """
-        Each parser instance is given a list of days, and thus a list of caps to parse.
-        This method lets the parser confront his list of caps with the caps in his queue.
-        """
         remaining_caps = list_caps(self.queue, self.r)
         current_caps = list_caps('scanning', self.r)
         parsed_caps = list_caps('scanned', self.r)
         caps_to_add = []
         if remaining_caps:
-            print('[*] Queue already populated.')
+            self.logger.info('Queue already populated.')
             if self.cap_list:
                 for cap in self.cap_list:
                     if cap not in remaining_caps and cap not in parsed_caps and cap not in current_caps:
                         caps_to_add.append(cap)
                 if not caps_to_add:
-                    print('[*] Already up to date.')
+                    self.logger.info('Already up to date.')
                     return 1
-            print('[o] Queue updated.')
+            self.logger.info('Queue updated.')
         else:
             if self.cap_list:
-                print('[*] No caps, initializing...')
+                self.logger.info('No caps enqueued, initializing...')
                 caps_to_add = self.cap_list
             elif current_caps:
                 return 0
@@ -115,10 +117,10 @@
         Dissects the cap file to extract info.
         """

         if cap is None:
-            print('[X] No caps to parse!')
+            self.logger.info('[X] No caps to parse!')
             return 0
-        print('[*] Started parsing...')
+        self.logger.info('Parsing cap ' + cap.input_filename[-15:])
        pipeline = self.r.pipeline()

         for packet in cap:
@@ -167,9 +169,9 @@

     def pop_cap(self):
         if not self.dataset:
-            absolute_path = self.r_d4.rpop(self.queue)
+            absolute_path = self.r_d4.rpop(self.queue).decode()
         else:
-            absolute_path = self.r.rpop('to_scan')
+            absolute_path = self.r.lpop('to_scan').decode()
         return get_cap(absolute_path)

     def process_d4(self):

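The `pop_cap()` change switches the local queue from `rpop` to `lpop` and decodes the reply: `init_cap_list()` returns the caps sorted, so popping from the left keeps them in order, and redis-py returns `bytes`, so `.decode()` is needed before the path reaches `get_cap()`. A small self-contained sketch of that queue discipline (host, port and paths are illustrative; how the repo actually pushes onto `to_scan` is not shown in this diff):

```python
import redis

r = redis.Redis(host='127.0.0.1', port=6405)

# Assuming caps are enqueued in sorted order with RPUSH (not shown in this diff),
# LPOP returns the oldest entry first, i.e. the queue behaves as FIFO.
r.rpush('to_scan', '/data/2019-09-01.gz', '/data/2019-09-02.gz')
oldest = r.lpop('to_scan')        # -> b'/data/2019-09-01.gz'
path = oldest.decode()            # redis-py returns bytes, get_cap() expects a str
```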

@@ -129,16 +129,15 @@ def get_files(path) -> list:
     return caps


-def init_cap_list(dataset_path: str, daylist: list) -> list:
-    cap_list = []
-    if not daylist:
-        return []
-    for day in daylist:
-        cap_path = dataset_path + str(day) + '/*.gz'
-        caps = get_files(cap_path)
-        caps.sort()
-        cap_list += caps
-    return cap_list
+def init_cap_list(dataset_path: str) -> list:
+    if dataset_path[-1] == '/':
+        extension = '*.gz'
+    else:
+        extension = '/*.gz'
+    cap_path = dataset_path + extension
+    caps = get_files(cap_path)
+    caps.sort()
+    return caps


 def list_caps(state: str, redis):

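The rewritten `init_cap_list()` drops the per-day list and simply globs the dataset directory, normalizing the trailing slash before building the pattern. A standalone sketch of the same logic (the function name is hypothetical, and it assumes `get_files()` is a glob-style lookup, which is not shown in this hunk):

```python
import glob

def caps_for(dataset_path: str) -> list:
    # '/data/set'  -> '/data/set/*.gz'
    # '/data/set/' -> '/data/set/*.gz'
    pattern = dataset_path + ('*.gz' if dataset_path.endswith('/') else '/*.gz')
    caps = glob.glob(pattern)   # stand-in for get_files(), assumed glob-based
    caps.sort()
    return caps
```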

@@ -12,7 +12,7 @@ setup(
     url='https://github.com/D4-project/analyzer-d4-ipa',
     description='Pcap icmp parser focused on DDoS detection',
     packages=['lib'],
-    scripts=[],
+    scripts=['bin/run_ipa.py', 'bin/export.py'],
     include_package_data=True,
     classifiers=[
         'License :: OSI Approved :: BSD License',
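
Listing the two entry points under `scripts=` means setuptools copies them into the environment's bin/ directory on install, so they become directly invocable. A hedged example, assuming the editable install already declared in the Pipfile (`ipa = {editable = true,path = "."}`):

```shell script
pipenv install                    # the editable ipa entry installs the package itself
pipenv run run_ipa.py --help      # scripts= entries land on the virtualenv's PATH
```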