fix: Avoiding errors with uncommon lines

- Excluding all comment lines and empty lines from the data
  before it is parsed
- Skipping lines whose indexes are out of range
features_csvimport
chrisr3d 2019-10-03 16:03:30 +02:00
parent 3d7de2dc22
commit c5c5c16ff1
No known key found for this signature in database
GPG Key ID: 6BBED1B63A6D639F
1 changed file with 6 additions and 3 deletions

@@ -125,7 +125,10 @@ class CsvParser():
         score = self.__get_score()
         if attribute_indexes:
             for line in self.data:
-                base_attribute = self.__score_mapping[score](line, attribute_indexes)
+                try:
+                    base_attribute = self.__score_mapping[score](line, attribute_indexes)
+                except IndexError:
+                    continue
                 for index in types_indexes:
                     attribute = {'type': self.header[index], 'value': line[index]}
                     attribute.update(base_attribute)
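
In this first hunk, the call that maps a line to its base attribute is wrapped in try/except IndexError, so a row with too few columns is skipped instead of aborting the whole import. The following is a minimal standalone sketch of the same pattern; the sample rows, the attribute_indexes values and the result keys are invented for illustration and are not taken from the module.

# Minimal sketch of the skip-on-IndexError behaviour (hypothetical data).
rows = [
    ['domain', 'evil.example.com', 'Network activity'],
    ['ip-dst'],                                   # malformed row: too few columns
    ['url', 'http://evil.example.com/x', 'Payload delivery'],
]
attribute_indexes = [1, 2]

parsed = []
for row in rows:
    try:
        # Indexing a short row raises IndexError, just like the mapped call above
        values = [row[index] for index in attribute_indexes]
    except IndexError:
        continue  # skip the broken row instead of failing the whole import
    parsed.append(dict(zip(('value', 'comment'), values)))

print(parsed)  # only the two well-formed rows are kept
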
@@ -203,11 +206,11 @@ def __any_mandatory_misp_field(header):
 
 
 def __special_parsing(data, delimiter):
-    return list(line.split(delimiter) for line in csv.reader(io.TextIOWrapper(io.BytesIO(data.encode()), encoding='utf-8')))
+    return list(line.split(delimiter) for line in csv.reader(io.TextIOWrapper(io.BytesIO(data.encode()), encoding='utf-8')) if line and not line.startswith('#'))
 
 
 def __standard_parsing(data):
-    return list(line for line in csv.reader(io.TextIOWrapper(io.BytesIO(data.encode()), encoding='utf-8')))
+    return list(line for line in csv.reader(io.TextIOWrapper(io.BytesIO(data.encode()), encoding='utf-8')) if line and not line[0].startswith('#'))
 
 
 def handler(q=False):
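
The second hunk adds a filter to both parsing helpers so that empty lines and comment lines never reach the parsed data in the first place. Below is a rough standalone sketch of that filtering, following the __standard_parsing variant: csv.reader yields each record as a list of fields, so an empty record is dropped via `if line` and a comment record via its first field starting with '#'. The sample CSV payload is made up for illustration.

import csv
import io

# Hypothetical CSV payload containing a comment line and an empty line.
data = "# header comment\ndomain,evil.example.com\n\nip-dst,198.51.100.7\n"

lines = list(
    line
    for line in csv.reader(io.TextIOWrapper(io.BytesIO(data.encode()), encoding='utf-8'))
    if line and not line[0].startswith('#')
)

print(lines)  # [['domain', 'evil.example.com'], ['ip-dst', '198.51.100.7']]
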