Merge branch 'master' of github.com:oasis-open/cti-python-stix2

master
chrisr3d 2018-12-14 10:09:58 +01:00
commit a68a43a732
200 changed files with 21456 additions and 4034 deletions

28
.gitignore vendored

@ -68,3 +68,31 @@ cache.sqlite
# PyCharm
.idea/
### macOS template
# General
.DS_Store
.AppleDouble
.LSOverride
# Icon must end with two \r
Icon
# Thumbnails
._*
# Files that might appear in the root of a volume
.DocumentRevisions-V100
.fseventsd
.Spotlight-V100
.TemporaryItems
.Trashes
.VolumeIcon.icns
.com.apple.timemachine.donotpresent
# Directories potentially created on remote AFP share
.AppleDB
.AppleDesktop
Network Trash Folder
Temporary Items
.apdisk


@ -2,6 +2,7 @@
skip = workbench.py
not_skip = __init__.py
known_third_party =
antlr4,
dateutil,
medallion,
pytest,
@ -14,3 +15,5 @@ known_third_party =
taxii2client,
known_first_party = stix2
force_sort_within_sections = 1
multi_line_output = 5
include_trailing_comma = True
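For context: multi_line_output = 5 combined with include_trailing_comma is what produces the parenthesized, trailing-comma import style used throughout the rest of this diff, for example:

from .datastore.filesystem import (
    FileSystemSink, FileSystemSource, FileSystemStore,
)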


@ -1,11 +1,16 @@
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
sha: v0.9.4
rev: v1.3.0
hooks:
- id: trailing-whitespace
- id: flake8
args:
- --max-line-length=160
- id: check-merge-conflict
- repo: https://github.com/asottile/add-trailing-comma
rev: v0.6.4
hooks:
- id: add-trailing-comma
- repo: https://github.com/FalconSocial/pre-commit-python-sorter
sha: b57843b0b874df1d16eb0bef00b868792cb245c2
hooks:


@ -1,6 +1,15 @@
CHANGELOG
=========
1.1.0 - 2018-12-11
- Most (if not all) STIX 2.1 SDOs/SROs and core objects have been implemented according to the latest CSD/WD document
- Added an implementation of the confidence conversion scales described in STIX 2.1 Part 1, Appendix B
- #196, #193 Removed duplicate code for: properties, registering objects, parsing objects, custom objects
- #80, #197 Most (if not all) tests created for v20 are also implemented for v21
- #189 Added extra checks for the pre-commit tool
- #202 It is now possible to pass a Bundle to the add() method of the Memory datastores
1.0.4 - 2018-11-15
* #225 MemorySource fix to support custom objects


@ -1,42 +1,34 @@
|Build_Status| |Coverage| |Version|
|Build_Status| |Coverage| |Version| |Downloads_Badge|
cti-python-stix2
================
This is an `OASIS TC Open
Repository <https://www.oasis-open.org/resources/open-
repositories/>`__.
This is an `OASIS TC Open Repository <https://www.oasis-open.org/resources/open-repositories/>`__.
See the `Governance <#governance>`__ section for more information.
This repository provides Python APIs for serializing and de-
serializing
STIX 2 JSON content, along with higher-level APIs for common tasks,
including data markings, versioning, and for resolving STIX IDs across
multiple data sources.
This repository provides Python APIs for serializing and de-serializing STIX2
JSON content, along with higher-level APIs for common tasks, including data
markings, versioning, and for resolving STIX IDs across multiple data sources.
For more information, see `the
documentation <https://stix2.readthedocs.io/>`__ on
ReadTheDocs.
For more information, see `the documentation <https://stix2.readthedocs.io/>`__ on ReadTheDocs.
Installation
------------
Install with `pip <https://pip.pypa.io/en/stable/>`__:
::
.. code-block:: bash
pip install stix2
$ pip install stix2
Usage
-----
To create a STIX object, provide keyword arguments to the type's
constructor. Certain required attributes of all objects, such as
``type`` or
``id``, will be set automatically if not provided as keyword
arguments.
To create a STIX object, provide keyword arguments to the type's constructor.
Certain required attributes of all objects, such as ``type`` or ``id``, will
be set automatically if not provided as keyword arguments.
.. code:: python
.. code-block:: python
from stix2 import Indicator
@ -44,135 +36,100 @@ arguments.
labels=["malicious-activity"],
pattern="[file:hashes.md5 = 'd41d8cd98f00b204e9800998ecf8427e']")
To parse a STIX JSON string into a Python STIX object, use
``parse()``:
To parse a STIX JSON string into a Python STIX object, use ``parse()``:
.. code:: python
.. code-block:: python
from stix2 import parse
indicator = parse("""{
"type": "indicator",
"spec_version": "2.1",
"id": "indicator--dbcbd659-c927-4f9a-994f-0a2632274394",
"created": "2017-09-26T23:33:39.829Z",
"modified": "2017-09-26T23:33:39.829Z",
"labels": [
"name": "File hash for malware variant",
"indicator_types": [
"malicious-activity"
],
"name": "File hash for malware variant",
"pattern": "[file:hashes.md5 ='d41d8cd98f00b204e9800998ecf8427e']",
"valid_from": "2017-09-26T23:33:39.829952Z"
}""")
print(indicator)
For more in-depth documentation, please see
`https://stix2.readthedocs.io/ <https://stix2.readthedocs.io/>`__.
For more in-depth documentation, please see `https://stix2.readthedocs.io/ <https://stix2.readthedocs.io/>`__.
STIX 2.X Technical Specification Support
----------------------------------------
This version of python-stix2 supports STIX 2.0 by default. Although,
the
`stix2` Python library is built to support multiple versions of the
STIX
Technical Specification. With every major release of stix2 the
``import stix2``
statement will automatically load the SDO/SROs equivalent to the most
recent
supported 2.X Technical Specification. Please see the library
documentation
for more details.
This version of python-stix2 brings initial support for STIX 2.1, currently at
the CSD level. The intention is to help debug components of the library and to
check for problems that should be fixed in the specification.
The `stix2` Python library is built to support multiple versions of the STIX
Technical Specification. With every major release of stix2 the ``import stix2``
statement will automatically load the SDO/SROs equivalent to the most recent
supported 2.X Committee Specification. Please see the library documentation for
more details.
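The new version-specific subpackages can also be used directly. A minimal
sketch, assuming ``stix2.v21`` re-exports its SDO classes as the new API
documentation pages suggest:

.. code-block:: python

    import stix2.v21

    indicator = stix2.v21.Indicator(
        name="File hash for malware variant",
        indicator_types=["malicious-activity"],
        pattern="[file:hashes.md5 = 'd41d8cd98f00b204e9800998ecf8427e']")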
Governance
----------
This GitHub public repository (
**https://github.com/oasis-open/cti-python-stix2** ) was
`proposed <https://lists.oasis-
open.org/archives/cti/201702/msg00008.html>`__
and
`approved <https://www.oasis-
open.org/committees/download.php/60009/>`__
This GitHub public repository (**https://github.com/oasis-open/cti-python-stix2**) was
`proposed <https://lists.oasis-open.org/archives/cti/201702/msg00008.html>`__ and
`approved <https://www.oasis-open.org/committees/download.php/60009/>`__
[`bis <https://issues.oasis-open.org/browse/TCADMIN-2549>`__] by the
`OASIS Cyber Threat Intelligence (CTI)
TC <https://www.oasis-open.org/committees/cti/>`__ as an `OASIS TC
Open
Repository <https://www.oasis-open.org/resources/open-
repositories/>`__
to support development of open source resources related to Technical
Committee work.
`OASIS Cyber Threat Intelligence (CTI) TC <https://www.oasis-open.org/committees/cti/>`__
as an `OASIS TC Open Repository <https://www.oasis-open.org/resources/open-repositories/>`__
to support development of open source resources related to Technical Committee work.
While this TC Open Repository remains associated with the sponsor TC,
its
development priorities, leadership, intellectual property terms,
participation rules, and other matters of governance are `separate and
distinct <https://github.com/oasis-open/cti-python-
stix2/blob/master/CONTRIBUTING.md#governance-distinct-from-oasis-tc-
process>`__
While this TC Open Repository remains associated with the sponsor TC, its
development priorities, leadership, intellectual property terms, participation
rules, and other matters of governance are `separate and distinct
<https://github.com/oasis-open/cti-python-stix2/blob/master/CONTRIBUTING.md#governance-distinct-from-oasis-tc-process>`__
from the OASIS TC Process and related policies.
All contributions made to this TC Open Repository are subject to open
source license terms expressed in the `BSD-3-Clause
License <https://www.oasis-open.org/sites/www.oasis-
open.org/files/BSD-3-Clause.txt>`__.
That license was selected as the declared `"Applicable
License" <https://www.oasis-open.org/resources/open-
repositories/licenses>`__
source license terms expressed in the `BSD-3-Clause License <https://www.oasis-open.org/sites/www.oasis-open.org/files/BSD-3-Clause.txt>`__.
That license was selected as the declared `"Applicable License" <https://www.oasis-open.org/resources/open-repositories/licenses>`__
when the TC Open Repository was created.
As documented in `"Public Participation
Invited <https://github.com/oasis-open/cti-python-
stix2/blob/master/CONTRIBUTING.md#public-participation-invited>`__",
contributions to this OASIS TC Open Repository are invited from all
parties, whether affiliated with OASIS or not. Participants must have
a
GitHub account, but no fees or OASIS membership obligations are
required. Participation is expected to be consistent with the `OASIS
TC Open Repository Guidelines and
Procedures <https://www.oasis-open.org/policies-guidelines/open-
repositories>`__,
the open source
`LICENSE <https://github.com/oasis-open/cti-python-
stix2/blob/master/LICENSE>`__
As documented in `"Public Participation Invited
<https://github.com/oasis-open/cti-python-stix2/blob/master/CONTRIBUTING.md#public-participation-invited>`__",
contributions to this OASIS TC Open Repository are invited from all parties,
whether affiliated with OASIS or not. Participants must have a GitHub account,
but no fees or OASIS membership obligations are required. Participation is
expected to be consistent with the `OASIS TC Open Repository Guidelines and Procedures
<https://www.oasis-open.org/policies-guidelines/open-repositories>`__,
the open source `LICENSE <https://github.com/oasis-open/cti-python-stix2/blob/master/LICENSE>`__
designated for this particular repository, and the requirement for an
`Individual Contributor License
Agreement <https://www.oasis-open.org/resources/open-
repositories/cla/individual-cla>`__
`Individual Contributor License Agreement <https://www.oasis-open.org/resources/open-repositories/cla/individual-cla>`__
that governs intellectual property.
Maintainers
~~~~~~~~~~~
TC Open Repository
`Maintainers <https://www.oasis-open.org/resources/open-
repositories/maintainers-guide>`__
TC Open Repository `Maintainers <https://www.oasis-open.org/resources/open-repositories/maintainers-guide>`__
are responsible for oversight of this project's community development
activities, including evaluation of GitHub `pull
requests <https://github.com/oasis-open/cti-python-
stix2/blob/master/CONTRIBUTING.md#fork-and-pull-collaboration-
model>`__
and
`preserving <https://www.oasis-open.org/policies-guidelines/open-
repositories#repositoryManagement>`__
open source principles of openness and fairness. Maintainers are
recognized and trusted experts who serve to implement community goals
and consensus design preferences.
activities, including evaluation of GitHub
`pull requests <https://github.com/oasis-open/cti-python-stix2/blob/master/CONTRIBUTING.md#fork-and-pull-collaboration-model>`__
and `preserving <https://www.oasis-open.org/policies-guidelines/open-repositories#repositoryManagement>`__
open source principles of openness and fairness. Maintainers are recognized
and trusted experts who serve to implement community goals and consensus design
preferences.
Initially, the associated TC members have designated one or more
persons
to serve as Maintainer(s); subsequently, participating community
members
may select additional or substitute Maintainers, per `consensus
agreements <https://www.oasis-open.org/resources/open-
repositories/maintainers-guide#additionalMaintainers>`__.
Initially, the associated TC members have designated one or more persons to
serve as Maintainer(s); subsequently, participating community members may
select additional or substitute Maintainers, per `consensus agreements
<https://www.oasis-open.org/resources/open-repositories/maintainers-guide#additionalMaintainers>`__.
.. _currentMaintainers:
.. _currentmaintainers:
**Current Maintainers of this TC Open Repository**
- `Chris Lenk <mailto:clenk@mitre.org>`__; GitHub ID:
https://github.com/clenk/; WWW: `MITRE
Corporation <http://www.mitre.org/>`__
https://github.com/clenk/; WWW: `MITRE Corporation <http://www.mitre.org/>`__
- `Emmanuelle Vargas-Gonzalez <mailto:emmanuelle@mitre.org>`__; GitHub ID:
https://github.com/emmanvg/; WWW: `MITRE
@ -181,39 +138,32 @@ repositories/maintainers-guide#additionalMaintainers>`__.
About OASIS TC Open Repositories
--------------------------------
- `TC Open Repositories: Overview and
Resources <https://www.oasis-open.org/resources/open-
repositories/>`__
- `Frequently Asked
Questions <https://www.oasis-open.org/resources/open-
repositories/faq>`__
- `Open Source
Licenses <https://www.oasis-open.org/resources/open-
repositories/licenses>`__
- `Contributor License Agreements
(CLAs) <https://www.oasis-open.org/resources/open-
repositories/cla>`__
- `Maintainers' Guidelines and
Agreement <https://www.oasis-open.org/resources/open-
repositories/maintainers-guide>`__
- `TC Open Repositories: Overview and Resources <https://www.oasis-open.org/resources/open-repositories/>`__
- `Frequently Asked Questions <https://www.oasis-open.org/resources/open-repositories/faq>`__
- `Open Source Licenses <https://www.oasis-open.org/resources/open-repositories/licenses>`__
- `Contributor License Agreements (CLAs) <https://www.oasis-open.org/resources/open-repositories/cla>`__
- `Maintainers' Guidelines and Agreement <https://www.oasis-open.org/resources/open-repositories/maintainers-guide>`__
Feedback
--------
Questions or comments about this TC Open Repository's activities
should be
composed as GitHub issues or comments. If use of an issue/comment is
not
Questions or comments about this TC Open Repository's activities should be
composed as GitHub issues or comments. If use of an issue/comment is not
possible or appropriate, questions may be directed by email to the
Maintainer(s) `listed above <#currentmaintainers>`__. Please send
general questions about TC Open Repository participation to OASIS
Staff at
Maintainer(s) `listed above <#currentmaintainers>`__. Please send general
questions about TC Open Repository participation to OASIS Staff at
repository-admin@oasis-open.org and any specific CLA-related questions
to repository-cla@oasis-open.org.
.. |Build_Status| image:: https://travis-ci.org/oasis-open/cti-python-stix2.svg?branch=master
:target: https://travis-ci.org/oasis-open/cti-python-stix2
:alt: Build Status
.. |Coverage| image:: https://codecov.io/gh/oasis-open/cti-python-stix2/branch/master/graph/badge.svg
:target: https://codecov.io/gh/oasis-open/cti-python-stix2
:alt: Coverage
.. |Version| image:: https://img.shields.io/pypi/v/stix2.svg?maxAge=3600
:target: https://pypi.python.org/pypi/stix2/
:alt: Version
.. |Downloads_Badge| image:: https://img.shields.io/pypi/dm/stix2.svg?maxAge=3600
:target: https://pypi.python.org/pypi/stix2/
:alt: Downloads


@ -0,0 +1,5 @@
scales
=======================
.. automodule:: stix2.confidence.scales
:members:


@ -0,0 +1,5 @@
confidence
================
.. automodule:: stix2.confidence
:members:


@ -0,0 +1,5 @@
bundle
================
.. automodule:: stix2.v20.bundle
:members:


@ -0,0 +1,5 @@
bundle
================
.. automodule:: stix2.v21.bundle
:members:


@ -0,0 +1,5 @@
common
================
.. automodule:: stix2.v21.common
:members:


@ -0,0 +1,5 @@
observables
=====================
.. automodule:: stix2.v21.observables
:members:


@ -0,0 +1,5 @@
sdo
=============
.. automodule:: stix2.v21.sdo
:members:


@ -0,0 +1,5 @@
sro
=============
.. automodule:: stix2.v21.sro
:members:


@ -1,3 +1,4 @@
import datetime
import os
import re
import sys
@ -6,6 +7,7 @@ from six import class_types
from sphinx.ext.autodoc import ClassDocumenter
from stix2.base import _STIXBase
from stix2.version import __version__
sys.path.insert(0, os.path.abspath('..'))
@ -31,11 +33,11 @@ source_suffix = '.rst'
master_doc = 'index'
project = 'stix2'
copyright = '2017, OASIS Open'
copyright = '{}, OASIS Open'.format(datetime.date.today().year)
author = 'OASIS Open'
version = '1.0.4'
release = '1.0.4'
version = __version__
release = __version__
language = None
exclude_patterns = ['_build', '_templates', 'Thumbs.db', '.DS_Store', 'guide/.ipynb_checkpoints']
@ -49,7 +51,7 @@ html_sidebars = {
'navigation.html',
'relations.html',
'searchbox.html',
]
],
}
latex_elements = {}


@ -7,8 +7,10 @@ import stix2
def main():
collection = Collection("http://127.0.0.1:5000/trustgroup1/collections/52892447-4d7e-4f70-b94d-d7f22742ff63/",
user="admin", password="Password0")
collection = Collection(
"http://127.0.0.1:5000/trustgroup1/collections/52892447-4d7e-4f70-b94d-d7f22742ff63/",
user="admin", password="Password0",
)
# instantiate TAXII data source
taxii = stix2.TAXIICollectionSource(collection)


@ -1,12 +1,10 @@
[bumpversion]
current_version = 1.0.4
current_version = 1.1.0
commit = True
tag = True
[bumpversion:file:stix2/version.py]
[bumpversion:file:docs/conf.py]
[metadata]
license_file = LICENSE


@ -11,26 +11,27 @@ VERSION_FILE = os.path.join(BASE_DIR, 'stix2', 'version.py')
def get_version():
with open(VERSION_FILE) as f:
for line in f.readlines():
if line.startswith("__version__"):
if line.startswith('__version__'):
version = line.split()[-1].strip('"')
return version
raise AttributeError("Package does not have a __version__")
with open('README.rst') as f:
long_description = f.read()
def get_long_description():
with open('README.rst') as f:
return f.read()
setup(
name='stix2',
version=get_version(),
description='Produce and consume STIX 2 JSON content',
long_description=long_description,
url='https://github.com/oasis-open/cti-python-stix2',
long_description=get_long_description(),
url='https://oasis-open.github.io/cti-documentation/',
author='OASIS Cyber Threat Intelligence Technical Committee',
author_email='cti-users@lists.oasis-open.org',
maintainer='Greg Back',
maintainer_email='gback@mitre.org',
maintainer='Chris Lenk, Emmanuelle Vargas-Gonzalez',
maintainer_email='clenk@mitre.org, emmanuelle@mitre.org',
license='BSD',
classifiers=[
'Development Status :: 4 - Beta',
@ -45,7 +46,7 @@ setup(
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
],
keywords="stix stix2 json cti cyber threat intelligence",
keywords='stix stix2 json cti cyber threat intelligence',
packages=find_packages(exclude=['*.test']),
install_requires=[
'python-dateutil',
@ -55,7 +56,12 @@ setup(
'six',
'stix2-patterns',
],
project_urls={
'Documentation': 'https://stix2.readthedocs.io/',
'Source Code': 'https://github.com/oasis-open/cti-python-stix2/',
'Bug Tracker': 'https://github.com/oasis-open/cti-python-stix2/issues/',
},
extras_require={
'taxii': ['taxii2-client']
}
'taxii': ['taxii2-client'],
},
)


@ -3,6 +3,7 @@
.. autosummary::
:toctree: api
confidence
core
datastore
environment
@ -11,49 +12,57 @@
patterns
properties
utils
workbench
v20.bundle
v20.common
v20.observables
v20.sdo
v20.sro
v21.bundle
v21.common
v21.observables
v21.sdo
v21.sro
workbench
"""
# flake8: noqa
from .core import Bundle, _collect_stix2_obj_maps, _register_type, parse
from .confidence import scales
from .core import _collect_stix2_mappings, parse, parse_observable
from .datastore import CompositeDataSource
from .datastore.filesystem import (FileSystemSink, FileSystemSource,
FileSystemStore)
from .datastore.filesystem import (
FileSystemSink, FileSystemSource, FileSystemStore,
)
from .datastore.filters import Filter
from .datastore.memory import MemorySink, MemorySource, MemoryStore
from .datastore.taxii import (TAXIICollectionSink, TAXIICollectionSource,
TAXIICollectionStore)
from .datastore.taxii import (
TAXIICollectionSink, TAXIICollectionSource, TAXIICollectionStore,
)
from .environment import Environment, ObjectFactory
from .markings import (add_markings, clear_markings, get_markings, is_marked,
remove_markings, set_markings)
from .patterns import (AndBooleanExpression, AndObservationExpression,
BasicObjectPathComponent, BinaryConstant,
BooleanConstant, EqualityComparisonExpression,
FloatConstant, FollowedByObservationExpression,
GreaterThanComparisonExpression,
GreaterThanEqualComparisonExpression, HashConstant,
HexConstant, InComparisonExpression, IntegerConstant,
IsSubsetComparisonExpression,
IsSupersetComparisonExpression,
LessThanComparisonExpression,
LessThanEqualComparisonExpression,
LikeComparisonExpression, ListConstant,
ListObjectPathComponent, MatchesComparisonExpression,
ObjectPath, ObservationExpression, OrBooleanExpression,
OrObservationExpression, ParentheticalExpression,
QualifiedObservationExpression,
ReferenceObjectPathComponent, RepeatQualifier,
StartStopQualifier, StringConstant, TimestampConstant,
WithinQualifier)
from .markings import (
add_markings, clear_markings, get_markings, is_marked, remove_markings,
set_markings,
)
from .patterns import (
AndBooleanExpression, AndObservationExpression, BasicObjectPathComponent,
BinaryConstant, BooleanConstant, EqualityComparisonExpression,
FloatConstant, FollowedByObservationExpression,
GreaterThanComparisonExpression, GreaterThanEqualComparisonExpression,
HashConstant, HexConstant, InComparisonExpression, IntegerConstant,
IsSubsetComparisonExpression, IsSupersetComparisonExpression,
LessThanComparisonExpression, LessThanEqualComparisonExpression,
LikeComparisonExpression, ListConstant, ListObjectPathComponent,
MatchesComparisonExpression, ObjectPath, ObservationExpression,
OrBooleanExpression, OrObservationExpression, ParentheticalExpression,
QualifiedObservationExpression, ReferenceObjectPathComponent,
RepeatQualifier, StartStopQualifier, StringConstant, TimestampConstant,
WithinQualifier,
)
from .utils import new_version, revoke
from .v20 import * # This import will always be the latest STIX 2.X version
from .version import __version__
_collect_stix2_obj_maps()
_collect_stix2_mappings()
DEFAULT_VERSION = "2.0" # Default version will always be the latest STIX 2.X version
DEFAULT_VERSION = '2.0' # Default version will always be the latest STIX 2.X version


@ -1,4 +1,4 @@
"""Base classes for type definitions in the stix2 library."""
"""Base classes for type definitions in the STIX2 library."""
import collections
import copy
@ -6,11 +6,12 @@ import datetime as dt
import simplejson as json
from .exceptions import (AtLeastOnePropertyError, CustomContentError,
DependentPropertiesError, ExtraPropertiesError,
ImmutableError, InvalidObjRefError, InvalidValueError,
MissingPropertiesError,
MutuallyExclusivePropertiesError)
from .exceptions import (
AtLeastOnePropertyError, CustomContentError, DependentPropertiesError,
ExtraPropertiesError, ImmutableError, InvalidObjRefError,
InvalidValueError, MissingPropertiesError,
MutuallyExclusivePropertiesError,
)
from .markings.utils import validate
from .utils import NOW, find_property_index, format_datetime, get_timestamp
from .utils import new_version as _new_version
@ -104,11 +105,11 @@ class _STIXBase(collections.Mapping):
def _check_at_least_one_property(self, list_of_properties=None):
if not list_of_properties:
list_of_properties = sorted(list(self.__class__._properties.keys()))
if "type" in list_of_properties:
list_of_properties.remove("type")
if 'type' in list_of_properties:
list_of_properties.remove('type')
current_properties = self.properties_populated()
list_of_properties_populated = set(list_of_properties).intersection(current_properties)
if list_of_properties and (not list_of_properties_populated or list_of_properties_populated == set(["extensions"])):
if list_of_properties and (not list_of_properties_populated or list_of_properties_populated == set(['extensions'])):
raise AtLeastOnePropertyError(self.__class__, list_of_properties)
def _check_properties_dependency(self, list_of_properties, list_of_dependent_properties):
@ -121,8 +122,8 @@ class _STIXBase(collections.Mapping):
raise DependentPropertiesError(self.__class__, failed_dependency_pairs)
def _check_object_constraints(self):
for m in self.get("granular_markings", []):
validate(self, m.get("selectors"))
for m in self.get('granular_markings', []):
validate(self, m.get('selectors'))
def __init__(self, allow_custom=False, interoperability=False, **kwargs):
cls = self.__class__
@ -191,7 +192,7 @@ class _STIXBase(collections.Mapping):
# usual behavior of this method reads an __init__-assigned attribute,
# which would cause infinite recursion. So this check disables all
# attribute reads until the instance has been properly initialized.
unpickling = "_inner" not in self.__dict__
unpickling = '_inner' not in self.__dict__
if not unpickling and name in self:
return self.__getitem__(name)
raise AttributeError("'%s' object has no attribute '%s'" %
@ -207,8 +208,10 @@ class _STIXBase(collections.Mapping):
def __repr__(self):
props = [(k, self[k]) for k in self.object_properties() if self.get(k)]
return "{0}({1})".format(self.__class__.__name__,
", ".join(["{0!s}={1!r}".format(k, v) for k, v in props]))
return '{0}({1})'.format(
self.__class__.__name__,
', '.join(['{0!s}={1!r}'.format(k, v) for k, v in props]),
)
def __deepcopy__(self, memo):
# Assume: we can ignore the memo argument, because no object will ever contain the same sub-object multiple times.
@ -274,7 +277,7 @@ class _STIXBase(collections.Mapping):
def sort_by(element):
return find_property_index(self, *element)
kwargs.update({'indent': 4, 'separators': (",", ": "), 'item_sort_key': sort_by})
kwargs.update({'indent': 4, 'separators': (',', ': '), 'item_sort_key': sort_by})
if include_optional_defaults:
return json.dumps(self, cls=STIXJSONIncludeOptionalDefaultsEncoder, **kwargs)


@ -0,0 +1,10 @@
"""
Functions to operate with STIX2 Confidence scales.
.. autosummary::
:toctree: confidence
scales
|
"""

571
stix2/confidence/scales.py Normal file

@ -0,0 +1,571 @@
# -*- coding: utf-8 -*-
"""Functions to perform conversions between the different Confidence scales.
As specified in STIX Version 2.1. Part 1: STIX Core Concepts - Appendix B"""
def none_low_med_high_to_value(scale_value):
"""
This method will transform a string value from the None / Low / Med /
High scale to its confidence integer representation.
The scale for this confidence representation is the following:
.. list-table:: None, Low, Med, High to STIX Confidence
:header-rows: 1
* - None/ Low/ Med/ High
- STIX Confidence Value
* - Not Specified
- Not Specified
* - None
- 0
* - Low
- 15
* - Med
- 50
* - High
- 85
Args:
scale_value (str): A string value from the scale. Accepted strings are
"None", "Low", "Med" and "High". Argument is case sensitive.
Returns:
int: The numerical representation corresponding to values in the
None / Low / Med / High scale.
Raises:
ValueError: If `scale_value` is not within the accepted strings.
"""
if scale_value == 'None':
return 0
elif scale_value == 'Low':
return 15
elif scale_value == 'Med':
return 50
elif scale_value == 'High':
return 85
else:
raise ValueError("STIX Confidence value cannot be determined for %s" % scale_value)
def value_to_none_low_medium_high(confidence_value):
"""
This method will transform an integer value into the None / Low / Med /
High scale string representation.
The scale for this confidence representation is the following:
.. list-table:: STIX Confidence to None, Low, Med, High
:header-rows: 1
* - Range of Values
- None/ Low/ Med/ High
* - 0
- None
* - 1-29
- Low
* - 30-69
- Med
* - 70-100
- High
Args:
confidence_value (int): An integer value between 0 and 100.
Returns:
str: A string corresponding to the None / Low / Med / High scale.
Raises:
ValueError: If `confidence_value` is out of bounds.
"""
if confidence_value == 0:
return 'None'
elif 29 >= confidence_value >= 1:
return 'Low'
elif 69 >= confidence_value >= 30:
return 'Med'
elif 100 >= confidence_value >= 70:
return 'High'
else:
raise ValueError("Range of values out of bounds: %s" % confidence_value)
def zero_ten_to_value(scale_value):
"""
This method will transform a string value from the 0-10 scale to its
confidence integer representation.
The scale for this confidence representation is the following:
.. list-table:: 0-10 to STIX Confidence
:header-rows: 1
* - 0-10 Scale
- STIX Confidence Value
* - 0
- 0
* - 1
- 10
* - 2
- 20
* - 3
- 30
* - 4
- 40
* - 5
- 50
* - 6
- 60
* - 7
- 70
* - 8
- 80
* - 9
- 90
* - 10
- 100
Args:
scale_value (str): A string value from the scale. Accepted strings are "0"
through "10" inclusive.
Returns:
int: The numerical representation corresponding to values in the 0-10
scale.
Raises:
ValueError: If `scale_value` is not within the accepted strings.
"""
if scale_value == '0':
return 0
elif scale_value == '1':
return 10
elif scale_value == '2':
return 20
elif scale_value == '3':
return 30
elif scale_value == '4':
return 40
elif scale_value == '5':
return 50
elif scale_value == '6':
return 60
elif scale_value == '7':
return 70
elif scale_value == '8':
return 80
elif scale_value == '9':
return 90
elif scale_value == '10':
return 100
else:
raise ValueError("STIX Confidence value cannot be determined for %s" % scale_value)
def value_to_zero_ten(confidence_value):
"""
This method will transform an integer value into the 0-10 scale string
representation.
The scale for this confidence representation is the following:
.. list-table:: STIX Confidence to 0-10
:header-rows: 1
* - Range of Values
- 0-10 Scale
* - 0-4
- 0
* - 5-14
- 1
* - 15-24
- 2
* - 25-34
- 3
* - 35-44
- 4
* - 45-54
- 5
* - 55-64
- 6
* - 65-74
- 7
* - 75-84
- 8
* - 85-94
- 9
* - 95-100
- 10
Args:
confidence_value (int): An integer value between 0 and 100.
Returns:
str: A string corresponding to the 0-10 scale.
Raises:
ValueError: If `confidence_value` is out of bounds.
"""
if 4 >= confidence_value >= 0:
return '0'
elif 14 >= confidence_value >= 5:
return '1'
elif 24 >= confidence_value >= 15:
return '2'
elif 34 >= confidence_value >= 25:
return '3'
elif 44 >= confidence_value >= 35:
return '4'
elif 54 >= confidence_value >= 45:
return '5'
elif 64 >= confidence_value >= 55:
return '6'
elif 74 >= confidence_value >= 65:
return '7'
elif 84 >= confidence_value >= 75:
return '8'
elif 94 >= confidence_value >= 85:
return '9'
elif 100 >= confidence_value >= 95:
return '10'
else:
raise ValueError("Range of values out of bounds: %s" % confidence_value)
def admiralty_credibility_to_value(scale_value):
"""
This method will transform a string value from the Admiralty Credibility
scale to its confidence integer representation.
The scale for this confidence representation is the following:
.. list-table:: Admiralty Credibility Scale to STIX Confidence
:header-rows: 1
* - Admiralty Credibility
- STIX Confidence Value
* - 6 - Truth cannot be judged
- (Not present)
* - 5 - Improbable
- 10
* - 4 - Doubtful
- 30
* - 3 - Possibly True
- 50
* - 2 - Probably True
- 70
* - 1 - Confirmed by other sources
- 90
Args:
scale_value (str): A string value from the scale. Accepted strings are
"6 - Truth cannot be judged", "5 - Improbable", "4 - Doubtful",
"3 - Possibly True", "2 - Probably True" and
"1 - Confirmed by other sources". Argument is case sensitive.
Returns:
int: The numerical representation corresponding to values in the
Admiralty Credibility scale.
Raises:
ValueError: If `scale_value` is not within the accepted strings.
"""
if scale_value == '6 - Truth cannot be judged':
raise ValueError("STIX Confidence value cannot be determined for %s" % scale_value)
elif scale_value == '5 - Improbable':
return 10
elif scale_value == '4 - Doubtful':
return 30
elif scale_value == '3 - Possibly True':
return 50
elif scale_value == '2 - Probably True':
return 70
elif scale_value == '1 - Confirmed by other sources':
return 90
else:
raise ValueError("STIX Confidence value cannot be determined for %s" % scale_value)
def value_to_admiralty_credibility(confidence_value):
"""
This method will transform an integer value into the Admiralty Credibility
scale string representation.
The scale for this confidence representation is the following:
.. list-table:: STIX Confidence to Admiralty Credibility Scale
:header-rows: 1
* - Range of Values
- Admiralty Credibility
* - N/A
- 6 - Truth cannot be judged
* - 0-19
- 5 - Improbable
* - 20-39
- 4 - Doubtful
* - 40-59
- 3 - Possibly True
* - 60-79
- 2 - Probably True
* - 80-100
- 1 - Confirmed by other sources
Args:
confidence_value (int): An integer value between 0 and 100.
Returns:
str: A string corresponding to the Admiralty Credibility scale.
Raises:
ValueError: If `confidence_value` is out of bounds.
"""
if 19 >= confidence_value >= 0:
return '5 - Improbable'
elif 39 >= confidence_value >= 20:
return '4 - Doubtful'
elif 59 >= confidence_value >= 40:
return '3 - Possibly True'
elif 79 >= confidence_value >= 60:
return '2 - Probably True'
elif 100 >= confidence_value >= 80:
return '1 - Confirmed by other sources'
else:
raise ValueError("Range of values out of bounds: %s" % confidence_value)
def wep_to_value(scale_value):
"""
This method will transform a string value from the WEP scale to its
confidence integer representation.
The scale for this confidence representation is the following:
.. list-table:: WEP to STIX Confidence
:header-rows: 1
* - WEP
- STIX Confidence Value
* - Impossible
- 0
* - Highly Unlikely/Almost Certainly Not
- 10
* - Unlikely/Probably Not
- 20
* - Even Chance
- 50
* - Likely/Probable
- 70
* - Highly likely/Almost Certain
- 90
* - Certain
- 100
Args:
scale_value (str): A string value from the scale. Accepted strings are
"Impossible", "Highly Unlikely/Almost Certainly Not",
"Unlikely/Probably Not", "Even Chance", "Likely/Probable",
"Highly likely/Almost Certain" and "Certain". Argument is case
sensitive.
Returns:
int: The numerical representation corresponding to values in the WEP
scale.
Raises:
ValueError: If `scale_value` is not within the accepted strings.
"""
if scale_value == 'Impossible':
return 0
elif scale_value == 'Highly Unlikely/Almost Certainly Not':
return 10
elif scale_value == 'Unlikely/Probably Not':
return 30
elif scale_value == 'Even Chance':
return 50
elif scale_value == 'Likely/Probable':
return 70
elif scale_value == 'Highly likely/Almost Certain':
return 90
elif scale_value == 'Certain':
return 100
else:
raise ValueError("STIX Confidence value cannot be determined for %s" % scale_value)
def value_to_wep(confidence_value):
"""
This method will transform an integer value into the WEP scale string
representation.
The scale for this confidence representation is the following:
.. list-table:: STIX Confidence to WEP
:header-rows: 1
* - Range of Values
- WEP
* - 0
- Impossible
* - 1-19
- Highly Unlikely/Almost Certainly Not
* - 20-39
- Unlikely/Probably Not
* - 40-59
- Even Chance
* - 60-79
- Likely/Probable
* - 80-99
- Highly likely/Almost Certain
* - 100
- Certain
Args:
confidence_value (int): An integer value between 0 and 100.
Returns:
str: A string corresponding to the WEP scale.
Raises:
ValueError: If `confidence_value` is out of bounds.
"""
if confidence_value == 0:
return 'Impossible'
elif 19 >= confidence_value >= 1:
return 'Highly Unlikely/Almost Certainly Not'
elif 39 >= confidence_value >= 20:
return 'Unlikely/Probably Not'
elif 59 >= confidence_value >= 40:
return 'Even Chance'
elif 79 >= confidence_value >= 60:
return 'Likely/Probable'
elif 99 >= confidence_value >= 80:
return 'Highly likely/Almost Certain'
elif confidence_value == 100:
return 'Certain'
else:
raise ValueError("Range of values out of bounds: %s" % confidence_value)
def dni_to_value(scale_value):
"""
This method will transform a string value from the DNI scale to its
confidence integer representation.
The scale for this confidence representation is the following:
.. list-table:: DNI Scale to STIX Confidence
:header-rows: 1
* - DNI Scale
- STIX Confidence Value
* - Almost No Chance / Remote
- 5
* - Very Unlikely / Highly Improbable
- 15
* - Unlikely / Improbable
- 30
* - Roughly Even Chance / Roughly Even Odds
- 50
* - Likely / Probable
- 70
* - Very Likely / Highly Probable
- 85
* - Almost Certain / Nearly Certain
- 95
Args:
scale_value (str): A string value from the scale. Accepted strings are
"Almost No Chance / Remote", "Very Unlikely / Highly Improbable",
"Unlikely / Improbable", "Roughly Even Chance / Roughly Even Odds",
"Likely / Probable", "Very Likely / Highly Probable" and
"Almost Certain / Nearly Certain". Argument is case sensitive.
Returns:
int: The numerical representation corresponding to values in the DNI
scale.
Raises:
ValueError: If `scale_value` is not within the accepted strings.
"""
if scale_value == 'Almost No Chance / Remote':
return 5
elif scale_value == 'Very Unlikely / Highly Improbable':
return 15
elif scale_value == 'Unlikely / Improbable':
return 30
elif scale_value == 'Roughly Even Chance / Roughly Even Odds':
return 50
elif scale_value == 'Likely / Probable':
return 70
elif scale_value == 'Very Likely / Highly Probable':
return 85
elif scale_value == 'Almost Certain / Nearly Certain':
return 95
else:
raise ValueError("STIX Confidence value cannot be determined for %s" % scale_value)
def value_to_dni(confidence_value):
"""
This method will transform an integer value into the DNI scale string
representation.
The scale for this confidence representation is the following:
.. list-table:: STIX Confidence to DNI Scale
:header-rows: 1
* - Range of Values
- DNI Scale
* - 0-9
- Almost No Chance / Remote
* - 10-19
- Very Unlikely / Highly Improbable
* - 20-39
- Unlikely / Improbable
* - 40-59
- Roughly Even Chance / Roughly Even Odds
* - 60-79
- Likely / Probable
* - 80-89
- Very Likely / Highly Probable
* - 90-100
- Almost Certain / Nearly Certain
Args:
confidence_value (int): An integer value between 0 and 100.
Returns:
str: A string corresponding to the DNI scale.
Raises:
ValueError: If `confidence_value` is out of bounds.
"""
if 9 >= confidence_value >= 0:
return 'Almost No Chance / Remote'
elif 19 >= confidence_value >= 10:
return 'Very Unlikely / Highly Improbable'
elif 39 >= confidence_value >= 20:
return 'Unlikely / Improbable'
elif 59 >= confidence_value >= 40:
return 'Roughly Even Chance / Roughly Even Odds'
elif 79 >= confidence_value >= 60:
return 'Likely / Probable'
elif 89 >= confidence_value >= 80:
return 'Very Likely / Highly Probable'
elif 100 >= confidence_value >= 90:
return 'Almost Certain / Nearly Certain'
else:
raise ValueError("Range of values out of bounds: %s" % confidence_value)
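A short usage sketch for the scales added above, round-tripping a value through the None / Low / Med / High scale:

from stix2.confidence.scales import (
    none_low_med_high_to_value, value_to_none_low_medium_high,
)

value = none_low_med_high_to_value('Med')      # 50
label = value_to_none_low_medium_high(value)   # 'Med' (50 falls in the 30-69 band)
print(value, label)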


@ -1,76 +1,43 @@
"""STIX 2.0 Objects that are neither SDOs nor SROs."""
"""STIX2 Core Objects and Methods."""
from collections import OrderedDict
import copy
import importlib
import pkgutil
import re
import stix2
from . import exceptions
from .base import _STIXBase
from .properties import IDProperty, ListProperty, Property, TypeProperty
from .utils import _get_dict, get_class_hierarchy_names
from .exceptions import CustomContentError, ParseError
from .markings import _MarkingsMixin
from .utils import _get_dict
STIX2_OBJ_MAPS = {}
class STIXObjectProperty(Property):
def __init__(self, allow_custom=False, interoperability=False, *args, **kwargs):
self.allow_custom = allow_custom
self.interoperability = interoperability
super(STIXObjectProperty, self).__init__(*args, **kwargs)
def clean(self, value):
# Any STIX Object (SDO, SRO, or Marking Definition) can be added to
# a bundle with no further checks.
if any(x in ('STIXDomainObject', 'STIXRelationshipObject', 'MarkingDefinition')
for x in get_class_hierarchy_names(value)):
return value
try:
dictified = _get_dict(value)
except ValueError:
raise ValueError("This property may only contain a dictionary or object")
if dictified == {}:
raise ValueError("This property may only contain a non-empty dictionary or object")
if 'type' in dictified and dictified['type'] == 'bundle':
raise ValueError('This property may not contain a Bundle object')
return parse(dictified, self.allow_custom, self.interoperability)
class Bundle(_STIXBase):
"""For more detailed information on this object's properties, see
`the STIX 2.0 specification <http://docs.oasis-open.org/cti/stix/v2.0/cs01/part1-stix-core/stix-v2.0-cs01-part1-stix-core.html#_Toc496709293>`__.
"""
_type = 'bundle'
_properties = OrderedDict()
_properties.update([
('type', TypeProperty(_type)),
('id', IDProperty(_type)),
('spec_version', Property(fixed="2.0")),
('objects', ListProperty(STIXObjectProperty)),
])
class STIXDomainObject(_STIXBase, _MarkingsMixin):
def __init__(self, *args, **kwargs):
# Add any positional arguments to the 'objects' kwarg.
if args:
if isinstance(args[0], list):
kwargs['objects'] = args[0] + list(args[1:]) + kwargs.get('objects', [])
else:
kwargs['objects'] = list(args) + kwargs.get('objects', [])
allow_custom = kwargs.get('allow_custom', False)
self.__allow_custom = allow_custom
self._properties['objects'].contained.allow_custom = allow_custom
interoperability = kwargs.get('interoperability', False)
self.__interoperability = interoperability
self._properties['id'].interoperability = interoperability
self._properties['objects'].contained.interoperability = interoperability
self._properties['created_by_ref'].interoperability = interoperability
if kwargs.get('object_marking_refs'):
self._properties['object_marking_refs'].contained.interoperability = interoperability
super(Bundle, self).__init__(**kwargs)
super(STIXDomainObject, self).__init__(*args, **kwargs)
STIX2_OBJ_MAPS = {}
class STIXRelationshipObject(_STIXBase, _MarkingsMixin):
def __init__(self, *args, **kwargs):
interoperability = kwargs.get('interoperability', False)
self.__interoperability = interoperability
self._properties['id'].interoperability = interoperability
if kwargs.get('created_by_ref'):
self._properties['created_by_ref'].interoperability = interoperability
if kwargs.get('object_marking_refs'):
self._properties['object_marking_refs'].contained.interoperability = interoperability
super(STIXRelationshipObject, self).__init__(*args, **kwargs)
def parse(data, allow_custom=False, interoperability=False, version=None):
@ -81,18 +48,22 @@ def parse(data, allow_custom=False, interoperability=False, version=None):
allow_custom (bool): Whether to allow custom properties as well unknown
custom objects. Note that unknown custom objects cannot be parsed
into STIX objects, and will be returned as is. Default: False.
version (str): Which STIX2 version to use. (e.g. "2.0", "2.1"). If
None, use latest version.
version (str): If present, it forces the parser to use the version
provided. Otherwise, the library will make the best effort based
on checking the "spec_version" property. If none of the above are
possible, it will use the default version specified by the library.
Returns:
An instantiated Python STIX object.
WARNING: 'allow_custom=True' will allow for the return of any supplied STIX
dict(s) that cannot be found to map to any known STIX object types (both STIX2
domain objects or defined custom STIX2 objects); NO validation is done. This is
done to allow the processing of possibly unknown custom STIX objects (example
scenario: I need to query a third-party TAXII endpoint that could provide custom
STIX objects that I dont know about ahead of time)
Warnings:
'allow_custom=True' will allow for the return of any supplied STIX
dict(s) that cannot be found to map to any known STIX object types
(both STIX2 domain objects or defined custom STIX2 objects); NO
validation is done. This is done to allow the processing of possibly
unknown custom STIX objects (example scenario: I need to query a
third-party TAXII endpoint that could provide custom STIX objects that
I don't know about ahead of time)
"""
# convert STIX object to dict, if not already
@ -107,35 +78,55 @@ def parse(data, allow_custom=False, interoperability=False, version=None):
def dict_to_stix2(stix_dict, allow_custom=False, interoperability=False, version=None):
"""convert dictionary to full python-stix2 object
Args:
stix_dict (dict): a python dictionary of a STIX object
that (presumably) is semantically correct to be parsed
into a full python-stix2 obj
allow_custom (bool): Whether to allow custom properties as well unknown
custom objects. Note that unknown custom objects cannot be parsed
into STIX objects, and will be returned as is. Default: False.
Args:
stix_dict (dict): a python dictionary of a STIX object
that (presumably) is semantically correct to be parsed
into a full python-stix2 obj
allow_custom (bool): Whether to allow custom properties as well
unknown custom objects. Note that unknown custom objects cannot
be parsed into STIX objects, and will be returned as is.
Default: False.
version (str): If present, it forces the parser to use the version
provided. Otherwise, the library will make the best effort based
on checking the "spec_version" property. If none of the above are
possible, it will use the default version specified by the library.
Returns:
An instantiated Python STIX object
Returns:
An instantiated Python STIX object
WARNING: 'allow_custom=True' will allow for the return of any supplied STIX
dict(s) that cannot be found to map to any known STIX object types (both STIX2
domain objects or defined custom STIX2 objects); NO validation is done. This is
done to allow the processing of possibly unknown custom STIX objects (example
scenario: I need to query a third-party TAXII endpoint that could provide custom
STIX objects that I dont know about ahead of time)
Warnings:
'allow_custom=True' will allow for the return of any supplied STIX
dict(s) that cannot be found to map to any known STIX object types
(both STIX2 domain objects or defined custom STIX2 objects); NO
validation is done. This is done to allow the processing of
possibly unknown custom STIX objects (example scenario: I need to
query a third-party TAXII endpoint that could provide custom STIX
objects that I don't know about ahead of time)
"""
if not version:
# Use latest version
v = 'v' + stix2.DEFAULT_VERSION.replace('.', '')
else:
v = 'v' + version.replace('.', '')
OBJ_MAP = STIX2_OBJ_MAPS[v]
if 'type' not in stix_dict:
raise exceptions.ParseError("Can't parse object with no 'type' property: %s" % str(stix_dict))
raise ParseError("Can't parse object with no 'type' property: %s" % str(stix_dict))
if version:
# If the version argument was passed, override other approaches.
v = 'v' + version.replace('.', '')
elif 'spec_version' in stix_dict:
# For STIX 2.0, applies to bundles only.
# For STIX 2.1+, applies to SDOs, SROs, and markings only.
v = 'v' + stix_dict['spec_version'].replace('.', '')
elif stix_dict['type'] == 'bundle':
# bundles without spec_version are ambiguous.
if any('spec_version' in x for x in stix_dict['objects']):
# Only on 2.1 we are allowed to have 'spec_version' in SDOs/SROs.
v = 'v21'
else:
v = 'v' + stix2.DEFAULT_VERSION.replace('.', '')
else:
# The spec says that SDO/SROs without spec_version will default to a
# '2.0' representation.
v = 'v20'
OBJ_MAP = STIX2_OBJ_MAPS[v]['objects']
try:
obj_class = OBJ_MAP[stix_dict['type']]
@ -144,39 +135,187 @@ def dict_to_stix2(stix_dict, allow_custom=False, interoperability=False, version
# flag allows for unknown custom objects too, but will not
# be parsed into STIX object, returned as is
return stix_dict
raise exceptions.ParseError("Can't parse unknown object type '%s'! For custom types, use the CustomObject decorator." % stix_dict['type'])
raise ParseError("Can't parse unknown object type '%s'! For custom types, use the CustomObject decorator." % stix_dict['type'])
return obj_class(allow_custom=allow_custom, interoperability=interoperability, **stix_dict)
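To illustrate the version-selection logic above, a sketch that reuses the 2.1 indicator from the README; the 'spec_version' property routes the dict to the v21 class map, and passing a version explicitly overrides that detection:

import stix2

indicator = stix2.parse({
    "type": "indicator",
    "spec_version": "2.1",
    "name": "File hash for malware variant",
    "indicator_types": ["malicious-activity"],
    "pattern": "[file:hashes.md5 = 'd41d8cd98f00b204e9800998ecf8427e']",
    "valid_from": "2017-09-26T23:33:39.829952Z",
})
# stix2.parse(..., version="2.1") would force the same choice regardless of
# what the dict carries.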
def _register_type(new_type, version=None):
def parse_observable(data, _valid_refs=None, allow_custom=False, version=None):
"""Deserialize a string or file-like object into a STIX Cyber Observable
object.
Args:
data (str, dict, file-like object): The STIX2 content to be parsed.
_valid_refs: A list of object references valid for the scope of the
object being parsed. Use empty list if no valid refs are present.
allow_custom (bool): Whether to allow custom properties or not.
Default: False.
version (str): If present, it forces the parser to use the version
provided. Otherwise, the default version specified by the library
will be used.
Returns:
An instantiated Python STIX Cyber Observable object.
"""
obj = _get_dict(data)
# get a deep copy since we are going to modify the dict and might
# modify the original dict, as _get_dict() does not return a new
# dict when passed a dict
obj = copy.deepcopy(obj)
obj['_valid_refs'] = _valid_refs or []
if version:
# If the version argument was passed, override other approaches.
v = 'v' + version.replace('.', '')
else:
# Use default version (latest) if no version was provided.
v = 'v' + stix2.DEFAULT_VERSION.replace('.', '')
if 'type' not in obj:
raise ParseError("Can't parse observable with no 'type' property: %s" % str(obj))
try:
OBJ_MAP_OBSERVABLE = STIX2_OBJ_MAPS[v]['observables']
obj_class = OBJ_MAP_OBSERVABLE[obj['type']]
except KeyError:
if allow_custom:
# flag allows for unknown custom objects too, but will not
# be parsed into STIX observable object, just returned as is
return obj
raise CustomContentError("Can't parse unknown observable type '%s'! For custom observables, "
"use the CustomObservable decorator." % obj['type'])
EXT_MAP = STIX2_OBJ_MAPS[v]['observable-extensions']
if 'extensions' in obj and obj['type'] in EXT_MAP:
for name, ext in obj['extensions'].items():
try:
ext_class = EXT_MAP[obj['type']][name]
except KeyError:
if not allow_custom:
raise CustomContentError("Can't parse unknown extension type '%s'"
"for observable type '%s'!" % (name, obj['type']))
else: # extension was found
obj['extensions'][name] = ext_class(allow_custom=allow_custom, **obj['extensions'][name])
return obj_class(allow_custom=allow_custom, **obj)
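A minimal sketch of the new parse_observable(), assuming the v20 File observable accepts a lone 'name' property as it does in STIX 2.0:

from stix2 import parse_observable

f = parse_observable(
    '{"type": "file", "name": "evil.exe"}',
    _valid_refs=[],
    version="2.0",
)
print(f.name)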
def _register_object(new_type, version=None):
"""Register a custom STIX Object type.
Args:
new_type (class): A class to register in the Object map.
version (str): Which STIX2 version to use. (e.g. "2.0", "2.1"). If
None, use latest version.
"""
if not version:
# Use latest version
v = 'v' + stix2.DEFAULT_VERSION.replace('.', '')
else:
v = 'v' + version.replace('.', '')
OBJ_MAP = STIX2_OBJ_MAPS[v]
"""
if version:
v = 'v' + version.replace('.', '')
else:
# Use default version (latest) if no version was provided.
v = 'v' + stix2.DEFAULT_VERSION.replace('.', '')
OBJ_MAP = STIX2_OBJ_MAPS[v]['objects']
OBJ_MAP[new_type._type] = new_type
def _collect_stix2_obj_maps():
"""Navigate the package once and retrieve all OBJ_MAP dicts for each v2X
package."""
def _register_marking(new_marking, version=None):
"""Register a custom STIX Marking Definition type.
Args:
new_marking (class): A class to register in the Marking map.
version (str): Which STIX2 version to use. (e.g. "2.0", "2.1"). If
None, use latest version.
"""
if version:
v = 'v' + version.replace('.', '')
else:
# Use default version (latest) if no version was provided.
v = 'v' + stix2.DEFAULT_VERSION.replace('.', '')
OBJ_MAP_MARKING = STIX2_OBJ_MAPS[v]['markings']
OBJ_MAP_MARKING[new_marking._type] = new_marking
def _register_observable(new_observable, version=None):
"""Register a custom STIX Cyber Observable type.
Args:
new_observable (class): A class to register in the Observables map.
version (str): Which STIX2 version to use. (e.g. "2.0", "2.1"). If
None, use latest version.
"""
if version:
v = 'v' + version.replace('.', '')
else:
# Use default version (latest) if no version was provided.
v = 'v' + stix2.DEFAULT_VERSION.replace('.', '')
OBJ_MAP_OBSERVABLE = STIX2_OBJ_MAPS[v]['observables']
OBJ_MAP_OBSERVABLE[new_observable._type] = new_observable
def _register_observable_extension(observable, new_extension, version=None):
"""Register a custom extension to a STIX Cyber Observable type.
Args:
observable: An observable object
new_extension (class): A class to register in the Observables
Extensions map.
version (str): Which STIX2 version to use. (e.g. "2.0", "2.1"). If
None, use latest version.
"""
if version:
v = 'v' + version.replace('.', '')
else:
# Use default version (latest) if no version was provided.
v = 'v' + stix2.DEFAULT_VERSION.replace('.', '')
try:
observable_type = observable._type
except AttributeError:
raise ValueError(
"Unknown observable type. Custom observables must be "
"created with the @CustomObservable decorator.",
)
OBJ_MAP_OBSERVABLE = STIX2_OBJ_MAPS[v]['observables']
EXT_MAP = STIX2_OBJ_MAPS[v]['observable-extensions']
try:
EXT_MAP[observable_type][new_extension._type] = new_extension
except KeyError:
if observable_type not in OBJ_MAP_OBSERVABLE:
raise ValueError(
"Unknown observable type '%s'. Custom observables "
"must be created with the @CustomObservable decorator."
% observable_type,
)
else:
EXT_MAP[observable_type] = {new_extension._type: new_extension}
def _collect_stix2_mappings():
"""Navigate the package once and retrieve all object mapping dicts for each
v2X package. Includes OBJ_MAP, OBJ_MAP_OBSERVABLE, EXT_MAP."""
if not STIX2_OBJ_MAPS:
top_level_module = importlib.import_module('stix2')
path = top_level_module.__path__
prefix = str(top_level_module.__name__) + '.'
for module_loader, name, is_pkg in pkgutil.walk_packages(path=path,
prefix=prefix):
if name.startswith('stix2.v2') and is_pkg:
for module_loader, name, is_pkg in pkgutil.walk_packages(path=path, prefix=prefix):
ver = name.split('.')[1]
if re.match(r'^stix2\.v2[0-9]$', name) and is_pkg:
mod = importlib.import_module(name, str(top_level_module.__name__))
STIX2_OBJ_MAPS[name.split('.')[-1]] = mod.OBJ_MAP
STIX2_OBJ_MAPS[ver] = {}
STIX2_OBJ_MAPS[ver]['objects'] = mod.OBJ_MAP
STIX2_OBJ_MAPS[ver]['observables'] = mod.OBJ_MAP_OBSERVABLE
STIX2_OBJ_MAPS[ver]['observable-extensions'] = mod.EXT_MAP
elif re.match(r'^stix2\.v2[0-9]\.common$', name) and is_pkg is False:
mod = importlib.import_module(name, str(top_level_module.__name__))
STIX2_OBJ_MAPS[ver]['markings'] = mod.OBJ_MAP_MARKING
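The registry built above ends up shaped roughly as follows; the placeholder dicts stand in for the per-version mappings named in the loop (illustrative only):

# Illustrative shape of STIX2_OBJ_MAPS after _collect_stix2_mappings() runs.
example_shape = {
    'v20': {
        'objects': {},                 # OBJ_MAP: STIX type string -> SDO/SRO class
        'observables': {},             # OBJ_MAP_OBSERVABLE
        'observable-extensions': {},   # EXT_MAP
        'markings': {},                # OBJ_MAP_MARKING, from stix2.v20.common
    },
    'v21': {},                         # same four keys for the 2.1 package
}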

120
stix2/custom.py Normal file

@ -0,0 +1,120 @@
from collections import OrderedDict
import re
from .base import _cls_init, _Extension, _Observable, _STIXBase
from .core import (
STIXDomainObject, _register_marking, _register_object,
_register_observable, _register_observable_extension,
)
from .utils import TYPE_REGEX, get_class_hierarchy_names
def _custom_object_builder(cls, type, properties, version):
class _CustomObject(cls, STIXDomainObject):
if not re.match(TYPE_REGEX, type):
raise ValueError(
"Invalid type name '%s': must only contain the "
"characters a-z (lowercase ASCII), 0-9, and hyphen (-)." % type,
)
elif len(type) < 3 or len(type) > 250:
raise ValueError(
"Invalid type name '%s': must be between 3 and 250 characters." % type,
)
if not properties or not isinstance(properties, list):
raise ValueError("Must supply a list, containing tuples. For example, [('property1', IntegerProperty())]")
_type = type
_properties = OrderedDict(properties)
def __init__(self, **kwargs):
_STIXBase.__init__(self, **kwargs)
_cls_init(cls, self, kwargs)
_register_object(_CustomObject, version=version)
return _CustomObject
def _custom_marking_builder(cls, type, properties, version):
class _CustomMarking(cls, _STIXBase):
if not properties or not isinstance(properties, list):
raise ValueError("Must supply a list, containing tuples. For example, [('property1', IntegerProperty())]")
_type = type
_properties = OrderedDict(properties)
def __init__(self, **kwargs):
_STIXBase.__init__(self, **kwargs)
_cls_init(cls, self, kwargs)
_register_marking(_CustomMarking, version=version)
return _CustomMarking
def _custom_observable_builder(cls, type, properties, version):
class _CustomObservable(cls, _Observable):
if not re.match(TYPE_REGEX, type):
raise ValueError(
"Invalid observable type name '%s': must only contain the "
"characters a-z (lowercase ASCII), 0-9, and hyphen (-)." % type,
)
elif len(type) < 3 or len(type) > 250:
raise ValueError("Invalid observable type name '%s': must be between 3 and 250 characters." % type)
if not properties or not isinstance(properties, list):
raise ValueError("Must supply a list, containing tuples. For example, [('property1', IntegerProperty())]")
# Check properties ending in "_ref/s" are ObjectReferenceProperties
for prop_name, prop in properties:
if prop_name.endswith('_ref') and ('ObjectReferenceProperty' not in get_class_hierarchy_names(prop)):
raise ValueError(
"'%s' is named like an object reference property but "
"is not an ObjectReferenceProperty." % prop_name,
)
elif (prop_name.endswith('_refs') and ('ListProperty' not in get_class_hierarchy_names(prop)
or 'ObjectReferenceProperty' not in get_class_hierarchy_names(prop.contained))):
raise ValueError(
"'%s' is named like an object reference list property but "
"is not a ListProperty containing ObjectReferenceProperty." % prop_name,
)
_type = type
_properties = OrderedDict(properties)
def __init__(self, **kwargs):
_Observable.__init__(self, **kwargs)
_cls_init(cls, self, kwargs)
_register_observable(_CustomObservable, version=version)
return _CustomObservable
def _custom_extension_builder(cls, observable, type, properties, version):
if not observable or not issubclass(observable, _Observable):
raise ValueError("'observable' must be a valid Observable class!")
class _CustomExtension(cls, _Extension):
if not re.match(TYPE_REGEX, type):
raise ValueError(
"Invalid extension type name '%s': must only contain the "
"characters a-z (lowercase ASCII), 0-9, and hyphen (-)." % type,
)
elif len(type) < 3 or len(type) > 250:
raise ValueError("Invalid extension type name '%s': must be between 3 and 250 characters." % type)
if not properties or not isinstance(properties, list):
raise ValueError("Must supply a list, containing tuples. For example, [('property1', IntegerProperty())]")
_type = type
_properties = OrderedDict(properties)
def __init__(self, **kwargs):
_Extension.__init__(self, **kwargs)
_cls_init(cls, self, kwargs)
_register_observable_extension(observable, _CustomExtension, version=version)
return _CustomExtension
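These private builders back the public @CustomObject, @CustomMarking, @CustomObservable and @CustomExtension decorators. A short usage sketch of the decorator form (the type and property names below are invented for illustration); the builder enforces the type-name regex, the 3-250 character length limit and the list-of-tuples property form seen above:

import stix2
from stix2.properties import StringProperty

@stix2.CustomObject('x-animal', [
    ('species', StringProperty(required=True)),
    ('animal_class', StringProperty()),
])
class Animal(object):
    # Optional extra validation; _cls_init() calls this after _STIXBase.__init__().
    def __init__(self, animal_class=None, **kwargs):
        if animal_class and animal_class not in ('mammal', 'bird', 'fish', 'reptile'):
            raise ValueError("'%s' is not a recognized animal class." % animal_class)

animal = Animal(species="lion", animal_class="mammal")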

stix2/datastore/__init__.py

@ -1,4 +1,5 @@
"""Python STIX 2.0 DataStore API.
"""
Python STIX2 DataStore API.
.. autosummary::
:toctree: datastore
@ -83,7 +84,8 @@ class DataStoreMixin(object):
try:
return self.source.get(*args, **kwargs)
except AttributeError:
raise AttributeError('%s has no data source to query' % self.__class__.__name__)
msg = "%s has no data source to query"
raise AttributeError(msg % self.__class__.__name__)
def all_versions(self, *args, **kwargs):
"""Retrieve all versions of a single STIX object by ID.
@ -100,7 +102,8 @@ class DataStoreMixin(object):
try:
return self.source.all_versions(*args, **kwargs)
except AttributeError:
raise AttributeError('%s has no data source to query' % self.__class__.__name__)
msg = "%s has no data source to query"
raise AttributeError(msg % self.__class__.__name__)
def query(self, *args, **kwargs):
"""Retrieve STIX objects matching a set of filters.
@ -118,7 +121,8 @@ class DataStoreMixin(object):
try:
return self.source.query(*args, **kwargs)
except AttributeError:
raise AttributeError('%s has no data source to query' % self.__class__.__name__)
msg = "%s has no data source to query"
raise AttributeError(msg % self.__class__.__name__)
def creator_of(self, *args, **kwargs):
"""Retrieve the Identity refered to by the object's `created_by_ref`.
@ -137,7 +141,8 @@ class DataStoreMixin(object):
try:
return self.source.creator_of(*args, **kwargs)
except AttributeError:
raise AttributeError('%s has no data source to query' % self.__class__.__name__)
msg = "%s has no data source to query"
raise AttributeError(msg % self.__class__.__name__)
def relationships(self, *args, **kwargs):
"""Retrieve Relationships involving the given STIX object.
@ -163,7 +168,8 @@ class DataStoreMixin(object):
try:
return self.source.relationships(*args, **kwargs)
except AttributeError:
raise AttributeError('%s has no data source to query' % self.__class__.__name__)
msg = "%s has no data source to query"
raise AttributeError(msg % self.__class__.__name__)
def related_to(self, *args, **kwargs):
"""Retrieve STIX Objects that have a Relationship involving the given
@ -193,7 +199,8 @@ class DataStoreMixin(object):
try:
return self.source.related_to(*args, **kwargs)
except AttributeError:
raise AttributeError('%s has no data source to query' % self.__class__.__name__)
msg = "%s has no data source to query"
raise AttributeError(msg % self.__class__.__name__)
def add(self, *args, **kwargs):
"""Method for storing STIX objects.
@ -208,7 +215,8 @@ class DataStoreMixin(object):
try:
return self.sink.add(*args, **kwargs)
except AttributeError:
raise AttributeError('%s has no data sink to put objects in' % self.__class__.__name__)
msg = "%s has no data sink to put objects in"
raise AttributeError(msg % self.__class__.__name__)
class DataSink(with_metaclass(ABCMeta)):
@ -301,7 +309,7 @@ class DataSource(with_metaclass(ABCMeta)):
"""
def creator_of(self, obj):
"""Retrieve the Identity refered to by the object's `created_by_ref`.
"""Retrieve the Identity referred to by the object's `created_by_ref`.
Args:
obj: The STIX object whose `created_by_ref` property will be looked
@ -457,7 +465,7 @@ class CompositeDataSource(DataSource):
"""
if not self.has_data_sources():
raise AttributeError('CompositeDataSource has no data sources')
raise AttributeError("CompositeDataSource has no data sources")
all_data = []
all_filters = FilterSet()
@ -504,7 +512,7 @@ class CompositeDataSource(DataSource):
"""
if not self.has_data_sources():
raise AttributeError('CompositeDataSource has no data sources')
raise AttributeError("CompositeDataSource has no data sources")
all_data = []
all_filters = FilterSet()
@ -543,7 +551,7 @@ class CompositeDataSource(DataSource):
"""
if not self.has_data_sources():
raise AttributeError('CompositeDataSource has no data sources')
raise AttributeError("CompositeDataSource has no data sources")
if not query:
# don't mess with the query (i.e. deduplicate, as that's done
@ -594,7 +602,7 @@ class CompositeDataSource(DataSource):
"""
if not self.has_data_sources():
raise AttributeError('CompositeDataSource has no data sources')
raise AttributeError("CompositeDataSource has no data sources")
results = []
for ds in self.data_sources:
@ -634,7 +642,7 @@ class CompositeDataSource(DataSource):
"""
if not self.has_data_sources():
raise AttributeError('CompositeDataSource has no data sources')
raise AttributeError("CompositeDataSource has no data sources")
results = []
for ds in self.data_sources:
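All of the CompositeDataSource hunks above follow the same pattern: guard on has_data_sources(), fan the call out to every attached source, then merge and deduplicate the results. A brief usage sketch (the directory path and ID are placeholders):

from stix2 import CompositeDataSource, FileSystemSource, MemorySource

cds = CompositeDataSource()
# Attach any DataSource instances; get()/all_versions()/query() then search all of them.
cds.add_data_sources([
    FileSystemSource("/tmp/stix_repo"),   # directory must already exist
    MemorySource(),
])

obj = cds.get("indicator--01234567-89ab-cdef-0123-456789abcdef")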

stix2/datastore/filesystem.py

@ -1,21 +1,23 @@
"""
Python STIX 2.0 FileSystem Source/Sink
"""
"""Python STIX2 FileSystem Source/Sink"""
# Temporary while we address TODO statement
from __future__ import print_function
import errno
import io
import json
import os
import re
import stat
import sys
import pytz
import six
from stix2 import v20, v21
from stix2.base import _STIXBase
from stix2.core import Bundle, parse
from stix2.core import parse
from stix2.datastore import DataSink, DataSource, DataStoreMixin
from stix2.datastore.filters import Filter, FilterSet, apply_common_filters
from stix2.utils import get_type_from_id, is_marking
from stix2.utils import format_datetime, get_type_from_id, is_marking
def _timestamp2filename(timestamp):
@ -23,15 +25,14 @@ def _timestamp2filename(timestamp):
Encapsulates a way to create unique filenames based on an object's
"modified" property value. This should not include an extension.
:param timestamp: A timestamp, as a datetime.datetime object.
"""
# Different times will only produce different file names if all timestamps
# are in the same time zone! So if timestamp is timezone-aware convert
# to UTC just to be safe. If naive, just use as-is.
if timestamp.tzinfo is not None:
timestamp = timestamp.astimezone(pytz.utc)
Args:
timestamp: A timestamp, as a datetime.datetime object.
return timestamp.strftime("%Y%m%d%H%M%S%f")
"""
# The format_datetime will determine the correct level of precision.
ts = format_datetime(timestamp)
ts = re.sub(r"[-T:\.Z ]", "", ts)
return ts
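For example, assuming format_datetime() renders a timestamp as "2018-11-23T08:17:27.123456Z", stripping the "-T:.Z " characters leaves a sortable, digits-only filename stem:

import datetime

import pytz

from stix2.datastore.filesystem import _timestamp2filename

modified = datetime.datetime(2018, 11, 23, 8, 17, 27, 123456, tzinfo=pytz.utc)
print(_timestamp2filename(modified))   # "20181123081727123456"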
class AuthSet(object):
@ -45,8 +46,8 @@ class AuthSet(object):
anywhere, which means the query was impossible to match, so you can skip
searching altogether. For a blacklist, this means nothing is excluded
and you must search everywhere.
"""
"""
BLACK = 0
WHITE = 1
@ -56,9 +57,11 @@ class AuthSet(object):
prohibited values. The type of set (black or white) is determined
from the allowed and/or prohibited values given.
:param allowed: A set of allowed values (or None if no allow filters
were found in the query)
:param prohibited: A set of prohibited values (not None)
Args:
allowed: A set of allowed values (or None if no allow filters
were found in the query)
prohibited: A set of prohibited values (not None)
"""
if allowed is None:
self.__values = prohibited
@ -88,7 +91,7 @@ class AuthSet(object):
def __repr__(self):
return "{}list: {}".format(
"white" if self.auth_type == AuthSet.WHITE else "black",
self.values
self.values,
)
@ -103,9 +106,13 @@ def _update_allow(allow_set, value):
implicitly AND'd, the given values are intersected with the existing allow
set, which may remove values. At the end, it may even wind up empty.
:param allow_set: The allow set, or None
:param value: The value(s) to add (single value, or iterable of values)
:return: The updated allow set (not None)
Args:
allow_set: The allow set, or None
value: The value(s) to add (single value, or iterable of values)
Returns:
The updated allow set (not None)
"""
adding_seq = hasattr(value, "__iter__") and \
not isinstance(value, six.string_types)
@ -116,7 +123,6 @@ def _update_allow(allow_set, value):
allow_set.update(value)
else:
allow_set.add(value)
else:
# strangely, the "&=" operator requires a set on the RHS
# whereas the method allows any iterable.
@ -133,11 +139,14 @@ def _find_search_optimizations(filters):
Searches through all the filters, and creates white/blacklists of types and
IDs, which can be used to optimize the filesystem search.
:param filters: An iterable of filter objects representing a query
:return: A 2-tuple of AuthSet objects: the first is for object types, and
the second is for object IDs.
"""
Args:
filters: An iterable of filter objects representing a query
Returns:
A 2-tuple of AuthSet objects: the first is for object types, and
the second is for object IDs.
"""
# The basic approach to this is to determine what is allowed and
# prohibited, independently, and then combine them to create the final
# white/blacklists.
@ -158,15 +167,19 @@ def _find_search_optimizations(filters):
# An "allow" ID filter implies a type filter too, since IDs
# contain types within them.
allowed_ids = _update_allow(allowed_ids, filter_.value)
allowed_types = _update_allow(allowed_types,
get_type_from_id(filter_.value))
allowed_types = _update_allow(
allowed_types,
get_type_from_id(filter_.value),
)
elif filter_.op == "!=":
prohibited_ids.add(filter_.value)
elif filter_.op == "in":
allowed_ids = _update_allow(allowed_ids, filter_.value)
allowed_types = _update_allow(allowed_types, (
get_type_from_id(id_) for id_ in filter_.value
))
allowed_types = _update_allow(
allowed_types, (
get_type_from_id(id_) for id_ in filter_.value
),
)
opt_types = AuthSet(allowed_types, prohibited_types)
opt_ids = AuthSet(allowed_ids, prohibited_ids)
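As a concrete illustration, a query that pins both a type and an ID collapses to whitelists, so the filesystem walk only has to visit one type directory and one ID. This sketch calls the private helper shown above, so it assumes the module internals stay as in this diff:

from stix2.datastore.filesystem import _find_search_optimizations
from stix2.datastore.filters import Filter

query = [
    Filter("type", "=", "indicator"),
    Filter("id", "=", "indicator--01234567-89ab-cdef-0123-456789abcdef"),
]
auth_types, auth_ids = _find_search_optimizations(query)

# Both come back as whitelists (AuthSet.WHITE): only the "indicator" type
# directory and the single ID need to be searched.
print(auth_types.auth_type == auth_types.WHITE, auth_types.values)
print(auth_ids.auth_type == auth_ids.WHITE, auth_ids.values)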
@ -196,30 +209,35 @@ def _get_matching_dir_entries(parent_dir, auth_set, st_mode_test=None, ext=""):
Search a directory (non-recursively), and find entries which match the
given criteria.
:param parent_dir: The directory to search
:param auth_set: an AuthSet instance, which represents a black/whitelist
filter on filenames
:param st_mode_test: A callable allowing filtering based on the type of
directory entry. E.g. just get directories, or just get files. It
will be passed the st_mode field of a stat() structure and should
return True to include the file, or False to exclude it. Easy thing to
do is pass one of the stat module functions, e.g. stat.S_ISREG. If
None, don't filter based on entry type.
:param ext: Determines how names from auth_set match up to directory
entries, and allows filtering by extension. The extension is added
to auth_set values to obtain directory entries; it is removed from
directory entries to obtain auth_set values. In this way, auth_set
may be treated as having only "basenames" of the entries. Only entries
having the given extension will be included in the results. If not
empty, the extension MUST include a leading ".". The default is the
empty string, which will result in direct comparisons, and no
extension-based filtering.
:return: A list of directory entries matching the criteria. These will not
have any path info included; they will just be bare names.
:raises OSError: If there are errors accessing directory contents or
stat()'ing files
"""
Args:
parent_dir: The directory to search
auth_set: an AuthSet instance, which represents a black/whitelist
filter on filenames
st_mode_test: A callable allowing filtering based on the type of
directory entry. E.g. just get directories, or just get files. It
will be passed the st_mode field of a stat() structure and should
return True to include the file, or False to exclude it. Easy thing to
do is pass one of the stat module functions, e.g. stat.S_ISREG. If
None, don't filter based on entry type.
ext: Determines how names from auth_set match up to directory
entries, and allows filtering by extension. The extension is added
to auth_set values to obtain directory entries; it is removed from
directory entries to obtain auth_set values. In this way, auth_set
may be treated as having only "basenames" of the entries. Only entries
having the given extension will be included in the results. If not
empty, the extension MUST include a leading ".". The default is the
empty string, which will result in direct comparisons, and no
extension-based filtering.
Returns:
(list): A list of directory entries matching the criteria. These will not
have any path info included; they will just be bare names.
Raises:
OSError: If there are errors accessing directory contents or stat()'ing
files
"""
results = []
if auth_set.auth_type == AuthSet.WHITE:
for value in auth_set.values:
@ -237,7 +255,6 @@ def _get_matching_dir_entries(parent_dir, auth_set, st_mode_test=None, ext=""):
if e.errno != errno.ENOENT:
raise
# else, file-not-found is ok, just skip
else: # auth_set is a blacklist
for entry in os.listdir(parent_dir):
if ext:
@ -272,28 +289,34 @@ def _check_object_from_file(query, filepath, allow_custom, version):
Read a STIX object from the given file, and check it against the given
filters.
:param query: Iterable of filters
:param filepath: Path to file to read
:param allow_custom: Whether to allow custom properties as well unknown
Args:
query: Iterable of filters
filepath: Path to file to read
allow_custom: Whether to allow custom properties as well unknown
custom objects.
:param version: Which STIX2 version to use. (e.g. "2.0", "2.1"). If None,
use latest version.
:return: The (parsed) STIX object, if the object passes the filters. If
version (str): If present, it forces the parser to use the version
provided. Otherwise, the library will make the best effort based
on checking the "spec_version" property.
Returns:
The (parsed) STIX object, if the object passes the filters. If
not, None is returned.
:raises TypeError: If the file had invalid JSON
:raises IOError: If there are problems opening/reading the file
:raises stix2.exceptions.STIXError: If there were problems creating a STIX
object from the JSON
Raises:
TypeError: If the file had invalid JSON
IOError: If there are problems opening/reading the file
stix2.exceptions.STIXError: If there were problems creating a STIX
object from the JSON
"""
try:
with open(filepath, "r") as f:
with io.open(filepath, "r") as f:
stix_json = json.load(f)
except ValueError: # not a JSON file
raise TypeError(
"STIX JSON object at '{0}' could either not be parsed "
"to JSON or was not valid STIX JSON".format(
filepath))
"to JSON or was not valid STIX JSON".format(filepath),
)
stix_obj = parse(stix_json, allow_custom, version)
@ -312,35 +335,49 @@ def _search_versioned(query, type_path, auth_ids, allow_custom, version):
particular versioned type (i.e. not markings), and return any which match
the query.
:param query: The query to match against
:param type_path: The directory with type-specific STIX object files
:param auth_ids: Search optimization based on object ID
:param allow_custom: Whether to allow custom properties as well unknown
custom objects.
:param version: Which STIX2 version to use. (e.g. "2.0", "2.1"). If None,
use latest version.
:return: A list of all matching objects
:raises TypeError, stix2.exceptions.STIXError: If any objects had invalid
content
:raises IOError, OSError: If there were any problems opening/reading files
Args:
query: The query to match against
type_path: The directory with type-specific STIX object files
auth_ids: Search optimization based on object ID
allow_custom: Whether to allow custom properties as well unknown
custom objects.
version (str): If present, it forces the parser to use the version
provided. Otherwise, the library will make the best effort based
on checking the "spec_version" property.
Returns:
A list of all matching objects
Raises:
stix2.exceptions.STIXError: If any objects had invalid content
TypeError: If any objects had invalid content
IOError: If there were any problems opening/reading files
OSError: If there were any problems opening/reading files
"""
results = []
id_dirs = _get_matching_dir_entries(type_path, auth_ids,
stat.S_ISDIR)
id_dirs = _get_matching_dir_entries(
type_path, auth_ids,
stat.S_ISDIR,
)
for id_dir in id_dirs:
id_path = os.path.join(type_path, id_dir)
# This leverages a more sophisticated function to do a simple thing:
# get all the JSON files from a directory. I guess it does give us
# file type checking, ensuring we only get regular files.
version_files = _get_matching_dir_entries(id_path, _AUTHSET_ANY,
stat.S_ISREG, ".json")
version_files = _get_matching_dir_entries(
id_path, _AUTHSET_ANY,
stat.S_ISREG, ".json",
)
for version_file in version_files:
version_path = os.path.join(id_path, version_file)
try:
stix_obj = _check_object_from_file(query, version_path,
allow_custom, version)
stix_obj = _check_object_from_file(
query, version_path,
allow_custom, version,
)
if stix_obj:
results.append(stix_obj)
except IOError as e:
@ -350,14 +387,18 @@ def _search_versioned(query, type_path, auth_ids, allow_custom, version):
# For backward-compatibility, also search for plain files named after
# object IDs, in the type directory.
id_files = _get_matching_dir_entries(type_path, auth_ids, stat.S_ISREG,
".json")
id_files = _get_matching_dir_entries(
type_path, auth_ids, stat.S_ISREG,
".json",
)
for id_file in id_files:
id_path = os.path.join(type_path, id_file)
try:
stix_obj = _check_object_from_file(query, id_path, allow_custom,
version)
stix_obj = _check_object_from_file(
query, id_path, allow_custom,
version,
)
if stix_obj:
results.append(stix_obj)
except IOError as e:
@ -373,27 +414,39 @@ def _search_markings(query, markings_path, auth_ids, allow_custom, version):
Searches the given directory, which contains markings data, and return any
which match the query.
:param query: The query to match against
:param markings_path: The directory with STIX markings files
:param auth_ids: Search optimization based on object ID
:param allow_custom: Whether to allow custom properties as well unknown
custom objects.
:param version: Which STIX2 version to use. (e.g. "2.0", "2.1"). If None,
use latest version.
:return: A list of all matching objects
:raises TypeError, stix2.exceptions.STIXError: If any objects had invalid
content
:raises IOError, OSError: If there were any problems opening/reading files
Args:
query: The query to match against
markings_path: The directory with STIX markings files
auth_ids: Search optimization based on object ID
allow_custom: Whether to allow custom properties as well unknown
custom objects.
version (str): If present, it forces the parser to use the version
provided. Otherwise, the library will make the best effort based
on checking the "spec_version" property.
Returns:
A list of all matching objects
Raises:
stix2.exceptions.STIXError: If any objects had invalid content
TypeError: If any objects had invalid content
IOError: If there were any problems opening/reading files
OSError: If there were any problems opening/reading files
"""
results = []
id_files = _get_matching_dir_entries(markings_path, auth_ids, stat.S_ISREG,
".json")
id_files = _get_matching_dir_entries(
markings_path, auth_ids, stat.S_ISREG,
".json",
)
for id_file in id_files:
id_path = os.path.join(markings_path, id_file)
try:
stix_obj = _check_object_from_file(query, id_path, allow_custom,
version)
stix_obj = _check_object_from_file(
query, id_path, allow_custom,
version,
)
if stix_obj:
results.append(stix_obj)
except IOError as e:
@ -413,12 +466,12 @@ class FileSystemStore(DataStoreMixin):
Args:
stix_dir (str): path to directory of STIX objects
allow_custom (bool): whether to allow custom STIX content to be
pushed/retrieved. Defaults to True for FileSystemSource side(retrieving data)
and False for FileSystemSink side(pushing data). However, when
parameter is supplied, it will be applied to both FileSystemSource
and FileSystemSink.
bundlify (bool): whether to wrap objects in bundles when saving them.
Default: False.
pushed/retrieved. Defaults to True for FileSystemSource side
(retrieving data) and False for FileSystemSink
side(pushing data). However, when parameter is supplied, it
will be applied to both FileSystemSource and FileSystemSink.
bundlify (bool): whether to wrap objects in bundles when saving
them. Default: False.
Attributes:
source (FileSystemSource): FileSystemSource
@ -434,7 +487,7 @@ class FileSystemStore(DataStoreMixin):
super(FileSystemStore, self).__init__(
source=FileSystemSource(stix_dir=stix_dir, allow_custom=allow_custom_source),
sink=FileSystemSink(stix_dir=stix_dir, allow_custom=allow_custom_sink, bundlify=bundlify)
sink=FileSystemSink(stix_dir=stix_dir, allow_custom=allow_custom_sink, bundlify=bundlify),
)
@ -466,7 +519,7 @@ class FileSystemSink(DataSink):
def stix_dir(self):
return self._stix_dir
def _check_path_and_write(self, stix_obj):
def _check_path_and_write(self, stix_obj, encoding='utf-8'):
"""Write the given STIX object to a file in the STIX file directory.
"""
type_dir = os.path.join(self._stix_dir, stix_obj["type"])
@ -483,10 +536,21 @@ class FileSystemSink(DataSink):
os.makedirs(obj_dir)
if self.bundlify:
stix_obj = Bundle(stix_obj, allow_custom=self.allow_custom)
if 'spec_version' in stix_obj:
# Assuming future specs will allow multiple SDO/SROs
# versions in a single bundle we won't need to check this
# and just use the latest supported Bundle version.
stix_obj = v21.Bundle(stix_obj, allow_custom=self.allow_custom)
else:
stix_obj = v20.Bundle(stix_obj, allow_custom=self.allow_custom)
with open(file_path, "w") as f:
f.write(str(stix_obj))
# TODO: Better handling of the overwriting case.
if os.path.isfile(file_path):
print("Attempted to overwrite file!", file_path, file=sys.stderr)
else:
with io.open(file_path, 'w', encoding=encoding) as f:
stix_obj = stix_obj.serialize(pretty=True, encoding=encoding, ensure_ascii=False)
f.write(stix_obj)
def add(self, stix_data=None, version=None):
"""Add STIX objects to file directory.
@ -495,8 +559,9 @@ class FileSystemSink(DataSink):
stix_data (STIX object OR dict OR str OR list): valid STIX 2.0 content
in a STIX object (or list of), dict (or list of), or a STIX 2.0
json encoded string.
version (str): Which STIX2 version to use. (e.g. "2.0", "2.1"). If
None, use latest version.
version (str): If present, it forces the parser to use the version
provided. Otherwise, the library will make the best effort based
on checking the "spec_version" property.
Note:
``stix_data`` can be a Bundle object, but each object in it will be
@ -504,7 +569,7 @@ class FileSystemSink(DataSink):
the Bundle contained, but not the Bundle itself.
"""
if isinstance(stix_data, Bundle):
if isinstance(stix_data, (v20.Bundle, v21.Bundle)):
# recursively add individual STIX objects
for stix_obj in stix_data.get("objects", []):
self.add(stix_obj, version=version)
@ -520,12 +585,14 @@ class FileSystemSink(DataSink):
elif isinstance(stix_data, list):
# recursively add individual STIX objects
for stix_obj in stix_data:
self.add(stix_obj, version=version)
self.add(stix_obj)
else:
raise TypeError("stix_data must be a STIX object (or list of), "
"JSON formatted STIX (or list of), "
"or a JSON formatted STIX bundle")
raise TypeError(
"stix_data must be a STIX object (or list of), "
"JSON formatted STIX (or list of), "
"or a JSON formatted STIX bundle",
)
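In practice the sink is usually reached through FileSystemStore.add(); with bundlify enabled, the spec_version check above picks a v21 or v20 Bundle per object. A hedged usage sketch (the directory is illustrative and must already exist):

from stix2 import FileSystemStore
from stix2.v20 import Indicator

fs_store = FileSystemStore("/tmp/stix_data", bundlify=True)

indicator = Indicator(
    labels=["malicious-activity"],
    pattern="[file:hashes.'SHA-256' = 'aabbccddeeff']",
)
# No spec_version on a 2.0 object, so it is wrapped in a stix2.v20.Bundle and,
# given the layout the query code expects, written under
# /tmp/stix_data/indicator/<id>/<modified-timestamp>.json
fs_store.add(indicator)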
class FileSystemSource(DataSource):
@ -560,8 +627,9 @@ class FileSystemSource(DataSource):
stix_id (str): The STIX ID of the STIX object to be retrieved.
_composite_filters (FilterSet): collection of filters passed from the parent
CompositeDataSource, not user supplied
version (str): Which STIX2 version to use. (e.g. "2.0", "2.1"). If
None, use latest version.
version (str): If present, it forces the parser to use the version
provided. Otherwise, the library will make the best effort based
on checking the "spec_version" property.
Returns:
(STIX object): STIX object that has the supplied STIX ID.
@ -591,10 +659,11 @@ class FileSystemSource(DataSource):
Args:
stix_id (str): The STIX ID of the STIX objects to be retrieved.
_composite_filters (FilterSet): collection of filters passed from the parent
CompositeDataSource, not user supplied
version (str): Which STIX2 version to use. (e.g. "2.0", "2.1"). If
None, use latest version.
_composite_filters (FilterSet): collection of filters passed from
the parent CompositeDataSource, not user supplied
version (str): If present, it forces the parser to use the version
provided. Otherwise, the library will make the best effort based
on checking the "spec_version" property.
Returns:
(list): of STIX objects that has the supplied STIX ID.
@ -614,10 +683,11 @@ class FileSystemSource(DataSource):
Args:
query (list): list of filters to search on
_composite_filters (FilterSet): collection of filters passed from the
CompositeDataSource, not user supplied
version (str): Which STIX2 version to use. (e.g. "2.0", "2.1"). If
None, use latest version.
_composite_filters (FilterSet): collection of filters passed from
the CompositeDataSource, not user supplied
version (str): If present, it forces the parser to use the version
provided. Otherwise, the library will make the best effort based
on checking the "spec_version" property.
Returns:
(list): list of STIX objects that matches the supplied
@ -625,9 +695,7 @@ class FileSystemSource(DataSource):
parsed into a python STIX objects and then returned.
"""
all_data = []
query = FilterSet(query)
# combine all query filters
@ -637,19 +705,22 @@ class FileSystemSource(DataSource):
query.add(_composite_filters)
auth_types, auth_ids = _find_search_optimizations(query)
type_dirs = _get_matching_dir_entries(self._stix_dir, auth_types,
stat.S_ISDIR)
type_dirs = _get_matching_dir_entries(
self._stix_dir, auth_types,
stat.S_ISDIR,
)
for type_dir in type_dirs:
type_path = os.path.join(self._stix_dir, type_dir)
if type_dir == "marking-definition":
type_results = _search_markings(query, type_path, auth_ids,
self.allow_custom, version)
type_results = _search_markings(
query, type_path, auth_ids,
self.allow_custom, version,
)
else:
type_results = _search_versioned(query, type_path, auth_ids,
self.allow_custom, version)
type_results = _search_versioned(
query, type_path, auth_ids,
self.allow_custom, version,
)
all_data.extend(type_results)
return all_data
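Reading back mirrors that layout: the AuthSet optimization keeps the query below inside the indicator/ type directory, and each candidate file is parsed with the version left to auto-detection. A short sketch with an illustrative path:

from stix2 import Filter, FileSystemSource

fs_source = FileSystemSource("/tmp/stix_data")
indicators = fs_source.query([Filter("type", "=", "indicator")])
for ind in indicators:
    print(ind.id, ind.modified)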

stix2/datastore/filters.py

@ -1,7 +1,4 @@
"""
Filters for Python STIX 2.0 DataSources, DataSinks, DataStores
"""
"""Filters for Python STIX2 DataSources, DataSinks, DataStores"""
import collections
from datetime import datetime
@ -14,8 +11,10 @@ import stix2.utils
FILTER_OPS = ['=', '!=', 'in', '>', '<', '>=', '<=', 'contains']
"""Supported filter value types"""
FILTER_VALUE_TYPES = (bool, dict, float, int, list, tuple, six.string_types,
datetime)
FILTER_VALUE_TYPES = (
bool, dict, float, int, list, tuple, six.string_types,
datetime,
)
def _check_filter_components(prop, op, value):
@ -38,14 +37,14 @@ def _check_filter_components(prop, op, value):
# check filter value type is supported
raise TypeError("Filter value of '%s' is not supported. The type must be a Python immutable type or dictionary" % type(value))
if prop == "type" and "_" in value:
if prop == 'type' and '_' in value:
# check filter where the property is type, value (type name) cannot have underscores
raise ValueError("Filter for property 'type' cannot have its value '%s' include underscores" % value)
return True
class Filter(collections.namedtuple("Filter", ['property', 'op', 'value'])):
class Filter(collections.namedtuple('Filter', ['property', 'op', 'value'])):
"""STIX 2 filters that support the querying functionality of STIX 2
DataStores and DataSources.
@ -157,7 +156,7 @@ def _check_filter(filter_, stix_obj):
"""
# For properties like granular_markings and external_references
# need to extract the first property from the string.
prop = filter_.property.split(".")[0]
prop = filter_.property.split('.')[0]
if prop not in stix_obj.keys():
# check filter "property" is in STIX object - if cant be
@ -165,9 +164,9 @@ def _check_filter(filter_, stix_obj):
# (i.e. did not make it through the filter)
return False
if "." in filter_.property:
if '.' in filter_.property:
# Check embedded properties, from e.g. granular_markings or external_references
sub_property = filter_.property.split(".", 1)[1]
sub_property = filter_.property.split('.', 1)[1]
sub_filter = filter_._replace(property=sub_property)
if isinstance(stix_obj[prop], list):
@ -222,8 +221,9 @@ class FilterSet(object):
Operates like set, only adding unique stix2.Filters to the FilterSet
NOTE: method designed to be very accomodating (i.e. even accepting filters=None)
as it allows for blind calls (very useful in DataStore)
Note:
method designed to be very accommodating (i.e. even accepting filters=None)
as it allows for blind calls (very useful in DataStore)
Args:
filters: stix2.Filter OR list of stix2.Filter OR stix2.FilterSet
@ -244,11 +244,13 @@ class FilterSet(object):
def remove(self, filters=None):
"""Remove a Filter, list of Filters, or FilterSet from the FilterSet.
NOTE: method designed to be very accomodating (i.e. even accepting filters=None)
as it allows for blind calls (very useful in DataStore)
Note:
method designed to be very accommodating (i.e. even accepting filters=None)
as it allows for blind calls (very useful in DataStore)
Args:
filters: stix2.Filter OR list of stix2.Filter or stix2.FilterSet
"""
if not filters:
# so remove() can be called blindly, useful for
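The Filter namedtuple and FilterSet underpin all of the query plumbing above. A small illustration of building filters, including a dotted sub-property filter, within the value-type rules that _check_filter_components() enforces:

from stix2.datastore.filters import Filter, FilterSet

fs = FilterSet()
fs.add(Filter("type", "=", "indicator"))
fs.add(Filter("labels", "contains", "malicious-activity"))
# Dotted properties reach into embedded structures such as external_references.
fs.add(Filter("external_references.source_name", "=", "capec"))
fs.add(None)            # blind calls are tolerated, as the docstring above notes
print(len(list(fs)))    # 3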

stix2/datastore/memory.py

@ -1,28 +1,33 @@
"""
Python STIX 2.0 Memory Source/Sink
"""
"""Python STIX2 Memory Source/Sink"""
import io
import itertools
import json
import os
from stix2 import v20, v21
from stix2.base import _STIXBase
from stix2.core import Bundle, parse
from stix2.core import parse
from stix2.datastore import DataSink, DataSource, DataStoreMixin
from stix2.datastore.filters import FilterSet, apply_common_filters
from stix2.utils import is_marking
def _add(store, stix_data=None, allow_custom=True, version=None):
def _add(store, stix_data, allow_custom=True, version=None):
"""Add STIX objects to MemoryStore/Sink.
Adds STIX objects to an in-memory dictionary for fast lookup.
Recursive function, breaks down STIX Bundles and lists.
Args:
store: A MemoryStore, MemorySink or MemorySource object.
stix_data (list OR dict OR STIX object): STIX objects to be added
version (str): Which STIX2 version to use. (e.g. "2.0", "2.1"). If
None, use latest version.
allow_custom (bool): Whether to allow custom properties as well unknown
custom objects. Note that unknown custom objects cannot be parsed
into STIX objects, and will be returned as is. Default: False.
version (str): Which STIX2 version to lock the parser to. (e.g. "2.0",
"2.1"). If None, the library makes the best effort to figure
out the spec representation of the object.
"""
if isinstance(stix_data, list):
@ -70,13 +75,15 @@ class _ObjectFamily(object):
def add(self, obj):
self.all_versions[obj["modified"]] = obj
if self.latest_version is None or \
obj["modified"] > self.latest_version["modified"]:
if (self.latest_version is None or
obj["modified"] > self.latest_version["modified"]):
self.latest_version = obj
def __str__(self):
return "<<{}; latest={}>>".format(self.all_versions,
self.latest_version["modified"])
return "<<{}; latest={}>>".format(
self.all_versions,
self.latest_version["modified"],
)
def __repr__(self):
return str(self)
@ -96,8 +103,6 @@ class MemoryStore(DataStoreMixin):
allow_custom (bool): whether to allow custom STIX content.
Only applied when export/input functions called, i.e.
load_from_file() and save_to_file(). Defaults to True.
version (str): Which STIX2 version to use. (e.g. "2.0", "2.1"). If
None, use latest version.
Attributes:
_data (dict): the in-memory dict that holds STIX objects
@ -109,19 +114,21 @@ class MemoryStore(DataStoreMixin):
self._data = {}
if stix_data:
_add(self, stix_data, allow_custom, version=version)
_add(self, stix_data, allow_custom, version)
super(MemoryStore, self).__init__(
source=MemorySource(stix_data=self._data, allow_custom=allow_custom, version=version, _store=True),
sink=MemorySink(stix_data=self._data, allow_custom=allow_custom, version=version, _store=True)
sink=MemorySink(stix_data=self._data, allow_custom=allow_custom, version=version, _store=True),
)
def save_to_file(self, *args, **kwargs):
"""Write SITX objects from in-memory dictionary to JSON file, as a STIX
Bundle.
Bundle. If a directory is given, the Bundle 'id' will be used as
filename. Otherwise, the provided value will be used.
Args:
file_path (str): file path to write STIX data to
path (str): file path to write STIX data to.
encoding (str): The file encoding. Default utf-8.
"""
return self.sink.save_to_file(*args, **kwargs)
@ -129,13 +136,11 @@ class MemoryStore(DataStoreMixin):
def load_from_file(self, *args, **kwargs):
"""Load STIX data from JSON file.
File format is expected to be a single JSON
STIX object or JSON STIX bundle.
File format is expected to be a single JSON STIX object or JSON STIX
bundle.
Args:
file_path (str): file path to load STIX data from
version (str): Which STIX2 version to use. (e.g. "2.0", "2.1"). If
None, use latest version.
path (str): file path to load STIX data from
"""
return self.source.load_from_file(*args, **kwargs)
@ -156,6 +161,9 @@ class MemorySink(DataSink):
allow_custom (bool): whether to allow custom objects/properties
when exporting STIX content to file.
Default: True.
version (str): If present, it forces the parser to use the version
provided. Otherwise, the library will make the best effort based
on checking the "spec_version" property.
Attributes:
_data (dict): the in-memory dict that holds STIX objects.
@ -171,25 +179,41 @@ class MemorySink(DataSink):
else:
self._data = {}
if stix_data:
_add(self, stix_data, allow_custom, version=version)
_add(self, stix_data, allow_custom, version)
def add(self, stix_data, version=None):
_add(self, stix_data, self.allow_custom, version)
add.__doc__ = _add.__doc__
def save_to_file(self, file_path):
file_path = os.path.abspath(file_path)
def save_to_file(self, path, encoding="utf-8"):
path = os.path.abspath(path)
all_objs = itertools.chain.from_iterable(
all_objs = list(itertools.chain.from_iterable(
value.all_versions.values() if isinstance(value, _ObjectFamily)
else [value]
for value in self._data.values()
)
))
if not os.path.exists(os.path.dirname(file_path)):
os.makedirs(os.path.dirname(file_path))
with open(file_path, "w") as f:
f.write(str(Bundle(list(all_objs), allow_custom=self.allow_custom)))
if any("spec_version" in x for x in all_objs):
bundle = v21.Bundle(all_objs, allow_custom=self.allow_custom)
else:
bundle = v20.Bundle(all_objs, allow_custom=self.allow_custom)
if path.endswith(".json"):
if not os.path.exists(os.path.dirname(path)):
os.makedirs(os.path.dirname(path))
else:
if not os.path.exists(path):
os.makedirs(path)
# if the user only provided a directory, use the bundle id for filename
path = os.path.join(path, bundle["id"] + ".json")
with io.open(path, "w", encoding=encoding) as f:
bundle = bundle.serialize(pretty=True, encoding=encoding, ensure_ascii=False)
f.write(bundle)
return path
save_to_file.__doc__ = MemoryStore.save_to_file.__doc__
@ -209,6 +233,9 @@ class MemorySource(DataSource):
allow_custom (bool): whether to allow custom objects/properties
when importing STIX content from file.
Default: True.
version (str): If present, it forces the parser to use the version
provided. Otherwise, the library will make the best effort based
on checking the "spec_version" property.
Attributes:
_data (dict): the in-memory dict that holds STIX objects.
@ -224,7 +251,7 @@ class MemorySource(DataSource):
else:
self._data = {}
if stix_data:
_add(self, stix_data, allow_custom, version=version)
_add(self, stix_data, allow_custom, version)
def get(self, stix_id, _composite_filters=None):
"""Retrieve STIX object from in-memory dict via STIX ID.
@ -251,8 +278,8 @@ class MemorySource(DataSource):
all_filters = list(
itertools.chain(
_composite_filters or [],
self.filters
)
self.filters,
),
)
stix_obj = next(apply_common_filters([stix_obj], all_filters), None)
@ -260,15 +287,13 @@ class MemorySource(DataSource):
return stix_obj
def all_versions(self, stix_id, _composite_filters=None):
"""Retrieve STIX objects from in-memory dict via STIX ID, all versions of it
Note: Since Memory sources/sinks don't handle multiple versions of a
STIX object, this operation is unnecessary. Translate call to get().
"""Retrieve STIX objects from in-memory dict via STIX ID, all versions
of it.
Args:
stix_id (str): The STIX ID of the STIX 2 object to retrieve.
_composite_filters (FilterSet): collection of filters passed from the parent
CompositeDataSource, not user supplied
_composite_filters (FilterSet): collection of filters passed from
the parent CompositeDataSource, not user supplied
Returns:
(list): list of STIX objects that have the supplied ID.
@ -289,12 +314,12 @@ class MemorySource(DataSource):
all_filters = list(
itertools.chain(
_composite_filters or [],
self.filters
)
self.filters,
),
)
results.extend(
apply_common_filters(stix_objs_to_filter, all_filters)
apply_common_filters(stix_objs_to_filter, all_filters),
)
return results
@ -308,8 +333,8 @@ class MemorySource(DataSource):
Args:
query (list): list of filters to search on
_composite_filters (FilterSet): collection of filters passed from the
CompositeDataSource, not user supplied
_composite_filters (FilterSet): collection of filters passed from
the CompositeDataSource, not user supplied
Returns:
(list): list of STIX objects that match the supplied query.
@ -335,12 +360,8 @@ class MemorySource(DataSource):
return all_data
def load_from_file(self, file_path, version=None):
with open(os.path.abspath(file_path), "r") as f:
with io.open(os.path.abspath(file_path), "r") as f:
stix_data = json.load(f)
# Override user version selection if loading a bundle
if stix_data["type"] == "bundle":
version = stix_data["spec_version"]
_add(self, stix_data, self.allow_custom, version)
load_from_file.__doc__ = MemoryStore.load_from_file.__doc__
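Putting the MemoryStore pieces together: save_to_file() now accepts either a full .json path or a directory (in which case the Bundle id becomes the filename and the written path is returned), and load_from_file() no longer needs a version hint. A hedged end-to-end sketch with an illustrative directory:

from stix2 import MemoryStore
from stix2.v20 import Indicator

mem = MemoryStore()
mem.add(Indicator(
    labels=["malicious-activity"],
    pattern="[ipv4-addr:value = '198.51.100.1']",
))

saved_path = mem.save_to_file("/tmp/stix_dump")   # ends with "<bundle-id>.json"

mem2 = MemoryStore()
mem2.load_from_file(saved_path)
print(len(mem2.query()))   # 1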

stix2/datastore/taxii.py

@ -1,12 +1,13 @@
"""
Python STIX 2.x TAXIICollectionStore
"""
"""Python STIX2 TAXIICollection Source/Sink"""
from requests.exceptions import HTTPError
from stix2 import v20, v21
from stix2.base import _STIXBase
from stix2.core import Bundle, parse
from stix2.datastore import (DataSink, DataSource, DataSourceError,
DataStoreMixin)
from stix2.core import parse
from stix2.datastore import (
DataSink, DataSource, DataSourceError, DataStoreMixin,
)
from stix2.datastore.filters import Filter, FilterSet, apply_common_filters
from stix2.utils import deduplicate
@ -43,7 +44,7 @@ class TAXIICollectionStore(DataStoreMixin):
super(TAXIICollectionStore, self).__init__(
source=TAXIICollectionSource(collection, allow_custom=allow_custom_source),
sink=TAXIICollectionSink(collection, allow_custom=allow_custom_sink)
sink=TAXIICollectionSink(collection, allow_custom=allow_custom_sink),
)
@ -66,12 +67,16 @@ class TAXIICollectionSink(DataSink):
if collection.can_write:
self.collection = collection
else:
raise DataSourceError("The TAXII Collection object provided does not have write access"
" to the underlying linked Collection resource")
raise DataSourceError(
"The TAXII Collection object provided does not have write access"
" to the underlying linked Collection resource",
)
except (HTTPError, ValidationError) as e:
raise DataSourceError("The underlying TAXII Collection resource defined in the supplied TAXII"
" Collection object provided could not be reached. Receved error:", e)
raise DataSourceError(
"The underlying TAXII Collection resource defined in the supplied TAXII"
" Collection object provided could not be reached. Receved error:", e,
)
self.allow_custom = allow_custom
@ -79,26 +84,34 @@ class TAXIICollectionSink(DataSink):
"""Add/push STIX content to TAXII Collection endpoint
Args:
stix_data (STIX object OR dict OR str OR list): valid STIX 2.0 content
in a STIX object (or Bundle), STIX onject dict (or Bundle dict), or a STIX 2.0
json encoded string, or list of any of the following
version (str): Which STIX2 version to use. (e.g. "2.0", "2.1"). If
None, use latest version.
stix_data (STIX object OR dict OR str OR list): valid STIX2
content in a STIX object (or Bundle), STIX object dict (or
Bundle dict), or a STIX2 json encoded string, or list of
any of the following.
version (str): If present, it forces the parser to use the version
provided. Otherwise, the library will make the best effort based
on checking the "spec_version" property.
"""
if isinstance(stix_data, _STIXBase):
# adding python STIX object
if stix_data["type"] == "bundle":
bundle = stix_data.serialize(encoding="utf-8")
if stix_data['type'] == 'bundle':
bundle = stix_data.serialize(encoding='utf-8', ensure_ascii=False)
elif 'spec_version' in stix_data:
# If the spec_version is present, use new Bundle object...
bundle = v21.Bundle(stix_data, allow_custom=self.allow_custom).serialize(encoding='utf-8', ensure_ascii=False)
else:
bundle = Bundle(stix_data, allow_custom=self.allow_custom).serialize(encoding="utf-8")
bundle = v20.Bundle(stix_data, allow_custom=self.allow_custom).serialize(encoding='utf-8', ensure_ascii=False)
elif isinstance(stix_data, dict):
# adding python dict (of either Bundle or STIX obj)
if stix_data["type"] == "bundle":
bundle = parse(stix_data, allow_custom=self.allow_custom, version=version).serialize(encoding="utf-8")
if stix_data['type'] == 'bundle':
bundle = parse(stix_data, allow_custom=self.allow_custom, version=version).serialize(encoding='utf-8', ensure_ascii=False)
elif 'spec_version' in stix_data:
# If the spec_version is present, use new Bundle object...
bundle = v21.Bundle(stix_data, allow_custom=self.allow_custom).serialize(encoding='utf-8', ensure_ascii=False)
else:
bundle = Bundle(stix_data, allow_custom=self.allow_custom).serialize(encoding="utf-8")
bundle = v20.Bundle(stix_data, allow_custom=self.allow_custom).serialize(encoding='utf-8', ensure_ascii=False)
elif isinstance(stix_data, list):
# adding list of something - recurse on each
@ -109,10 +122,13 @@ class TAXIICollectionSink(DataSink):
elif isinstance(stix_data, str):
# adding json encoded string of STIX content
stix_data = parse(stix_data, allow_custom=self.allow_custom, version=version)
if stix_data["type"] == "bundle":
bundle = stix_data.serialize(encoding="utf-8")
if stix_data['type'] == 'bundle':
bundle = stix_data.serialize(encoding='utf-8', ensure_ascii=False)
elif 'spec_version' in stix_data:
# If the spec_version is present, use new Bundle object...
bundle = v21.Bundle(stix_data, allow_custom=self.allow_custom).serialize(encoding='utf-8', ensure_ascii=False)
else:
bundle = Bundle(stix_data, allow_custom=self.allow_custom).serialize(encoding="utf-8")
bundle = v20.Bundle(stix_data, allow_custom=self.allow_custom).serialize(encoding='utf-8', ensure_ascii=False)
else:
raise TypeError("stix_data must be as STIX object(or list of),json formatted STIX (or list of), or a json formatted STIX bundle")
@ -139,12 +155,16 @@ class TAXIICollectionSource(DataSource):
if collection.can_read:
self.collection = collection
else:
raise DataSourceError("The TAXII Collection object provided does not have read access"
" to the underlying linked Collection resource")
raise DataSourceError(
"The TAXII Collection object provided does not have read access"
" to the underlying linked Collection resource",
)
except (HTTPError, ValidationError) as e:
raise DataSourceError("The underlying TAXII Collection resource defined in the supplied TAXII"
" Collection object provided could not be reached. Recieved error:", e)
raise DataSourceError(
"The underlying TAXII Collection resource defined in the supplied TAXII"
" Collection object provided could not be reached. Recieved error:", e,
)
self.allow_custom = allow_custom
@ -154,10 +174,11 @@ class TAXIICollectionSource(DataSource):
Args:
stix_id (str): The STIX ID of the STIX object to be retrieved.
_composite_filters (FilterSet): collection of filters passed from the parent
CompositeDataSource, not user supplied
version (str): Which STIX2 version to use. (e.g. "2.0", "2.1"). If
None, use latest version.
version (str): If present, it forces the parser to use the version
provided. Otherwise, the library will make the best effort based
on checking the "spec_version" property.
_composite_filters (FilterSet): collection of filters passed from
the parent CompositeDataSource, not user supplied
Returns:
(STIX object): STIX object that has the supplied STIX ID.
@ -173,15 +194,16 @@ class TAXIICollectionSource(DataSource):
if _composite_filters:
query.add(_composite_filters)
# dont extract TAXII filters from query (to send to TAXII endpoint)
# as directly retrieveing a STIX object by ID
# don't extract TAXII filters from query (to send to TAXII endpoint)
# as directly retrieving a STIX object by ID
try:
stix_objs = self.collection.get_object(stix_id)["objects"]
stix_objs = self.collection.get_object(stix_id)['objects']
stix_obj = list(apply_common_filters(stix_objs, query))
except HTTPError as e:
if e.response.status_code == 404:
# if resource not found or access is denied from TAXII server, return None
# if resource not found or access is denied from TAXII server,
# return None
stix_obj = []
else:
raise DataSourceError("TAXII Collection resource returned error", e)
@ -202,10 +224,11 @@ class TAXIICollectionSource(DataSource):
Args:
stix_id (str): The STIX ID of the STIX objects to be retrieved.
version (str): If present, it forces the parser to use the version
provided. Otherwise, the library will make the best effort based
on checking the "spec_version" property.
_composite_filters (FilterSet): collection of filters passed from the parent
CompositeDataSource, not user supplied
version (str): Which STIX2 version to use. (e.g. "2.0", "2.1"). If
None, use latest version.
Returns:
(see query() as all_versions() is just a wrapper)
@ -213,8 +236,8 @@ class TAXIICollectionSource(DataSource):
"""
# make query in TAXII query format since 'id' is TAXII field
query = [
Filter("id", "=", stix_id),
Filter("version", "=", "all")
Filter('id', '=', stix_id),
Filter('version', '=', 'all'),
]
all_data = self.query(query=query, _composite_filters=_composite_filters)
@ -236,10 +259,11 @@ class TAXIICollectionSource(DataSource):
Args:
query (list): list of filters to search on
_composite_filters (FilterSet): collection of filters passed from the
CompositeDataSource, not user supplied
version (str): Which STIX2 version to use. (e.g. "2.0", "2.1"). If
None, use latest version.
version (str): If present, it forces the parser to use the version
provided. Otherwise, the library will make the best effort based
on checking the "spec_version" property.
_composite_filters (FilterSet): collection of filters passed from
the CompositeDataSource, not user supplied
Returns:
(list): list of STIX objects that matches the supplied
@ -263,7 +287,7 @@ class TAXIICollectionSource(DataSource):
# query TAXII collection
try:
all_data = self.collection.get_objects(**taxii_filters_dict)["objects"]
all_data = self.collection.get_objects(**taxii_filters_dict)['objects']
# deduplicate data (before filtering as reduces wasted filtering)
all_data = deduplicate(all_data)
@ -275,9 +299,11 @@ class TAXIICollectionSource(DataSource):
except HTTPError as e:
# if resources not found or access is denied from TAXII server, return empty list
if e.response.status_code == 404:
raise DataSourceError("The requested STIX objects for the TAXII Collection resource defined in"
" the supplied TAXII Collection object are either not found or access is"
" denied. Received error: ", e)
raise DataSourceError(
"The requested STIX objects for the TAXII Collection resource defined in"
" the supplied TAXII Collection object are either not found or access is"
" denied. Received error: ", e,
)
# parse python STIX objects from the STIX object dicts
stix_objs = [parse(stix_obj_dict, allow_custom=self.allow_custom, version=version) for stix_obj_dict in all_data]
@ -290,18 +316,17 @@ class TAXIICollectionSource(DataSource):
Does not put in TAXII spec format as the TAXII2Client (that we use)
does this for us.
Notes:
Note:
Currently, the TAXII2Client can handle TAXII filters where the
filter value is list, as both a comma-seperated string or python list
filter value is a list, as both a comma-separated string or a python
list.
For instance - "?match[type]=indicator,sighting" can be in a
filter in any of these formats:
Filter("type", "<any op>", "indicator,sighting")
Filter("type", "<any op>", ["indicator", "sighting"])
Args:
query (list): list of filters to extract which ones are TAXII
specific.
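A wiring sketch for the TAXII-backed source; the server URL, collection ID and credentials are placeholders, and the taxii2client API is assumed to match the version this library targets (Collection objects exposing can_read/can_write):

from taxii2client import Collection

from stix2 import Filter, TAXIICollectionSource

collection = Collection(
    "https://taxii.example.com/api1/collections/91a7b528-80eb-42ed-a74d-c6fbd5a26116/",
    user="user", password="pass",
)
tc_source = TAXIICollectionSource(collection)

# TAXII-native filters (e.g. on "type" or "id") are forwarded to the endpoint;
# everything else is applied client-side by apply_common_filters().
indicators = tc_source.query([Filter("type", "=", "indicator")])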

stix2/environment.py

@ -1,5 +1,4 @@
"""Python STIX 2.0 Environment API.
"""
"""Python STIX2 Environment API."""
import copy
@ -27,9 +26,11 @@ class ObjectFactory(object):
default. Defaults to True.
"""
def __init__(self, created_by_ref=None, created=None,
external_references=None, object_marking_refs=None,
list_append=True):
def __init__(
self, created_by_ref=None, created=None,
external_references=None, object_marking_refs=None,
list_append=True,
):
self._defaults = {}
if created_by_ref:
@ -166,3 +167,22 @@ class Environment(DataStoreMixin):
def parse(self, *args, **kwargs):
return _parse(*args, **kwargs)
parse.__doc__ = _parse.__doc__
def creator_of(self, obj):
"""Retrieve the Identity refered to by the object's `created_by_ref`.
Args:
obj: The STIX object whose `created_by_ref` property will be looked
up.
Returns:
str: The STIX object's creator, or None, if the object contains no
`created_by_ref` property or the object's creator cannot be
found.
"""
creator_id = obj.get('created_by_ref', '')
if creator_id:
return self.get(creator_id)
else:
return None
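The new Environment.creator_of() pairs naturally with an ObjectFactory that stamps created_by_ref onto everything it creates. A brief sketch; the identity values are invented for illustration:

from stix2 import Environment, MemoryStore, ObjectFactory
from stix2.v20 import Identity, Indicator

analyst = Identity(name="Example Analyst", identity_class="individual")

factory = ObjectFactory(created_by_ref=analyst.id)
env = Environment(factory=factory, store=MemoryStore())
env.add(analyst)

ind = env.create(
    Indicator,
    labels=["malicious-activity"],
    pattern="[url:value = 'http://bad.example.com/']",
)
env.add(ind)

creator = env.creator_of(ind)   # returns the stored analyst Identity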

stix2/exceptions.py

@ -1,5 +1,4 @@
"""STIX 2 error classes.
"""
"""STIX2 Error Classes."""
class STIXError(Exception):
@ -30,8 +29,10 @@ class MissingPropertiesError(STIXError, ValueError):
def __str__(self):
msg = "No values for required properties for {0}: ({1})."
return msg.format(self.cls.__name__,
", ".join(x for x in self.properties))
return msg.format(
self.cls.__name__,
", ".join(x for x in self.properties),
)
class ExtraPropertiesError(STIXError, TypeError):
@ -44,8 +45,10 @@ class ExtraPropertiesError(STIXError, TypeError):
def __str__(self):
msg = "Unexpected properties for {0}: ({1})."
return msg.format(self.cls.__name__,
", ".join(x for x in self.properties))
return msg.format(
self.cls.__name__,
", ".join(x for x in self.properties),
)
class ImmutableError(STIXError, ValueError):
@ -110,8 +113,10 @@ class MutuallyExclusivePropertiesError(STIXError, TypeError):
def __str__(self):
msg = "The ({1}) properties for {0} are mutually exclusive."
return msg.format(self.cls.__name__,
", ".join(x for x in self.properties))
return msg.format(
self.cls.__name__,
", ".join(x for x in self.properties),
)
class DependentPropertiesError(STIXError, TypeError):
@ -124,8 +129,10 @@ class DependentPropertiesError(STIXError, TypeError):
def __str__(self):
msg = "The property dependencies for {0}: ({1}) are not met."
return msg.format(self.cls.__name__,
", ".join(name for x in self.dependencies for name in x))
return msg.format(
self.cls.__name__,
", ".join(name for x in self.dependencies for name in x),
)
class AtLeastOnePropertyError(STIXError, TypeError):
@ -138,8 +145,10 @@ class AtLeastOnePropertyError(STIXError, TypeError):
def __str__(self):
msg = "At least one of the ({1}) properties for {0} must be populated."
return msg.format(self.cls.__name__,
", ".join(x for x in self.properties))
return msg.format(
self.cls.__name__,
", ".join(x for x in self.properties),
)
class RevokeError(STIXError, ValueError):
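The reformatted __str__ methods above all render the offending class name plus a comma-separated property list. For instance, constructing an SDO without its required properties surfaces a MissingPropertiesError along these lines:

from stix2.exceptions import MissingPropertiesError
from stix2.v20 import Indicator

try:
    Indicator()   # no labels or pattern supplied
except MissingPropertiesError as e:
    print(e)
    # e.g. "No values for required properties for Indicator: (labels, pattern)."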

stix2/markings/__init__.py

@ -9,7 +9,6 @@ Note:
Definitions. The corresponding methods on those classes are identical to
these functions except that the `obj` parameter is omitted.
.. autosummary::
:toctree: markings
@ -51,7 +50,7 @@ def get_markings(obj, selectors=None, inherited=False, descendants=False):
obj,
selectors,
inherited,
descendants
descendants,
)
if inherited:
@ -208,7 +207,7 @@ def is_marked(obj, marking=None, selectors=None, inherited=False, descendants=Fa
marking,
selectors,
inherited,
descendants
descendants,
)
if inherited:
@ -221,7 +220,7 @@ def is_marked(obj, marking=None, selectors=None, inherited=False, descendants=Fa
granular_marks,
selectors,
inherited,
descendants
descendants,
)
result = result or object_markings.is_marked(obj, object_marks)
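The top-level helpers here merge object-level and granular results for the caller. A short sketch of the granular side, using a built-in TLP marking; the selector targets the object's own "labels" property:

from stix2 import TLP_AMBER, markings
from stix2.v20 import Indicator

ind = Indicator(
    labels=["malicious-activity"],
    pattern="[domain-name:value = 'bad.example.com']",
)

marked = markings.add_markings(ind, TLP_AMBER, selectors=["labels"])
print(markings.get_markings(marked, selectors=["labels"]))             # [TLP_AMBER.id]
print(markings.is_marked(marked, TLP_AMBER.id, selectors=["labels"]))  # True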

stix2/markings/granular_markings.py

@ -1,5 +1,4 @@
"""Functions for working with STIX 2.0 granular markings.
"""
"""Functions for working with STIX2 granular markings."""
from stix2 import exceptions
from stix2.markings import utils
@ -29,7 +28,7 @@ def get_markings(obj, selectors, inherited=False, descendants=False):
selectors = utils.convert_to_list(selectors)
utils.validate(obj, selectors)
granular_markings = obj.get("granular_markings", [])
granular_markings = obj.get('granular_markings', [])
if not granular_markings:
return []
@ -38,11 +37,13 @@ def get_markings(obj, selectors, inherited=False, descendants=False):
for marking in granular_markings:
for user_selector in selectors:
for marking_selector in marking.get("selectors", []):
if any([(user_selector == marking_selector), # Catch explicit selectors.
(user_selector.startswith(marking_selector) and inherited), # Catch inherited selectors.
(marking_selector.startswith(user_selector) and descendants)]): # Catch descendants selectors
refs = marking.get("marking_ref", [])
for marking_selector in marking.get('selectors', []):
if any([
(user_selector == marking_selector), # Catch explicit selectors.
(user_selector.startswith(marking_selector) and inherited), # Catch inherited selectors.
(marking_selector.startswith(user_selector) and descendants),
]): # Catch descendants selectors
refs = marking.get('marking_ref', [])
results.update([refs])
return list(results)
@ -93,7 +94,7 @@ def remove_markings(obj, marking, selectors):
marking = utils.convert_to_marking_list(marking)
utils.validate(obj, selectors)
granular_markings = obj.get("granular_markings")
granular_markings = obj.get('granular_markings')
if not granular_markings:
return obj
@ -102,9 +103,9 @@ def remove_markings(obj, marking, selectors):
to_remove = []
for m in marking:
to_remove.append({"marking_ref": m, "selectors": selectors})
to_remove.append({'marking_ref': m, 'selectors': selectors})
remove = utils.build_granular_marking(to_remove).get("granular_markings")
remove = utils.build_granular_marking(to_remove).get('granular_markings')
if not any(marking in granular_markings for marking in remove):
raise exceptions.MarkingNotFoundError(obj, remove)
@ -145,10 +146,10 @@ def add_markings(obj, marking, selectors):
granular_marking = []
for m in marking:
granular_marking.append({"marking_ref": m, "selectors": sorted(selectors)})
granular_marking.append({'marking_ref': m, 'selectors': sorted(selectors)})
if obj.get("granular_markings"):
granular_marking.extend(obj.get("granular_markings"))
if obj.get('granular_markings'):
granular_marking.extend(obj.get('granular_markings'))
granular_marking = utils.expand_markings(granular_marking)
granular_marking = utils.compress_markings(granular_marking)
@ -176,7 +177,7 @@ def clear_markings(obj, selectors):
selectors = utils.convert_to_list(selectors)
utils.validate(obj, selectors)
granular_markings = obj.get("granular_markings")
granular_markings = obj.get('granular_markings')
if not granular_markings:
return obj
@ -184,25 +185,26 @@ def clear_markings(obj, selectors):
granular_markings = utils.expand_markings(granular_markings)
sdo = utils.build_granular_marking(
[{"selectors": selectors, "marking_ref": "N/A"}]
[{'selectors': selectors, 'marking_ref': 'N/A'}],
)
clear = sdo.get("granular_markings", [])
clear = sdo.get('granular_markings', [])
if not any(clear_selector in sdo_selectors.get("selectors", [])
for sdo_selectors in granular_markings
for clear_marking in clear
for clear_selector in clear_marking.get("selectors", [])
):
if not any(
clear_selector in sdo_selectors.get('selectors', [])
for sdo_selectors in granular_markings
for clear_marking in clear
for clear_selector in clear_marking.get('selectors', [])
):
raise exceptions.MarkingNotFoundError(obj, clear)
for granular_marking in granular_markings:
for s in selectors:
if s in granular_marking.get("selectors", []):
marking_refs = granular_marking.get("marking_ref")
if s in granular_marking.get('selectors', []):
marking_refs = granular_marking.get('marking_ref')
if marking_refs:
granular_marking["marking_ref"] = ""
granular_marking['marking_ref'] = ''
granular_markings = utils.compress_markings(granular_markings)
@ -245,19 +247,21 @@ def is_marked(obj, marking=None, selectors=None, inherited=False, descendants=Fa
marking = utils.convert_to_marking_list(marking)
utils.validate(obj, selectors)
granular_markings = obj.get("granular_markings", [])
granular_markings = obj.get('granular_markings', [])
marked = False
markings = set()
for granular_marking in granular_markings:
for user_selector in selectors:
for marking_selector in granular_marking.get("selectors", []):
for marking_selector in granular_marking.get('selectors', []):
if any([(user_selector == marking_selector), # Catch explicit selectors.
(user_selector.startswith(marking_selector) and inherited), # Catch inherited selectors.
(marking_selector.startswith(user_selector) and descendants)]): # Catch descendants selectors
marking_ref = granular_marking.get("marking_ref", "")
if any([
(user_selector == marking_selector), # Catch explicit selectors.
(user_selector.startswith(marking_selector) and inherited), # Catch inherited selectors.
(marking_selector.startswith(user_selector) and descendants),
]): # Catch descendants selectors
marking_ref = granular_marking.get('marking_ref', '')
if marking and any(x == marking_ref for x in marking):
markings.update([marking_ref])

View File

@ -1,5 +1,4 @@
"""Functions for working with STIX 2.0 object markings.
"""
"""Functions for working with STIX2 object markings."""
from stix2 import exceptions
from stix2.markings import utils
@ -18,7 +17,7 @@ def get_markings(obj):
markings are present in `object_marking_refs`.
"""
return obj.get("object_marking_refs", [])
return obj.get('object_marking_refs', [])
def add_markings(obj, marking):
@ -35,7 +34,7 @@ def add_markings(obj, marking):
"""
marking = utils.convert_to_marking_list(marking)
object_markings = set(obj.get("object_marking_refs", []) + marking)
object_markings = set(obj.get('object_marking_refs', []) + marking)
return new_version(obj, object_marking_refs=list(object_markings), allow_custom=True)
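A rough usage sketch for the object-level case above; the indicator values and the marking-definition id are just syntactically valid placeholders:

from stix2.markings import object_markings
from stix2.v20 import Indicator

indicator = Indicator(
    labels=["malicious-activity"],
    pattern="[file:name = 'foo.exe']",
)

# add_markings() returns a new version of the object with the ref appended.
marked = object_markings.add_markings(
    indicator,
    "marking-definition--f88d31f6-486f-44da-b317-01333bde0b82",
)
print(marked.object_marking_refs)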
@ -59,12 +58,12 @@ def remove_markings(obj, marking):
"""
marking = utils.convert_to_marking_list(marking)
object_markings = obj.get("object_marking_refs", [])
object_markings = obj.get('object_marking_refs', [])
if not object_markings:
return obj
if any(x not in obj["object_marking_refs"] for x in marking):
if any(x not in obj['object_marking_refs'] for x in marking):
raise exceptions.MarkingNotFoundError(obj, marking)
new_markings = [x for x in object_markings if x not in marking]
@ -124,7 +123,7 @@ def is_marked(obj, marking=None):
"""
marking = utils.convert_to_marking_list(marking)
object_markings = obj.get("object_marking_refs", [])
object_markings = obj.get('object_marking_refs', [])
if marking:
return any(x in object_markings for x in marking)

View File

@ -1,5 +1,4 @@
"""Utility functions for STIX 2.0 data markings.
"""
"""Utility functions for STIX2 data markings."""
import collections
@ -23,7 +22,7 @@ def _evaluate_expression(obj, selector):
"""
for items, value in iterpath(obj):
path = ".".join(items)
path = '.'.join(items)
if path == selector and value:
return [value]
@ -119,12 +118,12 @@ def compress_markings(granular_markings):
map_ = collections.defaultdict(set)
for granular_marking in granular_markings:
if granular_marking.get("marking_ref"):
map_[granular_marking.get("marking_ref")].update(granular_marking.get("selectors"))
if granular_marking.get('marking_ref'):
map_[granular_marking.get('marking_ref')].update(granular_marking.get('selectors'))
compressed = \
[
{"marking_ref": marking_ref, "selectors": sorted(selectors)}
{'marking_ref': marking_ref, 'selectors': sorted(selectors)}
for marking_ref, selectors in six.iteritems(map_)
]
@ -173,14 +172,14 @@ def expand_markings(granular_markings):
expanded = []
for marking in granular_markings:
selectors = marking.get("selectors")
marking_ref = marking.get("marking_ref")
selectors = marking.get('selectors')
marking_ref = marking.get('marking_ref')
expanded.extend(
[
{"marking_ref": marking_ref, "selectors": [selector]}
{'marking_ref': marking_ref, 'selectors': [selector]}
for selector in selectors
]
],
)
return expanded
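A small sketch of how these two helpers relate (the marking_ref value below is a placeholder string, not a real id):

from stix2.markings import utils

granular = [{"marking_ref": "marking-definition--1", "selectors": ["description", "name"]}]

expanded = utils.expand_markings(granular)
# one entry per selector:
# [{'marking_ref': 'marking-definition--1', 'selectors': ['description']},
#  {'marking_ref': 'marking-definition--1', 'selectors': ['name']}]

compressed = utils.compress_markings(expanded)
# grouped back by marking_ref, selectors sorted:
# [{'marking_ref': 'marking-definition--1', 'selectors': ['description', 'name']}]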
@ -189,7 +188,7 @@ def expand_markings(granular_markings):
def build_granular_marking(granular_marking):
"""Return a dictionary with the required structure for a granular marking.
"""
return {"granular_markings": expand_markings(granular_marking)}
return {'granular_markings': expand_markings(granular_marking)}
def iterpath(obj, path=None):
@ -229,7 +228,7 @@ def iterpath(obj, path=None):
elif isinstance(varobj, list):
for item in varobj:
index = "[{0}]".format(varobj.index(item))
index = '[{0}]'.format(varobj.index(item))
path.append(index)
yield (path, item)

362
stix2/pattern_visitor.py Normal file
View File

@ -0,0 +1,362 @@
import importlib
import inspect
from antlr4 import CommonTokenStream, InputStream
import six
from stix2patterns.grammars.STIXPatternLexer import STIXPatternLexer
from stix2patterns.grammars.STIXPatternParser import (
STIXPatternParser, TerminalNode,
)
from stix2patterns.grammars.STIXPatternVisitor import STIXPatternVisitor
from stix2patterns.validator import STIXPatternErrorListener
from .patterns import *
from .patterns import _BooleanExpression
# flake8: noqa F405
def collapse_lists(lists):
result = []
for c in lists:
if isinstance(c, list):
result.extend(c)
else:
result.append(c)
return result
def remove_terminal_nodes(parse_tree_nodes):
values = []
for x in parse_tree_nodes:
if not isinstance(x, TerminalNode):
values.append(x)
return values
# This class defines a complete generic visitor for a parse tree produced by STIXPatternParser.
class STIXPatternVisitorForSTIX2(STIXPatternVisitor):
classes = {}
def __init__(self, module_suffix, module_name):
if module_suffix and module_name:
self.module_suffix = module_suffix
if not STIXPatternVisitorForSTIX2.classes:
module = importlib.import_module(module_name)
for k, c in inspect.getmembers(module, inspect.isclass):
STIXPatternVisitorForSTIX2.classes[k] = c
else:
self.module_suffix = None
super(STIXPatternVisitor, self).__init__()
def get_class(self, class_name):
if class_name in STIXPatternVisitorForSTIX2.classes:
return STIXPatternVisitorForSTIX2.classes[class_name]
else:
return None
def instantiate(self, klass_name, *args):
klass_to_instantiate = None
if self.module_suffix:
klass_to_instantiate = self.get_class(klass_name + "For" + self.module_suffix)
if not klass_to_instantiate:
# use the classes in python_stix2
klass_to_instantiate = globals()[klass_name]
return klass_to_instantiate(*args)
# Visit a parse tree produced by STIXPatternParser#pattern.
def visitPattern(self, ctx):
children = self.visitChildren(ctx)
return children[0]
# Visit a parse tree produced by STIXPatternParser#observationExpressions.
def visitObservationExpressions(self, ctx):
children = self.visitChildren(ctx)
if len(children) == 1:
return children[0]
else:
return FollowedByObservationExpression([children[0], children[2]])
# Visit a parse tree produced by STIXPatternParser#observationExpressionOr.
def visitObservationExpressionOr(self, ctx):
children = self.visitChildren(ctx)
if len(children) == 1:
return children[0]
else:
return self.instantiate("OrObservationExpression", [children[0], children[2]])
# Visit a parse tree produced by STIXPatternParser#observationExpressionAnd.
def visitObservationExpressionAnd(self, ctx):
children = self.visitChildren(ctx)
if len(children) == 1:
return children[0]
else:
return self.instantiate("AndObservationExpression", [children[0], children[2]])
# Visit a parse tree produced by STIXPatternParser#observationExpressionRepeated.
def visitObservationExpressionRepeated(self, ctx):
children = self.visitChildren(ctx)
return self.instantiate("QualifiedObservationExpression", children[0], children[1])
# Visit a parse tree produced by STIXPatternParser#observationExpressionSimple.
def visitObservationExpressionSimple(self, ctx):
children = self.visitChildren(ctx)
return self.instantiate("ObservationExpression", children[1])
# Visit a parse tree produced by STIXPatternParser#observationExpressionCompound.
def visitObservationExpressionCompound(self, ctx):
children = self.visitChildren(ctx)
return self.instantiate("ObservationExpression", children[1])
# Visit a parse tree produced by STIXPatternParser#observationExpressionWithin.
def visitObservationExpressionWithin(self, ctx):
children = self.visitChildren(ctx)
return self.instantiate("QualifiedObservationExpression", children[0], children[1])
# Visit a parse tree produced by STIXPatternParser#observationExpressionStartStop.
def visitObservationExpressionStartStop(self, ctx):
children = self.visitChildren(ctx)
return self.instantiate("QualifiedObservationExpression", children[0], children[1])
# Visit a parse tree produced by STIXPatternParser#comparisonExpression.
def visitComparisonExpression(self, ctx):
children = self.visitChildren(ctx)
if len(children) == 1:
return children[0]
else:
if isinstance(children[0], _BooleanExpression):
children[0].operands.append(children[2])
return children[0]
else:
return self.instantiate("OrBooleanExpression", [children[0], children[2]])
# Visit a parse tree produced by STIXPatternParser#comparisonExpressionAnd.
def visitComparisonExpressionAnd(self, ctx):
# TODO: NOT
children = self.visitChildren(ctx)
if len(children) == 1:
return children[0]
else:
if isinstance(children[0], _BooleanExpression):
children[0].operands.append(children[2])
return children[0]
else:
return self.instantiate("AndBooleanExpression", [children[0], children[2]])
# Visit a parse tree produced by STIXPatternParser#propTestEqual.
def visitPropTestEqual(self, ctx):
children = self.visitChildren(ctx)
operator = children[1].symbol.type
negated = operator != STIXPatternParser.EQ
return self.instantiate(
"EqualityComparisonExpression", children[0], children[3 if len(children) > 3 else 2],
negated,
)
# Visit a parse tree produced by STIXPatternParser#propTestOrder.
def visitPropTestOrder(self, ctx):
children = self.visitChildren(ctx)
operator = children[1].symbol.type
if operator == STIXPatternParser.GT:
return self.instantiate(
"GreaterThanComparisonExpression", children[0],
children[3 if len(children) > 3 else 2], False,
)
elif operator == STIXPatternParser.LT:
return self.instantiate(
"LessThanComparisonExpression", children[0],
children[3 if len(children) > 3 else 2], False,
)
elif operator == STIXPatternParser.GE:
return self.instantiate(
"GreaterThanEqualComparisonExpression", children[0],
children[3 if len(children) > 3 else 2], False,
)
elif operator == STIXPatternParser.LE:
return self.instantiate(
"LessThanEqualComparisonExpression", children[0],
children[3 if len(children) > 3 else 2], False,
)
# Visit a parse tree produced by STIXPatternParser#propTestSet.
def visitPropTestSet(self, ctx):
children = self.visitChildren(ctx)
return self.instantiate("InComparisonExpression", children[0], children[3 if len(children) > 3 else 2], False)
# Visit a parse tree produced by STIXPatternParser#propTestLike.
def visitPropTestLike(self, ctx):
children = self.visitChildren(ctx)
return self.instantiate("LikeComparisonExpression", children[0], children[3 if len(children) > 3 else 2], False)
# Visit a parse tree produced by STIXPatternParser#propTestRegex.
def visitPropTestRegex(self, ctx):
children = self.visitChildren(ctx)
return self.instantiate(
"MatchesComparisonExpression", children[0], children[3 if len(children) > 3 else 2],
False,
)
# Visit a parse tree produced by STIXPatternParser#propTestIsSubset.
def visitPropTestIsSubset(self, ctx):
children = self.visitChildren(ctx)
return self.instantiate("IsSubsetComparisonExpression", children[0], children[3 if len(children) > 3 else 2])
# Visit a parse tree produced by STIXPatternParser#propTestIsSuperset.
def visitPropTestIsSuperset(self, ctx):
children = self.visitChildren(ctx)
return self.instantiate("IsSupersetComparisonExpression", children[0], children[3 if len(children) > 3 else 2])
# Visit a parse tree produced by STIXPatternParser#propTestParen.
def visitPropTestParen(self, ctx):
children = self.visitChildren(ctx)
return self.instantiate("ParentheticalExpression", children[1])
# Visit a parse tree produced by STIXPatternParser#startStopQualifier.
def visitStartStopQualifier(self, ctx):
children = self.visitChildren(ctx)
return StartStopQualifier(children[1], children[3])
# Visit a parse tree produced by STIXPatternParser#withinQualifier.
def visitWithinQualifier(self, ctx):
children = self.visitChildren(ctx)
return WithinQualifier(children[1])
# Visit a parse tree produced by STIXPatternParser#repeatedQualifier.
def visitRepeatedQualifier(self, ctx):
children = self.visitChildren(ctx)
return RepeatQualifier(children[1])
# Visit a parse tree produced by STIXPatternParser#objectPath.
def visitObjectPath(self, ctx):
children = self.visitChildren(ctx)
flat_list = collapse_lists(children[2:])
property_path = []
i = 0
while i < len(flat_list):
current = flat_list[i]
if i == len(flat_list)-1:
property_path.append(current)
break
next = flat_list[i+1]
if isinstance(next, TerminalNode):
property_path.append(self.instantiate("ListObjectPathComponent", current.property_name, next.getText()))
i += 2
else:
property_path.append(current)
i += 1
return self.instantiate("ObjectPath", children[0].getText(), property_path)
# Visit a parse tree produced by STIXPatternParser#objectType.
def visitObjectType(self, ctx):
children = self.visitChildren(ctx)
return children[0]
# Visit a parse tree produced by STIXPatternParser#firstPathComponent.
def visitFirstPathComponent(self, ctx):
children = self.visitChildren(ctx)
step = children[0].getText()
# if step.endswith("_ref"):
# return stix2.ReferenceObjectPathComponent(step)
# else:
return self.instantiate("BasicObjectPathComponent", step, False)
# Visit a parse tree produced by STIXPatternParser#indexPathStep.
def visitIndexPathStep(self, ctx):
children = self.visitChildren(ctx)
return children[1]
# Visit a parse tree produced by STIXPatternParser#pathStep.
def visitPathStep(self, ctx):
return collapse_lists(self.visitChildren(ctx))
# Visit a parse tree produced by STIXPatternParser#keyPathStep.
def visitKeyPathStep(self, ctx):
children = self.visitChildren(ctx)
if isinstance(children[1], StringConstant):
# special case for hashes
return children[1].value
else:
return self.instantiate("BasicObjectPathComponent", children[1].getText(), True)
# Visit a parse tree produced by STIXPatternParser#setLiteral.
def visitSetLiteral(self, ctx):
children = self.visitChildren(ctx)
return self.instantiate("ListConstant", remove_terminal_nodes(children))
# Visit a parse tree produced by STIXPatternParser#primitiveLiteral.
def visitPrimitiveLiteral(self, ctx):
children = self.visitChildren(ctx)
return children[0]
# Visit a parse tree produced by STIXPatternParser#orderableLiteral.
def visitOrderableLiteral(self, ctx):
children = self.visitChildren(ctx)
return children[0]
def visitTerminal(self, node):
if node.symbol.type == STIXPatternParser.IntPosLiteral or node.symbol.type == STIXPatternParser.IntNegLiteral:
return IntegerConstant(node.getText())
elif node.symbol.type == STIXPatternParser.FloatPosLiteral or node.symbol.type == STIXPatternParser.FloatNegLiteral:
return FloatConstant(node.getText())
elif node.symbol.type == STIXPatternParser.HexLiteral:
return HexConstant(node.getText(), from_parse_tree=True)
elif node.symbol.type == STIXPatternParser.BinaryLiteral:
return BinaryConstant(node.getText(), from_parse_tree=True)
elif node.symbol.type == STIXPatternParser.StringLiteral:
return StringConstant(node.getText().strip('\''), from_parse_tree=True)
elif node.symbol.type == STIXPatternParser.BoolLiteral:
return BooleanConstant(node.getText())
elif node.symbol.type == STIXPatternParser.TimestampLiteral:
return TimestampConstant(node.getText())
else:
return node
def aggregateResult(self, aggregate, nextResult):
if aggregate:
aggregate.append(nextResult)
elif nextResult:
aggregate = [nextResult]
return aggregate
def create_pattern_object(pattern, module_suffix="", module_name=""):
"""
Create a STIX pattern AST from a pattern string by parsing it with the
ANTLR STIX pattern grammar and walking the parse tree with
STIXPatternVisitorForSTIX2. If module_suffix and module_name are provided,
classes named "<ClassName>For<module_suffix>" from that module are used in
place of the default classes from stix2.patterns.
"""
start = ''
if isinstance(pattern, six.string_types):
start = pattern[:2]
pattern = InputStream(pattern)
if not start:
start = pattern.readline()[:2]
pattern.seek(0)
parseErrListener = STIXPatternErrorListener()
lexer = STIXPatternLexer(pattern)
# it always adds a console listener by default... remove it.
lexer.removeErrorListeners()
stream = CommonTokenStream(lexer)
parser = STIXPatternParser(stream)
parser.buildParseTrees = True
# it always adds a console listener by default... remove it.
parser.removeErrorListeners()
parser.addErrorListener(parseErrListener)
# To improve error messages, replace "<INVALID>" in the literal
# names with symbolic names. This is a hack, but seemed like
# the simplest workaround.
for i, lit_name in enumerate(parser.literalNames):
if lit_name == u"<INVALID>":
parser.literalNames[i] = parser.symbolicNames[i]
tree = parser.pattern()
builder = STIXPatternVisitorForSTIX2(module_suffix, module_name)
return builder.visit(tree)
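A minimal usage sketch, assuming the module is importable as stix2.pattern_visitor; the returned value is built from the expression classes in stix2.patterns and can be rendered back to a pattern string:

from stix2.pattern_visitor import create_pattern_object

ast = create_pattern_object("[file:hashes.'SHA-256' = 'aec070645fe53ee3b3763059376134f058cc337247c978add178b6ccdfb0019f']")
print(type(ast).__name__)   # ObservationExpression
print(str(ast))             # round-trips to an equivalent pattern string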

View File

@ -1,11 +1,12 @@
"""Classes to aid in working with the STIX 2 patterning language.
"""
"""Classes to aid in working with the STIX 2 patterning language."""
import base64
import binascii
import datetime
import re
import six
from .utils import parse_into_datetime
@ -13,6 +14,14 @@ def escape_quotes_and_backslashes(s):
return s.replace(u'\\', u'\\\\').replace(u"'", u"\\'")
def quote_if_needed(x):
if isinstance(x, six.string_types):
if x.find("-") != -1:
if not x.startswith("'"):
return "'" + x + "'"
return x
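Roughly, this is what lets object path components containing a hyphen (hash names, mostly) come out quoted when a path is rendered, for example:

from stix2.patterns import ObjectPath

path = ObjectPath("file", ["hashes", "SHA-256"])
print(str(path))   # file:hashes.'SHA-256'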
class _Constant(object):
pass
@ -23,11 +32,13 @@ class StringConstant(_Constant):
Args:
value (str): string value
"""
def __init__(self, value):
def __init__(self, value, from_parse_tree=False):
self.needs_to_be_quoted = not from_parse_tree
self.value = value
def __str__(self):
return "'%s'" % escape_quotes_and_backslashes(self.value)
return "'%s'" % (escape_quotes_and_backslashes(self.value) if self.needs_to_be_quoted else self.value)
class TimestampConstant(_Constant):
@ -86,8 +97,8 @@ class BooleanConstant(_Constant):
self.value = value
return
trues = ['true', 't']
falses = ['false', 'f']
trues = ['true', 't', '1']
falses = ['false', 'f', '0']
try:
if value.lower() in trues:
self.value = True
@ -143,7 +154,7 @@ class HashConstant(StringConstant):
vocab_key = _HASH_REGEX[key][1]
if not re.match(_HASH_REGEX[key][0], value):
raise ValueError("'%s' is not a valid %s hash" % (value, vocab_key))
self.value = value
super(HashConstant, self).__init__(value)
class BinaryConstant(_Constant):
@ -152,7 +163,13 @@ class BinaryConstant(_Constant):
Args:
value (str): base64 encoded string value
"""
def __init__(self, value):
def __init__(self, value, from_parse_tree=False):
# support with or without a 'b'
if from_parse_tree:
m = re.match("^b'(.+)'$", value)
if m:
value = m.group(1)
try:
base64.b64decode(value)
self.value = value
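A sketch of what the new flag accepts; the second form is how the literal appears inside a pattern string, and the rendered b'...' output is what the tests below expect:

from stix2.patterns import BinaryConstant

print(str(BinaryConstant("dGhpcyBpcyBhIHRlc3Q=")))                           # b'dGhpcyBpcyBhIHRlc3Q='
print(str(BinaryConstant("b'dGhpcyBpcyBhIHRlc3Q='", from_parse_tree=True)))  # b'dGhpcyBpcyBhIHRlc3Q='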
@ -169,10 +186,16 @@ class HexConstant(_Constant):
Args:
value (str): hexadecimal value
"""
def __init__(self, value):
if not re.match('^([a-fA-F0-9]{2})+$', value):
raise ValueError("must contain an even number of hexadecimal characters")
self.value = value
def __init__(self, value, from_parse_tree=False):
# support with or without an 'h'
if not from_parse_tree and re.match('^([a-fA-F0-9]{2})+$', value):
self.value = value
else:
m = re.match("^h'(([a-fA-F0-9]{2})+)'$", value)
if m:
self.value = m.group(1)
else:
raise ValueError("must contain an even number of hexadecimal characters")
def __str__(self):
return "h'%s'" % self.value
@ -185,10 +208,11 @@ class ListConstant(_Constant):
value (list): list of values
"""
def __init__(self, values):
self.value = values
# handle _Constants or make a _Constant
self.value = [x if isinstance(x, _Constant) else make_constant(x) for x in values]
def __str__(self):
return "(" + ", ".join([("%s" % make_constant(x)) for x in self.value]) + ")"
return "(" + ", ".join(["%s" % x for x in self.value]) + ")"
def make_constant(value):
@ -229,7 +253,10 @@ class _ObjectPathComponent(object):
parse1 = component_name.split("[")
return ListObjectPathComponent(parse1[0], parse1[1][:-1])
else:
return BasicObjectPathComponent(component_name)
return BasicObjectPathComponent(component_name, False)
def __str__(self):
return quote_if_needed(self.property_name)
class BasicObjectPathComponent(_ObjectPathComponent):
@ -243,14 +270,11 @@ class BasicObjectPathComponent(_ObjectPathComponent):
property_name (str): object property name
is_key (bool): is dictionary key, default: False
"""
def __init__(self, property_name, is_key=False):
def __init__(self, property_name, is_key):
self.property_name = property_name
# TODO: set is_key to True if this component is a dictionary key
# self.is_key = is_key
def __str__(self):
return self.property_name
class ListObjectPathComponent(_ObjectPathComponent):
"""List object path component (for an observation or expression)
@ -264,7 +288,7 @@ class ListObjectPathComponent(_ObjectPathComponent):
self.index = index
def __str__(self):
return "%s[%s]" % (self.property_name, self.index)
return "%s[%s]" % (quote_if_needed(self.property_name), self.index)
class ReferenceObjectPathComponent(_ObjectPathComponent):
@ -276,9 +300,6 @@ class ReferenceObjectPathComponent(_ObjectPathComponent):
def __init__(self, reference_property_name):
self.property_name = reference_property_name
def __str__(self):
return self.property_name
class ObjectPath(object):
"""Pattern operand object (property) path
@ -289,12 +310,14 @@ class ObjectPath(object):
"""
def __init__(self, object_type_name, property_path):
self.object_type_name = object_type_name
self.property_path = [x if isinstance(x, _ObjectPathComponent) else
_ObjectPathComponent.create_ObjectPathComponent(x)
for x in property_path]
self.property_path = [
x if isinstance(x, _ObjectPathComponent) else
_ObjectPathComponent.create_ObjectPathComponent(x)
for x in property_path
]
def __str__(self):
return "%s:%s" % (self.object_type_name, ".".join(["%s" % x for x in self.property_path]))
return "%s:%s" % (self.object_type_name, ".".join(["%s" % quote_if_needed(x) for x in self.property_path]))
def merge(self, other):
"""Extend the object property with that of the supplied object property path"""

View File

@ -1,8 +1,9 @@
"""Classes for representing properties of STIX Objects and Cyber Observables.
"""
"""Classes for representing properties of STIX Objects and Cyber Observables."""
import base64
import binascii
import collections
import copy
import inspect
import re
import uuid
@ -11,19 +12,22 @@ from six import string_types, text_type
from stix2patterns.validator import run_validator
from .base import _STIXBase
from .exceptions import DictionaryKeyError
from .utils import _get_dict, parse_into_datetime
from .core import STIX2_OBJ_MAPS, parse, parse_observable
from .exceptions import CustomContentError, DictionaryKeyError
from .utils import _get_dict, get_class_hierarchy_names, parse_into_datetime
# This uses the regular expression for a RFC 4122, Version 4 UUID. In the
# 8-4-4-4-12 hexadecimal representation, the first hex digit of the third
# component must be a 4, and the first hex digit of the fourth component must be
# 8, 9, a, or b (10xx bit pattern).
ID_REGEX = re.compile("^[a-z0-9][a-z0-9-]+[a-z0-9]--" # object type
"[0-9a-fA-F]{8}-"
"[0-9a-fA-F]{4}-"
"4[0-9a-fA-F]{3}-"
"[89abAB][0-9a-fA-F]{3}-"
"[0-9a-fA-F]{12}$")
# component must be a 4, and the first hex digit of the fourth component
# must be 8, 9, a, or b (10xx bit pattern).
ID_REGEX = re.compile(
r"^[a-z0-9][a-z0-9-]+[a-z0-9]--" # object type
"[0-9a-fA-F]{8}-"
"[0-9a-fA-F]{4}-"
"4[0-9a-fA-F]{3}-"
"[89abAB][0-9a-fA-F]{3}-"
"[0-9a-fA-F]{12}$",
)
ID_REGEX_interoperability = re.compile("^[a-z0-9][a-z0-9-]+[a-z0-9]--" # object type
"[0-9a-fA-F]{8}-"
@ -44,14 +48,15 @@ class Property(object):
``__init__()``.
Args:
required (bool): If ``True``, the property must be provided when creating an
object with that property. No default value exists for these properties.
(Default: ``False``)
required (bool): If ``True``, the property must be provided when
creating an object with that property. No default value exists for
these properties. (Default: ``False``)
fixed: This provides a constant default value. Users are free to
provide this value explicity when constructing an object (which allows
you to copy **all** values from an existing object to a new object), but
if the user provides a value other than the ``fixed`` value, it will raise
an error. This is semantically equivalent to defining both:
provide this value explicity when constructing an object (which
allows you to copy **all** values from an existing object to a new
object), but if the user provides a value other than the ``fixed``
value, it will raise an error. This is semantically equivalent to
defining both:
- a ``clean()`` function that checks if the value matches the fixed
value, and
@ -62,29 +67,31 @@ class Property(object):
- ``def clean(self, value) -> any:``
- Return a value that is valid for this property. If ``value`` is not
valid for this property, this will attempt to transform it first. If
``value`` is not valid and no such transformation is possible, it should
raise a ValueError.
``value`` is not valid and no such transformation is possible, it
should raise a ValueError.
- ``def default(self):``
- provide a default value for this property.
- ``default()`` can return the special value ``NOW`` to use the current
time. This is useful when several timestamps in the same object need
to use the same default value, so calling now() for each property--
likely several microseconds apart-- does not work.
time. This is useful when several timestamps in the same object
need to use the same default value, so calling now() for each
property-- likely several microseconds apart-- does not work.
Subclasses can instead provide a lambda function for ``default`` as a keyword
argument. ``clean`` should not be provided as a lambda since lambdas cannot
raise their own exceptions.
Subclasses can instead provide a lambda function for ``default`` as a
keyword argument. ``clean`` should not be provided as a lambda since
lambdas cannot raise their own exceptions.
When instantiating Properties, ``required`` and ``default`` should not be
used together. ``default`` implies that the property is required in the
specification so this function will be used to supply a value if none is
provided. ``required`` means that the user must provide this; it is
required in the specification and we can't or don't want to create a
default value.
When instantiating Properties, ``required`` and ``default`` should not be used
together. ``default`` implies that the property is required in the specification
so this function will be used to supply a value if none is provided.
``required`` means that the user must provide this; it is required in the
specification and we can't or don't want to create a default value.
"""
def _default_clean(self, value):
if value != self._fixed_value:
raise ValueError("must equal '{0}'.".format(self._fixed_value))
raise ValueError("must equal '{}'.".format(self._fixed_value))
return value
def __init__(self, required=False, fixed=None, default=None):
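As an illustrative sketch of the contract described in the docstring above (PortProperty is hypothetical, not part of the library):

class PortProperty(Property):
    """Hypothetical property limited to valid TCP/UDP port numbers."""

    def clean(self, value):
        value = int(value)  # attempt the transformation first, per the contract
        if not 0 <= value <= 65535:
            raise ValueError("must be a port number between 0 and 65535.")
        return value

    def default(self):
        return 80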
@ -143,7 +150,7 @@ class ListProperty(Property):
if type(self.contained) is EmbeddedObjectProperty:
obj_type = self.contained.type
elif type(self.contained).__name__ is 'STIXObjectProperty':
elif type(self.contained).__name__ is "STIXObjectProperty":
# ^ this way of checking doesn't require a circular import
# valid is already an instance of a python-stix2 class; no need
# to turn it into a dictionary and then pass it to the class
@ -191,7 +198,7 @@ class IDProperty(Property):
def clean(self, value):
if not value.startswith(self.required_prefix):
raise ValueError("must start with '{0}'.".format(self.required_prefix))
raise ValueError("must start with '{}'.".format(self.required_prefix))
if hasattr(self, 'interoperability') and self.interoperability:
if not ID_REGEX_interoperability.match(value):
raise ValueError(ERROR_INVALID_ID)
@ -206,21 +213,51 @@ class IDProperty(Property):
class IntegerProperty(Property):
def __init__(self, min=None, max=None, **kwargs):
self.min = min
self.max = max
super(IntegerProperty, self).__init__(**kwargs)
def clean(self, value):
try:
return int(value)
value = int(value)
except Exception:
raise ValueError("must be an integer.")
if self.min is not None and value < self.min:
msg = "minimum value is {}. received {}".format(self.min, value)
raise ValueError(msg)
if self.max is not None and value > self.max:
msg = "maximum value is {}. received {}".format(self.max, value)
raise ValueError(msg)
return value
class FloatProperty(Property):
def __init__(self, min=None, max=None, **kwargs):
self.min = min
self.max = max
super(FloatProperty, self).__init__(**kwargs)
def clean(self, value):
try:
return float(value)
value = float(value)
except Exception:
raise ValueError("must be a float.")
if self.min is not None and value < self.min:
msg = "minimum value is {}. received {}".format(self.min, value)
raise ValueError(msg)
if self.max is not None and value > self.max:
msg = "maximum value is {}. received {}".format(self.max, value)
raise ValueError(msg)
return value
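Illustrative behaviour of the new bounds; clean() is what gets called when a property value is validated:

prop = IntegerProperty(min=0, max=10)
prop.clean("7")   # -> 7, coerced to int first
prop.clean(42)    # raises ValueError: maximum value is 10. received 42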
class BooleanProperty(Property):
@ -228,8 +265,8 @@ class BooleanProperty(Property):
if isinstance(value, bool):
return value
trues = ['true', 't']
falses = ['false', 'f']
trues = ['true', 't', '1']
falses = ['false', 'f', '0']
try:
if value.lower() in trues:
return True
@ -256,6 +293,10 @@ class TimestampProperty(Property):
class DictionaryProperty(Property):
def __init__(self, spec_version='2.0', **kwargs):
self.spec_version = spec_version
super(DictionaryProperty, self).__init__(**kwargs)
def clean(self, value):
try:
dictified = _get_dict(value)
@ -263,35 +304,40 @@ class DictionaryProperty(Property):
raise ValueError("The dictionary property must contain a dictionary")
if dictified == {}:
raise ValueError("The dictionary property must contain a non-empty dictionary")
for k in dictified.keys():
if len(k) < 3:
raise DictionaryKeyError(k, "shorter than 3 characters")
elif len(k) > 256:
raise DictionaryKeyError(k, "longer than 256 characters")
if not re.match('^[a-zA-Z0-9_-]+$', k):
raise DictionaryKeyError(k, "contains characters other than"
"lowercase a-z, uppercase A-Z, "
"numerals 0-9, hyphen (-), or "
"underscore (_)")
if self.spec_version == '2.0':
if len(k) < 3:
raise DictionaryKeyError(k, "shorter than 3 characters")
elif len(k) > 256:
raise DictionaryKeyError(k, "longer than 256 characters")
elif self.spec_version == '2.1':
if len(k) > 250:
raise DictionaryKeyError(k, "longer than 250 characters")
if not re.match(r"^[a-zA-Z0-9_-]+$", k):
msg = (
"contains characters other than lowercase a-z, "
"uppercase A-Z, numerals 0-9, hyphen (-), or "
"underscore (_)"
)
raise DictionaryKeyError(k, msg)
return dictified
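A quick sketch of the spec-version-dependent key rules:

DictionaryProperty(spec_version='2.0').clean({'ab': 'x'})   # raises DictionaryKeyError (shorter than 3 characters)
DictionaryProperty(spec_version='2.1').clean({'ab': 'x'})   # returns {'ab': 'x'}; 2.1 only enforces the 250-character maximum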
HASHES_REGEX = {
"MD5": ("^[a-fA-F0-9]{32}$", "MD5"),
"MD6": ("^[a-fA-F0-9]{32}|[a-fA-F0-9]{40}|[a-fA-F0-9]{56}|[a-fA-F0-9]{64}|[a-fA-F0-9]{96}|[a-fA-F0-9]{128}$", "MD6"),
"RIPEMD160": ("^[a-fA-F0-9]{40}$", "RIPEMD-160"),
"SHA1": ("^[a-fA-F0-9]{40}$", "SHA-1"),
"SHA224": ("^[a-fA-F0-9]{56}$", "SHA-224"),
"SHA256": ("^[a-fA-F0-9]{64}$", "SHA-256"),
"SHA384": ("^[a-fA-F0-9]{96}$", "SHA-384"),
"SHA512": ("^[a-fA-F0-9]{128}$", "SHA-512"),
"SHA3224": ("^[a-fA-F0-9]{56}$", "SHA3-224"),
"SHA3256": ("^[a-fA-F0-9]{64}$", "SHA3-256"),
"SHA3384": ("^[a-fA-F0-9]{96}$", "SHA3-384"),
"SHA3512": ("^[a-fA-F0-9]{128}$", "SHA3-512"),
"SSDEEP": ("^[a-zA-Z0-9/+:.]{1,128}$", "ssdeep"),
"WHIRLPOOL": ("^[a-fA-F0-9]{128}$", "WHIRLPOOL"),
"MD5": (r"^[a-fA-F0-9]{32}$", "MD5"),
"MD6": (r"^[a-fA-F0-9]{32}|[a-fA-F0-9]{40}|[a-fA-F0-9]{56}|[a-fA-F0-9]{64}|[a-fA-F0-9]{96}|[a-fA-F0-9]{128}$", "MD6"),
"RIPEMD160": (r"^[a-fA-F0-9]{40}$", "RIPEMD-160"),
"SHA1": (r"^[a-fA-F0-9]{40}$", "SHA-1"),
"SHA224": (r"^[a-fA-F0-9]{56}$", "SHA-224"),
"SHA256": (r"^[a-fA-F0-9]{64}$", "SHA-256"),
"SHA384": (r"^[a-fA-F0-9]{96}$", "SHA-384"),
"SHA512": (r"^[a-fA-F0-9]{128}$", "SHA-512"),
"SHA3224": (r"^[a-fA-F0-9]{56}$", "SHA3-224"),
"SHA3256": (r"^[a-fA-F0-9]{64}$", "SHA3-256"),
"SHA3384": (r"^[a-fA-F0-9]{96}$", "SHA3-384"),
"SHA3512": (r"^[a-fA-F0-9]{128}$", "SHA3-512"),
"SSDEEP": (r"^[a-zA-Z0-9/+:.]{1,128}$", "ssdeep"),
"WHIRLPOOL": (r"^[a-fA-F0-9]{128}$", "WHIRLPOOL"),
}
@ -304,7 +350,7 @@ class HashesProperty(DictionaryProperty):
if key in HASHES_REGEX:
vocab_key = HASHES_REGEX[key][1]
if not re.match(HASHES_REGEX[key][0], v):
raise ValueError("'%s' is not a valid %s hash" % (v, vocab_key))
raise ValueError("'{0}' is not a valid {1} hash".format(v, vocab_key))
if k != vocab_key:
clean_dict[vocab_key] = clean_dict[k]
del clean_dict[k]
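The effect of the vocabulary-key normalization, roughly:

HashesProperty().clean({"SHA256": "aec070645fe53ee3b3763059376134f058cc337247c978add178b6ccdfb0019f"})
# -> {"SHA-256": "aec070645fe53ee3b3763059376134f058cc337247c978add178b6ccdfb0019f"}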
@ -324,7 +370,7 @@ class BinaryProperty(Property):
class HexProperty(Property):
def clean(self, value):
if not re.match('^([a-fA-F0-9]{2})+$', value):
if not re.match(r"^([a-fA-F0-9]{2})+$", value):
raise ValueError("must contain an even number of hexadecimal characters")
return value
@ -344,7 +390,7 @@ class ReferenceProperty(Property):
value = str(value)
if self.type:
if not value.startswith(self.type):
raise ValueError("must start with '{0}'.".format(self.type))
raise ValueError("must start with '{}'.".format(self.type))
if hasattr(self, 'interoperability') and self.interoperability:
if not ID_REGEX_interoperability.match(value):
raise ValueError(ERROR_INVALID_ID)
@ -354,7 +400,7 @@ class ReferenceProperty(Property):
return value
SELECTOR_REGEX = re.compile("^[a-z0-9_-]{3,250}(\\.(\\[\\d+\\]|[a-z0-9_-]{1,250}))*$")
SELECTOR_REGEX = re.compile(r"^[a-z0-9_-]{3,250}(\.(\[\d+\]|[a-z0-9_-]{1,250}))*$")
class SelectorProperty(Property):
@ -384,7 +430,7 @@ class EmbeddedObjectProperty(Property):
if type(value) is dict:
value = self.type(**value)
elif not isinstance(value, self.type):
raise ValueError("must be of type %s." % self.type.__name__)
raise ValueError("must be of type {}.".format(self.type.__name__))
return value
@ -399,7 +445,7 @@ class EnumProperty(StringProperty):
def clean(self, value):
value = super(EnumProperty, self).clean(value)
if value not in self.allowed:
raise ValueError("value '%s' is not valid for this enumeration." % value)
raise ValueError("value '{}' is not valid for this enumeration.".format(value))
return self.string_type(value)
@ -412,3 +458,127 @@ class PatternProperty(StringProperty):
raise ValueError(str(errors[0]))
return self.string_type(value)
class ObservableProperty(Property):
"""Property for holding Cyber Observable Objects.
"""
def __init__(self, spec_version='2.0', allow_custom=False, *args, **kwargs):
self.allow_custom = allow_custom
self.spec_version = spec_version
super(ObservableProperty, self).__init__(*args, **kwargs)
def clean(self, value):
try:
dictified = _get_dict(value)
# get a deep copy since we are going to modify the dict, and we might
# otherwise modify the original dict, as _get_dict() does not return a
# new dict when passed a dict
dictified = copy.deepcopy(dictified)
except ValueError:
raise ValueError("The observable property must contain a dictionary")
if dictified == {}:
raise ValueError("The observable property must contain a non-empty dictionary")
valid_refs = dict((k, v['type']) for (k, v) in dictified.items())
for key, obj in dictified.items():
parsed_obj = parse_observable(
obj,
valid_refs,
allow_custom=self.allow_custom,
version=self.spec_version,
)
dictified[key] = parsed_obj
return dictified
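A rough sketch of what clean() does with a dictionary of observables (the object values are borrowed from the observed-data example elsewhere in this changeset):

prop = ObservableProperty(spec_version='2.0')
objs = prop.clean({
    "0": {"type": "file", "name": "foo.exe"},
    "1": {"type": "ipv4-addr", "value": "198.51.100.3"},
})
# each entry is now a parsed Cyber Observable object, e.g. objs["0"] is a File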
class ExtensionsProperty(DictionaryProperty):
"""Property for representing extensions on Observable objects.
"""
def __init__(self, spec_version='2.0', allow_custom=False, enclosing_type=None, required=False):
self.allow_custom = allow_custom
self.enclosing_type = enclosing_type
super(ExtensionsProperty, self).__init__(spec_version=spec_version, required=required)
def clean(self, value):
try:
dictified = _get_dict(value)
# get a deep copy since we are going to modify the dict, and we might
# otherwise modify the original dict, as _get_dict() does not return a
# new dict when passed a dict
dictified = copy.deepcopy(dictified)
except ValueError:
raise ValueError("The extensions property must contain a dictionary")
if dictified == {}:
raise ValueError("The extensions property must contain a non-empty dictionary")
v = 'v' + self.spec_version.replace('.', '')
specific_type_map = STIX2_OBJ_MAPS[v]['observable-extensions'].get(self.enclosing_type, {})
for key, subvalue in dictified.items():
if key in specific_type_map:
cls = specific_type_map[key]
if type(subvalue) is dict:
if self.allow_custom:
subvalue['allow_custom'] = True
dictified[key] = cls(**subvalue)
else:
dictified[key] = cls(**subvalue)
elif type(subvalue) is cls:
# If already an instance of an _Extension class, assume it's valid
dictified[key] = subvalue
else:
raise ValueError("Cannot determine extension type.")
else:
raise CustomContentError("Can't parse unknown extension type: {}".format(key))
return dictified
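Sketching the lookup against the registered extension map; the extension values reuse the http-request-ext data appearing in the tests below:

prop = ExtensionsProperty(spec_version='2.0', enclosing_type='network-traffic')
exts = prop.clean({
    "http-request-ext": {
        "request_method": "get",
        "request_value": "/download.html",
    },
})
# exts["http-request-ext"] is now an HTTPRequestExt instance; an unknown key
# would raise CustomContentError instead.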
class STIXObjectProperty(Property):
def __init__(self, spec_version='2.0', allow_custom=False, interoperability=False, *args, **kwargs):
self.allow_custom = allow_custom
self.spec_version = spec_version
self.interoperability = interoperability
super(STIXObjectProperty, self).__init__(*args, **kwargs)
def clean(self, value):
# Any STIX Object (SDO, SRO, or Marking Definition) can be added to
# a bundle with no further checks.
if any(x in ('STIXDomainObject', 'STIXRelationshipObject', 'MarkingDefinition')
for x in get_class_hierarchy_names(value)):
# A simple "is this a spec version 2.1+ object" test. For now,
# limit 2.0 bundles to 2.0 objects. It's not possible yet to
# have validation co-constraints among properties, e.g. have
# validation here depend on the value of another property
# (spec_version). So this is a hack, and not technically spec-
# compliant.
if 'spec_version' in value and self.spec_version == '2.0':
raise ValueError(
"Spec version 2.0 bundles don't yet support "
"containing objects of a different spec "
"version.",
)
return value
try:
dictified = _get_dict(value)
except ValueError:
raise ValueError("This property may only contain a dictionary or object")
if dictified == {}:
raise ValueError("This property may only contain a non-empty dictionary or object")
if 'type' in dictified and dictified['type'] == 'bundle':
raise ValueError("This property may not contain a Bundle object")
if 'spec_version' in dictified and self.spec_version == '2.0':
# See above comment regarding spec_version.
raise ValueError(
"Spec version 2.0 bundles don't yet support "
"containing objects of a different spec version.",
)
parsed_obj = parse(dictified, allow_custom=self.allow_custom, interoperability=self.interoperability)
return parsed_obj
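A rough sketch of the two paths through clean(): a plain dict is parsed into a STIX object, while an object that carries spec_version (i.e. 2.1-style) is rejected when the property belongs to a 2.0 bundle. The identity values are placeholders:

prop = STIXObjectProperty(spec_version='2.0')

obj = prop.clean({
    "type": "identity",
    "id": "identity--311b2d2d-f010-4473-83ec-1edf84858f4c",
    "created": "2015-12-21T19:59:11Z",
    "modified": "2015-12-21T19:59:11Z",
    "name": "ACME Corp",
    "identity_class": "organization",
})  # -> an Identity object

prop.clean({"type": "identity", "spec_version": "2.1", "name": "ACME Corp"})
# raises ValueError: a 2.0 bundle can't contain objects of a different spec version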

View File

@ -1,379 +0,0 @@
import datetime
import pytest
import stix2
def test_create_comparison_expression():
exp = stix2.EqualityComparisonExpression("file:hashes.'SHA-256'",
stix2.HashConstant("aec070645fe53ee3b3763059376134f058cc337247c978add178b6ccdfb0019f", "SHA-256")) # noqa
assert str(exp) == "file:hashes.'SHA-256' = 'aec070645fe53ee3b3763059376134f058cc337247c978add178b6ccdfb0019f'"
def test_boolean_expression():
exp1 = stix2.MatchesComparisonExpression("email-message:from_ref.value",
stix2.StringConstant(".+\\@example\\.com$"))
exp2 = stix2.MatchesComparisonExpression("email-message:body_multipart[*].body_raw_ref.name",
stix2.StringConstant("^Final Report.+\\.exe$"))
exp = stix2.AndBooleanExpression([exp1, exp2])
assert str(exp) == "email-message:from_ref.value MATCHES '.+\\\\@example\\\\.com$' AND email-message:body_multipart[*].body_raw_ref.name MATCHES '^Final Report.+\\\\.exe$'" # noqa
def test_boolean_expression_with_parentheses():
exp1 = stix2.MatchesComparisonExpression(stix2.ObjectPath("email-message",
[stix2.ReferenceObjectPathComponent("from_ref"),
stix2.BasicObjectPathComponent("value")]),
stix2.StringConstant(".+\\@example\\.com$"))
exp2 = stix2.MatchesComparisonExpression("email-message:body_multipart[*].body_raw_ref.name",
stix2.StringConstant("^Final Report.+\\.exe$"))
exp = stix2.ParentheticalExpression(stix2.AndBooleanExpression([exp1, exp2]))
assert str(exp) == "(email-message:from_ref.value MATCHES '.+\\\\@example\\\\.com$' AND email-message:body_multipart[*].body_raw_ref.name MATCHES '^Final Report.+\\\\.exe$')" # noqa
def test_hash_followed_by_registryKey_expression_python_constant():
hash_exp = stix2.EqualityComparisonExpression("file:hashes.MD5",
stix2.HashConstant("79054025255fb1a26e4bc422aef54eb4", "MD5"))
o_exp1 = stix2.ObservationExpression(hash_exp)
reg_exp = stix2.EqualityComparisonExpression(stix2.ObjectPath("windows-registry-key", ["key"]),
stix2.StringConstant("HKEY_LOCAL_MACHINE\\foo\\bar"))
o_exp2 = stix2.ObservationExpression(reg_exp)
fb_exp = stix2.FollowedByObservationExpression([o_exp1, o_exp2])
para_exp = stix2.ParentheticalExpression(fb_exp)
qual_exp = stix2.WithinQualifier(300)
exp = stix2.QualifiedObservationExpression(para_exp, qual_exp)
assert str(exp) == "([file:hashes.MD5 = '79054025255fb1a26e4bc422aef54eb4'] FOLLOWEDBY [windows-registry-key:key = 'HKEY_LOCAL_MACHINE\\\\foo\\\\bar']) WITHIN 300 SECONDS" # noqa
def test_hash_followed_by_registryKey_expression():
hash_exp = stix2.EqualityComparisonExpression("file:hashes.MD5",
stix2.HashConstant("79054025255fb1a26e4bc422aef54eb4", "MD5"))
o_exp1 = stix2.ObservationExpression(hash_exp)
reg_exp = stix2.EqualityComparisonExpression(stix2.ObjectPath("windows-registry-key", ["key"]),
stix2.StringConstant("HKEY_LOCAL_MACHINE\\foo\\bar"))
o_exp2 = stix2.ObservationExpression(reg_exp)
fb_exp = stix2.FollowedByObservationExpression([o_exp1, o_exp2])
para_exp = stix2.ParentheticalExpression(fb_exp)
qual_exp = stix2.WithinQualifier(stix2.IntegerConstant(300))
exp = stix2.QualifiedObservationExpression(para_exp, qual_exp)
assert str(exp) == "([file:hashes.MD5 = '79054025255fb1a26e4bc422aef54eb4'] FOLLOWEDBY [windows-registry-key:key = 'HKEY_LOCAL_MACHINE\\\\foo\\\\bar']) WITHIN 300 SECONDS" # noqa
def test_file_observable_expression():
exp1 = stix2.EqualityComparisonExpression("file:hashes.'SHA-256'",
stix2.HashConstant(
"aec070645fe53ee3b3763059376134f058cc337247c978add178b6ccdfb0019f",
'SHA-256'))
exp2 = stix2.EqualityComparisonExpression("file:mime_type", stix2.StringConstant("application/x-pdf"))
bool_exp = stix2.ObservationExpression(stix2.AndBooleanExpression([exp1, exp2]))
assert str(bool_exp) == "[file:hashes.'SHA-256' = 'aec070645fe53ee3b3763059376134f058cc337247c978add178b6ccdfb0019f' AND file:mime_type = 'application/x-pdf']" # noqa
@pytest.mark.parametrize("observation_class, op", [
(stix2.AndObservationExpression, 'AND'),
(stix2.OrObservationExpression, 'OR'),
])
def test_multiple_file_observable_expression(observation_class, op):
exp1 = stix2.EqualityComparisonExpression("file:hashes.'SHA-256'",
stix2.HashConstant(
"bf07a7fbb825fc0aae7bf4a1177b2b31fcf8a3feeaf7092761e18c859ee52a9c",
'SHA-256'))
exp2 = stix2.EqualityComparisonExpression("file:hashes.MD5",
stix2.HashConstant("cead3f77f6cda6ec00f57d76c9a6879f", "MD5"))
bool1_exp = stix2.OrBooleanExpression([exp1, exp2])
exp3 = stix2.EqualityComparisonExpression("file:hashes.'SHA-256'",
stix2.HashConstant(
"aec070645fe53ee3b3763059376134f058cc337247c978add178b6ccdfb0019f",
'SHA-256'))
op1_exp = stix2.ObservationExpression(bool1_exp)
op2_exp = stix2.ObservationExpression(exp3)
exp = observation_class([op1_exp, op2_exp])
assert str(exp) == "[file:hashes.'SHA-256' = 'bf07a7fbb825fc0aae7bf4a1177b2b31fcf8a3feeaf7092761e18c859ee52a9c' OR file:hashes.MD5 = 'cead3f77f6cda6ec00f57d76c9a6879f'] {} [file:hashes.'SHA-256' = 'aec070645fe53ee3b3763059376134f058cc337247c978add178b6ccdfb0019f']".format(op) # noqa
def test_root_types():
ast = stix2.ObservationExpression(
stix2.AndBooleanExpression(
[stix2.ParentheticalExpression(
stix2.OrBooleanExpression([
stix2.EqualityComparisonExpression("a:b", stix2.StringConstant("1")),
stix2.EqualityComparisonExpression("b:c", stix2.StringConstant("2"))])),
stix2.EqualityComparisonExpression(u"b:d", stix2.StringConstant("3"))]))
assert str(ast) == "[(a:b = '1' OR b:c = '2') AND b:d = '3']"
def test_artifact_payload():
exp1 = stix2.EqualityComparisonExpression("artifact:mime_type",
"application/vnd.tcpdump.pcap")
exp2 = stix2.MatchesComparisonExpression("artifact:payload_bin",
stix2.StringConstant("\\xd4\\xc3\\xb2\\xa1\\x02\\x00\\x04\\x00"))
and_exp = stix2.ObservationExpression(stix2.AndBooleanExpression([exp1, exp2]))
assert str(and_exp) == "[artifact:mime_type = 'application/vnd.tcpdump.pcap' AND artifact:payload_bin MATCHES '\\\\xd4\\\\xc3\\\\xb2\\\\xa1\\\\x02\\\\x00\\\\x04\\\\x00']" # noqa
def test_greater_than_python_constant():
exp1 = stix2.GreaterThanComparisonExpression("file:extensions.windows-pebinary-ext.sections[*].entropy", 7.0)
exp = stix2.ObservationExpression(exp1)
assert str(exp) == "[file:extensions.windows-pebinary-ext.sections[*].entropy > 7.0]"
def test_greater_than():
exp1 = stix2.GreaterThanComparisonExpression("file:extensions.windows-pebinary-ext.sections[*].entropy",
stix2.FloatConstant(7.0))
exp = stix2.ObservationExpression(exp1)
assert str(exp) == "[file:extensions.windows-pebinary-ext.sections[*].entropy > 7.0]"
def test_less_than():
exp = stix2.LessThanComparisonExpression("file:size", 1024)
assert str(exp) == "file:size < 1024"
def test_greater_than_or_equal():
exp = stix2.GreaterThanEqualComparisonExpression("file:size",
1024)
assert str(exp) == "file:size >= 1024"
def test_less_than_or_equal():
exp = stix2.LessThanEqualComparisonExpression("file:size",
1024)
assert str(exp) == "file:size <= 1024"
def test_not():
exp = stix2.LessThanComparisonExpression("file:size",
1024,
negated=True)
assert str(exp) == "file:size NOT < 1024"
def test_and_observable_expression():
exp1 = stix2.AndBooleanExpression([stix2.EqualityComparisonExpression("user-account:account_type",
"unix"),
stix2.EqualityComparisonExpression("user-account:user_id",
stix2.StringConstant("1007")),
stix2.EqualityComparisonExpression("user-account:account_login",
"Peter")])
exp2 = stix2.AndBooleanExpression([stix2.EqualityComparisonExpression("user-account:account_type",
"unix"),
stix2.EqualityComparisonExpression("user-account:user_id",
stix2.StringConstant("1008")),
stix2.EqualityComparisonExpression("user-account:account_login",
"Paul")])
exp3 = stix2.AndBooleanExpression([stix2.EqualityComparisonExpression("user-account:account_type",
"unix"),
stix2.EqualityComparisonExpression("user-account:user_id",
stix2.StringConstant("1009")),
stix2.EqualityComparisonExpression("user-account:account_login",
"Mary")])
exp = stix2.AndObservationExpression([stix2.ObservationExpression(exp1),
stix2.ObservationExpression(exp2),
stix2.ObservationExpression(exp3)])
assert str(exp) == "[user-account:account_type = 'unix' AND user-account:user_id = '1007' AND user-account:account_login = 'Peter'] AND [user-account:account_type = 'unix' AND user-account:user_id = '1008' AND user-account:account_login = 'Paul'] AND [user-account:account_type = 'unix' AND user-account:user_id = '1009' AND user-account:account_login = 'Mary']" # noqa
def test_invalid_and_observable_expression():
with pytest.raises(ValueError) as excinfo:
stix2.AndBooleanExpression([stix2.EqualityComparisonExpression("user-account:display_name",
"admin"),
stix2.EqualityComparisonExpression("email-addr:display_name",
stix2.StringConstant("admin"))])
assert "All operands to an 'AND' expression must have the same object type" in str(excinfo)
def test_hex():
exp_and = stix2.AndBooleanExpression([stix2.EqualityComparisonExpression("file:mime_type",
"image/bmp"),
stix2.EqualityComparisonExpression("file:magic_number_hex",
stix2.HexConstant("ffd8"))])
exp = stix2.ObservationExpression(exp_and)
assert str(exp) == "[file:mime_type = 'image/bmp' AND file:magic_number_hex = h'ffd8']"
def test_multiple_qualifiers():
exp_and = stix2.AndBooleanExpression([stix2.EqualityComparisonExpression("network-traffic:dst_ref.type",
"domain-name"),
stix2.EqualityComparisonExpression("network-traffic:dst_ref.value",
"example.com")])
exp_ob = stix2.ObservationExpression(exp_and)
qual_rep = stix2.RepeatQualifier(5)
qual_within = stix2.WithinQualifier(stix2.IntegerConstant(1800))
exp = stix2.QualifiedObservationExpression(stix2.QualifiedObservationExpression(exp_ob, qual_rep), qual_within)
assert str(exp) == "[network-traffic:dst_ref.type = 'domain-name' AND network-traffic:dst_ref.value = 'example.com'] REPEATS 5 TIMES WITHIN 1800 SECONDS" # noqa
def test_set_op():
exp = stix2.ObservationExpression(stix2.IsSubsetComparisonExpression("network-traffic:dst_ref.value",
"2001:0db8:dead:beef:0000:0000:0000:0000/64"))
assert str(exp) == "[network-traffic:dst_ref.value ISSUBSET '2001:0db8:dead:beef:0000:0000:0000:0000/64']"
def test_timestamp():
ts = stix2.TimestampConstant('2014-01-13T07:03:17Z')
assert str(ts) == "t'2014-01-13T07:03:17Z'"
def test_boolean():
exp = stix2.EqualityComparisonExpression("email-message:is_multipart",
True)
assert str(exp) == "email-message:is_multipart = true"
def test_binary():
const = stix2.BinaryConstant("dGhpcyBpcyBhIHRlc3Q=")
exp = stix2.EqualityComparisonExpression("artifact:payload_bin",
const)
assert str(exp) == "artifact:payload_bin = b'dGhpcyBpcyBhIHRlc3Q='"
def test_list():
exp = stix2.InComparisonExpression("process:name",
['proccy', 'proximus', 'badproc'])
assert str(exp) == "process:name IN ('proccy', 'proximus', 'badproc')"
def test_list2():
# alternate way to construct an "IN" Comparison Expression
exp = stix2.EqualityComparisonExpression("process:name",
['proccy', 'proximus', 'badproc'])
assert str(exp) == "process:name IN ('proccy', 'proximus', 'badproc')"
def test_invalid_constant_type():
with pytest.raises(ValueError) as excinfo:
stix2.EqualityComparisonExpression("artifact:payload_bin",
{'foo': 'bar'})
assert 'Unable to create a constant' in str(excinfo)
def test_invalid_integer_constant():
with pytest.raises(ValueError) as excinfo:
stix2.IntegerConstant('foo')
assert 'must be an integer' in str(excinfo)
def test_invalid_timestamp_constant():
with pytest.raises(ValueError) as excinfo:
stix2.TimestampConstant('foo')
assert 'Must be a datetime object or timestamp string' in str(excinfo)
def test_invalid_float_constant():
with pytest.raises(ValueError) as excinfo:
stix2.FloatConstant('foo')
assert 'must be a float' in str(excinfo)
@pytest.mark.parametrize("data, result", [
(True, True),
(False, False),
('True', True),
('False', False),
('true', True),
('false', False),
('t', True),
('f', False),
('T', True),
('F', False),
(1, True),
(0, False),
])
def test_boolean_constant(data, result):
boolean = stix2.BooleanConstant(data)
assert boolean.value == result
def test_invalid_boolean_constant():
with pytest.raises(ValueError) as excinfo:
stix2.BooleanConstant('foo')
assert 'must be a boolean' in str(excinfo)
@pytest.mark.parametrize("hashtype, data", [
('MD5', 'zzz'),
('ssdeep', 'zzz=='),
])
def test_invalid_hash_constant(hashtype, data):
with pytest.raises(ValueError) as excinfo:
stix2.HashConstant(data, hashtype)
assert 'is not a valid {} hash'.format(hashtype) in str(excinfo)
def test_invalid_hex_constant():
with pytest.raises(ValueError) as excinfo:
stix2.HexConstant('mm')
assert "must contain an even number of hexadecimal characters" in str(excinfo)
def test_invalid_binary_constant():
with pytest.raises(ValueError) as excinfo:
stix2.BinaryConstant('foo')
assert 'must contain a base64' in str(excinfo)
def test_escape_quotes_and_backslashes():
exp = stix2.MatchesComparisonExpression("file:name",
"^Final Report.+\\.exe$")
assert str(exp) == "file:name MATCHES '^Final Report.+\\\\.exe$'"
def test_like():
exp = stix2.LikeComparisonExpression("directory:path",
"C:\\Windows\\%\\foo")
assert str(exp) == "directory:path LIKE 'C:\\\\Windows\\\\%\\\\foo'"
def test_issuperset():
exp = stix2.IsSupersetComparisonExpression("ipv4-addr:value",
"198.51.100.0/24")
assert str(exp) == "ipv4-addr:value ISSUPERSET '198.51.100.0/24'"
def test_repeat_qualifier():
qual = stix2.RepeatQualifier(stix2.IntegerConstant(5))
assert str(qual) == 'REPEATS 5 TIMES'
def test_invalid_repeat_qualifier():
with pytest.raises(ValueError) as excinfo:
stix2.RepeatQualifier('foo')
assert 'is not a valid argument for a Repeat Qualifier' in str(excinfo)
def test_invalid_within_qualifier():
with pytest.raises(ValueError) as excinfo:
stix2.WithinQualifier('foo')
assert 'is not a valid argument for a Within Qualifier' in str(excinfo)
def test_startstop_qualifier():
qual = stix2.StartStopQualifier(stix2.TimestampConstant('2016-06-01T00:00:00Z'),
datetime.datetime(2017, 3, 12, 8, 30, 0))
assert str(qual) == "START t'2016-06-01T00:00:00Z' STOP t'2017-03-12T08:30:00Z'"
qual2 = stix2.StartStopQualifier(datetime.date(2016, 6, 1),
stix2.TimestampConstant('2016-07-01T00:00:00Z'))
assert str(qual2) == "START t'2016-06-01T00:00:00Z' STOP t'2016-07-01T00:00:00Z'"
def test_invalid_startstop_qualifier():
with pytest.raises(ValueError) as excinfo:
stix2.StartStopQualifier('foo',
stix2.TimestampConstant('2016-06-01T00:00:00Z'))
assert 'is not a valid argument for a Start/Stop Qualifier' in str(excinfo)
with pytest.raises(ValueError) as excinfo:
stix2.StartStopQualifier(datetime.date(2016, 6, 1),
'foo')
assert 'is not a valid argument for a Start/Stop Qualifier' in str(excinfo)
def test_make_constant_already_a_constant():
str_const = stix2.StringConstant('Foo')
result = stix2.patterns.make_constant(str_const)
assert result is str_const

View File

@ -1,210 +0,0 @@
# -*- coding: utf-8 -*-
import datetime as dt
from io import StringIO
import pytest
import pytz
import stix2.utils
amsterdam = pytz.timezone('Europe/Amsterdam')
eastern = pytz.timezone('US/Eastern')
@pytest.mark.parametrize('dttm, timestamp', [
(dt.datetime(2017, 1, 1, tzinfo=pytz.utc), '2017-01-01T00:00:00Z'),
(amsterdam.localize(dt.datetime(2017, 1, 1)), '2016-12-31T23:00:00Z'),
(eastern.localize(dt.datetime(2017, 1, 1, 12, 34, 56)), '2017-01-01T17:34:56Z'),
(eastern.localize(dt.datetime(2017, 7, 1)), '2017-07-01T04:00:00Z'),
(dt.datetime(2017, 7, 1), '2017-07-01T00:00:00Z'),
(dt.datetime(2017, 7, 1, 0, 0, 0, 1), '2017-07-01T00:00:00.000001Z'),
(stix2.utils.STIXdatetime(2017, 7, 1, 0, 0, 0, 1, precision='millisecond'), '2017-07-01T00:00:00.000Z'),
(stix2.utils.STIXdatetime(2017, 7, 1, 0, 0, 0, 1, precision='second'), '2017-07-01T00:00:00Z'),
])
def test_timestamp_formatting(dttm, timestamp):
assert stix2.utils.format_datetime(dttm) == timestamp
@pytest.mark.parametrize('timestamp, dttm', [
(dt.datetime(2017, 1, 1, 0, tzinfo=pytz.utc), dt.datetime(2017, 1, 1, 0, 0, 0, tzinfo=pytz.utc)),
(dt.date(2017, 1, 1), dt.datetime(2017, 1, 1, 0, 0, 0, tzinfo=pytz.utc)),
('2017-01-01T00:00:00Z', dt.datetime(2017, 1, 1, 0, 0, 0, tzinfo=pytz.utc)),
('2017-01-01T02:00:00+2:00', dt.datetime(2017, 1, 1, 0, 0, 0, tzinfo=pytz.utc)),
('2017-01-01T00:00:00', dt.datetime(2017, 1, 1, 0, 0, 0, tzinfo=pytz.utc)),
])
def test_parse_datetime(timestamp, dttm):
assert stix2.utils.parse_into_datetime(timestamp) == dttm
@pytest.mark.parametrize('timestamp, dttm, precision', [
('2017-01-01T01:02:03.000001', dt.datetime(2017, 1, 1, 1, 2, 3, 0, tzinfo=pytz.utc), 'millisecond'),
('2017-01-01T01:02:03.001', dt.datetime(2017, 1, 1, 1, 2, 3, 1000, tzinfo=pytz.utc), 'millisecond'),
('2017-01-01T01:02:03.1', dt.datetime(2017, 1, 1, 1, 2, 3, 100000, tzinfo=pytz.utc), 'millisecond'),
('2017-01-01T01:02:03.45', dt.datetime(2017, 1, 1, 1, 2, 3, 450000, tzinfo=pytz.utc), 'millisecond'),
('2017-01-01T01:02:03.45', dt.datetime(2017, 1, 1, 1, 2, 3, tzinfo=pytz.utc), 'second'),
])
def test_parse_datetime_precision(timestamp, dttm, precision):
assert stix2.utils.parse_into_datetime(timestamp, precision) == dttm
@pytest.mark.parametrize('ts', [
'foobar',
1,
])
def test_parse_datetime_invalid(ts):
with pytest.raises(ValueError):
stix2.utils.parse_into_datetime('foobar')
@pytest.mark.parametrize('data', [
{"a": 1},
'{"a": 1}',
StringIO(u'{"a": 1}'),
[("a", 1,)],
])
def test_get_dict(data):
assert stix2.utils._get_dict(data)
@pytest.mark.parametrize('data', [
1,
[1],
['a', 1],
"foobar",
])
def test_get_dict_invalid(data):
with pytest.raises(ValueError):
stix2.utils._get_dict(data)
@pytest.mark.parametrize('stix_id, type', [
('malware--d69c8146-ab35-4d50-8382-6fc80e641d43', 'malware'),
('intrusion-set--899ce53f-13a0-479b-a0e4-67d46e241542', 'intrusion-set')
])
def test_get_type_from_id(stix_id, type):
assert stix2.utils.get_type_from_id(stix_id) == type
def test_deduplicate(stix_objs1):
unique = stix2.utils.deduplicate(stix_objs1)
# Only 3 objects are unique
# 2 id's vary
# 2 modified times vary for a particular id
assert len(unique) == 3
ids = [obj['id'] for obj in unique]
mods = [obj['modified'] for obj in unique]
assert "indicator--00000000-0000-4000-8000-000000000001" in ids
assert "indicator--00000000-0000-4000-8000-000000000001" in ids
assert "2017-01-27T13:49:53.935Z" in mods
assert "2017-01-27T13:49:53.936Z" in mods
@pytest.mark.parametrize('object, tuple_to_find, expected_index', [
(stix2.ObservedData(
id="observed-data--b67d30ff-02ac-498a-92f9-32f845f448cf",
created_by_ref="identity--f431f809-377b-45e0-aa1c-6a4751cae5ff",
created="2016-04-06T19:58:16.000Z",
modified="2016-04-06T19:58:16.000Z",
first_observed="2015-12-21T19:00:00Z",
last_observed="2015-12-21T19:00:00Z",
number_observed=50,
objects={
"0": {
"name": "foo.exe",
"type": "file"
},
"1": {
"type": "ipv4-addr",
"value": "198.51.100.3"
},
"2": {
"type": "network-traffic",
"src_ref": "1",
"protocols": [
"tcp",
"http"
],
"extensions": {
"http-request-ext": {
"request_method": "get",
"request_value": "/download.html",
"request_version": "http/1.1",
"request_header": {
"Accept-Encoding": "gzip,deflate",
"User-Agent": "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.6) Gecko/20040113",
"Host": "www.example.com"
}
}
}
}
},
), ('1', {"type": "ipv4-addr", "value": "198.51.100.3"}), 1),
({
"type": "x-example",
"id": "x-example--d5413db2-c26c-42e0-b0e0-ec800a310bfb",
"created": "2018-06-11T01:25:22.063Z",
"modified": "2018-06-11T01:25:22.063Z",
"dictionary": {
"key": {
"key_one": "value",
"key_two": "value"
}
}
}, ('key', {'key_one': 'value', 'key_two': 'value'}), 0),
({
"type": "language-content",
"id": "language-content--b86bd89f-98bb-4fa9-8cb2-9ad421da981d",
"created": "2017-02-08T21:31:22.007Z",
"modified": "2017-02-08T21:31:22.007Z",
"object_ref": "campaign--12a111f0-b824-4baf-a224-83b80237a094",
"object_modified": "2017-02-08T21:31:22.007Z",
"contents": {
"de": {
"name": "Bank Angriff 1",
"description": "Weitere Informationen über Banküberfall"
},
"fr": {
"name": "Attaque Bank 1",
"description": "Plus d'informations sur la crise bancaire"
}
}
}, ('fr', {"name": "Attaque Bank 1", "description": "Plus d'informations sur la crise bancaire"}), 1)
])
def test_find_property_index(object, tuple_to_find, expected_index):
assert stix2.utils.find_property_index(
object,
*tuple_to_find
) == expected_index
@pytest.mark.parametrize('dict_value, tuple_to_find, expected_index', [
({
"contents": {
"de": {
"name": "Bank Angriff 1",
"description": "Weitere Informationen über Banküberfall"
},
"fr": {
"name": "Attaque Bank 1",
"description": "Plus d'informations sur la crise bancaire"
},
"es": {
"name": "Ataque al Banco",
"description": "Mas informacion sobre el ataque al banco"
}
}
}, ('es', {"name": "Ataque al Banco", "description": "Mas informacion sobre el ataque al banco"}), 1), # Sorted alphabetically
({
'my_list': [
{"key_one": 1},
{"key_two": 2}
]
}, ('key_one', 1), 0)
])
def test_iterate_over_values(dict_value, tuple_to_find, expected_index):
assert stix2.utils._find_property_in_seq(dict_value.values(), *tuple_to_find) == expected_index


@ -4,8 +4,9 @@ import pytest
import stix2
from .constants import (FAKE_TIME, INDICATOR_KWARGS, MALWARE_KWARGS,
RELATIONSHIP_KWARGS)
from .constants import (
FAKE_TIME, INDICATOR_KWARGS, MALWARE_KWARGS, RELATIONSHIP_KWARGS,
)
# Inspired by: http://stackoverflow.com/a/24006251
@ -35,17 +36,17 @@ def uuid4(monkeypatch):
@pytest.fixture
def indicator(uuid4, clock):
return stix2.Indicator(**INDICATOR_KWARGS)
return stix2.v20.Indicator(**INDICATOR_KWARGS)
@pytest.fixture
def malware(uuid4, clock):
return stix2.Malware(**MALWARE_KWARGS)
return stix2.v20.Malware(**MALWARE_KWARGS)
@pytest.fixture
def relationship(uuid4, clock):
return stix2.Relationship(**RELATIONSHIP_KWARGS)
return stix2.v20.Relationship(**RELATIONSHIP_KWARGS)
@pytest.fixture
@ -54,61 +55,61 @@ def stix_objs1():
"created": "2017-01-27T13:49:53.935Z",
"id": "indicator--00000000-0000-4000-8000-000000000001",
"labels": [
"url-watchlist"
"url-watchlist",
],
"modified": "2017-01-27T13:49:53.935Z",
"name": "Malicious site hosting downloader",
"pattern": "[url:value = 'http://x4z9arb.cn/4712']",
"type": "indicator",
"valid_from": "2017-01-27T13:49:53.935382Z"
"valid_from": "2017-01-27T13:49:53.935382Z",
}
ind2 = {
"created": "2017-01-27T13:49:53.935Z",
"id": "indicator--00000000-0000-4000-8000-000000000001",
"labels": [
"url-watchlist"
"url-watchlist",
],
"modified": "2017-01-27T13:49:53.935Z",
"name": "Malicious site hosting downloader",
"pattern": "[url:value = 'http://x4z9arb.cn/4712']",
"type": "indicator",
"valid_from": "2017-01-27T13:49:53.935382Z"
"valid_from": "2017-01-27T13:49:53.935382Z",
}
ind3 = {
"created": "2017-01-27T13:49:53.935Z",
"id": "indicator--00000000-0000-4000-8000-000000000001",
"labels": [
"url-watchlist"
"url-watchlist",
],
"modified": "2017-01-27T13:49:53.936Z",
"name": "Malicious site hosting downloader",
"pattern": "[url:value = 'http://x4z9arb.cn/4712']",
"type": "indicator",
"valid_from": "2017-01-27T13:49:53.935382Z"
"valid_from": "2017-01-27T13:49:53.935382Z",
}
ind4 = {
"created": "2017-01-27T13:49:53.935Z",
"id": "indicator--00000000-0000-4000-8000-000000000002",
"labels": [
"url-watchlist"
"url-watchlist",
],
"modified": "2017-01-27T13:49:53.935Z",
"name": "Malicious site hosting downloader",
"pattern": "[url:value = 'http://x4z9arb.cn/4712']",
"type": "indicator",
"valid_from": "2017-01-27T13:49:53.935382Z"
"valid_from": "2017-01-27T13:49:53.935382Z",
}
ind5 = {
"created": "2017-01-27T13:49:53.935Z",
"id": "indicator--00000000-0000-4000-8000-000000000002",
"labels": [
"url-watchlist"
"url-watchlist",
],
"modified": "2017-01-27T13:49:53.935Z",
"name": "Malicious site hosting downloader",
"pattern": "[url:value = 'http://x4z9arb.cn/4712']",
"type": "indicator",
"valid_from": "2017-01-27T13:49:53.935382Z"
"valid_from": "2017-01-27T13:49:53.935382Z",
}
return [ind1, ind2, ind3, ind4, ind5]
@ -119,41 +120,41 @@ def stix_objs2():
"created": "2017-01-27T13:49:53.935Z",
"id": "indicator--00000000-0000-4000-8000-000000000001",
"labels": [
"url-watchlist"
"url-watchlist",
],
"modified": "2017-01-31T13:49:53.935Z",
"name": "Malicious site hosting downloader",
"pattern": "[url:value = 'http://x4z9arb.cn/4712']",
"type": "indicator",
"valid_from": "2017-01-27T13:49:53.935382Z"
"valid_from": "2017-01-27T13:49:53.935382Z",
}
ind7 = {
"created": "2017-01-27T13:49:53.935Z",
"id": "indicator--00000000-0000-4000-8000-000000000002",
"labels": [
"url-watchlist"
"url-watchlist",
],
"modified": "2017-01-27T13:49:53.935Z",
"name": "Malicious site hosting downloader",
"pattern": "[url:value = 'http://x4z9arb.cn/4712']",
"type": "indicator",
"valid_from": "2017-01-27T13:49:53.935382Z"
"valid_from": "2017-01-27T13:49:53.935382Z",
}
ind8 = {
"created": "2017-01-27T13:49:53.935Z",
"id": "indicator--00000000-0000-4000-8000-000000000002",
"labels": [
"url-watchlist"
"url-watchlist",
],
"modified": "2017-01-27T13:49:53.935Z",
"name": "Malicious site hosting downloader",
"pattern": "[url:value = 'http://x4z9arb.cn/4712']",
"type": "indicator",
"valid_from": "2017-01-27T13:49:53.935382Z"
"valid_from": "2017-01-27T13:49:53.935382Z",
}
return [ind6, ind7, ind8]
@pytest.fixture
def real_stix_objs2(stix_objs2):
return [stix2.parse(x) for x in stix_objs2]
return [stix2.parse(x, version="2.0") for x in stix_objs2]
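
The fixture changes above follow the pattern used throughout this commit: build objects from the explicit `stix2.v20` namespace and pin `parse()` to version 2.0. A small sketch of that pattern with illustrative values (not the fixture kwargs themselves):

import stix2

indicator = stix2.v20.Indicator(
    labels=["malicious-activity"],
    pattern="[file:hashes.MD5 = 'd41d8cd98f00b204e9800998ecf8427e']",
)
# Round-trip through JSON with the spec version stated explicitly.
parsed = stix2.parse(str(indicator), version="2.0")
assert parsed.type == "indicator"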


@ -12,6 +12,7 @@ INDICATOR_ID = "indicator--a740531e-63ff-4e49-a9e1-a0a3eed0e3e7"
INTRUSION_SET_ID = "intrusion-set--4e78f46f-a023-4e5f-bc24-71b3ca22ec29"
MALWARE_ID = "malware--9c4638ec-f1de-4ddb-abf4-1b760417654e"
MARKING_DEFINITION_ID = "marking-definition--613f2e26-407d-48c7-9eca-b8e91df99dc9"
NOTE_ID = "note--0c7b5b88-8ff7-4a4d-aa9d-feb398cd0061"
OBSERVED_DATA_ID = "observed-data--b67d30ff-02ac-498a-92f9-32f845f448cf"
RELATIONSHIP_ID = "relationship--df7c87eb-75d2-4948-af81-9d49d246f301"
REPORT_ID = "report--84e4d88f-44ea-4bcd-bbf3-b2c1c320bcb3"
@ -31,7 +32,7 @@ MARKING_IDS = [
RELATIONSHIP_IDS = [
'relationship--06520621-5352-4e6a-b976-e8fa3d437ffd',
'relationship--181c9c09-43e6-45dd-9374-3bec192f05ef',
'relationship--a0cbb21c-8daf-4a7f-96aa-7155a4ef8f70'
'relationship--a0cbb21c-8daf-4a7f-96aa-7155a4ef8f70',
]
# *_KWARGS contains all required arguments to create an instance of that STIX object
@ -86,7 +87,7 @@ MALWARE_MORE_KWARGS = dict(
modified="2016-04-06T20:03:00.000Z",
labels=['ransomware'],
name="Cryptolocker",
description="A ransomware related to ..."
description="A ransomware related to ...",
)
OBSERVED_DATA_KWARGS = dict(
@ -97,8 +98,8 @@ OBSERVED_DATA_KWARGS = dict(
"0": {
"type": "windows-registry-key",
"key": "HKEY_LOCAL_MACHINE\\System\\Foo\\Bar",
}
}
},
},
)
REPORT_KWARGS = dict(


@ -24,14 +24,14 @@ EXPECTED = """{
def test_attack_pattern_example():
ap = stix2.AttackPattern(
ap = stix2.v20.AttackPattern(
id="attack-pattern--0c7b5b88-8ff7-4a4d-aa9d-feb398cd0061",
created="2016-05-12T08:17:27.000Z",
modified="2016-05-12T08:17:27.000Z",
name="Spear Phishing",
external_references=[{
"source_name": "capec",
"external_id": "CAPEC-163"
"external_id": "CAPEC-163",
}],
description="...",
)
@ -39,25 +39,27 @@ def test_attack_pattern_example():
assert str(ap) == EXPECTED
@pytest.mark.parametrize("data", [
EXPECTED,
{
"type": "attack-pattern",
"id": "attack-pattern--0c7b5b88-8ff7-4a4d-aa9d-feb398cd0061",
"created": "2016-05-12T08:17:27.000Z",
"modified": "2016-05-12T08:17:27.000Z",
"description": "...",
"external_references": [
{
"external_id": "CAPEC-163",
"source_name": "capec"
}
],
"name": "Spear Phishing",
},
])
@pytest.mark.parametrize(
"data", [
EXPECTED,
{
"type": "attack-pattern",
"id": "attack-pattern--0c7b5b88-8ff7-4a4d-aa9d-feb398cd0061",
"created": "2016-05-12T08:17:27.000Z",
"modified": "2016-05-12T08:17:27.000Z",
"description": "...",
"external_references": [
{
"external_id": "CAPEC-163",
"source_name": "capec",
},
],
"name": "Spear Phishing",
},
],
)
def test_parse_attack_pattern(data):
ap = stix2.parse(data)
ap = stix2.parse(data, version="2.0")
assert ap.type == 'attack-pattern'
assert ap.id == ATTACK_PATTERN_ID
@ -71,12 +73,12 @@ def test_parse_attack_pattern(data):
def test_attack_pattern_invalid_labels():
with pytest.raises(stix2.exceptions.InvalidValueError):
stix2.AttackPattern(
stix2.v20.AttackPattern(
id="attack-pattern--0c7b5b88-8ff7-4a4d-aa9d-feb398cd0061",
created="2016-05-12T08:17:27Z",
modified="2016-05-12T08:17:27Z",
name="Spear Phishing",
labels=1
labels=1,
)
# TODO: Add other examples


@ -0,0 +1,236 @@
import json
import pytest
import stix2
EXPECTED_BUNDLE = """{
"type": "bundle",
"id": "bundle--00000000-0000-4000-8000-000000000007",
"spec_version": "2.0",
"objects": [
{
"type": "indicator",
"id": "indicator--00000000-0000-4000-8000-000000000001",
"created": "2017-01-01T12:34:56.000Z",
"modified": "2017-01-01T12:34:56.000Z",
"pattern": "[file:hashes.MD5 = 'd41d8cd98f00b204e9800998ecf8427e']",
"valid_from": "2017-01-01T12:34:56Z",
"labels": [
"malicious-activity"
]
},
{
"type": "malware",
"id": "malware--00000000-0000-4000-8000-000000000003",
"created": "2017-01-01T12:34:56.000Z",
"modified": "2017-01-01T12:34:56.000Z",
"name": "Cryptolocker",
"labels": [
"ransomware"
]
},
{
"type": "relationship",
"id": "relationship--00000000-0000-4000-8000-000000000005",
"created": "2017-01-01T12:34:56.000Z",
"modified": "2017-01-01T12:34:56.000Z",
"relationship_type": "indicates",
"source_ref": "indicator--a740531e-63ff-4e49-a9e1-a0a3eed0e3e7",
"target_ref": "malware--9c4638ec-f1de-4ddb-abf4-1b760417654e"
}
]
}"""
EXPECTED_BUNDLE_DICT = {
"type": "bundle",
"id": "bundle--00000000-0000-4000-8000-000000000007",
"spec_version": "2.0",
"objects": [
{
"type": "indicator",
"id": "indicator--00000000-0000-4000-8000-000000000001",
"created": "2017-01-01T12:34:56.000Z",
"modified": "2017-01-01T12:34:56.000Z",
"pattern": "[file:hashes.MD5 = 'd41d8cd98f00b204e9800998ecf8427e']",
"valid_from": "2017-01-01T12:34:56Z",
"labels": [
"malicious-activity",
],
},
{
"type": "malware",
"id": "malware--00000000-0000-4000-8000-000000000003",
"created": "2017-01-01T12:34:56.000Z",
"modified": "2017-01-01T12:34:56.000Z",
"name": "Cryptolocker",
"labels": [
"ransomware",
],
},
{
"type": "relationship",
"id": "relationship--00000000-0000-4000-8000-000000000005",
"created": "2017-01-01T12:34:56.000Z",
"modified": "2017-01-01T12:34:56.000Z",
"relationship_type": "indicates",
"source_ref": "indicator--a740531e-63ff-4e49-a9e1-a0a3eed0e3e7",
"target_ref": "malware--9c4638ec-f1de-4ddb-abf4-1b760417654e",
},
],
}
def test_empty_bundle():
bundle = stix2.v20.Bundle()
assert bundle.type == "bundle"
assert bundle.id.startswith("bundle--")
with pytest.raises(AttributeError):
assert bundle.objects
def test_bundle_with_wrong_type():
with pytest.raises(stix2.exceptions.InvalidValueError) as excinfo:
stix2.v20.Bundle(type="not-a-bundle")
assert excinfo.value.cls == stix2.v20.Bundle
assert excinfo.value.prop_name == "type"
assert excinfo.value.reason == "must equal 'bundle'."
assert str(excinfo.value) == "Invalid value for Bundle 'type': must equal 'bundle'."
def test_bundle_id_must_start_with_bundle():
with pytest.raises(stix2.exceptions.InvalidValueError) as excinfo:
stix2.v20.Bundle(id='my-prefix--')
assert excinfo.value.cls == stix2.v20.Bundle
assert excinfo.value.prop_name == "id"
assert excinfo.value.reason == "must start with 'bundle--'."
assert str(excinfo.value) == "Invalid value for Bundle 'id': must start with 'bundle--'."
def test_create_bundle1(indicator, malware, relationship):
bundle = stix2.v20.Bundle(objects=[indicator, malware, relationship])
assert str(bundle) == EXPECTED_BUNDLE
assert bundle.serialize(pretty=True) == EXPECTED_BUNDLE
def test_create_bundle2(indicator, malware, relationship):
bundle = stix2.v20.Bundle(objects=[indicator, malware, relationship])
assert json.loads(bundle.serialize()) == EXPECTED_BUNDLE_DICT
def test_create_bundle_with_positional_args(indicator, malware, relationship):
bundle = stix2.v20.Bundle(indicator, malware, relationship)
assert str(bundle) == EXPECTED_BUNDLE
def test_create_bundle_with_positional_listarg(indicator, malware, relationship):
bundle = stix2.v20.Bundle([indicator, malware, relationship])
assert str(bundle) == EXPECTED_BUNDLE
def test_create_bundle_with_listarg_and_positional_arg(indicator, malware, relationship):
bundle = stix2.v20.Bundle([indicator, malware], relationship)
assert str(bundle) == EXPECTED_BUNDLE
def test_create_bundle_with_listarg_and_kwarg(indicator, malware, relationship):
bundle = stix2.v20.Bundle([indicator, malware], objects=[relationship])
assert str(bundle) == EXPECTED_BUNDLE
def test_create_bundle_with_arg_listarg_and_kwarg(indicator, malware, relationship):
bundle = stix2.v20.Bundle([indicator], malware, objects=[relationship])
assert str(bundle) == EXPECTED_BUNDLE
def test_create_bundle_invalid(indicator, malware, relationship):
with pytest.raises(ValueError) as excinfo:
stix2.v20.Bundle(objects=[1])
assert excinfo.value.reason == "This property may only contain a dictionary or object"
with pytest.raises(ValueError) as excinfo:
stix2.v20.Bundle(objects=[{}])
assert excinfo.value.reason == "This property may only contain a non-empty dictionary or object"
with pytest.raises(ValueError) as excinfo:
stix2.v20.Bundle(objects=[{'type': 'bundle'}])
assert excinfo.value.reason == 'This property may not contain a Bundle object'
@pytest.mark.parametrize("version", ["2.0"])
def test_parse_bundle(version):
bundle = stix2.parse(EXPECTED_BUNDLE, version=version)
assert bundle.type == "bundle"
assert bundle.id.startswith("bundle--")
assert type(bundle.objects[0]) is stix2.v20.Indicator
assert bundle.objects[0].type == 'indicator'
assert bundle.objects[1].type == 'malware'
assert bundle.objects[2].type == 'relationship'
def test_parse_unknown_type():
unknown = {
"type": "other",
"id": "other--8e2e2d2b-17d4-4cbf-938f-98ee46b3cd3f",
"created": "2016-04-06T20:03:00Z",
"modified": "2016-04-06T20:03:00Z",
"created_by_ref": "identity--f431f809-377b-45e0-aa1c-6a4751cae5ff",
"description": "Campaign by Green Group against a series of targets in the financial services sector.",
"name": "Green Group Attacks Against Finance",
}
with pytest.raises(stix2.exceptions.ParseError) as excinfo:
stix2.parse(unknown, version="2.0")
assert str(excinfo.value) == "Can't parse unknown object type 'other'! For custom types, use the CustomObject decorator."
def test_stix_object_property():
prop = stix2.properties.STIXObjectProperty(spec_version='2.0')
identity = stix2.v20.Identity(name="test", identity_class="individual")
assert prop.clean(identity) is identity
def test_bundle_with_different_spec_objects():
# This is a 2.0 case only...
data = [
{
"spec_version": "2.1",
"type": "indicator",
"id": "indicator--00000000-0000-4000-8000-000000000001",
"created": "2017-01-01T12:34:56.000Z",
"modified": "2017-01-01T12:34:56.000Z",
"pattern": "[file:hashes.MD5 = 'd41d8cd98f00b204e9800998ecf8427e']",
"valid_from": "2017-01-01T12:34:56Z",
"labels": [
"malicious-activity",
],
},
{
"type": "malware",
"id": "malware--00000000-0000-4000-8000-000000000003",
"created": "2017-01-01T12:34:56.000Z",
"modified": "2017-01-01T12:34:56.000Z",
"name": "Cryptolocker",
"labels": [
"ransomware",
],
},
]
with pytest.raises(ValueError) as excinfo:
stix2.v20.Bundle(objects=data)
assert "Spec version 2.0 bundles don't yet support containing objects of a different spec version." in str(excinfo.value)


@ -19,32 +19,34 @@ EXPECTED = """{
def test_campaign_example():
campaign = stix2.Campaign(
campaign = stix2.v20.Campaign(
id="campaign--8e2e2d2b-17d4-4cbf-938f-98ee46b3cd3f",
created_by_ref="identity--f431f809-377b-45e0-aa1c-6a4751cae5ff",
created="2016-04-06T20:03:00Z",
modified="2016-04-06T20:03:00Z",
name="Green Group Attacks Against Finance",
description="Campaign by Green Group against a series of targets in the financial services sector."
description="Campaign by Green Group against a series of targets in the financial services sector.",
)
assert str(campaign) == EXPECTED
@pytest.mark.parametrize("data", [
EXPECTED,
{
"type": "campaign",
"id": "campaign--8e2e2d2b-17d4-4cbf-938f-98ee46b3cd3f",
"created": "2016-04-06T20:03:00Z",
"modified": "2016-04-06T20:03:00Z",
"created_by_ref": "identity--f431f809-377b-45e0-aa1c-6a4751cae5ff",
"description": "Campaign by Green Group against a series of targets in the financial services sector.",
"name": "Green Group Attacks Against Finance",
},
])
@pytest.mark.parametrize(
"data", [
EXPECTED,
{
"type": "campaign",
"id": "campaign--8e2e2d2b-17d4-4cbf-938f-98ee46b3cd3f",
"created": "2016-04-06T20:03:00Z",
"modified": "2016-04-06T20:03:00Z",
"created_by_ref": "identity--f431f809-377b-45e0-aa1c-6a4751cae5ff",
"description": "Campaign by Green Group against a series of targets in the financial services sector.",
"name": "Green Group Attacks Against Finance",
},
],
)
def test_parse_campaign(data):
cmpn = stix2.parse(data)
cmpn = stix2.parse(data, version="2.0")
assert cmpn.type == 'campaign'
assert cmpn.id == CAMPAIGN_ID

stix2/test/v20/test_core.py (new file)

@ -0,0 +1,172 @@
import pytest
import stix2
from stix2 import core, exceptions
BUNDLE = {
"type": "bundle",
"spec_version": "2.0",
"id": "bundle--00000000-0000-4000-8000-000000000007",
"objects": [
{
"type": "indicator",
"id": "indicator--00000000-0000-4000-8000-000000000001",
"created": "2017-01-01T12:34:56.000Z",
"modified": "2017-01-01T12:34:56.000Z",
"pattern": "[file:hashes.MD5 = 'd41d8cd98f00b204e9800998ecf8427e']",
"valid_from": "2017-01-01T12:34:56Z",
"labels": [
"malicious-activity",
],
},
{
"type": "malware",
"id": "malware--00000000-0000-4000-8000-000000000003",
"created": "2017-01-01T12:34:56.000Z",
"modified": "2017-01-01T12:34:56.000Z",
"name": "Cryptolocker",
"labels": [
"ransomware",
],
},
{
"type": "relationship",
"id": "relationship--00000000-0000-4000-8000-000000000005",
"created": "2017-01-01T12:34:56.000Z",
"modified": "2017-01-01T12:34:56.000Z",
"relationship_type": "indicates",
"source_ref": "indicator--a740531e-63ff-4e49-a9e1-a0a3eed0e3e7",
"target_ref": "malware--9c4638ec-f1de-4ddb-abf4-1b760417654e",
},
],
}
def test_dict_to_stix2_bundle_with_version():
with pytest.raises(exceptions.ExtraPropertiesError) as excinfo:
core.dict_to_stix2(BUNDLE, version='2.1')
assert str(excinfo.value) == "Unexpected properties for Bundle: (spec_version)."
def test_parse_observable_with_version():
observable = {"type": "file", "name": "foo.exe"}
obs_obj = core.parse_observable(observable, version='2.0')
v = 'v20'
assert v in str(obs_obj.__class__)
@pytest.mark.xfail(reason="The default version is no longer 2.0", condition=stix2.DEFAULT_VERSION != "2.0")
def test_parse_observable_with_no_version():
observable = {"type": "file", "name": "foo.exe"}
obs_obj = core.parse_observable(observable)
v = 'v20'
assert v in str(obs_obj.__class__)
def test_register_object_with_version():
bundle = core.dict_to_stix2(BUNDLE, version='2.0')
core._register_object(bundle.objects[0].__class__, version='2.0')
v = 'v20'
assert bundle.objects[0].type in core.STIX2_OBJ_MAPS[v]['objects']
assert v in str(bundle.objects[0].__class__)
def test_register_marking_with_version():
core._register_marking(stix2.v20.TLP_WHITE.__class__, version='2.0')
v = 'v20'
assert stix2.v20.TLP_WHITE.definition._type in core.STIX2_OBJ_MAPS[v]['markings']
assert v in str(stix2.v20.TLP_WHITE.__class__)
@pytest.mark.xfail(reason="The default version is no longer 2.0", condition=stix2.DEFAULT_VERSION != "2.0")
def test_register_marking_with_no_version():
# Uses default version (2.0 in this case)
core._register_marking(stix2.v20.TLP_WHITE.__class__)
v = 'v20'
assert stix2.v20.TLP_WHITE.definition._type in core.STIX2_OBJ_MAPS[v]['markings']
assert v in str(stix2.v20.TLP_WHITE.__class__)
def test_register_observable_with_version():
observed_data = stix2.v20.ObservedData(
id="observed-data--b67d30ff-02ac-498a-92f9-32f845f448cf",
created_by_ref="identity--f431f809-377b-45e0-aa1c-6a4751cae5ff",
created="2016-04-06T19:58:16.000Z",
modified="2016-04-06T19:58:16.000Z",
first_observed="2015-12-21T19:00:00Z",
last_observed="2015-12-21T19:00:00Z",
number_observed=50,
objects={
"0": {
"name": "foo.exe",
"type": "file",
"extensions": {
"ntfs-ext": {
"alternate_data_streams": [
{
"name": "second.stream",
"size": 25536,
},
],
},
},
},
"1": {
"type": "directory",
"path": "/usr/home",
"contains_refs": ["0"],
},
},
)
core._register_observable(observed_data.objects['0'].__class__, version='2.0')
v = 'v20'
assert observed_data.objects['0'].type in core.STIX2_OBJ_MAPS[v]['observables']
assert v in str(observed_data.objects['0'].__class__)
def test_register_observable_extension_with_version():
observed_data = stix2.v20.ObservedData(
id="observed-data--b67d30ff-02ac-498a-92f9-32f845f448cf",
created_by_ref="identity--f431f809-377b-45e0-aa1c-6a4751cae5ff",
created="2016-04-06T19:58:16.000Z",
modified="2016-04-06T19:58:16.000Z",
first_observed="2015-12-21T19:00:00Z",
last_observed="2015-12-21T19:00:00Z",
number_observed=50,
objects={
"0": {
"name": "foo.exe",
"type": "file",
"extensions": {
"ntfs-ext": {
"alternate_data_streams": [
{
"name": "second.stream",
"size": 25536,
},
],
},
},
},
"1": {
"type": "directory",
"path": "/usr/home",
"contains_refs": ["0"],
},
},
)
core._register_observable_extension(observed_data.objects['0'], observed_data.objects['0'].extensions['ntfs-ext'].__class__, version='2.0')
v = 'v20'
assert observed_data.objects['0'].type in core.STIX2_OBJ_MAPS[v]['observables']
assert v in str(observed_data.objects['0'].__class__)
assert observed_data.objects['0'].extensions['ntfs-ext']._type in core.STIX2_OBJ_MAPS[v]['observable-extensions']['file']
assert v in str(observed_data.objects['0'].extensions['ntfs-ext'].__class__)
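
A sketch of the registry behaviour these tests assert: parsing with an explicit version routes objects to the matching `stix2.v20` classes, which are looked up in `core.STIX2_OBJ_MAPS['v20']`.

import stix2
from stix2 import core

bundle = core.dict_to_stix2(BUNDLE, version="2.0")        # BUNDLE is the dict defined above
assert isinstance(bundle.objects[0], stix2.v20.Indicator)
assert "indicator" in core.STIX2_OBJ_MAPS["v20"]["objects"]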


@ -19,32 +19,34 @@ EXPECTED = """{
def test_course_of_action_example():
coa = stix2.CourseOfAction(
coa = stix2.v20.CourseOfAction(
id="course-of-action--8e2e2d2b-17d4-4cbf-938f-98ee46b3cd3f",
created_by_ref="identity--f431f809-377b-45e0-aa1c-6a4751cae5ff",
created="2016-04-06T20:03:48.000Z",
modified="2016-04-06T20:03:48.000Z",
name="Add TCP port 80 Filter Rule to the existing Block UDP 1434 Filter",
description="This is how to add a filter rule to block inbound access to TCP port 80 to the existing UDP 1434 filter ..."
description="This is how to add a filter rule to block inbound access to TCP port 80 to the existing UDP 1434 filter ...",
)
assert str(coa) == EXPECTED
@pytest.mark.parametrize("data", [
EXPECTED,
{
"created": "2016-04-06T20:03:48.000Z",
"created_by_ref": "identity--f431f809-377b-45e0-aa1c-6a4751cae5ff",
"description": "This is how to add a filter rule to block inbound access to TCP port 80 to the existing UDP 1434 filter ...",
"id": "course-of-action--8e2e2d2b-17d4-4cbf-938f-98ee46b3cd3f",
"modified": "2016-04-06T20:03:48.000Z",
"name": "Add TCP port 80 Filter Rule to the existing Block UDP 1434 Filter",
"type": "course-of-action"
},
])
@pytest.mark.parametrize(
"data", [
EXPECTED,
{
"created": "2016-04-06T20:03:48.000Z",
"created_by_ref": "identity--f431f809-377b-45e0-aa1c-6a4751cae5ff",
"description": "This is how to add a filter rule to block inbound access to TCP port 80 to the existing UDP 1434 filter ...",
"id": "course-of-action--8e2e2d2b-17d4-4cbf-938f-98ee46b3cd3f",
"modified": "2016-04-06T20:03:48.000Z",
"name": "Add TCP port 80 Filter Rule to the existing Block UDP 1434 Filter",
"type": "course-of-action",
},
],
)
def test_parse_course_of_action(data):
coa = stix2.parse(data)
coa = stix2.parse(data, version="2.0")
assert coa.type == 'course-of-action'
assert coa.id == COURSE_OF_ACTION_ID


@ -4,7 +4,7 @@ import stix2
from .constants import FAKE_TIME, MARKING_DEFINITION_ID
IDENTITY_CUSTOM_PROP = stix2.Identity(
IDENTITY_CUSTOM_PROP = stix2.v20.Identity(
name="John Smith",
identity_class="individual",
x_foo="bar",
@ -14,7 +14,7 @@ IDENTITY_CUSTOM_PROP = stix2.Identity(
def test_identity_custom_property():
with pytest.raises(ValueError) as excinfo:
stix2.Identity(
stix2.v20.Identity(
id="identity--311b2d2d-f010-4473-83ec-1edf84858f4c",
created="2015-12-21T19:59:11Z",
modified="2015-12-21T19:59:11Z",
@ -25,7 +25,7 @@ def test_identity_custom_property():
assert str(excinfo.value) == "'custom_properties' must be a dictionary"
with pytest.raises(stix2.exceptions.ExtraPropertiesError) as excinfo:
stix2.Identity(
stix2.v20.Identity(
id="identity--311b2d2d-f010-4473-83ec-1edf84858f4c",
created="2015-12-21T19:59:11Z",
modified="2015-12-21T19:59:11Z",
@ -35,10 +35,10 @@ def test_identity_custom_property():
"foo": "bar",
},
foo="bar",
)
)
assert "Unexpected properties for Identity" in str(excinfo.value)
identity = stix2.Identity(
identity = stix2.v20.Identity(
id="identity--311b2d2d-f010-4473-83ec-1edf84858f4c",
created="2015-12-21T19:59:11Z",
modified="2015-12-21T19:59:11Z",
@ -53,7 +53,7 @@ def test_identity_custom_property():
def test_identity_custom_property_invalid():
with pytest.raises(stix2.exceptions.ExtraPropertiesError) as excinfo:
stix2.Identity(
stix2.v20.Identity(
id="identity--311b2d2d-f010-4473-83ec-1edf84858f4c",
created="2015-12-21T19:59:11Z",
modified="2015-12-21T19:59:11Z",
@ -61,13 +61,13 @@ def test_identity_custom_property_invalid():
identity_class="individual",
x_foo="bar",
)
assert excinfo.value.cls == stix2.Identity
assert excinfo.value.cls == stix2.v20.Identity
assert excinfo.value.properties == ['x_foo']
assert "Unexpected properties for" in str(excinfo.value)
def test_identity_custom_property_allowed():
identity = stix2.Identity(
identity = stix2.v20.Identity(
id="identity--311b2d2d-f010-4473-83ec-1edf84858f4c",
created="2015-12-21T19:59:11Z",
modified="2015-12-21T19:59:11Z",
@ -79,8 +79,9 @@ def test_identity_custom_property_allowed():
assert identity.x_foo == "bar"
@pytest.mark.parametrize("data", [
"""{
@pytest.mark.parametrize(
"data", [
"""{
"type": "identity",
"id": "identity--311b2d2d-f010-4473-83ec-1edf84858f4c",
"created": "2015-12-21T19:59:11Z",
@ -89,34 +90,35 @@ def test_identity_custom_property_allowed():
"identity_class": "individual",
"foo": "bar"
}""",
])
],
)
def test_parse_identity_custom_property(data):
with pytest.raises(stix2.exceptions.ExtraPropertiesError) as excinfo:
identity = stix2.parse(data)
assert excinfo.value.cls == stix2.Identity
stix2.parse(data, version="2.0")
assert excinfo.value.cls == stix2.v20.Identity
assert excinfo.value.properties == ['foo']
assert "Unexpected properties for" in str(excinfo.value)
identity = stix2.parse(data, allow_custom=True)
identity = stix2.parse(data, version="2.0", allow_custom=True)
assert identity.foo == "bar"
def test_custom_property_object_in_bundled_object():
bundle = stix2.Bundle(IDENTITY_CUSTOM_PROP, allow_custom=True)
bundle = stix2.v20.Bundle(IDENTITY_CUSTOM_PROP, allow_custom=True)
assert bundle.objects[0].x_foo == "bar"
assert '"x_foo": "bar"' in str(bundle)
def test_custom_properties_object_in_bundled_object():
obj = stix2.Identity(
obj = stix2.v20.Identity(
name="John Smith",
identity_class="individual",
custom_properties={
"x_foo": "bar",
}
},
)
bundle = stix2.Bundle(obj, allow_custom=True)
bundle = stix2.v20.Bundle(obj, allow_custom=True)
assert bundle.objects[0].x_foo == "bar"
assert '"x_foo": "bar"' in str(bundle)
@ -132,9 +134,9 @@ def test_custom_property_dict_in_bundled_object():
'x_foo': 'bar',
}
with pytest.raises(stix2.exceptions.ExtraPropertiesError):
bundle = stix2.Bundle(custom_identity)
stix2.v20.Bundle(custom_identity)
bundle = stix2.Bundle(custom_identity, allow_custom=True)
bundle = stix2.v20.Bundle(custom_identity, allow_custom=True)
assert bundle.objects[0].x_foo == "bar"
assert '"x_foo": "bar"' in str(bundle)
@ -150,23 +152,23 @@ def test_custom_properties_dict_in_bundled_object():
'x_foo': 'bar',
},
}
bundle = stix2.Bundle(custom_identity)
bundle = stix2.v20.Bundle(custom_identity)
assert bundle.objects[0].x_foo == "bar"
assert '"x_foo": "bar"' in str(bundle)
def test_custom_property_in_observed_data():
artifact = stix2.File(
artifact = stix2.v20.File(
allow_custom=True,
name='test',
x_foo='bar'
x_foo='bar',
)
observed_data = stix2.ObservedData(
observed_data = stix2.v20.ObservedData(
allow_custom=True,
first_observed="2015-12-21T19:00:00Z",
last_observed="2015-12-21T19:00:00Z",
number_observed=0,
number_observed=1,
objects={"0": artifact},
)
@ -175,20 +177,20 @@ def test_custom_property_in_observed_data():
def test_custom_property_object_in_observable_extension():
ntfs = stix2.NTFSExt(
ntfs = stix2.v20.NTFSExt(
allow_custom=True,
sid=1,
x_foo='bar',
)
artifact = stix2.File(
artifact = stix2.v20.File(
name='test',
extensions={'ntfs-ext': ntfs},
)
observed_data = stix2.ObservedData(
observed_data = stix2.v20.ObservedData(
allow_custom=True,
first_observed="2015-12-21T19:00:00Z",
last_observed="2015-12-21T19:00:00Z",
number_observed=0,
number_observed=1,
objects={"0": artifact},
)
@ -198,17 +200,17 @@ def test_custom_property_object_in_observable_extension():
def test_custom_property_dict_in_observable_extension():
with pytest.raises(stix2.exceptions.ExtraPropertiesError):
artifact = stix2.File(
stix2.v20.File(
name='test',
extensions={
'ntfs-ext': {
'sid': 1,
'x_foo': 'bar',
}
},
},
)
artifact = stix2.File(
artifact = stix2.v20.File(
allow_custom=True,
name='test',
extensions={
@ -216,14 +218,14 @@ def test_custom_property_dict_in_observable_extension():
'allow_custom': True,
'sid': 1,
'x_foo': 'bar',
}
},
},
)
observed_data = stix2.ObservedData(
observed_data = stix2.v20.ObservedData(
allow_custom=True,
first_observed="2015-12-21T19:00:00Z",
last_observed="2015-12-21T19:00:00Z",
number_observed=0,
number_observed=1,
objects={"0": artifact},
)
@ -237,15 +239,15 @@ def test_identity_custom_property_revoke():
def test_identity_custom_property_edit_markings():
marking_obj = stix2.MarkingDefinition(
marking_obj = stix2.v20.MarkingDefinition(
id=MARKING_DEFINITION_ID,
definition_type="statement",
definition=stix2.StatementMarking(statement="Copyright 2016, Example Corp")
definition=stix2.v20.StatementMarking(statement="Copyright 2016, Example Corp"),
)
marking_obj2 = stix2.MarkingDefinition(
marking_obj2 = stix2.v20.MarkingDefinition(
id=MARKING_DEFINITION_ID,
definition_type="statement",
definition=stix2.StatementMarking(statement="Another one")
definition=stix2.v20.StatementMarking(statement="Another one"),
)
# None of the following should throw exceptions
@ -258,9 +260,11 @@ def test_identity_custom_property_edit_markings():
def test_custom_marking_no_init_1():
@stix2.CustomMarking('x-new-obj', [
('property1', stix2.properties.StringProperty(required=True)),
])
@stix2.v20.CustomMarking(
'x-new-obj', [
('property1', stix2.properties.StringProperty(required=True)),
],
)
class NewObj():
pass
@ -269,9 +273,11 @@ def test_custom_marking_no_init_1():
def test_custom_marking_no_init_2():
@stix2.CustomMarking('x-new-obj2', [
('property1', stix2.properties.StringProperty(required=True)),
])
@stix2.v20.CustomMarking(
'x-new-obj2', [
('property1', stix2.properties.StringProperty(required=True)),
],
)
class NewObj2(object):
pass
@ -279,10 +285,12 @@ def test_custom_marking_no_init_2():
assert no2.property1 == 'something'
@stix2.sdo.CustomObject('x-new-type', [
('property1', stix2.properties.StringProperty(required=True)),
('property2', stix2.properties.IntegerProperty()),
])
@stix2.v20.CustomObject(
'x-new-type', [
('property1', stix2.properties.StringProperty(required=True)),
('property2', stix2.properties.IntegerProperty()),
],
)
class NewType(object):
def __init__(self, property2=None, **kwargs):
if property2 and property2 < 10:
@ -312,9 +320,11 @@ def test_custom_object_type():
def test_custom_object_no_init_1():
@stix2.sdo.CustomObject('x-new-obj', [
('property1', stix2.properties.StringProperty(required=True)),
])
@stix2.v20.CustomObject(
'x-new-obj', [
('property1', stix2.properties.StringProperty(required=True)),
],
)
class NewObj():
pass
@ -323,9 +333,11 @@ def test_custom_object_no_init_1():
def test_custom_object_no_init_2():
@stix2.sdo.CustomObject('x-new-obj2', [
('property1', stix2.properties.StringProperty(required=True)),
])
@stix2.v20.CustomObject(
'x-new-obj2', [
('property1', stix2.properties.StringProperty(required=True)),
],
)
class NewObj2(object):
pass
@ -335,17 +347,21 @@ def test_custom_object_no_init_2():
def test_custom_object_invalid_type_name():
with pytest.raises(ValueError) as excinfo:
@stix2.sdo.CustomObject('x', [
('property1', stix2.properties.StringProperty(required=True)),
])
@stix2.v20.CustomObject(
'x', [
('property1', stix2.properties.StringProperty(required=True)),
],
)
class NewObj(object):
pass # pragma: no cover
assert "Invalid type name 'x': " in str(excinfo.value)
with pytest.raises(ValueError) as excinfo:
@stix2.sdo.CustomObject('x_new_object', [
('property1', stix2.properties.StringProperty(required=True)),
])
@stix2.v20.CustomObject(
'x_new_object', [
('property1', stix2.properties.StringProperty(required=True)),
],
)
class NewObj2(object):
pass # pragma: no cover
assert "Invalid type name 'x_new_object':" in str(excinfo.value)
@ -358,8 +374,8 @@ def test_parse_custom_object_type():
"property1": "something"
}"""
nt = stix2.parse(nt_string)
assert nt.property1 == 'something'
nt = stix2.parse(nt_string, version="2.0", allow_custom=True)
assert nt["property1"] == 'something'
def test_parse_unregistered_custom_object_type():
@ -370,7 +386,7 @@ def test_parse_unregistered_custom_object_type():
}"""
with pytest.raises(stix2.exceptions.ParseError) as excinfo:
stix2.parse(nt_string)
stix2.parse(nt_string, version="2.0")
assert "Can't parse unknown object type" in str(excinfo.value)
assert "use the CustomObject decorator." in str(excinfo.value)
@ -385,15 +401,17 @@ def test_parse_unregistered_custom_object_type_w_allow_custom():
"property1": "something"
}"""
custom_obj = stix2.parse(nt_string, allow_custom=True)
custom_obj = stix2.parse(nt_string, version="2.0", allow_custom=True)
assert custom_obj["type"] == "x-foobar-observable"
@stix2.observables.CustomObservable('x-new-observable', [
('property1', stix2.properties.StringProperty(required=True)),
('property2', stix2.properties.IntegerProperty()),
('x_property3', stix2.properties.BooleanProperty()),
])
@stix2.v20.CustomObservable(
'x-new-observable', [
('property1', stix2.properties.StringProperty(required=True)),
('property2', stix2.properties.IntegerProperty()),
('x_property3', stix2.properties.BooleanProperty()),
],
)
class NewObservable():
def __init__(self, property2=None, **kwargs):
if property2 and property2 < 10:
@ -428,9 +446,11 @@ def test_custom_observable_raises_exception():
def test_custom_observable_object_no_init_1():
@stix2.observables.CustomObservable('x-new-observable', [
('property1', stix2.properties.StringProperty()),
])
@stix2.v20.CustomObservable(
'x-new-observable', [
('property1', stix2.properties.StringProperty()),
],
)
class NewObs():
pass
@ -439,9 +459,11 @@ def test_custom_observable_object_no_init_1():
def test_custom_observable_object_no_init_2():
@stix2.observables.CustomObservable('x-new-obs2', [
('property1', stix2.properties.StringProperty()),
])
@stix2.v20.CustomObservable(
'x-new-obs2', [
('property1', stix2.properties.StringProperty()),
],
)
class NewObs2(object):
pass
@ -451,17 +473,21 @@ def test_custom_observable_object_no_init_2():
def test_custom_observable_object_invalid_type_name():
with pytest.raises(ValueError) as excinfo:
@stix2.observables.CustomObservable('x', [
('property1', stix2.properties.StringProperty()),
])
@stix2.v20.CustomObservable(
'x', [
('property1', stix2.properties.StringProperty()),
],
)
class NewObs(object):
pass # pragma: no cover
assert "Invalid observable type name 'x':" in str(excinfo.value)
with pytest.raises(ValueError) as excinfo:
@stix2.observables.CustomObservable('x_new_obs', [
('property1', stix2.properties.StringProperty()),
])
@stix2.v20.CustomObservable(
'x_new_obs', [
('property1', stix2.properties.StringProperty()),
],
)
class NewObs2(object):
pass # pragma: no cover
assert "Invalid observable type name 'x_new_obs':" in str(excinfo.value)
@ -469,9 +495,11 @@ def test_custom_observable_object_invalid_type_name():
def test_custom_observable_object_invalid_ref_property():
with pytest.raises(ValueError) as excinfo:
@stix2.observables.CustomObservable('x-new-obs', [
('property_ref', stix2.properties.StringProperty()),
])
@stix2.v20.CustomObservable(
'x-new-obs', [
('property_ref', stix2.properties.StringProperty()),
],
)
class NewObs():
pass
assert "is named like an object reference property but is not an ObjectReferenceProperty" in str(excinfo.value)
@ -479,9 +507,11 @@ def test_custom_observable_object_invalid_ref_property():
def test_custom_observable_object_invalid_refs_property():
with pytest.raises(ValueError) as excinfo:
@stix2.observables.CustomObservable('x-new-obs', [
('property_refs', stix2.properties.StringProperty()),
])
@stix2.v20.CustomObservable(
'x-new-obs', [
('property_refs', stix2.properties.StringProperty()),
],
)
class NewObs():
pass
assert "is named like an object reference list property but is not a ListProperty containing ObjectReferenceProperty" in str(excinfo.value)
@ -489,33 +519,39 @@ def test_custom_observable_object_invalid_refs_property():
def test_custom_observable_object_invalid_refs_list_property():
with pytest.raises(ValueError) as excinfo:
@stix2.observables.CustomObservable('x-new-obs', [
('property_refs', stix2.properties.ListProperty(stix2.properties.StringProperty)),
])
@stix2.v20.CustomObservable(
'x-new-obs', [
('property_refs', stix2.properties.ListProperty(stix2.properties.StringProperty)),
],
)
class NewObs():
pass
assert "is named like an object reference list property but is not a ListProperty containing ObjectReferenceProperty" in str(excinfo.value)
def test_custom_observable_object_invalid_valid_refs():
@stix2.observables.CustomObservable('x-new-obs', [
('property1', stix2.properties.StringProperty(required=True)),
('property_ref', stix2.properties.ObjectReferenceProperty(valid_types='email-addr')),
])
@stix2.v20.CustomObservable(
'x-new-obs', [
('property1', stix2.properties.StringProperty(required=True)),
('property_ref', stix2.properties.ObjectReferenceProperty(valid_types='email-addr')),
],
)
class NewObs():
pass
with pytest.raises(Exception) as excinfo:
NewObs(_valid_refs=['1'],
property1='something',
property_ref='1')
NewObs(
_valid_refs=['1'],
property1='something',
property_ref='1',
)
assert "must be created with _valid_refs as a dict, not a list" in str(excinfo.value)
def test_custom_no_properties_raises_exception():
with pytest.raises(ValueError):
with pytest.raises(TypeError):
@stix2.sdo.CustomObject('x-new-object-type')
@stix2.v20.CustomObject('x-new-object-type')
class NewObject1(object):
pass
@ -523,7 +559,7 @@ def test_custom_no_properties_raises_exception():
def test_custom_wrong_properties_arg_raises_exception():
with pytest.raises(ValueError):
@stix2.observables.CustomObservable('x-new-object-type', (("prop", stix2.properties.BooleanProperty())))
@stix2.v20.CustomObservable('x-new-object-type', (("prop", stix2.properties.BooleanProperty())))
class NewObject2(object):
pass
@ -534,8 +570,8 @@ def test_parse_custom_observable_object():
"property1": "something"
}"""
nt = stix2.parse_observable(nt_string, [])
assert isinstance(nt, stix2.core._STIXBase)
nt = stix2.parse_observable(nt_string, [], version='2.0')
assert isinstance(nt, stix2.base._STIXBase)
assert nt.property1 == 'something'
@ -546,14 +582,14 @@ def test_parse_unregistered_custom_observable_object():
}"""
with pytest.raises(stix2.exceptions.CustomContentError) as excinfo:
stix2.parse_observable(nt_string)
stix2.parse_observable(nt_string, version='2.0')
assert "Can't parse unknown observable type" in str(excinfo.value)
parsed_custom = stix2.parse_observable(nt_string, allow_custom=True)
parsed_custom = stix2.parse_observable(nt_string, allow_custom=True, version='2.0')
assert parsed_custom['property1'] == 'something'
with pytest.raises(AttributeError) as excinfo:
assert parsed_custom.property1 == 'something'
assert not isinstance(parsed_custom, stix2.core._STIXBase)
assert not isinstance(parsed_custom, stix2.base._STIXBase)
def test_parse_unregistered_custom_observable_object_with_no_type():
@ -562,7 +598,7 @@ def test_parse_unregistered_custom_observable_object_with_no_type():
}"""
with pytest.raises(stix2.exceptions.ParseError) as excinfo:
stix2.parse_observable(nt_string, allow_custom=True)
stix2.parse_observable(nt_string, allow_custom=True, version='2.0')
assert "Can't parse observable with no 'type' property" in str(excinfo.value)
@ -582,7 +618,7 @@ def test_parse_observed_data_with_custom_observable():
}
}
}"""
parsed = stix2.parse(input_str, allow_custom=True)
parsed = stix2.parse(input_str, version="2.0", allow_custom=True)
assert parsed.objects['0']['property1'] == 'something'
@ -592,7 +628,7 @@ def test_parse_invalid_custom_observable_object():
}"""
with pytest.raises(stix2.exceptions.ParseError) as excinfo:
stix2.parse_observable(nt_string)
stix2.parse_observable(nt_string, version='2.0')
assert "Can't parse observable with no 'type' property" in str(excinfo.value)
@ -634,7 +670,7 @@ def test_observable_custom_property_allowed():
def test_observed_data_with_custom_observable_object():
no = NewObservable(property1='something')
ob_data = stix2.ObservedData(
ob_data = stix2.v20.ObservedData(
first_observed=FAKE_TIME,
last_observed=FAKE_TIME,
number_observed=1,
@ -644,10 +680,12 @@ def test_observed_data_with_custom_observable_object():
assert ob_data.objects['0'].property1 == 'something'
@stix2.observables.CustomExtension(stix2.DomainName, 'x-new-ext', [
('property1', stix2.properties.StringProperty(required=True)),
('property2', stix2.properties.IntegerProperty()),
])
@stix2.v20.CustomExtension(
stix2.v20.DomainName, 'x-new-ext', [
('property1', stix2.properties.StringProperty(required=True)),
('property2', stix2.properties.IntegerProperty()),
],
)
class NewExtension():
def __init__(self, property2=None, **kwargs):
if property2 and property2 < 10:
@ -670,7 +708,7 @@ def test_custom_extension():
with pytest.raises(stix2.exceptions.MissingPropertiesError) as excinfo:
NewExtension(property2=42)
assert excinfo.value.properties == ['property1']
assert str(excinfo.value) == "No values for required properties for _Custom: (property1)."
assert str(excinfo.value) == "No values for required properties for _CustomExtension: (property1)."
with pytest.raises(ValueError) as excinfo:
NewExtension(property1='something', property2=4)
@ -681,16 +719,19 @@ def test_custom_extension_wrong_observable_type():
# NewExtension is an extension of DomainName, not File
ext = NewExtension(property1='something')
with pytest.raises(ValueError) as excinfo:
stix2.File(name="abc.txt",
extensions={
"ntfs-ext": ext,
})
stix2.v20.File(
name="abc.txt",
extensions={
"ntfs-ext": ext,
},
)
assert 'Cannot determine extension type' in excinfo.value.reason
@pytest.mark.parametrize("data", [
"""{
@pytest.mark.parametrize(
"data", [
"""{
"keys": [
{
"test123": 123,
@ -698,11 +739,14 @@ def test_custom_extension_wrong_observable_type():
}
]
}""",
])
],
)
def test_custom_extension_with_list_and_dict_properties_observable_type(data):
@stix2.observables.CustomExtension(stix2.UserAccount, 'some-extension', [
('keys', stix2.properties.ListProperty(stix2.properties.DictionaryProperty, required=True))
])
@stix2.v20.CustomExtension(
stix2.v20.UserAccount, 'some-extension', [
('keys', stix2.properties.ListProperty(stix2.properties.DictionaryProperty, required=True)),
],
)
class SomeCustomExtension:
pass
@ -716,30 +760,36 @@ def test_custom_extension_invalid_observable():
class Foo(object):
pass
with pytest.raises(ValueError) as excinfo:
@stix2.observables.CustomExtension(Foo, 'x-new-ext', [
('property1', stix2.properties.StringProperty(required=True)),
])
@stix2.v20.CustomExtension(
Foo, 'x-new-ext', [
('property1', stix2.properties.StringProperty(required=True)),
],
)
class FooExtension():
pass # pragma: no cover
assert str(excinfo.value) == "'observable' must be a valid Observable class!"
class Bar(stix2.observables._Observable):
class Bar(stix2.v20.observables._Observable):
pass
with pytest.raises(ValueError) as excinfo:
@stix2.observables.CustomExtension(Bar, 'x-new-ext', [
('property1', stix2.properties.StringProperty(required=True)),
])
@stix2.v20.CustomExtension(
Bar, 'x-new-ext', [
('property1', stix2.properties.StringProperty(required=True)),
],
)
class BarExtension():
pass
assert "Unknown observable type" in str(excinfo.value)
assert "Custom observables must be created with the @CustomObservable decorator." in str(excinfo.value)
class Baz(stix2.observables._Observable):
class Baz(stix2.v20.observables._Observable):
_type = 'Baz'
with pytest.raises(ValueError) as excinfo:
@stix2.observables.CustomExtension(Baz, 'x-new-ext', [
('property1', stix2.properties.StringProperty(required=True)),
])
@stix2.v20.CustomExtension(
Baz, 'x-new-ext', [
('property1', stix2.properties.StringProperty(required=True)),
],
)
class BazExtension():
pass
assert "Unknown observable type" in str(excinfo.value)
@ -748,17 +798,21 @@ def test_custom_extension_invalid_observable():
def test_custom_extension_invalid_type_name():
with pytest.raises(ValueError) as excinfo:
@stix2.observables.CustomExtension(stix2.File, 'x', {
'property1': stix2.properties.StringProperty(required=True),
})
@stix2.v20.CustomExtension(
stix2.v20.File, 'x', {
'property1': stix2.properties.StringProperty(required=True),
},
)
class FooExtension():
pass # pragma: no cover
assert "Invalid extension type name 'x':" in str(excinfo.value)
with pytest.raises(ValueError) as excinfo:
@stix2.observables.CustomExtension(stix2.File, 'x_new_ext', {
'property1': stix2.properties.StringProperty(required=True),
})
@stix2.v20.CustomExtension(
stix2.File, 'x_new_ext', {
'property1': stix2.properties.StringProperty(required=True),
},
)
class BlaExtension():
pass # pragma: no cover
assert "Invalid extension type name 'x_new_ext':" in str(excinfo.value)
@ -766,7 +820,7 @@ def test_custom_extension_invalid_type_name():
def test_custom_extension_no_properties():
with pytest.raises(ValueError) as excinfo:
@stix2.observables.CustomExtension(stix2.DomainName, 'x-new-ext2', None)
@stix2.v20.CustomExtension(stix2.v20.DomainName, 'x-new-ext2', None)
class BarExtension():
pass
assert "Must supply a list, containing tuples." in str(excinfo.value)
@ -774,7 +828,7 @@ def test_custom_extension_no_properties():
def test_custom_extension_empty_properties():
with pytest.raises(ValueError) as excinfo:
@stix2.observables.CustomExtension(stix2.DomainName, 'x-new-ext2', [])
@stix2.v20.CustomExtension(stix2.v20.DomainName, 'x-new-ext2', [])
class BarExtension():
pass
assert "Must supply a list, containing tuples." in str(excinfo.value)
@ -782,16 +836,18 @@ def test_custom_extension_empty_properties():
def test_custom_extension_dict_properties():
with pytest.raises(ValueError) as excinfo:
@stix2.observables.CustomExtension(stix2.DomainName, 'x-new-ext2', {})
@stix2.v20.CustomExtension(stix2.v20.DomainName, 'x-new-ext2', {})
class BarExtension():
pass
assert "Must supply a list, containing tuples." in str(excinfo.value)
def test_custom_extension_no_init_1():
@stix2.observables.CustomExtension(stix2.DomainName, 'x-new-extension', [
('property1', stix2.properties.StringProperty(required=True)),
])
@stix2.v20.CustomExtension(
stix2.v20.DomainName, 'x-new-extension', [
('property1', stix2.properties.StringProperty(required=True)),
],
)
class NewExt():
pass
@ -800,9 +856,11 @@ def test_custom_extension_no_init_1():
def test_custom_extension_no_init_2():
@stix2.observables.CustomExtension(stix2.DomainName, 'x-new-ext2', [
('property1', stix2.properties.StringProperty(required=True)),
])
@stix2.v20.CustomExtension(
stix2.v20.DomainName, 'x-new-ext2', [
('property1', stix2.properties.StringProperty(required=True)),
],
)
class NewExt2(object):
pass
@ -822,13 +880,14 @@ def test_parse_observable_with_custom_extension():
}
}"""
parsed = stix2.parse_observable(input_str)
parsed = stix2.parse_observable(input_str, version='2.0')
assert parsed.extensions['x-new-ext'].property2 == 12
@pytest.mark.parametrize("data", [
# URL is not in EXT_MAP
"""{
@pytest.mark.parametrize(
"data", [
# URL is not in EXT_MAP
"""{
"type": "url",
"value": "example.com",
"extensions": {
@ -838,8 +897,8 @@ def test_parse_observable_with_custom_extension():
}
}
}""",
# File is in EXT_MAP
"""{
# File is in EXT_MAP
"""{
"type": "file",
"name": "foo.txt",
"extensions": {
@ -849,15 +908,16 @@ def test_parse_observable_with_custom_extension():
}
}
}""",
])
],
)
def test_parse_observable_with_unregistered_custom_extension(data):
with pytest.raises(ValueError) as excinfo:
stix2.parse_observable(data)
stix2.parse_observable(data, version='2.0')
assert "Can't parse unknown extension type" in str(excinfo.value)
parsed_ob = stix2.parse_observable(data, allow_custom=True)
parsed_ob = stix2.parse_observable(data, allow_custom=True, version='2.0')
assert parsed_ob['extensions']['x-foobar-ext']['property1'] == 'foo'
assert not isinstance(parsed_ob['extensions']['x-foobar-ext'], stix2.core._STIXBase)
assert not isinstance(parsed_ob['extensions']['x-foobar-ext'], stix2.base._STIXBase)
def test_register_custom_object():
@ -865,18 +925,19 @@ def test_register_custom_object():
class CustomObject2(object):
_type = 'awesome-object'
stix2._register_type(CustomObject2)
stix2.core._register_object(CustomObject2, version="2.0")
# Note that we will always check against newest OBJ_MAP.
assert (CustomObject2._type, CustomObject2) in stix2.OBJ_MAP.items()
assert (CustomObject2._type, CustomObject2) in stix2.v20.OBJ_MAP.items()
def test_extension_property_location():
assert 'extensions' in stix2.v20.observables.OBJ_MAP_OBSERVABLE['x-new-observable']._properties
assert 'extensions' not in stix2.v20.observables.EXT_MAP['domain-name']['x-new-ext']._properties
assert 'extensions' in stix2.v20.OBJ_MAP_OBSERVABLE['x-new-observable']._properties
assert 'extensions' not in stix2.v20.EXT_MAP['domain-name']['x-new-ext']._properties
@pytest.mark.parametrize("data", [
"""{
@pytest.mark.parametrize(
"data", [
"""{
"type": "x-example",
"id": "x-example--336d8a9f-91f1-46c5-b142-6441bb9f8b8d",
"created": "2018-06-12T16:20:58.059Z",
@ -888,18 +949,23 @@ def test_extension_property_location():
}
}
}""",
])
],
)
def test_custom_object_nested_dictionary(data):
@stix2.sdo.CustomObject('x-example', [
('dictionary', stix2.properties.DictionaryProperty()),
])
@stix2.v20.CustomObject(
'x-example', [
('dictionary', stix2.properties.DictionaryProperty()),
],
)
class Example(object):
def __init__(self, **kwargs):
pass
example = Example(id='x-example--336d8a9f-91f1-46c5-b142-6441bb9f8b8d',
created='2018-06-12T16:20:58.059Z',
modified='2018-06-12T16:20:58.059Z',
dictionary={'key': {'key_b': 'value', 'key_a': 'value'}})
example = Example(
id='x-example--336d8a9f-91f1-46c5-b142-6441bb9f8b8d',
created='2018-06-12T16:20:58.059Z',
modified='2018-06-12T16:20:58.059Z',
dictionary={'key': {'key_b': 'value', 'key_a': 'value'}},
)
assert data == str(example)
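
The reflowed decorator calls throughout this file all use the relocated `stix2.v20.*` registration API. A minimal sketch with a hypothetical type name (`x-demo-type` and `DemoType` are illustrative, not from the repo):

import stix2

@stix2.v20.CustomObject(
    'x-demo-type', [
        ('property1', stix2.properties.StringProperty(required=True)),
    ],
)
class DemoType(object):
    pass

demo = DemoType(property1='something')
assert demo.property1 == 'something'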


@ -1,9 +1,11 @@
import pytest
from stix2.datastore import (CompositeDataSource, DataSink, DataSource,
DataStoreMixin)
from stix2.datastore import (
CompositeDataSource, DataSink, DataSource, DataStoreMixin,
)
from stix2.datastore.filters import Filter
from stix2.test.constants import CAMPAIGN_MORE_KWARGS
from .constants import CAMPAIGN_MORE_KWARGS
def test_datasource_abstract_class_raises_error():
@ -46,15 +48,19 @@ def test_datastore_creator_of_raises():
def test_datastore_relationships_raises():
with pytest.raises(AttributeError) as excinfo:
DataStoreMixin().relationships(obj="indicator--00000000-0000-4000-8000-000000000001",
target_only=True)
DataStoreMixin().relationships(
obj="indicator--00000000-0000-4000-8000-000000000001",
target_only=True,
)
assert "DataStoreMixin has no data source to query" == str(excinfo.value)
def test_datastore_related_to_raises():
with pytest.raises(AttributeError) as excinfo:
DataStoreMixin().related_to(obj="indicator--00000000-0000-4000-8000-000000000001",
target_only=True)
DataStoreMixin().related_to(
obj="indicator--00000000-0000-4000-8000-000000000001",
target_only=True,
)
assert "DataStoreMixin has no data source to query" == str(excinfo.value)
@ -84,15 +90,19 @@ def test_composite_datastore_query_raises_error():
def test_composite_datastore_relationships_raises_error():
with pytest.raises(AttributeError) as excinfo:
CompositeDataSource().relationships(obj="indicator--00000000-0000-4000-8000-000000000001",
target_only=True)
CompositeDataSource().relationships(
obj="indicator--00000000-0000-4000-8000-000000000001",
target_only=True,
)
assert "CompositeDataSource has no data sources" == str(excinfo.value)
def test_composite_datastore_related_to_raises_error():
with pytest.raises(AttributeError) as excinfo:
CompositeDataSource().related_to(obj="indicator--00000000-0000-4000-8000-000000000001",
target_only=True)
CompositeDataSource().related_to(
obj="indicator--00000000-0000-4000-8000-000000000001",
target_only=True,
)
assert "CompositeDataSource has no data sources" == str(excinfo.value)


@ -15,8 +15,10 @@ def test_add_remove_composite_datasource():
with pytest.raises(TypeError) as excinfo:
cds.add_data_sources([ds1, ds2, ds1, ds3])
assert str(excinfo.value) == ("DataSource (to be added) is not of type "
"stix2.DataSource. DataSource type is '<class 'stix2.datastore.memory.MemorySink'>'")
assert str(excinfo.value) == (
"DataSource (to be added) is not of type "
"stix2.DataSource. DataSource type is '<class 'stix2.datastore.memory.MemorySink'>'"
)
cds.add_data_sources([ds1, ds2, ds1])
@ -28,10 +30,12 @@ def test_add_remove_composite_datasource():
def test_composite_datasource_operations(stix_objs1, stix_objs2):
BUNDLE1 = dict(id="bundle--%s" % make_id(),
objects=stix_objs1,
spec_version="2.0",
type="bundle")
BUNDLE1 = dict(
id="bundle--%s" % make_id(),
objects=stix_objs1,
spec_version="2.0",
type="bundle",
)
cds1 = CompositeDataSource()
ds1_1 = MemorySource(stix_data=BUNDLE1)
ds1_2 = MemorySource(stix_data=stix_objs2)
@ -57,11 +61,11 @@ def test_composite_datasource_operations(stix_objs1, stix_objs2):
assert indicator["type"] == "indicator"
query1 = [
Filter("type", "=", "indicator")
Filter("type", "=", "indicator"),
]
query2 = [
Filter("valid_from", "=", "2017-01-27T13:49:53.935382Z")
Filter("valid_from", "=", "2017-01-27T13:49:53.935382Z"),
]
cds1.filters.add(query2)
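
A sketch of the composite-source flow these tests cover, mirroring the 2.0-style fixture data above; it assumes `MemorySource` accepts a list of such dicts as `stix_data`, as in the surrounding tests:

import stix2
from stix2.datastore import CompositeDataSource
from stix2.datastore.filters import Filter
from stix2.datastore.memory import MemorySource

indicator = {
    "type": "indicator",
    "id": "indicator--00000000-0000-4000-8000-000000000001",
    "created": "2017-01-27T13:49:53.935Z",
    "modified": "2017-01-27T13:49:53.935Z",
    "labels": ["url-watchlist"],
    "name": "Malicious site hosting downloader",
    "pattern": "[url:value = 'http://x4z9arb.cn/4712']",
    "valid_from": "2017-01-27T13:49:53.935382Z",
}

cds = CompositeDataSource()
cds.add_data_sources([MemorySource(stix_data=[indicator])])
# The composite source fans the query out to every attached source.
hits = cds.query([Filter("type", "=", "indicator")])
assert hits[0]["id"] == indicator["id"]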


@ -8,18 +8,17 @@ import stat
import pytest
import pytz
from stix2 import (Bundle, Campaign, CustomObject, FileSystemSink,
FileSystemSource, FileSystemStore, Filter, Identity,
Indicator, Malware, MarkingDefinition, Relationship,
TLPMarking, parse, properties)
from stix2.datastore.filesystem import (AuthSet, _find_search_optimizations,
_get_matching_dir_entries,
_timestamp2filename)
import stix2
from stix2.datastore.filesystem import (
AuthSet, _find_search_optimizations, _get_matching_dir_entries,
_timestamp2filename,
)
from stix2.exceptions import STIXError
from stix2.test.constants import (CAMPAIGN_ID, CAMPAIGN_KWARGS, IDENTITY_ID,
IDENTITY_KWARGS, INDICATOR_ID,
INDICATOR_KWARGS, MALWARE_ID, MALWARE_KWARGS,
RELATIONSHIP_IDS)
from .constants import (
CAMPAIGN_ID, CAMPAIGN_KWARGS, IDENTITY_ID, IDENTITY_KWARGS, INDICATOR_ID,
INDICATOR_KWARGS, MALWARE_ID, MALWARE_KWARGS, RELATIONSHIP_IDS,
)
FS_PATH = os.path.join(os.path.dirname(os.path.realpath(__file__)), "stix2_data")
@ -27,7 +26,7 @@ FS_PATH = os.path.join(os.path.dirname(os.path.realpath(__file__)), "stix2_data"
@pytest.fixture
def fs_store():
# create
yield FileSystemStore(FS_PATH)
yield stix2.FileSystemStore(FS_PATH)
# remove campaign dir
shutil.rmtree(os.path.join(FS_PATH, "campaign"), True)
@ -36,7 +35,7 @@ def fs_store():
@pytest.fixture
def fs_source():
# create
fs = FileSystemSource(FS_PATH)
fs = stix2.FileSystemSource(FS_PATH)
assert fs.stix_dir == FS_PATH
yield fs
@ -47,7 +46,7 @@ def fs_source():
@pytest.fixture
def fs_sink():
# create
fs = FileSystemSink(FS_PATH)
fs = stix2.FileSystemSink(FS_PATH)
assert fs.stix_dir == FS_PATH
yield fs
@ -78,7 +77,7 @@ def bad_stix_files():
# bad STIX object
stix_obj = {
"id": "intrusion-set--test-bad-stix",
"spec_version": "2.0"
"spec_version": "2.0",
# no "type" field
}
@ -92,22 +91,24 @@ def bad_stix_files():
@pytest.fixture(scope='module')
def rel_fs_store():
cam = Campaign(id=CAMPAIGN_ID, **CAMPAIGN_KWARGS)
idy = Identity(id=IDENTITY_ID, **IDENTITY_KWARGS)
ind = Indicator(id=INDICATOR_ID, **INDICATOR_KWARGS)
mal = Malware(id=MALWARE_ID, **MALWARE_KWARGS)
rel1 = Relationship(ind, 'indicates', mal, id=RELATIONSHIP_IDS[0])
rel2 = Relationship(mal, 'targets', idy, id=RELATIONSHIP_IDS[1])
rel3 = Relationship(cam, 'uses', mal, id=RELATIONSHIP_IDS[2])
cam = stix2.v20.Campaign(id=CAMPAIGN_ID, **CAMPAIGN_KWARGS)
idy = stix2.v20.Identity(id=IDENTITY_ID, **IDENTITY_KWARGS)
ind = stix2.v20.Indicator(id=INDICATOR_ID, **INDICATOR_KWARGS)
mal = stix2.v20.Malware(id=MALWARE_ID, **MALWARE_KWARGS)
rel1 = stix2.v20.Relationship(ind, 'indicates', mal, id=RELATIONSHIP_IDS[0])
rel2 = stix2.v20.Relationship(mal, 'targets', idy, id=RELATIONSHIP_IDS[1])
rel3 = stix2.v20.Relationship(cam, 'uses', mal, id=RELATIONSHIP_IDS[2])
stix_objs = [cam, idy, ind, mal, rel1, rel2, rel3]
fs = FileSystemStore(FS_PATH)
fs = stix2.FileSystemStore(FS_PATH)
for o in stix_objs:
fs.add(o)
yield fs
for o in stix_objs:
filepath = os.path.join(FS_PATH, o.type, o.id,
_timestamp2filename(o.modified) + '.json')
filepath = os.path.join(
FS_PATH, o.type, o.id,
_timestamp2filename(o.modified) + '.json',
)
# Some test-scoped fixtures (e.g. fs_store) delete all campaigns, so by
# the time this module-scoped fixture tears itself down, it may find
@ -124,13 +125,13 @@ def rel_fs_store():
def test_filesystem_source_nonexistent_folder():
with pytest.raises(ValueError) as excinfo:
FileSystemSource('nonexistent-folder')
stix2.FileSystemSource('nonexistent-folder')
assert "for STIX data does not exist" in str(excinfo)
def test_filesystem_sink_nonexistent_folder():
with pytest.raises(ValueError) as excinfo:
FileSystemSink('nonexistent-folder')
stix2.FileSystemSink('nonexistent-folder')
assert "for STIX data does not exist" in str(excinfo)
@ -158,8 +159,10 @@ def test_filesystem_source_get_object(fs_source):
mal = fs_source.get("malware--6b616fc1-1505-48e3-8b2c-0d19337bff38")
assert mal.id == "malware--6b616fc1-1505-48e3-8b2c-0d19337bff38"
assert mal.name == "Rover"
assert mal.modified == datetime.datetime(2018, 11, 16, 22, 54, 20, 390000,
pytz.utc)
assert mal.modified == datetime.datetime(
2018, 11, 16, 22, 54, 20, 390000,
pytz.utc,
)
def test_filesystem_source_get_nonexistent_object(fs_source):
@ -169,18 +172,20 @@ def test_filesystem_source_get_nonexistent_object(fs_source):
def test_filesystem_source_all_versions(fs_source):
ids = fs_source.all_versions(
"identity--c78cb6e5-0c4b-4611-8297-d1b8b55e40b5"
"identity--c78cb6e5-0c4b-4611-8297-d1b8b55e40b5",
)
assert len(ids) == 2
assert all(id_.id == "identity--c78cb6e5-0c4b-4611-8297-d1b8b55e40b5"
for id_ in ids)
assert all(
id_.id == "identity--c78cb6e5-0c4b-4611-8297-d1b8b55e40b5"
for id_ in ids
)
assert all(id_.name == "The MITRE Corporation" for id_ in ids)
assert all(id_.type == "identity" for id_ in ids)
def test_filesystem_source_query_single(fs_source):
# query2
is_2 = fs_source.query([Filter("external_references.external_id", '=', "T1027")])
is_2 = fs_source.query([stix2.Filter("external_references.external_id", '=', "T1027")])
assert len(is_2) == 1
is_2 = is_2[0]
@ -188,9 +193,9 @@ def test_filesystem_source_query_single(fs_source):
assert is_2.type == "attack-pattern"
def test_filesytem_source_query_multiple(fs_source):
def test_filesystem_source_query_multiple(fs_source):
# query
intrusion_sets = fs_source.query([Filter("type", '=', "intrusion-set")])
intrusion_sets = fs_source.query([stix2.Filter("type", '=', "intrusion-set")])
assert len(intrusion_sets) == 2
assert "intrusion-set--a653431d-6a5e-4600-8ad3-609b5af57064" in [is_.id for is_ in intrusion_sets]
assert "intrusion-set--f3bdec95-3d62-42d9-a840-29630f6cdc1a" in [is_.id for is_ in intrusion_sets]
@ -205,9 +210,9 @@ def test_filesystem_source_backward_compatible(fs_source):
# it.
modified = datetime.datetime(2018, 11, 16, 22, 54, 20, 390000, pytz.utc)
results = fs_source.query([
Filter("type", "=", "malware"),
Filter("id", "=", "malware--6b616fc1-1505-48e3-8b2c-0d19337bff38"),
Filter("modified", "=", modified)
stix2.Filter("type", "=", "malware"),
stix2.Filter("id", "=", "malware--6b616fc1-1505-48e3-8b2c-0d19337bff38"),
stix2.Filter("modified", "=", modified),
])
assert len(results) == 1
@ -220,14 +225,18 @@ def test_filesystem_source_backward_compatible(fs_source):
def test_filesystem_sink_add_python_stix_object(fs_sink, fs_source):
# add python stix object
camp1 = Campaign(name="Hannibal",
objective="Targeting Italian and Spanish Diplomat internet accounts",
aliases=["War Elephant"])
camp1 = stix2.v20.Campaign(
name="Hannibal",
objective="Targeting Italian and Spanish Diplomat internet accounts",
aliases=["War Elephant"],
)
fs_sink.add(camp1)
filepath = os.path.join(FS_PATH, "campaign", camp1.id,
_timestamp2filename(camp1.modified) + ".json")
filepath = os.path.join(
FS_PATH, "campaign", camp1.id,
_timestamp2filename(camp1.modified) + ".json",
)
assert os.path.exists(filepath)
camp1_r = fs_source.get(camp1.id)
@ -247,7 +256,7 @@ def test_filesystem_sink_add_stix_object_dict(fs_sink, fs_source):
"aliases": ["Purple Robes"],
"id": "campaign--8e2e2d2b-17d4-4cbf-938f-98ee46b3cd3f",
"created": "2017-05-31T21:31:53.197755Z",
"modified": "2017-05-31T21:31:53.197755Z"
"modified": "2017-05-31T21:31:53.197755Z",
}
fs_sink.add(camp2)
@ -258,9 +267,11 @@ def test_filesystem_sink_add_stix_object_dict(fs_sink, fs_source):
# as what's in the dict, since the parsing process can enforce a precision
# constraint (e.g. truncate to milliseconds), which results in a slightly
# different name.
camp2obj = parse(camp2)
filepath = os.path.join(FS_PATH, "campaign", camp2obj["id"],
_timestamp2filename(camp2obj["modified"]) + ".json")
camp2obj = stix2.parse(camp2)
filepath = os.path.join(
FS_PATH, "campaign", camp2obj["id"],
_timestamp2filename(camp2obj["modified"]) + ".json",
)
assert os.path.exists(filepath)
@ -286,16 +297,18 @@ def test_filesystem_sink_add_stix_bundle_dict(fs_sink, fs_source):
"aliases": ["Huns"],
"id": "campaign--b8f86161-ccae-49de-973a-4ca320c62478",
"created": "2017-05-31T21:31:53.197755Z",
"modified": "2017-05-31T21:31:53.197755Z"
}
]
"modified": "2017-05-31T21:31:53.197755Z",
},
],
}
fs_sink.add(bund)
camp_obj = parse(bund["objects"][0])
filepath = os.path.join(FS_PATH, "campaign", camp_obj["id"],
_timestamp2filename(camp_obj["modified"]) + ".json")
camp_obj = stix2.parse(bund["objects"][0])
filepath = os.path.join(
FS_PATH, "campaign", camp_obj["id"],
_timestamp2filename(camp_obj["modified"]) + ".json",
)
assert os.path.exists(filepath)
@ -316,10 +329,12 @@ def test_filesystem_sink_add_json_stix_object(fs_sink, fs_source):
fs_sink.add(camp4)
camp4obj = parse(camp4)
filepath = os.path.join(FS_PATH, "campaign",
"campaign--6a6ca372-ba07-42cc-81ef-9840fc1f963d",
_timestamp2filename(camp4obj["modified"]) + ".json")
camp4obj = stix2.parse(camp4)
filepath = os.path.join(
FS_PATH, "campaign",
"campaign--6a6ca372-ba07-42cc-81ef-9840fc1f963d",
_timestamp2filename(camp4obj["modified"]) + ".json",
)
assert os.path.exists(filepath)
@ -339,12 +354,14 @@ def test_filesystem_sink_json_stix_bundle(fs_sink, fs_source):
' "name": "Spartacus", "objective": "Oppressive regimes of Africa and Middle East"}]}'
fs_sink.add(bund2)
bund2obj = parse(bund2)
bund2obj = stix2.parse(bund2)
camp_obj = bund2obj["objects"][0]
filepath = os.path.join(FS_PATH, "campaign",
"campaign--2c03b8bf-82ee-433e-9918-ca2cb6e9534b",
_timestamp2filename(camp_obj["modified"]) + ".json")
filepath = os.path.join(
FS_PATH, "campaign",
"campaign--2c03b8bf-82ee-433e-9918-ca2cb6e9534b",
_timestamp2filename(camp_obj["modified"]) + ".json",
)
assert os.path.exists(filepath)
@ -357,9 +374,11 @@ def test_filesystem_sink_json_stix_bundle(fs_sink, fs_source):
def test_filesystem_sink_add_objects_list(fs_sink, fs_source):
# add list of objects
camp6 = Campaign(name="Comanche",
objective="US Midwest manufacturing firms, oil refineries, and businesses",
aliases=["Horse Warrior"])
camp6 = stix2.v20.Campaign(
name="Comanche",
objective="US Midwest manufacturing firms, oil refineries, and businesses",
aliases=["Horse Warrior"],
)
camp7 = {
"name": "Napolean",
@ -368,19 +387,22 @@ def test_filesystem_sink_add_objects_list(fs_sink, fs_source):
"aliases": ["The Frenchmen"],
"id": "campaign--122818b6-1112-4fb0-b11b-b111107ca70a",
"created": "2017-05-31T21:31:53.197755Z",
"modified": "2017-05-31T21:31:53.197755Z"
"modified": "2017-05-31T21:31:53.197755Z",
}
fs_sink.add([camp6, camp7])
camp7obj = parse(camp7)
camp7obj = stix2.parse(camp7)
camp6filepath = os.path.join(FS_PATH, "campaign", camp6.id,
_timestamp2filename(camp6["modified"]) +
".json")
camp6filepath = os.path.join(
FS_PATH, "campaign", camp6.id,
_timestamp2filename(camp6["modified"]) +
".json",
)
camp7filepath = os.path.join(
FS_PATH, "campaign", "campaign--122818b6-1112-4fb0-b11b-b111107ca70a",
_timestamp2filename(camp7obj["modified"]) + ".json")
_timestamp2filename(camp7obj["modified"]) + ".json",
)
assert os.path.exists(camp6filepath)
assert os.path.exists(camp7filepath)
@ -399,14 +421,14 @@ def test_filesystem_sink_add_objects_list(fs_sink, fs_source):
def test_filesystem_sink_marking(fs_sink):
marking = MarkingDefinition(
marking = stix2.v20.MarkingDefinition(
definition_type="tlp",
definition=TLPMarking(tlp="green")
definition=stix2.v20.TLPMarking(tlp="green"),
)
fs_sink.add(marking)
marking_filepath = os.path.join(
FS_PATH, "marking-definition", marking["id"] + ".json"
FS_PATH, "marking-definition", marking["id"] + ".json",
)
assert os.path.exists(marking_filepath)
@ -436,14 +458,14 @@ def test_filesystem_store_all_versions(fs_store):
def test_filesystem_store_query(fs_store):
# query()
tools = fs_store.query([Filter("labels", "in", "tool")])
tools = fs_store.query([stix2.Filter("labels", "in", "tool")])
assert len(tools) == 2
assert "tool--242f3da3-4425-4d11-8f5c-b842886da966" in [tool.id for tool in tools]
assert "tool--03342581-f790-4f03-ba41-e82e67392e23" in [tool.id for tool in tools]
def test_filesystem_store_query_single_filter(fs_store):
query = Filter("labels", "in", "tool")
query = stix2.Filter("labels", "in", "tool")
tools = fs_store.query(query)
assert len(tools) == 2
assert "tool--242f3da3-4425-4d11-8f5c-b842886da966" in [tool.id for tool in tools]
@ -458,45 +480,53 @@ def test_filesystem_store_empty_query(fs_store):
def test_filesystem_store_query_multiple_filters(fs_store):
fs_store.source.filters.add(Filter("labels", "in", "tool"))
tools = fs_store.query(Filter("id", "=", "tool--242f3da3-4425-4d11-8f5c-b842886da966"))
fs_store.source.filters.add(stix2.Filter("labels", "in", "tool"))
tools = fs_store.query(stix2.Filter("id", "=", "tool--242f3da3-4425-4d11-8f5c-b842886da966"))
assert len(tools) == 1
assert tools[0].id == "tool--242f3da3-4425-4d11-8f5c-b842886da966"
def test_filesystem_store_query_dont_include_type_folder(fs_store):
results = fs_store.query(Filter("type", "!=", "tool"))
results = fs_store.query(stix2.Filter("type", "!=", "tool"))
assert len(results) == 28
def test_filesystem_store_add(fs_store):
# add()
camp1 = Campaign(name="Great Heathen Army",
objective="Targeting the government of United Kingdom and insitutions affiliated with the Church Of England",
aliases=["Ragnar"])
camp1 = stix2.v20.Campaign(
name="Great Heathen Army",
objective="Targeting the government of United Kingdom and insitutions affiliated with the Church Of England",
aliases=["Ragnar"],
)
fs_store.add(camp1)
camp1_r = fs_store.get(camp1.id)
assert camp1_r.id == camp1.id
assert camp1_r.name == camp1.name
filepath = os.path.join(FS_PATH, "campaign", camp1_r.id,
_timestamp2filename(camp1_r.modified) + ".json")
filepath = os.path.join(
FS_PATH, "campaign", camp1_r.id,
_timestamp2filename(camp1_r.modified) + ".json",
)
# remove
os.remove(filepath)
def test_filesystem_store_add_as_bundle():
fs_store = FileSystemStore(FS_PATH, bundlify=True)
fs_store = stix2.FileSystemStore(FS_PATH, bundlify=True)
camp1 = Campaign(name="Great Heathen Army",
objective="Targeting the government of United Kingdom and insitutions affiliated with the Church Of England",
aliases=["Ragnar"])
camp1 = stix2.v20.Campaign(
name="Great Heathen Army",
objective="Targeting the government of United Kingdom and insitutions affiliated with the Church Of England",
aliases=["Ragnar"],
)
fs_store.add(camp1)
filepath = os.path.join(FS_PATH, "campaign", camp1.id,
_timestamp2filename(camp1.modified) + ".json")
filepath = os.path.join(
FS_PATH, "campaign", camp1.id,
_timestamp2filename(camp1.modified) + ".json",
)
with open(filepath) as bundle_file:
assert '"type": "bundle"' in bundle_file.read()
@ -509,7 +539,7 @@ def test_filesystem_store_add_as_bundle():
def test_filesystem_add_bundle_object(fs_store):
bundle = Bundle()
bundle = stix2.v20.Bundle()
fs_store.add(bundle)
@ -524,14 +554,14 @@ def test_filesystem_store_add_invalid_object(fs_store):
def test_filesystem_store_add_marking(fs_store):
marking = MarkingDefinition(
marking = stix2.v20.MarkingDefinition(
definition_type="tlp",
definition=TLPMarking(tlp="green")
definition=stix2.v20.TLPMarking(tlp="green"),
)
fs_store.add(marking)
marking_filepath = os.path.join(
FS_PATH, "marking-definition", marking["id"] + ".json"
FS_PATH, "marking-definition", marking["id"] + ".json",
)
assert os.path.exists(marking_filepath)
@ -544,12 +574,14 @@ def test_filesystem_store_add_marking(fs_store):
def test_filesystem_object_with_custom_property(fs_store):
camp = Campaign(name="Scipio Africanus",
objective="Defeat the Carthaginians",
x_empire="Roman",
allow_custom=True)
camp = stix2.v20.Campaign(
name="Scipio Africanus",
objective="Defeat the Carthaginians",
x_empire="Roman",
allow_custom=True,
)
fs_store.add(camp, True)
fs_store.add(camp)
camp_r = fs_store.get(camp.id)
assert camp_r.id == camp.id
@ -557,12 +589,14 @@ def test_filesystem_object_with_custom_property(fs_store):
def test_filesystem_object_with_custom_property_in_bundle(fs_store):
camp = Campaign(name="Scipio Africanus",
objective="Defeat the Carthaginians",
x_empire="Roman",
allow_custom=True)
camp = stix2.v20.Campaign(
name="Scipio Africanus",
objective="Defeat the Carthaginians",
x_empire="Roman",
allow_custom=True,
)
bundle = Bundle(camp, allow_custom=True)
bundle = stix2.v20.Bundle(camp, allow_custom=True)
fs_store.add(bundle)
camp_r = fs_store.get(camp.id)
@ -571,9 +605,11 @@ def test_filesystem_object_with_custom_property_in_bundle(fs_store):
def test_filesystem_custom_object(fs_store):
@CustomObject('x-new-obj', [
('property1', properties.StringProperty(required=True)),
])
@stix2.v20.CustomObject(
'x-new-obj', [
('property1', stix2.properties.StringProperty(required=True)),
],
)
class NewObj():
pass
@ -581,8 +617,8 @@ def test_filesystem_custom_object(fs_store):
fs_store.add(newobj)
newobj_r = fs_store.get(newobj.id)
assert newobj_r.id == newobj.id
assert newobj_r.property1 == 'something'
assert newobj_r["id"] == newobj["id"]
assert newobj_r["property1"] == 'something'
# remove dir
shutil.rmtree(os.path.join(FS_PATH, "x-new-obj"), True)
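The path assertions in this module all follow one on-disk layout: SDOs/SROs are written to <stix_dir>/<type>/<id>/<timestamp>.json, with the file name derived from the object's modified timestamp (marking definitions, which are not versioned, sit directly under their type directory as <id>.json). A condensed round-trip sketch using the module-level FS_PATH and the helper imported above:

import os

import stix2
from stix2.datastore.filesystem import _timestamp2filename

store = stix2.FileSystemStore(FS_PATH)
camp = stix2.v20.Campaign(
    name="Hannibal",
    objective="Targeting Italian and Spanish Diplomat internet accounts",
    aliases=["War Elephant"],
)
store.add(camp)

# Layout rule exercised throughout these tests:
expected = os.path.join(
    FS_PATH, "campaign", camp.id,
    _timestamp2filename(camp.modified) + ".json",
)
assert os.path.exists(expected)
assert store.get(camp.id).name == "Hannibal"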
@ -690,7 +726,7 @@ def test_auth_set_black1():
def test_optimize_types1():
filters = [
Filter("type", "=", "foo")
stix2.Filter("type", "=", "foo"),
]
auth_types, auth_ids = _find_search_optimizations(filters)
@ -703,8 +739,8 @@ def test_optimize_types1():
def test_optimize_types2():
filters = [
Filter("type", "=", "foo"),
Filter("type", "=", "bar")
stix2.Filter("type", "=", "foo"),
stix2.Filter("type", "=", "bar"),
]
auth_types, auth_ids = _find_search_optimizations(filters)
@ -717,8 +753,8 @@ def test_optimize_types2():
def test_optimize_types3():
filters = [
Filter("type", "in", ["A", "B", "C"]),
Filter("type", "in", ["B", "C", "D"])
stix2.Filter("type", "in", ["A", "B", "C"]),
stix2.Filter("type", "in", ["B", "C", "D"]),
]
auth_types, auth_ids = _find_search_optimizations(filters)
@ -731,8 +767,8 @@ def test_optimize_types3():
def test_optimize_types4():
filters = [
Filter("type", "in", ["A", "B", "C"]),
Filter("type", "in", ["D", "E", "F"])
stix2.Filter("type", "in", ["A", "B", "C"]),
stix2.Filter("type", "in", ["D", "E", "F"]),
]
auth_types, auth_ids = _find_search_optimizations(filters)
@ -745,8 +781,8 @@ def test_optimize_types4():
def test_optimize_types5():
filters = [
Filter("type", "in", ["foo", "bar"]),
Filter("type", "!=", "bar")
stix2.Filter("type", "in", ["foo", "bar"]),
stix2.Filter("type", "!=", "bar"),
]
auth_types, auth_ids = _find_search_optimizations(filters)
@ -759,8 +795,8 @@ def test_optimize_types5():
def test_optimize_types6():
filters = [
Filter("type", "!=", "foo"),
Filter("type", "!=", "bar")
stix2.Filter("type", "!=", "foo"),
stix2.Filter("type", "!=", "bar"),
]
auth_types, auth_ids = _find_search_optimizations(filters)
@ -773,8 +809,8 @@ def test_optimize_types6():
def test_optimize_types7():
filters = [
Filter("type", "=", "foo"),
Filter("type", "!=", "foo")
stix2.Filter("type", "=", "foo"),
stix2.Filter("type", "!=", "foo"),
]
auth_types, auth_ids = _find_search_optimizations(filters)
@ -798,8 +834,8 @@ def test_optimize_types8():
def test_optimize_types_ids1():
filters = [
Filter("type", "in", ["foo", "bar"]),
Filter("id", "=", "foo--00000000-0000-0000-0000-000000000000")
stix2.Filter("type", "in", ["foo", "bar"]),
stix2.Filter("id", "=", "foo--00000000-0000-0000-0000-000000000000"),
]
auth_types, auth_ids = _find_search_optimizations(filters)
@ -812,8 +848,8 @@ def test_optimize_types_ids1():
def test_optimize_types_ids2():
filters = [
Filter("type", "=", "foo"),
Filter("id", "=", "bar--00000000-0000-0000-0000-000000000000")
stix2.Filter("type", "=", "foo"),
stix2.Filter("id", "=", "bar--00000000-0000-0000-0000-000000000000"),
]
auth_types, auth_ids = _find_search_optimizations(filters)
@ -826,8 +862,8 @@ def test_optimize_types_ids2():
def test_optimize_types_ids3():
filters = [
Filter("type", "in", ["foo", "bar"]),
Filter("id", "!=", "bar--00000000-0000-0000-0000-000000000000")
stix2.Filter("type", "in", ["foo", "bar"]),
stix2.Filter("id", "!=", "bar--00000000-0000-0000-0000-000000000000"),
]
auth_types, auth_ids = _find_search_optimizations(filters)
@ -840,12 +876,14 @@ def test_optimize_types_ids3():
def test_optimize_types_ids4():
filters = [
Filter("type", "in", ["A", "B", "C"]),
Filter("id", "in", [
"B--00000000-0000-0000-0000-000000000000",
"C--00000000-0000-0000-0000-000000000000",
"D--00000000-0000-0000-0000-000000000000",
])
stix2.Filter("type", "in", ["A", "B", "C"]),
stix2.Filter(
"id", "in", [
"B--00000000-0000-0000-0000-000000000000",
"C--00000000-0000-0000-0000-000000000000",
"D--00000000-0000-0000-0000-000000000000",
],
),
]
auth_types, auth_ids = _find_search_optimizations(filters)
@ -855,20 +893,22 @@ def test_optimize_types_ids4():
assert auth_ids.auth_type == AuthSet.WHITE
assert auth_ids.values == {
"B--00000000-0000-0000-0000-000000000000",
"C--00000000-0000-0000-0000-000000000000"
"C--00000000-0000-0000-0000-000000000000",
}
def test_optimize_types_ids5():
filters = [
Filter("type", "in", ["A", "B", "C"]),
Filter("type", "!=", "C"),
Filter("id", "in", [
"B--00000000-0000-0000-0000-000000000000",
"C--00000000-0000-0000-0000-000000000000",
"D--00000000-0000-0000-0000-000000000000"
]),
Filter("id", "!=", "D--00000000-0000-0000-0000-000000000000")
stix2.Filter("type", "in", ["A", "B", "C"]),
stix2.Filter("type", "!=", "C"),
stix2.Filter(
"id", "in", [
"B--00000000-0000-0000-0000-000000000000",
"C--00000000-0000-0000-0000-000000000000",
"D--00000000-0000-0000-0000-000000000000",
],
),
stix2.Filter("id", "!=", "D--00000000-0000-0000-0000-000000000000"),
]
auth_types, auth_ids = _find_search_optimizations(filters)
@ -881,7 +921,7 @@ def test_optimize_types_ids5():
def test_optimize_types_ids6():
filters = [
Filter("id", "=", "A--00000000-0000-0000-0000-000000000000")
stix2.Filter("id", "=", "A--00000000-0000-0000-0000-000000000000"),
]
auth_types, auth_ids = _find_search_optimizations(filters)
@ -895,7 +935,7 @@ def test_optimize_types_ids6():
def test_search_auth_set_white1():
auth_set = AuthSet(
{"attack-pattern", "doesntexist"},
set()
set(),
)
results = _get_matching_dir_entries(FS_PATH, auth_set, stat.S_ISDIR)
@ -909,19 +949,19 @@ def test_search_auth_set_white2():
auth_set = AuthSet(
{
"malware--6b616fc1-1505-48e3-8b2c-0d19337bff38",
"malware--92ec0cbd-2c30-44a2-b270-73f4ec949841"
"malware--92ec0cbd-2c30-44a2-b270-73f4ec949841",
},
{
"malware--92ec0cbd-2c30-44a2-b270-73f4ec949841",
"malware--96b08451-b27a-4ff6-893f-790e26393a8e",
"doesntexist"
}
"doesntexist",
},
)
results = _get_matching_dir_entries(
os.path.join(FS_PATH, "malware"),
auth_set, stat.S_ISDIR
auth_set, stat.S_ISDIR,
)
assert results == ["malware--6b616fc1-1505-48e3-8b2c-0d19337bff38"]
@ -931,9 +971,11 @@ def test_search_auth_set_white3():
auth_set = AuthSet({"20170531213258226477", "doesntexist"}, set())
results = _get_matching_dir_entries(
os.path.join(FS_PATH, "malware",
"malware--6b616fc1-1505-48e3-8b2c-0d19337bff38"),
auth_set, stat.S_ISREG, ".json"
os.path.join(
FS_PATH, "malware",
"malware--6b616fc1-1505-48e3-8b2c-0d19337bff38",
),
auth_set, stat.S_ISREG, ".json",
)
assert results == ["20170531213258226477.json"]
@ -942,23 +984,23 @@ def test_search_auth_set_white3():
def test_search_auth_set_black1():
auth_set = AuthSet(
None,
{"tool--242f3da3-4425-4d11-8f5c-b842886da966", "doesntexist"}
{"tool--242f3da3-4425-4d11-8f5c-b842886da966", "doesntexist"},
)
results = _get_matching_dir_entries(
os.path.join(FS_PATH, "tool"),
auth_set, stat.S_ISDIR
auth_set, stat.S_ISDIR,
)
assert set(results) == {
"tool--03342581-f790-4f03-ba41-e82e67392e23"
"tool--03342581-f790-4f03-ba41-e82e67392e23",
}
def test_search_auth_set_white_empty():
auth_set = AuthSet(
set(),
set()
set(),
)
results = _get_matching_dir_entries(FS_PATH, auth_set, stat.S_ISDIR)
@ -971,7 +1013,7 @@ def test_search_auth_set_black_empty(rel_fs_store):
# predictable (it adds "campaign").
auth_set = AuthSet(
None,
set()
set(),
)
results = _get_matching_dir_entries(FS_PATH, auth_set, stat.S_ISDIR)
@ -987,14 +1029,14 @@ def test_search_auth_set_black_empty(rel_fs_store):
"malware",
"marking-definition",
"relationship",
"tool"
"tool",
}
def test_timestamp2filename_naive():
dt = datetime.datetime(
2010, 6, 15,
8, 30, 10, 1234
8, 30, 10, 1234,
)
filename = _timestamp2filename(dt)
@ -1007,7 +1049,7 @@ def test_timestamp2filename_tz():
dt = datetime.datetime(
2010, 6, 15,
7, 30, 10, 1234,
tz
tz,
)
filename = _timestamp2filename(dt)

View File

@ -10,23 +10,23 @@ stix_objs = [
"description": "\n\nTITLE:\n\tPoison Ivy",
"id": "malware--fdd60b30-b67c-41e3-b0b9-f01faf20d111",
"labels": [
"remote-access-trojan"
"remote-access-trojan",
],
"modified": "2017-01-27T13:49:53.997Z",
"name": "Poison Ivy",
"type": "malware"
"type": "malware",
},
{
"created": "2014-05-08T09:00:00.000Z",
"id": "indicator--a932fcc6-e032-476c-826f-cb970a5a1ade",
"labels": [
"file-hash-watchlist"
"file-hash-watchlist",
],
"modified": "2014-05-08T09:00:00.000Z",
"name": "File hash for Poison Ivy variant",
"pattern": "[file:hashes.'SHA-256' = 'ef537f25c895bfa782526529a9b63d97aa631564d5d789c2b765448c8635fb6c']",
"type": "indicator",
"valid_from": "2014-05-08T09:00:00.000000Z"
"valid_from": "2014-05-08T09:00:00.000000Z",
},
{
"created": "2014-05-08T09:00:00.000Z",
@ -34,20 +34,20 @@ stix_objs = [
{
"marking_ref": "marking-definition--5e57c739-391a-4eb3-b6be-7d15ca92d5ed",
"selectors": [
"relationship_type"
]
}
"relationship_type",
],
},
],
"id": "relationship--2f9a9aa9-108a-4333-83e2-4fb25add0463",
"modified": "2014-05-08T09:00:00.000Z",
"object_marking_refs": [
"marking-definition--613f2e26-407d-48c7-9eca-b8e91df99dc9"
"marking-definition--613f2e26-407d-48c7-9eca-b8e91df99dc9",
],
"relationship_type": "indicates",
"revoked": True,
"source_ref": "indicator--a932fcc6-e032-476c-826f-cb970a5a1ade",
"target_ref": "malware--fdd60b30-b67c-41e3-b0b9-f01faf20d111",
"type": "relationship"
"type": "relationship",
},
{
"id": "vulnerability--ee916c28-c7a4-4d0d-ad56-a8d357f89fef",
@ -60,10 +60,10 @@ stix_objs = [
"external_references": [
{
"source_name": "cve",
"external_id": "CVE-2014-0160"
}
"external_id": "CVE-2014-0160",
},
],
"labels": ["heartbleed", "has-logo"]
"labels": ["heartbleed", "has-logo"],
},
{
"type": "observed-data",
@ -77,11 +77,11 @@ stix_objs = [
"objects": {
"0": {
"type": "file",
"name": "HAL 9000.exe"
}
}
"name": "HAL 9000.exe",
},
},
}
},
]
@ -406,8 +406,10 @@ def test_filters4():
# Assert invalid Filter cannot be created
with pytest.raises(ValueError) as excinfo:
Filter("modified", "?", "2017-01-27T13:49:53.935Z")
assert str(excinfo.value) == ("Filter operator '?' not supported "
"for specified property: 'modified'")
assert str(excinfo.value) == (
"Filter operator '?' not supported "
"for specified property: 'modified'"
)
def test_filters5(stix_objs2, real_stix_objs2):
@ -447,7 +449,7 @@ def test_filters7(stix_objs2, real_stix_objs2):
"0": {
"type": "file",
"hashes": {
"SHA-256": "35a01331e9ad96f751278b891b6ea09699806faedfa237d40513d92ad1b7100f"
"SHA-256": "35a01331e9ad96f751278b891b6ea09699806faedfa237d40513d92ad1b7100f",
},
"extensions": {
"pdf-ext": {
@ -457,14 +459,14 @@ def test_filters7(stix_objs2, real_stix_objs2):
"Author": "Adobe Systems Incorporated",
"Creator": "Adobe FrameMaker 5.5.3 for Power Macintosh",
"Producer": "Acrobat Distiller 3.01 for Power Macintosh",
"CreationDate": "20070412090123-02"
"CreationDate": "20070412090123-02",
},
"pdfid0": "DFCE52BD827ECF765649852119D",
"pdfid1": "57A1E0F9ED2AE523E313C"
}
}
}
}
"pdfid1": "57A1E0F9ED2AE523E313C",
},
},
},
},
}
stix_objects = list(stix_objs2) + [obsvd_data_obj]

View File

@ -3,111 +3,113 @@ import shutil
import pytest
from stix2 import (Bundle, Campaign, CustomObject, Filter, Identity, Indicator,
Malware, MemorySource, MemoryStore, Relationship,
properties)
from stix2 import Filter, MemorySource, MemoryStore, properties
from stix2.datastore import make_id
from stix2.utils import parse_into_datetime
from stix2.v20 import (
Bundle, Campaign, CustomObject, Identity, Indicator, Malware, Relationship,
)
from .constants import (CAMPAIGN_ID, CAMPAIGN_KWARGS, IDENTITY_ID,
IDENTITY_KWARGS, INDICATOR_ID, INDICATOR_KWARGS,
MALWARE_ID, MALWARE_KWARGS, RELATIONSHIP_IDS)
from .constants import (
CAMPAIGN_ID, CAMPAIGN_KWARGS, IDENTITY_ID, IDENTITY_KWARGS, INDICATOR_ID,
INDICATOR_KWARGS, MALWARE_ID, MALWARE_KWARGS, RELATIONSHIP_IDS,
)
IND1 = {
"created": "2017-01-27T13:49:53.935Z",
"id": "indicator--00000000-0000-4000-8000-000000000001",
"labels": [
"url-watchlist"
"url-watchlist",
],
"modified": "2017-01-27T13:49:53.935Z",
"name": "Malicious site hosting downloader",
"pattern": "[url:value = 'http://x4z9arb.cn/4712']",
"type": "indicator",
"valid_from": "2017-01-27T13:49:53.935382Z"
"valid_from": "2017-01-27T13:49:53.935382Z",
}
IND2 = {
"created": "2017-01-27T13:49:53.935Z",
"id": "indicator--00000000-0000-4000-8000-000000000001",
"labels": [
"url-watchlist"
"url-watchlist",
],
"modified": "2017-01-27T13:49:53.935Z",
"name": "Malicious site hosting downloader",
"pattern": "[url:value = 'http://x4z9arb.cn/4712']",
"type": "indicator",
"valid_from": "2017-01-27T13:49:53.935382Z"
"valid_from": "2017-01-27T13:49:53.935382Z",
}
IND3 = {
"created": "2017-01-27T13:49:53.935Z",
"id": "indicator--00000000-0000-4000-8000-000000000001",
"labels": [
"url-watchlist"
"url-watchlist",
],
"modified": "2017-01-27T13:49:53.936Z",
"name": "Malicious site hosting downloader",
"pattern": "[url:value = 'http://x4z9arb.cn/4712']",
"type": "indicator",
"valid_from": "2017-01-27T13:49:53.935382Z"
"valid_from": "2017-01-27T13:49:53.935382Z",
}
IND4 = {
"created": "2017-01-27T13:49:53.935Z",
"id": "indicator--00000000-0000-4000-8000-000000000002",
"labels": [
"url-watchlist"
"url-watchlist",
],
"modified": "2017-01-27T13:49:53.935Z",
"name": "Malicious site hosting downloader",
"pattern": "[url:value = 'http://x4z9arb.cn/4712']",
"type": "indicator",
"valid_from": "2017-01-27T13:49:53.935382Z"
"valid_from": "2017-01-27T13:49:53.935382Z",
}
IND5 = {
"created": "2017-01-27T13:49:53.935Z",
"id": "indicator--00000000-0000-4000-8000-000000000002",
"labels": [
"url-watchlist"
"url-watchlist",
],
"modified": "2017-01-27T13:49:53.935Z",
"name": "Malicious site hosting downloader",
"pattern": "[url:value = 'http://x4z9arb.cn/4712']",
"type": "indicator",
"valid_from": "2017-01-27T13:49:53.935382Z"
"valid_from": "2017-01-27T13:49:53.935382Z",
}
IND6 = {
"created": "2017-01-27T13:49:53.935Z",
"id": "indicator--00000000-0000-4000-8000-000000000001",
"labels": [
"url-watchlist"
"url-watchlist",
],
"modified": "2017-01-31T13:49:53.935Z",
"name": "Malicious site hosting downloader",
"pattern": "[url:value = 'http://x4z9arb.cn/4712']",
"type": "indicator",
"valid_from": "2017-01-27T13:49:53.935382Z"
"valid_from": "2017-01-27T13:49:53.935382Z",
}
IND7 = {
"created": "2017-01-27T13:49:53.935Z",
"id": "indicator--00000000-0000-4000-8000-000000000002",
"labels": [
"url-watchlist"
"url-watchlist",
],
"modified": "2017-01-27T13:49:53.935Z",
"name": "Malicious site hosting downloader",
"pattern": "[url:value = 'http://x4z9arb.cn/4712']",
"type": "indicator",
"valid_from": "2017-01-27T13:49:53.935382Z"
"valid_from": "2017-01-27T13:49:53.935382Z",
}
IND8 = {
"created": "2017-01-27T13:49:53.935Z",
"id": "indicator--00000000-0000-4000-8000-000000000002",
"labels": [
"url-watchlist"
"url-watchlist",
],
"modified": "2017-01-27T13:49:53.935Z",
"name": "Malicious site hosting downloader",
"pattern": "[url:value = 'http://x4z9arb.cn/4712']",
"type": "indicator",
"valid_from": "2017-01-27T13:49:53.935382Z"
"valid_from": "2017-01-27T13:49:53.935382Z",
}
STIX_OBJS2 = [IND6, IND7, IND8]
@ -139,11 +141,22 @@ def rel_mem_store():
@pytest.fixture
def fs_mem_store(request, mem_store):
filename = 'memory_test/mem_store.json'
mem_store.save_to_file(filename)
filename = mem_store.save_to_file('memory_test/mem_store.json')
def fin():
# teardown, excecuted regardless of exception
# teardown, executed regardless of exception
shutil.rmtree(os.path.dirname(filename))
request.addfinalizer(fin)
return filename
@pytest.fixture
def fs_mem_store_no_name(request, mem_store):
filename = mem_store.save_to_file('memory_test/')
def fin():
# teardown, executed regardless of exception
shutil.rmtree(os.path.dirname(filename))
request.addfinalizer(fin)
@ -162,10 +175,12 @@ def test_memory_source_get_nonexistant_object(mem_source):
def test_memory_store_all_versions(mem_store):
# Add bundle of items to sink
mem_store.add(dict(id="bundle--%s" % make_id(),
objects=STIX_OBJS2,
spec_version="2.0",
type="bundle"))
mem_store.add(dict(
id="bundle--%s" % make_id(),
objects=STIX_OBJS2,
spec_version="2.0",
type="bundle",
))
resp = mem_store.all_versions("indicator--00000000-0000-4000-8000-000000000001")
assert len(resp) == 3
@ -203,7 +218,7 @@ def test_memory_store_query_multiple_filters(mem_store):
assert len(resp) == 2
def test_memory_store_save_load_file(mem_store, fs_mem_store):
def test_memory_store_save_load_file(fs_mem_store):
filename = fs_mem_store # the fixture fs_mem_store yields filename where the memory store was written to
# STIX2 contents of mem_store have already been written to file
@ -219,6 +234,22 @@ def test_memory_store_save_load_file(mem_store, fs_mem_store):
assert mem_store2.get("indicator--00000000-0000-4000-8000-000000000001")
def test_memory_store_save_load_file_no_name_provided(fs_mem_store_no_name):
filename = fs_mem_store_no_name # the fixture fs_mem_store yields filename where the memory store was written to
# STIX2 contents of mem_store have already been written to file
# (this is done in fixture 'fs_mem_store'), so can already read-in here
contents = open(os.path.abspath(filename)).read()
assert '"id": "indicator--00000000-0000-4000-8000-000000000001",' in contents
assert '"id": "indicator--00000000-0000-4000-8000-000000000001",' in contents
mem_store2 = MemoryStore()
mem_store2.load_from_file(filename)
assert mem_store2.get("indicator--00000000-0000-4000-8000-000000000001")
assert mem_store2.get("indicator--00000000-0000-4000-8000-000000000001")
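The fixtures behind the two tests above rely on the updated MemoryStore.save_to_file(), which now returns the path it wrote and also accepts a bare directory, in which case the file name is generated. A minimal round-trip sketch, reusing the IND1 dict defined at the top of this module:

from stix2 import MemoryStore

store = MemoryStore([IND1])                  # IND1: STIX 2.0 indicator dict from above
path = store.save_to_file('memory_test/')    # returns the generated file path

restored = MemoryStore()
restored.load_from_file(path)
assert restored.get("indicator--00000000-0000-4000-8000-000000000001")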
def test_memory_store_add_invalid_object(mem_store):
ind = ('indicator', IND1) # tuple isn't valid
with pytest.raises(TypeError):
@ -226,23 +257,67 @@ def test_memory_store_add_invalid_object(mem_store):
def test_memory_store_object_with_custom_property(mem_store):
camp = Campaign(name="Scipio Africanus",
objective="Defeat the Carthaginians",
x_empire="Roman",
allow_custom=True)
camp = Campaign(
name="Scipio Africanus",
objective="Defeat the Carthaginians",
x_empire="Roman",
allow_custom=True,
)
mem_store.add(camp, True)
mem_store.add(camp)
camp_r = mem_store.get(camp.id)
assert camp_r.id == camp.id
assert camp_r.x_empire == camp.x_empire
def test_memory_store_object_creator_of_present(mem_store):
camp = Campaign(
name="Scipio Africanus",
objective="Defeat the Carthaginians",
created_by_ref="identity--e4196283-7420-4277-a7a3-d57f61ef1389",
x_empire="Roman",
allow_custom=True,
)
iden = Identity(
id="identity--e4196283-7420-4277-a7a3-d57f61ef1389",
name="Foo Corp.",
identity_class="corporation",
)
mem_store.add(camp)
mem_store.add(iden)
camp_r = mem_store.get(camp.id)
assert camp_r.id == camp.id
assert camp_r.x_empire == camp.x_empire
assert mem_store.creator_of(camp_r) == iden
def test_memory_store_object_creator_of_missing(mem_store):
camp = Campaign(
name="Scipio Africanus",
objective="Defeat the Carthaginians",
x_empire="Roman",
allow_custom=True,
)
mem_store.add(camp)
camp_r = mem_store.get(camp.id)
assert camp_r.id == camp.id
assert camp_r.x_empire == camp.x_empire
assert mem_store.creator_of(camp) is None
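The two creator_of tests reduce to a simple rule: the store resolves the object's created_by_ref against its own contents and hands back the matching Identity, or None when there is nothing to resolve. A condensed sketch with the v20 classes imported at the top of this module:

from stix2 import MemoryStore
from stix2.v20 import Campaign, Identity

store = MemoryStore()
creator = Identity(name="Foo Corp.", identity_class="corporation")
camp = Campaign(
    name="Scipio Africanus",
    objective="Defeat the Carthaginians",
    created_by_ref=creator.id,
)

store.add(camp)
store.add(creator)
assert store.creator_of(camp) == creator   # created_by_ref resolves to the stored Identity

anon = Campaign(name="Scipio Africanus", objective="Defeat the Carthaginians")
store.add(anon)
assert store.creator_of(anon) is None      # no created_by_ref, nothing to look up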
def test_memory_store_object_with_custom_property_in_bundle(mem_store):
camp = Campaign(name="Scipio Africanus",
objective="Defeat the Carthaginians",
x_empire="Roman",
allow_custom=True)
camp = Campaign(
name="Scipio Africanus",
objective="Defeat the Carthaginians",
x_empire="Roman",
allow_custom=True,
)
bundle = Bundle(camp, allow_custom=True)
mem_store.add(bundle)
@ -253,14 +328,16 @@ def test_memory_store_object_with_custom_property_in_bundle(mem_store):
def test_memory_store_custom_object(mem_store):
@CustomObject('x-new-obj', [
('property1', properties.StringProperty(required=True)),
])
@CustomObject(
'x-new-obj', [
('property1', properties.StringProperty(required=True)),
],
)
class NewObj():
pass
newobj = NewObj(property1='something')
mem_store.add(newobj, True)
mem_store.add(newobj)
newobj_r = mem_store.get(newobj.id)
assert newobj_r.id == newobj.id
@ -337,3 +414,12 @@ def test_related_to_by_target(rel_mem_store):
assert len(resp) == 2
assert any(x['id'] == CAMPAIGN_ID for x in resp)
assert any(x['id'] == INDICATOR_ID for x in resp)
def test_object_family_internal_components(mem_source):
# Testing internal components.
str_representation = str(mem_source._data['indicator--00000000-0000-4000-8000-000000000001'])
repr_representation = repr(mem_source._data['indicator--00000000-0000-4000-8000-000000000001'])
assert "latest=2017-01-27 13:49:53.936000+00:00>>" in str_representation
assert "latest=2017-01-27 13:49:53.936000+00:00>>" in repr_representation

View File

@ -3,10 +3,10 @@ import json
from medallion.filters.basic_filter import BasicFilter
import pytest
from requests.models import Response
import six
from taxii2client import Collection, _filter_kwargs_to_query_params
from stix2 import (Bundle, TAXIICollectionSink, TAXIICollectionSource,
TAXIICollectionStore, ThreatActor)
import stix2
from stix2.datastore import DataSourceError
from stix2.datastore.filters import Filter
@ -18,50 +18,52 @@ class MockTAXIICollectionEndpoint(Collection):
def __init__(self, url, collection_info):
super(MockTAXIICollectionEndpoint, self).__init__(
url, collection_info=collection_info
url, collection_info=collection_info,
)
self.objects = []
def add_objects(self, bundle):
self._verify_can_write()
if isinstance(bundle, str):
bundle = json.loads(bundle)
if isinstance(bundle, six.string_types):
bundle = json.loads(bundle, encoding='utf-8')
for object in bundle.get("objects", []):
self.objects.append(object)
def get_objects(self, **filter_kwargs):
self._verify_can_read()
query_params = _filter_kwargs_to_query_params(filter_kwargs)
if not isinstance(query_params, dict):
query_params = json.loads(query_params)
full_filter = BasicFilter(query_params or {})
assert isinstance(query_params, dict)
full_filter = BasicFilter(query_params)
objs = full_filter.process_filter(
self.objects,
("id", "type", "version"),
[]
[],
)
if objs:
return Bundle(objects=objs)
return stix2.v20.Bundle(objects=objs)
else:
resp = Response()
resp.status_code = 404
resp.raise_for_status()
def get_object(self, id, version=None):
def get_object(self, id, **filter_kwargs):
self._verify_can_read()
query_params = None
if version:
query_params = _filter_kwargs_to_query_params({"version": version})
if query_params:
query_params = json.loads(query_params)
full_filter = BasicFilter(query_params or {})
objs = full_filter.process_filter(
self.objects,
("version",),
[]
)
if objs:
return Bundle(objects=objs)
query_params = _filter_kwargs_to_query_params(filter_kwargs)
assert isinstance(query_params, dict)
full_filter = BasicFilter(query_params)
# In this endpoint we must first filter objects by id beforehand.
objects = [x for x in self.objects if x["id"] == id]
if objects:
filtered_objects = full_filter.process_filter(
objects,
("version",),
[],
)
else:
filtered_objects = []
if filtered_objects:
return stix2.v20.Bundle(objects=filtered_objects)
else:
resp = Response()
resp.status_code = 404
@ -70,16 +72,18 @@ class MockTAXIICollectionEndpoint(Collection):
@pytest.fixture
def collection(stix_objs1):
mock = MockTAXIICollectionEndpoint(COLLECTION_URL, {
"id": "91a7b528-80eb-42ed-a74d-c6fbd5a26116",
"title": "Writable Collection",
"description": "This collection is a dropbox for submitting indicators",
"can_read": True,
"can_write": True,
"media_types": [
"application/vnd.oasis.stix+json; version=2.0"
]
})
mock = MockTAXIICollectionEndpoint(
COLLECTION_URL, {
"id": "91a7b528-80eb-42ed-a74d-c6fbd5a26116",
"title": "Writable Collection",
"description": "This collection is a dropbox for submitting indicators",
"can_read": True,
"can_write": True,
"media_types": [
"application/vnd.oasis.stix+json; version=2.0",
],
},
)
mock.objects.extend(stix_objs1)
return mock
@ -87,94 +91,118 @@ def collection(stix_objs1):
@pytest.fixture
def collection_no_rw_access(stix_objs1):
mock = MockTAXIICollectionEndpoint(COLLECTION_URL, {
"id": "91a7b528-80eb-42ed-a74d-c6fbd5a26116",
"title": "Not writeable or readable Collection",
"description": "This collection is a dropbox for submitting indicators",
"can_read": False,
"can_write": False,
"media_types": [
"application/vnd.oasis.stix+json; version=2.0"
]
})
mock = MockTAXIICollectionEndpoint(
COLLECTION_URL, {
"id": "91a7b528-80eb-42ed-a74d-c6fbd5a26116",
"title": "Not writeable or readable Collection",
"description": "This collection is a dropbox for submitting indicators",
"can_read": False,
"can_write": False,
"media_types": [
"application/vnd.oasis.stix+json; version=2.0",
],
},
)
mock.objects.extend(stix_objs1)
return mock
def test_ds_taxii(collection):
ds = TAXIICollectionSource(collection)
ds = stix2.TAXIICollectionSource(collection)
assert ds.collection is not None
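The mock endpoint above stands in for a live TAXII 2.0 server; against a real deployment the same pattern is just a taxii2client Collection handed to the stix2 store classes. A hedged sketch (the URL is hypothetical and a reachable, readable collection with appropriate credentials is assumed):

import stix2
from taxii2client import Collection

# Hypothetical endpoint; reuses the collection UUID from the mock above.
collection = Collection("https://example.com/api1/collections/91a7b528-80eb-42ed-a74d-c6fbd5a26116/")
source = stix2.TAXIICollectionSource(collection)
indicator = source.get("indicator--00000000-0000-4000-8000-000000000001")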
def test_add_stix2_object(collection):
tc_sink = TAXIICollectionSink(collection)
tc_sink = stix2.TAXIICollectionSink(collection)
# create new STIX threat-actor
ta = ThreatActor(name="Teddy Bear",
labels=["nation-state"],
sophistication="innovator",
resource_level="government",
goals=[
"compromising environment NGOs",
"water-hole attacks geared towards energy sector",
])
ta = stix2.v20.ThreatActor(
name="Teddy Bear",
labels=["nation-state"],
sophistication="innovator",
resource_level="government",
goals=[
"compromising environment NGOs",
"water-hole attacks geared towards energy sector",
],
)
tc_sink.add(ta)
def test_add_stix2_with_custom_object(collection):
tc_sink = TAXIICollectionStore(collection, allow_custom=True)
tc_sink = stix2.TAXIICollectionStore(collection, allow_custom=True)
# create new STIX threat-actor
ta = ThreatActor(name="Teddy Bear",
labels=["nation-state"],
sophistication="innovator",
resource_level="government",
goals=[
"compromising environment NGOs",
"water-hole attacks geared towards energy sector",
],
foo="bar",
allow_custom=True)
ta = stix2.v20.ThreatActor(
name="Teddy Bear",
labels=["nation-state"],
sophistication="innovator",
resource_level="government",
goals=[
"compromising environment NGOs",
"water-hole attacks geared towards energy sector",
],
foo="bar",
allow_custom=True,
)
tc_sink.add(ta)
def test_add_list_object(collection, indicator):
tc_sink = TAXIICollectionSink(collection)
tc_sink = stix2.TAXIICollectionSink(collection)
# create new STIX threat-actor
ta = ThreatActor(name="Teddy Bear",
labels=["nation-state"],
sophistication="innovator",
resource_level="government",
goals=[
"compromising environment NGOs",
"water-hole attacks geared towards energy sector",
])
ta = stix2.v20.ThreatActor(
name="Teddy Bear",
labels=["nation-state"],
sophistication="innovator",
resource_level="government",
goals=[
"compromising environment NGOs",
"water-hole attacks geared towards energy sector",
],
)
tc_sink.add([ta, indicator])
def test_get_object_found(collection):
tc_source = stix2.TAXIICollectionSource(collection)
result = tc_source.query([
stix2.Filter("id", "=", "indicator--00000000-0000-4000-8000-000000000001"),
])
assert result
def test_get_object_not_found(collection):
tc_source = stix2.TAXIICollectionSource(collection)
result = tc_source.get("indicator--00000000-0000-4000-8000-000000000005")
assert result is None
def test_add_stix2_bundle_object(collection):
tc_sink = TAXIICollectionSink(collection)
tc_sink = stix2.TAXIICollectionSink(collection)
# create new STIX threat-actor
ta = ThreatActor(name="Teddy Bear",
labels=["nation-state"],
sophistication="innovator",
resource_level="government",
goals=[
"compromising environment NGOs",
"water-hole attacks geared towards energy sector",
])
ta = stix2.v20.ThreatActor(
name="Teddy Bear",
labels=["nation-state"],
sophistication="innovator",
resource_level="government",
goals=[
"compromising environment NGOs",
"water-hole attacks geared towards energy sector",
],
)
tc_sink.add(Bundle(objects=[ta]))
tc_sink.add(stix2.v20.Bundle(objects=[ta]))
def test_add_str_object(collection):
tc_sink = TAXIICollectionSink(collection)
tc_sink = stix2.TAXIICollectionSink(collection)
# create new STIX threat-actor
ta = """{
@ -198,7 +226,7 @@ def test_add_str_object(collection):
def test_add_dict_object(collection):
tc_sink = TAXIICollectionSink(collection)
tc_sink = stix2.TAXIICollectionSink(collection)
ta = {
"type": "threat-actor",
@ -208,25 +236,24 @@ def test_add_dict_object(collection):
"name": "Teddy Bear",
"goals": [
"compromising environment NGOs",
"water-hole attacks geared towards energy sector"
"water-hole attacks geared towards energy sector",
],
"sophistication": "innovator",
"resource_level": "government",
"labels": [
"nation-state"
]
"nation-state",
],
}
tc_sink.add(ta)
def test_add_dict_bundle_object(collection):
tc_sink = TAXIICollectionSink(collection)
tc_sink = stix2.TAXIICollectionSink(collection)
ta = {
"type": "bundle",
"id": "bundle--860ccc8d-56c9-4fda-9384-84276fb52fb1",
"spec_version": "2.0",
"objects": [
{
"type": "threat-actor",
@ -236,22 +263,22 @@ def test_add_dict_bundle_object(collection):
"name": "Teddy Bear",
"goals": [
"compromising environment NGOs",
"water-hole attacks geared towards energy sector"
"water-hole attacks geared towards energy sector",
],
"sophistication": "innovator",
"resource_level": "government",
"labels": [
"nation-state"
]
}
]
"nation-state",
],
},
],
}
tc_sink.add(ta)
def test_get_stix2_object(collection):
tc_sink = TAXIICollectionSource(collection)
tc_sink = stix2.TAXIICollectionSource(collection)
objects = tc_sink.get("indicator--00000000-0000-4000-8000-000000000001")
@ -271,10 +298,10 @@ def test_parse_taxii_filters(collection):
Filter("added_after", "=", "2016-02-01T00:00:01.000Z"),
Filter("id", "=", "taxii stix object ID"),
Filter("type", "=", "taxii stix object ID"),
Filter("version", "=", "first")
Filter("version", "=", "first"),
]
ds = TAXIICollectionSource(collection)
ds = stix2.TAXIICollectionSource(collection)
taxii_filters = ds._parse_taxii_filters(query)
@ -282,7 +309,7 @@ def test_parse_taxii_filters(collection):
def test_add_get_remove_filter(collection):
ds = TAXIICollectionSource(collection)
ds = stix2.TAXIICollectionSource(collection)
# First 3 filters are valid, remaining properties are erroneous in some way
valid_filters = [
@ -318,7 +345,7 @@ def test_add_get_remove_filter(collection):
def test_get_all_versions(collection):
ds = TAXIICollectionStore(collection)
ds = stix2.TAXIICollectionStore(collection)
indicators = ds.all_versions('indicator--00000000-0000-4000-8000-000000000001')
# There are 3 indicators but 2 share the same 'modified' timestamp
@ -330,7 +357,7 @@ def test_can_read_error(collection_no_rw_access):
instance that does not have read access, check ValueError exception is raised"""
with pytest.raises(DataSourceError) as excinfo:
TAXIICollectionSource(collection_no_rw_access)
stix2.TAXIICollectionSource(collection_no_rw_access)
assert "Collection object provided does not have read access" in str(excinfo.value)
@ -339,7 +366,7 @@ def test_can_write_error(collection_no_rw_access):
instance that does not have write access, check ValueError exception is raised"""
with pytest.raises(DataSourceError) as excinfo:
TAXIICollectionSink(collection_no_rw_access)
stix2.TAXIICollectionSink(collection_no_rw_access)
assert "Collection object provided does not have write access" in str(excinfo.value)
@ -360,7 +387,7 @@ def test_get_404():
resp.status_code = 404
resp.raise_for_status()
ds = TAXIICollectionSource(TAXIICollection404())
ds = stix2.TAXIICollectionSource(TAXIICollection404())
# this will raise 404 from mock TAXII Client but TAXIICollectionStore
# should handle gracefully and return None
@ -372,7 +399,7 @@ def test_all_versions_404(collection):
""" a TAXIICollectionSource.all_version() call that recieves an HTTP 404
response code from the taxii2client should be returned as an exception"""
ds = TAXIICollectionStore(collection)
ds = stix2.TAXIICollectionStore(collection)
with pytest.raises(DataSourceError) as excinfo:
ds.all_versions("indicator--1")
@ -384,7 +411,7 @@ def test_query_404(collection):
""" a TAXIICollectionSource.query() call that recieves an HTTP 404
response code from the taxii2client should be returned as an exception"""
ds = TAXIICollectionStore(collection)
ds = stix2.TAXIICollectionStore(collection)
query = [Filter("type", "=", "malware")]
with pytest.raises(DataSourceError) as excinfo:

View File

@ -2,109 +2,127 @@ import pytest
import stix2
from .constants import (CAMPAIGN_ID, CAMPAIGN_KWARGS, FAKE_TIME, IDENTITY_ID,
IDENTITY_KWARGS, INDICATOR_ID, INDICATOR_KWARGS,
MALWARE_ID, MALWARE_KWARGS, RELATIONSHIP_IDS)
from .constants import (
CAMPAIGN_ID, CAMPAIGN_KWARGS, FAKE_TIME, IDENTITY_ID, IDENTITY_KWARGS,
INDICATOR_ID, INDICATOR_KWARGS, MALWARE_ID, MALWARE_KWARGS,
RELATIONSHIP_IDS,
)
@pytest.fixture
def ds():
cam = stix2.Campaign(id=CAMPAIGN_ID, **CAMPAIGN_KWARGS)
idy = stix2.Identity(id=IDENTITY_ID, **IDENTITY_KWARGS)
ind = stix2.Indicator(id=INDICATOR_ID, **INDICATOR_KWARGS)
mal = stix2.Malware(id=MALWARE_ID, **MALWARE_KWARGS)
rel1 = stix2.Relationship(ind, 'indicates', mal, id=RELATIONSHIP_IDS[0])
rel2 = stix2.Relationship(mal, 'targets', idy, id=RELATIONSHIP_IDS[1])
rel3 = stix2.Relationship(cam, 'uses', mal, id=RELATIONSHIP_IDS[2])
cam = stix2.v20.Campaign(id=CAMPAIGN_ID, **CAMPAIGN_KWARGS)
idy = stix2.v20.Identity(id=IDENTITY_ID, **IDENTITY_KWARGS)
ind = stix2.v20.Indicator(id=INDICATOR_ID, **INDICATOR_KWARGS)
mal = stix2.v20.Malware(id=MALWARE_ID, **MALWARE_KWARGS)
rel1 = stix2.v20.Relationship(ind, 'indicates', mal, id=RELATIONSHIP_IDS[0])
rel2 = stix2.v20.Relationship(mal, 'targets', idy, id=RELATIONSHIP_IDS[1])
rel3 = stix2.v20.Relationship(cam, 'uses', mal, id=RELATIONSHIP_IDS[2])
stix_objs = [cam, idy, ind, mal, rel1, rel2, rel3]
yield stix2.MemoryStore(stix_objs)
def test_object_factory_created_by_ref_str():
factory = stix2.ObjectFactory(created_by_ref=IDENTITY_ID)
ind = factory.create(stix2.Indicator, **INDICATOR_KWARGS)
ind = factory.create(stix2.v20.Indicator, **INDICATOR_KWARGS)
assert ind.created_by_ref == IDENTITY_ID
def test_object_factory_created_by_ref_obj():
id_obj = stix2.Identity(id=IDENTITY_ID, **IDENTITY_KWARGS)
id_obj = stix2.v20.Identity(id=IDENTITY_ID, **IDENTITY_KWARGS)
factory = stix2.ObjectFactory(created_by_ref=id_obj)
ind = factory.create(stix2.Indicator, **INDICATOR_KWARGS)
ind = factory.create(stix2.v20.Indicator, **INDICATOR_KWARGS)
assert ind.created_by_ref == IDENTITY_ID
def test_object_factory_override_default():
factory = stix2.ObjectFactory(created_by_ref=IDENTITY_ID)
new_id = "identity--983b3172-44fe-4a80-8091-eb8098841fe8"
ind = factory.create(stix2.Indicator, created_by_ref=new_id, **INDICATOR_KWARGS)
ind = factory.create(stix2.v20.Indicator, created_by_ref=new_id, **INDICATOR_KWARGS)
assert ind.created_by_ref == new_id
def test_object_factory_created():
factory = stix2.ObjectFactory(created=FAKE_TIME)
ind = factory.create(stix2.Indicator, **INDICATOR_KWARGS)
ind = factory.create(stix2.v20.Indicator, **INDICATOR_KWARGS)
assert ind.created == FAKE_TIME
assert ind.modified == FAKE_TIME
def test_object_factory_external_reference():
ext_ref = stix2.ExternalReference(source_name="ACME Threat Intel",
description="Threat report")
ext_ref = stix2.v20.ExternalReference(
source_name="ACME Threat Intel",
description="Threat report",
)
factory = stix2.ObjectFactory(external_references=ext_ref)
ind = factory.create(stix2.Indicator, **INDICATOR_KWARGS)
ind = factory.create(stix2.v20.Indicator, **INDICATOR_KWARGS)
assert ind.external_references[0].source_name == "ACME Threat Intel"
assert ind.external_references[0].description == "Threat report"
ind2 = factory.create(stix2.Indicator, external_references=None, **INDICATOR_KWARGS)
ind2 = factory.create(stix2.v20.Indicator, external_references=None, **INDICATOR_KWARGS)
assert 'external_references' not in ind2
def test_object_factory_obj_markings():
stmt_marking = stix2.StatementMarking("Copyright 2016, Example Corp")
mark_def = stix2.MarkingDefinition(definition_type="statement",
definition=stmt_marking)
factory = stix2.ObjectFactory(object_marking_refs=[mark_def, stix2.TLP_AMBER])
ind = factory.create(stix2.Indicator, **INDICATOR_KWARGS)
stmt_marking = stix2.v20.StatementMarking("Copyright 2016, Example Corp")
mark_def = stix2.v20.MarkingDefinition(
definition_type="statement",
definition=stmt_marking,
)
factory = stix2.ObjectFactory(object_marking_refs=[mark_def, stix2.v20.TLP_AMBER])
ind = factory.create(stix2.v20.Indicator, **INDICATOR_KWARGS)
assert mark_def.id in ind.object_marking_refs
assert stix2.TLP_AMBER.id in ind.object_marking_refs
assert stix2.v20.TLP_AMBER.id in ind.object_marking_refs
factory = stix2.ObjectFactory(object_marking_refs=stix2.TLP_RED)
ind = factory.create(stix2.Indicator, **INDICATOR_KWARGS)
assert stix2.TLP_RED.id in ind.object_marking_refs
factory = stix2.ObjectFactory(object_marking_refs=stix2.v20.TLP_RED)
ind = factory.create(stix2.v20.Indicator, **INDICATOR_KWARGS)
assert stix2.v20.TLP_RED.id in ind.object_marking_refs
def test_object_factory_list_append():
ext_ref = stix2.ExternalReference(source_name="ACME Threat Intel",
description="Threat report from ACME")
ext_ref2 = stix2.ExternalReference(source_name="Yet Another Threat Report",
description="Threat report from YATR")
ext_ref3 = stix2.ExternalReference(source_name="Threat Report #3",
description="One more threat report")
ext_ref = stix2.v20.ExternalReference(
source_name="ACME Threat Intel",
description="Threat report from ACME",
)
ext_ref2 = stix2.v20.ExternalReference(
source_name="Yet Another Threat Report",
description="Threat report from YATR",
)
ext_ref3 = stix2.v20.ExternalReference(
source_name="Threat Report #3",
description="One more threat report",
)
factory = stix2.ObjectFactory(external_references=ext_ref)
ind = factory.create(stix2.Indicator, external_references=ext_ref2, **INDICATOR_KWARGS)
ind = factory.create(stix2.v20.Indicator, external_references=ext_ref2, **INDICATOR_KWARGS)
assert ind.external_references[1].source_name == "Yet Another Threat Report"
ind = factory.create(stix2.Indicator, external_references=[ext_ref2, ext_ref3], **INDICATOR_KWARGS)
ind = factory.create(stix2.v20.Indicator, external_references=[ext_ref2, ext_ref3], **INDICATOR_KWARGS)
assert ind.external_references[2].source_name == "Threat Report #3"
def test_object_factory_list_replace():
ext_ref = stix2.ExternalReference(source_name="ACME Threat Intel",
description="Threat report from ACME")
ext_ref2 = stix2.ExternalReference(source_name="Yet Another Threat Report",
description="Threat report from YATR")
ext_ref = stix2.v20.ExternalReference(
source_name="ACME Threat Intel",
description="Threat report from ACME",
)
ext_ref2 = stix2.v20.ExternalReference(
source_name="Yet Another Threat Report",
description="Threat report from YATR",
)
factory = stix2.ObjectFactory(external_references=ext_ref, list_append=False)
ind = factory.create(stix2.Indicator, external_references=ext_ref2, **INDICATOR_KWARGS)
ind = factory.create(stix2.v20.Indicator, external_references=ext_ref2, **INDICATOR_KWARGS)
assert len(ind.external_references) == 1
assert ind.external_references[0].source_name == "Yet Another Threat Report"
def test_environment_functions():
env = stix2.Environment(stix2.ObjectFactory(created_by_ref=IDENTITY_ID),
stix2.MemoryStore())
env = stix2.Environment(
stix2.ObjectFactory(created_by_ref=IDENTITY_ID),
stix2.MemoryStore(),
)
# Create a STIX object
ind = env.create(stix2.Indicator, id=INDICATOR_ID, **INDICATOR_KWARGS)
ind = env.create(stix2.v20.Indicator, id=INDICATOR_ID, **INDICATOR_KWARGS)
assert ind.created_by_ref == IDENTITY_ID
# Add objects to datastore
@ -125,23 +143,27 @@ def test_environment_functions():
assert len(resp) == 0
# See different results after adding filters to the environment
env.add_filters([stix2.Filter('type', '=', 'indicator'),
stix2.Filter('created_by_ref', '=', IDENTITY_ID)])
env.add_filters([
stix2.Filter('type', '=', 'indicator'),
stix2.Filter('created_by_ref', '=', IDENTITY_ID),
])
env.add_filter(stix2.Filter('labels', '=', 'benign')) # should be 'malicious-activity'
resp = env.get(INDICATOR_ID)
assert resp['labels'][0] == 'benign' # should be 'malicious-activity'
def test_environment_source_and_sink():
ind = stix2.Indicator(id=INDICATOR_ID, **INDICATOR_KWARGS)
ind = stix2.v20.Indicator(id=INDICATOR_ID, **INDICATOR_KWARGS)
env = stix2.Environment(source=stix2.MemorySource([ind]), sink=stix2.MemorySink([ind]))
assert env.get(INDICATOR_ID).labels[0] == 'malicious-activity'
def test_environment_datastore_and_sink():
with pytest.raises(ValueError) as excinfo:
stix2.Environment(factory=stix2.ObjectFactory(),
store=stix2.MemoryStore(), sink=stix2.MemorySink)
stix2.Environment(
factory=stix2.ObjectFactory(),
store=stix2.MemoryStore(), sink=stix2.MemorySink,
)
assert 'Data store already provided' in str(excinfo.value)
@ -149,7 +171,7 @@ def test_environment_no_datastore():
env = stix2.Environment(factory=stix2.ObjectFactory())
with pytest.raises(AttributeError) as excinfo:
env.add(stix2.Indicator(**INDICATOR_KWARGS))
env.add(stix2.v20.Indicator(**INDICATOR_KWARGS))
assert 'Environment has no data sink to put objects in' in str(excinfo.value)
with pytest.raises(AttributeError) as excinfo:
@ -182,7 +204,7 @@ def test_environment_add_filters():
def test_environment_datastore_and_no_object_factory():
# Uses a default object factory
env = stix2.Environment(store=stix2.MemoryStore())
ind = env.create(stix2.Indicator, id=INDICATOR_ID, **INDICATOR_KWARGS)
ind = env.create(stix2.v20.Indicator, id=INDICATOR_ID, **INDICATOR_KWARGS)
assert ind.id == INDICATOR_ID
@ -198,7 +220,7 @@ def test_parse_malware():
"ransomware"
]
}"""
mal = env.parse(data)
mal = env.parse(data, version="2.0")
assert mal.type == 'malware'
assert mal.id == MALWARE_ID
@ -209,40 +231,40 @@ def test_parse_malware():
def test_creator_of():
identity = stix2.Identity(**IDENTITY_KWARGS)
identity = stix2.v20.Identity(**IDENTITY_KWARGS)
factory = stix2.ObjectFactory(created_by_ref=identity.id)
env = stix2.Environment(store=stix2.MemoryStore(), factory=factory)
env.add(identity)
ind = env.create(stix2.Indicator, **INDICATOR_KWARGS)
ind = env.create(stix2.v20.Indicator, **INDICATOR_KWARGS)
creator = env.creator_of(ind)
assert creator is identity
def test_creator_of_no_datasource():
identity = stix2.Identity(**IDENTITY_KWARGS)
identity = stix2.v20.Identity(**IDENTITY_KWARGS)
factory = stix2.ObjectFactory(created_by_ref=identity.id)
env = stix2.Environment(factory=factory)
ind = env.create(stix2.Indicator, **INDICATOR_KWARGS)
ind = env.create(stix2.v20.Indicator, **INDICATOR_KWARGS)
with pytest.raises(AttributeError) as excinfo:
env.creator_of(ind)
assert 'Environment has no data source' in str(excinfo.value)
def test_creator_of_not_found():
identity = stix2.Identity(**IDENTITY_KWARGS)
identity = stix2.v20.Identity(**IDENTITY_KWARGS)
factory = stix2.ObjectFactory(created_by_ref=identity.id)
env = stix2.Environment(store=stix2.MemoryStore(), factory=factory)
ind = env.create(stix2.Indicator, **INDICATOR_KWARGS)
ind = env.create(stix2.v20.Indicator, **INDICATOR_KWARGS)
creator = env.creator_of(ind)
assert creator is None
def test_creator_of_no_created_by_ref():
env = stix2.Environment(store=stix2.MemoryStore())
ind = env.create(stix2.Indicator, **INDICATOR_KWARGS)
ind = env.create(stix2.v20.Indicator, **INDICATOR_KWARGS)
creator = env.creator_of(ind)
assert creator is None
@ -262,7 +284,7 @@ def test_relationships_no_id(ds):
env = stix2.Environment(store=ds)
mal = {
"type": "malware",
"name": "some variant"
"name": "some variant",
}
with pytest.raises(ValueError) as excinfo:
env.relationships(mal)
@ -326,7 +348,7 @@ def test_related_to_no_id(ds):
env = stix2.Environment(store=ds)
mal = {
"type": "malware",
"name": "some variant"
"name": "some variant",
}
with pytest.raises(ValueError) as excinfo:
env.related_to(mal)
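
A minimal sketch of the Environment/ObjectFactory pattern the tests above exercise, assuming the explicit v20 classes used throughout this diff; the identity ID below is an example value taken from the test data, not a required one:

import stix2

env = stix2.Environment(
    stix2.ObjectFactory(created_by_ref="identity--311b2d2d-f010-4473-83ec-1edf84858f4c"),
    stix2.MemoryStore(),
)

# Objects created through the environment pick up the factory defaults and
# can be stored in and retrieved from the wrapped MemoryStore.
ind = env.create(
    stix2.v20.Indicator,
    labels=["malicious-activity"],
    pattern="[file:hashes.MD5 = 'd41d8cd98f00b204e9800998ecf8427e']",
)
env.add(ind)
assert env.get(ind.id).created_by_ref == "identity--311b2d2d-f010-4473-83ec-1edf84858f4c"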

View File

@ -17,11 +17,11 @@ VERIS = """{
def test_external_reference_veris():
ref = stix2.ExternalReference(
ref = stix2.v20.ExternalReference(
source_name="veris",
external_id="0001AA7F-C601-424A-B2B8-BE6C9F5164E7",
hashes={
"SHA-256": "6db12788c37247f2316052e142f42f4b259d6561751e5f401a1ae2a6df9c674b"
"SHA-256": "6db12788c37247f2316052e142f42f4b259d6561751e5f401a1ae2a6df9c674b",
},
url="https://github.com/vz-risk/VCDB/blob/master/data/json/0001AA7F-C601-424A-B2B8-BE6C9F5164E7.json",
)
@ -36,7 +36,7 @@ CAPEC = """{
def test_external_reference_capec():
ref = stix2.ExternalReference(
ref = stix2.v20.ExternalReference(
source_name="capec",
external_id="CAPEC-550",
)
@ -53,7 +53,7 @@ CAPEC_URL = """{
def test_external_reference_capec_url():
ref = stix2.ExternalReference(
ref = stix2.v20.ExternalReference(
source_name="capec",
external_id="CAPEC-550",
url="http://capec.mitre.org/data/definitions/550.html",
@ -70,7 +70,7 @@ THREAT_REPORT = """{
def test_external_reference_threat_report():
ref = stix2.ExternalReference(
ref = stix2.v20.ExternalReference(
source_name="ACME Threat Intel",
description="Threat report",
url="http://www.example.com/threat-report.pdf",
@ -87,7 +87,7 @@ BUGZILLA = """{
def test_external_reference_bugzilla():
ref = stix2.ExternalReference(
ref = stix2.v20.ExternalReference(
source_name="ACME Bugzilla",
external_id="1370",
url="https://www.example.com/bugs/1370",
@ -103,7 +103,7 @@ OFFLINE = """{
def test_external_reference_offline():
ref = stix2.ExternalReference(
ref = stix2.v20.ExternalReference(
source_name="ACME Threat Intel",
description="Threat report",
)
@ -116,7 +116,7 @@ def test_external_reference_offline():
def test_external_reference_source_required():
with pytest.raises(stix2.exceptions.MissingPropertiesError) as excinfo:
stix2.ExternalReference()
stix2.v20.ExternalReference()
assert excinfo.value.cls == stix2.ExternalReference
assert excinfo.value.cls == stix2.v20.ExternalReference
assert excinfo.value.properties == ["source_name"]

View File

@ -1,8 +1,9 @@
import pytest
from stix2 import TLP_RED, Malware, markings
from stix2 import markings
from stix2.exceptions import MarkingNotFoundError
from stix2.v20 import TLP_RED, Malware
from .constants import MALWARE_MORE_KWARGS as MALWARE_KWARGS_CONST
from .constants import MARKING_IDS
@ -20,11 +21,11 @@ def test_add_marking_mark_one_selector_multiple_refs():
granular_markings=[
{
"selectors": ["description"],
"marking_ref": MARKING_IDS[0]
"marking_ref": MARKING_IDS[0],
},
{
"selectors": ["description"],
"marking_ref": MARKING_IDS[1]
"marking_ref": MARKING_IDS[1],
},
],
**MALWARE_KWARGS
@ -35,44 +36,49 @@ def test_add_marking_mark_one_selector_multiple_refs():
assert m in after["granular_markings"]
@pytest.mark.parametrize("data", [
(
Malware(**MALWARE_KWARGS),
Malware(
granular_markings=[
{
"selectors": ["description", "name"],
"marking_ref": MARKING_IDS[0]
},
],
**MALWARE_KWARGS),
MARKING_IDS[0],
),
(
MALWARE_KWARGS,
dict(
granular_markings=[
{
"selectors": ["description", "name"],
"marking_ref": MARKING_IDS[0]
},
],
**MALWARE_KWARGS),
MARKING_IDS[0],
),
(
Malware(**MALWARE_KWARGS),
Malware(
granular_markings=[
{
"selectors": ["description", "name"],
"marking_ref": TLP_RED.id,
},
],
**MALWARE_KWARGS),
TLP_RED,
),
])
@pytest.mark.parametrize(
"data", [
(
Malware(**MALWARE_KWARGS),
Malware(
granular_markings=[
{
"selectors": ["description", "name"],
"marking_ref": MARKING_IDS[0],
},
],
**MALWARE_KWARGS
),
MARKING_IDS[0],
),
(
MALWARE_KWARGS,
dict(
granular_markings=[
{
"selectors": ["description", "name"],
"marking_ref": MARKING_IDS[0],
},
],
**MALWARE_KWARGS
),
MARKING_IDS[0],
),
(
Malware(**MALWARE_KWARGS),
Malware(
granular_markings=[
{
"selectors": ["description", "name"],
"marking_ref": TLP_RED.id,
},
],
**MALWARE_KWARGS
),
TLP_RED,
),
],
)
def test_add_marking_mark_multiple_selector_one_refs(data):
before = data[0]
after = data[1]
@ -91,12 +97,12 @@ def test_add_marking_mark_multiple_selector_multiple_refs():
granular_markings=[
{
"selectors": ["description", "name"],
"marking_ref": MARKING_IDS[0]
"marking_ref": MARKING_IDS[0],
},
{
"selectors": ["description", "name"],
"marking_ref": MARKING_IDS[1]
}
"marking_ref": MARKING_IDS[1],
},
],
**MALWARE_KWARGS
)
@ -111,7 +117,7 @@ def test_add_marking_mark_another_property_same_marking():
granular_markings=[
{
"selectors": ["description"],
"marking_ref": MARKING_IDS[0]
"marking_ref": MARKING_IDS[0],
},
],
**MALWARE_KWARGS
@ -120,7 +126,7 @@ def test_add_marking_mark_another_property_same_marking():
granular_markings=[
{
"selectors": ["description", "name"],
"marking_ref": MARKING_IDS[0]
"marking_ref": MARKING_IDS[0],
},
],
**MALWARE_KWARGS
@ -136,7 +142,7 @@ def test_add_marking_mark_same_property_same_marking():
granular_markings=[
{
"selectors": ["description"],
"marking_ref": MARKING_IDS[0]
"marking_ref": MARKING_IDS[0],
},
],
**MALWARE_KWARGS
@ -145,7 +151,7 @@ def test_add_marking_mark_same_property_same_marking():
granular_markings=[
{
"selectors": ["description"],
"marking_ref": MARKING_IDS[0]
"marking_ref": MARKING_IDS[0],
},
],
**MALWARE_KWARGS
@ -156,17 +162,22 @@ def test_add_marking_mark_same_property_same_marking():
assert m in after["granular_markings"]
@pytest.mark.parametrize("data,marking", [
({"description": "test description"},
[["title"], ["marking-definition--1", "marking-definition--2"],
"", ["marking-definition--1", "marking-definition--2"],
[], ["marking-definition--1", "marking-definition--2"],
[""], ["marking-definition--1", "marking-definition--2"],
["description"], [""],
["description"], [],
["description"], ["marking-definition--1", 456]
])
])
@pytest.mark.parametrize(
"data,marking", [
(
{"description": "test description"},
[
["title"], ["marking-definition--1", "marking-definition--2"],
"", ["marking-definition--1", "marking-definition--2"],
[], ["marking-definition--1", "marking-definition--2"],
[""], ["marking-definition--1", "marking-definition--2"],
["description"], [""],
["description"], [],
["description"], ["marking-definition--1", 456],
],
),
],
)
def test_add_marking_bad_selector(data, marking):
with pytest.raises(AssertionError):
markings.add_markings(data, marking[0], marking[1])
@ -180,61 +191,61 @@ GET_MARKINGS_TEST_DATA = {
"list value",
{
"g": "nested",
"h": 45
}
"h": 45,
},
],
"x": {
"y": [
"hello",
88
88,
],
"z": {
"foo1": "bar",
"foo2": 65
}
"foo2": 65,
},
},
"granular_markings": [
{
"marking_ref": "1",
"selectors": ["a"]
"selectors": ["a"],
},
{
"marking_ref": "2",
"selectors": ["c"]
"selectors": ["c"],
},
{
"marking_ref": "3",
"selectors": ["c.[1]"]
"selectors": ["c.[1]"],
},
{
"marking_ref": "4",
"selectors": ["c.[2]"]
"selectors": ["c.[2]"],
},
{
"marking_ref": "5",
"selectors": ["c.[2].g"]
"selectors": ["c.[2].g"],
},
{
"marking_ref": "6",
"selectors": ["x"]
"selectors": ["x"],
},
{
"marking_ref": "7",
"selectors": ["x.y"]
"selectors": ["x.y"],
},
{
"marking_ref": "8",
"selectors": ["x.y.[1]"]
"selectors": ["x.y.[1]"],
},
{
"marking_ref": "9",
"selectors": ["x.z"]
"selectors": ["x.z"],
},
{
"marking_ref": "10",
"selectors": ["x.z.foo2"]
"selectors": ["x.z.foo2"],
},
]
],
}
@ -245,10 +256,12 @@ def test_get_markings_smoke(data):
assert markings.get_markings(data, "a") == ["1"]
@pytest.mark.parametrize("data", [
GET_MARKINGS_TEST_DATA,
{"b": 1234},
])
@pytest.mark.parametrize(
"data", [
GET_MARKINGS_TEST_DATA,
{"b": 1234},
],
)
def test_get_markings_not_marked(data):
"""Test selector that is not marked returns empty list."""
results = markings.get_markings(data, "b")
@ -267,21 +280,23 @@ def test_get_markings_multiple_selectors(data):
assert set(xy_markings).union(xz_markings).issuperset(total)
@pytest.mark.parametrize("data,selector", [
(GET_MARKINGS_TEST_DATA, "foo"),
(GET_MARKINGS_TEST_DATA, ""),
(GET_MARKINGS_TEST_DATA, []),
(GET_MARKINGS_TEST_DATA, [""]),
(GET_MARKINGS_TEST_DATA, "x.z.[-2]"),
(GET_MARKINGS_TEST_DATA, "c.f"),
(GET_MARKINGS_TEST_DATA, "c.[2].i"),
(GET_MARKINGS_TEST_DATA, "c.[3]"),
(GET_MARKINGS_TEST_DATA, "d"),
(GET_MARKINGS_TEST_DATA, "x.[0]"),
(GET_MARKINGS_TEST_DATA, "z.y.w"),
(GET_MARKINGS_TEST_DATA, "x.z.[1]"),
(GET_MARKINGS_TEST_DATA, "x.z.foo3")
])
@pytest.mark.parametrize(
"data,selector", [
(GET_MARKINGS_TEST_DATA, "foo"),
(GET_MARKINGS_TEST_DATA, ""),
(GET_MARKINGS_TEST_DATA, []),
(GET_MARKINGS_TEST_DATA, [""]),
(GET_MARKINGS_TEST_DATA, "x.z.[-2]"),
(GET_MARKINGS_TEST_DATA, "c.f"),
(GET_MARKINGS_TEST_DATA, "c.[2].i"),
(GET_MARKINGS_TEST_DATA, "c.[3]"),
(GET_MARKINGS_TEST_DATA, "d"),
(GET_MARKINGS_TEST_DATA, "x.[0]"),
(GET_MARKINGS_TEST_DATA, "z.y.w"),
(GET_MARKINGS_TEST_DATA, "x.z.[1]"),
(GET_MARKINGS_TEST_DATA, "x.z.foo3"),
],
)
def test_get_markings_bad_selector(data, selector):
"""Test bad selectors raise exception"""
with pytest.raises(AssertionError):
@ -362,40 +377,42 @@ def test_get_markings_positional_arguments_combinations(data):
assert set(markings.get_markings(data, "x.z.foo2", False, True)) == set(["10"])
@pytest.mark.parametrize("data", [
(
Malware(
granular_markings=[
{
"selectors": ["description"],
"marking_ref": MARKING_IDS[0]
},
{
"selectors": ["description"],
"marking_ref": MARKING_IDS[1]
},
],
**MALWARE_KWARGS
@pytest.mark.parametrize(
"data", [
(
Malware(
granular_markings=[
{
"selectors": ["description"],
"marking_ref": MARKING_IDS[0],
},
{
"selectors": ["description"],
"marking_ref": MARKING_IDS[1],
},
],
**MALWARE_KWARGS
),
[MARKING_IDS[0], MARKING_IDS[1]],
),
[MARKING_IDS[0], MARKING_IDS[1]],
),
(
dict(
granular_markings=[
{
"selectors": ["description"],
"marking_ref": MARKING_IDS[0]
},
{
"selectors": ["description"],
"marking_ref": MARKING_IDS[1]
},
],
**MALWARE_KWARGS
(
dict(
granular_markings=[
{
"selectors": ["description"],
"marking_ref": MARKING_IDS[0],
},
{
"selectors": ["description"],
"marking_ref": MARKING_IDS[1],
},
],
**MALWARE_KWARGS
),
[MARKING_IDS[0], MARKING_IDS[1]],
),
[MARKING_IDS[0], MARKING_IDS[1]],
),
])
],
)
def test_remove_marking_remove_one_selector_with_multiple_refs(data):
before = markings.remove_markings(data[0], data[1], ["description"])
assert "granular_markings" not in before
@ -406,8 +423,8 @@ def test_remove_marking_remove_multiple_selector_one_ref():
granular_markings=[
{
"selectors": ["description", "modified"],
"marking_ref": MARKING_IDS[0]
}
"marking_ref": MARKING_IDS[0],
},
],
**MALWARE_KWARGS
)
@ -420,8 +437,8 @@ def test_remove_marking_mark_one_selector_from_multiple_ones():
granular_markings=[
{
"selectors": ["description"],
"marking_ref": MARKING_IDS[0]
}
"marking_ref": MARKING_IDS[0],
},
],
**MALWARE_KWARGS
)
@ -429,8 +446,8 @@ def test_remove_marking_mark_one_selector_from_multiple_ones():
granular_markings=[
{
"selectors": ["description", "modified"],
"marking_ref": MARKING_IDS[0]
}
"marking_ref": MARKING_IDS[0],
},
],
**MALWARE_KWARGS
)
@ -444,12 +461,12 @@ def test_remove_marking_mark_one_selector_markings_from_multiple_ones():
granular_markings=[
{
"selectors": ["description"],
"marking_ref": MARKING_IDS[0]
"marking_ref": MARKING_IDS[0],
},
{
"selectors": ["description", "modified"],
"marking_ref": MARKING_IDS[1]
}
"marking_ref": MARKING_IDS[1],
},
],
**MALWARE_KWARGS
)
@ -457,12 +474,12 @@ def test_remove_marking_mark_one_selector_markings_from_multiple_ones():
granular_markings=[
{
"selectors": ["description", "modified"],
"marking_ref": MARKING_IDS[0]
"marking_ref": MARKING_IDS[0],
},
{
"selectors": ["description", "modified"],
"marking_ref": MARKING_IDS[1]
}
"marking_ref": MARKING_IDS[1],
},
],
**MALWARE_KWARGS
)
@ -476,12 +493,12 @@ def test_remove_marking_mark_mutilple_selector_multiple_refs():
granular_markings=[
{
"selectors": ["description", "modified"],
"marking_ref": MARKING_IDS[0]
"marking_ref": MARKING_IDS[0],
},
{
"selectors": ["description", "modified"],
"marking_ref": MARKING_IDS[1]
}
"marking_ref": MARKING_IDS[1],
},
],
**MALWARE_KWARGS
)
@ -494,8 +511,8 @@ def test_remove_marking_mark_another_property_same_marking():
granular_markings=[
{
"selectors": ["description"],
"marking_ref": MARKING_IDS[0]
}
"marking_ref": MARKING_IDS[0],
},
],
**MALWARE_KWARGS
)
@ -503,12 +520,12 @@ def test_remove_marking_mark_another_property_same_marking():
granular_markings=[
{
"selectors": ["description"],
"marking_ref": MARKING_IDS[0]
"marking_ref": MARKING_IDS[0],
},
{
"selectors": ["modified"],
"marking_ref": MARKING_IDS[0]
}
"marking_ref": MARKING_IDS[0],
},
],
**MALWARE_KWARGS
)
@ -522,8 +539,8 @@ def test_remove_marking_mark_same_property_same_marking():
granular_markings=[
{
"selectors": ["description"],
"marking_ref": MARKING_IDS[0]
}
"marking_ref": MARKING_IDS[0],
},
],
**MALWARE_KWARGS
)
@ -552,8 +569,8 @@ def test_remove_marking_not_present():
granular_markings=[
{
"selectors": ["description"],
"marking_ref": MARKING_IDS[0]
}
"marking_ref": MARKING_IDS[0],
},
],
**MALWARE_KWARGS
)
@ -566,15 +583,15 @@ IS_MARKED_TEST_DATA = [
granular_markings=[
{
"selectors": ["description"],
"marking_ref": MARKING_IDS[1]
"marking_ref": MARKING_IDS[1],
},
{
"selectors": ["labels", "description"],
"marking_ref": MARKING_IDS[2]
"marking_ref": MARKING_IDS[2],
},
{
"selectors": ["labels", "description"],
"marking_ref": MARKING_IDS[3]
"marking_ref": MARKING_IDS[3],
},
],
**MALWARE_KWARGS
@ -583,15 +600,15 @@ IS_MARKED_TEST_DATA = [
granular_markings=[
{
"selectors": ["description"],
"marking_ref": MARKING_IDS[1]
"marking_ref": MARKING_IDS[1],
},
{
"selectors": ["labels", "description"],
"marking_ref": MARKING_IDS[2]
"marking_ref": MARKING_IDS[2],
},
{
"selectors": ["labels", "description"],
"marking_ref": MARKING_IDS[3]
"marking_ref": MARKING_IDS[3],
},
],
**MALWARE_KWARGS
@ -606,21 +623,23 @@ def test_is_marked_smoke(data):
assert markings.is_marked(data, selectors=["modified"]) is False
@pytest.mark.parametrize("data,selector", [
(IS_MARKED_TEST_DATA[0], "foo"),
(IS_MARKED_TEST_DATA[0], ""),
(IS_MARKED_TEST_DATA[0], []),
(IS_MARKED_TEST_DATA[0], [""]),
(IS_MARKED_TEST_DATA[0], "x.z.[-2]"),
(IS_MARKED_TEST_DATA[0], "c.f"),
(IS_MARKED_TEST_DATA[0], "c.[2].i"),
(IS_MARKED_TEST_DATA[1], "c.[3]"),
(IS_MARKED_TEST_DATA[1], "d"),
(IS_MARKED_TEST_DATA[1], "x.[0]"),
(IS_MARKED_TEST_DATA[1], "z.y.w"),
(IS_MARKED_TEST_DATA[1], "x.z.[1]"),
(IS_MARKED_TEST_DATA[1], "x.z.foo3")
])
@pytest.mark.parametrize(
"data,selector", [
(IS_MARKED_TEST_DATA[0], "foo"),
(IS_MARKED_TEST_DATA[0], ""),
(IS_MARKED_TEST_DATA[0], []),
(IS_MARKED_TEST_DATA[0], [""]),
(IS_MARKED_TEST_DATA[0], "x.z.[-2]"),
(IS_MARKED_TEST_DATA[0], "c.f"),
(IS_MARKED_TEST_DATA[0], "c.[2].i"),
(IS_MARKED_TEST_DATA[1], "c.[3]"),
(IS_MARKED_TEST_DATA[1], "d"),
(IS_MARKED_TEST_DATA[1], "x.[0]"),
(IS_MARKED_TEST_DATA[1], "z.y.w"),
(IS_MARKED_TEST_DATA[1], "x.z.[1]"),
(IS_MARKED_TEST_DATA[1], "x.z.foo3"),
],
)
def test_is_marked_invalid_selector(data, selector):
"""Test invalid selector raises an error."""
with pytest.raises(AssertionError):
@ -688,61 +707,61 @@ def test_is_marked_positional_arguments_combinations():
"list value",
{
"g": "nested",
"h": 45
}
"h": 45,
},
],
"x": {
"y": [
"hello",
88
88,
],
"z": {
"foo1": "bar",
"foo2": 65
}
"foo2": 65,
},
},
"granular_markings": [
{
"marking_ref": "1",
"selectors": ["a"]
"selectors": ["a"],
},
{
"marking_ref": "2",
"selectors": ["c"]
"selectors": ["c"],
},
{
"marking_ref": "3",
"selectors": ["c.[1]"]
"selectors": ["c.[1]"],
},
{
"marking_ref": "4",
"selectors": ["c.[2]"]
"selectors": ["c.[2]"],
},
{
"marking_ref": "5",
"selectors": ["c.[2].g"]
"selectors": ["c.[2].g"],
},
{
"marking_ref": "6",
"selectors": ["x"]
"selectors": ["x"],
},
{
"marking_ref": "7",
"selectors": ["x.y"]
"selectors": ["x.y"],
},
{
"marking_ref": "8",
"selectors": ["x.y.[1]"]
"selectors": ["x.y.[1]"],
},
{
"marking_ref": "9",
"selectors": ["x.z"]
"selectors": ["x.z"],
},
{
"marking_ref": "10",
"selectors": ["x.z.foo2"]
"selectors": ["x.z.foo2"],
},
]
],
}
assert markings.is_marked(test_sdo, ["1"], "a", False, False)
@ -822,8 +841,8 @@ def test_create_sdo_with_invalid_marking():
granular_markings=[
{
"selectors": ["foo"],
"marking_ref": MARKING_IDS[0]
}
"marking_ref": MARKING_IDS[0],
},
],
**MALWARE_KWARGS
)
@ -838,12 +857,12 @@ def test_set_marking_mark_one_selector_multiple_refs():
granular_markings=[
{
"selectors": ["description"],
"marking_ref": MARKING_IDS[0]
"marking_ref": MARKING_IDS[0],
},
{
"selectors": ["description"],
"marking_ref": MARKING_IDS[1]
}
"marking_ref": MARKING_IDS[1],
},
],
**MALWARE_KWARGS
)
@ -857,8 +876,8 @@ def test_set_marking_mark_multiple_selector_one_refs():
granular_markings=[
{
"selectors": ["description", "modified"],
"marking_ref": MARKING_IDS[1]
}
"marking_ref": MARKING_IDS[1],
},
],
**MALWARE_KWARGS
)
@ -866,8 +885,8 @@ def test_set_marking_mark_multiple_selector_one_refs():
granular_markings=[
{
"selectors": ["description", "modified"],
"marking_ref": MARKING_IDS[0]
}
"marking_ref": MARKING_IDS[0],
},
],
**MALWARE_KWARGS
)
@ -884,12 +903,12 @@ def test_set_marking_mark_multiple_selector_multiple_refs_from_none():
granular_markings=[
{
"selectors": ["description", "modified"],
"marking_ref": MARKING_IDS[0]
"marking_ref": MARKING_IDS[0],
},
{
"selectors": ["description", "modified"],
"marking_ref": MARKING_IDS[1]
}
"marking_ref": MARKING_IDS[1],
},
],
**MALWARE_KWARGS
)
@ -903,8 +922,8 @@ def test_set_marking_mark_another_property_same_marking():
granular_markings=[
{
"selectors": ["description"],
"marking_ref": MARKING_IDS[0]
}
"marking_ref": MARKING_IDS[0],
},
],
**MALWARE_KWARGS
)
@ -912,12 +931,12 @@ def test_set_marking_mark_another_property_same_marking():
granular_markings=[
{
"selectors": ["description"],
"marking_ref": MARKING_IDS[1]
"marking_ref": MARKING_IDS[1],
},
{
"selectors": ["description"],
"marking_ref": MARKING_IDS[2]
}
"marking_ref": MARKING_IDS[2],
},
],
**MALWARE_KWARGS
)
@ -927,19 +946,21 @@ def test_set_marking_mark_another_property_same_marking():
assert m in after["granular_markings"]
@pytest.mark.parametrize("marking", [
([MARKING_IDS[4], MARKING_IDS[5]], ["foo"]),
([MARKING_IDS[4], MARKING_IDS[5]], ""),
([MARKING_IDS[4], MARKING_IDS[5]], []),
([MARKING_IDS[4], MARKING_IDS[5]], [""]),
])
@pytest.mark.parametrize(
"marking", [
([MARKING_IDS[4], MARKING_IDS[5]], ["foo"]),
([MARKING_IDS[4], MARKING_IDS[5]], ""),
([MARKING_IDS[4], MARKING_IDS[5]], []),
([MARKING_IDS[4], MARKING_IDS[5]], [""]),
],
)
def test_set_marking_bad_selector(marking):
before = Malware(
granular_markings=[
{
"selectors": ["description"],
"marking_ref": MARKING_IDS[0]
}
"marking_ref": MARKING_IDS[0],
},
],
**MALWARE_KWARGS
)
@ -947,8 +968,8 @@ def test_set_marking_bad_selector(marking):
granular_markings=[
{
"selectors": ["description"],
"marking_ref": MARKING_IDS[0]
}
"marking_ref": MARKING_IDS[0],
},
],
**MALWARE_KWARGS
)
@ -964,8 +985,8 @@ def test_set_marking_mark_same_property_same_marking():
granular_markings=[
{
"selectors": ["description"],
"marking_ref": MARKING_IDS[0]
}
"marking_ref": MARKING_IDS[0],
},
],
**MALWARE_KWARGS
)
@ -973,8 +994,8 @@ def test_set_marking_mark_same_property_same_marking():
granular_markings=[
{
"selectors": ["description"],
"marking_ref": MARKING_IDS[0]
}
"marking_ref": MARKING_IDS[0],
},
],
**MALWARE_KWARGS
)
@ -988,15 +1009,15 @@ CLEAR_MARKINGS_TEST_DATA = [
granular_markings=[
{
"selectors": ["description"],
"marking_ref": MARKING_IDS[0]
"marking_ref": MARKING_IDS[0],
},
{
"selectors": ["modified", "description"],
"marking_ref": MARKING_IDS[1]
"marking_ref": MARKING_IDS[1],
},
{
"selectors": ["modified", "description", "type"],
"marking_ref": MARKING_IDS[2]
"marking_ref": MARKING_IDS[2],
},
],
**MALWARE_KWARGS
@ -1005,19 +1026,19 @@ CLEAR_MARKINGS_TEST_DATA = [
granular_markings=[
{
"selectors": ["description"],
"marking_ref": MARKING_IDS[0]
"marking_ref": MARKING_IDS[0],
},
{
"selectors": ["modified", "description"],
"marking_ref": MARKING_IDS[1]
"marking_ref": MARKING_IDS[1],
},
{
"selectors": ["modified", "description", "type"],
"marking_ref": MARKING_IDS[2]
"marking_ref": MARKING_IDS[2],
},
],
**MALWARE_KWARGS
)
),
]
@ -1049,12 +1070,14 @@ def test_clear_marking_all_selectors(data):
assert "granular_markings" not in data
@pytest.mark.parametrize("data,selector", [
(CLEAR_MARKINGS_TEST_DATA[0], "foo"),
(CLEAR_MARKINGS_TEST_DATA[0], ""),
(CLEAR_MARKINGS_TEST_DATA[1], []),
(CLEAR_MARKINGS_TEST_DATA[1], [""]),
])
@pytest.mark.parametrize(
"data,selector", [
(CLEAR_MARKINGS_TEST_DATA[0], "foo"),
(CLEAR_MARKINGS_TEST_DATA[0], ""),
(CLEAR_MARKINGS_TEST_DATA[1], []),
(CLEAR_MARKINGS_TEST_DATA[1], [""]),
],
)
def test_clear_marking_bad_selector(data, selector):
"""Test bad selector raises exception."""
with pytest.raises(AssertionError):
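
A small, hedged sketch of the granular-marking helpers these tests cover, assuming the (object, marking, selectors) argument order shown in the remove_markings/is_marked calls above; the malware content is illustrative:

from stix2 import markings
from stix2.v20 import TLP_RED, Malware

mal = Malware(
    labels=["ransomware"],
    name="Cryptolocker",
    description="Ransomware that encrypts files and demands payment",
)

# add_markings returns a new object with a granular marking applied to the
# selected properties; the marking is stored by its id.
marked = markings.add_markings(mal, TLP_RED, ["description", "name"])
assert markings.is_marked(marked, TLP_RED.id, "name")
assert markings.get_markings(marked, "description") == [TLP_RED.id]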

View File

@ -18,7 +18,7 @@ EXPECTED = """{
def test_identity_example():
identity = stix2.Identity(
identity = stix2.v20.Identity(
id="identity--311b2d2d-f010-4473-83ec-1edf84858f4c",
created="2015-12-21T19:59:11.000Z",
modified="2015-12-21T19:59:11.000Z",
@ -29,19 +29,21 @@ def test_identity_example():
assert str(identity) == EXPECTED
@pytest.mark.parametrize("data", [
EXPECTED,
{
"created": "2015-12-21T19:59:11.000Z",
"id": "identity--311b2d2d-f010-4473-83ec-1edf84858f4c",
"identity_class": "individual",
"modified": "2015-12-21T19:59:11.000Z",
"name": "John Smith",
"type": "identity"
},
])
@pytest.mark.parametrize(
"data", [
EXPECTED,
{
"created": "2015-12-21T19:59:11.000Z",
"id": "identity--311b2d2d-f010-4473-83ec-1edf84858f4c",
"identity_class": "individual",
"modified": "2015-12-21T19:59:11.000Z",
"name": "John Smith",
"type": "identity",
},
],
)
def test_parse_identity(data):
identity = stix2.parse(data)
identity = stix2.parse(data, version="2.0")
assert identity.type == 'identity'
assert identity.id == IDENTITY_ID
@ -52,21 +54,23 @@ def test_parse_identity(data):
def test_parse_no_type():
with pytest.raises(stix2.exceptions.ParseError):
stix2.parse("""
stix2.parse(
"""
{
"id": "identity--311b2d2d-f010-4473-83ec-1edf84858f4c",
"created": "2015-12-21T19:59:11.000Z",
"modified": "2015-12-21T19:59:11.000Z",
"name": "John Smith",
"identity_class": "individual"
}""")
}""", version="2.0",
)
def test_identity_with_custom():
identity = stix2.Identity(
identity = stix2.v20.Identity(
name="John Smith",
identity_class="individual",
custom_properties={'x_foo': 'bar'}
custom_properties={'x_foo': 'bar'},
)
assert identity.x_foo == "bar"

View File

@ -35,7 +35,7 @@ def test_indicator_with_all_required_properties():
now = dt.datetime(2017, 1, 1, 0, 0, 1, tzinfo=pytz.utc)
epoch = dt.datetime(1970, 1, 1, 0, 0, 1, tzinfo=pytz.utc)
ind = stix2.Indicator(
ind = stix2.v20.Indicator(
type="indicator",
id=INDICATOR_ID,
created=now,
@ -71,9 +71,9 @@ def test_indicator_autogenerated_properties(indicator):
def test_indicator_type_must_be_indicator():
with pytest.raises(stix2.exceptions.InvalidValueError) as excinfo:
stix2.Indicator(type='xxx', **INDICATOR_KWARGS)
stix2.v20.Indicator(type='xxx', **INDICATOR_KWARGS)
assert excinfo.value.cls == stix2.Indicator
assert excinfo.value.cls == stix2.v20.Indicator
assert excinfo.value.prop_name == "type"
assert excinfo.value.reason == "must equal 'indicator'."
assert str(excinfo.value) == "Invalid value for Indicator 'type': must equal 'indicator'."
@ -81,9 +81,9 @@ def test_indicator_type_must_be_indicator():
def test_indicator_id_must_start_with_indicator():
with pytest.raises(stix2.exceptions.InvalidValueError) as excinfo:
stix2.Indicator(id='my-prefix--', **INDICATOR_KWARGS)
stix2.v20.Indicator(id='my-prefix--', **INDICATOR_KWARGS)
assert excinfo.value.cls == stix2.Indicator
assert excinfo.value.cls == stix2.v20.Indicator
assert excinfo.value.prop_name == "id"
assert excinfo.value.reason == "must start with 'indicator--'."
assert str(excinfo.value) == "Invalid value for Indicator 'id': must start with 'indicator--'."
@ -91,26 +91,26 @@ def test_indicator_id_must_start_with_indicator():
def test_indicator_required_properties():
with pytest.raises(stix2.exceptions.MissingPropertiesError) as excinfo:
stix2.Indicator()
stix2.v20.Indicator()
assert excinfo.value.cls == stix2.Indicator
assert excinfo.value.cls == stix2.v20.Indicator
assert excinfo.value.properties == ["labels", "pattern"]
assert str(excinfo.value) == "No values for required properties for Indicator: (labels, pattern)."
def test_indicator_required_property_pattern():
with pytest.raises(stix2.exceptions.MissingPropertiesError) as excinfo:
stix2.Indicator(labels=['malicious-activity'])
stix2.v20.Indicator(labels=['malicious-activity'])
assert excinfo.value.cls == stix2.Indicator
assert excinfo.value.cls == stix2.v20.Indicator
assert excinfo.value.properties == ["pattern"]
def test_indicator_created_ref_invalid_format():
with pytest.raises(stix2.exceptions.InvalidValueError) as excinfo:
stix2.Indicator(created_by_ref='myprefix--12345678', **INDICATOR_KWARGS)
stix2.v20.Indicator(created_by_ref='myprefix--12345678', **INDICATOR_KWARGS)
assert excinfo.value.cls == stix2.Indicator
assert excinfo.value.cls == stix2.v20.Indicator
assert excinfo.value.prop_name == "created_by_ref"
assert excinfo.value.reason == "must start with 'identity'."
assert str(excinfo.value) == "Invalid value for Indicator 'created_by_ref': must start with 'identity'."
@ -118,9 +118,9 @@ def test_indicator_created_ref_invalid_format():
def test_indicator_revoked_invalid():
with pytest.raises(stix2.exceptions.InvalidValueError) as excinfo:
stix2.Indicator(revoked='no', **INDICATOR_KWARGS)
stix2.v20.Indicator(revoked='no', **INDICATOR_KWARGS)
assert excinfo.value.cls == stix2.Indicator
assert excinfo.value.cls == stix2.v20.Indicator
assert excinfo.value.prop_name == "revoked"
assert excinfo.value.reason == "must be a boolean value."
@ -134,36 +134,38 @@ def test_cannot_assign_to_indicator_attributes(indicator):
def test_invalid_kwarg_to_indicator():
with pytest.raises(stix2.exceptions.ExtraPropertiesError) as excinfo:
stix2.Indicator(my_custom_property="foo", **INDICATOR_KWARGS)
stix2.v20.Indicator(my_custom_property="foo", **INDICATOR_KWARGS)
assert excinfo.value.cls == stix2.Indicator
assert excinfo.value.cls == stix2.v20.Indicator
assert excinfo.value.properties == ['my_custom_property']
assert str(excinfo.value) == "Unexpected properties for Indicator: (my_custom_property)."
def test_created_modified_time_are_identical_by_default():
"""By default, the created and modified times should be the same."""
ind = stix2.Indicator(**INDICATOR_KWARGS)
ind = stix2.v20.Indicator(**INDICATOR_KWARGS)
assert ind.created == ind.modified
@pytest.mark.parametrize("data", [
EXPECTED_INDICATOR,
{
"type": "indicator",
"id": "indicator--a740531e-63ff-4e49-a9e1-a0a3eed0e3e7",
"created": "2017-01-01T00:00:01Z",
"modified": "2017-01-01T00:00:01Z",
"labels": [
"malicious-activity"
],
"pattern": "[file:hashes.MD5 = 'd41d8cd98f00b204e9800998ecf8427e']",
"valid_from": "1970-01-01T00:00:01Z"
},
])
@pytest.mark.parametrize(
"data", [
EXPECTED_INDICATOR,
{
"type": "indicator",
"id": "indicator--a740531e-63ff-4e49-a9e1-a0a3eed0e3e7",
"created": "2017-01-01T00:00:01Z",
"modified": "2017-01-01T00:00:01Z",
"labels": [
"malicious-activity",
],
"pattern": "[file:hashes.MD5 = 'd41d8cd98f00b204e9800998ecf8427e']",
"valid_from": "1970-01-01T00:00:01Z",
},
],
)
def test_parse_indicator(data):
idctr = stix2.parse(data)
idctr = stix2.parse(data, version="2.0")
assert idctr.type == 'indicator'
assert idctr.id == INDICATOR_ID
@ -176,19 +178,19 @@ def test_parse_indicator(data):
def test_invalid_indicator_pattern():
with pytest.raises(stix2.exceptions.InvalidValueError) as excinfo:
stix2.Indicator(
stix2.v20.Indicator(
labels=['malicious-activity'],
pattern="file:hashes.MD5 = 'd41d8cd98f00b204e9800998ecf8427e'",
)
assert excinfo.value.cls == stix2.Indicator
assert excinfo.value.cls == stix2.v20.Indicator
assert excinfo.value.prop_name == 'pattern'
assert 'input is missing square brackets' in excinfo.value.reason
with pytest.raises(stix2.exceptions.InvalidValueError) as excinfo:
stix2.Indicator(
stix2.v20.Indicator(
labels=['malicious-activity'],
pattern='[file:hashes.MD5 = "d41d8cd98f00b204e9800998ecf8427e"]',
)
assert excinfo.value.cls == stix2.Indicator
assert excinfo.value.cls == stix2.v20.Indicator
assert excinfo.value.prop_name == 'pattern'
assert 'mismatched input' in excinfo.value.reason
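
The updated tests pass an explicit spec version to stix2.parse(); a brief sketch of that call, reusing the indicator JSON from the parametrized test data above:

import stix2

indicator_json = """{
    "type": "indicator",
    "id": "indicator--a740531e-63ff-4e49-a9e1-a0a3eed0e3e7",
    "created": "2017-01-01T00:00:01Z",
    "modified": "2017-01-01T00:00:01Z",
    "labels": ["malicious-activity"],
    "pattern": "[file:hashes.MD5 = 'd41d8cd98f00b204e9800998ecf8427e']",
    "valid_from": "1970-01-01T00:00:01Z"
}"""

# Passing version="2.0" selects the 2.0 object registry, so the result is a
# stix2.v20.Indicator regardless of any library-wide default version.
ind = stix2.parse(indicator_json, version="2.0")
assert isinstance(ind, stix2.v20.Indicator)
assert ind.valid_from.year == 1970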

View File

@ -27,7 +27,7 @@ EXPECTED = """{
def test_intrusion_set_example():
intrusion_set = stix2.IntrusionSet(
intrusion_set = stix2.v20.IntrusionSet(
id="intrusion-set--4e78f46f-a023-4e5f-bc24-71b3ca22ec29",
created_by_ref="identity--f431f809-377b-45e0-aa1c-6a4751cae5ff",
created="2016-04-06T20:03:48.000Z",
@ -35,34 +35,36 @@ def test_intrusion_set_example():
name="Bobcat Breakin",
description="Incidents usually feature a shared TTP of a bobcat being released...",
aliases=["Zookeeper"],
goals=["acquisition-theft", "harassment", "damage"]
goals=["acquisition-theft", "harassment", "damage"],
)
assert str(intrusion_set) == EXPECTED
@pytest.mark.parametrize("data", [
EXPECTED,
{
"aliases": [
"Zookeeper"
],
"created": "2016-04-06T20:03:48.000Z",
"created_by_ref": "identity--f431f809-377b-45e0-aa1c-6a4751cae5ff",
"description": "Incidents usually feature a shared TTP of a bobcat being released...",
"goals": [
"acquisition-theft",
"harassment",
"damage"
],
"id": "intrusion-set--4e78f46f-a023-4e5f-bc24-71b3ca22ec29",
"modified": "2016-04-06T20:03:48.000Z",
"name": "Bobcat Breakin",
"type": "intrusion-set"
},
])
@pytest.mark.parametrize(
"data", [
EXPECTED,
{
"aliases": [
"Zookeeper",
],
"created": "2016-04-06T20:03:48.000Z",
"created_by_ref": "identity--f431f809-377b-45e0-aa1c-6a4751cae5ff",
"description": "Incidents usually feature a shared TTP of a bobcat being released...",
"goals": [
"acquisition-theft",
"harassment",
"damage",
],
"id": "intrusion-set--4e78f46f-a023-4e5f-bc24-71b3ca22ec29",
"modified": "2016-04-06T20:03:48.000Z",
"name": "Bobcat Breakin",
"type": "intrusion-set",
},
],
)
def test_parse_intrusion_set(data):
intset = stix2.parse(data)
intset = stix2.parse(data, version="2.0")
assert intset.type == "intrusion-set"
assert intset.id == INTRUSION_SET_ID

View File

@ -11,7 +11,7 @@ LMCO_RECON = """{
def test_lockheed_martin_cyber_kill_chain():
recon = stix2.KillChainPhase(
recon = stix2.v20.KillChainPhase(
kill_chain_name="lockheed-martin-cyber-kill-chain",
phase_name="reconnaissance",
)
@ -26,7 +26,7 @@ FOO_PRE_ATTACK = """{
def test_kill_chain_example():
preattack = stix2.KillChainPhase(
preattack = stix2.v20.KillChainPhase(
kill_chain_name="foo",
phase_name="pre-attack",
)
@ -37,25 +37,25 @@ def test_kill_chain_example():
def test_kill_chain_required_properties():
with pytest.raises(stix2.exceptions.MissingPropertiesError) as excinfo:
stix2.KillChainPhase()
stix2.v20.KillChainPhase()
assert excinfo.value.cls == stix2.KillChainPhase
assert excinfo.value.cls == stix2.v20.KillChainPhase
assert excinfo.value.properties == ["kill_chain_name", "phase_name"]
def test_kill_chain_required_property_chain_name():
with pytest.raises(stix2.exceptions.MissingPropertiesError) as excinfo:
stix2.KillChainPhase(phase_name="weaponization")
stix2.v20.KillChainPhase(phase_name="weaponization")
assert excinfo.value.cls == stix2.KillChainPhase
assert excinfo.value.cls == stix2.v20.KillChainPhase
assert excinfo.value.properties == ["kill_chain_name"]
def test_kill_chain_required_property_phase_name():
with pytest.raises(stix2.exceptions.MissingPropertiesError) as excinfo:
stix2.KillChainPhase(kill_chain_name="lockheed-martin-cyber-kill-chain")
stix2.v20.KillChainPhase(kill_chain_name="lockheed-martin-cyber-kill-chain")
assert excinfo.value.cls == stix2.KillChainPhase
assert excinfo.value.cls == stix2.v20.KillChainPhase
assert excinfo.value.properties == ["phase_name"]

View File

@ -23,7 +23,7 @@ EXPECTED_MALWARE = """{
def test_malware_with_all_required_properties():
now = dt.datetime(2016, 5, 12, 8, 17, 27, tzinfo=pytz.utc)
mal = stix2.Malware(
mal = stix2.v20.Malware(
type="malware",
id=MALWARE_ID,
created=now,
@ -53,9 +53,9 @@ def test_malware_autogenerated_properties(malware):
def test_malware_type_must_be_malware():
with pytest.raises(stix2.exceptions.InvalidValueError) as excinfo:
stix2.Malware(type='xxx', **MALWARE_KWARGS)
stix2.v20.Malware(type='xxx', **MALWARE_KWARGS)
assert excinfo.value.cls == stix2.Malware
assert excinfo.value.cls == stix2.v20.Malware
assert excinfo.value.prop_name == "type"
assert excinfo.value.reason == "must equal 'malware'."
assert str(excinfo.value) == "Invalid value for Malware 'type': must equal 'malware'."
@ -63,9 +63,9 @@ def test_malware_type_must_be_malware():
def test_malware_id_must_start_with_malware():
with pytest.raises(stix2.exceptions.InvalidValueError) as excinfo:
stix2.Malware(id='my-prefix--', **MALWARE_KWARGS)
stix2.v20.Malware(id='my-prefix--', **MALWARE_KWARGS)
assert excinfo.value.cls == stix2.Malware
assert excinfo.value.cls == stix2.v20.Malware
assert excinfo.value.prop_name == "id"
assert excinfo.value.reason == "must start with 'malware--'."
assert str(excinfo.value) == "Invalid value for Malware 'id': must start with 'malware--'."
@ -73,17 +73,17 @@ def test_malware_id_must_start_with_malware():
def test_malware_required_properties():
with pytest.raises(stix2.exceptions.MissingPropertiesError) as excinfo:
stix2.Malware()
stix2.v20.Malware()
assert excinfo.value.cls == stix2.Malware
assert excinfo.value.cls == stix2.v20.Malware
assert excinfo.value.properties == ["labels", "name"]
def test_malware_required_property_name():
with pytest.raises(stix2.exceptions.MissingPropertiesError) as excinfo:
stix2.Malware(labels=['ransomware'])
stix2.v20.Malware(labels=['ransomware'])
assert excinfo.value.cls == stix2.Malware
assert excinfo.value.cls == stix2.v20.Malware
assert excinfo.value.properties == ["name"]
@ -96,26 +96,28 @@ def test_cannot_assign_to_malware_attributes(malware):
def test_invalid_kwarg_to_malware():
with pytest.raises(stix2.exceptions.ExtraPropertiesError) as excinfo:
stix2.Malware(my_custom_property="foo", **MALWARE_KWARGS)
stix2.v20.Malware(my_custom_property="foo", **MALWARE_KWARGS)
assert excinfo.value.cls == stix2.Malware
assert excinfo.value.cls == stix2.v20.Malware
assert excinfo.value.properties == ['my_custom_property']
assert str(excinfo.value) == "Unexpected properties for Malware: (my_custom_property)."
@pytest.mark.parametrize("data", [
EXPECTED_MALWARE,
{
"type": "malware",
"id": "malware--9c4638ec-f1de-4ddb-abf4-1b760417654e",
"created": "2016-05-12T08:17:27.000Z",
"modified": "2016-05-12T08:17:27.000Z",
"labels": ["ransomware"],
"name": "Cryptolocker",
},
])
@pytest.mark.parametrize(
"data", [
EXPECTED_MALWARE,
{
"type": "malware",
"id": "malware--9c4638ec-f1de-4ddb-abf4-1b760417654e",
"created": "2016-05-12T08:17:27.000Z",
"modified": "2016-05-12T08:17:27.000Z",
"labels": ["ransomware"],
"name": "Cryptolocker",
},
],
)
def test_parse_malware(data):
mal = stix2.parse(data)
mal = stix2.parse(data, version="2.0")
assert mal.type == 'malware'
assert mal.id == MALWARE_ID
@ -128,7 +130,7 @@ def test_parse_malware(data):
def test_parse_malware_invalid_labels():
data = re.compile('\\[.+\\]', re.DOTALL).sub('1', EXPECTED_MALWARE)
with pytest.raises(ValueError) as excinfo:
stix2.parse(data)
stix2.parse(data, version="2.0")
assert "Invalid value for Malware 'labels'" in str(excinfo.value)
@ -141,7 +143,7 @@ def test_parse_malware_kill_chain_phases():
}
]"""
data = EXPECTED_MALWARE.replace('malware"', 'malware",%s' % kill_chain)
mal = stix2.parse(data)
mal = stix2.parse(data, version="2.0")
assert mal.kill_chain_phases[0].kill_chain_name == "lockheed-martin-cyber-kill-chain"
assert mal.kill_chain_phases[0].phase_name == "reconnaissance"
assert mal['kill_chain_phases'][0]['kill_chain_name'] == "lockheed-martin-cyber-kill-chain"
@ -157,5 +159,5 @@ def test_parse_malware_clean_kill_chain_phases():
}
]"""
data = EXPECTED_MALWARE.replace('malware"', 'malware",%s' % kill_chain)
mal = stix2.parse(data)
mal = stix2.parse(data, version="2.0")
assert mal['kill_chain_phases'][0]['phase_name'] == "1"

View File

@ -4,7 +4,7 @@ import pytest
import pytz
import stix2
from stix2 import TLP_WHITE
from stix2.v20 import TLP_WHITE
from .constants import MARKING_DEFINITION_ID
@ -75,11 +75,11 @@ def test_marking_def_example_with_tlp():
def test_marking_def_example_with_statement_positional_argument():
marking_definition = stix2.MarkingDefinition(
marking_definition = stix2.v20.MarkingDefinition(
id="marking-definition--613f2e26-407d-48c7-9eca-b8e91df99dc9",
created="2017-01-20T00:00:00.000Z",
definition_type="statement",
definition=stix2.StatementMarking(statement="Copyright 2016, Example Corp")
definition=stix2.v20.StatementMarking(statement="Copyright 2016, Example Corp"),
)
assert str(marking_definition) == EXPECTED_STATEMENT_MARKING_DEFINITION
@ -87,11 +87,11 @@ def test_marking_def_example_with_statement_positional_argument():
def test_marking_def_example_with_kwargs_statement():
kwargs = dict(statement="Copyright 2016, Example Corp")
marking_definition = stix2.MarkingDefinition(
marking_definition = stix2.v20.MarkingDefinition(
id="marking-definition--613f2e26-407d-48c7-9eca-b8e91df99dc9",
created="2017-01-20T00:00:00.000Z",
definition_type="statement",
definition=stix2.StatementMarking(**kwargs)
definition=stix2.v20.StatementMarking(**kwargs),
)
assert str(marking_definition) == EXPECTED_STATEMENT_MARKING_DEFINITION
@ -99,31 +99,31 @@ def test_marking_def_example_with_kwargs_statement():
def test_marking_def_invalid_type():
with pytest.raises(ValueError):
stix2.MarkingDefinition(
stix2.v20.MarkingDefinition(
id="marking-definition--613f2e26-407d-48c7-9eca-b8e91df99dc9",
created="2017-01-20T00:00:00.000Z",
definition_type="my-definition-type",
definition=stix2.StatementMarking("Copyright 2016, Example Corp")
definition=stix2.v20.StatementMarking("Copyright 2016, Example Corp"),
)
def test_campaign_with_markings_example():
campaign = stix2.Campaign(
campaign = stix2.v20.Campaign(
id="campaign--8e2e2d2b-17d4-4cbf-938f-98ee46b3cd3f",
created_by_ref="identity--f431f809-377b-45e0-aa1c-6a4751cae5ff",
created="2016-04-06T20:03:00Z",
modified="2016-04-06T20:03:00Z",
name="Green Group Attacks Against Finance",
description="Campaign by Green Group against a series of targets in the financial services sector.",
object_marking_refs=TLP_WHITE
object_marking_refs=TLP_WHITE,
)
assert str(campaign) == EXPECTED_CAMPAIGN_WITH_OBJECT_MARKING
def test_granular_example():
granular_marking = stix2.GranularMarking(
granular_marking = stix2.v20.GranularMarking(
marking_ref="marking-definition--613f2e26-407d-48c7-9eca-b8e91df99dc9",
selectors=["abc", "abc.[23]", "abc.def", "abc.[2].efg"]
selectors=["abc", "abc.[23]", "abc.def", "abc.[2].efg"],
)
assert str(granular_marking) == EXPECTED_GRANULAR_MARKING
@ -131,19 +131,19 @@ def test_granular_example():
def test_granular_example_with_bad_selector():
with pytest.raises(stix2.exceptions.InvalidValueError) as excinfo:
stix2.GranularMarking(
stix2.v20.GranularMarking(
marking_ref="marking-definition--613f2e26-407d-48c7-9eca-b8e91df99dc9",
selectors=["abc[0]"] # missing "."
selectors=["abc[0]"], # missing "."
)
assert excinfo.value.cls == stix2.GranularMarking
assert excinfo.value.cls == stix2.v20.GranularMarking
assert excinfo.value.prop_name == "selectors"
assert excinfo.value.reason == "must adhere to selector syntax."
assert str(excinfo.value) == "Invalid value for GranularMarking 'selectors': must adhere to selector syntax."
def test_campaign_with_granular_markings_example():
campaign = stix2.Campaign(
campaign = stix2.v20.Campaign(
id="campaign--8e2e2d2b-17d4-4cbf-938f-98ee46b3cd3f",
created_by_ref="identity--f431f809-377b-45e0-aa1c-6a4751cae5ff",
created="2016-04-06T20:03:00Z",
@ -151,27 +151,31 @@ def test_campaign_with_granular_markings_example():
name="Green Group Attacks Against Finance",
description="Campaign by Green Group against a series of targets in the financial services sector.",
granular_markings=[
stix2.GranularMarking(
stix2.v20.GranularMarking(
marking_ref="marking-definition--613f2e26-407d-48c7-9eca-b8e91df99dc9",
selectors=["description"])
])
selectors=["description"],
),
],
)
assert str(campaign) == EXPECTED_CAMPAIGN_WITH_GRANULAR_MARKINGS
@pytest.mark.parametrize("data", [
EXPECTED_TLP_MARKING_DEFINITION,
{
"id": "marking-definition--613f2e26-407d-48c7-9eca-b8e91df99dc9",
"type": "marking-definition",
"created": "2017-01-20T00:00:00Z",
"definition": {
"tlp": "white"
@pytest.mark.parametrize(
"data", [
EXPECTED_TLP_MARKING_DEFINITION,
{
"id": "marking-definition--613f2e26-407d-48c7-9eca-b8e91df99dc9",
"type": "marking-definition",
"created": "2017-01-20T00:00:00Z",
"definition": {
"tlp": "white",
},
"definition_type": "tlp",
},
"definition_type": "tlp",
},
])
],
)
def test_parse_marking_definition(data):
gm = stix2.parse(data)
gm = stix2.parse(data, version="2.0")
assert gm.type == 'marking-definition'
assert gm.id == MARKING_DEFINITION_ID
@ -180,10 +184,12 @@ def test_parse_marking_definition(data):
assert gm.definition_type == "tlp"
@stix2.common.CustomMarking('x-new-marking-type', [
('property1', stix2.properties.StringProperty(required=True)),
('property2', stix2.properties.IntegerProperty()),
])
@stix2.v20.CustomMarking(
'x-new-marking-type', [
('property1', stix2.properties.StringProperty(required=True)),
('property2', stix2.properties.IntegerProperty()),
],
)
class NewMarking(object):
def __init__(self, property2=None, **kwargs):
if "property3" in kwargs and not isinstance(kwargs.get("property3"), int):
@ -193,11 +199,11 @@ class NewMarking(object):
def test_registered_custom_marking():
nm = NewMarking(property1='something', property2=55)
marking_def = stix2.MarkingDefinition(
marking_def = stix2.v20.MarkingDefinition(
id="marking-definition--00000000-0000-4000-8000-000000000012",
created="2017-01-22T00:00:00.000Z",
definition_type="x-new-marking-type",
definition=nm
definition=nm,
)
assert marking_def.type == "marking-definition"
@ -218,21 +224,23 @@ def test_registered_custom_marking_raises_exception():
def test_not_registered_marking_raises_exception():
with pytest.raises(ValueError) as excinfo:
# Used custom object on purpose to demonstrate a not-registered marking
@stix2.sdo.CustomObject('x-new-marking-type2', [
('property1', stix2.properties.StringProperty(required=True)),
('property2', stix2.properties.IntegerProperty()),
])
@stix2.v20.CustomObject(
'x-new-marking-type2', [
('property1', stix2.properties.StringProperty(required=True)),
('property2', stix2.properties.IntegerProperty()),
],
)
class NewObject2(object):
def __init__(self, property2=None, **kwargs):
return
no = NewObject2(property1='something', property2=55)
stix2.MarkingDefinition(
stix2.v20.MarkingDefinition(
id="marking-definition--00000000-0000-4000-8000-000000000012",
created="2017-01-22T00:00:00.000Z",
definition_type="x-new-marking-type2",
definition=no
definition=no,
)
assert str(excinfo.value) == "definition_type must be a valid marking type"
@ -241,7 +249,7 @@ def test_not_registered_marking_raises_exception():
def test_marking_wrong_type_construction():
with pytest.raises(ValueError) as excinfo:
# Test passing wrong type for properties.
@stix2.CustomMarking('x-new-marking-type2', ("a", "b"))
@stix2.v20.CustomMarking('x-new-marking-type2', ("a", "b"))
class NewObject3(object):
pass
@ -249,7 +257,7 @@ def test_marking_wrong_type_construction():
def test_campaign_add_markings():
campaign = stix2.Campaign(
campaign = stix2.v20.Campaign(
id="campaign--8e2e2d2b-17d4-4cbf-938f-98ee46b3cd3f",
created_by_ref="identity--f431f809-377b-45e0-aa1c-6a4751cae5ff",
created="2016-04-06T20:03:00Z",

View File

@ -1,7 +1,8 @@
import pytest
from stix2 import TLP_AMBER, Malware, exceptions, markings
from stix2 import exceptions, markings
from stix2.v20 import TLP_AMBER, Malware
from .constants import FAKE_TIME, MALWARE_ID
from .constants import MALWARE_KWARGS as MALWARE_KWARGS_CONST
@ -17,26 +18,34 @@ MALWARE_KWARGS.update({
})
@pytest.mark.parametrize("data", [
(
Malware(**MALWARE_KWARGS),
Malware(object_marking_refs=[MARKING_IDS[0]],
**MALWARE_KWARGS),
MARKING_IDS[0],
),
(
MALWARE_KWARGS,
dict(object_marking_refs=[MARKING_IDS[0]],
**MALWARE_KWARGS),
MARKING_IDS[0],
),
(
Malware(**MALWARE_KWARGS),
Malware(object_marking_refs=[TLP_AMBER.id],
**MALWARE_KWARGS),
TLP_AMBER,
),
])
@pytest.mark.parametrize(
"data", [
(
Malware(**MALWARE_KWARGS),
Malware(
object_marking_refs=[MARKING_IDS[0]],
**MALWARE_KWARGS
),
MARKING_IDS[0],
),
(
MALWARE_KWARGS,
dict(
object_marking_refs=[MARKING_IDS[0]],
**MALWARE_KWARGS
),
MARKING_IDS[0],
),
(
Malware(**MALWARE_KWARGS),
Malware(
object_marking_refs=[TLP_AMBER.id],
**MALWARE_KWARGS
),
TLP_AMBER,
),
],
)
def test_add_markings_one_marking(data):
before = data[0]
after = data[1]
@ -72,12 +81,12 @@ def test_add_markings_combination():
granular_markings=[
{
"selectors": ["labels"],
"marking_ref": MARKING_IDS[2]
"marking_ref": MARKING_IDS[2],
},
{
"selectors": ["name"],
"marking_ref": MARKING_IDS[3]
}
"marking_ref": MARKING_IDS[3],
},
],
**MALWARE_KWARGS
)
@ -94,12 +103,14 @@ def test_add_markings_combination():
assert m in after["object_marking_refs"]
@pytest.mark.parametrize("data", [
([""]),
(""),
([]),
([MARKING_IDS[0], 456])
])
@pytest.mark.parametrize(
"data", [
([""]),
(""),
([]),
([MARKING_IDS[0], 456]),
],
)
def test_add_markings_bad_markings(data):
before = Malware(
**MALWARE_KWARGS
@ -119,62 +130,62 @@ GET_MARKINGS_TEST_DATA = \
"list value",
{
"g": "nested",
"h": 45
}
"h": 45,
},
],
"x": {
"y": [
"hello",
88
88,
],
"z": {
"foo1": "bar",
"foo2": 65
}
"foo2": 65,
},
},
"object_marking_refs": ["11"],
"granular_markings": [
{
"marking_ref": "1",
"selectors": ["a"]
"selectors": ["a"],
},
{
"marking_ref": "2",
"selectors": ["c"]
"selectors": ["c"],
},
{
"marking_ref": "3",
"selectors": ["c.[1]"]
"selectors": ["c.[1]"],
},
{
"marking_ref": "4",
"selectors": ["c.[2]"]
"selectors": ["c.[2]"],
},
{
"marking_ref": "5",
"selectors": ["c.[2].g"]
"selectors": ["c.[2].g"],
},
{
"marking_ref": "6",
"selectors": ["x"]
"selectors": ["x"],
},
{
"marking_ref": "7",
"selectors": ["x.y"]
"selectors": ["x.y"],
},
{
"marking_ref": "8",
"selectors": ["x.y.[1]"]
"selectors": ["x.y.[1]"],
},
{
"marking_ref": "9",
"selectors": ["x.z"]
"selectors": ["x.z"],
},
{
"marking_ref": "10",
"selectors": ["x.z.foo2"]
"selectors": ["x.z.foo2"],
},
]
],
}
@ -257,18 +268,24 @@ def test_get_markings_object_and_granular_combinations(data):
assert set(markings.get_markings(data, "x.z.foo2", False, True)) == set(["10"])
@pytest.mark.parametrize("data", [
(
Malware(object_marking_refs=[MARKING_IDS[0]],
**MALWARE_KWARGS),
Malware(**MALWARE_KWARGS),
),
(
dict(object_marking_refs=[MARKING_IDS[0]],
**MALWARE_KWARGS),
MALWARE_KWARGS,
),
])
@pytest.mark.parametrize(
"data", [
(
Malware(
object_marking_refs=[MARKING_IDS[0]],
**MALWARE_KWARGS
),
Malware(**MALWARE_KWARGS),
),
(
dict(
object_marking_refs=[MARKING_IDS[0]],
**MALWARE_KWARGS
),
MALWARE_KWARGS,
),
],
)
def test_remove_markings_object_level(data):
before = data[0]
after = data[1]
@ -283,29 +300,43 @@ def test_remove_markings_object_level(data):
modified == after['modified']
@pytest.mark.parametrize("data", [
(
Malware(object_marking_refs=[MARKING_IDS[0], MARKING_IDS[1], MARKING_IDS[2]],
**MALWARE_KWARGS),
Malware(object_marking_refs=[MARKING_IDS[1]],
**MALWARE_KWARGS),
[MARKING_IDS[0], MARKING_IDS[2]],
),
(
dict(object_marking_refs=[MARKING_IDS[0], MARKING_IDS[1], MARKING_IDS[2]],
**MALWARE_KWARGS),
dict(object_marking_refs=[MARKING_IDS[1]],
**MALWARE_KWARGS),
[MARKING_IDS[0], MARKING_IDS[2]],
),
(
Malware(object_marking_refs=[MARKING_IDS[0], MARKING_IDS[1], TLP_AMBER.id],
**MALWARE_KWARGS),
Malware(object_marking_refs=[MARKING_IDS[1]],
**MALWARE_KWARGS),
[MARKING_IDS[0], TLP_AMBER],
),
])
@pytest.mark.parametrize(
"data", [
(
Malware(
object_marking_refs=[MARKING_IDS[0], MARKING_IDS[1], MARKING_IDS[2]],
**MALWARE_KWARGS
),
Malware(
object_marking_refs=[MARKING_IDS[1]],
**MALWARE_KWARGS
),
[MARKING_IDS[0], MARKING_IDS[2]],
),
(
dict(
object_marking_refs=[MARKING_IDS[0], MARKING_IDS[1], MARKING_IDS[2]],
**MALWARE_KWARGS
),
dict(
object_marking_refs=[MARKING_IDS[1]],
**MALWARE_KWARGS
),
[MARKING_IDS[0], MARKING_IDS[2]],
),
(
Malware(
object_marking_refs=[MARKING_IDS[0], MARKING_IDS[1], TLP_AMBER.id],
**MALWARE_KWARGS
),
Malware(
object_marking_refs=[MARKING_IDS[1]],
**MALWARE_KWARGS
),
[MARKING_IDS[0], TLP_AMBER],
),
],
)
def test_remove_markings_multiple(data):
before = data[0]
after = data[1]
@ -325,18 +356,24 @@ def test_remove_markings_bad_markings():
assert str(excinfo.value) == "Marking ['%s'] was not found in Malware!" % MARKING_IDS[4]
@pytest.mark.parametrize("data", [
(
Malware(object_marking_refs=[MARKING_IDS[0], MARKING_IDS[1], MARKING_IDS[2]],
**MALWARE_KWARGS),
Malware(**MALWARE_KWARGS),
),
(
dict(object_marking_refs=[MARKING_IDS[0], MARKING_IDS[1], MARKING_IDS[2]],
**MALWARE_KWARGS),
MALWARE_KWARGS,
),
])
@pytest.mark.parametrize(
"data", [
(
Malware(
object_marking_refs=[MARKING_IDS[0], MARKING_IDS[1], MARKING_IDS[2]],
**MALWARE_KWARGS
),
Malware(**MALWARE_KWARGS),
),
(
dict(
object_marking_refs=[MARKING_IDS[0], MARKING_IDS[1], MARKING_IDS[2]],
**MALWARE_KWARGS
),
MALWARE_KWARGS,
),
],
)
def test_clear_markings(data):
before = data[0]
after = data[1]
@ -358,62 +395,62 @@ def test_is_marked_object_and_granular_combinations():
"list value",
{
"g": "nested",
"h": 45
}
"h": 45,
},
],
"x": {
"y": [
"hello",
88
88,
],
"z": {
"foo1": "bar",
"foo2": 65
}
"foo2": 65,
},
},
"object_marking_refs": "11",
"granular_markings": [
{
"marking_ref": "1",
"selectors": ["a"]
"selectors": ["a"],
},
{
"marking_ref": "2",
"selectors": ["c"]
"selectors": ["c"],
},
{
"marking_ref": "3",
"selectors": ["c.[1]"]
"selectors": ["c.[1]"],
},
{
"marking_ref": "4",
"selectors": ["c.[2]"]
"selectors": ["c.[2]"],
},
{
"marking_ref": "5",
"selectors": ["c.[2].g"]
"selectors": ["c.[2].g"],
},
{
"marking_ref": "6",
"selectors": ["x"]
"selectors": ["x"],
},
{
"marking_ref": "7",
"selectors": ["x.y"]
"selectors": ["x.y"],
},
{
"marking_ref": "8",
"selectors": ["x.y.[1]"]
"selectors": ["x.y.[1]"],
},
{
"marking_ref": "9",
"selectors": ["x.z"]
"selectors": ["x.z"],
},
{
"marking_ref": "10",
"selectors": ["x.z.foo2"]
"selectors": ["x.z.foo2"],
},
]
],
}
assert markings.is_marked(test_sdo, ["1"], "a", False, False)
@ -490,18 +527,24 @@ def test_is_marked_object_and_granular_combinations():
assert markings.is_marked(test_sdo, ["2"], None, True, True) is False
@pytest.mark.parametrize("data", [
(
Malware(object_marking_refs=[MARKING_IDS[0], MARKING_IDS[1], MARKING_IDS[2]],
**MALWARE_KWARGS),
Malware(**MALWARE_KWARGS),
),
(
dict(object_marking_refs=[MARKING_IDS[0], MARKING_IDS[1], MARKING_IDS[2]],
**MALWARE_KWARGS),
MALWARE_KWARGS,
),
])
@pytest.mark.parametrize(
"data", [
(
Malware(
object_marking_refs=[MARKING_IDS[0], MARKING_IDS[1], MARKING_IDS[2]],
**MALWARE_KWARGS
),
Malware(**MALWARE_KWARGS),
),
(
dict(
object_marking_refs=[MARKING_IDS[0], MARKING_IDS[1], MARKING_IDS[2]],
**MALWARE_KWARGS
),
MALWARE_KWARGS,
),
],
)
def test_is_marked_no_markings(data):
marked = data[0]
nonmarked = data[1]
@ -531,12 +574,14 @@ def test_set_marking():
assert x in after["object_marking_refs"]
@pytest.mark.parametrize("data", [
([]),
([""]),
(""),
([MARKING_IDS[4], 687])
])
@pytest.mark.parametrize(
"data", [
([]),
([""]),
(""),
([MARKING_IDS[4], 687]),
],
)
def test_set_marking_bad_input(data):
before = Malware(
object_marking_refs=[MARKING_IDS[0]],

View File

@ -0,0 +1,525 @@
import datetime
import pytest
import stix2
from stix2.pattern_visitor import create_pattern_object
def test_create_comparison_expression():
exp = stix2.EqualityComparisonExpression(
"file:hashes.'SHA-256'",
stix2.HashConstant("aec070645fe53ee3b3763059376134f058cc337247c978add178b6ccdfb0019f", "SHA-256"),
) # noqa
assert str(exp) == "file:hashes.'SHA-256' = 'aec070645fe53ee3b3763059376134f058cc337247c978add178b6ccdfb0019f'"
def test_boolean_expression():
exp1 = stix2.MatchesComparisonExpression(
"email-message:from_ref.value",
stix2.StringConstant(".+\\@example\\.com$"),
)
exp2 = stix2.MatchesComparisonExpression(
"email-message:body_multipart[*].body_raw_ref.name",
stix2.StringConstant("^Final Report.+\\.exe$"),
)
exp = stix2.AndBooleanExpression([exp1, exp2])
assert str(exp) == "email-message:from_ref.value MATCHES '.+\\\\@example\\\\.com$' AND email-message:body_multipart[*].body_raw_ref.name MATCHES '^Final Report.+\\\\.exe$'" # noqa
def test_boolean_expression_with_parentheses():
exp1 = stix2.MatchesComparisonExpression(
stix2.ObjectPath(
"email-message",
[
stix2.ReferenceObjectPathComponent("from_ref"),
stix2.BasicObjectPathComponent("value", False),
],
),
stix2.StringConstant(".+\\@example\\.com$"),
)
exp2 = stix2.MatchesComparisonExpression(
"email-message:body_multipart[*].body_raw_ref.name",
stix2.StringConstant("^Final Report.+\\.exe$"),
)
exp = stix2.ParentheticalExpression(stix2.AndBooleanExpression([exp1, exp2]))
assert str(exp) == "(email-message:from_ref.value MATCHES '.+\\\\@example\\\\.com$' AND email-message:body_multipart[*].body_raw_ref.name MATCHES '^Final Report.+\\\\.exe$')" # noqa
def test_hash_followed_by_registryKey_expression_python_constant():
hash_exp = stix2.EqualityComparisonExpression(
"file:hashes.MD5",
stix2.HashConstant("79054025255fb1a26e4bc422aef54eb4", "MD5"),
)
o_exp1 = stix2.ObservationExpression(hash_exp)
reg_exp = stix2.EqualityComparisonExpression(
stix2.ObjectPath("windows-registry-key", ["key"]),
stix2.StringConstant("HKEY_LOCAL_MACHINE\\foo\\bar"),
)
o_exp2 = stix2.ObservationExpression(reg_exp)
fb_exp = stix2.FollowedByObservationExpression([o_exp1, o_exp2])
para_exp = stix2.ParentheticalExpression(fb_exp)
qual_exp = stix2.WithinQualifier(300)
exp = stix2.QualifiedObservationExpression(para_exp, qual_exp)
assert str(exp) == "([file:hashes.MD5 = '79054025255fb1a26e4bc422aef54eb4'] FOLLOWEDBY [windows-registry-key:key = 'HKEY_LOCAL_MACHINE\\\\foo\\\\bar']) WITHIN 300 SECONDS" # noqa
def test_hash_followed_by_registryKey_expression():
hash_exp = stix2.EqualityComparisonExpression(
"file:hashes.MD5",
stix2.HashConstant("79054025255fb1a26e4bc422aef54eb4", "MD5"),
)
o_exp1 = stix2.ObservationExpression(hash_exp)
reg_exp = stix2.EqualityComparisonExpression(
stix2.ObjectPath("windows-registry-key", ["key"]),
stix2.StringConstant("HKEY_LOCAL_MACHINE\\foo\\bar"),
)
o_exp2 = stix2.ObservationExpression(reg_exp)
fb_exp = stix2.FollowedByObservationExpression([o_exp1, o_exp2])
para_exp = stix2.ParentheticalExpression(fb_exp)
qual_exp = stix2.WithinQualifier(stix2.IntegerConstant(300))
exp = stix2.QualifiedObservationExpression(para_exp, qual_exp)
assert str(exp) == "([file:hashes.MD5 = '79054025255fb1a26e4bc422aef54eb4'] FOLLOWEDBY [windows-registry-key:key = 'HKEY_LOCAL_MACHINE\\\\foo\\\\bar']) WITHIN 300 SECONDS" # noqa
def test_file_observable_expression():
exp1 = stix2.EqualityComparisonExpression(
"file:hashes.'SHA-256'",
stix2.HashConstant(
"aec070645fe53ee3b3763059376134f058cc337247c978add178b6ccdfb0019f",
'SHA-256',
),
)
exp2 = stix2.EqualityComparisonExpression("file:mime_type", stix2.StringConstant("application/x-pdf"))
bool_exp = stix2.ObservationExpression(stix2.AndBooleanExpression([exp1, exp2]))
assert str(bool_exp) == "[file:hashes.'SHA-256' = 'aec070645fe53ee3b3763059376134f058cc337247c978add178b6ccdfb0019f' AND file:mime_type = 'application/x-pdf']" # noqa
@pytest.mark.parametrize(
"observation_class, op", [
(stix2.AndObservationExpression, 'AND'),
(stix2.OrObservationExpression, 'OR'),
],
)
def test_multiple_file_observable_expression(observation_class, op):
exp1 = stix2.EqualityComparisonExpression(
"file:hashes.'SHA-256'",
stix2.HashConstant(
"bf07a7fbb825fc0aae7bf4a1177b2b31fcf8a3feeaf7092761e18c859ee52a9c",
'SHA-256',
),
)
exp2 = stix2.EqualityComparisonExpression(
"file:hashes.MD5",
stix2.HashConstant("cead3f77f6cda6ec00f57d76c9a6879f", "MD5"),
)
bool1_exp = stix2.OrBooleanExpression([exp1, exp2])
exp3 = stix2.EqualityComparisonExpression(
"file:hashes.'SHA-256'",
stix2.HashConstant(
"aec070645fe53ee3b3763059376134f058cc337247c978add178b6ccdfb0019f",
'SHA-256',
),
)
op1_exp = stix2.ObservationExpression(bool1_exp)
op2_exp = stix2.ObservationExpression(exp3)
exp = observation_class([op1_exp, op2_exp])
assert str(exp) == "[file:hashes.'SHA-256' = 'bf07a7fbb825fc0aae7bf4a1177b2b31fcf8a3feeaf7092761e18c859ee52a9c' OR file:hashes.MD5 = 'cead3f77f6cda6ec00f57d76c9a6879f'] {} [file:hashes.'SHA-256' = 'aec070645fe53ee3b3763059376134f058cc337247c978add178b6ccdfb0019f']".format(op) # noqa
def test_root_types():
ast = stix2.ObservationExpression(
stix2.AndBooleanExpression(
[
stix2.ParentheticalExpression(
stix2.OrBooleanExpression([
stix2.EqualityComparisonExpression("a:b", stix2.StringConstant("1")),
stix2.EqualityComparisonExpression("b:c", stix2.StringConstant("2")),
]),
),
stix2.EqualityComparisonExpression(u"b:d", stix2.StringConstant("3")),
],
),
)
assert str(ast) == "[(a:b = '1' OR b:c = '2') AND b:d = '3']"
def test_artifact_payload():
exp1 = stix2.EqualityComparisonExpression(
"artifact:mime_type",
"application/vnd.tcpdump.pcap",
)
exp2 = stix2.MatchesComparisonExpression(
"artifact:payload_bin",
stix2.StringConstant("\\xd4\\xc3\\xb2\\xa1\\x02\\x00\\x04\\x00"),
)
and_exp = stix2.ObservationExpression(stix2.AndBooleanExpression([exp1, exp2]))
assert str(and_exp) == "[artifact:mime_type = 'application/vnd.tcpdump.pcap' AND artifact:payload_bin MATCHES '\\\\xd4\\\\xc3\\\\xb2\\\\xa1\\\\x02\\\\x00\\\\x04\\\\x00']" # noqa
def test_greater_than_python_constant():
exp1 = stix2.GreaterThanComparisonExpression("file:extensions.'windows-pebinary-ext'.sections[*].entropy", 7.0)
exp = stix2.ObservationExpression(exp1)
assert str(exp) == "[file:extensions.'windows-pebinary-ext'.sections[*].entropy > 7.0]"
def test_greater_than():
exp1 = stix2.GreaterThanComparisonExpression(
"file:extensions.'windows-pebinary-ext'.sections[*].entropy",
stix2.FloatConstant(7.0),
)
exp = stix2.ObservationExpression(exp1)
assert str(exp) == "[file:extensions.'windows-pebinary-ext'.sections[*].entropy > 7.0]"
def test_less_than():
exp = stix2.LessThanComparisonExpression("file:size", 1024)
assert str(exp) == "file:size < 1024"
def test_greater_than_or_equal():
exp = stix2.GreaterThanEqualComparisonExpression(
"file:size",
1024,
)
assert str(exp) == "file:size >= 1024"
def test_less_than_or_equal():
exp = stix2.LessThanEqualComparisonExpression(
"file:size",
1024,
)
assert str(exp) == "file:size <= 1024"
def test_not():
exp = stix2.LessThanComparisonExpression(
"file:size",
1024,
negated=True,
)
assert str(exp) == "file:size NOT < 1024"
def test_and_observable_expression():
exp1 = stix2.AndBooleanExpression([
stix2.EqualityComparisonExpression(
"user-account:account_type",
"unix",
),
stix2.EqualityComparisonExpression(
"user-account:user_id",
stix2.StringConstant("1007"),
),
stix2.EqualityComparisonExpression(
"user-account:account_login",
"Peter",
),
])
exp2 = stix2.AndBooleanExpression([
stix2.EqualityComparisonExpression(
"user-account:account_type",
"unix",
),
stix2.EqualityComparisonExpression(
"user-account:user_id",
stix2.StringConstant("1008"),
),
stix2.EqualityComparisonExpression(
"user-account:account_login",
"Paul",
),
])
exp3 = stix2.AndBooleanExpression([
stix2.EqualityComparisonExpression(
"user-account:account_type",
"unix",
),
stix2.EqualityComparisonExpression(
"user-account:user_id",
stix2.StringConstant("1009"),
),
stix2.EqualityComparisonExpression(
"user-account:account_login",
"Mary",
),
])
exp = stix2.AndObservationExpression([
stix2.ObservationExpression(exp1),
stix2.ObservationExpression(exp2),
stix2.ObservationExpression(exp3),
])
assert str(exp) == "[user-account:account_type = 'unix' AND user-account:user_id = '1007' AND user-account:account_login = 'Peter'] AND [user-account:account_type = 'unix' AND user-account:user_id = '1008' AND user-account:account_login = 'Paul'] AND [user-account:account_type = 'unix' AND user-account:user_id = '1009' AND user-account:account_login = 'Mary']" # noqa
def test_invalid_and_observable_expression():
with pytest.raises(ValueError) as excinfo:
stix2.AndBooleanExpression([
stix2.EqualityComparisonExpression(
"user-account:display_name",
"admin",
),
stix2.EqualityComparisonExpression(
"email-addr:display_name",
stix2.StringConstant("admin"),
),
])
assert "All operands to an 'AND' expression must have the same object type" in str(excinfo)
def test_hex():
exp_and = stix2.AndBooleanExpression([
stix2.EqualityComparisonExpression(
"file:mime_type",
"image/bmp",
),
stix2.EqualityComparisonExpression(
"file:magic_number_hex",
stix2.HexConstant("ffd8"),
),
])
exp = stix2.ObservationExpression(exp_and)
assert str(exp) == "[file:mime_type = 'image/bmp' AND file:magic_number_hex = h'ffd8']"
def test_multiple_qualifiers():
exp_and = stix2.AndBooleanExpression([
stix2.EqualityComparisonExpression(
"network-traffic:dst_ref.type",
"domain-name",
),
stix2.EqualityComparisonExpression(
"network-traffic:dst_ref.value",
"example.com",
),
])
exp_ob = stix2.ObservationExpression(exp_and)
qual_rep = stix2.RepeatQualifier(5)
qual_within = stix2.WithinQualifier(stix2.IntegerConstant(1800))
exp = stix2.QualifiedObservationExpression(stix2.QualifiedObservationExpression(exp_ob, qual_rep), qual_within)
assert str(exp) == "[network-traffic:dst_ref.type = 'domain-name' AND network-traffic:dst_ref.value = 'example.com'] REPEATS 5 TIMES WITHIN 1800 SECONDS" # noqa
def test_set_op():
exp = stix2.ObservationExpression(stix2.IsSubsetComparisonExpression(
"network-traffic:dst_ref.value",
"2001:0db8:dead:beef:0000:0000:0000:0000/64",
))
assert str(exp) == "[network-traffic:dst_ref.value ISSUBSET '2001:0db8:dead:beef:0000:0000:0000:0000/64']"
def test_timestamp():
ts = stix2.TimestampConstant('2014-01-13T07:03:17Z')
assert str(ts) == "t'2014-01-13T07:03:17Z'"
def test_boolean():
exp = stix2.EqualityComparisonExpression(
"email-message:is_multipart",
True,
)
assert str(exp) == "email-message:is_multipart = true"
def test_binary():
const = stix2.BinaryConstant("dGhpcyBpcyBhIHRlc3Q=")
exp = stix2.EqualityComparisonExpression(
"artifact:payload_bin",
const,
)
assert str(exp) == "artifact:payload_bin = b'dGhpcyBpcyBhIHRlc3Q='"
def test_list():
exp = stix2.InComparisonExpression(
"process:name",
['proccy', 'proximus', 'badproc'],
)
assert str(exp) == "process:name IN ('proccy', 'proximus', 'badproc')"
def test_list2():
# alternate way to construct an "IN" Comparison Expression
exp = stix2.EqualityComparisonExpression(
"process:name",
['proccy', 'proximus', 'badproc'],
)
assert str(exp) == "process:name IN ('proccy', 'proximus', 'badproc')"
def test_invalid_constant_type():
with pytest.raises(ValueError) as excinfo:
stix2.EqualityComparisonExpression(
"artifact:payload_bin",
{'foo': 'bar'},
)
assert 'Unable to create a constant' in str(excinfo)
def test_invalid_integer_constant():
with pytest.raises(ValueError) as excinfo:
stix2.IntegerConstant('foo')
assert 'must be an integer' in str(excinfo)
def test_invalid_timestamp_constant():
with pytest.raises(ValueError) as excinfo:
stix2.TimestampConstant('foo')
assert 'Must be a datetime object or timestamp string' in str(excinfo)
def test_invalid_float_constant():
with pytest.raises(ValueError) as excinfo:
stix2.FloatConstant('foo')
assert 'must be a float' in str(excinfo)
@pytest.mark.parametrize(
"data, result", [
(True, True),
(False, False),
('True', True),
('False', False),
('true', True),
('false', False),
('t', True),
('f', False),
('T', True),
('F', False),
(1, True),
(0, False),
],
)
def test_boolean_constant(data, result):
boolean = stix2.BooleanConstant(data)
assert boolean.value == result
def test_invalid_boolean_constant():
with pytest.raises(ValueError) as excinfo:
stix2.BooleanConstant('foo')
assert 'must be a boolean' in str(excinfo)
@pytest.mark.parametrize(
"hashtype, data", [
('MD5', 'zzz'),
('ssdeep', 'zzz=='),
],
)
def test_invalid_hash_constant(hashtype, data):
with pytest.raises(ValueError) as excinfo:
stix2.HashConstant(data, hashtype)
assert 'is not a valid {} hash'.format(hashtype) in str(excinfo)
def test_invalid_hex_constant():
with pytest.raises(ValueError) as excinfo:
stix2.HexConstant('mm')
assert "must contain an even number of hexadecimal characters" in str(excinfo)
def test_invalid_binary_constant():
with pytest.raises(ValueError) as excinfo:
stix2.BinaryConstant('foo')
assert 'must contain a base64' in str(excinfo)
def test_escape_quotes_and_backslashes():
exp = stix2.MatchesComparisonExpression(
"file:name",
"^Final Report.+\\.exe$",
)
assert str(exp) == "file:name MATCHES '^Final Report.+\\\\.exe$'"
def test_like():
exp = stix2.LikeComparisonExpression(
"directory:path",
"C:\\Windows\\%\\foo",
)
assert str(exp) == "directory:path LIKE 'C:\\\\Windows\\\\%\\\\foo'"
def test_issuperset():
exp = stix2.IsSupersetComparisonExpression(
"ipv4-addr:value",
"198.51.100.0/24",
)
assert str(exp) == "ipv4-addr:value ISSUPERSET '198.51.100.0/24'"
def test_repeat_qualifier():
qual = stix2.RepeatQualifier(stix2.IntegerConstant(5))
assert str(qual) == 'REPEATS 5 TIMES'
def test_invalid_repeat_qualifier():
with pytest.raises(ValueError) as excinfo:
stix2.RepeatQualifier('foo')
assert 'is not a valid argument for a Repeat Qualifier' in str(excinfo)
def test_invalid_within_qualifier():
with pytest.raises(ValueError) as excinfo:
stix2.WithinQualifier('foo')
assert 'is not a valid argument for a Within Qualifier' in str(excinfo)
def test_startstop_qualifier():
qual = stix2.StartStopQualifier(
stix2.TimestampConstant('2016-06-01T00:00:00Z'),
datetime.datetime(2017, 3, 12, 8, 30, 0),
)
assert str(qual) == "START t'2016-06-01T00:00:00Z' STOP t'2017-03-12T08:30:00Z'"
qual2 = stix2.StartStopQualifier(
datetime.date(2016, 6, 1),
stix2.TimestampConstant('2016-07-01T00:00:00Z'),
)
assert str(qual2) == "START t'2016-06-01T00:00:00Z' STOP t'2016-07-01T00:00:00Z'"
def test_invalid_startstop_qualifier():
with pytest.raises(ValueError) as excinfo:
stix2.StartStopQualifier(
'foo',
stix2.TimestampConstant('2016-06-01T00:00:00Z'),
)
assert 'is not a valid argument for a Start/Stop Qualifier' in str(excinfo)
with pytest.raises(ValueError) as excinfo:
stix2.StartStopQualifier(
datetime.date(2016, 6, 1),
'foo',
)
assert 'is not a valid argument for a Start/Stop Qualifier' in str(excinfo)
def test_make_constant_already_a_constant():
str_const = stix2.StringConstant('Foo')
result = stix2.patterns.make_constant(str_const)
assert result is str_const
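# A small sketch of why the plain-Python forms above work: make_constant() is
# assumed to wrap native values in the matching *Constant type, which is what
# lets the comparison-expression constructors accept raw ints, floats, and strings.
from stix2.patterns import make_constant

assert str(make_constant(1024)) == "1024"
assert str(make_constant("unix")) == "'unix'"
assert str(make_constant(7.0)) == "7.0"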
def test_parsing_comparison_expression():
patt_obj = create_pattern_object("[file:hashes.'SHA-256' = 'aec070645fe53ee3b3763059376134f058cc337247c978add178b6ccdfb0019f']")
assert str(patt_obj) == "[file:hashes.'SHA-256' = 'aec070645fe53ee3b3763059376134f058cc337247c978add178b6ccdfb0019f']"
def test_parsing_qualified_expression():
patt_obj = create_pattern_object(
"[network-traffic:dst_ref.type = 'domain-name' AND network-traffic:dst_ref.value = 'example.com'] REPEATS 5 TIMES WITHIN 1800 SECONDS",
)
assert str(
patt_obj,
) == "[network-traffic:dst_ref.type = 'domain-name' AND network-traffic:dst_ref.value = 'example.com'] REPEATS 5 TIMES WITHIN 1800 SECONDS"
def test_list_constant():
patt_obj = create_pattern_object("[network-traffic:src_ref.value IN ('10.0.0.0', '10.0.0.1', '10.0.0.2')]")
assert str(patt_obj) == "[network-traffic:src_ref.value IN ('10.0.0.0', '10.0.0.1', '10.0.0.2')]"
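# A minimal usage sketch, assuming only the builder classes exercised above:
# comparison expressions compose into boolean, observation, and qualified
# expressions, and str() renders the finished STIX pattern.
import stix2

size_check = stix2.GreaterThanComparisonExpression("file:size", 1024)
name_check = stix2.MatchesComparisonExpression(
    "file:name",
    stix2.StringConstant("^Report.+\\.exe$"),
)
observation = stix2.ObservationExpression(
    stix2.AndBooleanExpression([size_check, name_check]),
)
pattern = stix2.QualifiedObservationExpression(observation, stix2.WithinQualifier(300))
# Expected rendering:
# [file:size > 1024 AND file:name MATCHES '^Report.+\\.exe$'] WITHIN 300 SECONDS
print(str(pattern))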

View File

@ -7,11 +7,11 @@ def test_pickling():
"""
Ensure a pickle/unpickle cycle works okay.
"""
identity = stix2.Identity(
identity = stix2.v20.Identity(
id="identity--d66cb89d-5228-4983-958c-fa84ef75c88c",
name="alice",
description="this is a pickle test",
identity_class="some_class"
identity_class="some_class",
)
pickle.loads(pickle.dumps(identity))
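# A possible extension of the check above (a sketch, assuming stix2 objects
# expose serialize()): verify the unpickled copy serializes identically.
roundtripped = pickle.loads(pickle.dumps(identity))
assert roundtripped.serialize() == identity.serialize()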

View File

@ -2,15 +2,16 @@ import uuid
import pytest
from stix2 import CustomObject, EmailMIMEComponent, ExtensionsProperty, TCPExt
import stix2
from stix2.exceptions import AtLeastOnePropertyError, DictionaryKeyError
from stix2.properties import (ERROR_INVALID_ID, BinaryProperty,
BooleanProperty, DictionaryProperty,
EmbeddedObjectProperty, EnumProperty,
FloatProperty, HashesProperty, HexProperty,
IDProperty, IntegerProperty, ListProperty,
Property, ReferenceProperty, StringProperty,
TimestampProperty, TypeProperty)
from stix2.properties import (
ERROR_INVALID_ID, BinaryProperty, BooleanProperty, DictionaryProperty,
EmbeddedObjectProperty, EnumProperty, ExtensionsProperty, FloatProperty,
HashesProperty, HexProperty, IDProperty, IntegerProperty, ListProperty,
Property, ReferenceProperty, STIXObjectProperty, StringProperty,
TimestampProperty, TypeProperty,
)
from stix2.v20.common import MarkingProperty
from . import constants
@ -92,10 +93,12 @@ ID_PROP = IDProperty('my-type')
MY_ID = 'my-type--232c9d3f-49fc-4440-bb01-607f638778e7'
@pytest.mark.parametrize("value", [
MY_ID,
'my-type--00000000-0000-4000-8000-000000000000',
])
@pytest.mark.parametrize(
"value", [
MY_ID,
'my-type--00000000-0000-4000-8000-000000000000',
],
)
def test_id_property_valid(value):
assert ID_PROP.clean(value) == value
@ -133,14 +136,16 @@ def test_id_property_wrong_type():
assert str(excinfo.value) == "must start with 'my-type--'."
@pytest.mark.parametrize("value", [
'my-type--foo',
# Not a v4 UUID
'my-type--00000000-0000-0000-0000-000000000000',
'my-type--' + str(uuid.uuid1()),
'my-type--' + str(uuid.uuid3(uuid.NAMESPACE_DNS, "example.org")),
'my-type--' + str(uuid.uuid5(uuid.NAMESPACE_DNS, "example.org")),
])
@pytest.mark.parametrize(
"value", [
'my-type--foo',
# Not a v4 UUID
'my-type--00000000-0000-0000-0000-000000000000',
'my-type--' + str(uuid.uuid1()),
'my-type--' + str(uuid.uuid3(uuid.NAMESPACE_DNS, "example.org")),
'my-type--' + str(uuid.uuid5(uuid.NAMESPACE_DNS, "example.org")),
],
)
def test_id_property_not_a_valid_hex_uuid(value):
with pytest.raises(ValueError) as excinfo:
ID_PROP.clean(value)
@ -152,77 +157,117 @@ def test_id_property_default():
assert ID_PROP.clean(default) == default
@pytest.mark.parametrize("value", [
2,
-1,
3.14,
False,
])
@pytest.mark.parametrize(
"value", [
2,
-1,
3.14,
False,
],
)
def test_integer_property_valid(value):
int_prop = IntegerProperty()
assert int_prop.clean(value) is not None
@pytest.mark.parametrize("value", [
"something",
StringProperty(),
])
@pytest.mark.parametrize(
"value", [
-1,
-100,
-5 * 6,
],
)
def test_integer_property_invalid_min_with_constraints(value):
int_prop = IntegerProperty(min=0, max=180)
with pytest.raises(ValueError) as excinfo:
int_prop.clean(value)
assert "minimum value is" in str(excinfo.value)
@pytest.mark.parametrize(
"value", [
181,
200,
50 * 6,
],
)
def test_integer_property_invalid_max_with_constraints(value):
int_prop = IntegerProperty(min=0, max=180)
with pytest.raises(ValueError) as excinfo:
int_prop.clean(value)
assert "maximum value is" in str(excinfo.value)
@pytest.mark.parametrize(
"value", [
"something",
StringProperty(),
],
)
def test_integer_property_invalid(value):
int_prop = IntegerProperty()
with pytest.raises(ValueError):
int_prop.clean(value)
@pytest.mark.parametrize("value", [
2,
-1,
3.14,
False,
])
@pytest.mark.parametrize(
"value", [
2,
-1,
3.14,
False,
],
)
def test_float_property_valid(value):
int_prop = FloatProperty()
assert int_prop.clean(value) is not None
@pytest.mark.parametrize("value", [
"something",
StringProperty(),
])
@pytest.mark.parametrize(
"value", [
"something",
StringProperty(),
],
)
def test_float_property_invalid(value):
int_prop = FloatProperty()
with pytest.raises(ValueError):
int_prop.clean(value)
@pytest.mark.parametrize("value", [
True,
False,
'True',
'False',
'true',
'false',
'TRUE',
'FALSE',
'T',
'F',
't',
'f',
1,
0,
])
@pytest.mark.parametrize(
"value", [
True,
False,
'True',
'False',
'true',
'false',
'TRUE',
'FALSE',
'T',
'F',
't',
'f',
1,
0,
],
)
def test_boolean_property_valid(value):
bool_prop = BooleanProperty()
assert bool_prop.clean(value) is not None
@pytest.mark.parametrize("value", [
'abc',
['false'],
{'true': 'true'},
2,
-1,
])
@pytest.mark.parametrize(
"value", [
'abc',
['false'],
{'true': 'true'},
2,
-1,
],
)
def test_boolean_property_invalid(value):
bool_prop = BooleanProperty()
with pytest.raises(ValueError):
@ -241,11 +286,13 @@ def test_reference_property():
ref_prop.clean("my-type--00000000-0000-0000-0000-000000000000")
@pytest.mark.parametrize("value", [
'2017-01-01T12:34:56Z',
'2017-01-01 12:34:56',
'Jan 1 2017 12:34:56',
])
@pytest.mark.parametrize(
"value", [
'2017-01-01T12:34:56Z',
'2017-01-01 12:34:56',
'Jan 1 2017 12:34:56',
],
)
def test_timestamp_property_valid(value):
ts_prop = TimestampProperty()
assert ts_prop.clean(value) == constants.FAKE_TIME
@ -275,25 +322,33 @@ def test_hex_property():
hex_prop.clean("foobar")
@pytest.mark.parametrize("d", [
{'description': 'something'},
[('abc', 1), ('bcd', 2), ('cde', 3)],
])
@pytest.mark.parametrize(
"d", [
{'description': 'something'},
[('abc', 1), ('bcd', 2), ('cde', 3)],
],
)
def test_dictionary_property_valid(d):
dict_prop = DictionaryProperty()
assert dict_prop.clean(d)
@pytest.mark.parametrize("d", [
[{'a': 'something'}, "Invalid dictionary key a: (shorter than 3 characters)."],
[{'a'*300: 'something'}, "Invalid dictionary key aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
"aaaaaaaaaaaaaaaaaaaaaaa: (longer than 256 characters)."],
[{'Hey!': 'something'}, "Invalid dictionary key Hey!: (contains characters other thanlowercase a-z, "
"uppercase A-Z, numerals 0-9, hyphen (-), or underscore (_))."],
])
@pytest.mark.parametrize(
"d", [
[{'a': 'something'}, "Invalid dictionary key a: (shorter than 3 characters)."],
[
{'a'*300: 'something'}, "Invalid dictionary key aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
"aaaaaaaaaaaaaaaaaaaaaaa: (longer than 256 characters).",
],
[
{'Hey!': 'something'}, "Invalid dictionary key Hey!: (contains characters other than lowercase a-z, "
"uppercase A-Z, numerals 0-9, hyphen (-), or underscore (_)).",
],
],
)
def test_dictionary_property_invalid_key(d):
dict_prop = DictionaryProperty()
@ -303,18 +358,20 @@ def test_dictionary_property_invalid_key(d):
assert str(excinfo.value) == d[1]
@pytest.mark.parametrize("d", [
({}, "The dictionary property must contain a non-empty dictionary"),
# TODO: This error message could be made more helpful. The error is caused
# because `json.loads()` doesn't like the *single* quotes around the key
# name, even though they are valid in a Python dictionary. While technically
# accurate (a string is not a dictionary), if we want to be able to load
# string-encoded "dictionaries" that are, we need a better error message
# or an alternative to `json.loads()` ... and preferably *not* `eval()`. :-)
# Changing the following to `'{"description": "something"}'` does not cause
# any ValueError to be raised.
("{'description': 'something'}", "The dictionary property must contain a dictionary"),
])
@pytest.mark.parametrize(
"d", [
({}, "The dictionary property must contain a non-empty dictionary"),
# TODO: This error message could be made more helpful. The error is caused
# because `json.loads()` doesn't like the *single* quotes around the key
# name, even though they are valid in a Python dictionary. While technically
# accurate (a string is not a dictionary), if we want to be able to load
# string-encoded "dictionaries" that are, we need a better error message
# or an alternative to `json.loads()` ... and preferably *not* `eval()`. :-)
# Changing the following to `'{"description": "something"}'` does not cause
# any ValueError to be raised.
("{'description': 'something'}", "The dictionary property must contain a dictionary"),
],
)
def test_dictionary_property_invalid(d):
dict_prop = DictionaryProperty()
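# An aside on the TODO above (an illustration, not project code): json.loads()
# rejects single-quoted keys, while ast.literal_eval() accepts Python
# dict-literal syntax, which is one alternative the comment hints at.
import ast
import json

try:
    json.loads("{'description': 'something'}")
except ValueError:  # json.JSONDecodeError subclasses ValueError
    pass
assert ast.literal_eval("{'description': 'something'}") == {'description': 'something'}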
@ -324,9 +381,11 @@ def test_dictionary_property_invalid(d):
def test_property_list_of_dictionary():
@CustomObject('x-new-obj', [
('property1', ListProperty(DictionaryProperty(), required=True)),
])
@stix2.v20.CustomObject(
'x-new-obj', [
('property1', ListProperty(DictionaryProperty(), required=True)),
],
)
class NewObj():
pass
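# For illustration only (a hypothetical call, not the elided test body): a
# custom object declared with ListProperty(DictionaryProperty(), required=True)
# is assumed to accept a list of plain dicts, e.g.
#   test_obj = NewObj(property1=[{'foo': 'bar'}])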
@ -334,19 +393,23 @@ def test_property_list_of_dictionary():
assert test_obj.property1[0]['foo'] == 'bar'
@pytest.mark.parametrize("value", [
{"sha256": "6db12788c37247f2316052e142f42f4b259d6561751e5f401a1ae2a6df9c674b"},
[('MD5', '2dfb1bcc980200c6706feee399d41b3f'), ('RIPEMD-160', 'b3a8cd8a27c90af79b3c81754f267780f443dfef')],
])
@pytest.mark.parametrize(
"value", [
{"sha256": "6db12788c37247f2316052e142f42f4b259d6561751e5f401a1ae2a6df9c674b"},
[('MD5', '2dfb1bcc980200c6706feee399d41b3f'), ('RIPEMD-160', 'b3a8cd8a27c90af79b3c81754f267780f443dfef')],
],
)
def test_hashes_property_valid(value):
hash_prop = HashesProperty()
assert hash_prop.clean(value)
@pytest.mark.parametrize("value", [
{"MD5": "a"},
{"SHA-256": "2dfb1bcc980200c6706feee399d41b3f"},
])
@pytest.mark.parametrize(
"value", [
{"MD5": "a"},
{"SHA-256": "2dfb1bcc980200c6706feee399d41b3f"},
],
)
def test_hashes_property_invalid(value):
hash_prop = HashesProperty()
@ -355,11 +418,11 @@ def test_hashes_property_invalid(value):
def test_embedded_property():
emb_prop = EmbeddedObjectProperty(type=EmailMIMEComponent)
mime = EmailMIMEComponent(
emb_prop = EmbeddedObjectProperty(type=stix2.v20.EmailMIMEComponent)
mime = stix2.v20.EmailMIMEComponent(
content_type="text/plain; charset=utf-8",
content_disposition="inline",
body="Cats are funny!"
body="Cats are funny!",
)
assert emb_prop.clean(mime)
@ -367,11 +430,13 @@ def test_embedded_property():
emb_prop.clean("string")
@pytest.mark.parametrize("value", [
['a', 'b', 'c'],
('a', 'b', 'c'),
'b',
])
@pytest.mark.parametrize(
"value", [
['a', 'b', 'c'],
('a', 'b', 'c'),
'b',
],
)
def test_enum_property_valid(value):
enum_prop = EnumProperty(value)
assert enum_prop.clean('b')
@ -387,17 +452,19 @@ def test_extension_property_valid():
ext_prop = ExtensionsProperty(enclosing_type='file')
assert ext_prop({
'windows-pebinary-ext': {
'pe_type': 'exe'
'pe_type': 'exe',
},
})
@pytest.mark.parametrize("data", [
1,
{'foobar-ext': {
'pe_type': 'exe'
}},
])
@pytest.mark.parametrize(
"data", [
1,
{'foobar-ext': {
'pe_type': 'exe',
}},
],
)
def test_extension_property_invalid(data):
ext_prop = ExtensionsProperty(enclosing_type='file')
with pytest.raises(ValueError):
@ -407,14 +474,36 @@ def test_extension_property_invalid(data):
def test_extension_property_invalid_type():
ext_prop = ExtensionsProperty(enclosing_type='indicator')
with pytest.raises(ValueError) as excinfo:
ext_prop.clean({
'windows-pebinary-ext': {
'pe_type': 'exe'
}}
ext_prop.clean(
{
'windows-pebinary-ext': {
'pe_type': 'exe',
},
},
)
assert "Can't parse unknown extension" in str(excinfo.value)
def test_extension_at_least_one_property_constraint():
with pytest.raises(AtLeastOnePropertyError):
TCPExt()
stix2.v20.TCPExt()
def test_marking_property_error():
mark_prop = MarkingProperty()
with pytest.raises(ValueError) as excinfo:
mark_prop.clean('my-marking')
assert str(excinfo.value) == "must be a Statement, TLP Marking or a registered marking."
def test_stix_property_not_compliant_spec():
# This is a 2.0 test only...
indicator = stix2.v20.Indicator(spec_version="2.0", allow_custom=True, **constants.INDICATOR_KWARGS)
stix_prop = STIXObjectProperty(spec_version="2.0")
with pytest.raises(ValueError) as excinfo:
stix_prop.clean(indicator)
assert "Spec version 2.0 bundles don't yet support containing objects of a different spec version." in str(excinfo.value)
