Merge branch 'master' of github.com:oasis-open/cti-python-stix2

master
chrisr3d 2018-12-14 10:09:58 +01:00
commit a68a43a732
200 changed files with 21456 additions and 4034 deletions

.gitignore
View File

@ -68,3 +68,31 @@ cache.sqlite
# PyCharm
.idea/
### macOS template
# General
.DS_Store
.AppleDouble
.LSOverride
# Icon must end with two \r
Icon
# Thumbnails
._*
# Files that might appear in the root of a volume
.DocumentRevisions-V100
.fseventsd
.Spotlight-V100
.TemporaryItems
.Trashes
.VolumeIcon.icns
.com.apple.timemachine.donotpresent
# Directories potentially created on remote AFP share
.AppleDB
.AppleDesktop
Network Trash Folder
Temporary Items
.apdisk

View File

@ -2,6 +2,7 @@
skip = workbench.py
not_skip = __init__.py
known_third_party =
antlr4,
dateutil,
medallion,
pytest,
@ -14,3 +15,5 @@ known_third_party =
taxii2client,
known_first_party = stix2
force_sort_within_sections = 1
multi_line_output = 5
include_trailing_comma = True
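A sketch of the import layout these settings (multi-line mode 5 plus trailing commas) produce; the example mirrors the reformatted imports later in this diff:

from stix2.datastore.filesystem import (
    FileSystemSink, FileSystemSource, FileSystemStore,
)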

View File

@ -1,11 +1,16 @@
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v1.3.0
hooks:
- id: trailing-whitespace
- id: flake8
args:
- --max-line-length=160
- id: check-merge-conflict
- repo: https://github.com/asottile/add-trailing-comma
rev: v0.6.4
hooks:
- id: add-trailing-comma
- repo: https://github.com/FalconSocial/pre-commit-python-sorter
sha: b57843b0b874df1d16eb0bef00b868792cb245c2
hooks:

View File

@ -1,6 +1,15 @@
CHANGELOG
=========
1.1.0 - 2018-12-11
- Most (if not all) STIX 2.1 SDOs/SROs and core objects have been implemented according to the latest CSD/WD document
- There is an implementation for the conversion scales
- #196, #193 Removing duplicate code for: properties, registering objects, parsing objects, custom objects
- #80, #197 Most (if not all) tests created for v20 are also implemented for v21
- #189 Added extra checks for the pre-commit tool
- #202 It is now possible to pass a Bundle into the add() method of Memory datastores
1.0.4 - 2018-11-15
* #225 MemorySource fix to support custom objects
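The #202 changelog entry above means a Bundle can now be handed directly to a Memory datastore. A minimal sketch, not taken from the diff; the object values are illustrative:

import stix2

indicator = stix2.Indicator(
    labels=["malicious-activity"],
    pattern="[file:hashes.md5 = 'd41d8cd98f00b204e9800998ecf8427e']",
)
bundle = stix2.Bundle(indicator)

store = stix2.MemoryStore()
store.add(bundle)                # the objects inside the Bundle are stored individually
print(store.get(indicator.id))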

View File

@ -1,42 +1,34 @@
|Build_Status| |Coverage| |Version| |Downloads_Badge|
cti-python-stix2
================
This is an `OASIS TC Open Repository <https://www.oasis-open.org/resources/open-repositories/>`__.
See the `Governance <#governance>`__ section for more information.
This repository provides Python APIs for serializing and de-serializing STIX2
JSON content, along with higher-level APIs for common tasks, including data
markings, versioning, and for resolving STIX IDs across multiple data sources.
For more information, see `the documentation <https://stix2.readthedocs.io/>`__ on ReadTheDocs.
Installation
------------
Install with `pip <https://pip.pypa.io/en/stable/>`__:
.. code-block:: bash
$ pip install stix2
Usage
-----
To create a STIX object, provide keyword arguments to the type's constructor.
Certain required attributes of all objects, such as ``type`` or ``id``, will
be set automatically if not provided as keyword arguments.
.. code-block:: python
from stix2 import Indicator
@ -44,135 +36,100 @@ arguments.
labels=["malicious-activity"],
pattern="[file:hashes.md5 = 'd41d8cd98f00b204e9800998ecf8427e']")
To parse a STIX JSON string into a Python STIX object, use ``parse()``:
.. code-block:: python
from stix2 import parse
indicator = parse("""{
"type": "indicator",
"spec_version": "2.1",
"id": "indicator--dbcbd659-c927-4f9a-994f-0a2632274394",
"created": "2017-09-26T23:33:39.829Z",
"modified": "2017-09-26T23:33:39.829Z",
"name": "File hash for malware variant",
"indicator_types": [
"malicious-activity"
],
"pattern": "[file:hashes.md5 ='d41d8cd98f00b204e9800998ecf8427e']",
"valid_from": "2017-09-26T23:33:39.829952Z"
}""")
print(indicator)
For more in-depth documentation, please see `https://stix2.readthedocs.io/ <https://stix2.readthedocs.io/>`__.
STIX 2.X Technical Specification Support
----------------------------------------
This version of python-stix2 brings initial support for STIX 2.1, currently at
the CSD level. The intention is to help debug components of the library and
also check for problems that should be fixed in the specification.
The `stix2` Python library is built to support multiple versions of the STIX
Technical Specification. With every major release of stix2 the ``import stix2``
statement will automatically load the SDO/SROs equivalent to the most recent
supported 2.X Committee Specification. Please see the library documentation for
more details.
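As a sketch of what that versioning scheme means in practice (not part of the README; it assumes the v20/v21 packages export the SDO classes, as the docs pages added in this commit indicate):

import stix2                                     # top-level names follow the latest supported 2.X release
from stix2.v20 import Indicator as Indicator20   # 2.0 objects, pinned explicitly
from stix2.v21 import Indicator as Indicator21   # 2.1 objects added by this release

indicator = stix2.Indicator(                     # resolves to the 2.0 Indicator at this release
    labels=["malicious-activity"],
    pattern="[file:hashes.md5 = 'd41d8cd98f00b204e9800998ecf8427e']",
)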
Governance
----------
This GitHub public repository (**https://github.com/oasis-open/cti-python-stix2**) was
`proposed <https://lists.oasis-open.org/archives/cti/201702/msg00008.html>`__ and
`approved <https://www.oasis-open.org/committees/download.php/60009/>`__
[`bis <https://issues.oasis-open.org/browse/TCADMIN-2549>`__] by the
`OASIS Cyber Threat Intelligence (CTI) TC <https://www.oasis-open.org/committees/cti/>`__
as an `OASIS TC Open Repository <https://www.oasis-open.org/resources/open-repositories/>`__
to support development of open source resources related to Technical Committee work.
While this TC Open Repository remains associated with the sponsor TC, its
development priorities, leadership, intellectual property terms, participation
rules, and other matters of governance are `separate and distinct
<https://github.com/oasis-open/cti-python-stix2/blob/master/CONTRIBUTING.md#governance-distinct-from-oasis-tc-process>`__
from the OASIS TC Process and related policies.
All contributions made to this TC Open Repository are subject to open
source license terms expressed in the `BSD-3-Clause License <https://www.oasis-open.org/sites/www.oasis-open.org/files/BSD-3-Clause.txt>`__.
That license was selected as the declared `"Applicable License" <https://www.oasis-open.org/resources/open-repositories/licenses>`__
when the TC Open Repository was created.
As documented in `"Public Participation Invited
<https://github.com/oasis-open/cti-python-stix2/blob/master/CONTRIBUTING.md#public-participation-invited>`__",
contributions to this OASIS TC Open Repository are invited from all parties,
whether affiliated with OASIS or not. Participants must have a GitHub account,
but no fees or OASIS membership obligations are required. Participation is
expected to be consistent with the `OASIS TC Open Repository Guidelines and Procedures
<https://www.oasis-open.org/policies-guidelines/open-repositories>`__,
the open source `LICENSE <https://github.com/oasis-open/cti-python-stix2/blob/master/LICENSE>`__
designated for this particular repository, and the requirement for an
`Individual Contributor License Agreement <https://www.oasis-open.org/resources/open-repositories/cla/individual-cla>`__
that governs intellectual property.
Maintainers
~~~~~~~~~~~
TC Open Repository `Maintainers <https://www.oasis-open.org/resources/open-repositories/maintainers-guide>`__
are responsible for oversight of this project's community development
activities, including evaluation of GitHub
`pull requests <https://github.com/oasis-open/cti-python-stix2/blob/master/CONTRIBUTING.md#fork-and-pull-collaboration-model>`__
and `preserving <https://www.oasis-open.org/policies-guidelines/open-repositories#repositoryManagement>`__
open source principles of openness and fairness. Maintainers are recognized
and trusted experts who serve to implement community goals and consensus design
preferences.
Initially, the associated TC members have designated one or more persons to
serve as Maintainer(s); subsequently, participating community members may
select additional or substitute Maintainers, per `consensus agreements
<https://www.oasis-open.org/resources/open-repositories/maintainers-guide#additionalMaintainers>`__.
.. _currentmaintainers:
**Current Maintainers of this TC Open Repository**
- `Chris Lenk <mailto:clenk@mitre.org>`__; GitHub ID:
https://github.com/clenk/; WWW: `MITRE Corporation <http://www.mitre.org/>`__
- `Emmanuelle Vargas-Gonzalez <mailto:emmanuelle@mitre.org>`__; GitHub ID:
https://github.com/emmanvg/; WWW: `MITRE
@ -181,39 +138,32 @@ repositories/maintainers-guide#additionalMaintainers>`__.
About OASIS TC Open Repositories
--------------------------------
- `TC Open Repositories: Overview and Resources <https://www.oasis-open.org/resources/open-repositories/>`__
- `Frequently Asked Questions <https://www.oasis-open.org/resources/open-repositories/faq>`__
- `Open Source Licenses <https://www.oasis-open.org/resources/open-repositories/licenses>`__
- `Contributor License Agreements (CLAs) <https://www.oasis-open.org/resources/open-repositories/cla>`__
- `Maintainers' Guidelines and Agreement <https://www.oasis-open.org/resources/open-repositories/maintainers-guide>`__
Feedback
--------
Questions or comments about this TC Open Repository's activities should be
composed as GitHub issues or comments. If use of an issue/comment is not
possible or appropriate, questions may be directed by email to the
Maintainer(s) `listed above <#currentmaintainers>`__. Please send general
questions about TC Open Repository participation to OASIS Staff at
repository-admin@oasis-open.org and any specific CLA-related questions
to repository-cla@oasis-open.org.
.. |Build_Status| image:: https://travis-ci.org/oasis-open/cti-python-stix2.svg?branch=master
:target: https://travis-ci.org/oasis-open/cti-python-stix2
:alt: Build Status
.. |Coverage| image:: https://codecov.io/gh/oasis-open/cti-python-stix2/branch/master/graph/badge.svg
:target: https://codecov.io/gh/oasis-open/cti-python-stix2
:alt: Coverage
.. |Version| image:: https://img.shields.io/pypi/v/stix2.svg?maxAge=3600
:target: https://pypi.python.org/pypi/stix2/
:alt: Version
.. |Downloads_Badge| image:: https://img.shields.io/pypi/dm/stix2.svg?maxAge=3600
:target: https://pypi.python.org/pypi/stix2/
:alt: Downloads

View File

@ -0,0 +1,5 @@
scales
=======================
.. automodule:: stix2.confidence.scales
:members:

View File

@ -0,0 +1,5 @@
confidence
================
.. automodule:: stix2.confidence
:members:

View File

@ -0,0 +1,5 @@
bundle
================
.. automodule:: stix2.v20.bundle
:members:

View File

@ -0,0 +1,5 @@
bundle
================
.. automodule:: stix2.v21.bundle
:members:

View File

@ -0,0 +1,5 @@
common
================
.. automodule:: stix2.v21.common
:members:

View File

@ -0,0 +1,5 @@
observables
=====================
.. automodule:: stix2.v21.observables
:members:

View File

@ -0,0 +1,5 @@
sdo
=============
.. automodule:: stix2.v21.sdo
:members:

View File

@ -0,0 +1,5 @@
sro
=============
.. automodule:: stix2.v21.sro
:members:

View File

@ -1,3 +1,4 @@
import datetime
import os
import re
import sys
@ -6,6 +7,7 @@ from six import class_types
from sphinx.ext.autodoc import ClassDocumenter
from stix2.base import _STIXBase
from stix2.version import __version__
sys.path.insert(0, os.path.abspath('..'))
@ -31,11 +33,11 @@ source_suffix = '.rst'
master_doc = 'index'
project = 'stix2'
copyright = '{}, OASIS Open'.format(datetime.date.today().year)
author = 'OASIS Open'
version = __version__
release = __version__
language = None
exclude_patterns = ['_build', '_templates', 'Thumbs.db', '.DS_Store', 'guide/.ipynb_checkpoints']
@ -49,7 +51,7 @@ html_sidebars = {
'navigation.html',
'relations.html',
'searchbox.html',
],
}
latex_elements = {}

View File

@ -7,8 +7,10 @@ import stix2
def main():
collection = Collection(
"http://127.0.0.1:5000/trustgroup1/collections/52892447-4d7e-4f70-b94d-d7f22742ff63/",
user="admin", password="Password0",
)
# instantiate TAXII data source
taxii = stix2.TAXIICollectionSource(collection)

View File

@ -1,12 +1,10 @@
[bumpversion]
current_version = 1.1.0
commit = True
tag = True
[bumpversion:file:stix2/version.py]
[metadata]
license_file = LICENSE

View File

@ -11,26 +11,27 @@ VERSION_FILE = os.path.join(BASE_DIR, 'stix2', 'version.py')
def get_version():
with open(VERSION_FILE) as f:
for line in f.readlines():
if line.startswith('__version__'):
version = line.split()[-1].strip('"')
return version
raise AttributeError("Package does not have a __version__")
def get_long_description():
with open('README.rst') as f:
return f.read()
setup(
name='stix2',
version=get_version(),
description='Produce and consume STIX 2 JSON content',
long_description=get_long_description(),
url='https://oasis-open.github.io/cti-documentation/',
author='OASIS Cyber Threat Intelligence Technical Committee',
author_email='cti-users@lists.oasis-open.org',
maintainer='Chris Lenk, Emmanuelle Vargas-Gonzalez',
maintainer_email='clenk@mitre.org, emmanuelle@mitre.org',
license='BSD',
classifiers=[
'Development Status :: 4 - Beta',
@ -45,7 +46,7 @@ setup(
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
],
keywords='stix stix2 json cti cyber threat intelligence',
packages=find_packages(exclude=['*.test']),
install_requires=[
'python-dateutil',
@ -55,7 +56,12 @@ setup(
'six',
'stix2-patterns',
],
project_urls={
'Documentation': 'https://stix2.readthedocs.io/',
'Source Code': 'https://github.com/oasis-open/cti-python-stix2/',
'Bug Tracker': 'https://github.com/oasis-open/cti-python-stix2/issues/',
},
extras_require={
'taxii': ['taxii2-client'],
},
)

View File

@ -3,6 +3,7 @@
.. autosummary::
:toctree: api
confidence
core
datastore
environment
@ -11,49 +12,57 @@
patterns
properties
utils
v20.bundle
v20.common
v20.observables
v20.sdo
v20.sro
v21.bundle
v21.common
v21.observables
v21.sdo
v21.sro
workbench
""" """
# flake8: noqa # flake8: noqa
from .core import Bundle, _collect_stix2_obj_maps, _register_type, parse from .confidence import scales
from .core import _collect_stix2_mappings, parse, parse_observable
from .datastore import CompositeDataSource from .datastore import CompositeDataSource
from .datastore.filesystem import (FileSystemSink, FileSystemSource, from .datastore.filesystem import (
FileSystemStore) FileSystemSink, FileSystemSource, FileSystemStore,
)
from .datastore.filters import Filter from .datastore.filters import Filter
from .datastore.memory import MemorySink, MemorySource, MemoryStore from .datastore.memory import MemorySink, MemorySource, MemoryStore
from .datastore.taxii import (TAXIICollectionSink, TAXIICollectionSource, from .datastore.taxii import (
TAXIICollectionStore) TAXIICollectionSink, TAXIICollectionSource, TAXIICollectionStore,
)
from .environment import Environment, ObjectFactory from .environment import Environment, ObjectFactory
from .markings import (add_markings, clear_markings, get_markings, is_marked, from .markings import (
remove_markings, set_markings) add_markings, clear_markings, get_markings, is_marked, remove_markings,
from .patterns import (AndBooleanExpression, AndObservationExpression, set_markings,
BasicObjectPathComponent, BinaryConstant, )
BooleanConstant, EqualityComparisonExpression, from .patterns import (
FloatConstant, FollowedByObservationExpression, AndBooleanExpression, AndObservationExpression, BasicObjectPathComponent,
GreaterThanComparisonExpression, BinaryConstant, BooleanConstant, EqualityComparisonExpression,
GreaterThanEqualComparisonExpression, HashConstant, FloatConstant, FollowedByObservationExpression,
HexConstant, InComparisonExpression, IntegerConstant, GreaterThanComparisonExpression, GreaterThanEqualComparisonExpression,
IsSubsetComparisonExpression, HashConstant, HexConstant, InComparisonExpression, IntegerConstant,
IsSupersetComparisonExpression, IsSubsetComparisonExpression, IsSupersetComparisonExpression,
LessThanComparisonExpression, LessThanComparisonExpression, LessThanEqualComparisonExpression,
LessThanEqualComparisonExpression, LikeComparisonExpression, ListConstant, ListObjectPathComponent,
LikeComparisonExpression, ListConstant, MatchesComparisonExpression, ObjectPath, ObservationExpression,
ListObjectPathComponent, MatchesComparisonExpression, OrBooleanExpression, OrObservationExpression, ParentheticalExpression,
ObjectPath, ObservationExpression, OrBooleanExpression, QualifiedObservationExpression, ReferenceObjectPathComponent,
OrObservationExpression, ParentheticalExpression, RepeatQualifier, StartStopQualifier, StringConstant, TimestampConstant,
QualifiedObservationExpression, WithinQualifier,
ReferenceObjectPathComponent, RepeatQualifier, )
StartStopQualifier, StringConstant, TimestampConstant,
WithinQualifier)
from .utils import new_version, revoke from .utils import new_version, revoke
from .v20 import * # This import will always be the latest STIX 2.X version from .v20 import * # This import will always be the latest STIX 2.X version
from .version import __version__ from .version import __version__
_collect_stix2_obj_maps() _collect_stix2_mappings()
DEFAULT_VERSION = "2.0" # Default version will always be the latest STIX 2.X version DEFAULT_VERSION = '2.0' # Default version will always be the latest STIX 2.X version
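The newly exported parse_observable() can be called directly; a minimal sketch (the observable value is illustrative):

from stix2 import parse_observable

# Parse a single STIX Cyber Observable dict; when no version is given, the
# library's DEFAULT_VERSION is used.
ip = parse_observable(
    {"type": "ipv4-addr", "value": "198.51.100.3"},
    version="2.0",
)
print(ip.value)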

View File

@ -1,4 +1,4 @@
"""Base classes for type definitions in the stix2 library.""" """Base classes for type definitions in the STIX2 library."""
import collections import collections
import copy import copy
@ -6,11 +6,12 @@ import datetime as dt
import simplejson as json
from .exceptions import (
AtLeastOnePropertyError, CustomContentError, DependentPropertiesError,
ExtraPropertiesError, ImmutableError, InvalidObjRefError,
InvalidValueError, MissingPropertiesError,
MutuallyExclusivePropertiesError,
)
from .markings.utils import validate
from .utils import NOW, find_property_index, format_datetime, get_timestamp
from .utils import new_version as _new_version
@ -104,11 +105,11 @@ class _STIXBase(collections.Mapping):
def _check_at_least_one_property(self, list_of_properties=None):
if not list_of_properties:
list_of_properties = sorted(list(self.__class__._properties.keys()))
if 'type' in list_of_properties:
list_of_properties.remove('type')
current_properties = self.properties_populated()
list_of_properties_populated = set(list_of_properties).intersection(current_properties)
if list_of_properties and (not list_of_properties_populated or list_of_properties_populated == set(['extensions'])):
raise AtLeastOnePropertyError(self.__class__, list_of_properties)
def _check_properties_dependency(self, list_of_properties, list_of_dependent_properties):
@ -121,8 +122,8 @@ class _STIXBase(collections.Mapping):
raise DependentPropertiesError(self.__class__, failed_dependency_pairs)
def _check_object_constraints(self):
for m in self.get('granular_markings', []):
validate(self, m.get('selectors'))
def __init__(self, allow_custom=False, interoperability=False, **kwargs):
cls = self.__class__
@ -191,7 +192,7 @@ class _STIXBase(collections.Mapping):
# usual behavior of this method reads an __init__-assigned attribute,
# which would cause infinite recursion. So this check disables all
# attribute reads until the instance has been properly initialized.
unpickling = '_inner' not in self.__dict__
if not unpickling and name in self:
return self.__getitem__(name)
raise AttributeError("'%s' object has no attribute '%s'" %
@ -207,8 +208,10 @@ class _STIXBase(collections.Mapping):
def __repr__(self):
props = [(k, self[k]) for k in self.object_properties() if self.get(k)]
return '{0}({1})'.format(
self.__class__.__name__,
', '.join(['{0!s}={1!r}'.format(k, v) for k, v in props]),
)
def __deepcopy__(self, memo):
# Assume: we can ignore the memo argument, because no object will ever contain the same sub-object multiple times.
@ -274,7 +277,7 @@ class _STIXBase(collections.Mapping):
def sort_by(element):
return find_property_index(self, *element)
kwargs.update({'indent': 4, 'separators': (',', ': '), 'item_sort_key': sort_by})
if include_optional_defaults:
return json.dumps(self, cls=STIXJSONIncludeOptionalDefaultsEncoder, **kwargs)
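A sketch of the effect of the serialization settings above, assuming str() routes through this encoder as in released versions of the library:

from stix2 import Indicator

ind = Indicator(
    labels=["malicious-activity"],
    pattern="[file:hashes.md5 = 'd41d8cd98f00b204e9800998ecf8427e']",
)
# 4-space indentation, ', ' / ': ' separators, and properties ordered by
# find_property_index() rather than alphabetically.
print(str(ind))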

View File

@ -0,0 +1,10 @@
"""
Functions to operate with STIX2 Confidence scales.
.. autosummary::
:toctree: confidence
scales
|
"""

stix2/confidence/scales.py
View File

@ -0,0 +1,571 @@
# -*- coding: utf-8 -*-
"""Functions to perform conversions between the different Confidence scales.
As specified in STIX Version 2.1. Part 1: STIX Core Concepts - Appendix B"""
def none_low_med_high_to_value(scale_value):
"""
This method will transform a string value from the None / Low / Med /
High scale to its confidence integer representation.
The scale for this confidence representation is the following:
.. list-table:: None, Low, Med, High to STIX Confidence
:header-rows: 1
* - None/ Low/ Med/ High
- STIX Confidence Value
* - Not Specified
- Not Specified
* - None
- 0
* - Low
- 15
* - Med
- 50
* - High
- 85
Args:
scale_value (str): A string value from the scale. Accepted strings are
"None", "Low", "Med" and "High". Argument is case sensitive.
Returns:
int: The numerical representation corresponding to values in the
None / Low / Med / High scale.
Raises:
ValueError: If `scale_value` is not within the accepted strings.
"""
if scale_value == 'None':
return 0
elif scale_value == 'Low':
return 15
elif scale_value == 'Med':
return 50
elif scale_value == 'High':
return 85
else:
raise ValueError("STIX Confidence value cannot be determined for %s" % scale_value)
def value_to_none_low_medium_high(confidence_value):
"""
This method will transform an integer value into the None / Low / Med /
High scale string representation.
The scale for this confidence representation is the following:
.. list-table:: STIX Confidence to None, Low, Med, High
:header-rows: 1
* - Range of Values
- None/ Low/ Med/ High
* - 0
- None
* - 1-29
- Low
* - 30-69
- Med
* - 70-100
- High
Args:
confidence_value (int): An integer value between 0 and 100.
Returns:
str: A string corresponding to the None / Low / Med / High scale.
Raises:
ValueError: If `confidence_value` is out of bounds.
"""
if confidence_value == 0:
return 'None'
elif 29 >= confidence_value >= 1:
return 'Low'
elif 69 >= confidence_value >= 30:
return 'Med'
elif 100 >= confidence_value >= 70:
return 'High'
else:
raise ValueError("Range of values out of bounds: %s" % confidence_value)
def zero_ten_to_value(scale_value):
"""
This method will transform a string value from the 0-10 scale to its
confidence integer representation.
The scale for this confidence representation is the following:
.. list-table:: 0-10 to STIX Confidence
:header-rows: 1
* - 0-10 Scale
- STIX Confidence Value
* - 0
- 0
* - 1
- 10
* - 2
- 20
* - 3
- 30
* - 4
- 40
* - 5
- 50
* - 6
- 60
* - 7
- 70
* - 8
- 80
* - 9
- 90
* - 10
- 100
Args:
scale_value (str): A string value from the scale. Accepted strings are "0"
through "10" inclusive.
Returns:
int: The numerical representation corresponding to values in the 0-10
scale.
Raises:
ValueError: If `scale_value` is not within the accepted strings.
"""
if scale_value == '0':
return 0
elif scale_value == '1':
return 10
elif scale_value == '2':
return 20
elif scale_value == '3':
return 30
elif scale_value == '4':
return 40
elif scale_value == '5':
return 50
elif scale_value == '6':
return 60
elif scale_value == '7':
return 70
elif scale_value == '8':
return 80
elif scale_value == '9':
return 90
elif scale_value == '10':
return 100
else:
raise ValueError("STIX Confidence value cannot be determined for %s" % scale_value)
def value_to_zero_ten(confidence_value):
"""
This method will transform an integer value into the 0-10 scale string
representation.
The scale for this confidence representation is the following:
.. list-table:: STIX Confidence to 0-10
:header-rows: 1
* - Range of Values
- 0-10 Scale
* - 0-4
- 0
* - 5-14
- 1
* - 15-24
- 2
* - 25-34
- 3
* - 35-44
- 4
* - 45-54
- 5
* - 55-64
- 6
* - 65-74
- 7
* - 75-84
- 8
* - 85-94
- 9
* - 95-100
- 10
Args:
confidence_value (int): An integer value between 0 and 100.
Returns:
str: A string corresponding to the 0-10 scale.
Raises:
ValueError: If `confidence_value` is out of bounds.
"""
if 4 >= confidence_value >= 0:
return '0'
elif 14 >= confidence_value >= 5:
return '1'
elif 24 >= confidence_value >= 15:
return '2'
elif 34 >= confidence_value >= 25:
return '3'
elif 44 >= confidence_value >= 35:
return '4'
elif 54 >= confidence_value >= 45:
return '5'
elif 64 >= confidence_value >= 55:
return '6'
elif 74 >= confidence_value >= 65:
return '7'
elif 84 >= confidence_value >= 75:
return '8'
elif 94 >= confidence_value >= 85:
return '9'
elif 100 >= confidence_value >= 95:
return '10'
else:
raise ValueError("Range of values out of bounds: %s" % confidence_value)
def admiralty_credibility_to_value(scale_value):
"""
This method will transform a string value from the Admiralty Credibility
scale to its confidence integer representation.
The scale for this confidence representation is the following:
.. list-table:: Admiralty Credibility Scale to STIX Confidence
:header-rows: 1
* - Admiralty Credibility
- STIX Confidence Value
* - 6 - Truth cannot be judged
- (Not present)
* - 5 - Improbable
- 10
* - 4 - Doubtful
- 30
* - 3 - Possibly True
- 50
* - 2 - Probably True
- 70
* - 1 - Confirmed by other sources
- 90
Args:
scale_value (str): A string value from the scale. Accepted strings are
"6 - Truth cannot be judged", "5 - Improbable", "4 - Doubtful",
"3 - Possibly True", "2 - Probably True" and
"1 - Confirmed by other sources". Argument is case sensitive.
Returns:
int: The numerical representation corresponding to values in the
Admiralty Credibility scale.
Raises:
ValueError: If `scale_value` is not within the accepted strings.
"""
if scale_value == '6 - Truth cannot be judged':
raise ValueError("STIX Confidence value cannot be determined for %s" % scale_value)
elif scale_value == '5 - Improbable':
return 10
elif scale_value == '4 - Doubtful':
return 30
elif scale_value == '3 - Possibly True':
return 50
elif scale_value == '2 - Probably True':
return 70
elif scale_value == '1 - Confirmed by other sources':
return 90
else:
raise ValueError("STIX Confidence value cannot be determined for %s" % scale_value)
def value_to_admiralty_credibility(confidence_value):
"""
This method will transform an integer value into the Admiralty Credibility
scale string representation.
The scale for this confidence representation is the following:
.. list-table:: STIX Confidence to Admiralty Credibility Scale
:header-rows: 1
* - Range of Values
- Admiralty Credibility
* - N/A
- 6 - Truth cannot be judged
* - 0-19
- 5 - Improbable
* - 20-39
- 4 - Doubtful
* - 40-59
- 3 - Possibly True
* - 60-79
- 2 - Probably True
* - 80-100
- 1 - Confirmed by other sources
Args:
confidence_value (int): An integer value between 0 and 100.
Returns:
str: A string corresponding to the Admiralty Credibility scale.
Raises:
ValueError: If `confidence_value` is out of bounds.
"""
if 19 >= confidence_value >= 0:
return '5 - Improbable'
elif 39 >= confidence_value >= 20:
return '4 - Doubtful'
elif 59 >= confidence_value >= 40:
return '3 - Possibly True'
elif 79 >= confidence_value >= 60:
return '2 - Probably True'
elif 100 >= confidence_value >= 80:
return '1 - Confirmed by other sources'
else:
raise ValueError("Range of values out of bounds: %s" % confidence_value)
def wep_to_value(scale_value):
"""
This method will transform a string value from the WEP scale to its
confidence integer representation.
The scale for this confidence representation is the following:
.. list-table:: WEP to STIX Confidence
:header-rows: 1
* - WEP
- STIX Confidence Value
* - Impossible
- 0
* - Highly Unlikely/Almost Certainly Not
- 10
* - Unlikely/Probably Not
- 20
* - Even Chance
- 50
* - Likely/Probable
- 70
* - Highly likely/Almost Certain
- 90
* - Certain
- 100
Args:
scale_value (str): A string value from the scale. Accepted strings are
"Impossible", "Highly Unlikely/Almost Certainly Not",
"Unlikely/Probably Not", "Even Chance", "Likely/Probable",
"Highly likely/Almost Certain" and "Certain". Argument is case
sensitive.
Returns:
int: The numerical representation corresponding to values in the WEP
scale.
Raises:
ValueError: If `scale_value` is not within the accepted strings.
"""
if scale_value == 'Impossible':
return 0
elif scale_value == 'Highly Unlikely/Almost Certainly Not':
return 10
elif scale_value == 'Unlikely/Probably Not':
return 30
elif scale_value == 'Even Chance':
return 50
elif scale_value == 'Likely/Probable':
return 70
elif scale_value == 'Highly likely/Almost Certain':
return 90
elif scale_value == 'Certain':
return 100
else:
raise ValueError("STIX Confidence value cannot be determined for %s" % scale_value)
def value_to_wep(confidence_value):
"""
This method will transform an integer value into the WEP scale string
representation.
The scale for this confidence representation is the following:
.. list-table:: STIX Confidence to WEP
:header-rows: 1
* - Range of Values
- WEP
* - 0
- Impossible
* - 1-19
- Highly Unlikely/Almost Certainly Not
* - 20-39
- Unlikely/Probably Not
* - 40-59
- Even Chance
* - 60-79
- Likely/Probable
* - 80-99
- Highly likely/Almost Certain
* - 100
- Certain
Args:
confidence_value (int): An integer value between 0 and 100.
Returns:
str: A string corresponding to the WEP scale.
Raises:
ValueError: If `confidence_value` is out of bounds.
"""
if confidence_value == 0:
return 'Impossible'
elif 19 >= confidence_value >= 1:
return 'Highly Unlikely/Almost Certainly Not'
elif 39 >= confidence_value >= 20:
return 'Unlikely/Probably Not'
elif 59 >= confidence_value >= 40:
return 'Even Chance'
elif 79 >= confidence_value >= 60:
return 'Likely/Probable'
elif 99 >= confidence_value >= 80:
return 'Highly likely/Almost Certain'
elif confidence_value == 100:
return 'Certain'
else:
raise ValueError("Range of values out of bounds: %s" % confidence_value)
def dni_to_value(scale_value):
"""
This method will transform a string value from the DNI scale to its
confidence integer representation.
The scale for this confidence representation is the following:
.. list-table:: DNI Scale to STIX Confidence
:header-rows: 1
* - DNI Scale
- STIX Confidence Value
* - Almost No Chance / Remote
- 5
* - Very Unlikely / Highly Improbable
- 15
* - Unlikely / Improbable
- 30
* - Roughly Even Chance / Roughly Even Odds
- 50
* - Likely / Probable
- 70
* - Very Likely / Highly Probable
- 85
* - Almost Certain / Nearly Certain
- 95
Args:
scale_value (str): A string value from the scale. Accepted strings are
"Almost No Chance / Remote", "Very Unlikely / Highly Improbable",
"Unlikely / Improbable", "Roughly Even Chance / Roughly Even Odds",
"Likely / Probable", "Very Likely / Highly Probable" and
"Almost Certain / Nearly Certain". Argument is case sensitive.
Returns:
int: The numerical representation corresponding to values in the DNI
scale.
Raises:
ValueError: If `scale_value` is not within the accepted strings.
"""
if scale_value == 'Almost No Chance / Remote':
return 5
elif scale_value == 'Very Unlikely / Highly Improbable':
return 15
elif scale_value == 'Unlikely / Improbable':
return 30
elif scale_value == 'Roughly Even Chance / Roughly Even Odds':
return 50
elif scale_value == 'Likely / Probable':
return 70
elif scale_value == 'Very Likely / Highly Probable':
return 85
elif scale_value == 'Almost Certain / Nearly Certain':
return 95
else:
raise ValueError("STIX Confidence value cannot be determined for %s" % scale_value)
def value_to_dni(confidence_value):
"""
This method will transform an integer value into the DNI scale string
representation.
The scale for this confidence representation is the following:
.. list-table:: STIX Confidence to DNI Scale
:header-rows: 1
* - Range of Values
- DNI Scale
* - 0-9
- Almost No Chance / Remote
* - 10-19
- Very Unlikely / Highly Improbable
* - 20-39
- Unlikely / Improbable
* - 40-59
- Roughly Even Chance / Roughly Even Odds
* - 60-79
- Likely / Probable
* - 80-89
- Very Likely / Highly Probable
* - 90-100
- Almost Certain / Nearly Certain
Args:
confidence_value (int): An integer value between 0 and 100.
Returns:
str: A string corresponding to the DNI scale.
Raises:
ValueError: If `confidence_value` is out of bounds.
"""
if 9 >= confidence_value >= 0:
return 'Almost No Chance / Remote'
elif 19 >= confidence_value >= 10:
return 'Very Unlikely / Highly Improbable'
elif 39 >= confidence_value >= 20:
return 'Unlikely / Improbable'
elif 59 >= confidence_value >= 40:
return 'Roughly Even Chance / Roughly Even Odds'
elif 79 >= confidence_value >= 60:
return 'Likely / Probable'
elif 89 >= confidence_value >= 80:
return 'Very Likely / Highly Probable'
elif 100 >= confidence_value >= 90:
return 'Almost Certain / Nearly Certain'
else:
raise ValueError("Range of values out of bounds: %s" % confidence_value)
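A short usage sketch of the conversion helpers defined above:

from stix2.confidence.scales import (
    none_low_med_high_to_value, value_to_none_low_medium_high, value_to_wep,
)

confidence = none_low_med_high_to_value('Med')      # 50
print(value_to_none_low_medium_high(confidence))    # 'Med'  (30-69 band)
print(value_to_wep(confidence))                     # 'Even Chance'  (40-59 band)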

View File

@ -1,76 +1,43 @@
"""STIX 2.0 Objects that are neither SDOs nor SROs.""" """STIX2 Core Objects and Methods."""
from collections import OrderedDict import copy
import importlib import importlib
import pkgutil import pkgutil
import re
import stix2 import stix2
from . import exceptions
from .base import _STIXBase from .base import _STIXBase
from .properties import IDProperty, ListProperty, Property, TypeProperty from .exceptions import CustomContentError, ParseError
from .utils import _get_dict, get_class_hierarchy_names from .markings import _MarkingsMixin
from .utils import _get_dict
STIX2_OBJ_MAPS = {}
class STIXObjectProperty(Property): class STIXDomainObject(_STIXBase, _MarkingsMixin):
def __init__(self, allow_custom=False, interoperability=False, *args, **kwargs):
self.allow_custom = allow_custom
self.interoperability = interoperability
super(STIXObjectProperty, self).__init__(*args, **kwargs)
def clean(self, value):
# Any STIX Object (SDO, SRO, or Marking Definition) can be added to
# a bundle with no further checks.
if any(x in ('STIXDomainObject', 'STIXRelationshipObject', 'MarkingDefinition')
for x in get_class_hierarchy_names(value)):
return value
try:
dictified = _get_dict(value)
except ValueError:
raise ValueError("This property may only contain a dictionary or object")
if dictified == {}:
raise ValueError("This property may only contain a non-empty dictionary or object")
if 'type' in dictified and dictified['type'] == 'bundle':
raise ValueError('This property may not contain a Bundle object')
return parse(dictified, self.allow_custom, self.interoperability)
class Bundle(_STIXBase):
"""For more detailed information on this object's properties, see
`the STIX 2.0 specification <http://docs.oasis-open.org/cti/stix/v2.0/cs01/part1-stix-core/stix-v2.0-cs01-part1-stix-core.html#_Toc496709293>`__.
"""
_type = 'bundle'
_properties = OrderedDict()
_properties.update([
('type', TypeProperty(_type)),
('id', IDProperty(_type)),
('spec_version', Property(fixed="2.0")),
('objects', ListProperty(STIXObjectProperty)),
])
def __init__(self, *args, **kwargs):
# Add any positional arguments to the 'objects' kwarg.
if args:
if isinstance(args[0], list):
kwargs['objects'] = args[0] + list(args[1:]) + kwargs.get('objects', [])
else:
kwargs['objects'] = list(args) + kwargs.get('objects', [])
allow_custom = kwargs.get('allow_custom', False)
self.__allow_custom = allow_custom
self._properties['objects'].contained.allow_custom = allow_custom
interoperability = kwargs.get('interoperability', False)
self.__interoperability = interoperability
self._properties['id'].interoperability = interoperability
self._properties['created_by_ref'].interoperability = interoperability
if kwargs.get('object_marking_refs'):
self._properties['object_marking_refs'].contained.interoperability = interoperability
super(STIXDomainObject, self).__init__(*args, **kwargs)
class STIXRelationshipObject(_STIXBase, _MarkingsMixin):
def __init__(self, *args, **kwargs):
interoperability = kwargs.get('interoperability', False)
self.__interoperability = interoperability
self._properties['id'].interoperability = interoperability
if kwargs.get('created_by_ref'):
self._properties['created_by_ref'].interoperability = interoperability
if kwargs.get('object_marking_refs'):
self._properties['object_marking_refs'].contained.interoperability = interoperability
super(STIXRelationshipObject, self).__init__(*args, **kwargs)
def parse(data, allow_custom=False, interoperability=False, version=None):
@ -81,18 +48,22 @@ def parse(data, allow_custom=False, interoperability=False, version=None):
allow_custom (bool): Whether to allow custom properties as well as unknown
custom objects. Note that unknown custom objects cannot be parsed
into STIX objects, and will be returned as is. Default: False.
version (str): If present, it forces the parser to use the version
provided. Otherwise, the library will make the best effort based
on checking the "spec_version" property. If none of the above are
possible, it will use the default version specified by the library.
Returns:
An instantiated Python STIX object.
Warnings:
'allow_custom=True' will allow for the return of any supplied STIX
dict(s) that cannot be found to map to any known STIX object types
(both STIX2 domain objects or defined custom STIX2 objects); NO
validation is done. This is done to allow the processing of possibly
unknown custom STIX objects (example scenario: I need to query a
third-party TAXII endpoint that could provide custom STIX objects that
I don't know about ahead of time)
"""
# convert STIX object to dict, if not already
@ -107,35 +78,55 @@ def parse(data, allow_custom=False, interoperability=False, version=None):
def dict_to_stix2(stix_dict, allow_custom=False, interoperability=False, version=None):
"""convert dictionary to full python-stix2 object
Args:
stix_dict (dict): a python dictionary of a STIX object
that (presumably) is semantically correct to be parsed
into a full python-stix2 obj
allow_custom (bool): Whether to allow custom properties as well as
unknown custom objects. Note that unknown custom objects cannot
be parsed into STIX objects, and will be returned as is.
Default: False.
version (str): If present, it forces the parser to use the version
provided. Otherwise, the library will make the best effort based
on checking the "spec_version" property. If none of the above are
possible, it will use the default version specified by the library.
Returns:
An instantiated Python STIX object
Warnings:
'allow_custom=True' will allow for the return of any supplied STIX
dict(s) that cannot be found to map to any known STIX object types
(both STIX2 domain objects or defined custom STIX2 objects); NO
validation is done. This is done to allow the processing of
possibly unknown custom STIX objects (example scenario: I need to
query a third-party TAXII endpoint that could provide custom STIX
objects that I don't know about ahead of time)
"""
if 'type' not in stix_dict:
raise ParseError("Can't parse object with no 'type' property: %s" % str(stix_dict))
if version:
# If the version argument was passed, override other approaches.
v = 'v' + version.replace('.', '')
elif 'spec_version' in stix_dict:
# For STIX 2.0, applies to bundles only.
# For STIX 2.1+, applies to SDOs, SROs, and markings only.
v = 'v' + stix_dict['spec_version'].replace('.', '')
elif stix_dict['type'] == 'bundle':
# bundles without spec_version are ambiguous.
if any('spec_version' in x for x in stix_dict['objects']):
# Only on 2.1 we are allowed to have 'spec_version' in SDOs/SROs.
v = 'v21'
else:
v = 'v' + stix2.DEFAULT_VERSION.replace('.', '')
else:
# The spec says that SDO/SROs without spec_version will default to a
# '2.0' representation.
v = 'v20'
OBJ_MAP = STIX2_OBJ_MAPS[v]['objects']
try:
obj_class = OBJ_MAP[stix_dict['type']]
@ -144,39 +135,187 @@ def dict_to_stix2(stix_dict, allow_custom=False, interoperability=False, version
# flag allows for unknown custom objects too, but will not
# be parsed into STIX object, returned as is
return stix_dict
raise ParseError("Can't parse unknown object type '%s'! For custom types, use the CustomObject decorator." % stix_dict['type'])
return obj_class(allow_custom=allow_custom, interoperability=interoperability, **stix_dict)
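To illustrate the version selection above, a sketch (not part of the diff) that reuses the 2.1 indicator from the README example:

import stix2

# 'spec_version' is present, so dict_to_stix2() routes this dict to the v21 object map.
indicator = stix2.parse({
    "type": "indicator",
    "spec_version": "2.1",
    "id": "indicator--dbcbd659-c927-4f9a-994f-0a2632274394",
    "created": "2017-09-26T23:33:39.829Z",
    "modified": "2017-09-26T23:33:39.829Z",
    "name": "File hash for malware variant",
    "indicator_types": ["malicious-activity"],
    "pattern": "[file:hashes.md5 = 'd41d8cd98f00b204e9800998ecf8427e']",
    "valid_from": "2017-09-26T23:33:39.829952Z"
})
# A plain SDO dict without 'spec_version' would fall back to the 2.0 classes instead.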
def parse_observable(data, _valid_refs=None, allow_custom=False, version=None):
"""Deserialize a string or file-like object into a STIX Cyber Observable
object.
Args:
data (str, dict, file-like object): The STIX2 content to be parsed.
_valid_refs: A list of object references valid for the scope of the
object being parsed. Use empty list if no valid refs are present.
allow_custom (bool): Whether to allow custom properties or not.
Default: False.
version (str): If present, it forces the parser to use the version
provided. Otherwise, the default version specified by the library
will be used.
Returns:
An instantiated Python STIX Cyber Observable object.
"""
obj = _get_dict(data)
# get a deep copy since we are going to modify the dict and might
# modify the original dict as _get_dict() does not return new
# dict when passed a dict
obj = copy.deepcopy(obj)
obj['_valid_refs'] = _valid_refs or []
if version:
# If the version argument was passed, override other approaches.
v = 'v' + version.replace('.', '')
else:
# Use default version (latest) if no version was provided.
v = 'v' + stix2.DEFAULT_VERSION.replace('.', '')
if 'type' not in obj:
raise ParseError("Can't parse observable with no 'type' property: %s" % str(obj))
try:
OBJ_MAP_OBSERVABLE = STIX2_OBJ_MAPS[v]['observables']
obj_class = OBJ_MAP_OBSERVABLE[obj['type']]
except KeyError:
if allow_custom:
# flag allows for unknown custom objects too, but will not
# be parsed into STIX observable object, just returned as is
return obj
raise CustomContentError("Can't parse unknown observable type '%s'! For custom observables, "
"use the CustomObservable decorator." % obj['type'])
EXT_MAP = STIX2_OBJ_MAPS[v]['observable-extensions']
if 'extensions' in obj and obj['type'] in EXT_MAP:
for name, ext in obj['extensions'].items():
try:
ext_class = EXT_MAP[obj['type']][name]
except KeyError:
if not allow_custom:
raise CustomContentError("Can't parse unknown extension type '%s'"
"for observable type '%s'!" % (name, obj['type']))
else: # extension was found
obj['extensions'][name] = ext_class(allow_custom=allow_custom, **obj['extensions'][name])
return obj_class(allow_custom=allow_custom, **obj)
def _register_object(new_type, version=None):
"""Register a custom STIX Object type. """Register a custom STIX Object type.
Args: Args:
new_type (class): A class to register in the Object map. new_type (class): A class to register in the Object map.
version (str): Which STIX2 version to use. (e.g. "2.0", "2.1"). If version (str): Which STIX2 version to use. (e.g. "2.0", "2.1"). If
None, use latest version. None, use latest version.
"""
if not version:
# Use latest version
v = 'v' + stix2.DEFAULT_VERSION.replace('.', '')
else:
v = 'v' + version.replace('.', '')
OBJ_MAP = STIX2_OBJ_MAPS[v] """
if version:
v = 'v' + version.replace('.', '')
else:
# Use default version (latest) if no version was provided.
v = 'v' + stix2.DEFAULT_VERSION.replace('.', '')
OBJ_MAP = STIX2_OBJ_MAPS[v]['objects']
OBJ_MAP[new_type._type] = new_type OBJ_MAP[new_type._type] = new_type
def _collect_stix2_obj_maps(): def _register_marking(new_marking, version=None):
"""Navigate the package once and retrieve all OBJ_MAP dicts for each v2X """Register a custom STIX Marking Definition type.
package."""
Args:
new_marking (class): A class to register in the Marking map.
version (str): Which STIX2 version to use. (e.g. "2.0", "2.1"). If
None, use latest version.
"""
if version:
v = 'v' + version.replace('.', '')
else:
# Use default version (latest) if no version was provided.
v = 'v' + stix2.DEFAULT_VERSION.replace('.', '')
OBJ_MAP_MARKING = STIX2_OBJ_MAPS[v]['markings']
OBJ_MAP_MARKING[new_marking._type] = new_marking
def _register_observable(new_observable, version=None):
"""Register a custom STIX Cyber Observable type.
Args:
new_observable (class): A class to register in the Observables map.
version (str): Which STIX2 version to use. (e.g. "2.0", "2.1"). If
None, use latest version.
"""
if version:
v = 'v' + version.replace('.', '')
else:
# Use default version (latest) if no version was provided.
v = 'v' + stix2.DEFAULT_VERSION.replace('.', '')
OBJ_MAP_OBSERVABLE = STIX2_OBJ_MAPS[v]['observables']
OBJ_MAP_OBSERVABLE[new_observable._type] = new_observable
def _register_observable_extension(observable, new_extension, version=None):
"""Register a custom extension to a STIX Cyber Observable type.
Args:
observable: An observable object
new_extension (class): A class to register in the Observables
Extensions map.
version (str): Which STIX2 version to use. (e.g. "2.0", "2.1"). If
None, use latest version.
"""
if version:
v = 'v' + version.replace('.', '')
else:
# Use default version (latest) if no version was provided.
v = 'v' + stix2.DEFAULT_VERSION.replace('.', '')
try:
observable_type = observable._type
except AttributeError:
raise ValueError(
"Unknown observable type. Custom observables must be "
"created with the @CustomObservable decorator.",
)
OBJ_MAP_OBSERVABLE = STIX2_OBJ_MAPS[v]['observables']
EXT_MAP = STIX2_OBJ_MAPS[v]['observable-extensions']
try:
EXT_MAP[observable_type][new_extension._type] = new_extension
except KeyError:
if observable_type not in OBJ_MAP_OBSERVABLE:
raise ValueError(
"Unknown observable type '%s'. Custom observables "
"must be created with the @CustomObservable decorator."
% observable_type,
)
else:
EXT_MAP[observable_type] = {new_extension._type: new_extension}
def _collect_stix2_mappings():
"""Navigate the package once and retrieve all object mapping dicts for each
v2X package. Includes OBJ_MAP, OBJ_MAP_OBSERVABLE, EXT_MAP."""
if not STIX2_OBJ_MAPS: if not STIX2_OBJ_MAPS:
top_level_module = importlib.import_module('stix2') top_level_module = importlib.import_module('stix2')
path = top_level_module.__path__ path = top_level_module.__path__
prefix = str(top_level_module.__name__) + '.' prefix = str(top_level_module.__name__) + '.'
for module_loader, name, is_pkg in pkgutil.walk_packages(path=path, for module_loader, name, is_pkg in pkgutil.walk_packages(path=path, prefix=prefix):
prefix=prefix): ver = name.split('.')[1]
if name.startswith('stix2.v2') and is_pkg: if re.match(r'^stix2\.v2[0-9]$', name) and is_pkg:
mod = importlib.import_module(name, str(top_level_module.__name__)) mod = importlib.import_module(name, str(top_level_module.__name__))
STIX2_OBJ_MAPS[name.split('.')[-1]] = mod.OBJ_MAP STIX2_OBJ_MAPS[ver] = {}
STIX2_OBJ_MAPS[ver]['objects'] = mod.OBJ_MAP
STIX2_OBJ_MAPS[ver]['observables'] = mod.OBJ_MAP_OBSERVABLE
STIX2_OBJ_MAPS[ver]['observable-extensions'] = mod.EXT_MAP
elif re.match(r'^stix2\.v2[0-9]\.common$', name) and is_pkg is False:
mod = importlib.import_module(name, str(top_level_module.__name__))
STIX2_OBJ_MAPS[ver]['markings'] = mod.OBJ_MAP_MARKING
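An illustrative sketch of the mapping layout these helpers build; the exact keys and classes printed are assumptions based on the v20/v21 subpackages:
import stix2                       # importing the package populates the maps
from stix2.core import STIX2_OBJ_MAPS
print(sorted(STIX2_OBJ_MAPS.keys()))          # e.g. ['v20', 'v21']
print(sorted(STIX2_OBJ_MAPS['v20'].keys()))
# ['markings', 'objects', 'observable-extensions', 'observables']
print(STIX2_OBJ_MAPS['v20']['objects']['indicator'])   # the v20 Indicator class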

stix2/custom.py (new file, 120 lines)
@ -0,0 +1,120 @@
from collections import OrderedDict
import re
from .base import _cls_init, _Extension, _Observable, _STIXBase
from .core import (
STIXDomainObject, _register_marking, _register_object,
_register_observable, _register_observable_extension,
)
from .utils import TYPE_REGEX, get_class_hierarchy_names
def _custom_object_builder(cls, type, properties, version):
class _CustomObject(cls, STIXDomainObject):
if not re.match(TYPE_REGEX, type):
raise ValueError(
"Invalid type name '%s': must only contain the "
"characters a-z (lowercase ASCII), 0-9, and hyphen (-)." % type,
)
elif len(type) < 3 or len(type) > 250:
raise ValueError(
"Invalid type name '%s': must be between 3 and 250 characters." % type,
)
if not properties or not isinstance(properties, list):
raise ValueError("Must supply a list, containing tuples. For example, [('property1', IntegerProperty())]")
_type = type
_properties = OrderedDict(properties)
def __init__(self, **kwargs):
_STIXBase.__init__(self, **kwargs)
_cls_init(cls, self, kwargs)
_register_object(_CustomObject, version=version)
return _CustomObject
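A hedged sketch of the decorator that drives this builder, using the v20 convenience wrapper; the type name and properties are made up:
from stix2.properties import StringProperty
from stix2.v20 import CustomObject
@CustomObject('x-animal', [
    ('species', StringProperty(required=True)),
    ('animal_class', StringProperty()),
])
class Animal(object):
    def __init__(self, animal_class=None, **kwargs):
        if animal_class and animal_class not in ['mammal', 'bird', 'fish', 'reptile']:
            raise ValueError("'%s' is not a recognized class of animal." % animal_class)
animal = Animal(species="lion", animal_class="mammal")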
def _custom_marking_builder(cls, type, properties, version):
class _CustomMarking(cls, _STIXBase):
if not properties or not isinstance(properties, list):
raise ValueError("Must supply a list, containing tuples. For example, [('property1', IntegerProperty())]")
_type = type
_properties = OrderedDict(properties)
def __init__(self, **kwargs):
_STIXBase.__init__(self, **kwargs)
_cls_init(cls, self, kwargs)
_register_marking(_CustomMarking, version=version)
return _CustomMarking
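A matching sketch for custom markings, again via the assumed v20 wrapper; names are illustrative:
from stix2.properties import StringProperty
from stix2.v20 import CustomMarking
@CustomMarking('x-traffic-light-plus', [
    ('tlp_plus', StringProperty(required=True)),
])
class TLPPlus(object):
    pass
marking = TLPPlus(tlp_plus="amber-strict")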
def _custom_observable_builder(cls, type, properties, version):
class _CustomObservable(cls, _Observable):
if not re.match(TYPE_REGEX, type):
raise ValueError(
"Invalid observable type name '%s': must only contain the "
"characters a-z (lowercase ASCII), 0-9, and hyphen (-)." % type,
)
elif len(type) < 3 or len(type) > 250:
raise ValueError("Invalid observable type name '%s': must be between 3 and 250 characters." % type)
if not properties or not isinstance(properties, list):
raise ValueError("Must supply a list, containing tuples. For example, [('property1', IntegerProperty())]")
# Check properties ending in "_ref/s" are ObjectReferenceProperties
for prop_name, prop in properties:
if prop_name.endswith('_ref') and ('ObjectReferenceProperty' not in get_class_hierarchy_names(prop)):
raise ValueError(
"'%s' is named like an object reference property but "
"is not an ObjectReferenceProperty." % prop_name,
)
elif (prop_name.endswith('_refs') and ('ListProperty' not in get_class_hierarchy_names(prop)
or 'ObjectReferenceProperty' not in get_class_hierarchy_names(prop.contained))):
raise ValueError(
"'%s' is named like an object reference list property but "
"is not a ListProperty containing ObjectReferenceProperty." % prop_name,
)
_type = type
_properties = OrderedDict(properties)
def __init__(self, **kwargs):
_Observable.__init__(self, **kwargs)
_cls_init(cls, self, kwargs)
_register_observable(_CustomObservable, version=version)
return _CustomObservable
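A sketch of a custom observable registered through this builder; the property names are made up, and note the _ref/_refs naming rules enforced above:
from stix2.properties import IntegerProperty, StringProperty
from stix2.v20 import CustomObservable
@CustomObservable('x-new-observable', [
    ('property1', StringProperty(required=True)),
    ('property2', IntegerProperty()),
])
class NewObservable(object):
    pass
obs = NewObservable(property1="something", property2=10)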
def _custom_extension_builder(cls, observable, type, properties, version):
if not observable or not issubclass(observable, _Observable):
raise ValueError("'observable' must be a valid Observable class!")
class _CustomExtension(cls, _Extension):
if not re.match(TYPE_REGEX, type):
raise ValueError(
"Invalid extension type name '%s': must only contain the "
"characters a-z (lowercase ASCII), 0-9, and hyphen (-)." % type,
)
elif len(type) < 3 or len(type) > 250:
raise ValueError("Invalid extension type name '%s': must be between 3 and 250 characters." % type)
if not properties or not isinstance(properties, list):
raise ValueError("Must supply a list, containing tuples. For example, [('property1', IntegerProperty())]")
_type = type
_properties = OrderedDict(properties)
def __init__(self, **kwargs):
_Extension.__init__(self, **kwargs)
_cls_init(cls, self, kwargs)
_register_observable_extension(observable, _CustomExtension, version=version)
return _CustomExtension
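And a sketch of a custom observable extension; the DomainName target and extension name are illustrative:
from stix2.properties import StringProperty
from stix2.v20 import CustomExtension, DomainName
@CustomExtension(DomainName, 'x-new-ext', [
    ('property1', StringProperty(required=True)),
])
class NewExtension(object):
    pass
domain = DomainName(
    value="example.com",
    extensions={'x-new-ext': NewExtension(property1='something')},
)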

stix2/datastore/__init__.py

@ -1,4 +1,5 @@
"""Python STIX 2.0 DataStore API. """
Python STIX2 DataStore API.
.. autosummary:: .. autosummary::
:toctree: datastore :toctree: datastore
@ -83,7 +84,8 @@ class DataStoreMixin(object):
try: try:
return self.source.get(*args, **kwargs) return self.source.get(*args, **kwargs)
except AttributeError: except AttributeError:
raise AttributeError('%s has no data source to query' % self.__class__.__name__) msg = "%s has no data source to query"
raise AttributeError(msg % self.__class__.__name__)
def all_versions(self, *args, **kwargs): def all_versions(self, *args, **kwargs):
"""Retrieve all versions of a single STIX object by ID. """Retrieve all versions of a single STIX object by ID.
@ -100,7 +102,8 @@ class DataStoreMixin(object):
try: try:
return self.source.all_versions(*args, **kwargs) return self.source.all_versions(*args, **kwargs)
except AttributeError: except AttributeError:
raise AttributeError('%s has no data source to query' % self.__class__.__name__) msg = "%s has no data source to query"
raise AttributeError(msg % self.__class__.__name__)
def query(self, *args, **kwargs): def query(self, *args, **kwargs):
"""Retrieve STIX objects matching a set of filters. """Retrieve STIX objects matching a set of filters.
@ -118,7 +121,8 @@ class DataStoreMixin(object):
try: try:
return self.source.query(*args, **kwargs) return self.source.query(*args, **kwargs)
except AttributeError: except AttributeError:
raise AttributeError('%s has no data source to query' % self.__class__.__name__) msg = "%s has no data source to query"
raise AttributeError(msg % self.__class__.__name__)
def creator_of(self, *args, **kwargs): def creator_of(self, *args, **kwargs):
"""Retrieve the Identity refered to by the object's `created_by_ref`. """Retrieve the Identity refered to by the object's `created_by_ref`.
@ -137,7 +141,8 @@ class DataStoreMixin(object):
try: try:
return self.source.creator_of(*args, **kwargs) return self.source.creator_of(*args, **kwargs)
except AttributeError: except AttributeError:
raise AttributeError('%s has no data source to query' % self.__class__.__name__) msg = "%s has no data source to query"
raise AttributeError(msg % self.__class__.__name__)
def relationships(self, *args, **kwargs): def relationships(self, *args, **kwargs):
"""Retrieve Relationships involving the given STIX object. """Retrieve Relationships involving the given STIX object.
@ -163,7 +168,8 @@ class DataStoreMixin(object):
try: try:
return self.source.relationships(*args, **kwargs) return self.source.relationships(*args, **kwargs)
except AttributeError: except AttributeError:
raise AttributeError('%s has no data source to query' % self.__class__.__name__) msg = "%s has no data source to query"
raise AttributeError(msg % self.__class__.__name__)
def related_to(self, *args, **kwargs): def related_to(self, *args, **kwargs):
"""Retrieve STIX Objects that have a Relationship involving the given """Retrieve STIX Objects that have a Relationship involving the given
@ -193,7 +199,8 @@ class DataStoreMixin(object):
try: try:
return self.source.related_to(*args, **kwargs) return self.source.related_to(*args, **kwargs)
except AttributeError: except AttributeError:
raise AttributeError('%s has no data source to query' % self.__class__.__name__) msg = "%s has no data source to query"
raise AttributeError(msg % self.__class__.__name__)
def add(self, *args, **kwargs): def add(self, *args, **kwargs):
"""Method for storing STIX objects. """Method for storing STIX objects.
@ -208,7 +215,8 @@ class DataStoreMixin(object):
try: try:
return self.sink.add(*args, **kwargs) return self.sink.add(*args, **kwargs)
except AttributeError: except AttributeError:
raise AttributeError('%s has no data sink to put objects in' % self.__class__.__name__) msg = "%s has no data sink to put objects in"
raise AttributeError(msg % self.__class__.__name__)
class DataSink(with_metaclass(ABCMeta)): class DataSink(with_metaclass(ABCMeta)):
@ -301,7 +309,7 @@ class DataSource(with_metaclass(ABCMeta)):
""" """
def creator_of(self, obj): def creator_of(self, obj):
"""Retrieve the Identity refered to by the object's `created_by_ref`. """Retrieve the Identity referred to by the object's `created_by_ref`.
Args: Args:
obj: The STIX object whose `created_by_ref` property will be looked obj: The STIX object whose `created_by_ref` property will be looked
@ -457,7 +465,7 @@ class CompositeDataSource(DataSource):
""" """
if not self.has_data_sources(): if not self.has_data_sources():
raise AttributeError('CompositeDataSource has no data sources') raise AttributeError("CompositeDataSource has no data sources")
all_data = [] all_data = []
all_filters = FilterSet() all_filters = FilterSet()
@ -504,7 +512,7 @@ class CompositeDataSource(DataSource):
""" """
if not self.has_data_sources(): if not self.has_data_sources():
raise AttributeError('CompositeDataSource has no data sources') raise AttributeError("CompositeDataSource has no data sources")
all_data = [] all_data = []
all_filters = FilterSet() all_filters = FilterSet()
@ -543,7 +551,7 @@ class CompositeDataSource(DataSource):
""" """
if not self.has_data_sources(): if not self.has_data_sources():
raise AttributeError('CompositeDataSource has no data sources') raise AttributeError("CompositeDataSource has no data sources")
if not query: if not query:
# don't mess with the query (i.e. deduplicate, as that's done # don't mess with the query (i.e. deduplicate, as that's done
@ -594,7 +602,7 @@ class CompositeDataSource(DataSource):
""" """
if not self.has_data_sources(): if not self.has_data_sources():
raise AttributeError('CompositeDataSource has no data sources') raise AttributeError("CompositeDataSource has no data sources")
results = [] results = []
for ds in self.data_sources: for ds in self.data_sources:
@ -634,7 +642,7 @@ class CompositeDataSource(DataSource):
""" """
if not self.has_data_sources(): if not self.has_data_sources():
raise AttributeError('CompositeDataSource has no data sources') raise AttributeError("CompositeDataSource has no data sources")
results = [] results = []
for ds in self.data_sources: for ds in self.data_sources:
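A brief, hedged example of querying several sources at once through CompositeDataSource; the wrapped sources and directory path are illustrative:
import stix2
cs = stix2.CompositeDataSource()
cs.add_data_sources([
    stix2.MemorySource(),
    stix2.FileSystemSource("/tmp/stix-data"),   # directory must already exist
])
print(cs.query([stix2.Filter("type", "=", "indicator")]))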

stix2/datastore/filesystem.py

@ -1,21 +1,23 @@
""" """Python STIX2 FileSystem Source/Sink"""
Python STIX 2.0 FileSystem Source/Sink # Temporary while we address TODO statement
from __future__ import print_function
"""
import errno import errno
import io
import json import json
import os import os
import re
import stat import stat
import sys
import pytz
import six import six
from stix2 import v20, v21
from stix2.base import _STIXBase from stix2.base import _STIXBase
from stix2.core import Bundle, parse from stix2.core import parse
from stix2.datastore import DataSink, DataSource, DataStoreMixin from stix2.datastore import DataSink, DataSource, DataStoreMixin
from stix2.datastore.filters import Filter, FilterSet, apply_common_filters from stix2.datastore.filters import Filter, FilterSet, apply_common_filters
from stix2.utils import get_type_from_id, is_marking from stix2.utils import format_datetime, get_type_from_id, is_marking
def _timestamp2filename(timestamp): def _timestamp2filename(timestamp):
@ -23,15 +25,14 @@ def _timestamp2filename(timestamp):
Encapsulates a way to create unique filenames based on an object's Encapsulates a way to create unique filenames based on an object's
"modified" property value. This should not include an extension. "modified" property value. This should not include an extension.
:param timestamp: A timestamp, as a datetime.datetime object. Args:
""" timestamp: A timestamp, as a datetime.datetime object.
# Different times will only produce different file names if all timestamps
# are in the same time zone! So if timestamp is timezone-aware convert
# to UTC just to be safe. If naive, just use as-is.
if timestamp.tzinfo is not None:
timestamp = timestamp.astimezone(pytz.utc)
return timestamp.strftime("%Y%m%d%H%M%S%f") """
# The format_datetime will determine the correct level of precision.
ts = format_datetime(timestamp)
ts = re.sub(r"[-T:\.Z ]", "", ts)
return ts
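A tiny stdlib-only sketch of the filename scheme: format_datetime() yields an RFC 3339 style timestamp, and the regex strips every separator (the sample timestamp is made up):
import re
ts = "2018-12-14T10:09:58.123Z"          # what format_datetime() might return
print(re.sub(r"[-T:\.Z ]", "", ts))      # -> 20181214100958123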
class AuthSet(object): class AuthSet(object):
@ -45,8 +46,8 @@ class AuthSet(object):
anywhere, which means the query was impossible to match, so you can skip anywhere, which means the query was impossible to match, so you can skip
searching altogether. For a blacklist, this means nothing is excluded searching altogether. For a blacklist, this means nothing is excluded
and you must search everywhere. and you must search everywhere.
"""
"""
BLACK = 0 BLACK = 0
WHITE = 1 WHITE = 1
@ -56,9 +57,11 @@ class AuthSet(object):
prohibited values. The type of set (black or white) is determined prohibited values. The type of set (black or white) is determined
from the allowed and/or prohibited values given. from the allowed and/or prohibited values given.
:param allowed: A set of allowed values (or None if no allow filters Args:
were found in the query) allowed: A set of allowed values (or None if no allow filters
:param prohibited: A set of prohibited values (not None) were found in the query)
prohibited: A set of prohibited values (not None)
""" """
if allowed is None: if allowed is None:
self.__values = prohibited self.__values = prohibited
@ -88,7 +91,7 @@ class AuthSet(object):
def __repr__(self): def __repr__(self):
return "{}list: {}".format( return "{}list: {}".format(
"white" if self.auth_type == AuthSet.WHITE else "black", "white" if self.auth_type == AuthSet.WHITE else "black",
self.values self.values,
) )
@ -103,9 +106,13 @@ def _update_allow(allow_set, value):
implicitly AND'd, the given values are intersected with the existing allow implicitly AND'd, the given values are intersected with the existing allow
set, which may remove values. At the end, it may even wind up empty. set, which may remove values. At the end, it may even wind up empty.
:param allow_set: The allow set, or None Args:
:param value: The value(s) to add (single value, or iterable of values) allow_set: The allow set, or None
:return: The updated allow set (not None) value: The value(s) to add (single value, or iterable of values)
Returns:
The updated allow set (not None)
""" """
adding_seq = hasattr(value, "__iter__") and \ adding_seq = hasattr(value, "__iter__") and \
not isinstance(value, six.string_types) not isinstance(value, six.string_types)
@ -116,7 +123,6 @@ def _update_allow(allow_set, value):
allow_set.update(value) allow_set.update(value)
else: else:
allow_set.add(value) allow_set.add(value)
else: else:
# strangely, the "&=" operator requires a set on the RHS # strangely, the "&=" operator requires a set on the RHS
# whereas the method allows any iterable. # whereas the method allows any iterable.
@ -133,11 +139,14 @@ def _find_search_optimizations(filters):
Searches through all the filters, and creates white/blacklists of types and Searches through all the filters, and creates white/blacklists of types and
IDs, which can be used to optimize the filesystem search. IDs, which can be used to optimize the filesystem search.
:param filters: An iterable of filter objects representing a query Args:
:return: A 2-tuple of AuthSet objects: the first is for object types, and filters: An iterable of filter objects representing a query
the second is for object IDs.
"""
Returns:
A 2-tuple of AuthSet objects: the first is for object types, and
the second is for object IDs.
"""
# The basic approach to this is to determine what is allowed and # The basic approach to this is to determine what is allowed and
# prohibited, independently, and then combine them to create the final # prohibited, independently, and then combine them to create the final
# white/blacklists. # white/blacklists.
@ -158,15 +167,19 @@ def _find_search_optimizations(filters):
# An "allow" ID filter implies a type filter too, since IDs # An "allow" ID filter implies a type filter too, since IDs
# contain types within them. # contain types within them.
allowed_ids = _update_allow(allowed_ids, filter_.value) allowed_ids = _update_allow(allowed_ids, filter_.value)
allowed_types = _update_allow(allowed_types, allowed_types = _update_allow(
get_type_from_id(filter_.value)) allowed_types,
get_type_from_id(filter_.value),
)
elif filter_.op == "!=": elif filter_.op == "!=":
prohibited_ids.add(filter_.value) prohibited_ids.add(filter_.value)
elif filter_.op == "in": elif filter_.op == "in":
allowed_ids = _update_allow(allowed_ids, filter_.value) allowed_ids = _update_allow(allowed_ids, filter_.value)
allowed_types = _update_allow(allowed_types, ( allowed_types = _update_allow(
get_type_from_id(id_) for id_ in filter_.value allowed_types, (
)) get_type_from_id(id_) for id_ in filter_.value
),
)
opt_types = AuthSet(allowed_types, prohibited_types) opt_types = AuthSet(allowed_types, prohibited_types)
opt_ids = AuthSet(allowed_ids, prohibited_ids) opt_ids = AuthSet(allowed_ids, prohibited_ids)
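An illustrative call to the internal optimizer above; these are private helpers, so treat the usage as an assumption, and the indicator ID is a placeholder:
from stix2.datastore.filesystem import _find_search_optimizations
from stix2.datastore.filters import Filter
query = [
    Filter("type", "=", "indicator"),
    Filter("id", "!=", "indicator--01234567-89ab-cdef-0123-456789abcdef"),
]
auth_types, auth_ids = _find_search_optimizations(query)
print(auth_types)   # roughly: whitelist: {'indicator'}
print(auth_ids)     # roughly: blacklist containing the excluded ID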
@ -196,30 +209,35 @@ def _get_matching_dir_entries(parent_dir, auth_set, st_mode_test=None, ext=""):
Search a directory (non-recursively), and find entries which match the Search a directory (non-recursively), and find entries which match the
given criteria. given criteria.
:param parent_dir: The directory to search Args:
:param auth_set: an AuthSet instance, which represents a black/whitelist parent_dir: The directory to search
filter on filenames auth_set: an AuthSet instance, which represents a black/whitelist
:param st_mode_test: A callable allowing filtering based on the type of filter on filenames
directory entry. E.g. just get directories, or just get files. It st_mode_test: A callable allowing filtering based on the type of
will be passed the st_mode field of a stat() structure and should directory entry. E.g. just get directories, or just get files. It
return True to include the file, or False to exclude it. Easy thing to will be passed the st_mode field of a stat() structure and should
do is pass one of the stat module functions, e.g. stat.S_ISREG. If return True to include the file, or False to exclude it. Easy thing to
None, don't filter based on entry type. do is pass one of the stat module functions, e.g. stat.S_ISREG. If
:param ext: Determines how names from auth_set match up to directory None, don't filter based on entry type.
entries, and allows filtering by extension. The extension is added ext: Determines how names from auth_set match up to directory
to auth_set values to obtain directory entries; it is removed from entries, and allows filtering by extension. The extension is added
directory entries to obtain auth_set values. In this way, auth_set to auth_set values to obtain directory entries; it is removed from
may be treated as having only "basenames" of the entries. Only entries directory entries to obtain auth_set values. In this way, auth_set
having the given extension will be included in the results. If not may be treated as having only "basenames" of the entries. Only entries
empty, the extension MUST include a leading ".". The default is the having the given extension will be included in the results. If not
empty string, which will result in direct comparisons, and no empty, the extension MUST include a leading ".". The default is the
extension-based filtering. empty string, which will result in direct comparisons, and no
:return: A list of directory entries matching the criteria. These will not extension-based filtering.
have any path info included; they will just be bare names.
:raises OSError: If there are errors accessing directory contents or
stat()'ing files
"""
Returns:
(list): A list of directory entries matching the criteria. These will not
have any path info included; they will just be bare names.
Raises:
OSError: If there are errors accessing directory contents or stat()'ing
files
"""
results = [] results = []
if auth_set.auth_type == AuthSet.WHITE: if auth_set.auth_type == AuthSet.WHITE:
for value in auth_set.values: for value in auth_set.values:
@ -237,7 +255,6 @@ def _get_matching_dir_entries(parent_dir, auth_set, st_mode_test=None, ext=""):
if e.errno != errno.ENOENT: if e.errno != errno.ENOENT:
raise raise
# else, file-not-found is ok, just skip # else, file-not-found is ok, just skip
else: # auth_set is a blacklist else: # auth_set is a blacklist
for entry in os.listdir(parent_dir): for entry in os.listdir(parent_dir):
if ext: if ext:
@ -272,28 +289,34 @@ def _check_object_from_file(query, filepath, allow_custom, version):
Read a STIX object from the given file, and check it against the given Read a STIX object from the given file, and check it against the given
filters. filters.
:param query: Iterable of filters Args:
:param filepath: Path to file to read query: Iterable of filters
:param allow_custom: Whether to allow custom properties as well unknown filepath: Path to file to read
allow_custom: Whether to allow custom properties as well unknown
custom objects. custom objects.
:param version: Which STIX2 version to use. (e.g. "2.0", "2.1"). If None, version (str): If present, it forces the parser to use the version
use latest version. provided. Otherwise, the library will make the best effort based
:return: The (parsed) STIX object, if the object passes the filters. If on checking the "spec_version" property.
Returns:
The (parsed) STIX object, if the object passes the filters. If
not, None is returned. not, None is returned.
:raises TypeError: If the file had invalid JSON
:raises IOError: If there are problems opening/reading the file Raises:
:raises stix2.exceptions.STIXError: If there were problems creating a STIX TypeError: If the file had invalid JSON
object from the JSON IOError: If there are problems opening/reading the file
stix2.exceptions.STIXError: If there were problems creating a STIX
object from the JSON
""" """
try: try:
with open(filepath, "r") as f: with io.open(filepath, "r") as f:
stix_json = json.load(f) stix_json = json.load(f)
except ValueError: # not a JSON file except ValueError: # not a JSON file
raise TypeError( raise TypeError(
"STIX JSON object at '{0}' could either not be parsed " "STIX JSON object at '{0}' could either not be parsed "
"to JSON or was not valid STIX JSON".format( "to JSON or was not valid STIX JSON".format(filepath),
filepath)) )
stix_obj = parse(stix_json, allow_custom, version) stix_obj = parse(stix_json, allow_custom, version)
@ -312,35 +335,49 @@ def _search_versioned(query, type_path, auth_ids, allow_custom, version):
particular versioned type (i.e. not markings), and return any which match particular versioned type (i.e. not markings), and return any which match
the query. the query.
:param query: The query to match against Args:
:param type_path: The directory with type-specific STIX object files query: The query to match against
:param auth_ids: Search optimization based on object ID type_path: The directory with type-specific STIX object files
:param allow_custom: Whether to allow custom properties as well unknown auth_ids: Search optimization based on object ID
custom objects. allow_custom: Whether to allow custom properties as well unknown
:param version: Which STIX2 version to use. (e.g. "2.0", "2.1"). If None, custom objects.
use latest version. version (str): If present, it forces the parser to use the version
:return: A list of all matching objects provided. Otherwise, the library will make the best effort based
:raises TypeError, stix2.exceptions.STIXError: If any objects had invalid on checking the "spec_version" property.
content
:raises IOError, OSError: If there were any problems opening/reading files Returns:
A list of all matching objects
Raises:
stix2.exceptions.STIXError: If any objects had invalid content
TypeError: If any objects had invalid content
IOError: If there were any problems opening/reading files
OSError: If there were any problems opening/reading files
""" """
results = [] results = []
id_dirs = _get_matching_dir_entries(type_path, auth_ids, id_dirs = _get_matching_dir_entries(
stat.S_ISDIR) type_path, auth_ids,
stat.S_ISDIR,
)
for id_dir in id_dirs: for id_dir in id_dirs:
id_path = os.path.join(type_path, id_dir) id_path = os.path.join(type_path, id_dir)
# This leverages a more sophisticated function to do a simple thing: # This leverages a more sophisticated function to do a simple thing:
# get all the JSON files from a directory. I guess it does give us # get all the JSON files from a directory. I guess it does give us
# file type checking, ensuring we only get regular files. # file type checking, ensuring we only get regular files.
version_files = _get_matching_dir_entries(id_path, _AUTHSET_ANY, version_files = _get_matching_dir_entries(
stat.S_ISREG, ".json") id_path, _AUTHSET_ANY,
stat.S_ISREG, ".json",
)
for version_file in version_files: for version_file in version_files:
version_path = os.path.join(id_path, version_file) version_path = os.path.join(id_path, version_file)
try: try:
stix_obj = _check_object_from_file(query, version_path, stix_obj = _check_object_from_file(
allow_custom, version) query, version_path,
allow_custom, version,
)
if stix_obj: if stix_obj:
results.append(stix_obj) results.append(stix_obj)
except IOError as e: except IOError as e:
@ -350,14 +387,18 @@ def _search_versioned(query, type_path, auth_ids, allow_custom, version):
# For backward-compatibility, also search for plain files named after # For backward-compatibility, also search for plain files named after
# object IDs, in the type directory. # object IDs, in the type directory.
id_files = _get_matching_dir_entries(type_path, auth_ids, stat.S_ISREG, id_files = _get_matching_dir_entries(
".json") type_path, auth_ids, stat.S_ISREG,
".json",
)
for id_file in id_files: for id_file in id_files:
id_path = os.path.join(type_path, id_file) id_path = os.path.join(type_path, id_file)
try: try:
stix_obj = _check_object_from_file(query, id_path, allow_custom, stix_obj = _check_object_from_file(
version) query, id_path, allow_custom,
version,
)
if stix_obj: if stix_obj:
results.append(stix_obj) results.append(stix_obj)
except IOError as e: except IOError as e:
@ -373,27 +414,39 @@ def _search_markings(query, markings_path, auth_ids, allow_custom, version):
Searches the given directory, which contains markings data, and return any Searches the given directory, which contains markings data, and return any
which match the query. which match the query.
:param query: The query to match against Args:
:param markings_path: The directory with STIX markings files query: The query to match against
:param auth_ids: Search optimization based on object ID markings_path: The directory with STIX markings files
:param allow_custom: Whether to allow custom properties as well unknown auth_ids: Search optimization based on object ID
custom objects. allow_custom: Whether to allow custom properties as well unknown
:param version: Which STIX2 version to use. (e.g. "2.0", "2.1"). If None, custom objects.
use latest version. version (str): If present, it forces the parser to use the version
:return: A list of all matching objects provided. Otherwise, the library will make the best effort based
:raises TypeError, stix2.exceptions.STIXError: If any objects had invalid on checking the "spec_version" property.
content
:raises IOError, OSError: If there were any problems opening/reading files Returns:
A list of all matching objects
Raises:
stix2.exceptions.STIXError: If any objects had invalid content
TypeError: If any objects had invalid content
IOError: If there were any problems opening/reading files
OSError: If there were any problems opening/reading files
""" """
results = [] results = []
id_files = _get_matching_dir_entries(markings_path, auth_ids, stat.S_ISREG, id_files = _get_matching_dir_entries(
".json") markings_path, auth_ids, stat.S_ISREG,
".json",
)
for id_file in id_files: for id_file in id_files:
id_path = os.path.join(markings_path, id_file) id_path = os.path.join(markings_path, id_file)
try: try:
stix_obj = _check_object_from_file(query, id_path, allow_custom, stix_obj = _check_object_from_file(
version) query, id_path, allow_custom,
version,
)
if stix_obj: if stix_obj:
results.append(stix_obj) results.append(stix_obj)
except IOError as e: except IOError as e:
@ -413,12 +466,12 @@ class FileSystemStore(DataStoreMixin):
Args: Args:
stix_dir (str): path to directory of STIX objects stix_dir (str): path to directory of STIX objects
allow_custom (bool): whether to allow custom STIX content to be allow_custom (bool): whether to allow custom STIX content to be
pushed/retrieved. Defaults to True for FileSystemSource side(retrieving data) pushed/retrieved. Defaults to True for FileSystemSource side
and False for FileSystemSink side(pushing data). However, when (retrieving data) and False for FileSystemSink
parameter is supplied, it will be applied to both FileSystemSource side(pushing data). However, when parameter is supplied, it
and FileSystemSink. will be applied to both FileSystemSource and FileSystemSink.
bundlify (bool): whether to wrap objects in bundles when saving them. bundlify (bool): whether to wrap objects in bundles when saving
Default: False. them. Default: False.
Attributes: Attributes:
source (FileSystemSource): FileSystemSource source (FileSystemSource): FileSystemSource
@ -434,7 +487,7 @@ class FileSystemStore(DataStoreMixin):
super(FileSystemStore, self).__init__( super(FileSystemStore, self).__init__(
source=FileSystemSource(stix_dir=stix_dir, allow_custom=allow_custom_source), source=FileSystemSource(stix_dir=stix_dir, allow_custom=allow_custom_source),
sink=FileSystemSink(stix_dir=stix_dir, allow_custom=allow_custom_sink, bundlify=bundlify) sink=FileSystemSink(stix_dir=stix_dir, allow_custom=allow_custom_sink, bundlify=bundlify),
) )
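A hedged end-to-end sketch of FileSystemStore; the directory path is illustrative and must exist before the store is created:
import os
import stix2
stix_dir = "/tmp/stix-data"
if not os.path.exists(stix_dir):
    os.makedirs(stix_dir)
store = stix2.FileSystemStore(stix_dir)
indicator = stix2.v20.Indicator(
    labels=["malicious-activity"],
    pattern="[ipv4-addr:value = '198.51.100.3']",
)
store.add(indicator)                 # written under the indicator/ subdirectory
print(store.get(indicator.id))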
@ -466,7 +519,7 @@ class FileSystemSink(DataSink):
def stix_dir(self): def stix_dir(self):
return self._stix_dir return self._stix_dir
def _check_path_and_write(self, stix_obj): def _check_path_and_write(self, stix_obj, encoding='utf-8'):
"""Write the given STIX object to a file in the STIX file directory. """Write the given STIX object to a file in the STIX file directory.
""" """
type_dir = os.path.join(self._stix_dir, stix_obj["type"]) type_dir = os.path.join(self._stix_dir, stix_obj["type"])
@ -483,10 +536,21 @@ class FileSystemSink(DataSink):
os.makedirs(obj_dir) os.makedirs(obj_dir)
if self.bundlify: if self.bundlify:
stix_obj = Bundle(stix_obj, allow_custom=self.allow_custom) if 'spec_version' in stix_obj:
# Assuming future specs will allow multiple SDO/SROs
# versions in a single bundle we won't need to check this
# and just use the latest supported Bundle version.
stix_obj = v21.Bundle(stix_obj, allow_custom=self.allow_custom)
else:
stix_obj = v20.Bundle(stix_obj, allow_custom=self.allow_custom)
with open(file_path, "w") as f: # TODO: Better handling of the overwriting case.
f.write(str(stix_obj)) if os.path.isfile(file_path):
print("Attempted to overwrite file!", file_path, file=sys.stderr)
else:
with io.open(file_path, 'w', encoding=encoding) as f:
stix_obj = stix_obj.serialize(pretty=True, encoding=encoding, ensure_ascii=False)
f.write(stix_obj)
def add(self, stix_data=None, version=None): def add(self, stix_data=None, version=None):
"""Add STIX objects to file directory. """Add STIX objects to file directory.
@ -495,8 +559,9 @@ class FileSystemSink(DataSink):
stix_data (STIX object OR dict OR str OR list): valid STIX 2.0 content stix_data (STIX object OR dict OR str OR list): valid STIX 2.0 content
in a STIX object (or list of), dict (or list of), or a STIX 2.0 in a STIX object (or list of), dict (or list of), or a STIX 2.0
json encoded string. json encoded string.
version (str): Which STIX2 version to use. (e.g. "2.0", "2.1"). If version (str): If present, it forces the parser to use the version
None, use latest version. provided. Otherwise, the library will make the best effort based
on checking the "spec_version" property.
Note: Note:
``stix_data`` can be a Bundle object, but each object in it will be ``stix_data`` can be a Bundle object, but each object in it will be
@ -504,7 +569,7 @@ class FileSystemSink(DataSink):
the Bundle contained, but not the Bundle itself. the Bundle contained, but not the Bundle itself.
""" """
if isinstance(stix_data, Bundle): if isinstance(stix_data, (v20.Bundle, v21.Bundle)):
# recursively add individual STIX objects # recursively add individual STIX objects
for stix_obj in stix_data.get("objects", []): for stix_obj in stix_data.get("objects", []):
self.add(stix_obj, version=version) self.add(stix_obj, version=version)
@ -520,12 +585,14 @@ class FileSystemSink(DataSink):
elif isinstance(stix_data, list): elif isinstance(stix_data, list):
# recursively add individual STIX objects # recursively add individual STIX objects
for stix_obj in stix_data: for stix_obj in stix_data:
self.add(stix_obj, version=version) self.add(stix_obj)
else: else:
raise TypeError("stix_data must be a STIX object (or list of), " raise TypeError(
"JSON formatted STIX (or list of), " "stix_data must be a STIX object (or list of), "
"or a JSON formatted STIX bundle") "JSON formatted STIX (or list of), "
"or a JSON formatted STIX bundle",
)
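A short sketch of the bundle-unbundling behaviour documented above; the path is illustrative and the directory must exist:
import os
import stix2
if not os.path.exists("/tmp/stix-data"):
    os.makedirs("/tmp/stix-data")
sink = stix2.FileSystemSink("/tmp/stix-data")
bundle = stix2.v20.Bundle(
    stix2.v20.Malware(labels=["ransomware"], name="Cryptolocker"),
)
sink.add(bundle)   # the contained Malware object is written, not the Bundle itself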
class FileSystemSource(DataSource): class FileSystemSource(DataSource):
@ -560,8 +627,9 @@ class FileSystemSource(DataSource):
stix_id (str): The STIX ID of the STIX object to be retrieved. stix_id (str): The STIX ID of the STIX object to be retrieved.
_composite_filters (FilterSet): collection of filters passed from the parent _composite_filters (FilterSet): collection of filters passed from the parent
CompositeDataSource, not user supplied CompositeDataSource, not user supplied
version (str): Which STIX2 version to use. (e.g. "2.0", "2.1"). If version (str): If present, it forces the parser to use the version
None, use latest version. provided. Otherwise, the library will make the best effort based
on checking the "spec_version" property.
Returns: Returns:
(STIX object): STIX object that has the supplied STIX ID. (STIX object): STIX object that has the supplied STIX ID.
@ -591,10 +659,11 @@ class FileSystemSource(DataSource):
Args: Args:
stix_id (str): The STIX ID of the STIX objects to be retrieved. stix_id (str): The STIX ID of the STIX objects to be retrieved.
_composite_filters (FilterSet): collection of filters passed from the parent _composite_filters (FilterSet): collection of filters passed from
CompositeDataSource, not user supplied the parent CompositeDataSource, not user supplied
version (str): Which STIX2 version to use. (e.g. "2.0", "2.1"). If version (str): If present, it forces the parser to use the version
None, use latest version. provided. Otherwise, the library will make the best effort based
on checking the "spec_version" property.
Returns: Returns:
(list): of STIX objects that has the supplied STIX ID. (list): of STIX objects that has the supplied STIX ID.
@ -614,10 +683,11 @@ class FileSystemSource(DataSource):
Args: Args:
query (list): list of filters to search on query (list): list of filters to search on
_composite_filters (FilterSet): collection of filters passed from the _composite_filters (FilterSet): collection of filters passed from
CompositeDataSource, not user supplied the CompositeDataSource, not user supplied
version (str): Which STIX2 version to use. (e.g. "2.0", "2.1"). If version (str): If present, it forces the parser to use the version
None, use latest version. provided. Otherwise, the library will make the best effort based
on checking the "spec_version" property.
Returns: Returns:
(list): list of STIX objects that matches the supplied (list): list of STIX objects that matches the supplied
@ -625,9 +695,7 @@ class FileSystemSource(DataSource):
parsed into a python STIX objects and then returned. parsed into a python STIX objects and then returned.
""" """
all_data = [] all_data = []
query = FilterSet(query) query = FilterSet(query)
# combine all query filters # combine all query filters
@ -637,19 +705,22 @@ class FileSystemSource(DataSource):
query.add(_composite_filters) query.add(_composite_filters)
auth_types, auth_ids = _find_search_optimizations(query) auth_types, auth_ids = _find_search_optimizations(query)
type_dirs = _get_matching_dir_entries(
type_dirs = _get_matching_dir_entries(self._stix_dir, auth_types, self._stix_dir, auth_types,
stat.S_ISDIR) stat.S_ISDIR,
)
for type_dir in type_dirs: for type_dir in type_dirs:
type_path = os.path.join(self._stix_dir, type_dir) type_path = os.path.join(self._stix_dir, type_dir)
if type_dir == "marking-definition": if type_dir == "marking-definition":
type_results = _search_markings(query, type_path, auth_ids, type_results = _search_markings(
self.allow_custom, version) query, type_path, auth_ids,
self.allow_custom, version,
)
else: else:
type_results = _search_versioned(query, type_path, auth_ids, type_results = _search_versioned(
self.allow_custom, version) query, type_path, auth_ids,
self.allow_custom, version,
)
all_data.extend(type_results) all_data.extend(type_results)
return all_data return all_data
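A hedged query example against the same directory layout (path illustrative):
import stix2
src = stix2.FileSystemSource("/tmp/stix-data")
results = src.query([stix2.Filter("type", "=", "indicator")])
print(len(results))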

stix2/datastore/filters.py

@ -1,7 +1,4 @@
""" """Filters for Python STIX2 DataSources, DataSinks, DataStores"""
Filters for Python STIX 2.0 DataSources, DataSinks, DataStores
"""
import collections import collections
from datetime import datetime from datetime import datetime
@ -14,8 +11,10 @@ import stix2.utils
FILTER_OPS = ['=', '!=', 'in', '>', '<', '>=', '<=', 'contains'] FILTER_OPS = ['=', '!=', 'in', '>', '<', '>=', '<=', 'contains']
"""Supported filter value types""" """Supported filter value types"""
FILTER_VALUE_TYPES = (bool, dict, float, int, list, tuple, six.string_types, FILTER_VALUE_TYPES = (
datetime) bool, dict, float, int, list, tuple, six.string_types,
datetime,
)
def _check_filter_components(prop, op, value): def _check_filter_components(prop, op, value):
@ -38,14 +37,14 @@ def _check_filter_components(prop, op, value):
# check filter value type is supported # check filter value type is supported
raise TypeError("Filter value of '%s' is not supported. The type must be a Python immutable type or dictionary" % type(value)) raise TypeError("Filter value of '%s' is not supported. The type must be a Python immutable type or dictionary" % type(value))
if prop == "type" and "_" in value: if prop == 'type' and '_' in value:
# check filter where the property is type, value (type name) cannot have underscores # check filter where the property is type, value (type name) cannot have underscores
raise ValueError("Filter for property 'type' cannot have its value '%s' include underscores" % value) raise ValueError("Filter for property 'type' cannot have its value '%s' include underscores" % value)
return True return True
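A small sketch of the validation rules above; the operator and property names are illustrative:
from stix2.datastore.filters import Filter
f = Filter("external_references.source_name", "=", "capec")   # dotted sub-property is fine
try:
    Filter("type", "LIKE", "mal%")          # not in FILTER_OPS
except ValueError as e:
    print(e)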
class Filter(collections.namedtuple("Filter", ['property', 'op', 'value'])): class Filter(collections.namedtuple('Filter', ['property', 'op', 'value'])):
"""STIX 2 filters that support the querying functionality of STIX 2 """STIX 2 filters that support the querying functionality of STIX 2
DataStores and DataSources. DataStores and DataSources.
@ -157,7 +156,7 @@ def _check_filter(filter_, stix_obj):
""" """
# For properties like granular_markings and external_references # For properties like granular_markings and external_references
# need to extract the first property from the string. # need to extract the first property from the string.
prop = filter_.property.split(".")[0] prop = filter_.property.split('.')[0]
if prop not in stix_obj.keys(): if prop not in stix_obj.keys():
# check filter "property" is in STIX object - if cant be # check filter "property" is in STIX object - if cant be
@ -165,9 +164,9 @@ def _check_filter(filter_, stix_obj):
# (i.e. did not make it through the filter) # (i.e. did not make it through the filter)
return False return False
if "." in filter_.property: if '.' in filter_.property:
# Check embedded properties, from e.g. granular_markings or external_references # Check embedded properties, from e.g. granular_markings or external_references
sub_property = filter_.property.split(".", 1)[1] sub_property = filter_.property.split('.', 1)[1]
sub_filter = filter_._replace(property=sub_property) sub_filter = filter_._replace(property=sub_property)
if isinstance(stix_obj[prop], list): if isinstance(stix_obj[prop], list):
@ -222,8 +221,9 @@ class FilterSet(object):
Operates like set, only adding unique stix2.Filters to the FilterSet Operates like set, only adding unique stix2.Filters to the FilterSet
NOTE: method designed to be very accommodating (i.e. even accepting filters=None) Note:
as it allows for blind calls (very useful in DataStore) method designed to be very accommodating (i.e. even accepting filters=None)
as it allows for blind calls (very useful in DataStore)
Args: Args:
filters: stix2.Filter OR list of stix2.Filter OR stix2.FilterSet filters: stix2.Filter OR list of stix2.Filter OR stix2.FilterSet
@ -244,11 +244,13 @@ class FilterSet(object):
def remove(self, filters=None): def remove(self, filters=None):
"""Remove a Filter, list of Filters, or FilterSet from the FilterSet. """Remove a Filter, list of Filters, or FilterSet from the FilterSet.
NOTE: method designed to be very accommodating (i.e. even accepting filters=None) Note:
as it allows for blind calls (very useful in DataStore) method designed to be very accommodating (i.e. even accepting filters=None)
as it allows for blind calls (very useful in DataStore)
Args: Args:
filters: stix2.Filter OR list of stix2.Filter or stix2.FilterSet filters: stix2.Filter OR list of stix2.Filter or stix2.FilterSet
""" """
if not filters: if not filters:
# so remove() can be called blindly, useful for # so remove() can be called blindly, useful for
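A brief FilterSet sketch showing the blind add()/remove() calls described above:
from stix2.datastore.filters import Filter, FilterSet
fset = FilterSet([Filter("type", "=", "indicator")])
fset.add(Filter("revoked", "=", False))
fset.add(None)       # tolerated; nothing is added
fset.remove(None)    # tolerated; nothing is removed
print(len(list(fset)))   # 2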

stix2/datastore/memory.py

@ -1,28 +1,33 @@
""" """Python STIX2 Memory Source/Sink"""
Python STIX 2.0 Memory Source/Sink
"""
import io
import itertools import itertools
import json import json
import os import os
from stix2 import v20, v21
from stix2.base import _STIXBase from stix2.base import _STIXBase
from stix2.core import Bundle, parse from stix2.core import parse
from stix2.datastore import DataSink, DataSource, DataStoreMixin from stix2.datastore import DataSink, DataSource, DataStoreMixin
from stix2.datastore.filters import FilterSet, apply_common_filters from stix2.datastore.filters import FilterSet, apply_common_filters
from stix2.utils import is_marking from stix2.utils import is_marking
def _add(store, stix_data=None, allow_custom=True, version=None): def _add(store, stix_data, allow_custom=True, version=None):
"""Add STIX objects to MemoryStore/Sink. """Add STIX objects to MemoryStore/Sink.
Adds STIX objects to an in-memory dictionary for fast lookup. Adds STIX objects to an in-memory dictionary for fast lookup.
Recursive function, breaks down STIX Bundles and lists. Recursive function, breaks down STIX Bundles and lists.
Args: Args:
store: A MemoryStore, MemorySink or MemorySource object.
stix_data (list OR dict OR STIX object): STIX objects to be added stix_data (list OR dict OR STIX object): STIX objects to be added
version (str): Which STIX2 version to use. (e.g. "2.0", "2.1"). If allow_custom (bool): Whether to allow custom properties as well unknown
None, use latest version. custom objects. Note that unknown custom objects cannot be parsed
into STIX objects, and will be returned as is. Default: False.
version (str): Which STIX2 version to lock the parser to. (e.g. "2.0",
"2.1"). If None, the library makes the best effort to figure
out the spec representation of the object.
""" """
if isinstance(stix_data, list): if isinstance(stix_data, list):
@ -70,13 +75,15 @@ class _ObjectFamily(object):
def add(self, obj): def add(self, obj):
self.all_versions[obj["modified"]] = obj self.all_versions[obj["modified"]] = obj
if self.latest_version is None or \ if (self.latest_version is None or
obj["modified"] > self.latest_version["modified"]: obj["modified"] > self.latest_version["modified"]):
self.latest_version = obj self.latest_version = obj
def __str__(self): def __str__(self):
return "<<{}; latest={}>>".format(self.all_versions, return "<<{}; latest={}>>".format(
self.latest_version["modified"]) self.all_versions,
self.latest_version["modified"],
)
def __repr__(self): def __repr__(self):
return str(self) return str(self)
@ -96,8 +103,6 @@ class MemoryStore(DataStoreMixin):
allow_custom (bool): whether to allow custom STIX content. allow_custom (bool): whether to allow custom STIX content.
Only applied when export/input functions called, i.e. Only applied when export/input functions called, i.e.
load_from_file() and save_to_file(). Defaults to True. load_from_file() and save_to_file(). Defaults to True.
version (str): Which STIX2 version to use. (e.g. "2.0", "2.1"). If
None, use latest version.
Attributes: Attributes:
_data (dict): the in-memory dict that holds STIX objects _data (dict): the in-memory dict that holds STIX objects
@ -109,19 +114,21 @@ class MemoryStore(DataStoreMixin):
self._data = {} self._data = {}
if stix_data: if stix_data:
_add(self, stix_data, allow_custom, version=version) _add(self, stix_data, allow_custom, version)
super(MemoryStore, self).__init__( super(MemoryStore, self).__init__(
source=MemorySource(stix_data=self._data, allow_custom=allow_custom, version=version, _store=True), source=MemorySource(stix_data=self._data, allow_custom=allow_custom, version=version, _store=True),
sink=MemorySink(stix_data=self._data, allow_custom=allow_custom, version=version, _store=True) sink=MemorySink(stix_data=self._data, allow_custom=allow_custom, version=version, _store=True),
) )
def save_to_file(self, *args, **kwargs): def save_to_file(self, *args, **kwargs):
"""Write SITX objects from in-memory dictionary to JSON file, as a STIX """Write SITX objects from in-memory dictionary to JSON file, as a STIX
Bundle. Bundle. If a directory is given, the Bundle 'id' will be used as
filename. Otherwise, the provided value will be used.
Args: Args:
file_path (str): file path to write STIX data to path (str): file path to write STIX data to.
encoding (str): The file encoding. Default utf-8.
""" """
return self.sink.save_to_file(*args, **kwargs) return self.sink.save_to_file(*args, **kwargs)
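A hedged sketch of the directory behaviour described in the docstring; the output path is illustrative:
import stix2
mem = stix2.MemoryStore()
mem.add(stix2.v20.Indicator(
    labels=["malicious-activity"],
    pattern="[url:value = 'http://example.com/foo']",
))
# Passing a directory makes the sink name the file after the Bundle id.
print(mem.save_to_file("/tmp/stix-out"))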
@ -129,13 +136,11 @@ class MemoryStore(DataStoreMixin):
def load_from_file(self, *args, **kwargs): def load_from_file(self, *args, **kwargs):
"""Load STIX data from JSON file. """Load STIX data from JSON file.
File format is expected to be a single JSON File format is expected to be a single JSON STIX object or JSON STIX
STIX object or JSON STIX bundle. bundle.
Args: Args:
file_path (str): file path to load STIX data from path (str): file path to load STIX data from
version (str): Which STIX2 version to use. (e.g. "2.0", "2.1"). If
None, use latest version.
""" """
return self.source.load_from_file(*args, **kwargs) return self.source.load_from_file(*args, **kwargs)
@ -156,6 +161,9 @@ class MemorySink(DataSink):
allow_custom (bool): whether to allow custom objects/properties allow_custom (bool): whether to allow custom objects/properties
when exporting STIX content to file. when exporting STIX content to file.
Default: True. Default: True.
version (str): If present, it forces the parser to use the version
provided. Otherwise, the library will make the best effort based
on checking the "spec_version" property.
Attributes: Attributes:
_data (dict): the in-memory dict that holds STIX objects. _data (dict): the in-memory dict that holds STIX objects.
@ -171,25 +179,41 @@ class MemorySink(DataSink):
else: else:
self._data = {} self._data = {}
if stix_data: if stix_data:
_add(self, stix_data, allow_custom, version=version) _add(self, stix_data, allow_custom, version)
def add(self, stix_data, version=None): def add(self, stix_data, version=None):
_add(self, stix_data, self.allow_custom, version) _add(self, stix_data, self.allow_custom, version)
add.__doc__ = _add.__doc__ add.__doc__ = _add.__doc__
def save_to_file(self, file_path): def save_to_file(self, path, encoding="utf-8"):
file_path = os.path.abspath(file_path) path = os.path.abspath(path)
all_objs = itertools.chain.from_iterable( all_objs = list(itertools.chain.from_iterable(
value.all_versions.values() if isinstance(value, _ObjectFamily) value.all_versions.values() if isinstance(value, _ObjectFamily)
else [value] else [value]
for value in self._data.values() for value in self._data.values()
) ))
if not os.path.exists(os.path.dirname(file_path)): if any("spec_version" in x for x in all_objs):
os.makedirs(os.path.dirname(file_path)) bundle = v21.Bundle(all_objs, allow_custom=self.allow_custom)
with open(file_path, "w") as f: else:
f.write(str(Bundle(list(all_objs), allow_custom=self.allow_custom))) bundle = v20.Bundle(all_objs, allow_custom=self.allow_custom)
if path.endswith(".json"):
if not os.path.exists(os.path.dirname(path)):
os.makedirs(os.path.dirname(path))
else:
if not os.path.exists(path):
os.makedirs(path)
# if the user only provided a directory, use the bundle id for filename
path = os.path.join(path, bundle["id"] + ".json")
with io.open(path, "w", encoding=encoding) as f:
bundle = bundle.serialize(pretty=True, encoding=encoding, ensure_ascii=False)
f.write(bundle)
return path
save_to_file.__doc__ = MemoryStore.save_to_file.__doc__ save_to_file.__doc__ = MemoryStore.save_to_file.__doc__
@ -209,6 +233,9 @@ class MemorySource(DataSource):
allow_custom (bool): whether to allow custom objects/properties allow_custom (bool): whether to allow custom objects/properties
when importing STIX content from file. when importing STIX content from file.
Default: True. Default: True.
version (str): If present, it forces the parser to use the version
provided. Otherwise, the library will make the best effort based
on checking the "spec_version" property.
Attributes: Attributes:
_data (dict): the in-memory dict that holds STIX objects. _data (dict): the in-memory dict that holds STIX objects.
@ -224,7 +251,7 @@ class MemorySource(DataSource):
else: else:
self._data = {} self._data = {}
if stix_data: if stix_data:
_add(self, stix_data, allow_custom, version=version) _add(self, stix_data, allow_custom, version)
def get(self, stix_id, _composite_filters=None): def get(self, stix_id, _composite_filters=None):
"""Retrieve STIX object from in-memory dict via STIX ID. """Retrieve STIX object from in-memory dict via STIX ID.
@ -251,8 +278,8 @@ class MemorySource(DataSource):
all_filters = list( all_filters = list(
itertools.chain( itertools.chain(
_composite_filters or [], _composite_filters or [],
self.filters self.filters,
) ),
) )
stix_obj = next(apply_common_filters([stix_obj], all_filters), None) stix_obj = next(apply_common_filters([stix_obj], all_filters), None)
@ -260,15 +287,13 @@ class MemorySource(DataSource):
return stix_obj return stix_obj
def all_versions(self, stix_id, _composite_filters=None): def all_versions(self, stix_id, _composite_filters=None):
"""Retrieve STIX objects from in-memory dict via STIX ID, all versions of it """Retrieve STIX objects from in-memory dict via STIX ID, all versions
of it.
Note: Since Memory sources/sinks don't handle multiple versions of a
STIX object, this operation is unnecessary. Translate call to get().
Args: Args:
stix_id (str): The STIX ID of the STIX 2 object to retrieve. stix_id (str): The STIX ID of the STIX 2 object to retrieve.
_composite_filters (FilterSet): collection of filters passed from the parent _composite_filters (FilterSet): collection of filters passed from
CompositeDataSource, not user supplied the parent CompositeDataSource, not user supplied
Returns: Returns:
(list): list of STIX objects that have the supplied ID. (list): list of STIX objects that have the supplied ID.
@ -289,12 +314,12 @@ class MemorySource(DataSource):
all_filters = list( all_filters = list(
itertools.chain( itertools.chain(
_composite_filters or [], _composite_filters or [],
self.filters self.filters,
) ),
) )
results.extend( results.extend(
apply_common_filters(stix_objs_to_filter, all_filters) apply_common_filters(stix_objs_to_filter, all_filters),
) )
return results return results
@ -308,8 +333,8 @@ class MemorySource(DataSource):
Args: Args:
query (list): list of filters to search on query (list): list of filters to search on
_composite_filters (FilterSet): collection of filters passed from the _composite_filters (FilterSet): collection of filters passed from
CompositeDataSource, not user supplied the CompositeDataSource, not user supplied
Returns: Returns:
(list): list of STIX objects that match the supplied query. (list): list of STIX objects that match the supplied query.
@ -335,12 +360,8 @@ class MemorySource(DataSource):
return all_data return all_data
def load_from_file(self, file_path, version=None): def load_from_file(self, file_path, version=None):
with open(os.path.abspath(file_path), "r") as f: with io.open(os.path.abspath(file_path), "r") as f:
stix_data = json.load(f) stix_data = json.load(f)
# Override user version selection if loading a bundle
if stix_data["type"] == "bundle":
version = stix_data["spec_version"]
_add(self, stix_data, self.allow_custom, version) _add(self, stix_data, self.allow_custom, version)
load_from_file.__doc__ = MemoryStore.load_from_file.__doc__ load_from_file.__doc__ = MemoryStore.load_from_file.__doc__
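Usage sketch (not part of the diff): with the optional version argument left out, a MemorySource seeded with parsed objects resolves the version from "spec_version" or the library default; the indicator below is illustrative.

    from stix2 import Indicator, MemorySource

    indicator = Indicator(
        labels=["malicious-activity"],
        pattern="[file:hashes.'MD5' = 'd41d8cd98f00b204e9800998ecf8427e']",
    )

    # No explicit version: the source falls back to spec_version / the default.
    source = MemorySource(stix_data=[indicator], allow_custom=True)
    print(source.get(indicator.id).pattern)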


@ -1,12 +1,13 @@
""" """Python STIX2 TAXIICollection Source/Sink"""
Python STIX 2.x TAXIICollectionStore
"""
from requests.exceptions import HTTPError from requests.exceptions import HTTPError
from stix2 import v20, v21
from stix2.base import _STIXBase from stix2.base import _STIXBase
from stix2.core import Bundle, parse from stix2.core import parse
from stix2.datastore import (DataSink, DataSource, DataSourceError, from stix2.datastore import (
DataStoreMixin) DataSink, DataSource, DataSourceError, DataStoreMixin,
)
from stix2.datastore.filters import Filter, FilterSet, apply_common_filters from stix2.datastore.filters import Filter, FilterSet, apply_common_filters
from stix2.utils import deduplicate from stix2.utils import deduplicate
@ -43,7 +44,7 @@ class TAXIICollectionStore(DataStoreMixin):
super(TAXIICollectionStore, self).__init__( super(TAXIICollectionStore, self).__init__(
source=TAXIICollectionSource(collection, allow_custom=allow_custom_source), source=TAXIICollectionSource(collection, allow_custom=allow_custom_source),
sink=TAXIICollectionSink(collection, allow_custom=allow_custom_sink) sink=TAXIICollectionSink(collection, allow_custom=allow_custom_sink),
) )
@ -66,12 +67,16 @@ class TAXIICollectionSink(DataSink):
if collection.can_write: if collection.can_write:
self.collection = collection self.collection = collection
else: else:
raise DataSourceError("The TAXII Collection object provided does not have write access" raise DataSourceError(
" to the underlying linked Collection resource") "The TAXII Collection object provided does not have write access"
" to the underlying linked Collection resource",
)
except (HTTPError, ValidationError) as e: except (HTTPError, ValidationError) as e:
raise DataSourceError("The underlying TAXII Collection resource defined in the supplied TAXII" raise DataSourceError(
" Collection object provided could not be reached. Receved error:", e) "The underlying TAXII Collection resource defined in the supplied TAXII"
" Collection object provided could not be reached. Receved error:", e,
)
self.allow_custom = allow_custom self.allow_custom = allow_custom
@ -79,26 +84,34 @@ class TAXIICollectionSink(DataSink):
"""Add/push STIX content to TAXII Collection endpoint """Add/push STIX content to TAXII Collection endpoint
Args: Args:
stix_data (STIX object OR dict OR str OR list): valid STIX 2.0 content stix_data (STIX object OR dict OR str OR list): valid STIX2
in a STIX object (or Bundle), STIX onject dict (or Bundle dict), or a STIX 2.0 content in a STIX object (or Bundle), STIX object dict (or
json encoded string, or list of any of the following Bundle dict), or a STIX2 json encoded string, or list of
version (str): Which STIX2 version to use. (e.g. "2.0", "2.1"). If any of the following.
None, use latest version. version (str): If present, it forces the parser to use the version
provided. Otherwise, the library will make the best effort based
on checking the "spec_version" property.
""" """
if isinstance(stix_data, _STIXBase): if isinstance(stix_data, _STIXBase):
# adding python STIX object # adding python STIX object
if stix_data["type"] == "bundle": if stix_data['type'] == 'bundle':
bundle = stix_data.serialize(encoding="utf-8") bundle = stix_data.serialize(encoding='utf-8', ensure_ascii=False)
elif 'spec_version' in stix_data:
# If the spec_version is present, use new Bundle object...
bundle = v21.Bundle(stix_data, allow_custom=self.allow_custom).serialize(encoding='utf-8', ensure_ascii=False)
else: else:
bundle = Bundle(stix_data, allow_custom=self.allow_custom).serialize(encoding="utf-8") bundle = v20.Bundle(stix_data, allow_custom=self.allow_custom).serialize(encoding='utf-8', ensure_ascii=False)
elif isinstance(stix_data, dict): elif isinstance(stix_data, dict):
# adding python dict (of either Bundle or STIX obj) # adding python dict (of either Bundle or STIX obj)
if stix_data["type"] == "bundle": if stix_data['type'] == 'bundle':
bundle = parse(stix_data, allow_custom=self.allow_custom, version=version).serialize(encoding="utf-8") bundle = parse(stix_data, allow_custom=self.allow_custom, version=version).serialize(encoding='utf-8', ensure_ascii=False)
elif 'spec_version' in stix_data:
# If the spec_version is present, use new Bundle object...
bundle = v21.Bundle(stix_data, allow_custom=self.allow_custom).serialize(encoding='utf-8', ensure_ascii=False)
else: else:
bundle = Bundle(stix_data, allow_custom=self.allow_custom).serialize(encoding="utf-8") bundle = v20.Bundle(stix_data, allow_custom=self.allow_custom).serialize(encoding='utf-8', ensure_ascii=False)
elif isinstance(stix_data, list): elif isinstance(stix_data, list):
# adding list of something - recurse on each # adding list of something - recurse on each
@ -109,10 +122,13 @@ class TAXIICollectionSink(DataSink):
elif isinstance(stix_data, str): elif isinstance(stix_data, str):
# adding json encoded string of STIX content # adding json encoded string of STIX content
stix_data = parse(stix_data, allow_custom=self.allow_custom, version=version) stix_data = parse(stix_data, allow_custom=self.allow_custom, version=version)
if stix_data["type"] == "bundle": if stix_data['type'] == 'bundle':
bundle = stix_data.serialize(encoding="utf-8") bundle = stix_data.serialize(encoding='utf-8', ensure_ascii=False)
elif 'spec_version' in stix_data:
# If the spec_version is present, use new Bundle object...
bundle = v21.Bundle(stix_data, allow_custom=self.allow_custom).serialize(encoding='utf-8', ensure_ascii=False)
else: else:
bundle = Bundle(stix_data, allow_custom=self.allow_custom).serialize(encoding="utf-8") bundle = v20.Bundle(stix_data, allow_custom=self.allow_custom).serialize(encoding='utf-8', ensure_ascii=False)
else: else:
raise TypeError("stix_data must be as STIX object(or list of),json formatted STIX (or list of), or a json formatted STIX bundle") raise TypeError("stix_data must be as STIX object(or list of),json formatted STIX (or list of), or a json formatted STIX bundle")
@ -139,12 +155,16 @@ class TAXIICollectionSource(DataSource):
if collection.can_read: if collection.can_read:
self.collection = collection self.collection = collection
else: else:
raise DataSourceError("The TAXII Collection object provided does not have read access" raise DataSourceError(
" to the underlying linked Collection resource") "The TAXII Collection object provided does not have read access"
" to the underlying linked Collection resource",
)
except (HTTPError, ValidationError) as e: except (HTTPError, ValidationError) as e:
raise DataSourceError("The underlying TAXII Collection resource defined in the supplied TAXII" raise DataSourceError(
" Collection object provided could not be reached. Recieved error:", e) "The underlying TAXII Collection resource defined in the supplied TAXII"
" Collection object provided could not be reached. Recieved error:", e,
)
self.allow_custom = allow_custom self.allow_custom = allow_custom
@ -154,10 +174,11 @@ class TAXIICollectionSource(DataSource):
Args: Args:
stix_id (str): The STIX ID of the STIX object to be retrieved. stix_id (str): The STIX ID of the STIX object to be retrieved.
_composite_filters (FilterSet): collection of filters passed from the parent version (str): If present, it forces the parser to use the version
CompositeDataSource, not user supplied provided. Otherwise, the library will make the best effort based
version (str): Which STIX2 version to use. (e.g. "2.0", "2.1"). If on checking the "spec_version" property.
None, use latest version. _composite_filters (FilterSet): collection of filters passed from
the parent CompositeDataSource, not user supplied
Returns: Returns:
(STIX object): STIX object that has the supplied STIX ID. (STIX object): STIX object that has the supplied STIX ID.
@ -173,15 +194,16 @@ class TAXIICollectionSource(DataSource):
if _composite_filters: if _composite_filters:
query.add(_composite_filters) query.add(_composite_filters)
# dont extract TAXII filters from query (to send to TAXII endpoint) # don't extract TAXII filters from query (to send to TAXII endpoint)
# as directly retrieveing a STIX object by ID # as directly retrieving a STIX object by ID
try: try:
stix_objs = self.collection.get_object(stix_id)["objects"] stix_objs = self.collection.get_object(stix_id)['objects']
stix_obj = list(apply_common_filters(stix_objs, query)) stix_obj = list(apply_common_filters(stix_objs, query))
except HTTPError as e: except HTTPError as e:
if e.response.status_code == 404: if e.response.status_code == 404:
# if resource not found or access is denied from TAXII server, return None # if resource not found or access is denied from TAXII server,
# return None
stix_obj = [] stix_obj = []
else: else:
raise DataSourceError("TAXII Collection resource returned error", e) raise DataSourceError("TAXII Collection resource returned error", e)
@ -202,10 +224,11 @@ class TAXIICollectionSource(DataSource):
Args: Args:
stix_id (str): The STIX ID of the STIX objects to be retrieved. stix_id (str): The STIX ID of the STIX objects to be retrieved.
version (str): If present, it forces the parser to use the version
provided. Otherwise, the library will make the best effort based
on checking the "spec_version" property.
_composite_filters (FilterSet): collection of filters passed from the parent _composite_filters (FilterSet): collection of filters passed from the parent
CompositeDataSource, not user supplied CompositeDataSource, not user supplied
version (str): Which STIX2 version to use. (e.g. "2.0", "2.1"). If
None, use latest version.
Returns: Returns:
(see query() as all_versions() is just a wrapper) (see query() as all_versions() is just a wrapper)
@ -213,8 +236,8 @@ class TAXIICollectionSource(DataSource):
""" """
# make query in TAXII query format since 'id' is TAXII field # make query in TAXII query format since 'id' is TAXII field
query = [ query = [
Filter("id", "=", stix_id), Filter('id', '=', stix_id),
Filter("version", "=", "all") Filter('version', '=', 'all'),
] ]
all_data = self.query(query=query, _composite_filters=_composite_filters) all_data = self.query(query=query, _composite_filters=_composite_filters)
@ -236,10 +259,11 @@ class TAXIICollectionSource(DataSource):
Args: Args:
query (list): list of filters to search on query (list): list of filters to search on
_composite_filters (FilterSet): collection of filters passed from the version (str): If present, it forces the parser to use the version
CompositeDataSource, not user supplied provided. Otherwise, the library will make the best effort based
version (str): Which STIX2 version to use. (e.g. "2.0", "2.1"). If on checking the "spec_version" property.
None, use latest version. _composite_filters (FilterSet): collection of filters passed from
the CompositeDataSource, not user supplied
Returns: Returns:
(list): list of STIX objects that matches the supplied (list): list of STIX objects that matches the supplied
@ -263,7 +287,7 @@ class TAXIICollectionSource(DataSource):
# query TAXII collection # query TAXII collection
try: try:
all_data = self.collection.get_objects(**taxii_filters_dict)["objects"] all_data = self.collection.get_objects(**taxii_filters_dict)['objects']
# deduplicate data (before filtering as reduces wasted filtering) # deduplicate data (before filtering as reduces wasted filtering)
all_data = deduplicate(all_data) all_data = deduplicate(all_data)
@ -275,9 +299,11 @@ class TAXIICollectionSource(DataSource):
except HTTPError as e: except HTTPError as e:
# if resources not found or access is denied from TAXII server, return empty list # if resources not found or access is denied from TAXII server, return empty list
if e.response.status_code == 404: if e.response.status_code == 404:
raise DataSourceError("The requested STIX objects for the TAXII Collection resource defined in" raise DataSourceError(
" the supplied TAXII Collection object are either not found or access is" "The requested STIX objects for the TAXII Collection resource defined in"
" denied. Received error: ", e) " the supplied TAXII Collection object are either not found or access is"
" denied. Received error: ", e,
)
# parse python STIX objects from the STIX object dicts # parse python STIX objects from the STIX object dicts
stix_objs = [parse(stix_obj_dict, allow_custom=self.allow_custom, version=version) for stix_obj_dict in all_data] stix_objs = [parse(stix_obj_dict, allow_custom=self.allow_custom, version=version) for stix_obj_dict in all_data]
@ -290,18 +316,17 @@ class TAXIICollectionSource(DataSource):
Does not put in TAXII spec format as the TAXII2Client (that we use) Does not put in TAXII spec format as the TAXII2Client (that we use)
does this for us. does this for us.
Notes: Note:
Currently, the TAXII2Client can handle TAXII filters where the Currently, the TAXII2Client can handle TAXII filters where the
filter value is a list, as both a comma-separated string or python filter value is a list, as both a comma-separated string or python
list.
For instance - "?match[type]=indicator,sighting" can be in a For instance - "?match[type]=indicator,sighting" can be in a
filter in any of these formats: filter in any of these formats:
Filter("type", "<any op>", "indicator,sighting") Filter("type", "<any op>", "indicator,sighting")
Filter("type", "<any op>", ["indicator", "sighting"]) Filter("type", "<any op>", ["indicator", "sighting"])
Args: Args:
query (list): list of filters to extract which ones are TAXII query (list): list of filters to extract which ones are TAXII
specific. specific.
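Sketch of the two equivalent filter spellings mentioned in the note above (not part of the diff); the collection URL is hypothetical and the query requires a reachable TAXII server.

    from taxii2client import Collection

    from stix2 import Filter, TAXIICollectionSource

    collection = Collection("https://taxii.example.com/api1/collections/enterprise/")
    source = TAXIICollectionSource(collection)

    # Both forms translate to "?match[type]=indicator,sighting" on the wire.
    results = source.query([Filter("type", "=", "indicator,sighting")])
    results = source.query([Filter("type", "=", ["indicator", "sighting"])])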


@ -1,5 +1,4 @@
"""Python STIX 2.0 Environment API. """Python STIX2 Environment API."""
"""
import copy import copy
@ -27,9 +26,11 @@ class ObjectFactory(object):
default. Defaults to True. default. Defaults to True.
""" """
def __init__(self, created_by_ref=None, created=None, def __init__(
external_references=None, object_marking_refs=None, self, created_by_ref=None, created=None,
list_append=True): external_references=None, object_marking_refs=None,
list_append=True,
):
self._defaults = {} self._defaults = {}
if created_by_ref: if created_by_ref:
@ -166,3 +167,22 @@ class Environment(DataStoreMixin):
def parse(self, *args, **kwargs): def parse(self, *args, **kwargs):
return _parse(*args, **kwargs) return _parse(*args, **kwargs)
parse.__doc__ = _parse.__doc__ parse.__doc__ = _parse.__doc__
def creator_of(self, obj):
"""Retrieve the Identity refered to by the object's `created_by_ref`.
Args:
obj: The STIX object whose `created_by_ref` property will be looked
up.
Returns:
The Identity object referenced by `created_by_ref`, or None if the
object contains no `created_by_ref` property or its creator cannot
be found.
"""
creator_id = obj.get('created_by_ref', '')
if creator_id:
return self.get(creator_id)
else:
return None
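Usage sketch for creator_of() (not part of the diff), backed by an in-memory store; the objects are illustrative.

    from stix2 import Environment, Identity, Indicator, MemoryStore

    env = Environment(store=MemoryStore())

    identity = Identity(name="ACME Corp", identity_class="organization")
    env.add(identity)

    indicator = Indicator(
        labels=["malicious-activity"],
        pattern="[ipv4-addr:value = '198.51.100.1']",
        created_by_ref=identity.id,
    )
    env.add(indicator)

    print(env.creator_of(indicator).name)  # ACME Corp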


@ -1,5 +1,4 @@
"""STIX 2 error classes. """STIX2 Error Classes."""
"""
class STIXError(Exception): class STIXError(Exception):
@ -30,8 +29,10 @@ class MissingPropertiesError(STIXError, ValueError):
def __str__(self): def __str__(self):
msg = "No values for required properties for {0}: ({1})." msg = "No values for required properties for {0}: ({1})."
return msg.format(self.cls.__name__, return msg.format(
", ".join(x for x in self.properties)) self.cls.__name__,
", ".join(x for x in self.properties),
)
class ExtraPropertiesError(STIXError, TypeError): class ExtraPropertiesError(STIXError, TypeError):
@ -44,8 +45,10 @@ class ExtraPropertiesError(STIXError, TypeError):
def __str__(self): def __str__(self):
msg = "Unexpected properties for {0}: ({1})." msg = "Unexpected properties for {0}: ({1})."
return msg.format(self.cls.__name__, return msg.format(
", ".join(x for x in self.properties)) self.cls.__name__,
", ".join(x for x in self.properties),
)
class ImmutableError(STIXError, ValueError): class ImmutableError(STIXError, ValueError):
@ -110,8 +113,10 @@ class MutuallyExclusivePropertiesError(STIXError, TypeError):
def __str__(self): def __str__(self):
msg = "The ({1}) properties for {0} are mutually exclusive." msg = "The ({1}) properties for {0} are mutually exclusive."
return msg.format(self.cls.__name__, return msg.format(
", ".join(x for x in self.properties)) self.cls.__name__,
", ".join(x for x in self.properties),
)
class DependentPropertiesError(STIXError, TypeError): class DependentPropertiesError(STIXError, TypeError):
@ -124,8 +129,10 @@ class DependentPropertiesError(STIXError, TypeError):
def __str__(self): def __str__(self):
msg = "The property dependencies for {0}: ({1}) are not met." msg = "The property dependencies for {0}: ({1}) are not met."
return msg.format(self.cls.__name__, return msg.format(
", ".join(name for x in self.dependencies for name in x)) self.cls.__name__,
", ".join(name for x in self.dependencies for name in x),
)
class AtLeastOnePropertyError(STIXError, TypeError): class AtLeastOnePropertyError(STIXError, TypeError):
@ -138,8 +145,10 @@ class AtLeastOnePropertyError(STIXError, TypeError):
def __str__(self): def __str__(self):
msg = "At least one of the ({1}) properties for {0} must be populated." msg = "At least one of the ({1}) properties for {0} must be populated."
return msg.format(self.cls.__name__, return msg.format(
", ".join(x for x in self.properties)) self.cls.__name__,
", ".join(x for x in self.properties),
)
class RevokeError(STIXError, ValueError): class RevokeError(STIXError, ValueError):


@ -9,7 +9,6 @@ Note:
Definitions. The corresponding methods on those classes are identical to Definitions. The corresponding methods on those classes are identical to
these functions except that the `obj` parameter is omitted. these functions except that the `obj` parameter is omitted.
.. autosummary:: .. autosummary::
:toctree: markings :toctree: markings
@ -51,7 +50,7 @@ def get_markings(obj, selectors=None, inherited=False, descendants=False):
obj, obj,
selectors, selectors,
inherited, inherited,
descendants descendants,
) )
if inherited: if inherited:
@ -208,7 +207,7 @@ def is_marked(obj, marking=None, selectors=None, inherited=False, descendants=Fa
marking, marking,
selectors, selectors,
inherited, inherited,
descendants descendants,
) )
if inherited: if inherited:
@ -221,7 +220,7 @@ def is_marked(obj, marking=None, selectors=None, inherited=False, descendants=Fa
granular_marks, granular_marks,
selectors, selectors,
inherited, inherited,
descendants descendants,
) )
result = result or object_markings.is_marked(obj, object_marks) result = result or object_markings.is_marked(obj, object_marks)


@ -1,5 +1,4 @@
"""Functions for working with STIX 2.0 granular markings. """Functions for working with STIX2 granular markings."""
"""
from stix2 import exceptions from stix2 import exceptions
from stix2.markings import utils from stix2.markings import utils
@ -29,7 +28,7 @@ def get_markings(obj, selectors, inherited=False, descendants=False):
selectors = utils.convert_to_list(selectors) selectors = utils.convert_to_list(selectors)
utils.validate(obj, selectors) utils.validate(obj, selectors)
granular_markings = obj.get("granular_markings", []) granular_markings = obj.get('granular_markings', [])
if not granular_markings: if not granular_markings:
return [] return []
@ -38,11 +37,13 @@ def get_markings(obj, selectors, inherited=False, descendants=False):
for marking in granular_markings: for marking in granular_markings:
for user_selector in selectors: for user_selector in selectors:
for marking_selector in marking.get("selectors", []): for marking_selector in marking.get('selectors', []):
if any([(user_selector == marking_selector), # Catch explicit selectors. if any([
(user_selector.startswith(marking_selector) and inherited), # Catch inherited selectors. (user_selector == marking_selector), # Catch explicit selectors.
(marking_selector.startswith(user_selector) and descendants)]): # Catch descendants selectors (user_selector.startswith(marking_selector) and inherited), # Catch inherited selectors.
refs = marking.get("marking_ref", []) (marking_selector.startswith(user_selector) and descendants),
]): # Catch descendants selectors
refs = marking.get('marking_ref', [])
results.update([refs]) results.update([refs])
return list(results) return list(results)
@ -93,7 +94,7 @@ def remove_markings(obj, marking, selectors):
marking = utils.convert_to_marking_list(marking) marking = utils.convert_to_marking_list(marking)
utils.validate(obj, selectors) utils.validate(obj, selectors)
granular_markings = obj.get("granular_markings") granular_markings = obj.get('granular_markings')
if not granular_markings: if not granular_markings:
return obj return obj
@ -102,9 +103,9 @@ def remove_markings(obj, marking, selectors):
to_remove = [] to_remove = []
for m in marking: for m in marking:
to_remove.append({"marking_ref": m, "selectors": selectors}) to_remove.append({'marking_ref': m, 'selectors': selectors})
remove = utils.build_granular_marking(to_remove).get("granular_markings") remove = utils.build_granular_marking(to_remove).get('granular_markings')
if not any(marking in granular_markings for marking in remove): if not any(marking in granular_markings for marking in remove):
raise exceptions.MarkingNotFoundError(obj, remove) raise exceptions.MarkingNotFoundError(obj, remove)
@ -145,10 +146,10 @@ def add_markings(obj, marking, selectors):
granular_marking = [] granular_marking = []
for m in marking: for m in marking:
granular_marking.append({"marking_ref": m, "selectors": sorted(selectors)}) granular_marking.append({'marking_ref': m, 'selectors': sorted(selectors)})
if obj.get("granular_markings"): if obj.get('granular_markings'):
granular_marking.extend(obj.get("granular_markings")) granular_marking.extend(obj.get('granular_markings'))
granular_marking = utils.expand_markings(granular_marking) granular_marking = utils.expand_markings(granular_marking)
granular_marking = utils.compress_markings(granular_marking) granular_marking = utils.compress_markings(granular_marking)
@ -176,7 +177,7 @@ def clear_markings(obj, selectors):
selectors = utils.convert_to_list(selectors) selectors = utils.convert_to_list(selectors)
utils.validate(obj, selectors) utils.validate(obj, selectors)
granular_markings = obj.get("granular_markings") granular_markings = obj.get('granular_markings')
if not granular_markings: if not granular_markings:
return obj return obj
@ -184,25 +185,26 @@ def clear_markings(obj, selectors):
granular_markings = utils.expand_markings(granular_markings) granular_markings = utils.expand_markings(granular_markings)
sdo = utils.build_granular_marking( sdo = utils.build_granular_marking(
[{"selectors": selectors, "marking_ref": "N/A"}] [{'selectors': selectors, 'marking_ref': 'N/A'}],
) )
clear = sdo.get("granular_markings", []) clear = sdo.get('granular_markings', [])
if not any(clear_selector in sdo_selectors.get("selectors", []) if not any(
for sdo_selectors in granular_markings clear_selector in sdo_selectors.get('selectors', [])
for clear_marking in clear for sdo_selectors in granular_markings
for clear_selector in clear_marking.get("selectors", []) for clear_marking in clear
): for clear_selector in clear_marking.get('selectors', [])
):
raise exceptions.MarkingNotFoundError(obj, clear) raise exceptions.MarkingNotFoundError(obj, clear)
for granular_marking in granular_markings: for granular_marking in granular_markings:
for s in selectors: for s in selectors:
if s in granular_marking.get("selectors", []): if s in granular_marking.get('selectors', []):
marking_refs = granular_marking.get("marking_ref") marking_refs = granular_marking.get('marking_ref')
if marking_refs: if marking_refs:
granular_marking["marking_ref"] = "" granular_marking['marking_ref'] = ''
granular_markings = utils.compress_markings(granular_markings) granular_markings = utils.compress_markings(granular_markings)
@ -245,19 +247,21 @@ def is_marked(obj, marking=None, selectors=None, inherited=False, descendants=Fa
marking = utils.convert_to_marking_list(marking) marking = utils.convert_to_marking_list(marking)
utils.validate(obj, selectors) utils.validate(obj, selectors)
granular_markings = obj.get("granular_markings", []) granular_markings = obj.get('granular_markings', [])
marked = False marked = False
markings = set() markings = set()
for granular_marking in granular_markings: for granular_marking in granular_markings:
for user_selector in selectors: for user_selector in selectors:
for marking_selector in granular_marking.get("selectors", []): for marking_selector in granular_marking.get('selectors', []):
if any([(user_selector == marking_selector), # Catch explicit selectors. if any([
(user_selector.startswith(marking_selector) and inherited), # Catch inherited selectors. (user_selector == marking_selector), # Catch explicit selectors.
(marking_selector.startswith(user_selector) and descendants)]): # Catch descendants selectors (user_selector.startswith(marking_selector) and inherited), # Catch inherited selectors.
marking_ref = granular_marking.get("marking_ref", "") (marking_selector.startswith(user_selector) and descendants),
]): # Catch descendants selectors
marking_ref = granular_marking.get('marking_ref', '')
if marking and any(x == marking_ref for x in marking): if marking and any(x == marking_ref for x in marking):
markings.update([marking_ref]) markings.update([marking_ref])
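Read-only sketch of the selector matching above (not part of the diff), using the library's bundled TLP:RED marking definition and a plain dict with an illustrative id.

    from stix2 import TLP_RED
    from stix2.markings import granular_markings

    malware = {
        "type": "malware",
        "id": "malware--31b940d4-6f7f-459a-80ea-9c1f17b58abc",
        "name": "Cryptolocker",
        "labels": ["ransomware"],
        "granular_markings": [
            {"marking_ref": TLP_RED.id, "selectors": ["name"]},
        ],
    }

    print(granular_markings.get_markings(malware, ["name"]))           # [TLP_RED.id]
    print(granular_markings.is_marked(malware, TLP_RED.id, ["name"]))  # True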


@ -1,5 +1,4 @@
"""Functions for working with STIX 2.0 object markings. """Functions for working with STIX2 object markings."""
"""
from stix2 import exceptions from stix2 import exceptions
from stix2.markings import utils from stix2.markings import utils
@ -18,7 +17,7 @@ def get_markings(obj):
markings are present in `object_marking_refs`. markings are present in `object_marking_refs`.
""" """
return obj.get("object_marking_refs", []) return obj.get('object_marking_refs', [])
def add_markings(obj, marking): def add_markings(obj, marking):
@ -35,7 +34,7 @@ def add_markings(obj, marking):
""" """
marking = utils.convert_to_marking_list(marking) marking = utils.convert_to_marking_list(marking)
object_markings = set(obj.get("object_marking_refs", []) + marking) object_markings = set(obj.get('object_marking_refs', []) + marking)
return new_version(obj, object_marking_refs=list(object_markings), allow_custom=True) return new_version(obj, object_marking_refs=list(object_markings), allow_custom=True)
@ -59,12 +58,12 @@ def remove_markings(obj, marking):
""" """
marking = utils.convert_to_marking_list(marking) marking = utils.convert_to_marking_list(marking)
object_markings = obj.get("object_marking_refs", []) object_markings = obj.get('object_marking_refs', [])
if not object_markings: if not object_markings:
return obj return obj
if any(x not in obj["object_marking_refs"] for x in marking): if any(x not in obj['object_marking_refs'] for x in marking):
raise exceptions.MarkingNotFoundError(obj, marking) raise exceptions.MarkingNotFoundError(obj, marking)
new_markings = [x for x in object_markings if x not in marking] new_markings = [x for x in object_markings if x not in marking]
@ -124,7 +123,7 @@ def is_marked(obj, marking=None):
""" """
marking = utils.convert_to_marking_list(marking) marking = utils.convert_to_marking_list(marking)
object_markings = obj.get("object_marking_refs", []) object_markings = obj.get('object_marking_refs', [])
if marking: if marking:
return any(x in object_markings for x in marking) return any(x in object_markings for x in marking)
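Sketch of the object-level (as opposed to granular) marking helpers (not part of the diff); the indicator is illustrative.

    from stix2 import Indicator, TLP_AMBER
    from stix2.markings import object_markings

    indicator = Indicator(
        labels=["malicious-activity"],
        pattern="[ipv4-addr:value = '203.0.113.5']",
        object_marking_refs=[TLP_AMBER],
    )

    print(object_markings.get_markings(indicator))             # [TLP_AMBER.id]
    print(object_markings.is_marked(indicator, TLP_AMBER.id))  # True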


@ -1,5 +1,4 @@
"""Utility functions for STIX 2.0 data markings. """Utility functions for STIX2 data markings."""
"""
import collections import collections
@ -23,7 +22,7 @@ def _evaluate_expression(obj, selector):
""" """
for items, value in iterpath(obj): for items, value in iterpath(obj):
path = ".".join(items) path = '.'.join(items)
if path == selector and value: if path == selector and value:
return [value] return [value]
@ -119,12 +118,12 @@ def compress_markings(granular_markings):
map_ = collections.defaultdict(set) map_ = collections.defaultdict(set)
for granular_marking in granular_markings: for granular_marking in granular_markings:
if granular_marking.get("marking_ref"): if granular_marking.get('marking_ref'):
map_[granular_marking.get("marking_ref")].update(granular_marking.get("selectors")) map_[granular_marking.get('marking_ref')].update(granular_marking.get('selectors'))
compressed = \ compressed = \
[ [
{"marking_ref": marking_ref, "selectors": sorted(selectors)} {'marking_ref': marking_ref, 'selectors': sorted(selectors)}
for marking_ref, selectors in six.iteritems(map_) for marking_ref, selectors in six.iteritems(map_)
] ]
@ -173,14 +172,14 @@ def expand_markings(granular_markings):
expanded = [] expanded = []
for marking in granular_markings: for marking in granular_markings:
selectors = marking.get("selectors") selectors = marking.get('selectors')
marking_ref = marking.get("marking_ref") marking_ref = marking.get('marking_ref')
expanded.extend( expanded.extend(
[ [
{"marking_ref": marking_ref, "selectors": [selector]} {'marking_ref': marking_ref, 'selectors': [selector]}
for selector in selectors for selector in selectors
] ],
) )
return expanded return expanded
@ -189,7 +188,7 @@ def expand_markings(granular_markings):
def build_granular_marking(granular_marking): def build_granular_marking(granular_marking):
"""Return a dictionary with the required structure for a granular marking. """Return a dictionary with the required structure for a granular marking.
""" """
return {"granular_markings": expand_markings(granular_marking)} return {'granular_markings': expand_markings(granular_marking)}
def iterpath(obj, path=None): def iterpath(obj, path=None):
@ -229,7 +228,7 @@ def iterpath(obj, path=None):
elif isinstance(varobj, list): elif isinstance(varobj, list):
for item in varobj: for item in varobj:
index = "[{0}]".format(varobj.index(item)) index = '[{0}]'.format(varobj.index(item))
path.append(index) path.append(index)
yield (path, item) yield (path, item)
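Sketch of the expand/compress round trip these helpers provide (not part of the diff); the marking-definition id is made up for illustration.

    from stix2.markings.utils import compress_markings, expand_markings

    ref = "marking-definition--4c9faac1-4d69-4bbc-9a8d-62f12d1d5f6a"
    granular = [{"marking_ref": ref, "selectors": ["labels", "name"]}]

    expanded = expand_markings(granular)
    # [{'marking_ref': ref, 'selectors': ['labels']},
    #  {'marking_ref': ref, 'selectors': ['name']}]

    print(compress_markings(expanded) == granular)  # True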

stix2/pattern_visitor.py (new file, 362 lines)

@ -0,0 +1,362 @@
import importlib
import inspect
from antlr4 import CommonTokenStream, InputStream
import six
from stix2patterns.grammars.STIXPatternLexer import STIXPatternLexer
from stix2patterns.grammars.STIXPatternParser import (
STIXPatternParser, TerminalNode,
)
from stix2patterns.grammars.STIXPatternVisitor import STIXPatternVisitor
from stix2patterns.validator import STIXPatternErrorListener
from .patterns import *
from .patterns import _BooleanExpression
# flake8: noqa F405
def collapse_lists(lists):
result = []
for c in lists:
if isinstance(c, list):
result.extend(c)
else:
result.append(c)
return result
def remove_terminal_nodes(parse_tree_nodes):
values = []
for x in parse_tree_nodes:
if not isinstance(x, TerminalNode):
values.append(x)
return values
# This class defines a complete generic visitor for a parse tree produced by STIXPatternParser.
class STIXPatternVisitorForSTIX2(STIXPatternVisitor):
classes = {}
def __init__(self, module_suffix, module_name):
if module_suffix and module_name:
self.module_suffix = module_suffix
if not STIXPatternVisitorForSTIX2.classes:
module = importlib.import_module(module_name)
for k, c in inspect.getmembers(module, inspect.isclass):
STIXPatternVisitorForSTIX2.classes[k] = c
else:
self.module_suffix = None
super(STIXPatternVisitor, self).__init__()
def get_class(self, class_name):
if class_name in STIXPatternVisitorForSTIX2.classes:
return STIXPatternVisitorForSTIX2.classes[class_name]
else:
return None
def instantiate(self, klass_name, *args):
klass_to_instantiate = None
if self.module_suffix:
klass_to_instantiate = self.get_class(klass_name + "For" + self.module_suffix)
if not klass_to_instantiate:
# use the classes in python_stix2
klass_to_instantiate = globals()[klass_name]
return klass_to_instantiate(*args)
# Visit a parse tree produced by STIXPatternParser#pattern.
def visitPattern(self, ctx):
children = self.visitChildren(ctx)
return children[0]
# Visit a parse tree produced by STIXPatternParser#observationExpressions.
def visitObservationExpressions(self, ctx):
children = self.visitChildren(ctx)
if len(children) == 1:
return children[0]
else:
return FollowedByObservationExpression([children[0], children[2]])
# Visit a parse tree produced by STIXPatternParser#observationExpressionOr.
def visitObservationExpressionOr(self, ctx):
children = self.visitChildren(ctx)
if len(children) == 1:
return children[0]
else:
return self.instantiate("OrObservationExpression", [children[0], children[2]])
# Visit a parse tree produced by STIXPatternParser#observationExpressionAnd.
def visitObservationExpressionAnd(self, ctx):
children = self.visitChildren(ctx)
if len(children) == 1:
return children[0]
else:
return self.instantiate("AndObservationExpression", [children[0], children[2]])
# Visit a parse tree produced by STIXPatternParser#observationExpressionRepeated.
def visitObservationExpressionRepeated(self, ctx):
children = self.visitChildren(ctx)
return self.instantiate("QualifiedObservationExpression", children[0], children[1])
# Visit a parse tree produced by STIXPatternParser#observationExpressionSimple.
def visitObservationExpressionSimple(self, ctx):
children = self.visitChildren(ctx)
return self.instantiate("ObservationExpression", children[1])
# Visit a parse tree produced by STIXPatternParser#observationExpressionCompound.
def visitObservationExpressionCompound(self, ctx):
children = self.visitChildren(ctx)
return self.instantiate("ObservationExpression", children[1])
# Visit a parse tree produced by STIXPatternParser#observationExpressionWithin.
def visitObservationExpressionWithin(self, ctx):
children = self.visitChildren(ctx)
return self.instantiate("QualifiedObservationExpression", children[0], children[1])
# Visit a parse tree produced by STIXPatternParser#observationExpressionStartStop.
def visitObservationExpressionStartStop(self, ctx):
children = self.visitChildren(ctx)
return self.instantiate("QualifiedObservationExpression", children[0], children[1])
# Visit a parse tree produced by STIXPatternParser#comparisonExpression.
def visitComparisonExpression(self, ctx):
children = self.visitChildren(ctx)
if len(children) == 1:
return children[0]
else:
if isinstance(children[0], _BooleanExpression):
children[0].operands.append(children[2])
return children[0]
else:
return self.instantiate("OrBooleanExpression", [children[0], children[2]])
# Visit a parse tree produced by STIXPatternParser#comparisonExpressionAnd.
def visitComparisonExpressionAnd(self, ctx):
# TODO: NOT
children = self.visitChildren(ctx)
if len(children) == 1:
return children[0]
else:
if isinstance(children[0], _BooleanExpression):
children[0].operands.append(children[2])
return children[0]
else:
return self.instantiate("AndBooleanExpression", [children[0], children[2]])
# Visit a parse tree produced by STIXPatternParser#propTestEqual.
def visitPropTestEqual(self, ctx):
children = self.visitChildren(ctx)
operator = children[1].symbol.type
negated = operator != STIXPatternParser.EQ
return self.instantiate(
"EqualityComparisonExpression", children[0], children[3 if len(children) > 3 else 2],
negated,
)
# Visit a parse tree produced by STIXPatternParser#propTestOrder.
def visitPropTestOrder(self, ctx):
children = self.visitChildren(ctx)
operator = children[1].symbol.type
if operator == STIXPatternParser.GT:
return self.instantiate(
"GreaterThanComparisonExpression", children[0],
children[3 if len(children) > 3 else 2], False,
)
elif operator == STIXPatternParser.LT:
return self.instantiate(
"LessThanComparisonExpression", children[0],
children[3 if len(children) > 3 else 2], False,
)
elif operator == STIXPatternParser.GE:
return self.instantiate(
"GreaterThanEqualComparisonExpression", children[0],
children[3 if len(children) > 3 else 2], False,
)
elif operator == STIXPatternParser.LE:
return self.instantiate(
"LessThanEqualComparisonExpression", children[0],
children[3 if len(children) > 3 else 2], False,
)
# Visit a parse tree produced by STIXPatternParser#propTestSet.
def visitPropTestSet(self, ctx):
children = self.visitChildren(ctx)
return self.instantiate("InComparisonExpression", children[0], children[3 if len(children) > 3 else 2], False)
# Visit a parse tree produced by STIXPatternParser#propTestLike.
def visitPropTestLike(self, ctx):
children = self.visitChildren(ctx)
return self.instantiate("LikeComparisonExpression", children[0], children[3 if len(children) > 3 else 2], False)
# Visit a parse tree produced by STIXPatternParser#propTestRegex.
def visitPropTestRegex(self, ctx):
children = self.visitChildren(ctx)
return self.instantiate(
"MatchesComparisonExpression", children[0], children[3 if len(children) > 3 else 2],
False,
)
# Visit a parse tree produced by STIXPatternParser#propTestIsSubset.
def visitPropTestIsSubset(self, ctx):
children = self.visitChildren(ctx)
return self.instantiate("IsSubsetComparisonExpression", children[0], children[3 if len(children) > 3 else 2])
# Visit a parse tree produced by STIXPatternParser#propTestIsSuperset.
def visitPropTestIsSuperset(self, ctx):
children = self.visitChildren(ctx)
return self.instantiate("IsSupersetComparisonExpression", children[0], children[3 if len(children) > 3 else 2])
# Visit a parse tree produced by STIXPatternParser#propTestParen.
def visitPropTestParen(self, ctx):
children = self.visitChildren(ctx)
return self.instantiate("ParentheticalExpression", children[1])
# Visit a parse tree produced by STIXPatternParser#startStopQualifier.
def visitStartStopQualifier(self, ctx):
children = self.visitChildren(ctx)
return StartStopQualifier(children[1], children[3])
# Visit a parse tree produced by STIXPatternParser#withinQualifier.
def visitWithinQualifier(self, ctx):
children = self.visitChildren(ctx)
return WithinQualifier(children[1])
# Visit a parse tree produced by STIXPatternParser#repeatedQualifier.
def visitRepeatedQualifier(self, ctx):
children = self.visitChildren(ctx)
return RepeatQualifier(children[1])
# Visit a parse tree produced by STIXPatternParser#objectPath.
def visitObjectPath(self, ctx):
children = self.visitChildren(ctx)
flat_list = collapse_lists(children[2:])
property_path = []
i = 0
while i < len(flat_list):
current = flat_list[i]
if i == len(flat_list)-1:
property_path.append(current)
break
next = flat_list[i+1]
if isinstance(next, TerminalNode):
property_path.append(self.instantiate("ListObjectPathComponent", current.property_name, next.getText()))
i += 2
else:
property_path.append(current)
i += 1
return self.instantiate("ObjectPath", children[0].getText(), property_path)
# Visit a parse tree produced by STIXPatternParser#objectType.
def visitObjectType(self, ctx):
children = self.visitChildren(ctx)
return children[0]
# Visit a parse tree produced by STIXPatternParser#firstPathComponent.
def visitFirstPathComponent(self, ctx):
children = self.visitChildren(ctx)
step = children[0].getText()
# if step.endswith("_ref"):
# return stix2.ReferenceObjectPathComponent(step)
# else:
return self.instantiate("BasicObjectPathComponent", step, False)
# Visit a parse tree produced by STIXPatternParser#indexPathStep.
def visitIndexPathStep(self, ctx):
children = self.visitChildren(ctx)
return children[1]
# Visit a parse tree produced by STIXPatternParser#pathStep.
def visitPathStep(self, ctx):
return collapse_lists(self.visitChildren(ctx))
# Visit a parse tree produced by STIXPatternParser#keyPathStep.
def visitKeyPathStep(self, ctx):
children = self.visitChildren(ctx)
if isinstance(children[1], StringConstant):
# special case for hashes
return children[1].value
else:
return self.instantiate("BasicObjectPathComponent", children[1].getText(), True)
# Visit a parse tree produced by STIXPatternParser#setLiteral.
def visitSetLiteral(self, ctx):
children = self.visitChildren(ctx)
return self.instantiate("ListConstant", remove_terminal_nodes(children))
# Visit a parse tree produced by STIXPatternParser#primitiveLiteral.
def visitPrimitiveLiteral(self, ctx):
children = self.visitChildren(ctx)
return children[0]
# Visit a parse tree produced by STIXPatternParser#orderableLiteral.
def visitOrderableLiteral(self, ctx):
children = self.visitChildren(ctx)
return children[0]
def visitTerminal(self, node):
if node.symbol.type == STIXPatternParser.IntPosLiteral or node.symbol.type == STIXPatternParser.IntNegLiteral:
return IntegerConstant(node.getText())
elif node.symbol.type == STIXPatternParser.FloatPosLiteral or node.symbol.type == STIXPatternParser.FloatNegLiteral:
return FloatConstant(node.getText())
elif node.symbol.type == STIXPatternParser.HexLiteral:
return HexConstant(node.getText(), from_parse_tree=True)
elif node.symbol.type == STIXPatternParser.BinaryLiteral:
return BinaryConstant(node.getText(), from_parse_tree=True)
elif node.symbol.type == STIXPatternParser.StringLiteral:
return StringConstant(node.getText().strip('\''), from_parse_tree=True)
elif node.symbol.type == STIXPatternParser.BoolLiteral:
return BooleanConstant(node.getText())
elif node.symbol.type == STIXPatternParser.TimestampLiteral:
return TimestampConstant(node.getText())
else:
return node
def aggregateResult(self, aggregate, nextResult):
if aggregate:
aggregate.append(nextResult)
elif nextResult:
aggregate = [nextResult]
return aggregate
def create_pattern_object(pattern, module_suffix="", module_name=""):
"""
Validates a pattern against the STIX Pattern grammar. Error messages are
returned in a list. The test passed if the returned list is empty.
"""
start = ''
if isinstance(pattern, six.string_types):
start = pattern[:2]
pattern = InputStream(pattern)
if not start:
start = pattern.readline()[:2]
pattern.seek(0)
parseErrListener = STIXPatternErrorListener()
lexer = STIXPatternLexer(pattern)
# it always adds a console listener by default... remove it.
lexer.removeErrorListeners()
stream = CommonTokenStream(lexer)
parser = STIXPatternParser(stream)
parser.buildParseTrees = True
# it always adds a console listener by default... remove it.
parser.removeErrorListeners()
parser.addErrorListener(parseErrListener)
# To improve error messages, replace "<INVALID>" in the literal
# names with symbolic names. This is a hack, but seemed like
# the simplest workaround.
for i, lit_name in enumerate(parser.literalNames):
if lit_name == u"<INVALID>":
parser.literalNames[i] = parser.symbolicNames[i]
tree = parser.pattern()
builder = STIXPatternVisitorForSTIX2(module_suffix, module_name)
return builder.visit(tree)
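Usage sketch for the visitor entry point above (not part of the diff): parsing a pattern string yields the python-stix2 expression objects, which render back to the original pattern.

    from stix2.pattern_visitor import create_pattern_object

    expression = create_pattern_object(
        "[file:hashes.'SHA-256' = "
        "'aec070645fe53ee3b3763059376134f058cc337247c978add178b6ccdfb0019f']",
    )
    print(expression)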


@ -1,11 +1,12 @@
"""Classes to aid in working with the STIX 2 patterning language. """Classes to aid in working with the STIX 2 patterning language."""
"""
import base64 import base64
import binascii import binascii
import datetime import datetime
import re import re
import six
from .utils import parse_into_datetime from .utils import parse_into_datetime
@ -13,6 +14,14 @@ def escape_quotes_and_backslashes(s):
return s.replace(u'\\', u'\\\\').replace(u"'", u"\\'") return s.replace(u'\\', u'\\\\').replace(u"'", u"\\'")
def quote_if_needed(x):
if isinstance(x, six.string_types):
if x.find("-") != -1:
if not x.startswith("'"):
return "'" + x + "'"
return x
class _Constant(object): class _Constant(object):
pass pass
@ -23,11 +32,13 @@ class StringConstant(_Constant):
Args: Args:
value (str): string value value (str): string value
""" """
def __init__(self, value):
def __init__(self, value, from_parse_tree=False):
self.needs_to_be_quoted = not from_parse_tree
self.value = value self.value = value
def __str__(self): def __str__(self):
return "'%s'" % escape_quotes_and_backslashes(self.value) return "'%s'" % (escape_quotes_and_backslashes(self.value) if self.needs_to_be_quoted else self.value)
class TimestampConstant(_Constant): class TimestampConstant(_Constant):
@ -86,8 +97,8 @@ class BooleanConstant(_Constant):
self.value = value self.value = value
return return
trues = ['true', 't'] trues = ['true', 't', '1']
falses = ['false', 'f'] falses = ['false', 'f', '0']
try: try:
if value.lower() in trues: if value.lower() in trues:
self.value = True self.value = True
@ -143,7 +154,7 @@ class HashConstant(StringConstant):
vocab_key = _HASH_REGEX[key][1] vocab_key = _HASH_REGEX[key][1]
if not re.match(_HASH_REGEX[key][0], value): if not re.match(_HASH_REGEX[key][0], value):
raise ValueError("'%s' is not a valid %s hash" % (value, vocab_key)) raise ValueError("'%s' is not a valid %s hash" % (value, vocab_key))
self.value = value super(HashConstant, self).__init__(value)
class BinaryConstant(_Constant): class BinaryConstant(_Constant):
@ -152,7 +163,13 @@ class BinaryConstant(_Constant):
Args: Args:
value (str): base64 encoded string value value (str): base64 encoded string value
""" """
def __init__(self, value):
def __init__(self, value, from_parse_tree=False):
# support with or without a 'b'
if from_parse_tree:
m = re.match("^b'(.+)'$", value)
if m:
value = m.group(1)
try: try:
base64.b64decode(value) base64.b64decode(value)
self.value = value self.value = value
@ -169,10 +186,16 @@ class HexConstant(_Constant):
Args: Args:
value (str): hexadecimal value value (str): hexadecimal value
""" """
def __init__(self, value): def __init__(self, value, from_parse_tree=False):
if not re.match('^([a-fA-F0-9]{2})+$', value): # support with or without an 'h'
raise ValueError("must contain an even number of hexadecimal characters") if not from_parse_tree and re.match('^([a-fA-F0-9]{2})+$', value):
self.value = value self.value = value
else:
m = re.match("^h'(([a-fA-F0-9]{2})+)'$", value)
if m:
self.value = m.group(1)
else:
raise ValueError("must contain an even number of hexadecimal characters")
def __str__(self): def __str__(self):
return "h'%s'" % self.value return "h'%s'" % self.value
@ -185,10 +208,11 @@ class ListConstant(_Constant):
value (list): list of values value (list): list of values
""" """
def __init__(self, values): def __init__(self, values):
self.value = values # handle _Constants or make a _Constant
self.value = [x if isinstance(x, _Constant) else make_constant(x) for x in values]
def __str__(self): def __str__(self):
return "(" + ", ".join([("%s" % make_constant(x)) for x in self.value]) + ")" return "(" + ", ".join(["%s" % x for x in self.value]) + ")"
def make_constant(value): def make_constant(value):
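Sketch of the coercion added above (not part of the diff): plain Python values are wrapped via make_constant(), while existing _Constant instances pass through unchanged.

    from stix2.patterns import ListConstant, StringConstant

    ports = ListConstant([80, 443, StringConstant("8080")])
    print(ports)  # (80, 443, '8080')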
@ -229,7 +253,10 @@ class _ObjectPathComponent(object):
parse1 = component_name.split("[") parse1 = component_name.split("[")
return ListObjectPathComponent(parse1[0], parse1[1][:-1]) return ListObjectPathComponent(parse1[0], parse1[1][:-1])
else: else:
return BasicObjectPathComponent(component_name) return BasicObjectPathComponent(component_name, False)
def __str__(self):
return quote_if_needed(self.property_name)
class BasicObjectPathComponent(_ObjectPathComponent): class BasicObjectPathComponent(_ObjectPathComponent):
@ -243,14 +270,11 @@ class BasicObjectPathComponent(_ObjectPathComponent):
property_name (str): object property name property_name (str): object property name
is_key (bool): is dictionary key, default: False is_key (bool): is dictionary key, default: False
""" """
def __init__(self, property_name, is_key=False): def __init__(self, property_name, is_key):
self.property_name = property_name self.property_name = property_name
# TODO: set is_key to True if this component is a dictionary key # TODO: set is_key to True if this component is a dictionary key
# self.is_key = is_key # self.is_key = is_key
def __str__(self):
return self.property_name
class ListObjectPathComponent(_ObjectPathComponent): class ListObjectPathComponent(_ObjectPathComponent):
"""List object path component (for an observation or expression) """List object path component (for an observation or expression)
@ -264,7 +288,7 @@ class ListObjectPathComponent(_ObjectPathComponent):
self.index = index self.index = index
def __str__(self): def __str__(self):
return "%s[%s]" % (self.property_name, self.index) return "%s[%s]" % (quote_if_needed(self.property_name), self.index)
class ReferenceObjectPathComponent(_ObjectPathComponent): class ReferenceObjectPathComponent(_ObjectPathComponent):
@ -276,9 +300,6 @@ class ReferenceObjectPathComponent(_ObjectPathComponent):
def __init__(self, reference_property_name): def __init__(self, reference_property_name):
self.property_name = reference_property_name self.property_name = reference_property_name
def __str__(self):
return self.property_name
class ObjectPath(object): class ObjectPath(object):
"""Pattern operand object (property) path """Pattern operand object (property) path
@ -289,12 +310,14 @@ class ObjectPath(object):
""" """
def __init__(self, object_type_name, property_path): def __init__(self, object_type_name, property_path):
self.object_type_name = object_type_name self.object_type_name = object_type_name
self.property_path = [x if isinstance(x, _ObjectPathComponent) else self.property_path = [
_ObjectPathComponent.create_ObjectPathComponent(x) x if isinstance(x, _ObjectPathComponent) else
for x in property_path] _ObjectPathComponent.create_ObjectPathComponent(x)
for x in property_path
]
def __str__(self): def __str__(self):
return "%s:%s" % (self.object_type_name, ".".join(["%s" % x for x in self.property_path])) return "%s:%s" % (self.object_type_name, ".".join(["%s" % quote_if_needed(x) for x in self.property_path]))
def merge(self, other): def merge(self, other):
"""Extend the object property with that of the supplied object property path""" """Extend the object property with that of the supplied object property path"""


@ -1,8 +1,9 @@
"""Classes for representing properties of STIX Objects and Cyber Observables. """Classes for representing properties of STIX Objects and Cyber Observables."""
"""
import base64 import base64
import binascii import binascii
import collections import collections
import copy
import inspect import inspect
import re import re
import uuid import uuid
@ -11,19 +12,22 @@ from six import string_types, text_type
from stix2patterns.validator import run_validator from stix2patterns.validator import run_validator
from .base import _STIXBase from .base import _STIXBase
from .exceptions import DictionaryKeyError from .core import STIX2_OBJ_MAPS, parse, parse_observable
from .utils import _get_dict, parse_into_datetime from .exceptions import CustomContentError, DictionaryKeyError
from .utils import _get_dict, get_class_hierarchy_names, parse_into_datetime
# This uses the regular expression for a RFC 4122, Version 4 UUID. In the # This uses the regular expression for a RFC 4122, Version 4 UUID. In the
# 8-4-4-4-12 hexadecimal representation, the first hex digit of the third # 8-4-4-4-12 hexadecimal representation, the first hex digit of the third
# component must be a 4, and the first hex digit of the fourth component must be # component must be a 4, and the first hex digit of the fourth component
# 8, 9, a, or b (10xx bit pattern). # must be 8, 9, a, or b (10xx bit pattern).
ID_REGEX = re.compile("^[a-z0-9][a-z0-9-]+[a-z0-9]--" # object type ID_REGEX = re.compile(
"[0-9a-fA-F]{8}-" r"^[a-z0-9][a-z0-9-]+[a-z0-9]--" # object type
"[0-9a-fA-F]{4}-" "[0-9a-fA-F]{8}-"
"4[0-9a-fA-F]{3}-" "[0-9a-fA-F]{4}-"
"[89abAB][0-9a-fA-F]{3}-" "4[0-9a-fA-F]{3}-"
"[0-9a-fA-F]{12}$") "[89abAB][0-9a-fA-F]{3}-"
"[0-9a-fA-F]{12}$",
)
ID_REGEX_interoperability = re.compile("^[a-z0-9][a-z0-9-]+[a-z0-9]--" # object type ID_REGEX_interoperability = re.compile("^[a-z0-9][a-z0-9-]+[a-z0-9]--" # object type
"[0-9a-fA-F]{8}-" "[0-9a-fA-F]{8}-"
@ -44,14 +48,15 @@ class Property(object):
``__init__()``. ``__init__()``.
Args: Args:
required (bool): If ``True``, the property must be provided when creating an required (bool): If ``True``, the property must be provided when
object with that property. No default value exists for these properties. creating an object with that property. No default value exists for
(Default: ``False``) these properties. (Default: ``False``)
fixed: This provides a constant default value. Users are free to fixed: This provides a constant default value. Users are free to
provide this value explicitly when constructing an object (which allows provide this value explicitly when constructing an object (which
you to copy **all** values from an existing object to a new object), but allows you to copy **all** values from an existing object to a new
if the user provides a value other than the ``fixed`` value, it will raise object), but if the user provides a value other than the ``fixed``
an error. This is semantically equivalent to defining both: value, it will raise an error. This is semantically equivalent to
defining both:
- a ``clean()`` function that checks if the value matches the fixed - a ``clean()`` function that checks if the value matches the fixed
value, and value, and
@ -62,29 +67,31 @@ class Property(object):
- ``def clean(self, value) -> any:`` - ``def clean(self, value) -> any:``
- Return a value that is valid for this property. If ``value`` is not - Return a value that is valid for this property. If ``value`` is not
valid for this property, this will attempt to transform it first. If valid for this property, this will attempt to transform it first. If
``value`` is not valid and no such transformation is possible, it should ``value`` is not valid and no such transformation is possible, it
raise a ValueError. should raise a ValueError.
- ``def default(self):`` - ``def default(self):``
- provide a default value for this property. - provide a default value for this property.
- ``default()`` can return the special value ``NOW`` to use the current - ``default()`` can return the special value ``NOW`` to use the current
time. This is useful when several timestamps in the same object need time. This is useful when several timestamps in the same object
to use the same default value, so calling now() for each property-- need to use the same default value, so calling now() for each
likely several microseconds apart-- does not work. property-- likely several microseconds apart-- does not work.
Subclasses can instead provide a lambda function for ``default`` as a keyword Subclasses can instead provide a lambda function for ``default`` as a
argument. ``clean`` should not be provided as a lambda since lambdas cannot keyword argument. ``clean`` should not be provided as a lambda since
raise their own exceptions. lambdas cannot raise their own exceptions.
When instantiating Properties, ``required`` and ``default`` should not be
used together. ``default`` implies that the property is required in the
specification so this function will be used to supply a value if none is
provided. ``required`` means that the user must provide this; it is
required in the specification and we can't or don't want to create a
default value.
When instantiating Properties, ``required`` and ``default`` should not be used
together. ``default`` implies that the property is required in the specification
so this function will be used to supply a value if none is provided.
``required`` means that the user must provide this; it is required in the
specification and we can't or don't want to create a default value.
""" """
def _default_clean(self, value): def _default_clean(self, value):
if value != self._fixed_value: if value != self._fixed_value:
raise ValueError("must equal '{0}'.".format(self._fixed_value)) raise ValueError("must equal '{}'.".format(self._fixed_value))
return value return value
def __init__(self, required=False, fixed=None, default=None): def __init__(self, required=False, fixed=None, default=None):
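Minimal sketch of the clean()/default() contract described above; the property class and values are illustrative, not part of the library.

    from stix2.properties import Property

    class PortProperty(Property):
        """Illustrative property accepting TCP/UDP port numbers."""

        def clean(self, value):
            value = int(value)  # raises ValueError for non-numeric input
            if not 0 <= value <= 65535:
                raise ValueError("must be a port number between 0 and 65535.")
            return value

        def default(self):
            return 443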
@ -143,7 +150,7 @@ class ListProperty(Property):
if type(self.contained) is EmbeddedObjectProperty: if type(self.contained) is EmbeddedObjectProperty:
obj_type = self.contained.type obj_type = self.contained.type
elif type(self.contained).__name__ is 'STIXObjectProperty': elif type(self.contained).__name__ is "STIXObjectProperty":
# ^ this way of checking doesn't require a circular import # ^ this way of checking doesn't require a circular import
# valid is already an instance of a python-stix2 class; no need # valid is already an instance of a python-stix2 class; no need
# to turn it into a dictionary and then pass it to the class # to turn it into a dictionary and then pass it to the class
@ -191,7 +198,7 @@ class IDProperty(Property):
def clean(self, value): def clean(self, value):
if not value.startswith(self.required_prefix): if not value.startswith(self.required_prefix):
raise ValueError("must start with '{0}'.".format(self.required_prefix)) raise ValueError("must start with '{}'.".format(self.required_prefix))
if hasattr(self, 'interoperability') and self.interoperability: if hasattr(self, 'interoperability') and self.interoperability:
if not ID_REGEX_interoperability.match(value): if not ID_REGEX_interoperability.match(value):
raise ValueError(ERROR_INVALID_ID) raise ValueError(ERROR_INVALID_ID)
@ -206,21 +213,51 @@ class IDProperty(Property):
class IntegerProperty(Property): class IntegerProperty(Property):
def __init__(self, min=None, max=None, **kwargs):
self.min = min
self.max = max
super(IntegerProperty, self).__init__(**kwargs)
def clean(self, value): def clean(self, value):
try: try:
return int(value) value = int(value)
except Exception: except Exception:
raise ValueError("must be an integer.") raise ValueError("must be an integer.")
if self.min is not None and value < self.min:
msg = "minimum value is {}. received {}".format(self.min, value)
raise ValueError(msg)
if self.max is not None and value > self.max:
msg = "maximum value is {}. received {}".format(self.max, value)
raise ValueError(msg)
return value
class FloatProperty(Property): class FloatProperty(Property):
def __init__(self, min=None, max=None, **kwargs):
self.min = min
self.max = max
super(FloatProperty, self).__init__(**kwargs)
def clean(self, value): def clean(self, value):
try: try:
return float(value) value = float(value)
except Exception: except Exception:
raise ValueError("must be a float.") raise ValueError("must be a float.")
if self.min is not None and value < self.min:
msg = "minimum value is {}. received {}".format(self.min, value)
raise ValueError(msg)
if self.max is not None and value > self.max:
msg = "maximum value is {}. received {}".format(self.max, value)
raise ValueError(msg)
return value
class BooleanProperty(Property): class BooleanProperty(Property):
@ -228,8 +265,8 @@ class BooleanProperty(Property):
if isinstance(value, bool): if isinstance(value, bool):
return value return value
trues = ['true', 't'] trues = ['true', 't', '1']
falses = ['false', 'f'] falses = ['false', 'f', '0']
try: try:
if value.lower() in trues: if value.lower() in trues:
return True return True
@ -256,6 +293,10 @@ class TimestampProperty(Property):
class DictionaryProperty(Property): class DictionaryProperty(Property):
def __init__(self, spec_version='2.0', **kwargs):
self.spec_version = spec_version
super(DictionaryProperty, self).__init__(**kwargs)
def clean(self, value): def clean(self, value):
try: try:
dictified = _get_dict(value) dictified = _get_dict(value)
@ -263,35 +304,40 @@ class DictionaryProperty(Property):
raise ValueError("The dictionary property must contain a dictionary") raise ValueError("The dictionary property must contain a dictionary")
if dictified == {}: if dictified == {}:
raise ValueError("The dictionary property must contain a non-empty dictionary") raise ValueError("The dictionary property must contain a non-empty dictionary")
for k in dictified.keys(): for k in dictified.keys():
if len(k) < 3: if self.spec_version == '2.0':
raise DictionaryKeyError(k, "shorter than 3 characters") if len(k) < 3:
elif len(k) > 256: raise DictionaryKeyError(k, "shorter than 3 characters")
raise DictionaryKeyError(k, "longer than 256 characters") elif len(k) > 256:
if not re.match('^[a-zA-Z0-9_-]+$', k): raise DictionaryKeyError(k, "longer than 256 characters")
raise DictionaryKeyError(k, "contains characters other than" elif self.spec_version == '2.1':
"lowercase a-z, uppercase A-Z, " if len(k) > 250:
"numerals 0-9, hyphen (-), or " raise DictionaryKeyError(k, "longer than 250 characters")
"underscore (_)") if not re.match(r"^[a-zA-Z0-9_-]+$", k):
msg = (
"contains characters other than lowercase a-z, "
"uppercase A-Z, numerals 0-9, hyphen (-), or "
"underscore (_)"
)
raise DictionaryKeyError(k, msg)
return dictified return dictified
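A short sketch of how the key rules now diverge by spec version, assuming ``DictionaryProperty`` and the ``DictionaryKeyError`` exception are imported as in the surrounding module:
prop20 = DictionaryProperty(spec_version='2.0')
prop21 = DictionaryProperty(spec_version='2.1')
prop21.clean({'x': 'value'})      # accepted: 2.1 only enforces the 250-character maximum
try:
    prop20.clean({'x': 'value'})  # rejected: 2.0 keys must be 3-256 characters long
except DictionaryKeyError as err:
    print(err)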
HASHES_REGEX = { HASHES_REGEX = {
"MD5": ("^[a-fA-F0-9]{32}$", "MD5"), "MD5": (r"^[a-fA-F0-9]{32}$", "MD5"),
"MD6": ("^[a-fA-F0-9]{32}|[a-fA-F0-9]{40}|[a-fA-F0-9]{56}|[a-fA-F0-9]{64}|[a-fA-F0-9]{96}|[a-fA-F0-9]{128}$", "MD6"), "MD6": (r"^[a-fA-F0-9]{32}|[a-fA-F0-9]{40}|[a-fA-F0-9]{56}|[a-fA-F0-9]{64}|[a-fA-F0-9]{96}|[a-fA-F0-9]{128}$", "MD6"),
"RIPEMD160": ("^[a-fA-F0-9]{40}$", "RIPEMD-160"), "RIPEMD160": (r"^[a-fA-F0-9]{40}$", "RIPEMD-160"),
"SHA1": ("^[a-fA-F0-9]{40}$", "SHA-1"), "SHA1": (r"^[a-fA-F0-9]{40}$", "SHA-1"),
"SHA224": ("^[a-fA-F0-9]{56}$", "SHA-224"), "SHA224": (r"^[a-fA-F0-9]{56}$", "SHA-224"),
"SHA256": ("^[a-fA-F0-9]{64}$", "SHA-256"), "SHA256": (r"^[a-fA-F0-9]{64}$", "SHA-256"),
"SHA384": ("^[a-fA-F0-9]{96}$", "SHA-384"), "SHA384": (r"^[a-fA-F0-9]{96}$", "SHA-384"),
"SHA512": ("^[a-fA-F0-9]{128}$", "SHA-512"), "SHA512": (r"^[a-fA-F0-9]{128}$", "SHA-512"),
"SHA3224": ("^[a-fA-F0-9]{56}$", "SHA3-224"), "SHA3224": (r"^[a-fA-F0-9]{56}$", "SHA3-224"),
"SHA3256": ("^[a-fA-F0-9]{64}$", "SHA3-256"), "SHA3256": (r"^[a-fA-F0-9]{64}$", "SHA3-256"),
"SHA3384": ("^[a-fA-F0-9]{96}$", "SHA3-384"), "SHA3384": (r"^[a-fA-F0-9]{96}$", "SHA3-384"),
"SHA3512": ("^[a-fA-F0-9]{128}$", "SHA3-512"), "SHA3512": (r"^[a-fA-F0-9]{128}$", "SHA3-512"),
"SSDEEP": ("^[a-zA-Z0-9/+:.]{1,128}$", "ssdeep"), "SSDEEP": (r"^[a-zA-Z0-9/+:.]{1,128}$", "ssdeep"),
"WHIRLPOOL": ("^[a-fA-F0-9]{128}$", "WHIRLPOOL"), "WHIRLPOOL": (r"^[a-fA-F0-9]{128}$", "WHIRLPOOL"),
} }
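For illustration, a digest can be checked directly against this table; the ``HashesProperty.clean()`` change in the next hunk uses the second element of each tuple to rename keys to the spec vocabulary (for example ``SHA256`` becomes ``SHA-256``). The digest below is the well-known MD5 of empty input, used purely as sample data.
import re
digest = "d41d8cd98f00b204e9800998ecf8427e"
pattern, vocab_name = HASHES_REGEX["MD5"]
if re.match(pattern, digest):
    print("'{}' is a valid {} hash".format(digest, vocab_name))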
@ -304,7 +350,7 @@ class HashesProperty(DictionaryProperty):
if key in HASHES_REGEX: if key in HASHES_REGEX:
vocab_key = HASHES_REGEX[key][1] vocab_key = HASHES_REGEX[key][1]
if not re.match(HASHES_REGEX[key][0], v): if not re.match(HASHES_REGEX[key][0], v):
raise ValueError("'%s' is not a valid %s hash" % (v, vocab_key)) raise ValueError("'{0}' is not a valid {1} hash".format(v, vocab_key))
if k != vocab_key: if k != vocab_key:
clean_dict[vocab_key] = clean_dict[k] clean_dict[vocab_key] = clean_dict[k]
del clean_dict[k] del clean_dict[k]
@ -324,7 +370,7 @@ class BinaryProperty(Property):
class HexProperty(Property): class HexProperty(Property):
def clean(self, value): def clean(self, value):
if not re.match('^([a-fA-F0-9]{2})+$', value): if not re.match(r"^([a-fA-F0-9]{2})+$", value):
raise ValueError("must contain an even number of hexadecimal characters") raise ValueError("must contain an even number of hexadecimal characters")
return value return value
@ -344,7 +390,7 @@ class ReferenceProperty(Property):
value = str(value) value = str(value)
if self.type: if self.type:
if not value.startswith(self.type): if not value.startswith(self.type):
raise ValueError("must start with '{0}'.".format(self.type)) raise ValueError("must start with '{}'.".format(self.type))
if hasattr(self, 'interoperability') and self.interoperability: if hasattr(self, 'interoperability') and self.interoperability:
if not ID_REGEX_interoperability.match(value): if not ID_REGEX_interoperability.match(value):
raise ValueError(ERROR_INVALID_ID) raise ValueError(ERROR_INVALID_ID)
@ -354,7 +400,7 @@ class ReferenceProperty(Property):
return value return value
SELECTOR_REGEX = re.compile("^[a-z0-9_-]{3,250}(\\.(\\[\\d+\\]|[a-z0-9_-]{1,250}))*$") SELECTOR_REGEX = re.compile(r"^[a-z0-9_-]{3,250}(\.(\[\d+\]|[a-z0-9_-]{1,250}))*$")
class SelectorProperty(Property): class SelectorProperty(Property):
@ -384,7 +430,7 @@ class EmbeddedObjectProperty(Property):
if type(value) is dict: if type(value) is dict:
value = self.type(**value) value = self.type(**value)
elif not isinstance(value, self.type): elif not isinstance(value, self.type):
raise ValueError("must be of type %s." % self.type.__name__) raise ValueError("must be of type {}.".format(self.type.__name__))
return value return value
@ -399,7 +445,7 @@ class EnumProperty(StringProperty):
def clean(self, value): def clean(self, value):
value = super(EnumProperty, self).clean(value) value = super(EnumProperty, self).clean(value)
if value not in self.allowed: if value not in self.allowed:
raise ValueError("value '%s' is not valid for this enumeration." % value) raise ValueError("value '{}' is not valid for this enumeration.".format(value))
return self.string_type(value) return self.string_type(value)
@ -412,3 +458,127 @@ class PatternProperty(StringProperty):
raise ValueError(str(errors[0])) raise ValueError(str(errors[0]))
return self.string_type(value) return self.string_type(value)
class ObservableProperty(Property):
"""Property for holding Cyber Observable Objects.
"""
def __init__(self, spec_version='2.0', allow_custom=False, *args, **kwargs):
self.allow_custom = allow_custom
self.spec_version = spec_version
super(ObservableProperty, self).__init__(*args, **kwargs)
def clean(self, value):
try:
dictified = _get_dict(value)
# get deep copy since we are going to modify the dict and might
# modify the original dict as _get_dict() does not return new
# dict when passed a dict
dictified = copy.deepcopy(dictified)
except ValueError:
raise ValueError("The observable property must contain a dictionary")
if dictified == {}:
raise ValueError("The observable property must contain a non-empty dictionary")
valid_refs = dict((k, v['type']) for (k, v) in dictified.items())
for key, obj in dictified.items():
parsed_obj = parse_observable(
obj,
valid_refs,
allow_custom=self.allow_custom,
version=self.spec_version,
)
dictified[key] = parsed_obj
return dictified
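The ``valid_refs`` map built above is simply ``{key: type}`` for every observable in the container, and it is passed to ``parse_observable()`` for each entry. A minimal illustration with sample data:
observables = {
    "0": {"type": "file", "name": "foo.exe"},
    "1": {"type": "ipv4-addr", "value": "198.51.100.3"},
}
valid_refs = dict((k, v['type']) for (k, v) in observables.items())
print(valid_refs)   # {'0': 'file', '1': 'ipv4-addr'}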
class ExtensionsProperty(DictionaryProperty):
"""Property for representing extensions on Observable objects.
"""
def __init__(self, spec_version='2.0', allow_custom=False, enclosing_type=None, required=False):
self.allow_custom = allow_custom
self.enclosing_type = enclosing_type
super(ExtensionsProperty, self).__init__(spec_version=spec_version, required=required)
def clean(self, value):
try:
dictified = _get_dict(value)
# get deep copy since we are going to modify the dict and might
# modify the original dict as _get_dict() does not return new
# dict when passed a dict
dictified = copy.deepcopy(dictified)
except ValueError:
raise ValueError("The extensions property must contain a dictionary")
if dictified == {}:
raise ValueError("The extensions property must contain a non-empty dictionary")
v = 'v' + self.spec_version.replace('.', '')
specific_type_map = STIX2_OBJ_MAPS[v]['observable-extensions'].get(self.enclosing_type, {})
for key, subvalue in dictified.items():
if key in specific_type_map:
cls = specific_type_map[key]
if type(subvalue) is dict:
if self.allow_custom:
subvalue['allow_custom'] = True
dictified[key] = cls(**subvalue)
else:
dictified[key] = cls(**subvalue)
elif type(subvalue) is cls:
# If already an instance of an _Extension class, assume it's valid
dictified[key] = subvalue
else:
raise ValueError("Cannot determine extension type.")
else:
raise CustomContentError("Can't parse unknown extension type: {}".format(key))
return dictified
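A small note on the version-key lookup above, sketched with the values used in this module (``STIX2_OBJ_MAPS`` is the registry this file already imports):
spec_version = '2.1'
v = 'v' + spec_version.replace('.', '')   # -> 'v21'
# Registered extension classes for a 'file' observable would then come from
# STIX2_OBJ_MAPS[v]['observable-extensions'].get('file', {}); a key that is not
# registered there raises CustomContentError, as shown in clean() above.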
class STIXObjectProperty(Property):
def __init__(self, spec_version='2.0', allow_custom=False, interoperability=False, *args, **kwargs):
self.allow_custom = allow_custom
self.spec_version = spec_version
self.interoperability = interoperability
super(STIXObjectProperty, self).__init__(*args, **kwargs)
def clean(self, value):
# Any STIX Object (SDO, SRO, or Marking Definition) can be added to
# a bundle with no further checks.
if any(x in ('STIXDomainObject', 'STIXRelationshipObject', 'MarkingDefinition')
for x in get_class_hierarchy_names(value)):
# A simple "is this a spec version 2.1+ object" test. For now,
# limit 2.0 bundles to 2.0 objects. It's not possible yet to
# have validation co-constraints among properties, e.g. have
# validation here depend on the value of another property
# (spec_version). So this is a hack, and not technically spec-
# compliant.
if 'spec_version' in value and self.spec_version == '2.0':
raise ValueError(
"Spec version 2.0 bundles don't yet support "
"containing objects of a different spec "
"version.",
)
return value
try:
dictified = _get_dict(value)
except ValueError:
raise ValueError("This property may only contain a dictionary or object")
if dictified == {}:
raise ValueError("This property may only contain a non-empty dictionary or object")
if 'type' in dictified and dictified['type'] == 'bundle':
raise ValueError("This property may not contain a Bundle object")
if 'spec_version' in dictified and self.spec_version == '2.0':
# See above comment regarding spec_version.
raise ValueError(
"Spec version 2.0 bundles don't yet support "
"containing objects of a different spec version.",
)
parsed_obj = parse(dictified, allow_custom=self.allow_custom, interoperability=self.interoperability)
return parsed_obj
@ -1,379 +0,0 @@
import datetime
import pytest
import stix2
def test_create_comparison_expression():
exp = stix2.EqualityComparisonExpression("file:hashes.'SHA-256'",
stix2.HashConstant("aec070645fe53ee3b3763059376134f058cc337247c978add178b6ccdfb0019f", "SHA-256")) # noqa
assert str(exp) == "file:hashes.'SHA-256' = 'aec070645fe53ee3b3763059376134f058cc337247c978add178b6ccdfb0019f'"
def test_boolean_expression():
exp1 = stix2.MatchesComparisonExpression("email-message:from_ref.value",
stix2.StringConstant(".+\\@example\\.com$"))
exp2 = stix2.MatchesComparisonExpression("email-message:body_multipart[*].body_raw_ref.name",
stix2.StringConstant("^Final Report.+\\.exe$"))
exp = stix2.AndBooleanExpression([exp1, exp2])
assert str(exp) == "email-message:from_ref.value MATCHES '.+\\\\@example\\\\.com$' AND email-message:body_multipart[*].body_raw_ref.name MATCHES '^Final Report.+\\\\.exe$'" # noqa
def test_boolean_expression_with_parentheses():
exp1 = stix2.MatchesComparisonExpression(stix2.ObjectPath("email-message",
[stix2.ReferenceObjectPathComponent("from_ref"),
stix2.BasicObjectPathComponent("value")]),
stix2.StringConstant(".+\\@example\\.com$"))
exp2 = stix2.MatchesComparisonExpression("email-message:body_multipart[*].body_raw_ref.name",
stix2.StringConstant("^Final Report.+\\.exe$"))
exp = stix2.ParentheticalExpression(stix2.AndBooleanExpression([exp1, exp2]))
assert str(exp) == "(email-message:from_ref.value MATCHES '.+\\\\@example\\\\.com$' AND email-message:body_multipart[*].body_raw_ref.name MATCHES '^Final Report.+\\\\.exe$')" # noqa
def test_hash_followed_by_registryKey_expression_python_constant():
hash_exp = stix2.EqualityComparisonExpression("file:hashes.MD5",
stix2.HashConstant("79054025255fb1a26e4bc422aef54eb4", "MD5"))
o_exp1 = stix2.ObservationExpression(hash_exp)
reg_exp = stix2.EqualityComparisonExpression(stix2.ObjectPath("windows-registry-key", ["key"]),
stix2.StringConstant("HKEY_LOCAL_MACHINE\\foo\\bar"))
o_exp2 = stix2.ObservationExpression(reg_exp)
fb_exp = stix2.FollowedByObservationExpression([o_exp1, o_exp2])
para_exp = stix2.ParentheticalExpression(fb_exp)
qual_exp = stix2.WithinQualifier(300)
exp = stix2.QualifiedObservationExpression(para_exp, qual_exp)
assert str(exp) == "([file:hashes.MD5 = '79054025255fb1a26e4bc422aef54eb4'] FOLLOWEDBY [windows-registry-key:key = 'HKEY_LOCAL_MACHINE\\\\foo\\\\bar']) WITHIN 300 SECONDS" # noqa
def test_hash_followed_by_registryKey_expression():
hash_exp = stix2.EqualityComparisonExpression("file:hashes.MD5",
stix2.HashConstant("79054025255fb1a26e4bc422aef54eb4", "MD5"))
o_exp1 = stix2.ObservationExpression(hash_exp)
reg_exp = stix2.EqualityComparisonExpression(stix2.ObjectPath("windows-registry-key", ["key"]),
stix2.StringConstant("HKEY_LOCAL_MACHINE\\foo\\bar"))
o_exp2 = stix2.ObservationExpression(reg_exp)
fb_exp = stix2.FollowedByObservationExpression([o_exp1, o_exp2])
para_exp = stix2.ParentheticalExpression(fb_exp)
qual_exp = stix2.WithinQualifier(stix2.IntegerConstant(300))
exp = stix2.QualifiedObservationExpression(para_exp, qual_exp)
assert str(exp) == "([file:hashes.MD5 = '79054025255fb1a26e4bc422aef54eb4'] FOLLOWEDBY [windows-registry-key:key = 'HKEY_LOCAL_MACHINE\\\\foo\\\\bar']) WITHIN 300 SECONDS" # noqa
def test_file_observable_expression():
exp1 = stix2.EqualityComparisonExpression("file:hashes.'SHA-256'",
stix2.HashConstant(
"aec070645fe53ee3b3763059376134f058cc337247c978add178b6ccdfb0019f",
'SHA-256'))
exp2 = stix2.EqualityComparisonExpression("file:mime_type", stix2.StringConstant("application/x-pdf"))
bool_exp = stix2.ObservationExpression(stix2.AndBooleanExpression([exp1, exp2]))
assert str(bool_exp) == "[file:hashes.'SHA-256' = 'aec070645fe53ee3b3763059376134f058cc337247c978add178b6ccdfb0019f' AND file:mime_type = 'application/x-pdf']" # noqa
@pytest.mark.parametrize("observation_class, op", [
(stix2.AndObservationExpression, 'AND'),
(stix2.OrObservationExpression, 'OR'),
])
def test_multiple_file_observable_expression(observation_class, op):
exp1 = stix2.EqualityComparisonExpression("file:hashes.'SHA-256'",
stix2.HashConstant(
"bf07a7fbb825fc0aae7bf4a1177b2b31fcf8a3feeaf7092761e18c859ee52a9c",
'SHA-256'))
exp2 = stix2.EqualityComparisonExpression("file:hashes.MD5",
stix2.HashConstant("cead3f77f6cda6ec00f57d76c9a6879f", "MD5"))
bool1_exp = stix2.OrBooleanExpression([exp1, exp2])
exp3 = stix2.EqualityComparisonExpression("file:hashes.'SHA-256'",
stix2.HashConstant(
"aec070645fe53ee3b3763059376134f058cc337247c978add178b6ccdfb0019f",
'SHA-256'))
op1_exp = stix2.ObservationExpression(bool1_exp)
op2_exp = stix2.ObservationExpression(exp3)
exp = observation_class([op1_exp, op2_exp])
assert str(exp) == "[file:hashes.'SHA-256' = 'bf07a7fbb825fc0aae7bf4a1177b2b31fcf8a3feeaf7092761e18c859ee52a9c' OR file:hashes.MD5 = 'cead3f77f6cda6ec00f57d76c9a6879f'] {} [file:hashes.'SHA-256' = 'aec070645fe53ee3b3763059376134f058cc337247c978add178b6ccdfb0019f']".format(op) # noqa
def test_root_types():
ast = stix2.ObservationExpression(
stix2.AndBooleanExpression(
[stix2.ParentheticalExpression(
stix2.OrBooleanExpression([
stix2.EqualityComparisonExpression("a:b", stix2.StringConstant("1")),
stix2.EqualityComparisonExpression("b:c", stix2.StringConstant("2"))])),
stix2.EqualityComparisonExpression(u"b:d", stix2.StringConstant("3"))]))
assert str(ast) == "[(a:b = '1' OR b:c = '2') AND b:d = '3']"
def test_artifact_payload():
exp1 = stix2.EqualityComparisonExpression("artifact:mime_type",
"application/vnd.tcpdump.pcap")
exp2 = stix2.MatchesComparisonExpression("artifact:payload_bin",
stix2.StringConstant("\\xd4\\xc3\\xb2\\xa1\\x02\\x00\\x04\\x00"))
and_exp = stix2.ObservationExpression(stix2.AndBooleanExpression([exp1, exp2]))
assert str(and_exp) == "[artifact:mime_type = 'application/vnd.tcpdump.pcap' AND artifact:payload_bin MATCHES '\\\\xd4\\\\xc3\\\\xb2\\\\xa1\\\\x02\\\\x00\\\\x04\\\\x00']" # noqa
def test_greater_than_python_constant():
exp1 = stix2.GreaterThanComparisonExpression("file:extensions.windows-pebinary-ext.sections[*].entropy", 7.0)
exp = stix2.ObservationExpression(exp1)
assert str(exp) == "[file:extensions.windows-pebinary-ext.sections[*].entropy > 7.0]"
def test_greater_than():
exp1 = stix2.GreaterThanComparisonExpression("file:extensions.windows-pebinary-ext.sections[*].entropy",
stix2.FloatConstant(7.0))
exp = stix2.ObservationExpression(exp1)
assert str(exp) == "[file:extensions.windows-pebinary-ext.sections[*].entropy > 7.0]"
def test_less_than():
exp = stix2.LessThanComparisonExpression("file:size", 1024)
assert str(exp) == "file:size < 1024"
def test_greater_than_or_equal():
exp = stix2.GreaterThanEqualComparisonExpression("file:size",
1024)
assert str(exp) == "file:size >= 1024"
def test_less_than_or_equal():
exp = stix2.LessThanEqualComparisonExpression("file:size",
1024)
assert str(exp) == "file:size <= 1024"
def test_not():
exp = stix2.LessThanComparisonExpression("file:size",
1024,
negated=True)
assert str(exp) == "file:size NOT < 1024"
def test_and_observable_expression():
exp1 = stix2.AndBooleanExpression([stix2.EqualityComparisonExpression("user-account:account_type",
"unix"),
stix2.EqualityComparisonExpression("user-account:user_id",
stix2.StringConstant("1007")),
stix2.EqualityComparisonExpression("user-account:account_login",
"Peter")])
exp2 = stix2.AndBooleanExpression([stix2.EqualityComparisonExpression("user-account:account_type",
"unix"),
stix2.EqualityComparisonExpression("user-account:user_id",
stix2.StringConstant("1008")),
stix2.EqualityComparisonExpression("user-account:account_login",
"Paul")])
exp3 = stix2.AndBooleanExpression([stix2.EqualityComparisonExpression("user-account:account_type",
"unix"),
stix2.EqualityComparisonExpression("user-account:user_id",
stix2.StringConstant("1009")),
stix2.EqualityComparisonExpression("user-account:account_login",
"Mary")])
exp = stix2.AndObservationExpression([stix2.ObservationExpression(exp1),
stix2.ObservationExpression(exp2),
stix2.ObservationExpression(exp3)])
assert str(exp) == "[user-account:account_type = 'unix' AND user-account:user_id = '1007' AND user-account:account_login = 'Peter'] AND [user-account:account_type = 'unix' AND user-account:user_id = '1008' AND user-account:account_login = 'Paul'] AND [user-account:account_type = 'unix' AND user-account:user_id = '1009' AND user-account:account_login = 'Mary']" # noqa
def test_invalid_and_observable_expression():
with pytest.raises(ValueError) as excinfo:
stix2.AndBooleanExpression([stix2.EqualityComparisonExpression("user-account:display_name",
"admin"),
stix2.EqualityComparisonExpression("email-addr:display_name",
stix2.StringConstant("admin"))])
assert "All operands to an 'AND' expression must have the same object type" in str(excinfo)
def test_hex():
exp_and = stix2.AndBooleanExpression([stix2.EqualityComparisonExpression("file:mime_type",
"image/bmp"),
stix2.EqualityComparisonExpression("file:magic_number_hex",
stix2.HexConstant("ffd8"))])
exp = stix2.ObservationExpression(exp_and)
assert str(exp) == "[file:mime_type = 'image/bmp' AND file:magic_number_hex = h'ffd8']"
def test_multiple_qualifiers():
exp_and = stix2.AndBooleanExpression([stix2.EqualityComparisonExpression("network-traffic:dst_ref.type",
"domain-name"),
stix2.EqualityComparisonExpression("network-traffic:dst_ref.value",
"example.com")])
exp_ob = stix2.ObservationExpression(exp_and)
qual_rep = stix2.RepeatQualifier(5)
qual_within = stix2.WithinQualifier(stix2.IntegerConstant(1800))
exp = stix2.QualifiedObservationExpression(stix2.QualifiedObservationExpression(exp_ob, qual_rep), qual_within)
assert str(exp) == "[network-traffic:dst_ref.type = 'domain-name' AND network-traffic:dst_ref.value = 'example.com'] REPEATS 5 TIMES WITHIN 1800 SECONDS" # noqa
def test_set_op():
exp = stix2.ObservationExpression(stix2.IsSubsetComparisonExpression("network-traffic:dst_ref.value",
"2001:0db8:dead:beef:0000:0000:0000:0000/64"))
assert str(exp) == "[network-traffic:dst_ref.value ISSUBSET '2001:0db8:dead:beef:0000:0000:0000:0000/64']"
def test_timestamp():
ts = stix2.TimestampConstant('2014-01-13T07:03:17Z')
assert str(ts) == "t'2014-01-13T07:03:17Z'"
def test_boolean():
exp = stix2.EqualityComparisonExpression("email-message:is_multipart",
True)
assert str(exp) == "email-message:is_multipart = true"
def test_binary():
const = stix2.BinaryConstant("dGhpcyBpcyBhIHRlc3Q=")
exp = stix2.EqualityComparisonExpression("artifact:payload_bin",
const)
assert str(exp) == "artifact:payload_bin = b'dGhpcyBpcyBhIHRlc3Q='"
def test_list():
exp = stix2.InComparisonExpression("process:name",
['proccy', 'proximus', 'badproc'])
assert str(exp) == "process:name IN ('proccy', 'proximus', 'badproc')"
def test_list2():
# alternate way to construct an "IN" Comparison Expression
exp = stix2.EqualityComparisonExpression("process:name",
['proccy', 'proximus', 'badproc'])
assert str(exp) == "process:name IN ('proccy', 'proximus', 'badproc')"
def test_invalid_constant_type():
with pytest.raises(ValueError) as excinfo:
stix2.EqualityComparisonExpression("artifact:payload_bin",
{'foo': 'bar'})
assert 'Unable to create a constant' in str(excinfo)
def test_invalid_integer_constant():
with pytest.raises(ValueError) as excinfo:
stix2.IntegerConstant('foo')
assert 'must be an integer' in str(excinfo)
def test_invalid_timestamp_constant():
with pytest.raises(ValueError) as excinfo:
stix2.TimestampConstant('foo')
assert 'Must be a datetime object or timestamp string' in str(excinfo)
def test_invalid_float_constant():
with pytest.raises(ValueError) as excinfo:
stix2.FloatConstant('foo')
assert 'must be a float' in str(excinfo)
@pytest.mark.parametrize("data, result", [
(True, True),
(False, False),
('True', True),
('False', False),
('true', True),
('false', False),
('t', True),
('f', False),
('T', True),
('F', False),
(1, True),
(0, False),
])
def test_boolean_constant(data, result):
boolean = stix2.BooleanConstant(data)
assert boolean.value == result
def test_invalid_boolean_constant():
with pytest.raises(ValueError) as excinfo:
stix2.BooleanConstant('foo')
assert 'must be a boolean' in str(excinfo)
@pytest.mark.parametrize("hashtype, data", [
('MD5', 'zzz'),
('ssdeep', 'zzz=='),
])
def test_invalid_hash_constant(hashtype, data):
with pytest.raises(ValueError) as excinfo:
stix2.HashConstant(data, hashtype)
assert 'is not a valid {} hash'.format(hashtype) in str(excinfo)
def test_invalid_hex_constant():
with pytest.raises(ValueError) as excinfo:
stix2.HexConstant('mm')
assert "must contain an even number of hexadecimal characters" in str(excinfo)
def test_invalid_binary_constant():
with pytest.raises(ValueError) as excinfo:
stix2.BinaryConstant('foo')
assert 'must contain a base64' in str(excinfo)
def test_escape_quotes_and_backslashes():
exp = stix2.MatchesComparisonExpression("file:name",
"^Final Report.+\\.exe$")
assert str(exp) == "file:name MATCHES '^Final Report.+\\\\.exe$'"
def test_like():
exp = stix2.LikeComparisonExpression("directory:path",
"C:\\Windows\\%\\foo")
assert str(exp) == "directory:path LIKE 'C:\\\\Windows\\\\%\\\\foo'"
def test_issuperset():
exp = stix2.IsSupersetComparisonExpression("ipv4-addr:value",
"198.51.100.0/24")
assert str(exp) == "ipv4-addr:value ISSUPERSET '198.51.100.0/24'"
def test_repeat_qualifier():
qual = stix2.RepeatQualifier(stix2.IntegerConstant(5))
assert str(qual) == 'REPEATS 5 TIMES'
def test_invalid_repeat_qualifier():
with pytest.raises(ValueError) as excinfo:
stix2.RepeatQualifier('foo')
assert 'is not a valid argument for a Repeat Qualifier' in str(excinfo)
def test_invalid_within_qualifier():
with pytest.raises(ValueError) as excinfo:
stix2.WithinQualifier('foo')
assert 'is not a valid argument for a Within Qualifier' in str(excinfo)
def test_startstop_qualifier():
qual = stix2.StartStopQualifier(stix2.TimestampConstant('2016-06-01T00:00:00Z'),
datetime.datetime(2017, 3, 12, 8, 30, 0))
assert str(qual) == "START t'2016-06-01T00:00:00Z' STOP t'2017-03-12T08:30:00Z'"
qual2 = stix2.StartStopQualifier(datetime.date(2016, 6, 1),
stix2.TimestampConstant('2016-07-01T00:00:00Z'))
assert str(qual2) == "START t'2016-06-01T00:00:00Z' STOP t'2016-07-01T00:00:00Z'"
def test_invalid_startstop_qualifier():
with pytest.raises(ValueError) as excinfo:
stix2.StartStopQualifier('foo',
stix2.TimestampConstant('2016-06-01T00:00:00Z'))
assert 'is not a valid argument for a Start/Stop Qualifier' in str(excinfo)
with pytest.raises(ValueError) as excinfo:
stix2.StartStopQualifier(datetime.date(2016, 6, 1),
'foo')
assert 'is not a valid argument for a Start/Stop Qualifier' in str(excinfo)
def test_make_constant_already_a_constant():
str_const = stix2.StringConstant('Foo')
result = stix2.patterns.make_constant(str_const)
assert result is str_const
@ -1,210 +0,0 @@
# -*- coding: utf-8 -*-
import datetime as dt
from io import StringIO
import pytest
import pytz
import stix2.utils
amsterdam = pytz.timezone('Europe/Amsterdam')
eastern = pytz.timezone('US/Eastern')
@pytest.mark.parametrize('dttm, timestamp', [
(dt.datetime(2017, 1, 1, tzinfo=pytz.utc), '2017-01-01T00:00:00Z'),
(amsterdam.localize(dt.datetime(2017, 1, 1)), '2016-12-31T23:00:00Z'),
(eastern.localize(dt.datetime(2017, 1, 1, 12, 34, 56)), '2017-01-01T17:34:56Z'),
(eastern.localize(dt.datetime(2017, 7, 1)), '2017-07-01T04:00:00Z'),
(dt.datetime(2017, 7, 1), '2017-07-01T00:00:00Z'),
(dt.datetime(2017, 7, 1, 0, 0, 0, 1), '2017-07-01T00:00:00.000001Z'),
(stix2.utils.STIXdatetime(2017, 7, 1, 0, 0, 0, 1, precision='millisecond'), '2017-07-01T00:00:00.000Z'),
(stix2.utils.STIXdatetime(2017, 7, 1, 0, 0, 0, 1, precision='second'), '2017-07-01T00:00:00Z'),
])
def test_timestamp_formatting(dttm, timestamp):
assert stix2.utils.format_datetime(dttm) == timestamp
@pytest.mark.parametrize('timestamp, dttm', [
(dt.datetime(2017, 1, 1, 0, tzinfo=pytz.utc), dt.datetime(2017, 1, 1, 0, 0, 0, tzinfo=pytz.utc)),
(dt.date(2017, 1, 1), dt.datetime(2017, 1, 1, 0, 0, 0, tzinfo=pytz.utc)),
('2017-01-01T00:00:00Z', dt.datetime(2017, 1, 1, 0, 0, 0, tzinfo=pytz.utc)),
('2017-01-01T02:00:00+2:00', dt.datetime(2017, 1, 1, 0, 0, 0, tzinfo=pytz.utc)),
('2017-01-01T00:00:00', dt.datetime(2017, 1, 1, 0, 0, 0, tzinfo=pytz.utc)),
])
def test_parse_datetime(timestamp, dttm):
assert stix2.utils.parse_into_datetime(timestamp) == dttm
@pytest.mark.parametrize('timestamp, dttm, precision', [
('2017-01-01T01:02:03.000001', dt.datetime(2017, 1, 1, 1, 2, 3, 0, tzinfo=pytz.utc), 'millisecond'),
('2017-01-01T01:02:03.001', dt.datetime(2017, 1, 1, 1, 2, 3, 1000, tzinfo=pytz.utc), 'millisecond'),
('2017-01-01T01:02:03.1', dt.datetime(2017, 1, 1, 1, 2, 3, 100000, tzinfo=pytz.utc), 'millisecond'),
('2017-01-01T01:02:03.45', dt.datetime(2017, 1, 1, 1, 2, 3, 450000, tzinfo=pytz.utc), 'millisecond'),
('2017-01-01T01:02:03.45', dt.datetime(2017, 1, 1, 1, 2, 3, tzinfo=pytz.utc), 'second'),
])
def test_parse_datetime_precision(timestamp, dttm, precision):
assert stix2.utils.parse_into_datetime(timestamp, precision) == dttm
@pytest.mark.parametrize('ts', [
'foobar',
1,
])
def test_parse_datetime_invalid(ts):
with pytest.raises(ValueError):
stix2.utils.parse_into_datetime('foobar')
@pytest.mark.parametrize('data', [
{"a": 1},
'{"a": 1}',
StringIO(u'{"a": 1}'),
[("a", 1,)],
])
def test_get_dict(data):
assert stix2.utils._get_dict(data)
@pytest.mark.parametrize('data', [
1,
[1],
['a', 1],
"foobar",
])
def test_get_dict_invalid(data):
with pytest.raises(ValueError):
stix2.utils._get_dict(data)
@pytest.mark.parametrize('stix_id, type', [
('malware--d69c8146-ab35-4d50-8382-6fc80e641d43', 'malware'),
('intrusion-set--899ce53f-13a0-479b-a0e4-67d46e241542', 'intrusion-set')
])
def test_get_type_from_id(stix_id, type):
assert stix2.utils.get_type_from_id(stix_id) == type
def test_deduplicate(stix_objs1):
unique = stix2.utils.deduplicate(stix_objs1)
# Only 3 objects are unique
# 2 id's vary
# 2 modified times vary for a particular id
assert len(unique) == 3
ids = [obj['id'] for obj in unique]
mods = [obj['modified'] for obj in unique]
assert "indicator--00000000-0000-4000-8000-000000000001" in ids
assert "indicator--00000000-0000-4000-8000-000000000001" in ids
assert "2017-01-27T13:49:53.935Z" in mods
assert "2017-01-27T13:49:53.936Z" in mods
@pytest.mark.parametrize('object, tuple_to_find, expected_index', [
(stix2.ObservedData(
id="observed-data--b67d30ff-02ac-498a-92f9-32f845f448cf",
created_by_ref="identity--f431f809-377b-45e0-aa1c-6a4751cae5ff",
created="2016-04-06T19:58:16.000Z",
modified="2016-04-06T19:58:16.000Z",
first_observed="2015-12-21T19:00:00Z",
last_observed="2015-12-21T19:00:00Z",
number_observed=50,
objects={
"0": {
"name": "foo.exe",
"type": "file"
},
"1": {
"type": "ipv4-addr",
"value": "198.51.100.3"
},
"2": {
"type": "network-traffic",
"src_ref": "1",
"protocols": [
"tcp",
"http"
],
"extensions": {
"http-request-ext": {
"request_method": "get",
"request_value": "/download.html",
"request_version": "http/1.1",
"request_header": {
"Accept-Encoding": "gzip,deflate",
"User-Agent": "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.6) Gecko/20040113",
"Host": "www.example.com"
}
}
}
}
},
), ('1', {"type": "ipv4-addr", "value": "198.51.100.3"}), 1),
({
"type": "x-example",
"id": "x-example--d5413db2-c26c-42e0-b0e0-ec800a310bfb",
"created": "2018-06-11T01:25:22.063Z",
"modified": "2018-06-11T01:25:22.063Z",
"dictionary": {
"key": {
"key_one": "value",
"key_two": "value"
}
}
}, ('key', {'key_one': 'value', 'key_two': 'value'}), 0),
({
"type": "language-content",
"id": "language-content--b86bd89f-98bb-4fa9-8cb2-9ad421da981d",
"created": "2017-02-08T21:31:22.007Z",
"modified": "2017-02-08T21:31:22.007Z",
"object_ref": "campaign--12a111f0-b824-4baf-a224-83b80237a094",
"object_modified": "2017-02-08T21:31:22.007Z",
"contents": {
"de": {
"name": "Bank Angriff 1",
"description": "Weitere Informationen über Banküberfall"
},
"fr": {
"name": "Attaque Bank 1",
"description": "Plus d'informations sur la crise bancaire"
}
}
}, ('fr', {"name": "Attaque Bank 1", "description": "Plus d'informations sur la crise bancaire"}), 1)
])
def test_find_property_index(object, tuple_to_find, expected_index):
assert stix2.utils.find_property_index(
object,
*tuple_to_find
) == expected_index
@pytest.mark.parametrize('dict_value, tuple_to_find, expected_index', [
({
"contents": {
"de": {
"name": "Bank Angriff 1",
"description": "Weitere Informationen über Banküberfall"
},
"fr": {
"name": "Attaque Bank 1",
"description": "Plus d'informations sur la crise bancaire"
},
"es": {
"name": "Ataque al Banco",
"description": "Mas informacion sobre el ataque al banco"
}
}
}, ('es', {"name": "Ataque al Banco", "description": "Mas informacion sobre el ataque al banco"}), 1), # Sorted alphabetically
({
'my_list': [
{"key_one": 1},
{"key_two": 2}
]
}, ('key_one', 1), 0)
])
def test_iterate_over_values(dict_value, tuple_to_find, expected_index):
assert stix2.utils._find_property_in_seq(dict_value.values(), *tuple_to_find) == expected_index
@ -4,8 +4,9 @@ import pytest
import stix2 import stix2
from .constants import (FAKE_TIME, INDICATOR_KWARGS, MALWARE_KWARGS, from .constants import (
RELATIONSHIP_KWARGS) FAKE_TIME, INDICATOR_KWARGS, MALWARE_KWARGS, RELATIONSHIP_KWARGS,
)
# Inspired by: http://stackoverflow.com/a/24006251 # Inspired by: http://stackoverflow.com/a/24006251
@ -35,17 +36,17 @@ def uuid4(monkeypatch):
@pytest.fixture @pytest.fixture
def indicator(uuid4, clock): def indicator(uuid4, clock):
return stix2.Indicator(**INDICATOR_KWARGS) return stix2.v20.Indicator(**INDICATOR_KWARGS)
@pytest.fixture @pytest.fixture
def malware(uuid4, clock): def malware(uuid4, clock):
return stix2.Malware(**MALWARE_KWARGS) return stix2.v20.Malware(**MALWARE_KWARGS)
@pytest.fixture @pytest.fixture
def relationship(uuid4, clock): def relationship(uuid4, clock):
return stix2.Relationship(**RELATIONSHIP_KWARGS) return stix2.v20.Relationship(**RELATIONSHIP_KWARGS)
@pytest.fixture @pytest.fixture
@ -54,61 +55,61 @@ def stix_objs1():
"created": "2017-01-27T13:49:53.935Z", "created": "2017-01-27T13:49:53.935Z",
"id": "indicator--00000000-0000-4000-8000-000000000001", "id": "indicator--00000000-0000-4000-8000-000000000001",
"labels": [ "labels": [
"url-watchlist" "url-watchlist",
], ],
"modified": "2017-01-27T13:49:53.935Z", "modified": "2017-01-27T13:49:53.935Z",
"name": "Malicious site hosting downloader", "name": "Malicious site hosting downloader",
"pattern": "[url:value = 'http://x4z9arb.cn/4712']", "pattern": "[url:value = 'http://x4z9arb.cn/4712']",
"type": "indicator", "type": "indicator",
"valid_from": "2017-01-27T13:49:53.935382Z" "valid_from": "2017-01-27T13:49:53.935382Z",
} }
ind2 = { ind2 = {
"created": "2017-01-27T13:49:53.935Z", "created": "2017-01-27T13:49:53.935Z",
"id": "indicator--00000000-0000-4000-8000-000000000001", "id": "indicator--00000000-0000-4000-8000-000000000001",
"labels": [ "labels": [
"url-watchlist" "url-watchlist",
], ],
"modified": "2017-01-27T13:49:53.935Z", "modified": "2017-01-27T13:49:53.935Z",
"name": "Malicious site hosting downloader", "name": "Malicious site hosting downloader",
"pattern": "[url:value = 'http://x4z9arb.cn/4712']", "pattern": "[url:value = 'http://x4z9arb.cn/4712']",
"type": "indicator", "type": "indicator",
"valid_from": "2017-01-27T13:49:53.935382Z" "valid_from": "2017-01-27T13:49:53.935382Z",
} }
ind3 = { ind3 = {
"created": "2017-01-27T13:49:53.935Z", "created": "2017-01-27T13:49:53.935Z",
"id": "indicator--00000000-0000-4000-8000-000000000001", "id": "indicator--00000000-0000-4000-8000-000000000001",
"labels": [ "labels": [
"url-watchlist" "url-watchlist",
], ],
"modified": "2017-01-27T13:49:53.936Z", "modified": "2017-01-27T13:49:53.936Z",
"name": "Malicious site hosting downloader", "name": "Malicious site hosting downloader",
"pattern": "[url:value = 'http://x4z9arb.cn/4712']", "pattern": "[url:value = 'http://x4z9arb.cn/4712']",
"type": "indicator", "type": "indicator",
"valid_from": "2017-01-27T13:49:53.935382Z" "valid_from": "2017-01-27T13:49:53.935382Z",
} }
ind4 = { ind4 = {
"created": "2017-01-27T13:49:53.935Z", "created": "2017-01-27T13:49:53.935Z",
"id": "indicator--00000000-0000-4000-8000-000000000002", "id": "indicator--00000000-0000-4000-8000-000000000002",
"labels": [ "labels": [
"url-watchlist" "url-watchlist",
], ],
"modified": "2017-01-27T13:49:53.935Z", "modified": "2017-01-27T13:49:53.935Z",
"name": "Malicious site hosting downloader", "name": "Malicious site hosting downloader",
"pattern": "[url:value = 'http://x4z9arb.cn/4712']", "pattern": "[url:value = 'http://x4z9arb.cn/4712']",
"type": "indicator", "type": "indicator",
"valid_from": "2017-01-27T13:49:53.935382Z" "valid_from": "2017-01-27T13:49:53.935382Z",
} }
ind5 = { ind5 = {
"created": "2017-01-27T13:49:53.935Z", "created": "2017-01-27T13:49:53.935Z",
"id": "indicator--00000000-0000-4000-8000-000000000002", "id": "indicator--00000000-0000-4000-8000-000000000002",
"labels": [ "labels": [
"url-watchlist" "url-watchlist",
], ],
"modified": "2017-01-27T13:49:53.935Z", "modified": "2017-01-27T13:49:53.935Z",
"name": "Malicious site hosting downloader", "name": "Malicious site hosting downloader",
"pattern": "[url:value = 'http://x4z9arb.cn/4712']", "pattern": "[url:value = 'http://x4z9arb.cn/4712']",
"type": "indicator", "type": "indicator",
"valid_from": "2017-01-27T13:49:53.935382Z" "valid_from": "2017-01-27T13:49:53.935382Z",
} }
return [ind1, ind2, ind3, ind4, ind5] return [ind1, ind2, ind3, ind4, ind5]
@ -119,41 +120,41 @@ def stix_objs2():
"created": "2017-01-27T13:49:53.935Z", "created": "2017-01-27T13:49:53.935Z",
"id": "indicator--00000000-0000-4000-8000-000000000001", "id": "indicator--00000000-0000-4000-8000-000000000001",
"labels": [ "labels": [
"url-watchlist" "url-watchlist",
], ],
"modified": "2017-01-31T13:49:53.935Z", "modified": "2017-01-31T13:49:53.935Z",
"name": "Malicious site hosting downloader", "name": "Malicious site hosting downloader",
"pattern": "[url:value = 'http://x4z9arb.cn/4712']", "pattern": "[url:value = 'http://x4z9arb.cn/4712']",
"type": "indicator", "type": "indicator",
"valid_from": "2017-01-27T13:49:53.935382Z" "valid_from": "2017-01-27T13:49:53.935382Z",
} }
ind7 = { ind7 = {
"created": "2017-01-27T13:49:53.935Z", "created": "2017-01-27T13:49:53.935Z",
"id": "indicator--00000000-0000-4000-8000-000000000002", "id": "indicator--00000000-0000-4000-8000-000000000002",
"labels": [ "labels": [
"url-watchlist" "url-watchlist",
], ],
"modified": "2017-01-27T13:49:53.935Z", "modified": "2017-01-27T13:49:53.935Z",
"name": "Malicious site hosting downloader", "name": "Malicious site hosting downloader",
"pattern": "[url:value = 'http://x4z9arb.cn/4712']", "pattern": "[url:value = 'http://x4z9arb.cn/4712']",
"type": "indicator", "type": "indicator",
"valid_from": "2017-01-27T13:49:53.935382Z" "valid_from": "2017-01-27T13:49:53.935382Z",
} }
ind8 = { ind8 = {
"created": "2017-01-27T13:49:53.935Z", "created": "2017-01-27T13:49:53.935Z",
"id": "indicator--00000000-0000-4000-8000-000000000002", "id": "indicator--00000000-0000-4000-8000-000000000002",
"labels": [ "labels": [
"url-watchlist" "url-watchlist",
], ],
"modified": "2017-01-27T13:49:53.935Z", "modified": "2017-01-27T13:49:53.935Z",
"name": "Malicious site hosting downloader", "name": "Malicious site hosting downloader",
"pattern": "[url:value = 'http://x4z9arb.cn/4712']", "pattern": "[url:value = 'http://x4z9arb.cn/4712']",
"type": "indicator", "type": "indicator",
"valid_from": "2017-01-27T13:49:53.935382Z" "valid_from": "2017-01-27T13:49:53.935382Z",
} }
return [ind6, ind7, ind8] return [ind6, ind7, ind8]
@pytest.fixture @pytest.fixture
def real_stix_objs2(stix_objs2): def real_stix_objs2(stix_objs2):
return [stix2.parse(x) for x in stix_objs2] return [stix2.parse(x, version="2.0") for x in stix_objs2]
@ -12,6 +12,7 @@ INDICATOR_ID = "indicator--a740531e-63ff-4e49-a9e1-a0a3eed0e3e7"
INTRUSION_SET_ID = "intrusion-set--4e78f46f-a023-4e5f-bc24-71b3ca22ec29" INTRUSION_SET_ID = "intrusion-set--4e78f46f-a023-4e5f-bc24-71b3ca22ec29"
MALWARE_ID = "malware--9c4638ec-f1de-4ddb-abf4-1b760417654e" MALWARE_ID = "malware--9c4638ec-f1de-4ddb-abf4-1b760417654e"
MARKING_DEFINITION_ID = "marking-definition--613f2e26-407d-48c7-9eca-b8e91df99dc9" MARKING_DEFINITION_ID = "marking-definition--613f2e26-407d-48c7-9eca-b8e91df99dc9"
NOTE_ID = "note--0c7b5b88-8ff7-4a4d-aa9d-feb398cd0061"
OBSERVED_DATA_ID = "observed-data--b67d30ff-02ac-498a-92f9-32f845f448cf" OBSERVED_DATA_ID = "observed-data--b67d30ff-02ac-498a-92f9-32f845f448cf"
RELATIONSHIP_ID = "relationship--df7c87eb-75d2-4948-af81-9d49d246f301" RELATIONSHIP_ID = "relationship--df7c87eb-75d2-4948-af81-9d49d246f301"
REPORT_ID = "report--84e4d88f-44ea-4bcd-bbf3-b2c1c320bcb3" REPORT_ID = "report--84e4d88f-44ea-4bcd-bbf3-b2c1c320bcb3"
@ -31,7 +32,7 @@ MARKING_IDS = [
RELATIONSHIP_IDS = [ RELATIONSHIP_IDS = [
'relationship--06520621-5352-4e6a-b976-e8fa3d437ffd', 'relationship--06520621-5352-4e6a-b976-e8fa3d437ffd',
'relationship--181c9c09-43e6-45dd-9374-3bec192f05ef', 'relationship--181c9c09-43e6-45dd-9374-3bec192f05ef',
'relationship--a0cbb21c-8daf-4a7f-96aa-7155a4ef8f70' 'relationship--a0cbb21c-8daf-4a7f-96aa-7155a4ef8f70',
] ]
# *_KWARGS contains all required arguments to create an instance of that STIX object # *_KWARGS contains all required arguments to create an instance of that STIX object
@ -86,7 +87,7 @@ MALWARE_MORE_KWARGS = dict(
modified="2016-04-06T20:03:00.000Z", modified="2016-04-06T20:03:00.000Z",
labels=['ransomware'], labels=['ransomware'],
name="Cryptolocker", name="Cryptolocker",
description="A ransomware related to ..." description="A ransomware related to ...",
) )
OBSERVED_DATA_KWARGS = dict( OBSERVED_DATA_KWARGS = dict(
@ -97,8 +98,8 @@ OBSERVED_DATA_KWARGS = dict(
"0": { "0": {
"type": "windows-registry-key", "type": "windows-registry-key",
"key": "HKEY_LOCAL_MACHINE\\System\\Foo\\Bar", "key": "HKEY_LOCAL_MACHINE\\System\\Foo\\Bar",
} },
} },
) )
REPORT_KWARGS = dict( REPORT_KWARGS = dict(
@ -24,14 +24,14 @@ EXPECTED = """{
def test_attack_pattern_example(): def test_attack_pattern_example():
ap = stix2.AttackPattern( ap = stix2.v20.AttackPattern(
id="attack-pattern--0c7b5b88-8ff7-4a4d-aa9d-feb398cd0061", id="attack-pattern--0c7b5b88-8ff7-4a4d-aa9d-feb398cd0061",
created="2016-05-12T08:17:27.000Z", created="2016-05-12T08:17:27.000Z",
modified="2016-05-12T08:17:27.000Z", modified="2016-05-12T08:17:27.000Z",
name="Spear Phishing", name="Spear Phishing",
external_references=[{ external_references=[{
"source_name": "capec", "source_name": "capec",
"external_id": "CAPEC-163" "external_id": "CAPEC-163",
}], }],
description="...", description="...",
) )
@ -39,25 +39,27 @@ def test_attack_pattern_example():
assert str(ap) == EXPECTED assert str(ap) == EXPECTED
@pytest.mark.parametrize("data", [ @pytest.mark.parametrize(
EXPECTED, "data", [
{ EXPECTED,
"type": "attack-pattern", {
"id": "attack-pattern--0c7b5b88-8ff7-4a4d-aa9d-feb398cd0061", "type": "attack-pattern",
"created": "2016-05-12T08:17:27.000Z", "id": "attack-pattern--0c7b5b88-8ff7-4a4d-aa9d-feb398cd0061",
"modified": "2016-05-12T08:17:27.000Z", "created": "2016-05-12T08:17:27.000Z",
"description": "...", "modified": "2016-05-12T08:17:27.000Z",
"external_references": [ "description": "...",
{ "external_references": [
"external_id": "CAPEC-163", {
"source_name": "capec" "external_id": "CAPEC-163",
} "source_name": "capec",
], },
"name": "Spear Phishing", ],
}, "name": "Spear Phishing",
]) },
],
)
def test_parse_attack_pattern(data): def test_parse_attack_pattern(data):
ap = stix2.parse(data) ap = stix2.parse(data, version="2.0")
assert ap.type == 'attack-pattern' assert ap.type == 'attack-pattern'
assert ap.id == ATTACK_PATTERN_ID assert ap.id == ATTACK_PATTERN_ID
@ -71,12 +73,12 @@ def test_parse_attack_pattern(data):
def test_attack_pattern_invalid_labels(): def test_attack_pattern_invalid_labels():
with pytest.raises(stix2.exceptions.InvalidValueError): with pytest.raises(stix2.exceptions.InvalidValueError):
stix2.AttackPattern( stix2.v20.AttackPattern(
id="attack-pattern--0c7b5b88-8ff7-4a4d-aa9d-feb398cd0061", id="attack-pattern--0c7b5b88-8ff7-4a4d-aa9d-feb398cd0061",
created="2016-05-12T08:17:27Z", created="2016-05-12T08:17:27Z",
modified="2016-05-12T08:17:27Z", modified="2016-05-12T08:17:27Z",
name="Spear Phishing", name="Spear Phishing",
labels=1 labels=1,
) )
# TODO: Add other examples # TODO: Add other examples
@ -0,0 +1,236 @@
import json
import pytest
import stix2
EXPECTED_BUNDLE = """{
"type": "bundle",
"id": "bundle--00000000-0000-4000-8000-000000000007",
"spec_version": "2.0",
"objects": [
{
"type": "indicator",
"id": "indicator--00000000-0000-4000-8000-000000000001",
"created": "2017-01-01T12:34:56.000Z",
"modified": "2017-01-01T12:34:56.000Z",
"pattern": "[file:hashes.MD5 = 'd41d8cd98f00b204e9800998ecf8427e']",
"valid_from": "2017-01-01T12:34:56Z",
"labels": [
"malicious-activity"
]
},
{
"type": "malware",
"id": "malware--00000000-0000-4000-8000-000000000003",
"created": "2017-01-01T12:34:56.000Z",
"modified": "2017-01-01T12:34:56.000Z",
"name": "Cryptolocker",
"labels": [
"ransomware"
]
},
{
"type": "relationship",
"id": "relationship--00000000-0000-4000-8000-000000000005",
"created": "2017-01-01T12:34:56.000Z",
"modified": "2017-01-01T12:34:56.000Z",
"relationship_type": "indicates",
"source_ref": "indicator--a740531e-63ff-4e49-a9e1-a0a3eed0e3e7",
"target_ref": "malware--9c4638ec-f1de-4ddb-abf4-1b760417654e"
}
]
}"""
EXPECTED_BUNDLE_DICT = {
"type": "bundle",
"id": "bundle--00000000-0000-4000-8000-000000000007",
"spec_version": "2.0",
"objects": [
{
"type": "indicator",
"id": "indicator--00000000-0000-4000-8000-000000000001",
"created": "2017-01-01T12:34:56.000Z",
"modified": "2017-01-01T12:34:56.000Z",
"pattern": "[file:hashes.MD5 = 'd41d8cd98f00b204e9800998ecf8427e']",
"valid_from": "2017-01-01T12:34:56Z",
"labels": [
"malicious-activity",
],
},
{
"type": "malware",
"id": "malware--00000000-0000-4000-8000-000000000003",
"created": "2017-01-01T12:34:56.000Z",
"modified": "2017-01-01T12:34:56.000Z",
"name": "Cryptolocker",
"labels": [
"ransomware",
],
},
{
"type": "relationship",
"id": "relationship--00000000-0000-4000-8000-000000000005",
"created": "2017-01-01T12:34:56.000Z",
"modified": "2017-01-01T12:34:56.000Z",
"relationship_type": "indicates",
"source_ref": "indicator--a740531e-63ff-4e49-a9e1-a0a3eed0e3e7",
"target_ref": "malware--9c4638ec-f1de-4ddb-abf4-1b760417654e",
},
],
}
def test_empty_bundle():
bundle = stix2.v20.Bundle()
assert bundle.type == "bundle"
assert bundle.id.startswith("bundle--")
with pytest.raises(AttributeError):
assert bundle.objects
def test_bundle_with_wrong_type():
with pytest.raises(stix2.exceptions.InvalidValueError) as excinfo:
stix2.v20.Bundle(type="not-a-bundle")
assert excinfo.value.cls == stix2.v20.Bundle
assert excinfo.value.prop_name == "type"
assert excinfo.value.reason == "must equal 'bundle'."
assert str(excinfo.value) == "Invalid value for Bundle 'type': must equal 'bundle'."
def test_bundle_id_must_start_with_bundle():
with pytest.raises(stix2.exceptions.InvalidValueError) as excinfo:
stix2.v20.Bundle(id='my-prefix--')
assert excinfo.value.cls == stix2.v20.Bundle
assert excinfo.value.prop_name == "id"
assert excinfo.value.reason == "must start with 'bundle--'."
assert str(excinfo.value) == "Invalid value for Bundle 'id': must start with 'bundle--'."
def test_create_bundle1(indicator, malware, relationship):
bundle = stix2.v20.Bundle(objects=[indicator, malware, relationship])
assert str(bundle) == EXPECTED_BUNDLE
assert bundle.serialize(pretty=True) == EXPECTED_BUNDLE
def test_create_bundle2(indicator, malware, relationship):
bundle = stix2.v20.Bundle(objects=[indicator, malware, relationship])
assert json.loads(bundle.serialize()) == EXPECTED_BUNDLE_DICT
def test_create_bundle_with_positional_args(indicator, malware, relationship):
bundle = stix2.v20.Bundle(indicator, malware, relationship)
assert str(bundle) == EXPECTED_BUNDLE
def test_create_bundle_with_positional_listarg(indicator, malware, relationship):
bundle = stix2.v20.Bundle([indicator, malware, relationship])
assert str(bundle) == EXPECTED_BUNDLE
def test_create_bundle_with_listarg_and_positional_arg(indicator, malware, relationship):
bundle = stix2.v20.Bundle([indicator, malware], relationship)
assert str(bundle) == EXPECTED_BUNDLE
def test_create_bundle_with_listarg_and_kwarg(indicator, malware, relationship):
bundle = stix2.v20.Bundle([indicator, malware], objects=[relationship])
assert str(bundle) == EXPECTED_BUNDLE
def test_create_bundle_with_arg_listarg_and_kwarg(indicator, malware, relationship):
bundle = stix2.v20.Bundle([indicator], malware, objects=[relationship])
assert str(bundle) == EXPECTED_BUNDLE
def test_create_bundle_invalid(indicator, malware, relationship):
with pytest.raises(ValueError) as excinfo:
stix2.v20.Bundle(objects=[1])
assert excinfo.value.reason == "This property may only contain a dictionary or object"
with pytest.raises(ValueError) as excinfo:
stix2.v20.Bundle(objects=[{}])
assert excinfo.value.reason == "This property may only contain a non-empty dictionary or object"
with pytest.raises(ValueError) as excinfo:
stix2.v20.Bundle(objects=[{'type': 'bundle'}])
assert excinfo.value.reason == 'This property may not contain a Bundle object'
@pytest.mark.parametrize("version", ["2.0"])
def test_parse_bundle(version):
bundle = stix2.parse(EXPECTED_BUNDLE, version=version)
assert bundle.type == "bundle"
assert bundle.id.startswith("bundle--")
assert type(bundle.objects[0]) is stix2.v20.Indicator
assert bundle.objects[0].type == 'indicator'
assert bundle.objects[1].type == 'malware'
assert bundle.objects[2].type == 'relationship'
def test_parse_unknown_type():
unknown = {
"type": "other",
"id": "other--8e2e2d2b-17d4-4cbf-938f-98ee46b3cd3f",
"created": "2016-04-06T20:03:00Z",
"modified": "2016-04-06T20:03:00Z",
"created_by_ref": "identity--f431f809-377b-45e0-aa1c-6a4751cae5ff",
"description": "Campaign by Green Group against a series of targets in the financial services sector.",
"name": "Green Group Attacks Against Finance",
}
with pytest.raises(stix2.exceptions.ParseError) as excinfo:
stix2.parse(unknown, version="2.0")
assert str(excinfo.value) == "Can't parse unknown object type 'other'! For custom types, use the CustomObject decorator."
def test_stix_object_property():
prop = stix2.properties.STIXObjectProperty(spec_version='2.0')
identity = stix2.v20.Identity(name="test", identity_class="individual")
assert prop.clean(identity) is identity
def test_bundle_with_different_spec_objects():
# This is a 2.0 case only...
data = [
{
"spec_version": "2.1",
"type": "indicator",
"id": "indicator--00000000-0000-4000-8000-000000000001",
"created": "2017-01-01T12:34:56.000Z",
"modified": "2017-01-01T12:34:56.000Z",
"pattern": "[file:hashes.MD5 = 'd41d8cd98f00b204e9800998ecf8427e']",
"valid_from": "2017-01-01T12:34:56Z",
"labels": [
"malicious-activity",
],
},
{
"type": "malware",
"id": "malware--00000000-0000-4000-8000-000000000003",
"created": "2017-01-01T12:34:56.000Z",
"modified": "2017-01-01T12:34:56.000Z",
"name": "Cryptolocker",
"labels": [
"ransomware",
],
},
]
with pytest.raises(ValueError) as excinfo:
stix2.v20.Bundle(objects=data)
assert "Spec version 2.0 bundles don't yet support containing objects of a different spec version." in str(excinfo.value)
@ -19,32 +19,34 @@ EXPECTED = """{
def test_campaign_example(): def test_campaign_example():
campaign = stix2.Campaign( campaign = stix2.v20.Campaign(
id="campaign--8e2e2d2b-17d4-4cbf-938f-98ee46b3cd3f", id="campaign--8e2e2d2b-17d4-4cbf-938f-98ee46b3cd3f",
created_by_ref="identity--f431f809-377b-45e0-aa1c-6a4751cae5ff", created_by_ref="identity--f431f809-377b-45e0-aa1c-6a4751cae5ff",
created="2016-04-06T20:03:00Z", created="2016-04-06T20:03:00Z",
modified="2016-04-06T20:03:00Z", modified="2016-04-06T20:03:00Z",
name="Green Group Attacks Against Finance", name="Green Group Attacks Against Finance",
description="Campaign by Green Group against a series of targets in the financial services sector." description="Campaign by Green Group against a series of targets in the financial services sector.",
) )
assert str(campaign) == EXPECTED assert str(campaign) == EXPECTED
@pytest.mark.parametrize("data", [ @pytest.mark.parametrize(
EXPECTED, "data", [
{ EXPECTED,
"type": "campaign", {
"id": "campaign--8e2e2d2b-17d4-4cbf-938f-98ee46b3cd3f", "type": "campaign",
"created": "2016-04-06T20:03:00Z", "id": "campaign--8e2e2d2b-17d4-4cbf-938f-98ee46b3cd3f",
"modified": "2016-04-06T20:03:00Z", "created": "2016-04-06T20:03:00Z",
"created_by_ref": "identity--f431f809-377b-45e0-aa1c-6a4751cae5ff", "modified": "2016-04-06T20:03:00Z",
"description": "Campaign by Green Group against a series of targets in the financial services sector.", "created_by_ref": "identity--f431f809-377b-45e0-aa1c-6a4751cae5ff",
"name": "Green Group Attacks Against Finance", "description": "Campaign by Green Group against a series of targets in the financial services sector.",
}, "name": "Green Group Attacks Against Finance",
]) },
],
)
def test_parse_campaign(data): def test_parse_campaign(data):
cmpn = stix2.parse(data) cmpn = stix2.parse(data, version="2.0")
assert cmpn.type == 'campaign' assert cmpn.type == 'campaign'
assert cmpn.id == CAMPAIGN_ID assert cmpn.id == CAMPAIGN_ID
stix2/test/v20/test_core.py (new file, 172 lines)
@ -0,0 +1,172 @@
import pytest
import stix2
from stix2 import core, exceptions
BUNDLE = {
"type": "bundle",
"spec_version": "2.0",
"id": "bundle--00000000-0000-4000-8000-000000000007",
"objects": [
{
"type": "indicator",
"id": "indicator--00000000-0000-4000-8000-000000000001",
"created": "2017-01-01T12:34:56.000Z",
"modified": "2017-01-01T12:34:56.000Z",
"pattern": "[file:hashes.MD5 = 'd41d8cd98f00b204e9800998ecf8427e']",
"valid_from": "2017-01-01T12:34:56Z",
"labels": [
"malicious-activity",
],
},
{
"type": "malware",
"id": "malware--00000000-0000-4000-8000-000000000003",
"created": "2017-01-01T12:34:56.000Z",
"modified": "2017-01-01T12:34:56.000Z",
"name": "Cryptolocker",
"labels": [
"ransomware",
],
},
{
"type": "relationship",
"id": "relationship--00000000-0000-4000-8000-000000000005",
"created": "2017-01-01T12:34:56.000Z",
"modified": "2017-01-01T12:34:56.000Z",
"relationship_type": "indicates",
"source_ref": "indicator--a740531e-63ff-4e49-a9e1-a0a3eed0e3e7",
"target_ref": "malware--9c4638ec-f1de-4ddb-abf4-1b760417654e",
},
],
}
def test_dict_to_stix2_bundle_with_version():
with pytest.raises(exceptions.ExtraPropertiesError) as excinfo:
core.dict_to_stix2(BUNDLE, version='2.1')
assert str(excinfo.value) == "Unexpected properties for Bundle: (spec_version)."
def test_parse_observable_with_version():
observable = {"type": "file", "name": "foo.exe"}
obs_obj = core.parse_observable(observable, version='2.0')
v = 'v20'
assert v in str(obs_obj.__class__)
@pytest.mark.xfail(reason="The default version is no longer 2.0", condition=stix2.DEFAULT_VERSION != "2.0")
def test_parse_observable_with_no_version():
observable = {"type": "file", "name": "foo.exe"}
obs_obj = core.parse_observable(observable)
v = 'v20'
assert v in str(obs_obj.__class__)
def test_register_object_with_version():
bundle = core.dict_to_stix2(BUNDLE, version='2.0')
core._register_object(bundle.objects[0].__class__, version='2.0')
v = 'v20'
assert bundle.objects[0].type in core.STIX2_OBJ_MAPS[v]['objects']
assert v in str(bundle.objects[0].__class__)
def test_register_marking_with_version():
core._register_marking(stix2.v20.TLP_WHITE.__class__, version='2.0')
v = 'v20'
assert stix2.v20.TLP_WHITE.definition._type in core.STIX2_OBJ_MAPS[v]['markings']
assert v in str(stix2.v20.TLP_WHITE.__class__)
@pytest.mark.xfail(reason="The default version is no longer 2.0", condition=stix2.DEFAULT_VERSION != "2.0")
def test_register_marking_with_no_version():
# Uses default version (2.0 in this case)
core._register_marking(stix2.v20.TLP_WHITE.__class__)
v = 'v20'
assert stix2.v20.TLP_WHITE.definition._type in core.STIX2_OBJ_MAPS[v]['markings']
assert v in str(stix2.v20.TLP_WHITE.__class__)
def test_register_observable_with_version():
observed_data = stix2.v20.ObservedData(
id="observed-data--b67d30ff-02ac-498a-92f9-32f845f448cf",
created_by_ref="identity--f431f809-377b-45e0-aa1c-6a4751cae5ff",
created="2016-04-06T19:58:16.000Z",
modified="2016-04-06T19:58:16.000Z",
first_observed="2015-12-21T19:00:00Z",
last_observed="2015-12-21T19:00:00Z",
number_observed=50,
objects={
"0": {
"name": "foo.exe",
"type": "file",
"extensions": {
"ntfs-ext": {
"alternate_data_streams": [
{
"name": "second.stream",
"size": 25536,
},
],
},
},
},
"1": {
"type": "directory",
"path": "/usr/home",
"contains_refs": ["0"],
},
},
)
core._register_observable(observed_data.objects['0'].__class__, version='2.0')
v = 'v20'
assert observed_data.objects['0'].type in core.STIX2_OBJ_MAPS[v]['observables']
assert v in str(observed_data.objects['0'].__class__)
def test_register_observable_extension_with_version():
observed_data = stix2.v20.ObservedData(
id="observed-data--b67d30ff-02ac-498a-92f9-32f845f448cf",
created_by_ref="identity--f431f809-377b-45e0-aa1c-6a4751cae5ff",
created="2016-04-06T19:58:16.000Z",
modified="2016-04-06T19:58:16.000Z",
first_observed="2015-12-21T19:00:00Z",
last_observed="2015-12-21T19:00:00Z",
number_observed=50,
objects={
"0": {
"name": "foo.exe",
"type": "file",
"extensions": {
"ntfs-ext": {
"alternate_data_streams": [
{
"name": "second.stream",
"size": 25536,
},
],
},
},
},
"1": {
"type": "directory",
"path": "/usr/home",
"contains_refs": ["0"],
},
},
)
core._register_observable_extension(observed_data.objects['0'], observed_data.objects['0'].extensions['ntfs-ext'].__class__, version='2.0')
v = 'v20'
assert observed_data.objects['0'].type in core.STIX2_OBJ_MAPS[v]['observables']
assert v in str(observed_data.objects['0'].__class__)
assert observed_data.objects['0'].extensions['ntfs-ext']._type in core.STIX2_OBJ_MAPS[v]['observable-extensions']['file']
assert v in str(observed_data.objects['0'].extensions['ntfs-ext'].__class__)
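The version pinning exercised above can also be driven directly; a minimal sketch, assuming a STIX 2.0 indicator shaped like the BUNDLE entries in this file (values are illustrative):

import stix2
from stix2 import core

# Illustrative STIX 2.0 indicator, mirroring the BUNDLE entries above
indicator_dict = {
    "type": "indicator",
    "id": "indicator--00000000-0000-4000-8000-000000000001",
    "created": "2017-01-01T12:34:56.000Z",
    "modified": "2017-01-01T12:34:56.000Z",
    "pattern": "[file:hashes.MD5 = 'd41d8cd98f00b204e9800998ecf8427e']",
    "valid_from": "2017-01-01T12:34:56Z",
    "labels": ["malicious-activity"],
}

# Pin the spec version explicitly instead of relying on stix2.DEFAULT_VERSION
indicator = stix2.parse(indicator_dict, version="2.0")
assert "v20" in str(indicator.__class__)

# parse_observable accepts the same version keyword
file_obs = core.parse_observable({"type": "file", "name": "foo.exe"}, version="2.0")
assert "v20" in str(file_obs.__class__)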


@@ -19,32 +19,34 @@ EXPECTED = """{
 def test_course_of_action_example():
-    coa = stix2.CourseOfAction(
+    coa = stix2.v20.CourseOfAction(
         id="course-of-action--8e2e2d2b-17d4-4cbf-938f-98ee46b3cd3f",
         created_by_ref="identity--f431f809-377b-45e0-aa1c-6a4751cae5ff",
         created="2016-04-06T20:03:48.000Z",
         modified="2016-04-06T20:03:48.000Z",
         name="Add TCP port 80 Filter Rule to the existing Block UDP 1434 Filter",
-        description="This is how to add a filter rule to block inbound access to TCP port 80 to the existing UDP 1434 filter ..."
+        description="This is how to add a filter rule to block inbound access to TCP port 80 to the existing UDP 1434 filter ...",
     )

     assert str(coa) == EXPECTED


-@pytest.mark.parametrize("data", [
-    EXPECTED,
-    {
-        "created": "2016-04-06T20:03:48.000Z",
-        "created_by_ref": "identity--f431f809-377b-45e0-aa1c-6a4751cae5ff",
-        "description": "This is how to add a filter rule to block inbound access to TCP port 80 to the existing UDP 1434 filter ...",
-        "id": "course-of-action--8e2e2d2b-17d4-4cbf-938f-98ee46b3cd3f",
-        "modified": "2016-04-06T20:03:48.000Z",
-        "name": "Add TCP port 80 Filter Rule to the existing Block UDP 1434 Filter",
-        "type": "course-of-action"
-    },
-])
+@pytest.mark.parametrize(
+    "data", [
+        EXPECTED,
+        {
+            "created": "2016-04-06T20:03:48.000Z",
+            "created_by_ref": "identity--f431f809-377b-45e0-aa1c-6a4751cae5ff",
+            "description": "This is how to add a filter rule to block inbound access to TCP port 80 to the existing UDP 1434 filter ...",
+            "id": "course-of-action--8e2e2d2b-17d4-4cbf-938f-98ee46b3cd3f",
+            "modified": "2016-04-06T20:03:48.000Z",
+            "name": "Add TCP port 80 Filter Rule to the existing Block UDP 1434 Filter",
+            "type": "course-of-action",
+        },
+    ],
+)
 def test_parse_course_of_action(data):
-    coa = stix2.parse(data)
+    coa = stix2.parse(data, version="2.0")

     assert coa.type == 'course-of-action'
     assert coa.id == COURSE_OF_ACTION_ID
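A short sketch of the round trip these tests rely on: build a v20 CourseOfAction, serialize it, and parse it back with an explicit version (field values are placeholders):

import stix2

coa = stix2.v20.CourseOfAction(
    name="Add TCP port 80 Filter Rule to the existing Block UDP 1434 Filter",
    description="This is how to add a filter rule to block inbound access to TCP port 80 ...",
)

# str() serializes to JSON; parse() rebuilds the v20 object
coa2 = stix2.parse(str(coa), version="2.0")
assert coa2.name == coa.name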


@@ -4,7 +4,7 @@ import stix2
from .constants import FAKE_TIME, MARKING_DEFINITION_ID from .constants import FAKE_TIME, MARKING_DEFINITION_ID
IDENTITY_CUSTOM_PROP = stix2.Identity( IDENTITY_CUSTOM_PROP = stix2.v20.Identity(
name="John Smith", name="John Smith",
identity_class="individual", identity_class="individual",
x_foo="bar", x_foo="bar",
@ -14,7 +14,7 @@ IDENTITY_CUSTOM_PROP = stix2.Identity(
def test_identity_custom_property(): def test_identity_custom_property():
with pytest.raises(ValueError) as excinfo: with pytest.raises(ValueError) as excinfo:
stix2.Identity( stix2.v20.Identity(
id="identity--311b2d2d-f010-4473-83ec-1edf84858f4c", id="identity--311b2d2d-f010-4473-83ec-1edf84858f4c",
created="2015-12-21T19:59:11Z", created="2015-12-21T19:59:11Z",
modified="2015-12-21T19:59:11Z", modified="2015-12-21T19:59:11Z",
@ -25,7 +25,7 @@ def test_identity_custom_property():
assert str(excinfo.value) == "'custom_properties' must be a dictionary" assert str(excinfo.value) == "'custom_properties' must be a dictionary"
with pytest.raises(stix2.exceptions.ExtraPropertiesError) as excinfo: with pytest.raises(stix2.exceptions.ExtraPropertiesError) as excinfo:
stix2.Identity( stix2.v20.Identity(
id="identity--311b2d2d-f010-4473-83ec-1edf84858f4c", id="identity--311b2d2d-f010-4473-83ec-1edf84858f4c",
created="2015-12-21T19:59:11Z", created="2015-12-21T19:59:11Z",
modified="2015-12-21T19:59:11Z", modified="2015-12-21T19:59:11Z",
@ -35,10 +35,10 @@ def test_identity_custom_property():
"foo": "bar", "foo": "bar",
}, },
foo="bar", foo="bar",
) )
assert "Unexpected properties for Identity" in str(excinfo.value) assert "Unexpected properties for Identity" in str(excinfo.value)
identity = stix2.Identity( identity = stix2.v20.Identity(
id="identity--311b2d2d-f010-4473-83ec-1edf84858f4c", id="identity--311b2d2d-f010-4473-83ec-1edf84858f4c",
created="2015-12-21T19:59:11Z", created="2015-12-21T19:59:11Z",
modified="2015-12-21T19:59:11Z", modified="2015-12-21T19:59:11Z",
@ -53,7 +53,7 @@ def test_identity_custom_property():
def test_identity_custom_property_invalid(): def test_identity_custom_property_invalid():
with pytest.raises(stix2.exceptions.ExtraPropertiesError) as excinfo: with pytest.raises(stix2.exceptions.ExtraPropertiesError) as excinfo:
stix2.Identity( stix2.v20.Identity(
id="identity--311b2d2d-f010-4473-83ec-1edf84858f4c", id="identity--311b2d2d-f010-4473-83ec-1edf84858f4c",
created="2015-12-21T19:59:11Z", created="2015-12-21T19:59:11Z",
modified="2015-12-21T19:59:11Z", modified="2015-12-21T19:59:11Z",
@ -61,13 +61,13 @@ def test_identity_custom_property_invalid():
identity_class="individual", identity_class="individual",
x_foo="bar", x_foo="bar",
) )
assert excinfo.value.cls == stix2.Identity assert excinfo.value.cls == stix2.v20.Identity
assert excinfo.value.properties == ['x_foo'] assert excinfo.value.properties == ['x_foo']
assert "Unexpected properties for" in str(excinfo.value) assert "Unexpected properties for" in str(excinfo.value)
def test_identity_custom_property_allowed(): def test_identity_custom_property_allowed():
identity = stix2.Identity( identity = stix2.v20.Identity(
id="identity--311b2d2d-f010-4473-83ec-1edf84858f4c", id="identity--311b2d2d-f010-4473-83ec-1edf84858f4c",
created="2015-12-21T19:59:11Z", created="2015-12-21T19:59:11Z",
modified="2015-12-21T19:59:11Z", modified="2015-12-21T19:59:11Z",
@ -79,8 +79,9 @@ def test_identity_custom_property_allowed():
assert identity.x_foo == "bar" assert identity.x_foo == "bar"
@pytest.mark.parametrize("data", [ @pytest.mark.parametrize(
"""{ "data", [
"""{
"type": "identity", "type": "identity",
"id": "identity--311b2d2d-f010-4473-83ec-1edf84858f4c", "id": "identity--311b2d2d-f010-4473-83ec-1edf84858f4c",
"created": "2015-12-21T19:59:11Z", "created": "2015-12-21T19:59:11Z",
@ -89,34 +90,35 @@ def test_identity_custom_property_allowed():
"identity_class": "individual", "identity_class": "individual",
"foo": "bar" "foo": "bar"
}""", }""",
]) ],
)
def test_parse_identity_custom_property(data): def test_parse_identity_custom_property(data):
with pytest.raises(stix2.exceptions.ExtraPropertiesError) as excinfo: with pytest.raises(stix2.exceptions.ExtraPropertiesError) as excinfo:
identity = stix2.parse(data) stix2.parse(data, version="2.0")
assert excinfo.value.cls == stix2.Identity assert excinfo.value.cls == stix2.v20.Identity
assert excinfo.value.properties == ['foo'] assert excinfo.value.properties == ['foo']
assert "Unexpected properties for" in str(excinfo.value) assert "Unexpected properties for" in str(excinfo.value)
identity = stix2.parse(data, allow_custom=True) identity = stix2.parse(data, version="2.0", allow_custom=True)
assert identity.foo == "bar" assert identity.foo == "bar"
def test_custom_property_object_in_bundled_object(): def test_custom_property_object_in_bundled_object():
bundle = stix2.Bundle(IDENTITY_CUSTOM_PROP, allow_custom=True) bundle = stix2.v20.Bundle(IDENTITY_CUSTOM_PROP, allow_custom=True)
assert bundle.objects[0].x_foo == "bar" assert bundle.objects[0].x_foo == "bar"
assert '"x_foo": "bar"' in str(bundle) assert '"x_foo": "bar"' in str(bundle)
def test_custom_properties_object_in_bundled_object(): def test_custom_properties_object_in_bundled_object():
obj = stix2.Identity( obj = stix2.v20.Identity(
name="John Smith", name="John Smith",
identity_class="individual", identity_class="individual",
custom_properties={ custom_properties={
"x_foo": "bar", "x_foo": "bar",
} },
) )
bundle = stix2.Bundle(obj, allow_custom=True) bundle = stix2.v20.Bundle(obj, allow_custom=True)
assert bundle.objects[0].x_foo == "bar" assert bundle.objects[0].x_foo == "bar"
assert '"x_foo": "bar"' in str(bundle) assert '"x_foo": "bar"' in str(bundle)
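The two paths shown above condense into one sketch: custom_properties on the object itself needs no flag, while bundling custom content still requires allow_custom (values are placeholders):

import stix2

ident = stix2.v20.Identity(
    name="John Smith",
    identity_class="individual",
    custom_properties={"x_foo": "bar"},  # accepted without allow_custom
)

bundle = stix2.v20.Bundle(ident, allow_custom=True)  # bundling custom content needs the flag
assert bundle.objects[0].x_foo == "bar"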
@ -132,9 +134,9 @@ def test_custom_property_dict_in_bundled_object():
'x_foo': 'bar', 'x_foo': 'bar',
} }
with pytest.raises(stix2.exceptions.ExtraPropertiesError): with pytest.raises(stix2.exceptions.ExtraPropertiesError):
bundle = stix2.Bundle(custom_identity) stix2.v20.Bundle(custom_identity)
bundle = stix2.Bundle(custom_identity, allow_custom=True) bundle = stix2.v20.Bundle(custom_identity, allow_custom=True)
assert bundle.objects[0].x_foo == "bar" assert bundle.objects[0].x_foo == "bar"
assert '"x_foo": "bar"' in str(bundle) assert '"x_foo": "bar"' in str(bundle)
@ -150,23 +152,23 @@ def test_custom_properties_dict_in_bundled_object():
'x_foo': 'bar', 'x_foo': 'bar',
}, },
} }
bundle = stix2.Bundle(custom_identity) bundle = stix2.v20.Bundle(custom_identity)
assert bundle.objects[0].x_foo == "bar" assert bundle.objects[0].x_foo == "bar"
assert '"x_foo": "bar"' in str(bundle) assert '"x_foo": "bar"' in str(bundle)
def test_custom_property_in_observed_data(): def test_custom_property_in_observed_data():
artifact = stix2.File( artifact = stix2.v20.File(
allow_custom=True, allow_custom=True,
name='test', name='test',
x_foo='bar' x_foo='bar',
) )
observed_data = stix2.ObservedData( observed_data = stix2.v20.ObservedData(
allow_custom=True, allow_custom=True,
first_observed="2015-12-21T19:00:00Z", first_observed="2015-12-21T19:00:00Z",
last_observed="2015-12-21T19:00:00Z", last_observed="2015-12-21T19:00:00Z",
number_observed=0, number_observed=1,
objects={"0": artifact}, objects={"0": artifact},
) )
@ -175,20 +177,20 @@ def test_custom_property_in_observed_data():
def test_custom_property_object_in_observable_extension(): def test_custom_property_object_in_observable_extension():
ntfs = stix2.NTFSExt( ntfs = stix2.v20.NTFSExt(
allow_custom=True, allow_custom=True,
sid=1, sid=1,
x_foo='bar', x_foo='bar',
) )
artifact = stix2.File( artifact = stix2.v20.File(
name='test', name='test',
extensions={'ntfs-ext': ntfs}, extensions={'ntfs-ext': ntfs},
) )
observed_data = stix2.ObservedData( observed_data = stix2.v20.ObservedData(
allow_custom=True, allow_custom=True,
first_observed="2015-12-21T19:00:00Z", first_observed="2015-12-21T19:00:00Z",
last_observed="2015-12-21T19:00:00Z", last_observed="2015-12-21T19:00:00Z",
number_observed=0, number_observed=1,
objects={"0": artifact}, objects={"0": artifact},
) )
@ -198,17 +200,17 @@ def test_custom_property_object_in_observable_extension():
def test_custom_property_dict_in_observable_extension(): def test_custom_property_dict_in_observable_extension():
with pytest.raises(stix2.exceptions.ExtraPropertiesError): with pytest.raises(stix2.exceptions.ExtraPropertiesError):
artifact = stix2.File( stix2.v20.File(
name='test', name='test',
extensions={ extensions={
'ntfs-ext': { 'ntfs-ext': {
'sid': 1, 'sid': 1,
'x_foo': 'bar', 'x_foo': 'bar',
} },
}, },
) )
artifact = stix2.File( artifact = stix2.v20.File(
allow_custom=True, allow_custom=True,
name='test', name='test',
extensions={ extensions={
@ -216,14 +218,14 @@ def test_custom_property_dict_in_observable_extension():
'allow_custom': True, 'allow_custom': True,
'sid': 1, 'sid': 1,
'x_foo': 'bar', 'x_foo': 'bar',
} },
}, },
) )
observed_data = stix2.ObservedData( observed_data = stix2.v20.ObservedData(
allow_custom=True, allow_custom=True,
first_observed="2015-12-21T19:00:00Z", first_observed="2015-12-21T19:00:00Z",
last_observed="2015-12-21T19:00:00Z", last_observed="2015-12-21T19:00:00Z",
number_observed=0, number_observed=1,
objects={"0": artifact}, objects={"0": artifact},
) )
@ -237,15 +239,15 @@ def test_identity_custom_property_revoke():
def test_identity_custom_property_edit_markings(): def test_identity_custom_property_edit_markings():
marking_obj = stix2.MarkingDefinition( marking_obj = stix2.v20.MarkingDefinition(
id=MARKING_DEFINITION_ID, id=MARKING_DEFINITION_ID,
definition_type="statement", definition_type="statement",
definition=stix2.StatementMarking(statement="Copyright 2016, Example Corp") definition=stix2.v20.StatementMarking(statement="Copyright 2016, Example Corp"),
) )
marking_obj2 = stix2.MarkingDefinition( marking_obj2 = stix2.v20.MarkingDefinition(
id=MARKING_DEFINITION_ID, id=MARKING_DEFINITION_ID,
definition_type="statement", definition_type="statement",
definition=stix2.StatementMarking(statement="Another one") definition=stix2.v20.StatementMarking(statement="Another one"),
) )
# None of the following should throw exceptions # None of the following should throw exceptions
@ -258,9 +260,11 @@ def test_identity_custom_property_edit_markings():
def test_custom_marking_no_init_1(): def test_custom_marking_no_init_1():
@stix2.CustomMarking('x-new-obj', [ @stix2.v20.CustomMarking(
('property1', stix2.properties.StringProperty(required=True)), 'x-new-obj', [
]) ('property1', stix2.properties.StringProperty(required=True)),
],
)
class NewObj(): class NewObj():
pass pass
@ -269,9 +273,11 @@ def test_custom_marking_no_init_1():
def test_custom_marking_no_init_2(): def test_custom_marking_no_init_2():
@stix2.CustomMarking('x-new-obj2', [ @stix2.v20.CustomMarking(
('property1', stix2.properties.StringProperty(required=True)), 'x-new-obj2', [
]) ('property1', stix2.properties.StringProperty(required=True)),
],
)
class NewObj2(object): class NewObj2(object):
pass pass
@ -279,10 +285,12 @@ def test_custom_marking_no_init_2():
assert no2.property1 == 'something' assert no2.property1 == 'something'
@stix2.sdo.CustomObject('x-new-type', [ @stix2.v20.CustomObject(
('property1', stix2.properties.StringProperty(required=True)), 'x-new-type', [
('property2', stix2.properties.IntegerProperty()), ('property1', stix2.properties.StringProperty(required=True)),
]) ('property2', stix2.properties.IntegerProperty()),
],
)
class NewType(object): class NewType(object):
def __init__(self, property2=None, **kwargs): def __init__(self, property2=None, **kwargs):
if property2 and property2 < 10: if property2 and property2 < 10:
@ -312,9 +320,11 @@ def test_custom_object_type():
def test_custom_object_no_init_1(): def test_custom_object_no_init_1():
@stix2.sdo.CustomObject('x-new-obj', [ @stix2.v20.CustomObject(
('property1', stix2.properties.StringProperty(required=True)), 'x-new-obj', [
]) ('property1', stix2.properties.StringProperty(required=True)),
],
)
class NewObj(): class NewObj():
pass pass
@ -323,9 +333,11 @@ def test_custom_object_no_init_1():
def test_custom_object_no_init_2(): def test_custom_object_no_init_2():
@stix2.sdo.CustomObject('x-new-obj2', [ @stix2.v20.CustomObject(
('property1', stix2.properties.StringProperty(required=True)), 'x-new-obj2', [
]) ('property1', stix2.properties.StringProperty(required=True)),
],
)
class NewObj2(object): class NewObj2(object):
pass pass
@ -335,17 +347,21 @@ def test_custom_object_no_init_2():
def test_custom_object_invalid_type_name(): def test_custom_object_invalid_type_name():
with pytest.raises(ValueError) as excinfo: with pytest.raises(ValueError) as excinfo:
@stix2.sdo.CustomObject('x', [ @stix2.v20.CustomObject(
('property1', stix2.properties.StringProperty(required=True)), 'x', [
]) ('property1', stix2.properties.StringProperty(required=True)),
],
)
class NewObj(object): class NewObj(object):
pass # pragma: no cover pass # pragma: no cover
assert "Invalid type name 'x': " in str(excinfo.value) assert "Invalid type name 'x': " in str(excinfo.value)
with pytest.raises(ValueError) as excinfo: with pytest.raises(ValueError) as excinfo:
@stix2.sdo.CustomObject('x_new_object', [ @stix2.v20.CustomObject(
('property1', stix2.properties.StringProperty(required=True)), 'x_new_object', [
]) ('property1', stix2.properties.StringProperty(required=True)),
],
)
class NewObj2(object): class NewObj2(object):
pass # pragma: no cover pass # pragma: no cover
assert "Invalid type name 'x_new_object':" in str(excinfo.value) assert "Invalid type name 'x_new_object':" in str(excinfo.value)
@ -358,8 +374,8 @@ def test_parse_custom_object_type():
"property1": "something" "property1": "something"
}""" }"""
nt = stix2.parse(nt_string) nt = stix2.parse(nt_string, version="2.0", allow_custom=True)
assert nt.property1 == 'something' assert nt["property1"] == 'something'
def test_parse_unregistered_custom_object_type(): def test_parse_unregistered_custom_object_type():
@ -370,7 +386,7 @@ def test_parse_unregistered_custom_object_type():
}""" }"""
with pytest.raises(stix2.exceptions.ParseError) as excinfo: with pytest.raises(stix2.exceptions.ParseError) as excinfo:
stix2.parse(nt_string) stix2.parse(nt_string, version="2.0")
assert "Can't parse unknown object type" in str(excinfo.value) assert "Can't parse unknown object type" in str(excinfo.value)
assert "use the CustomObject decorator." in str(excinfo.value) assert "use the CustomObject decorator." in str(excinfo.value)
@ -385,15 +401,17 @@ def test_parse_unregistered_custom_object_type_w_allow_custom():
"property1": "something" "property1": "something"
}""" }"""
custom_obj = stix2.parse(nt_string, allow_custom=True) custom_obj = stix2.parse(nt_string, version="2.0", allow_custom=True)
assert custom_obj["type"] == "x-foobar-observable" assert custom_obj["type"] == "x-foobar-observable"
@stix2.observables.CustomObservable('x-new-observable', [ @stix2.v20.CustomObservable(
('property1', stix2.properties.StringProperty(required=True)), 'x-new-observable', [
('property2', stix2.properties.IntegerProperty()), ('property1', stix2.properties.StringProperty(required=True)),
('x_property3', stix2.properties.BooleanProperty()), ('property2', stix2.properties.IntegerProperty()),
]) ('x_property3', stix2.properties.BooleanProperty()),
],
)
class NewObservable(): class NewObservable():
def __init__(self, property2=None, **kwargs): def __init__(self, property2=None, **kwargs):
if property2 and property2 < 10: if property2 and property2 < 10:
@ -428,9 +446,11 @@ def test_custom_observable_raises_exception():
def test_custom_observable_object_no_init_1(): def test_custom_observable_object_no_init_1():
@stix2.observables.CustomObservable('x-new-observable', [ @stix2.v20.CustomObservable(
('property1', stix2.properties.StringProperty()), 'x-new-observable', [
]) ('property1', stix2.properties.StringProperty()),
],
)
class NewObs(): class NewObs():
pass pass
@ -439,9 +459,11 @@ def test_custom_observable_object_no_init_1():
def test_custom_observable_object_no_init_2(): def test_custom_observable_object_no_init_2():
@stix2.observables.CustomObservable('x-new-obs2', [ @stix2.v20.CustomObservable(
('property1', stix2.properties.StringProperty()), 'x-new-obs2', [
]) ('property1', stix2.properties.StringProperty()),
],
)
class NewObs2(object): class NewObs2(object):
pass pass
@ -451,17 +473,21 @@ def test_custom_observable_object_no_init_2():
def test_custom_observable_object_invalid_type_name(): def test_custom_observable_object_invalid_type_name():
with pytest.raises(ValueError) as excinfo: with pytest.raises(ValueError) as excinfo:
@stix2.observables.CustomObservable('x', [ @stix2.v20.CustomObservable(
('property1', stix2.properties.StringProperty()), 'x', [
]) ('property1', stix2.properties.StringProperty()),
],
)
class NewObs(object): class NewObs(object):
pass # pragma: no cover pass # pragma: no cover
assert "Invalid observable type name 'x':" in str(excinfo.value) assert "Invalid observable type name 'x':" in str(excinfo.value)
with pytest.raises(ValueError) as excinfo: with pytest.raises(ValueError) as excinfo:
@stix2.observables.CustomObservable('x_new_obs', [ @stix2.v20.CustomObservable(
('property1', stix2.properties.StringProperty()), 'x_new_obs', [
]) ('property1', stix2.properties.StringProperty()),
],
)
class NewObs2(object): class NewObs2(object):
pass # pragma: no cover pass # pragma: no cover
assert "Invalid observable type name 'x_new_obs':" in str(excinfo.value) assert "Invalid observable type name 'x_new_obs':" in str(excinfo.value)
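For contrast with the invalid names above, a sketch of a well-formed 2.0 custom observable; 'x-demo-observable' and its property are made-up names:

import stix2
from stix2 import properties

@stix2.v20.CustomObservable(
    'x-demo-observable', [
        ('property1', properties.StringProperty(required=True)),
    ],
)
class DemoObservable(object):
    pass

observed = stix2.v20.ObservedData(
    allow_custom=True,
    first_observed="2015-12-21T19:00:00Z",
    last_observed="2015-12-21T19:00:00Z",
    number_observed=1,
    objects={"0": DemoObservable(property1="something")},
)
assert observed.objects["0"].property1 == "something"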
@ -469,9 +495,11 @@ def test_custom_observable_object_invalid_type_name():
def test_custom_observable_object_invalid_ref_property(): def test_custom_observable_object_invalid_ref_property():
with pytest.raises(ValueError) as excinfo: with pytest.raises(ValueError) as excinfo:
@stix2.observables.CustomObservable('x-new-obs', [ @stix2.v20.CustomObservable(
('property_ref', stix2.properties.StringProperty()), 'x-new-obs', [
]) ('property_ref', stix2.properties.StringProperty()),
],
)
class NewObs(): class NewObs():
pass pass
assert "is named like an object reference property but is not an ObjectReferenceProperty" in str(excinfo.value) assert "is named like an object reference property but is not an ObjectReferenceProperty" in str(excinfo.value)
@ -479,9 +507,11 @@ def test_custom_observable_object_invalid_ref_property():
def test_custom_observable_object_invalid_refs_property(): def test_custom_observable_object_invalid_refs_property():
with pytest.raises(ValueError) as excinfo: with pytest.raises(ValueError) as excinfo:
@stix2.observables.CustomObservable('x-new-obs', [ @stix2.v20.CustomObservable(
('property_refs', stix2.properties.StringProperty()), 'x-new-obs', [
]) ('property_refs', stix2.properties.StringProperty()),
],
)
class NewObs(): class NewObs():
pass pass
assert "is named like an object reference list property but is not a ListProperty containing ObjectReferenceProperty" in str(excinfo.value) assert "is named like an object reference list property but is not a ListProperty containing ObjectReferenceProperty" in str(excinfo.value)
@ -489,33 +519,39 @@ def test_custom_observable_object_invalid_refs_property():
def test_custom_observable_object_invalid_refs_list_property(): def test_custom_observable_object_invalid_refs_list_property():
with pytest.raises(ValueError) as excinfo: with pytest.raises(ValueError) as excinfo:
@stix2.observables.CustomObservable('x-new-obs', [ @stix2.v20.CustomObservable(
('property_refs', stix2.properties.ListProperty(stix2.properties.StringProperty)), 'x-new-obs', [
]) ('property_refs', stix2.properties.ListProperty(stix2.properties.StringProperty)),
],
)
class NewObs(): class NewObs():
pass pass
assert "is named like an object reference list property but is not a ListProperty containing ObjectReferenceProperty" in str(excinfo.value) assert "is named like an object reference list property but is not a ListProperty containing ObjectReferenceProperty" in str(excinfo.value)
def test_custom_observable_object_invalid_valid_refs(): def test_custom_observable_object_invalid_valid_refs():
@stix2.observables.CustomObservable('x-new-obs', [ @stix2.v20.CustomObservable(
('property1', stix2.properties.StringProperty(required=True)), 'x-new-obs', [
('property_ref', stix2.properties.ObjectReferenceProperty(valid_types='email-addr')), ('property1', stix2.properties.StringProperty(required=True)),
]) ('property_ref', stix2.properties.ObjectReferenceProperty(valid_types='email-addr')),
],
)
class NewObs(): class NewObs():
pass pass
with pytest.raises(Exception) as excinfo: with pytest.raises(Exception) as excinfo:
NewObs(_valid_refs=['1'], NewObs(
property1='something', _valid_refs=['1'],
property_ref='1') property1='something',
property_ref='1',
)
assert "must be created with _valid_refs as a dict, not a list" in str(excinfo.value) assert "must be created with _valid_refs as a dict, not a list" in str(excinfo.value)
def test_custom_no_properties_raises_exception(): def test_custom_no_properties_raises_exception():
with pytest.raises(ValueError): with pytest.raises(TypeError):
@stix2.sdo.CustomObject('x-new-object-type') @stix2.v20.CustomObject('x-new-object-type')
class NewObject1(object): class NewObject1(object):
pass pass
@ -523,7 +559,7 @@ def test_custom_no_properties_raises_exception():
def test_custom_wrong_properties_arg_raises_exception(): def test_custom_wrong_properties_arg_raises_exception():
with pytest.raises(ValueError): with pytest.raises(ValueError):
@stix2.observables.CustomObservable('x-new-object-type', (("prop", stix2.properties.BooleanProperty()))) @stix2.v20.CustomObservable('x-new-object-type', (("prop", stix2.properties.BooleanProperty())))
class NewObject2(object): class NewObject2(object):
pass pass
@ -534,8 +570,8 @@ def test_parse_custom_observable_object():
"property1": "something" "property1": "something"
}""" }"""
nt = stix2.parse_observable(nt_string, []) nt = stix2.parse_observable(nt_string, [], version='2.0')
assert isinstance(nt, stix2.core._STIXBase) assert isinstance(nt, stix2.base._STIXBase)
assert nt.property1 == 'something' assert nt.property1 == 'something'
@ -546,14 +582,14 @@ def test_parse_unregistered_custom_observable_object():
}""" }"""
with pytest.raises(stix2.exceptions.CustomContentError) as excinfo: with pytest.raises(stix2.exceptions.CustomContentError) as excinfo:
stix2.parse_observable(nt_string) stix2.parse_observable(nt_string, version='2.0')
assert "Can't parse unknown observable type" in str(excinfo.value) assert "Can't parse unknown observable type" in str(excinfo.value)
parsed_custom = stix2.parse_observable(nt_string, allow_custom=True) parsed_custom = stix2.parse_observable(nt_string, allow_custom=True, version='2.0')
assert parsed_custom['property1'] == 'something' assert parsed_custom['property1'] == 'something'
with pytest.raises(AttributeError) as excinfo: with pytest.raises(AttributeError) as excinfo:
assert parsed_custom.property1 == 'something' assert parsed_custom.property1 == 'something'
assert not isinstance(parsed_custom, stix2.core._STIXBase) assert not isinstance(parsed_custom, stix2.base._STIXBase)
def test_parse_unregistered_custom_observable_object_with_no_type(): def test_parse_unregistered_custom_observable_object_with_no_type():
@ -562,7 +598,7 @@ def test_parse_unregistered_custom_observable_object_with_no_type():
}""" }"""
with pytest.raises(stix2.exceptions.ParseError) as excinfo: with pytest.raises(stix2.exceptions.ParseError) as excinfo:
stix2.parse_observable(nt_string, allow_custom=True) stix2.parse_observable(nt_string, allow_custom=True, version='2.0')
assert "Can't parse observable with no 'type' property" in str(excinfo.value) assert "Can't parse observable with no 'type' property" in str(excinfo.value)
@ -582,7 +618,7 @@ def test_parse_observed_data_with_custom_observable():
} }
} }
}""" }"""
parsed = stix2.parse(input_str, allow_custom=True) parsed = stix2.parse(input_str, version="2.0", allow_custom=True)
assert parsed.objects['0']['property1'] == 'something' assert parsed.objects['0']['property1'] == 'something'
@ -592,7 +628,7 @@ def test_parse_invalid_custom_observable_object():
}""" }"""
with pytest.raises(stix2.exceptions.ParseError) as excinfo: with pytest.raises(stix2.exceptions.ParseError) as excinfo:
stix2.parse_observable(nt_string) stix2.parse_observable(nt_string, version='2.0')
assert "Can't parse observable with no 'type' property" in str(excinfo.value) assert "Can't parse observable with no 'type' property" in str(excinfo.value)
@ -634,7 +670,7 @@ def test_observable_custom_property_allowed():
def test_observed_data_with_custom_observable_object(): def test_observed_data_with_custom_observable_object():
no = NewObservable(property1='something') no = NewObservable(property1='something')
ob_data = stix2.ObservedData( ob_data = stix2.v20.ObservedData(
first_observed=FAKE_TIME, first_observed=FAKE_TIME,
last_observed=FAKE_TIME, last_observed=FAKE_TIME,
number_observed=1, number_observed=1,
@ -644,10 +680,12 @@ def test_observed_data_with_custom_observable_object():
assert ob_data.objects['0'].property1 == 'something' assert ob_data.objects['0'].property1 == 'something'
@stix2.observables.CustomExtension(stix2.DomainName, 'x-new-ext', [ @stix2.v20.CustomExtension(
('property1', stix2.properties.StringProperty(required=True)), stix2.v20.DomainName, 'x-new-ext', [
('property2', stix2.properties.IntegerProperty()), ('property1', stix2.properties.StringProperty(required=True)),
]) ('property2', stix2.properties.IntegerProperty()),
],
)
class NewExtension(): class NewExtension():
def __init__(self, property2=None, **kwargs): def __init__(self, property2=None, **kwargs):
if property2 and property2 < 10: if property2 and property2 < 10:
@ -670,7 +708,7 @@ def test_custom_extension():
with pytest.raises(stix2.exceptions.MissingPropertiesError) as excinfo: with pytest.raises(stix2.exceptions.MissingPropertiesError) as excinfo:
NewExtension(property2=42) NewExtension(property2=42)
assert excinfo.value.properties == ['property1'] assert excinfo.value.properties == ['property1']
assert str(excinfo.value) == "No values for required properties for _Custom: (property1)." assert str(excinfo.value) == "No values for required properties for _CustomExtension: (property1)."
with pytest.raises(ValueError) as excinfo: with pytest.raises(ValueError) as excinfo:
NewExtension(property1='something', property2=4) NewExtension(property1='something', property2=4)
@ -681,16 +719,19 @@ def test_custom_extension_wrong_observable_type():
# NewExtension is an extension of DomainName, not File # NewExtension is an extension of DomainName, not File
ext = NewExtension(property1='something') ext = NewExtension(property1='something')
with pytest.raises(ValueError) as excinfo: with pytest.raises(ValueError) as excinfo:
stix2.File(name="abc.txt", stix2.v20.File(
extensions={ name="abc.txt",
"ntfs-ext": ext, extensions={
}) "ntfs-ext": ext,
},
)
assert 'Cannot determine extension type' in excinfo.value.reason assert 'Cannot determine extension type' in excinfo.value.reason
@pytest.mark.parametrize("data", [ @pytest.mark.parametrize(
"""{ "data", [
"""{
"keys": [ "keys": [
{ {
"test123": 123, "test123": 123,
@ -698,11 +739,14 @@ def test_custom_extension_wrong_observable_type():
} }
] ]
}""", }""",
]) ],
)
def test_custom_extension_with_list_and_dict_properties_observable_type(data): def test_custom_extension_with_list_and_dict_properties_observable_type(data):
@stix2.observables.CustomExtension(stix2.UserAccount, 'some-extension', [ @stix2.v20.CustomExtension(
('keys', stix2.properties.ListProperty(stix2.properties.DictionaryProperty, required=True)) stix2.v20.UserAccount, 'some-extension', [
]) ('keys', stix2.properties.ListProperty(stix2.properties.DictionaryProperty, required=True)),
],
)
class SomeCustomExtension: class SomeCustomExtension:
pass pass
@ -716,30 +760,36 @@ def test_custom_extension_invalid_observable():
class Foo(object): class Foo(object):
pass pass
with pytest.raises(ValueError) as excinfo: with pytest.raises(ValueError) as excinfo:
@stix2.observables.CustomExtension(Foo, 'x-new-ext', [ @stix2.v20.CustomExtension(
('property1', stix2.properties.StringProperty(required=True)), Foo, 'x-new-ext', [
]) ('property1', stix2.properties.StringProperty(required=True)),
],
)
class FooExtension(): class FooExtension():
pass # pragma: no cover pass # pragma: no cover
assert str(excinfo.value) == "'observable' must be a valid Observable class!" assert str(excinfo.value) == "'observable' must be a valid Observable class!"
class Bar(stix2.observables._Observable): class Bar(stix2.v20.observables._Observable):
pass pass
with pytest.raises(ValueError) as excinfo: with pytest.raises(ValueError) as excinfo:
@stix2.observables.CustomExtension(Bar, 'x-new-ext', [ @stix2.v20.CustomExtension(
('property1', stix2.properties.StringProperty(required=True)), Bar, 'x-new-ext', [
]) ('property1', stix2.properties.StringProperty(required=True)),
],
)
class BarExtension(): class BarExtension():
pass pass
assert "Unknown observable type" in str(excinfo.value) assert "Unknown observable type" in str(excinfo.value)
assert "Custom observables must be created with the @CustomObservable decorator." in str(excinfo.value) assert "Custom observables must be created with the @CustomObservable decorator." in str(excinfo.value)
class Baz(stix2.observables._Observable): class Baz(stix2.v20.observables._Observable):
_type = 'Baz' _type = 'Baz'
with pytest.raises(ValueError) as excinfo: with pytest.raises(ValueError) as excinfo:
@stix2.observables.CustomExtension(Baz, 'x-new-ext', [ @stix2.v20.CustomExtension(
('property1', stix2.properties.StringProperty(required=True)), Baz, 'x-new-ext', [
]) ('property1', stix2.properties.StringProperty(required=True)),
],
)
class BazExtension(): class BazExtension():
pass pass
assert "Unknown observable type" in str(excinfo.value) assert "Unknown observable type" in str(excinfo.value)
@ -748,17 +798,21 @@ def test_custom_extension_invalid_observable():
def test_custom_extension_invalid_type_name(): def test_custom_extension_invalid_type_name():
with pytest.raises(ValueError) as excinfo: with pytest.raises(ValueError) as excinfo:
@stix2.observables.CustomExtension(stix2.File, 'x', { @stix2.v20.CustomExtension(
'property1': stix2.properties.StringProperty(required=True), stix2.v20.File, 'x', {
}) 'property1': stix2.properties.StringProperty(required=True),
},
)
class FooExtension(): class FooExtension():
pass # pragma: no cover pass # pragma: no cover
assert "Invalid extension type name 'x':" in str(excinfo.value) assert "Invalid extension type name 'x':" in str(excinfo.value)
with pytest.raises(ValueError) as excinfo: with pytest.raises(ValueError) as excinfo:
@stix2.observables.CustomExtension(stix2.File, 'x_new_ext', { @stix2.v20.CustomExtension(
'property1': stix2.properties.StringProperty(required=True), stix2.File, 'x_new_ext', {
}) 'property1': stix2.properties.StringProperty(required=True),
},
)
class BlaExtension(): class BlaExtension():
pass # pragma: no cover pass # pragma: no cover
assert "Invalid extension type name 'x_new_ext':" in str(excinfo.value) assert "Invalid extension type name 'x_new_ext':" in str(excinfo.value)
@ -766,7 +820,7 @@ def test_custom_extension_invalid_type_name():
def test_custom_extension_no_properties(): def test_custom_extension_no_properties():
with pytest.raises(ValueError) as excinfo: with pytest.raises(ValueError) as excinfo:
@stix2.observables.CustomExtension(stix2.DomainName, 'x-new-ext2', None) @stix2.v20.CustomExtension(stix2.v20.DomainName, 'x-new-ext2', None)
class BarExtension(): class BarExtension():
pass pass
assert "Must supply a list, containing tuples." in str(excinfo.value) assert "Must supply a list, containing tuples." in str(excinfo.value)
@ -774,7 +828,7 @@ def test_custom_extension_no_properties():
def test_custom_extension_empty_properties(): def test_custom_extension_empty_properties():
with pytest.raises(ValueError) as excinfo: with pytest.raises(ValueError) as excinfo:
@stix2.observables.CustomExtension(stix2.DomainName, 'x-new-ext2', []) @stix2.v20.CustomExtension(stix2.v20.DomainName, 'x-new-ext2', [])
class BarExtension(): class BarExtension():
pass pass
assert "Must supply a list, containing tuples." in str(excinfo.value) assert "Must supply a list, containing tuples." in str(excinfo.value)
@ -782,16 +836,18 @@ def test_custom_extension_empty_properties():
def test_custom_extension_dict_properties(): def test_custom_extension_dict_properties():
with pytest.raises(ValueError) as excinfo: with pytest.raises(ValueError) as excinfo:
@stix2.observables.CustomExtension(stix2.DomainName, 'x-new-ext2', {}) @stix2.v20.CustomExtension(stix2.v20.DomainName, 'x-new-ext2', {})
class BarExtension(): class BarExtension():
pass pass
assert "Must supply a list, containing tuples." in str(excinfo.value) assert "Must supply a list, containing tuples." in str(excinfo.value)
def test_custom_extension_no_init_1(): def test_custom_extension_no_init_1():
@stix2.observables.CustomExtension(stix2.DomainName, 'x-new-extension', [ @stix2.v20.CustomExtension(
('property1', stix2.properties.StringProperty(required=True)), stix2.v20.DomainName, 'x-new-extension', [
]) ('property1', stix2.properties.StringProperty(required=True)),
],
)
class NewExt(): class NewExt():
pass pass
@ -800,9 +856,11 @@ def test_custom_extension_no_init_1():
def test_custom_extension_no_init_2(): def test_custom_extension_no_init_2():
@stix2.observables.CustomExtension(stix2.DomainName, 'x-new-ext2', [ @stix2.v20.CustomExtension(
('property1', stix2.properties.StringProperty(required=True)), stix2.v20.DomainName, 'x-new-ext2', [
]) ('property1', stix2.properties.StringProperty(required=True)),
],
)
class NewExt2(object): class NewExt2(object):
pass pass
@ -822,13 +880,14 @@ def test_parse_observable_with_custom_extension():
} }
}""" }"""
parsed = stix2.parse_observable(input_str) parsed = stix2.parse_observable(input_str, version='2.0')
assert parsed.extensions['x-new-ext'].property2 == 12 assert parsed.extensions['x-new-ext'].property2 == 12
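A sketch of the kind of registration that makes the parse above succeed, applied to a different observable type; 'x-demo-ext' is a made-up extension name:

import stix2
from stix2 import properties

@stix2.v20.CustomExtension(
    stix2.v20.DomainName, 'x-demo-ext', [
        ('property1', properties.StringProperty(required=True)),
    ],
)
class DemoExt(object):
    pass

domain = stix2.v20.DomainName(
    value="example.com",
    extensions={'x-demo-ext': DemoExt(property1="something")},
)
assert domain.extensions['x-demo-ext'].property1 == "something"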
@pytest.mark.parametrize("data", [ @pytest.mark.parametrize(
# URL is not in EXT_MAP "data", [
"""{ # URL is not in EXT_MAP
"""{
"type": "url", "type": "url",
"value": "example.com", "value": "example.com",
"extensions": { "extensions": {
@ -838,8 +897,8 @@ def test_parse_observable_with_custom_extension():
} }
} }
}""", }""",
# File is in EXT_MAP # File is in EXT_MAP
"""{ """{
"type": "file", "type": "file",
"name": "foo.txt", "name": "foo.txt",
"extensions": { "extensions": {
@ -849,15 +908,16 @@ def test_parse_observable_with_custom_extension():
} }
} }
}""", }""",
]) ],
)
def test_parse_observable_with_unregistered_custom_extension(data): def test_parse_observable_with_unregistered_custom_extension(data):
with pytest.raises(ValueError) as excinfo: with pytest.raises(ValueError) as excinfo:
stix2.parse_observable(data) stix2.parse_observable(data, version='2.0')
assert "Can't parse unknown extension type" in str(excinfo.value) assert "Can't parse unknown extension type" in str(excinfo.value)
parsed_ob = stix2.parse_observable(data, allow_custom=True) parsed_ob = stix2.parse_observable(data, allow_custom=True, version='2.0')
assert parsed_ob['extensions']['x-foobar-ext']['property1'] == 'foo' assert parsed_ob['extensions']['x-foobar-ext']['property1'] == 'foo'
assert not isinstance(parsed_ob['extensions']['x-foobar-ext'], stix2.core._STIXBase) assert not isinstance(parsed_ob['extensions']['x-foobar-ext'], stix2.base._STIXBase)
def test_register_custom_object(): def test_register_custom_object():
@ -865,18 +925,19 @@ def test_register_custom_object():
class CustomObject2(object): class CustomObject2(object):
_type = 'awesome-object' _type = 'awesome-object'
stix2._register_type(CustomObject2) stix2.core._register_object(CustomObject2, version="2.0")
# Note that we will always check against newest OBJ_MAP. # Note that we will always check against newest OBJ_MAP.
assert (CustomObject2._type, CustomObject2) in stix2.OBJ_MAP.items() assert (CustomObject2._type, CustomObject2) in stix2.v20.OBJ_MAP.items()
def test_extension_property_location(): def test_extension_property_location():
assert 'extensions' in stix2.v20.observables.OBJ_MAP_OBSERVABLE['x-new-observable']._properties assert 'extensions' in stix2.v20.OBJ_MAP_OBSERVABLE['x-new-observable']._properties
assert 'extensions' not in stix2.v20.observables.EXT_MAP['domain-name']['x-new-ext']._properties assert 'extensions' not in stix2.v20.EXT_MAP['domain-name']['x-new-ext']._properties
@pytest.mark.parametrize("data", [ @pytest.mark.parametrize(
"""{ "data", [
"""{
"type": "x-example", "type": "x-example",
"id": "x-example--336d8a9f-91f1-46c5-b142-6441bb9f8b8d", "id": "x-example--336d8a9f-91f1-46c5-b142-6441bb9f8b8d",
"created": "2018-06-12T16:20:58.059Z", "created": "2018-06-12T16:20:58.059Z",
@ -888,18 +949,23 @@ def test_extension_property_location():
} }
} }
}""", }""",
]) ],
)
def test_custom_object_nested_dictionary(data): def test_custom_object_nested_dictionary(data):
@stix2.sdo.CustomObject('x-example', [ @stix2.v20.CustomObject(
('dictionary', stix2.properties.DictionaryProperty()), 'x-example', [
]) ('dictionary', stix2.properties.DictionaryProperty()),
],
)
class Example(object): class Example(object):
def __init__(self, **kwargs): def __init__(self, **kwargs):
pass pass
example = Example(id='x-example--336d8a9f-91f1-46c5-b142-6441bb9f8b8d', example = Example(
created='2018-06-12T16:20:58.059Z', id='x-example--336d8a9f-91f1-46c5-b142-6441bb9f8b8d',
modified='2018-06-12T16:20:58.059Z', created='2018-06-12T16:20:58.059Z',
dictionary={'key': {'key_b': 'value', 'key_a': 'value'}}) modified='2018-06-12T16:20:58.059Z',
dictionary={'key': {'key_b': 'value', 'key_a': 'value'}},
)
assert data == str(example) assert data == str(example)
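Taken together, a minimal sketch of the 2.0-scoped CustomObject decorator and version-pinned parsing used throughout this file; 'x-demo-type' and its properties are illustrative only:

import stix2
from stix2 import properties

@stix2.v20.CustomObject(
    'x-demo-type', [
        ('property1', properties.StringProperty(required=True)),
        ('property2', properties.IntegerProperty()),
    ],
)
class DemoType(object):
    pass

obj = DemoType(property1="something", property2=42)
parsed = stix2.parse(str(obj), version="2.0", allow_custom=True)
assert parsed["property1"] == "something"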


@@ -1,9 +1,11 @@
import pytest import pytest
from stix2.datastore import (CompositeDataSource, DataSink, DataSource, from stix2.datastore import (
DataStoreMixin) CompositeDataSource, DataSink, DataSource, DataStoreMixin,
)
from stix2.datastore.filters import Filter from stix2.datastore.filters import Filter
from stix2.test.constants import CAMPAIGN_MORE_KWARGS
from .constants import CAMPAIGN_MORE_KWARGS
def test_datasource_abstract_class_raises_error(): def test_datasource_abstract_class_raises_error():
@ -46,15 +48,19 @@ def test_datastore_creator_of_raises():
def test_datastore_relationships_raises(): def test_datastore_relationships_raises():
with pytest.raises(AttributeError) as excinfo: with pytest.raises(AttributeError) as excinfo:
DataStoreMixin().relationships(obj="indicator--00000000-0000-4000-8000-000000000001", DataStoreMixin().relationships(
target_only=True) obj="indicator--00000000-0000-4000-8000-000000000001",
target_only=True,
)
assert "DataStoreMixin has no data source to query" == str(excinfo.value) assert "DataStoreMixin has no data source to query" == str(excinfo.value)
def test_datastore_related_to_raises(): def test_datastore_related_to_raises():
with pytest.raises(AttributeError) as excinfo: with pytest.raises(AttributeError) as excinfo:
DataStoreMixin().related_to(obj="indicator--00000000-0000-4000-8000-000000000001", DataStoreMixin().related_to(
target_only=True) obj="indicator--00000000-0000-4000-8000-000000000001",
target_only=True,
)
assert "DataStoreMixin has no data source to query" == str(excinfo.value) assert "DataStoreMixin has no data source to query" == str(excinfo.value)
@ -84,15 +90,19 @@ def test_composite_datastore_query_raises_error():
def test_composite_datastore_relationships_raises_error(): def test_composite_datastore_relationships_raises_error():
with pytest.raises(AttributeError) as excinfo: with pytest.raises(AttributeError) as excinfo:
CompositeDataSource().relationships(obj="indicator--00000000-0000-4000-8000-000000000001", CompositeDataSource().relationships(
target_only=True) obj="indicator--00000000-0000-4000-8000-000000000001",
target_only=True,
)
assert "CompositeDataSource has no data sources" == str(excinfo.value) assert "CompositeDataSource has no data sources" == str(excinfo.value)
def test_composite_datastore_related_to_raises_error(): def test_composite_datastore_related_to_raises_error():
with pytest.raises(AttributeError) as excinfo: with pytest.raises(AttributeError) as excinfo:
CompositeDataSource().related_to(obj="indicator--00000000-0000-4000-8000-000000000001", CompositeDataSource().related_to(
target_only=True) obj="indicator--00000000-0000-4000-8000-000000000001",
target_only=True,
)
assert "CompositeDataSource has no data sources" == str(excinfo.value) assert "CompositeDataSource has no data sources" == str(excinfo.value)


@@ -15,8 +15,10 @@ def test_add_remove_composite_datasource():
with pytest.raises(TypeError) as excinfo: with pytest.raises(TypeError) as excinfo:
cds.add_data_sources([ds1, ds2, ds1, ds3]) cds.add_data_sources([ds1, ds2, ds1, ds3])
assert str(excinfo.value) == ("DataSource (to be added) is not of type " assert str(excinfo.value) == (
"stix2.DataSource. DataSource type is '<class 'stix2.datastore.memory.MemorySink'>'") "DataSource (to be added) is not of type "
"stix2.DataSource. DataSource type is '<class 'stix2.datastore.memory.MemorySink'>'"
)
cds.add_data_sources([ds1, ds2, ds1]) cds.add_data_sources([ds1, ds2, ds1])
@ -28,10 +30,12 @@ def test_add_remove_composite_datasource():
def test_composite_datasource_operations(stix_objs1, stix_objs2): def test_composite_datasource_operations(stix_objs1, stix_objs2):
BUNDLE1 = dict(id="bundle--%s" % make_id(), BUNDLE1 = dict(
objects=stix_objs1, id="bundle--%s" % make_id(),
spec_version="2.0", objects=stix_objs1,
type="bundle") spec_version="2.0",
type="bundle",
)
cds1 = CompositeDataSource() cds1 = CompositeDataSource()
ds1_1 = MemorySource(stix_data=BUNDLE1) ds1_1 = MemorySource(stix_data=BUNDLE1)
ds1_2 = MemorySource(stix_data=stix_objs2) ds1_2 = MemorySource(stix_data=stix_objs2)
@ -57,11 +61,11 @@ def test_composite_datasource_operations(stix_objs1, stix_objs2):
assert indicator["type"] == "indicator" assert indicator["type"] == "indicator"
query1 = [ query1 = [
Filter("type", "=", "indicator") Filter("type", "=", "indicator"),
] ]
query2 = [ query2 = [
Filter("valid_from", "=", "2017-01-27T13:49:53.935382Z") Filter("valid_from", "=", "2017-01-27T13:49:53.935382Z"),
] ]
cds1.filters.add(query2) cds1.filters.add(query2)
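A condensed sketch of the composite workflow these assertions cover, assuming two in-memory sources and placeholder indicator values:

import stix2
from stix2.datastore import CompositeDataSource
from stix2.datastore.filters import Filter
from stix2.datastore.memory import MemorySource

ind = stix2.v20.Indicator(
    labels=["malicious-activity"],
    pattern="[file:hashes.MD5 = 'd41d8cd98f00b204e9800998ecf8427e']",
    valid_from="2017-01-01T12:34:56Z",
)

cds = CompositeDataSource()
cds.add_data_sources([MemorySource(stix_data=[ind]), MemorySource()])

# Queries and gets fan out across every attached source
results = cds.query([Filter("type", "=", "indicator")])
assert any(r.id == ind.id for r in results)
assert cds.get(ind.id).id == ind.id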


@@ -8,18 +8,17 @@ import stat
import pytest import pytest
import pytz import pytz
from stix2 import (Bundle, Campaign, CustomObject, FileSystemSink, import stix2
FileSystemSource, FileSystemStore, Filter, Identity, from stix2.datastore.filesystem import (
Indicator, Malware, MarkingDefinition, Relationship, AuthSet, _find_search_optimizations, _get_matching_dir_entries,
TLPMarking, parse, properties) _timestamp2filename,
from stix2.datastore.filesystem import (AuthSet, _find_search_optimizations, )
_get_matching_dir_entries,
_timestamp2filename)
from stix2.exceptions import STIXError from stix2.exceptions import STIXError
from stix2.test.constants import (CAMPAIGN_ID, CAMPAIGN_KWARGS, IDENTITY_ID,
IDENTITY_KWARGS, INDICATOR_ID, from .constants import (
INDICATOR_KWARGS, MALWARE_ID, MALWARE_KWARGS, CAMPAIGN_ID, CAMPAIGN_KWARGS, IDENTITY_ID, IDENTITY_KWARGS, INDICATOR_ID,
RELATIONSHIP_IDS) INDICATOR_KWARGS, MALWARE_ID, MALWARE_KWARGS, RELATIONSHIP_IDS,
)
FS_PATH = os.path.join(os.path.dirname(os.path.realpath(__file__)), "stix2_data") FS_PATH = os.path.join(os.path.dirname(os.path.realpath(__file__)), "stix2_data")
@ -27,7 +26,7 @@ FS_PATH = os.path.join(os.path.dirname(os.path.realpath(__file__)), "stix2_data"
@pytest.fixture @pytest.fixture
def fs_store(): def fs_store():
# create # create
yield FileSystemStore(FS_PATH) yield stix2.FileSystemStore(FS_PATH)
# remove campaign dir # remove campaign dir
shutil.rmtree(os.path.join(FS_PATH, "campaign"), True) shutil.rmtree(os.path.join(FS_PATH, "campaign"), True)
@ -36,7 +35,7 @@ def fs_store():
@pytest.fixture @pytest.fixture
def fs_source(): def fs_source():
# create # create
fs = FileSystemSource(FS_PATH) fs = stix2.FileSystemSource(FS_PATH)
assert fs.stix_dir == FS_PATH assert fs.stix_dir == FS_PATH
yield fs yield fs
@ -47,7 +46,7 @@ def fs_source():
@pytest.fixture @pytest.fixture
def fs_sink(): def fs_sink():
# create # create
fs = FileSystemSink(FS_PATH) fs = stix2.FileSystemSink(FS_PATH)
assert fs.stix_dir == FS_PATH assert fs.stix_dir == FS_PATH
yield fs yield fs
@ -78,7 +77,7 @@ def bad_stix_files():
# bad STIX object # bad STIX object
stix_obj = { stix_obj = {
"id": "intrusion-set--test-bad-stix", "id": "intrusion-set--test-bad-stix",
"spec_version": "2.0" "spec_version": "2.0",
# no "type" field # no "type" field
} }
@ -92,22 +91,24 @@ def bad_stix_files():
@pytest.fixture(scope='module') @pytest.fixture(scope='module')
def rel_fs_store(): def rel_fs_store():
cam = Campaign(id=CAMPAIGN_ID, **CAMPAIGN_KWARGS) cam = stix2.v20.Campaign(id=CAMPAIGN_ID, **CAMPAIGN_KWARGS)
idy = Identity(id=IDENTITY_ID, **IDENTITY_KWARGS) idy = stix2.v20.Identity(id=IDENTITY_ID, **IDENTITY_KWARGS)
ind = Indicator(id=INDICATOR_ID, **INDICATOR_KWARGS) ind = stix2.v20.Indicator(id=INDICATOR_ID, **INDICATOR_KWARGS)
mal = Malware(id=MALWARE_ID, **MALWARE_KWARGS) mal = stix2.v20.Malware(id=MALWARE_ID, **MALWARE_KWARGS)
rel1 = Relationship(ind, 'indicates', mal, id=RELATIONSHIP_IDS[0]) rel1 = stix2.v20.Relationship(ind, 'indicates', mal, id=RELATIONSHIP_IDS[0])
rel2 = Relationship(mal, 'targets', idy, id=RELATIONSHIP_IDS[1]) rel2 = stix2.v20.Relationship(mal, 'targets', idy, id=RELATIONSHIP_IDS[1])
rel3 = Relationship(cam, 'uses', mal, id=RELATIONSHIP_IDS[2]) rel3 = stix2.v20.Relationship(cam, 'uses', mal, id=RELATIONSHIP_IDS[2])
stix_objs = [cam, idy, ind, mal, rel1, rel2, rel3] stix_objs = [cam, idy, ind, mal, rel1, rel2, rel3]
fs = FileSystemStore(FS_PATH) fs = stix2.FileSystemStore(FS_PATH)
for o in stix_objs: for o in stix_objs:
fs.add(o) fs.add(o)
yield fs yield fs
for o in stix_objs: for o in stix_objs:
filepath = os.path.join(FS_PATH, o.type, o.id, filepath = os.path.join(
_timestamp2filename(o.modified) + '.json') FS_PATH, o.type, o.id,
_timestamp2filename(o.modified) + '.json',
)
# Some test-scoped fixtures (e.g. fs_store) delete all campaigns, so by # Some test-scoped fixtures (e.g. fs_store) delete all campaigns, so by
# the time this module-scoped fixture tears itself down, it may find # the time this module-scoped fixture tears itself down, it may find
@ -124,13 +125,13 @@ def rel_fs_store():
def test_filesystem_source_nonexistent_folder(): def test_filesystem_source_nonexistent_folder():
with pytest.raises(ValueError) as excinfo: with pytest.raises(ValueError) as excinfo:
FileSystemSource('nonexistent-folder') stix2.FileSystemSource('nonexistent-folder')
assert "for STIX data does not exist" in str(excinfo) assert "for STIX data does not exist" in str(excinfo)
def test_filesystem_sink_nonexistent_folder(): def test_filesystem_sink_nonexistent_folder():
with pytest.raises(ValueError) as excinfo: with pytest.raises(ValueError) as excinfo:
FileSystemSink('nonexistent-folder') stix2.FileSystemSink('nonexistent-folder')
assert "for STIX data does not exist" in str(excinfo) assert "for STIX data does not exist" in str(excinfo)
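A sketch of the store round trip the fixtures above set up, using a throwaway directory rather than the FS_PATH fixture; all values are placeholders:

import tempfile

import stix2

stix_dir = tempfile.mkdtemp()  # FileSystemStore expects an existing directory
store = stix2.FileSystemStore(stix_dir)

camp = stix2.v20.Campaign(
    name="Hannibal",
    objective="Targeting Italian and Spanish Diplomat internet accounts",
    aliases=["War Elephant"],
)
store.add(camp)

assert store.get(camp.id).name == "Hannibal"
assert len(store.query([stix2.Filter("type", "=", "campaign")])) == 1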
@ -158,8 +159,10 @@ def test_filesystem_source_get_object(fs_source):
mal = fs_source.get("malware--6b616fc1-1505-48e3-8b2c-0d19337bff38") mal = fs_source.get("malware--6b616fc1-1505-48e3-8b2c-0d19337bff38")
assert mal.id == "malware--6b616fc1-1505-48e3-8b2c-0d19337bff38" assert mal.id == "malware--6b616fc1-1505-48e3-8b2c-0d19337bff38"
assert mal.name == "Rover" assert mal.name == "Rover"
assert mal.modified == datetime.datetime(2018, 11, 16, 22, 54, 20, 390000, assert mal.modified == datetime.datetime(
pytz.utc) 2018, 11, 16, 22, 54, 20, 390000,
pytz.utc,
)
def test_filesystem_source_get_nonexistent_object(fs_source): def test_filesystem_source_get_nonexistent_object(fs_source):
@ -169,18 +172,20 @@ def test_filesystem_source_get_nonexistent_object(fs_source):
def test_filesystem_source_all_versions(fs_source): def test_filesystem_source_all_versions(fs_source):
ids = fs_source.all_versions( ids = fs_source.all_versions(
"identity--c78cb6e5-0c4b-4611-8297-d1b8b55e40b5" "identity--c78cb6e5-0c4b-4611-8297-d1b8b55e40b5",
) )
assert len(ids) == 2 assert len(ids) == 2
assert all(id_.id == "identity--c78cb6e5-0c4b-4611-8297-d1b8b55e40b5" assert all(
for id_ in ids) id_.id == "identity--c78cb6e5-0c4b-4611-8297-d1b8b55e40b5"
for id_ in ids
)
assert all(id_.name == "The MITRE Corporation" for id_ in ids) assert all(id_.name == "The MITRE Corporation" for id_ in ids)
assert all(id_.type == "identity" for id_ in ids) assert all(id_.type == "identity" for id_ in ids)
def test_filesystem_source_query_single(fs_source): def test_filesystem_source_query_single(fs_source):
# query2 # query2
is_2 = fs_source.query([Filter("external_references.external_id", '=', "T1027")]) is_2 = fs_source.query([stix2.Filter("external_references.external_id", '=', "T1027")])
assert len(is_2) == 1 assert len(is_2) == 1
is_2 = is_2[0] is_2 = is_2[0]
@ -188,9 +193,9 @@ def test_filesystem_source_query_single(fs_source):
assert is_2.type == "attack-pattern" assert is_2.type == "attack-pattern"
def test_filesytem_source_query_multiple(fs_source): def test_filesystem_source_query_multiple(fs_source):
# query # query
intrusion_sets = fs_source.query([Filter("type", '=', "intrusion-set")]) intrusion_sets = fs_source.query([stix2.Filter("type", '=', "intrusion-set")])
assert len(intrusion_sets) == 2 assert len(intrusion_sets) == 2
assert "intrusion-set--a653431d-6a5e-4600-8ad3-609b5af57064" in [is_.id for is_ in intrusion_sets] assert "intrusion-set--a653431d-6a5e-4600-8ad3-609b5af57064" in [is_.id for is_ in intrusion_sets]
assert "intrusion-set--f3bdec95-3d62-42d9-a840-29630f6cdc1a" in [is_.id for is_ in intrusion_sets] assert "intrusion-set--f3bdec95-3d62-42d9-a840-29630f6cdc1a" in [is_.id for is_ in intrusion_sets]
@ -205,9 +210,9 @@ def test_filesystem_source_backward_compatible(fs_source):
# it. # it.
modified = datetime.datetime(2018, 11, 16, 22, 54, 20, 390000, pytz.utc) modified = datetime.datetime(2018, 11, 16, 22, 54, 20, 390000, pytz.utc)
results = fs_source.query([ results = fs_source.query([
Filter("type", "=", "malware"), stix2.Filter("type", "=", "malware"),
Filter("id", "=", "malware--6b616fc1-1505-48e3-8b2c-0d19337bff38"), stix2.Filter("id", "=", "malware--6b616fc1-1505-48e3-8b2c-0d19337bff38"),
Filter("modified", "=", modified) stix2.Filter("modified", "=", modified),
]) ])
assert len(results) == 1 assert len(results) == 1
@ -220,14 +225,18 @@ def test_filesystem_source_backward_compatible(fs_source):
def test_filesystem_sink_add_python_stix_object(fs_sink, fs_source): def test_filesystem_sink_add_python_stix_object(fs_sink, fs_source):
# add python stix object # add python stix object
camp1 = Campaign(name="Hannibal", camp1 = stix2.v20.Campaign(
objective="Targeting Italian and Spanish Diplomat internet accounts", name="Hannibal",
aliases=["War Elephant"]) objective="Targeting Italian and Spanish Diplomat internet accounts",
aliases=["War Elephant"],
)
fs_sink.add(camp1) fs_sink.add(camp1)
filepath = os.path.join(FS_PATH, "campaign", camp1.id, filepath = os.path.join(
_timestamp2filename(camp1.modified) + ".json") FS_PATH, "campaign", camp1.id,
_timestamp2filename(camp1.modified) + ".json",
)
assert os.path.exists(filepath) assert os.path.exists(filepath)
camp1_r = fs_source.get(camp1.id) camp1_r = fs_source.get(camp1.id)
@ -247,7 +256,7 @@ def test_filesystem_sink_add_stix_object_dict(fs_sink, fs_source):
"aliases": ["Purple Robes"], "aliases": ["Purple Robes"],
"id": "campaign--8e2e2d2b-17d4-4cbf-938f-98ee46b3cd3f", "id": "campaign--8e2e2d2b-17d4-4cbf-938f-98ee46b3cd3f",
"created": "2017-05-31T21:31:53.197755Z", "created": "2017-05-31T21:31:53.197755Z",
"modified": "2017-05-31T21:31:53.197755Z" "modified": "2017-05-31T21:31:53.197755Z",
} }
fs_sink.add(camp2) fs_sink.add(camp2)
@ -258,9 +267,11 @@ def test_filesystem_sink_add_stix_object_dict(fs_sink, fs_source):
# as what's in the dict, since the parsing process can enforce a precision # as what's in the dict, since the parsing process can enforce a precision
# constraint (e.g. truncate to milliseconds), which results in a slightly # constraint (e.g. truncate to milliseconds), which results in a slightly
# different name. # different name.
camp2obj = parse(camp2) camp2obj = stix2.parse(camp2)
filepath = os.path.join(FS_PATH, "campaign", camp2obj["id"], filepath = os.path.join(
_timestamp2filename(camp2obj["modified"]) + ".json") FS_PATH, "campaign", camp2obj["id"],
_timestamp2filename(camp2obj["modified"]) + ".json",
)
assert os.path.exists(filepath) assert os.path.exists(filepath)
@ -286,16 +297,18 @@ def test_filesystem_sink_add_stix_bundle_dict(fs_sink, fs_source):
"aliases": ["Huns"], "aliases": ["Huns"],
"id": "campaign--b8f86161-ccae-49de-973a-4ca320c62478", "id": "campaign--b8f86161-ccae-49de-973a-4ca320c62478",
"created": "2017-05-31T21:31:53.197755Z", "created": "2017-05-31T21:31:53.197755Z",
"modified": "2017-05-31T21:31:53.197755Z" "modified": "2017-05-31T21:31:53.197755Z",
} },
] ],
} }
fs_sink.add(bund) fs_sink.add(bund)
camp_obj = parse(bund["objects"][0]) camp_obj = stix2.parse(bund["objects"][0])
filepath = os.path.join(FS_PATH, "campaign", camp_obj["id"], filepath = os.path.join(
_timestamp2filename(camp_obj["modified"]) + ".json") FS_PATH, "campaign", camp_obj["id"],
_timestamp2filename(camp_obj["modified"]) + ".json",
)
assert os.path.exists(filepath) assert os.path.exists(filepath)
@ -316,10 +329,12 @@ def test_filesystem_sink_add_json_stix_object(fs_sink, fs_source):
fs_sink.add(camp4) fs_sink.add(camp4)
camp4obj = parse(camp4) camp4obj = stix2.parse(camp4)
filepath = os.path.join(FS_PATH, "campaign", filepath = os.path.join(
"campaign--6a6ca372-ba07-42cc-81ef-9840fc1f963d", FS_PATH, "campaign",
_timestamp2filename(camp4obj["modified"]) + ".json") "campaign--6a6ca372-ba07-42cc-81ef-9840fc1f963d",
_timestamp2filename(camp4obj["modified"]) + ".json",
)
assert os.path.exists(filepath) assert os.path.exists(filepath)
@ -339,12 +354,14 @@ def test_filesystem_sink_json_stix_bundle(fs_sink, fs_source):
' "name": "Spartacus", "objective": "Oppressive regimes of Africa and Middle East"}]}' ' "name": "Spartacus", "objective": "Oppressive regimes of Africa and Middle East"}]}'
fs_sink.add(bund2) fs_sink.add(bund2)
bund2obj = parse(bund2) bund2obj = stix2.parse(bund2)
camp_obj = bund2obj["objects"][0] camp_obj = bund2obj["objects"][0]
filepath = os.path.join(FS_PATH, "campaign", filepath = os.path.join(
"campaign--2c03b8bf-82ee-433e-9918-ca2cb6e9534b", FS_PATH, "campaign",
_timestamp2filename(camp_obj["modified"]) + ".json") "campaign--2c03b8bf-82ee-433e-9918-ca2cb6e9534b",
_timestamp2filename(camp_obj["modified"]) + ".json",
)
assert os.path.exists(filepath) assert os.path.exists(filepath)
@ -357,9 +374,11 @@ def test_filesystem_sink_json_stix_bundle(fs_sink, fs_source):
def test_filesystem_sink_add_objects_list(fs_sink, fs_source): def test_filesystem_sink_add_objects_list(fs_sink, fs_source):
# add list of objects # add list of objects
camp6 = Campaign(name="Comanche", camp6 = stix2.v20.Campaign(
objective="US Midwest manufacturing firms, oil refineries, and businesses", name="Comanche",
aliases=["Horse Warrior"]) objective="US Midwest manufacturing firms, oil refineries, and businesses",
aliases=["Horse Warrior"],
)
camp7 = { camp7 = {
"name": "Napolean", "name": "Napolean",
@ -368,19 +387,22 @@ def test_filesystem_sink_add_objects_list(fs_sink, fs_source):
"aliases": ["The Frenchmen"], "aliases": ["The Frenchmen"],
"id": "campaign--122818b6-1112-4fb0-b11b-b111107ca70a", "id": "campaign--122818b6-1112-4fb0-b11b-b111107ca70a",
"created": "2017-05-31T21:31:53.197755Z", "created": "2017-05-31T21:31:53.197755Z",
"modified": "2017-05-31T21:31:53.197755Z" "modified": "2017-05-31T21:31:53.197755Z",
} }
fs_sink.add([camp6, camp7]) fs_sink.add([camp6, camp7])
camp7obj = parse(camp7) camp7obj = stix2.parse(camp7)
camp6filepath = os.path.join(FS_PATH, "campaign", camp6.id, camp6filepath = os.path.join(
_timestamp2filename(camp6["modified"]) + FS_PATH, "campaign", camp6.id,
".json") _timestamp2filename(camp6["modified"]) +
".json",
)
camp7filepath = os.path.join( camp7filepath = os.path.join(
FS_PATH, "campaign", "campaign--122818b6-1112-4fb0-b11b-b111107ca70a", FS_PATH, "campaign", "campaign--122818b6-1112-4fb0-b11b-b111107ca70a",
_timestamp2filename(camp7obj["modified"]) + ".json") _timestamp2filename(camp7obj["modified"]) + ".json",
)
assert os.path.exists(camp6filepath) assert os.path.exists(camp6filepath)
assert os.path.exists(camp7filepath) assert os.path.exists(camp7filepath)
@ -399,14 +421,14 @@ def test_filesystem_sink_add_objects_list(fs_sink, fs_source):
def test_filesystem_sink_marking(fs_sink): def test_filesystem_sink_marking(fs_sink):
marking = MarkingDefinition( marking = stix2.v20.MarkingDefinition(
definition_type="tlp", definition_type="tlp",
definition=TLPMarking(tlp="green") definition=stix2.v20.TLPMarking(tlp="green"),
) )
fs_sink.add(marking) fs_sink.add(marking)
marking_filepath = os.path.join( marking_filepath = os.path.join(
FS_PATH, "marking-definition", marking["id"] + ".json" FS_PATH, "marking-definition", marking["id"] + ".json",
) )
assert os.path.exists(marking_filepath) assert os.path.exists(marking_filepath)
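
Reduced to a standalone sketch, the marking workflow exercised here looks like the following. The ./stix_data directory is hypothetical; every API call mirrors one visible in this diff, and the path check reflects the fact that marking definitions, having no modified timestamp, are written straight into the type folder rather than into a per-version subdirectory.

import os

import stix2

if not os.path.exists("./stix_data"):     # hypothetical local STIX directory
    os.makedirs("./stix_data")

store = stix2.FileSystemStore("./stix_data")

marking = stix2.v20.MarkingDefinition(
    definition_type="tlp",
    definition=stix2.v20.TLPMarking(tlp="green"),
)
store.add(marking)

# No version subfolder for markings: the file sits directly under the type folder.
marking_path = os.path.join("./stix_data", "marking-definition", marking["id"] + ".json")
assert os.path.exists(marking_path)
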
@ -436,14 +458,14 @@ def test_filesystem_store_all_versions(fs_store):
def test_filesystem_store_query(fs_store): def test_filesystem_store_query(fs_store):
# query() # query()
tools = fs_store.query([Filter("labels", "in", "tool")]) tools = fs_store.query([stix2.Filter("labels", "in", "tool")])
assert len(tools) == 2 assert len(tools) == 2
assert "tool--242f3da3-4425-4d11-8f5c-b842886da966" in [tool.id for tool in tools] assert "tool--242f3da3-4425-4d11-8f5c-b842886da966" in [tool.id for tool in tools]
assert "tool--03342581-f790-4f03-ba41-e82e67392e23" in [tool.id for tool in tools] assert "tool--03342581-f790-4f03-ba41-e82e67392e23" in [tool.id for tool in tools]
def test_filesystem_store_query_single_filter(fs_store): def test_filesystem_store_query_single_filter(fs_store):
query = Filter("labels", "in", "tool") query = stix2.Filter("labels", "in", "tool")
tools = fs_store.query(query) tools = fs_store.query(query)
assert len(tools) == 2 assert len(tools) == 2
assert "tool--242f3da3-4425-4d11-8f5c-b842886da966" in [tool.id for tool in tools] assert "tool--242f3da3-4425-4d11-8f5c-b842886da966" in [tool.id for tool in tools]
@ -458,45 +480,53 @@ def test_filesystem_store_empty_query(fs_store):
def test_filesystem_store_query_multiple_filters(fs_store): def test_filesystem_store_query_multiple_filters(fs_store):
fs_store.source.filters.add(Filter("labels", "in", "tool")) fs_store.source.filters.add(stix2.Filter("labels", "in", "tool"))
tools = fs_store.query(Filter("id", "=", "tool--242f3da3-4425-4d11-8f5c-b842886da966")) tools = fs_store.query(stix2.Filter("id", "=", "tool--242f3da3-4425-4d11-8f5c-b842886da966"))
assert len(tools) == 1 assert len(tools) == 1
assert tools[0].id == "tool--242f3da3-4425-4d11-8f5c-b842886da966" assert tools[0].id == "tool--242f3da3-4425-4d11-8f5c-b842886da966"
def test_filesystem_store_query_dont_include_type_folder(fs_store): def test_filesystem_store_query_dont_include_type_folder(fs_store):
results = fs_store.query(Filter("type", "!=", "tool")) results = fs_store.query(stix2.Filter("type", "!=", "tool"))
assert len(results) == 28 assert len(results) == 28
def test_filesystem_store_add(fs_store): def test_filesystem_store_add(fs_store):
# add() # add()
camp1 = Campaign(name="Great Heathen Army", camp1 = stix2.v20.Campaign(
objective="Targeting the government of United Kingdom and insitutions affiliated with the Church Of England", name="Great Heathen Army",
aliases=["Ragnar"]) objective="Targeting the government of United Kingdom and insitutions affiliated with the Church Of England",
aliases=["Ragnar"],
)
fs_store.add(camp1) fs_store.add(camp1)
camp1_r = fs_store.get(camp1.id) camp1_r = fs_store.get(camp1.id)
assert camp1_r.id == camp1.id assert camp1_r.id == camp1.id
assert camp1_r.name == camp1.name assert camp1_r.name == camp1.name
filepath = os.path.join(FS_PATH, "campaign", camp1_r.id, filepath = os.path.join(
_timestamp2filename(camp1_r.modified) + ".json") FS_PATH, "campaign", camp1_r.id,
_timestamp2filename(camp1_r.modified) + ".json",
)
# remove # remove
os.remove(filepath) os.remove(filepath)
def test_filesystem_store_add_as_bundle(): def test_filesystem_store_add_as_bundle():
fs_store = FileSystemStore(FS_PATH, bundlify=True) fs_store = stix2.FileSystemStore(FS_PATH, bundlify=True)
camp1 = Campaign(name="Great Heathen Army", camp1 = stix2.v20.Campaign(
objective="Targeting the government of United Kingdom and insitutions affiliated with the Church Of England", name="Great Heathen Army",
aliases=["Ragnar"]) objective="Targeting the government of United Kingdom and insitutions affiliated with the Church Of England",
aliases=["Ragnar"],
)
fs_store.add(camp1) fs_store.add(camp1)
filepath = os.path.join(FS_PATH, "campaign", camp1.id, filepath = os.path.join(
_timestamp2filename(camp1.modified) + ".json") FS_PATH, "campaign", camp1.id,
_timestamp2filename(camp1.modified) + ".json",
)
with open(filepath) as bundle_file: with open(filepath) as bundle_file:
assert '"type": "bundle"' in bundle_file.read() assert '"type": "bundle"' in bundle_file.read()
@ -509,7 +539,7 @@ def test_filesystem_store_add_as_bundle():
def test_filesystem_add_bundle_object(fs_store): def test_filesystem_add_bundle_object(fs_store):
bundle = Bundle() bundle = stix2.v20.Bundle()
fs_store.add(bundle) fs_store.add(bundle)
@ -524,14 +554,14 @@ def test_filesystem_store_add_invalid_object(fs_store):
def test_filesystem_store_add_marking(fs_store): def test_filesystem_store_add_marking(fs_store):
marking = MarkingDefinition( marking = stix2.v20.MarkingDefinition(
definition_type="tlp", definition_type="tlp",
definition=TLPMarking(tlp="green") definition=stix2.v20.TLPMarking(tlp="green"),
) )
fs_store.add(marking) fs_store.add(marking)
marking_filepath = os.path.join( marking_filepath = os.path.join(
FS_PATH, "marking-definition", marking["id"] + ".json" FS_PATH, "marking-definition", marking["id"] + ".json",
) )
assert os.path.exists(marking_filepath) assert os.path.exists(marking_filepath)
@ -544,12 +574,14 @@ def test_filesystem_store_add_marking(fs_store):
def test_filesystem_object_with_custom_property(fs_store): def test_filesystem_object_with_custom_property(fs_store):
camp = Campaign(name="Scipio Africanus", camp = stix2.v20.Campaign(
objective="Defeat the Carthaginians", name="Scipio Africanus",
x_empire="Roman", objective="Defeat the Carthaginians",
allow_custom=True) x_empire="Roman",
allow_custom=True,
)
fs_store.add(camp, True) fs_store.add(camp)
camp_r = fs_store.get(camp.id) camp_r = fs_store.get(camp.id)
assert camp_r.id == camp.id assert camp_r.id == camp.id
@ -557,12 +589,14 @@ def test_filesystem_object_with_custom_property(fs_store):
def test_filesystem_object_with_custom_property_in_bundle(fs_store): def test_filesystem_object_with_custom_property_in_bundle(fs_store):
camp = Campaign(name="Scipio Africanus", camp = stix2.v20.Campaign(
objective="Defeat the Carthaginians", name="Scipio Africanus",
x_empire="Roman", objective="Defeat the Carthaginians",
allow_custom=True) x_empire="Roman",
allow_custom=True,
)
bundle = Bundle(camp, allow_custom=True) bundle = stix2.v20.Bundle(camp, allow_custom=True)
fs_store.add(bundle) fs_store.add(bundle)
camp_r = fs_store.get(camp.id) camp_r = fs_store.get(camp.id)
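
The two custom-property tests boil down to one rule: non-spec properties such as x_empire require allow_custom=True both when the object is created and when it is wrapped in a Bundle. A minimal sketch of that pattern, reusing the hypothetical ./stix_data directory from the sketch above:

import stix2

store = stix2.FileSystemStore("./stix_data")   # reuses the directory created above

camp = stix2.v20.Campaign(
    name="Scipio Africanus",
    objective="Defeat the Carthaginians",
    x_empire="Roman",          # custom property, hence allow_custom below
    allow_custom=True,
)

store.add(camp)
# Equivalently, the object can be shipped inside a bundle that also opts in:
#     store.add(stix2.v20.Bundle(camp, allow_custom=True))

camp_r = store.get(camp.id)
assert camp_r.x_empire == "Roman"
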
@ -571,9 +605,11 @@ def test_filesystem_object_with_custom_property_in_bundle(fs_store):
def test_filesystem_custom_object(fs_store): def test_filesystem_custom_object(fs_store):
@CustomObject('x-new-obj', [ @stix2.v20.CustomObject(
('property1', properties.StringProperty(required=True)), 'x-new-obj', [
]) ('property1', stix2.properties.StringProperty(required=True)),
],
)
class NewObj(): class NewObj():
pass pass
@ -581,8 +617,8 @@ def test_filesystem_custom_object(fs_store):
fs_store.add(newobj) fs_store.add(newobj)
newobj_r = fs_store.get(newobj.id) newobj_r = fs_store.get(newobj.id)
assert newobj_r.id == newobj.id assert newobj_r["id"] == newobj["id"]
assert newobj_r.property1 == 'something' assert newobj_r["property1"] == 'something'
# remove dir # remove dir
shutil.rmtree(os.path.join(FS_PATH, "x-new-obj"), True) shutil.rmtree(os.path.join(FS_PATH, "x-new-obj"), True)
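
For custom object types the key step is the @CustomObject decorator, which registers the new type with the library so the datastore can parse it back. A standalone sketch of the pattern used above, again against the hypothetical ./stix_data directory:

import stix2

@stix2.v20.CustomObject(
    'x-new-obj', [
        ('property1', stix2.properties.StringProperty(required=True)),
    ],
)
class NewObj(object):
    pass

store = stix2.FileSystemStore("./stix_data")

newobj = NewObj(property1='something')
store.add(newobj)

# Retrieved custom objects are safest to read with dict-style access,
# which is what the updated assertions above switch to.
newobj_r = store.get(newobj.id)
assert newobj_r["property1"] == 'something'
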
@ -690,7 +726,7 @@ def test_auth_set_black1():
def test_optimize_types1(): def test_optimize_types1():
filters = [ filters = [
Filter("type", "=", "foo") stix2.Filter("type", "=", "foo"),
] ]
auth_types, auth_ids = _find_search_optimizations(filters) auth_types, auth_ids = _find_search_optimizations(filters)
@ -703,8 +739,8 @@ def test_optimize_types1():
def test_optimize_types2(): def test_optimize_types2():
filters = [ filters = [
Filter("type", "=", "foo"), stix2.Filter("type", "=", "foo"),
Filter("type", "=", "bar") stix2.Filter("type", "=", "bar"),
] ]
auth_types, auth_ids = _find_search_optimizations(filters) auth_types, auth_ids = _find_search_optimizations(filters)
@ -717,8 +753,8 @@ def test_optimize_types2():
def test_optimize_types3(): def test_optimize_types3():
filters = [ filters = [
Filter("type", "in", ["A", "B", "C"]), stix2.Filter("type", "in", ["A", "B", "C"]),
Filter("type", "in", ["B", "C", "D"]) stix2.Filter("type", "in", ["B", "C", "D"]),
] ]
auth_types, auth_ids = _find_search_optimizations(filters) auth_types, auth_ids = _find_search_optimizations(filters)
@ -731,8 +767,8 @@ def test_optimize_types3():
def test_optimize_types4(): def test_optimize_types4():
filters = [ filters = [
Filter("type", "in", ["A", "B", "C"]), stix2.Filter("type", "in", ["A", "B", "C"]),
Filter("type", "in", ["D", "E", "F"]) stix2.Filter("type", "in", ["D", "E", "F"]),
] ]
auth_types, auth_ids = _find_search_optimizations(filters) auth_types, auth_ids = _find_search_optimizations(filters)
@ -745,8 +781,8 @@ def test_optimize_types4():
def test_optimize_types5(): def test_optimize_types5():
filters = [ filters = [
Filter("type", "in", ["foo", "bar"]), stix2.Filter("type", "in", ["foo", "bar"]),
Filter("type", "!=", "bar") stix2.Filter("type", "!=", "bar"),
] ]
auth_types, auth_ids = _find_search_optimizations(filters) auth_types, auth_ids = _find_search_optimizations(filters)
@ -759,8 +795,8 @@ def test_optimize_types5():
def test_optimize_types6(): def test_optimize_types6():
filters = [ filters = [
Filter("type", "!=", "foo"), stix2.Filter("type", "!=", "foo"),
Filter("type", "!=", "bar") stix2.Filter("type", "!=", "bar"),
] ]
auth_types, auth_ids = _find_search_optimizations(filters) auth_types, auth_ids = _find_search_optimizations(filters)
@ -773,8 +809,8 @@ def test_optimize_types6():
def test_optimize_types7(): def test_optimize_types7():
filters = [ filters = [
Filter("type", "=", "foo"), stix2.Filter("type", "=", "foo"),
Filter("type", "!=", "foo") stix2.Filter("type", "!=", "foo"),
] ]
auth_types, auth_ids = _find_search_optimizations(filters) auth_types, auth_ids = _find_search_optimizations(filters)
@ -798,8 +834,8 @@ def test_optimize_types8():
def test_optimize_types_ids1(): def test_optimize_types_ids1():
filters = [ filters = [
Filter("type", "in", ["foo", "bar"]), stix2.Filter("type", "in", ["foo", "bar"]),
Filter("id", "=", "foo--00000000-0000-0000-0000-000000000000") stix2.Filter("id", "=", "foo--00000000-0000-0000-0000-000000000000"),
] ]
auth_types, auth_ids = _find_search_optimizations(filters) auth_types, auth_ids = _find_search_optimizations(filters)
@ -812,8 +848,8 @@ def test_optimize_types_ids1():
def test_optimize_types_ids2(): def test_optimize_types_ids2():
filters = [ filters = [
Filter("type", "=", "foo"), stix2.Filter("type", "=", "foo"),
Filter("id", "=", "bar--00000000-0000-0000-0000-000000000000") stix2.Filter("id", "=", "bar--00000000-0000-0000-0000-000000000000"),
] ]
auth_types, auth_ids = _find_search_optimizations(filters) auth_types, auth_ids = _find_search_optimizations(filters)
@ -826,8 +862,8 @@ def test_optimize_types_ids2():
def test_optimize_types_ids3(): def test_optimize_types_ids3():
filters = [ filters = [
Filter("type", "in", ["foo", "bar"]), stix2.Filter("type", "in", ["foo", "bar"]),
Filter("id", "!=", "bar--00000000-0000-0000-0000-000000000000") stix2.Filter("id", "!=", "bar--00000000-0000-0000-0000-000000000000"),
] ]
auth_types, auth_ids = _find_search_optimizations(filters) auth_types, auth_ids = _find_search_optimizations(filters)
@ -840,12 +876,14 @@ def test_optimize_types_ids3():
def test_optimize_types_ids4(): def test_optimize_types_ids4():
filters = [ filters = [
Filter("type", "in", ["A", "B", "C"]), stix2.Filter("type", "in", ["A", "B", "C"]),
Filter("id", "in", [ stix2.Filter(
"B--00000000-0000-0000-0000-000000000000", "id", "in", [
"C--00000000-0000-0000-0000-000000000000", "B--00000000-0000-0000-0000-000000000000",
"D--00000000-0000-0000-0000-000000000000", "C--00000000-0000-0000-0000-000000000000",
]) "D--00000000-0000-0000-0000-000000000000",
],
),
] ]
auth_types, auth_ids = _find_search_optimizations(filters) auth_types, auth_ids = _find_search_optimizations(filters)
@ -855,20 +893,22 @@ def test_optimize_types_ids4():
assert auth_ids.auth_type == AuthSet.WHITE assert auth_ids.auth_type == AuthSet.WHITE
assert auth_ids.values == { assert auth_ids.values == {
"B--00000000-0000-0000-0000-000000000000", "B--00000000-0000-0000-0000-000000000000",
"C--00000000-0000-0000-0000-000000000000" "C--00000000-0000-0000-0000-000000000000",
} }
def test_optimize_types_ids5(): def test_optimize_types_ids5():
filters = [ filters = [
Filter("type", "in", ["A", "B", "C"]), stix2.Filter("type", "in", ["A", "B", "C"]),
Filter("type", "!=", "C"), stix2.Filter("type", "!=", "C"),
Filter("id", "in", [ stix2.Filter(
"B--00000000-0000-0000-0000-000000000000", "id", "in", [
"C--00000000-0000-0000-0000-000000000000", "B--00000000-0000-0000-0000-000000000000",
"D--00000000-0000-0000-0000-000000000000" "C--00000000-0000-0000-0000-000000000000",
]), "D--00000000-0000-0000-0000-000000000000",
Filter("id", "!=", "D--00000000-0000-0000-0000-000000000000") ],
),
stix2.Filter("id", "!=", "D--00000000-0000-0000-0000-000000000000"),
] ]
auth_types, auth_ids = _find_search_optimizations(filters) auth_types, auth_ids = _find_search_optimizations(filters)
@ -881,7 +921,7 @@ def test_optimize_types_ids5():
def test_optimize_types_ids6(): def test_optimize_types_ids6():
filters = [ filters = [
Filter("id", "=", "A--00000000-0000-0000-0000-000000000000") stix2.Filter("id", "=", "A--00000000-0000-0000-0000-000000000000"),
] ]
auth_types, auth_ids = _find_search_optimizations(filters) auth_types, auth_ids = _find_search_optimizations(filters)
@ -895,7 +935,7 @@ def test_optimize_types_ids6():
def test_search_auth_set_white1(): def test_search_auth_set_white1():
auth_set = AuthSet( auth_set = AuthSet(
{"attack-pattern", "doesntexist"}, {"attack-pattern", "doesntexist"},
set() set(),
) )
results = _get_matching_dir_entries(FS_PATH, auth_set, stat.S_ISDIR) results = _get_matching_dir_entries(FS_PATH, auth_set, stat.S_ISDIR)
@ -909,19 +949,19 @@ def test_search_auth_set_white2():
auth_set = AuthSet( auth_set = AuthSet(
{ {
"malware--6b616fc1-1505-48e3-8b2c-0d19337bff38", "malware--6b616fc1-1505-48e3-8b2c-0d19337bff38",
"malware--92ec0cbd-2c30-44a2-b270-73f4ec949841" "malware--92ec0cbd-2c30-44a2-b270-73f4ec949841",
}, },
{ {
"malware--92ec0cbd-2c30-44a2-b270-73f4ec949841", "malware--92ec0cbd-2c30-44a2-b270-73f4ec949841",
"malware--96b08451-b27a-4ff6-893f-790e26393a8e", "malware--96b08451-b27a-4ff6-893f-790e26393a8e",
"doesntexist" "doesntexist",
} },
) )
results = _get_matching_dir_entries( results = _get_matching_dir_entries(
os.path.join(FS_PATH, "malware"), os.path.join(FS_PATH, "malware"),
auth_set, stat.S_ISDIR auth_set, stat.S_ISDIR,
) )
assert results == ["malware--6b616fc1-1505-48e3-8b2c-0d19337bff38"] assert results == ["malware--6b616fc1-1505-48e3-8b2c-0d19337bff38"]
@ -931,9 +971,11 @@ def test_search_auth_set_white3():
auth_set = AuthSet({"20170531213258226477", "doesntexist"}, set()) auth_set = AuthSet({"20170531213258226477", "doesntexist"}, set())
results = _get_matching_dir_entries( results = _get_matching_dir_entries(
os.path.join(FS_PATH, "malware", os.path.join(
"malware--6b616fc1-1505-48e3-8b2c-0d19337bff38"), FS_PATH, "malware",
auth_set, stat.S_ISREG, ".json" "malware--6b616fc1-1505-48e3-8b2c-0d19337bff38",
),
auth_set, stat.S_ISREG, ".json",
) )
assert results == ["20170531213258226477.json"] assert results == ["20170531213258226477.json"]
@ -942,23 +984,23 @@ def test_search_auth_set_white3():
def test_search_auth_set_black1(): def test_search_auth_set_black1():
auth_set = AuthSet( auth_set = AuthSet(
None, None,
{"tool--242f3da3-4425-4d11-8f5c-b842886da966", "doesntexist"} {"tool--242f3da3-4425-4d11-8f5c-b842886da966", "doesntexist"},
) )
results = _get_matching_dir_entries( results = _get_matching_dir_entries(
os.path.join(FS_PATH, "tool"), os.path.join(FS_PATH, "tool"),
auth_set, stat.S_ISDIR auth_set, stat.S_ISDIR,
) )
assert set(results) == { assert set(results) == {
"tool--03342581-f790-4f03-ba41-e82e67392e23" "tool--03342581-f790-4f03-ba41-e82e67392e23",
} }
def test_search_auth_set_white_empty(): def test_search_auth_set_white_empty():
auth_set = AuthSet( auth_set = AuthSet(
set(), set(),
set() set(),
) )
results = _get_matching_dir_entries(FS_PATH, auth_set, stat.S_ISDIR) results = _get_matching_dir_entries(FS_PATH, auth_set, stat.S_ISDIR)
@ -971,7 +1013,7 @@ def test_search_auth_set_black_empty(rel_fs_store):
# predictable (it adds "campaign"). # predictable (it adds "campaign").
auth_set = AuthSet( auth_set = AuthSet(
None, None,
set() set(),
) )
results = _get_matching_dir_entries(FS_PATH, auth_set, stat.S_ISDIR) results = _get_matching_dir_entries(FS_PATH, auth_set, stat.S_ISDIR)
@ -987,14 +1029,14 @@ def test_search_auth_set_black_empty(rel_fs_store):
"malware", "malware",
"marking-definition", "marking-definition",
"relationship", "relationship",
"tool" "tool",
} }
def test_timestamp2filename_naive(): def test_timestamp2filename_naive():
dt = datetime.datetime( dt = datetime.datetime(
2010, 6, 15, 2010, 6, 15,
8, 30, 10, 1234 8, 30, 10, 1234,
) )
filename = _timestamp2filename(dt) filename = _timestamp2filename(dt)
@ -1007,7 +1049,7 @@ def test_timestamp2filename_tz():
dt = datetime.datetime( dt = datetime.datetime(
2010, 6, 15, 2010, 6, 15,
7, 30, 10, 1234, 7, 30, 10, 1234,
tz tz,
) )
filename = _timestamp2filename(dt) filename = _timestamp2filename(dt)
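
Taken together, these tests describe the on-disk layout the filesystem store relies on: one folder per type, one folder per object id, and one <timestamp>.json file per version (markings excepted), with the file name produced by _timestamp2filename(). A short query-side sketch against the same hypothetical ./stix_data directory; the filter values are taken from objects that appear elsewhere in this diff.

import stix2

store = stix2.FileSystemStore("./stix_data")   # hypothetical directory, as above

# Filters compose; query() returns every stored object version matching all of them.
malware = store.query([
    stix2.Filter("type", "=", "malware"),
    stix2.Filter("id", "=", "malware--6b616fc1-1505-48e3-8b2c-0d19337bff38"),
])

# A store-level filter applies to every subsequent query on that source.
store.source.filters.add(stix2.Filter("labels", "in", "tool"))
tools = store.query(stix2.Filter("id", "=", "tool--242f3da3-4425-4d11-8f5c-b842886da966"))
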


@ -10,23 +10,23 @@ stix_objs = [
"description": "\n\nTITLE:\n\tPoison Ivy", "description": "\n\nTITLE:\n\tPoison Ivy",
"id": "malware--fdd60b30-b67c-41e3-b0b9-f01faf20d111", "id": "malware--fdd60b30-b67c-41e3-b0b9-f01faf20d111",
"labels": [ "labels": [
"remote-access-trojan" "remote-access-trojan",
], ],
"modified": "2017-01-27T13:49:53.997Z", "modified": "2017-01-27T13:49:53.997Z",
"name": "Poison Ivy", "name": "Poison Ivy",
"type": "malware" "type": "malware",
}, },
{ {
"created": "2014-05-08T09:00:00.000Z", "created": "2014-05-08T09:00:00.000Z",
"id": "indicator--a932fcc6-e032-476c-826f-cb970a5a1ade", "id": "indicator--a932fcc6-e032-476c-826f-cb970a5a1ade",
"labels": [ "labels": [
"file-hash-watchlist" "file-hash-watchlist",
], ],
"modified": "2014-05-08T09:00:00.000Z", "modified": "2014-05-08T09:00:00.000Z",
"name": "File hash for Poison Ivy variant", "name": "File hash for Poison Ivy variant",
"pattern": "[file:hashes.'SHA-256' = 'ef537f25c895bfa782526529a9b63d97aa631564d5d789c2b765448c8635fb6c']", "pattern": "[file:hashes.'SHA-256' = 'ef537f25c895bfa782526529a9b63d97aa631564d5d789c2b765448c8635fb6c']",
"type": "indicator", "type": "indicator",
"valid_from": "2014-05-08T09:00:00.000000Z" "valid_from": "2014-05-08T09:00:00.000000Z",
}, },
{ {
"created": "2014-05-08T09:00:00.000Z", "created": "2014-05-08T09:00:00.000Z",
@ -34,20 +34,20 @@ stix_objs = [
{ {
"marking_ref": "marking-definition--5e57c739-391a-4eb3-b6be-7d15ca92d5ed", "marking_ref": "marking-definition--5e57c739-391a-4eb3-b6be-7d15ca92d5ed",
"selectors": [ "selectors": [
"relationship_type" "relationship_type",
] ],
} },
], ],
"id": "relationship--2f9a9aa9-108a-4333-83e2-4fb25add0463", "id": "relationship--2f9a9aa9-108a-4333-83e2-4fb25add0463",
"modified": "2014-05-08T09:00:00.000Z", "modified": "2014-05-08T09:00:00.000Z",
"object_marking_refs": [ "object_marking_refs": [
"marking-definition--613f2e26-407d-48c7-9eca-b8e91df99dc9" "marking-definition--613f2e26-407d-48c7-9eca-b8e91df99dc9",
], ],
"relationship_type": "indicates", "relationship_type": "indicates",
"revoked": True, "revoked": True,
"source_ref": "indicator--a932fcc6-e032-476c-826f-cb970a5a1ade", "source_ref": "indicator--a932fcc6-e032-476c-826f-cb970a5a1ade",
"target_ref": "malware--fdd60b30-b67c-41e3-b0b9-f01faf20d111", "target_ref": "malware--fdd60b30-b67c-41e3-b0b9-f01faf20d111",
"type": "relationship" "type": "relationship",
}, },
{ {
"id": "vulnerability--ee916c28-c7a4-4d0d-ad56-a8d357f89fef", "id": "vulnerability--ee916c28-c7a4-4d0d-ad56-a8d357f89fef",
@ -60,10 +60,10 @@ stix_objs = [
"external_references": [ "external_references": [
{ {
"source_name": "cve", "source_name": "cve",
"external_id": "CVE-2014-0160" "external_id": "CVE-2014-0160",
} },
], ],
"labels": ["heartbleed", "has-logo"] "labels": ["heartbleed", "has-logo"],
}, },
{ {
"type": "observed-data", "type": "observed-data",
@ -77,11 +77,11 @@ stix_objs = [
"objects": { "objects": {
"0": { "0": {
"type": "file", "type": "file",
"name": "HAL 9000.exe" "name": "HAL 9000.exe",
} },
} },
} },
] ]
@ -406,8 +406,10 @@ def test_filters4():
# Assert invalid Filter cannot be created # Assert invalid Filter cannot be created
with pytest.raises(ValueError) as excinfo: with pytest.raises(ValueError) as excinfo:
Filter("modified", "?", "2017-01-27T13:49:53.935Z") Filter("modified", "?", "2017-01-27T13:49:53.935Z")
assert str(excinfo.value) == ("Filter operator '?' not supported " assert str(excinfo.value) == (
"for specified property: 'modified'") "Filter operator '?' not supported "
"for specified property: 'modified'"
)
def test_filters5(stix_objs2, real_stix_objs2): def test_filters5(stix_objs2, real_stix_objs2):
@ -447,7 +449,7 @@ def test_filters7(stix_objs2, real_stix_objs2):
"0": { "0": {
"type": "file", "type": "file",
"hashes": { "hashes": {
"SHA-256": "35a01331e9ad96f751278b891b6ea09699806faedfa237d40513d92ad1b7100f" "SHA-256": "35a01331e9ad96f751278b891b6ea09699806faedfa237d40513d92ad1b7100f",
}, },
"extensions": { "extensions": {
"pdf-ext": { "pdf-ext": {
@ -457,14 +459,14 @@ def test_filters7(stix_objs2, real_stix_objs2):
"Author": "Adobe Systems Incorporated", "Author": "Adobe Systems Incorporated",
"Creator": "Adobe FrameMaker 5.5.3 for Power Macintosh", "Creator": "Adobe FrameMaker 5.5.3 for Power Macintosh",
"Producer": "Acrobat Distiller 3.01 for Power Macintosh", "Producer": "Acrobat Distiller 3.01 for Power Macintosh",
"CreationDate": "20070412090123-02" "CreationDate": "20070412090123-02",
}, },
"pdfid0": "DFCE52BD827ECF765649852119D", "pdfid0": "DFCE52BD827ECF765649852119D",
"pdfid1": "57A1E0F9ED2AE523E313C" "pdfid1": "57A1E0F9ED2AE523E313C",
} },
} },
} },
} },
} }
stix_objects = list(stix_objs2) + [obsvd_data_obj] stix_objects = list(stix_objs2) + [obsvd_data_obj]
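
The Filter triples used throughout this file also pin down the operator checking seen earlier in the diff: a Filter is (property, operator, value), and an unsupported operator is rejected when the Filter is constructed, not when a query runs. A minimal sketch:

import stix2

# Operators exercised in these tests include "=", "!=" and "in".
f1 = stix2.Filter("type", "=", "indicator")
f2 = stix2.Filter("labels", "in", "remote-access-trojan")
# f1 and f2 would normally be handed to a datastore, e.g. store.query([f1, f2]).

try:
    stix2.Filter("modified", "?", "2017-01-27T13:49:53.935Z")
except ValueError as e:
    # "Filter operator '?' not supported for specified property: 'modified'"
    print(e)
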


@ -3,111 +3,113 @@ import shutil
import pytest import pytest
from stix2 import (Bundle, Campaign, CustomObject, Filter, Identity, Indicator, from stix2 import Filter, MemorySource, MemoryStore, properties
Malware, MemorySource, MemoryStore, Relationship,
properties)
from stix2.datastore import make_id from stix2.datastore import make_id
from stix2.utils import parse_into_datetime from stix2.utils import parse_into_datetime
from stix2.v20 import (
Bundle, Campaign, CustomObject, Identity, Indicator, Malware, Relationship,
)
from .constants import (CAMPAIGN_ID, CAMPAIGN_KWARGS, IDENTITY_ID, from .constants import (
IDENTITY_KWARGS, INDICATOR_ID, INDICATOR_KWARGS, CAMPAIGN_ID, CAMPAIGN_KWARGS, IDENTITY_ID, IDENTITY_KWARGS, INDICATOR_ID,
MALWARE_ID, MALWARE_KWARGS, RELATIONSHIP_IDS) INDICATOR_KWARGS, MALWARE_ID, MALWARE_KWARGS, RELATIONSHIP_IDS,
)
IND1 = { IND1 = {
"created": "2017-01-27T13:49:53.935Z", "created": "2017-01-27T13:49:53.935Z",
"id": "indicator--00000000-0000-4000-8000-000000000001", "id": "indicator--00000000-0000-4000-8000-000000000001",
"labels": [ "labels": [
"url-watchlist" "url-watchlist",
], ],
"modified": "2017-01-27T13:49:53.935Z", "modified": "2017-01-27T13:49:53.935Z",
"name": "Malicious site hosting downloader", "name": "Malicious site hosting downloader",
"pattern": "[url:value = 'http://x4z9arb.cn/4712']", "pattern": "[url:value = 'http://x4z9arb.cn/4712']",
"type": "indicator", "type": "indicator",
"valid_from": "2017-01-27T13:49:53.935382Z" "valid_from": "2017-01-27T13:49:53.935382Z",
} }
IND2 = { IND2 = {
"created": "2017-01-27T13:49:53.935Z", "created": "2017-01-27T13:49:53.935Z",
"id": "indicator--00000000-0000-4000-8000-000000000001", "id": "indicator--00000000-0000-4000-8000-000000000001",
"labels": [ "labels": [
"url-watchlist" "url-watchlist",
], ],
"modified": "2017-01-27T13:49:53.935Z", "modified": "2017-01-27T13:49:53.935Z",
"name": "Malicious site hosting downloader", "name": "Malicious site hosting downloader",
"pattern": "[url:value = 'http://x4z9arb.cn/4712']", "pattern": "[url:value = 'http://x4z9arb.cn/4712']",
"type": "indicator", "type": "indicator",
"valid_from": "2017-01-27T13:49:53.935382Z" "valid_from": "2017-01-27T13:49:53.935382Z",
} }
IND3 = { IND3 = {
"created": "2017-01-27T13:49:53.935Z", "created": "2017-01-27T13:49:53.935Z",
"id": "indicator--00000000-0000-4000-8000-000000000001", "id": "indicator--00000000-0000-4000-8000-000000000001",
"labels": [ "labels": [
"url-watchlist" "url-watchlist",
], ],
"modified": "2017-01-27T13:49:53.936Z", "modified": "2017-01-27T13:49:53.936Z",
"name": "Malicious site hosting downloader", "name": "Malicious site hosting downloader",
"pattern": "[url:value = 'http://x4z9arb.cn/4712']", "pattern": "[url:value = 'http://x4z9arb.cn/4712']",
"type": "indicator", "type": "indicator",
"valid_from": "2017-01-27T13:49:53.935382Z" "valid_from": "2017-01-27T13:49:53.935382Z",
} }
IND4 = { IND4 = {
"created": "2017-01-27T13:49:53.935Z", "created": "2017-01-27T13:49:53.935Z",
"id": "indicator--00000000-0000-4000-8000-000000000002", "id": "indicator--00000000-0000-4000-8000-000000000002",
"labels": [ "labels": [
"url-watchlist" "url-watchlist",
], ],
"modified": "2017-01-27T13:49:53.935Z", "modified": "2017-01-27T13:49:53.935Z",
"name": "Malicious site hosting downloader", "name": "Malicious site hosting downloader",
"pattern": "[url:value = 'http://x4z9arb.cn/4712']", "pattern": "[url:value = 'http://x4z9arb.cn/4712']",
"type": "indicator", "type": "indicator",
"valid_from": "2017-01-27T13:49:53.935382Z" "valid_from": "2017-01-27T13:49:53.935382Z",
} }
IND5 = { IND5 = {
"created": "2017-01-27T13:49:53.935Z", "created": "2017-01-27T13:49:53.935Z",
"id": "indicator--00000000-0000-4000-8000-000000000002", "id": "indicator--00000000-0000-4000-8000-000000000002",
"labels": [ "labels": [
"url-watchlist" "url-watchlist",
], ],
"modified": "2017-01-27T13:49:53.935Z", "modified": "2017-01-27T13:49:53.935Z",
"name": "Malicious site hosting downloader", "name": "Malicious site hosting downloader",
"pattern": "[url:value = 'http://x4z9arb.cn/4712']", "pattern": "[url:value = 'http://x4z9arb.cn/4712']",
"type": "indicator", "type": "indicator",
"valid_from": "2017-01-27T13:49:53.935382Z" "valid_from": "2017-01-27T13:49:53.935382Z",
} }
IND6 = { IND6 = {
"created": "2017-01-27T13:49:53.935Z", "created": "2017-01-27T13:49:53.935Z",
"id": "indicator--00000000-0000-4000-8000-000000000001", "id": "indicator--00000000-0000-4000-8000-000000000001",
"labels": [ "labels": [
"url-watchlist" "url-watchlist",
], ],
"modified": "2017-01-31T13:49:53.935Z", "modified": "2017-01-31T13:49:53.935Z",
"name": "Malicious site hosting downloader", "name": "Malicious site hosting downloader",
"pattern": "[url:value = 'http://x4z9arb.cn/4712']", "pattern": "[url:value = 'http://x4z9arb.cn/4712']",
"type": "indicator", "type": "indicator",
"valid_from": "2017-01-27T13:49:53.935382Z" "valid_from": "2017-01-27T13:49:53.935382Z",
} }
IND7 = { IND7 = {
"created": "2017-01-27T13:49:53.935Z", "created": "2017-01-27T13:49:53.935Z",
"id": "indicator--00000000-0000-4000-8000-000000000002", "id": "indicator--00000000-0000-4000-8000-000000000002",
"labels": [ "labels": [
"url-watchlist" "url-watchlist",
], ],
"modified": "2017-01-27T13:49:53.935Z", "modified": "2017-01-27T13:49:53.935Z",
"name": "Malicious site hosting downloader", "name": "Malicious site hosting downloader",
"pattern": "[url:value = 'http://x4z9arb.cn/4712']", "pattern": "[url:value = 'http://x4z9arb.cn/4712']",
"type": "indicator", "type": "indicator",
"valid_from": "2017-01-27T13:49:53.935382Z" "valid_from": "2017-01-27T13:49:53.935382Z",
} }
IND8 = { IND8 = {
"created": "2017-01-27T13:49:53.935Z", "created": "2017-01-27T13:49:53.935Z",
"id": "indicator--00000000-0000-4000-8000-000000000002", "id": "indicator--00000000-0000-4000-8000-000000000002",
"labels": [ "labels": [
"url-watchlist" "url-watchlist",
], ],
"modified": "2017-01-27T13:49:53.935Z", "modified": "2017-01-27T13:49:53.935Z",
"name": "Malicious site hosting downloader", "name": "Malicious site hosting downloader",
"pattern": "[url:value = 'http://x4z9arb.cn/4712']", "pattern": "[url:value = 'http://x4z9arb.cn/4712']",
"type": "indicator", "type": "indicator",
"valid_from": "2017-01-27T13:49:53.935382Z" "valid_from": "2017-01-27T13:49:53.935382Z",
} }
STIX_OBJS2 = [IND6, IND7, IND8] STIX_OBJS2 = [IND6, IND7, IND8]
@ -139,11 +141,22 @@ def rel_mem_store():
@pytest.fixture @pytest.fixture
def fs_mem_store(request, mem_store): def fs_mem_store(request, mem_store):
filename = 'memory_test/mem_store.json' filename = mem_store.save_to_file('memory_test/mem_store.json')
mem_store.save_to_file(filename)
def fin(): def fin():
# teardown, excecuted regardless of exception # teardown, executed regardless of exception
shutil.rmtree(os.path.dirname(filename))
request.addfinalizer(fin)
return filename
@pytest.fixture
def fs_mem_store_no_name(request, mem_store):
filename = mem_store.save_to_file('memory_test/')
def fin():
# teardown, executed regardless of exception
shutil.rmtree(os.path.dirname(filename)) shutil.rmtree(os.path.dirname(filename))
request.addfinalizer(fin) request.addfinalizer(fin)
@ -162,10 +175,12 @@ def test_memory_source_get_nonexistant_object(mem_source):
def test_memory_store_all_versions(mem_store): def test_memory_store_all_versions(mem_store):
# Add bundle of items to sink # Add bundle of items to sink
mem_store.add(dict(id="bundle--%s" % make_id(), mem_store.add(dict(
objects=STIX_OBJS2, id="bundle--%s" % make_id(),
spec_version="2.0", objects=STIX_OBJS2,
type="bundle")) spec_version="2.0",
type="bundle",
))
resp = mem_store.all_versions("indicator--00000000-0000-4000-8000-000000000001") resp = mem_store.all_versions("indicator--00000000-0000-4000-8000-000000000001")
assert len(resp) == 3 assert len(resp) == 3
@ -203,7 +218,7 @@ def test_memory_store_query_multiple_filters(mem_store):
assert len(resp) == 2 assert len(resp) == 2
def test_memory_store_save_load_file(mem_store, fs_mem_store): def test_memory_store_save_load_file(fs_mem_store):
filename = fs_mem_store # the fixture fs_mem_store yields filename where the memory store was written to filename = fs_mem_store # the fixture fs_mem_store yields filename where the memory store was written to
# STIX2 contents of mem_store have already been written to file # STIX2 contents of mem_store have already been written to file
@ -219,6 +234,22 @@ def test_memory_store_save_load_file(mem_store, fs_mem_store):
assert mem_store2.get("indicator--00000000-0000-4000-8000-000000000001") assert mem_store2.get("indicator--00000000-0000-4000-8000-000000000001")
def test_memory_store_save_load_file_no_name_provided(fs_mem_store_no_name):
filename = fs_mem_store_no_name # the fixture fs_mem_store yields filename where the memory store was written to
# STIX2 contents of mem_store have already been written to file
# (this is done in fixture 'fs_mem_store'), so can already read-in here
contents = open(os.path.abspath(filename)).read()
assert '"id": "indicator--00000000-0000-4000-8000-000000000001",' in contents
assert '"id": "indicator--00000000-0000-4000-8000-000000000001",' in contents
mem_store2 = MemoryStore()
mem_store2.load_from_file(filename)
assert mem_store2.get("indicator--00000000-0000-4000-8000-000000000001")
assert mem_store2.get("indicator--00000000-0000-4000-8000-000000000001")
def test_memory_store_add_invalid_object(mem_store): def test_memory_store_add_invalid_object(mem_store):
ind = ('indicator', IND1) # tuple isn't valid ind = ('indicator', IND1) # tuple isn't valid
with pytest.raises(TypeError): with pytest.raises(TypeError):
@ -226,23 +257,67 @@ def test_memory_store_add_invalid_object(mem_store):
def test_memory_store_object_with_custom_property(mem_store): def test_memory_store_object_with_custom_property(mem_store):
camp = Campaign(name="Scipio Africanus", camp = Campaign(
objective="Defeat the Carthaginians", name="Scipio Africanus",
x_empire="Roman", objective="Defeat the Carthaginians",
allow_custom=True) x_empire="Roman",
allow_custom=True,
)
mem_store.add(camp, True) mem_store.add(camp)
camp_r = mem_store.get(camp.id) camp_r = mem_store.get(camp.id)
assert camp_r.id == camp.id assert camp_r.id == camp.id
assert camp_r.x_empire == camp.x_empire assert camp_r.x_empire == camp.x_empire
def test_memory_store_object_creator_of_present(mem_store):
camp = Campaign(
name="Scipio Africanus",
objective="Defeat the Carthaginians",
created_by_ref="identity--e4196283-7420-4277-a7a3-d57f61ef1389",
x_empire="Roman",
allow_custom=True,
)
iden = Identity(
id="identity--e4196283-7420-4277-a7a3-d57f61ef1389",
name="Foo Corp.",
identity_class="corporation",
)
mem_store.add(camp)
mem_store.add(iden)
camp_r = mem_store.get(camp.id)
assert camp_r.id == camp.id
assert camp_r.x_empire == camp.x_empire
assert mem_store.creator_of(camp_r) == iden
def test_memory_store_object_creator_of_missing(mem_store):
camp = Campaign(
name="Scipio Africanus",
objective="Defeat the Carthaginians",
x_empire="Roman",
allow_custom=True,
)
mem_store.add(camp)
camp_r = mem_store.get(camp.id)
assert camp_r.id == camp.id
assert camp_r.x_empire == camp.x_empire
assert mem_store.creator_of(camp) is None
def test_memory_store_object_with_custom_property_in_bundle(mem_store): def test_memory_store_object_with_custom_property_in_bundle(mem_store):
camp = Campaign(name="Scipio Africanus", camp = Campaign(
objective="Defeat the Carthaginians", name="Scipio Africanus",
x_empire="Roman", objective="Defeat the Carthaginians",
allow_custom=True) x_empire="Roman",
allow_custom=True,
)
bundle = Bundle(camp, allow_custom=True) bundle = Bundle(camp, allow_custom=True)
mem_store.add(bundle) mem_store.add(bundle)
@ -253,14 +328,16 @@ def test_memory_store_object_with_custom_property_in_bundle(mem_store):
def test_memory_store_custom_object(mem_store): def test_memory_store_custom_object(mem_store):
@CustomObject('x-new-obj', [ @CustomObject(
('property1', properties.StringProperty(required=True)), 'x-new-obj', [
]) ('property1', properties.StringProperty(required=True)),
],
)
class NewObj(): class NewObj():
pass pass
newobj = NewObj(property1='something') newobj = NewObj(property1='something')
mem_store.add(newobj, True) mem_store.add(newobj)
newobj_r = mem_store.get(newobj.id) newobj_r = mem_store.get(newobj.id)
assert newobj_r.id == newobj.id assert newobj_r.id == newobj.id
@ -337,3 +414,12 @@ def test_related_to_by_target(rel_mem_store):
assert len(resp) == 2 assert len(resp) == 2
assert any(x['id'] == CAMPAIGN_ID for x in resp) assert any(x['id'] == CAMPAIGN_ID for x in resp)
assert any(x['id'] == INDICATOR_ID for x in resp) assert any(x['id'] == INDICATOR_ID for x in resp)
def test_object_family_internal_components(mem_source):
# Testing internal components.
str_representation = str(mem_source._data['indicator--00000000-0000-4000-8000-000000000001'])
repr_representation = repr(mem_source._data['indicator--00000000-0000-4000-8000-000000000001'])
assert "latest=2017-01-27 13:49:53.936000+00:00>>" in str_representation
assert "latest=2017-01-27 13:49:53.936000+00:00>>" in repr_representation


@ -3,10 +3,10 @@ import json
from medallion.filters.basic_filter import BasicFilter from medallion.filters.basic_filter import BasicFilter
import pytest import pytest
from requests.models import Response from requests.models import Response
import six
from taxii2client import Collection, _filter_kwargs_to_query_params from taxii2client import Collection, _filter_kwargs_to_query_params
from stix2 import (Bundle, TAXIICollectionSink, TAXIICollectionSource, import stix2
TAXIICollectionStore, ThreatActor)
from stix2.datastore import DataSourceError from stix2.datastore import DataSourceError
from stix2.datastore.filters import Filter from stix2.datastore.filters import Filter
@ -18,50 +18,52 @@ class MockTAXIICollectionEndpoint(Collection):
def __init__(self, url, collection_info): def __init__(self, url, collection_info):
super(MockTAXIICollectionEndpoint, self).__init__( super(MockTAXIICollectionEndpoint, self).__init__(
url, collection_info=collection_info url, collection_info=collection_info,
) )
self.objects = [] self.objects = []
def add_objects(self, bundle): def add_objects(self, bundle):
self._verify_can_write() self._verify_can_write()
if isinstance(bundle, str): if isinstance(bundle, six.string_types):
bundle = json.loads(bundle) bundle = json.loads(bundle, encoding='utf-8')
for object in bundle.get("objects", []): for object in bundle.get("objects", []):
self.objects.append(object) self.objects.append(object)
def get_objects(self, **filter_kwargs): def get_objects(self, **filter_kwargs):
self._verify_can_read() self._verify_can_read()
query_params = _filter_kwargs_to_query_params(filter_kwargs) query_params = _filter_kwargs_to_query_params(filter_kwargs)
if not isinstance(query_params, dict): assert isinstance(query_params, dict)
query_params = json.loads(query_params) full_filter = BasicFilter(query_params)
full_filter = BasicFilter(query_params or {})
objs = full_filter.process_filter( objs = full_filter.process_filter(
self.objects, self.objects,
("id", "type", "version"), ("id", "type", "version"),
[] [],
) )
if objs: if objs:
return Bundle(objects=objs) return stix2.v20.Bundle(objects=objs)
else: else:
resp = Response() resp = Response()
resp.status_code = 404 resp.status_code = 404
resp.raise_for_status() resp.raise_for_status()
def get_object(self, id, version=None): def get_object(self, id, **filter_kwargs):
self._verify_can_read() self._verify_can_read()
query_params = None query_params = _filter_kwargs_to_query_params(filter_kwargs)
if version: assert isinstance(query_params, dict)
query_params = _filter_kwargs_to_query_params({"version": version}) full_filter = BasicFilter(query_params)
if query_params:
query_params = json.loads(query_params) # In this endpoint we must first filter objects by id beforehand.
full_filter = BasicFilter(query_params or {}) objects = [x for x in self.objects if x["id"] == id]
objs = full_filter.process_filter( if objects:
self.objects, filtered_objects = full_filter.process_filter(
("version",), objects,
[] ("version",),
) [],
if objs: )
return Bundle(objects=objs) else:
filtered_objects = []
if filtered_objects:
return stix2.v20.Bundle(objects=filtered_objects)
else: else:
resp = Response() resp = Response()
resp.status_code = 404 resp.status_code = 404
@ -70,16 +72,18 @@ class MockTAXIICollectionEndpoint(Collection):
@pytest.fixture @pytest.fixture
def collection(stix_objs1): def collection(stix_objs1):
mock = MockTAXIICollectionEndpoint(COLLECTION_URL, { mock = MockTAXIICollectionEndpoint(
"id": "91a7b528-80eb-42ed-a74d-c6fbd5a26116", COLLECTION_URL, {
"title": "Writable Collection", "id": "91a7b528-80eb-42ed-a74d-c6fbd5a26116",
"description": "This collection is a dropbox for submitting indicators", "title": "Writable Collection",
"can_read": True, "description": "This collection is a dropbox for submitting indicators",
"can_write": True, "can_read": True,
"media_types": [ "can_write": True,
"application/vnd.oasis.stix+json; version=2.0" "media_types": [
] "application/vnd.oasis.stix+json; version=2.0",
}) ],
},
)
mock.objects.extend(stix_objs1) mock.objects.extend(stix_objs1)
return mock return mock
@ -87,94 +91,118 @@ def collection(stix_objs1):
@pytest.fixture @pytest.fixture
def collection_no_rw_access(stix_objs1): def collection_no_rw_access(stix_objs1):
mock = MockTAXIICollectionEndpoint(COLLECTION_URL, { mock = MockTAXIICollectionEndpoint(
"id": "91a7b528-80eb-42ed-a74d-c6fbd5a26116", COLLECTION_URL, {
"title": "Not writeable or readable Collection", "id": "91a7b528-80eb-42ed-a74d-c6fbd5a26116",
"description": "This collection is a dropbox for submitting indicators", "title": "Not writeable or readable Collection",
"can_read": False, "description": "This collection is a dropbox for submitting indicators",
"can_write": False, "can_read": False,
"media_types": [ "can_write": False,
"application/vnd.oasis.stix+json; version=2.0" "media_types": [
] "application/vnd.oasis.stix+json; version=2.0",
}) ],
},
)
mock.objects.extend(stix_objs1) mock.objects.extend(stix_objs1)
return mock return mock
def test_ds_taxii(collection): def test_ds_taxii(collection):
ds = TAXIICollectionSource(collection) ds = stix2.TAXIICollectionSource(collection)
assert ds.collection is not None assert ds.collection is not None
def test_add_stix2_object(collection): def test_add_stix2_object(collection):
tc_sink = TAXIICollectionSink(collection) tc_sink = stix2.TAXIICollectionSink(collection)
# create new STIX threat-actor # create new STIX threat-actor
ta = ThreatActor(name="Teddy Bear", ta = stix2.v20.ThreatActor(
labels=["nation-state"], name="Teddy Bear",
sophistication="innovator", labels=["nation-state"],
resource_level="government", sophistication="innovator",
goals=[ resource_level="government",
"compromising environment NGOs", goals=[
"water-hole attacks geared towards energy sector", "compromising environment NGOs",
]) "water-hole attacks geared towards energy sector",
],
)
tc_sink.add(ta) tc_sink.add(ta)
def test_add_stix2_with_custom_object(collection): def test_add_stix2_with_custom_object(collection):
tc_sink = TAXIICollectionStore(collection, allow_custom=True) tc_sink = stix2.TAXIICollectionStore(collection, allow_custom=True)
# create new STIX threat-actor # create new STIX threat-actor
ta = ThreatActor(name="Teddy Bear", ta = stix2.v20.ThreatActor(
labels=["nation-state"], name="Teddy Bear",
sophistication="innovator", labels=["nation-state"],
resource_level="government", sophistication="innovator",
goals=[ resource_level="government",
"compromising environment NGOs", goals=[
"water-hole attacks geared towards energy sector", "compromising environment NGOs",
], "water-hole attacks geared towards energy sector",
foo="bar", ],
allow_custom=True) foo="bar",
allow_custom=True,
)
tc_sink.add(ta) tc_sink.add(ta)
def test_add_list_object(collection, indicator): def test_add_list_object(collection, indicator):
tc_sink = TAXIICollectionSink(collection) tc_sink = stix2.TAXIICollectionSink(collection)
# create new STIX threat-actor # create new STIX threat-actor
ta = ThreatActor(name="Teddy Bear", ta = stix2.v20.ThreatActor(
labels=["nation-state"], name="Teddy Bear",
sophistication="innovator", labels=["nation-state"],
resource_level="government", sophistication="innovator",
goals=[ resource_level="government",
"compromising environment NGOs", goals=[
"water-hole attacks geared towards energy sector", "compromising environment NGOs",
]) "water-hole attacks geared towards energy sector",
],
)
tc_sink.add([ta, indicator]) tc_sink.add([ta, indicator])
def test_get_object_found(collection):
tc_source = stix2.TAXIICollectionSource(collection)
result = tc_source.query([
stix2.Filter("id", "=", "indicator--00000000-0000-4000-8000-000000000001"),
])
assert result
def test_get_object_not_found(collection):
tc_source = stix2.TAXIICollectionSource(collection)
result = tc_source.get("indicator--00000000-0000-4000-8000-000000000005")
assert result is None
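
The TAXIICollection* classes wrap a live taxii2client Collection rather than the mock endpoint used in these tests, so the sketch below is not runnable as-is: it assumes a reachable, readable and writable TAXII 2.0 collection at a hypothetical URL, and simply strings together the calls the tests exercise.

import stix2
from stix2.datastore import DataSourceError
from taxii2client import Collection

collection = Collection(
    "https://example.com/api1/collections/91a7b528-80eb-42ed-a74d-c6fbd5a26116/",  # hypothetical
)
store = stix2.TAXIICollectionStore(collection)

ta = stix2.v20.ThreatActor(name="Teddy Bear", labels=["nation-state"])
store.add(ta)                    # lists, dicts, JSON strings and Bundles are also accepted

obj = store.get(ta.id)           # get() returns None when the server answers 404
indicators = store.query([stix2.Filter("type", "=", "indicator")])

try:
    store.all_versions("indicator--00000000-0000-4000-8000-000000000005")
except DataSourceError:
    # all_versions() and query() surface HTTP 404s (and missing read/write
    # permission) as DataSourceError, while get() just returns None.
    pass
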
def test_add_stix2_bundle_object(collection): def test_add_stix2_bundle_object(collection):
tc_sink = TAXIICollectionSink(collection) tc_sink = stix2.TAXIICollectionSink(collection)
# create new STIX threat-actor # create new STIX threat-actor
ta = ThreatActor(name="Teddy Bear", ta = stix2.v20.ThreatActor(
labels=["nation-state"], name="Teddy Bear",
sophistication="innovator", labels=["nation-state"],
resource_level="government", sophistication="innovator",
goals=[ resource_level="government",
"compromising environment NGOs", goals=[
"water-hole attacks geared towards energy sector", "compromising environment NGOs",
]) "water-hole attacks geared towards energy sector",
],
)
tc_sink.add(Bundle(objects=[ta])) tc_sink.add(stix2.v20.Bundle(objects=[ta]))
def test_add_str_object(collection): def test_add_str_object(collection):
tc_sink = TAXIICollectionSink(collection) tc_sink = stix2.TAXIICollectionSink(collection)
# create new STIX threat-actor # create new STIX threat-actor
ta = """{ ta = """{
@ -198,7 +226,7 @@ def test_add_str_object(collection):
def test_add_dict_object(collection): def test_add_dict_object(collection):
tc_sink = TAXIICollectionSink(collection) tc_sink = stix2.TAXIICollectionSink(collection)
ta = { ta = {
"type": "threat-actor", "type": "threat-actor",
@ -208,25 +236,24 @@ def test_add_dict_object(collection):
"name": "Teddy Bear", "name": "Teddy Bear",
"goals": [ "goals": [
"compromising environment NGOs", "compromising environment NGOs",
"water-hole attacks geared towards energy sector" "water-hole attacks geared towards energy sector",
], ],
"sophistication": "innovator", "sophistication": "innovator",
"resource_level": "government", "resource_level": "government",
"labels": [ "labels": [
"nation-state" "nation-state",
] ],
} }
tc_sink.add(ta) tc_sink.add(ta)
def test_add_dict_bundle_object(collection): def test_add_dict_bundle_object(collection):
tc_sink = TAXIICollectionSink(collection) tc_sink = stix2.TAXIICollectionSink(collection)
ta = { ta = {
"type": "bundle", "type": "bundle",
"id": "bundle--860ccc8d-56c9-4fda-9384-84276fb52fb1", "id": "bundle--860ccc8d-56c9-4fda-9384-84276fb52fb1",
"spec_version": "2.0",
"objects": [ "objects": [
{ {
"type": "threat-actor", "type": "threat-actor",
@ -236,22 +263,22 @@ def test_add_dict_bundle_object(collection):
"name": "Teddy Bear", "name": "Teddy Bear",
"goals": [ "goals": [
"compromising environment NGOs", "compromising environment NGOs",
"water-hole attacks geared towards energy sector" "water-hole attacks geared towards energy sector",
], ],
"sophistication": "innovator", "sophistication": "innovator",
"resource_level": "government", "resource_level": "government",
"labels": [ "labels": [
"nation-state" "nation-state",
] ],
} },
] ],
} }
tc_sink.add(ta) tc_sink.add(ta)
def test_get_stix2_object(collection): def test_get_stix2_object(collection):
tc_sink = TAXIICollectionSource(collection) tc_sink = stix2.TAXIICollectionSource(collection)
objects = tc_sink.get("indicator--00000000-0000-4000-8000-000000000001") objects = tc_sink.get("indicator--00000000-0000-4000-8000-000000000001")
@ -271,10 +298,10 @@ def test_parse_taxii_filters(collection):
Filter("added_after", "=", "2016-02-01T00:00:01.000Z"), Filter("added_after", "=", "2016-02-01T00:00:01.000Z"),
Filter("id", "=", "taxii stix object ID"), Filter("id", "=", "taxii stix object ID"),
Filter("type", "=", "taxii stix object ID"), Filter("type", "=", "taxii stix object ID"),
Filter("version", "=", "first") Filter("version", "=", "first"),
] ]
ds = TAXIICollectionSource(collection) ds = stix2.TAXIICollectionSource(collection)
taxii_filters = ds._parse_taxii_filters(query) taxii_filters = ds._parse_taxii_filters(query)
@ -282,7 +309,7 @@ def test_parse_taxii_filters(collection):
def test_add_get_remove_filter(collection): def test_add_get_remove_filter(collection):
ds = TAXIICollectionSource(collection) ds = stix2.TAXIICollectionSource(collection)
# First 3 filters are valid, remaining properties are erroneous in some way # First 3 filters are valid, remaining properties are erroneous in some way
valid_filters = [ valid_filters = [
@ -318,7 +345,7 @@ def test_add_get_remove_filter(collection):
def test_get_all_versions(collection): def test_get_all_versions(collection):
ds = TAXIICollectionStore(collection) ds = stix2.TAXIICollectionStore(collection)
indicators = ds.all_versions('indicator--00000000-0000-4000-8000-000000000001') indicators = ds.all_versions('indicator--00000000-0000-4000-8000-000000000001')
# There are 3 indicators but 2 share the same 'modified' timestamp # There are 3 indicators but 2 share the same 'modified' timestamp
@ -330,7 +357,7 @@ def test_can_read_error(collection_no_rw_access):
instance that does not have read access, check ValueError exception is raised""" instance that does not have read access, check ValueError exception is raised"""
with pytest.raises(DataSourceError) as excinfo: with pytest.raises(DataSourceError) as excinfo:
TAXIICollectionSource(collection_no_rw_access) stix2.TAXIICollectionSource(collection_no_rw_access)
assert "Collection object provided does not have read access" in str(excinfo.value) assert "Collection object provided does not have read access" in str(excinfo.value)
@ -339,7 +366,7 @@ def test_can_write_error(collection_no_rw_access):
instance that does not have write access, check ValueError exception is raised""" instance that does not have write access, check ValueError exception is raised"""
with pytest.raises(DataSourceError) as excinfo: with pytest.raises(DataSourceError) as excinfo:
TAXIICollectionSink(collection_no_rw_access) stix2.TAXIICollectionSink(collection_no_rw_access)
assert "Collection object provided does not have write access" in str(excinfo.value) assert "Collection object provided does not have write access" in str(excinfo.value)
@ -360,7 +387,7 @@ def test_get_404():
resp.status_code = 404 resp.status_code = 404
resp.raise_for_status() resp.raise_for_status()
ds = TAXIICollectionSource(TAXIICollection404()) ds = stix2.TAXIICollectionSource(TAXIICollection404())
# this will raise 404 from mock TAXII Client but TAXIICollectionStore # this will raise 404 from mock TAXII Client but TAXIICollectionStore
# should handle gracefully and return None # should handle gracefully and return None
@ -372,7 +399,7 @@ def test_all_versions_404(collection):
""" a TAXIICollectionSource.all_version() call that recieves an HTTP 404 """ a TAXIICollectionSource.all_version() call that recieves an HTTP 404
response code from the taxii2client should be returned as an exception""" response code from the taxii2client should be returned as an exception"""
ds = TAXIICollectionStore(collection) ds = stix2.TAXIICollectionStore(collection)
with pytest.raises(DataSourceError) as excinfo: with pytest.raises(DataSourceError) as excinfo:
ds.all_versions("indicator--1") ds.all_versions("indicator--1")
@ -384,7 +411,7 @@ def test_query_404(collection):
""" a TAXIICollectionSource.query() call that recieves an HTTP 404 """ a TAXIICollectionSource.query() call that recieves an HTTP 404
response code from the taxii2client should be returned as an exception""" response code from the taxii2client should be returned as an exception"""
ds = TAXIICollectionStore(collection) ds = stix2.TAXIICollectionStore(collection)
query = [Filter("type", "=", "malware")] query = [Filter("type", "=", "malware")]
with pytest.raises(DataSourceError) as excinfo: with pytest.raises(DataSourceError) as excinfo:


@ -2,109 +2,127 @@ import pytest
import stix2 import stix2
from .constants import (CAMPAIGN_ID, CAMPAIGN_KWARGS, FAKE_TIME, IDENTITY_ID, from .constants import (
IDENTITY_KWARGS, INDICATOR_ID, INDICATOR_KWARGS, CAMPAIGN_ID, CAMPAIGN_KWARGS, FAKE_TIME, IDENTITY_ID, IDENTITY_KWARGS,
MALWARE_ID, MALWARE_KWARGS, RELATIONSHIP_IDS) INDICATOR_ID, INDICATOR_KWARGS, MALWARE_ID, MALWARE_KWARGS,
RELATIONSHIP_IDS,
)
@pytest.fixture @pytest.fixture
def ds(): def ds():
cam = stix2.Campaign(id=CAMPAIGN_ID, **CAMPAIGN_KWARGS) cam = stix2.v20.Campaign(id=CAMPAIGN_ID, **CAMPAIGN_KWARGS)
idy = stix2.Identity(id=IDENTITY_ID, **IDENTITY_KWARGS) idy = stix2.v20.Identity(id=IDENTITY_ID, **IDENTITY_KWARGS)
ind = stix2.Indicator(id=INDICATOR_ID, **INDICATOR_KWARGS) ind = stix2.v20.Indicator(id=INDICATOR_ID, **INDICATOR_KWARGS)
mal = stix2.Malware(id=MALWARE_ID, **MALWARE_KWARGS) mal = stix2.v20.Malware(id=MALWARE_ID, **MALWARE_KWARGS)
rel1 = stix2.Relationship(ind, 'indicates', mal, id=RELATIONSHIP_IDS[0]) rel1 = stix2.v20.Relationship(ind, 'indicates', mal, id=RELATIONSHIP_IDS[0])
rel2 = stix2.Relationship(mal, 'targets', idy, id=RELATIONSHIP_IDS[1]) rel2 = stix2.v20.Relationship(mal, 'targets', idy, id=RELATIONSHIP_IDS[1])
rel3 = stix2.Relationship(cam, 'uses', mal, id=RELATIONSHIP_IDS[2]) rel3 = stix2.v20.Relationship(cam, 'uses', mal, id=RELATIONSHIP_IDS[2])
stix_objs = [cam, idy, ind, mal, rel1, rel2, rel3] stix_objs = [cam, idy, ind, mal, rel1, rel2, rel3]
yield stix2.MemoryStore(stix_objs) yield stix2.MemoryStore(stix_objs)
def test_object_factory_created_by_ref_str(): def test_object_factory_created_by_ref_str():
factory = stix2.ObjectFactory(created_by_ref=IDENTITY_ID) factory = stix2.ObjectFactory(created_by_ref=IDENTITY_ID)
ind = factory.create(stix2.Indicator, **INDICATOR_KWARGS) ind = factory.create(stix2.v20.Indicator, **INDICATOR_KWARGS)
assert ind.created_by_ref == IDENTITY_ID assert ind.created_by_ref == IDENTITY_ID
def test_object_factory_created_by_ref_obj(): def test_object_factory_created_by_ref_obj():
id_obj = stix2.Identity(id=IDENTITY_ID, **IDENTITY_KWARGS) id_obj = stix2.v20.Identity(id=IDENTITY_ID, **IDENTITY_KWARGS)
factory = stix2.ObjectFactory(created_by_ref=id_obj) factory = stix2.ObjectFactory(created_by_ref=id_obj)
ind = factory.create(stix2.Indicator, **INDICATOR_KWARGS) ind = factory.create(stix2.v20.Indicator, **INDICATOR_KWARGS)
assert ind.created_by_ref == IDENTITY_ID assert ind.created_by_ref == IDENTITY_ID
def test_object_factory_override_default(): def test_object_factory_override_default():
factory = stix2.ObjectFactory(created_by_ref=IDENTITY_ID) factory = stix2.ObjectFactory(created_by_ref=IDENTITY_ID)
new_id = "identity--983b3172-44fe-4a80-8091-eb8098841fe8" new_id = "identity--983b3172-44fe-4a80-8091-eb8098841fe8"
ind = factory.create(stix2.Indicator, created_by_ref=new_id, **INDICATOR_KWARGS) ind = factory.create(stix2.v20.Indicator, created_by_ref=new_id, **INDICATOR_KWARGS)
assert ind.created_by_ref == new_id assert ind.created_by_ref == new_id
def test_object_factory_created(): def test_object_factory_created():
factory = stix2.ObjectFactory(created=FAKE_TIME) factory = stix2.ObjectFactory(created=FAKE_TIME)
ind = factory.create(stix2.Indicator, **INDICATOR_KWARGS) ind = factory.create(stix2.v20.Indicator, **INDICATOR_KWARGS)
assert ind.created == FAKE_TIME assert ind.created == FAKE_TIME
assert ind.modified == FAKE_TIME assert ind.modified == FAKE_TIME
def test_object_factory_external_reference(): def test_object_factory_external_reference():
ext_ref = stix2.ExternalReference(source_name="ACME Threat Intel", ext_ref = stix2.v20.ExternalReference(
description="Threat report") source_name="ACME Threat Intel",
description="Threat report",
)
factory = stix2.ObjectFactory(external_references=ext_ref) factory = stix2.ObjectFactory(external_references=ext_ref)
ind = factory.create(stix2.Indicator, **INDICATOR_KWARGS) ind = factory.create(stix2.v20.Indicator, **INDICATOR_KWARGS)
assert ind.external_references[0].source_name == "ACME Threat Intel" assert ind.external_references[0].source_name == "ACME Threat Intel"
assert ind.external_references[0].description == "Threat report" assert ind.external_references[0].description == "Threat report"
ind2 = factory.create(stix2.Indicator, external_references=None, **INDICATOR_KWARGS) ind2 = factory.create(stix2.v20.Indicator, external_references=None, **INDICATOR_KWARGS)
assert 'external_references' not in ind2 assert 'external_references' not in ind2
def test_object_factory_obj_markings(): def test_object_factory_obj_markings():
stmt_marking = stix2.StatementMarking("Copyright 2016, Example Corp") stmt_marking = stix2.v20.StatementMarking("Copyright 2016, Example Corp")
mark_def = stix2.MarkingDefinition(definition_type="statement", mark_def = stix2.v20.MarkingDefinition(
definition=stmt_marking) definition_type="statement",
factory = stix2.ObjectFactory(object_marking_refs=[mark_def, stix2.TLP_AMBER]) definition=stmt_marking,
ind = factory.create(stix2.Indicator, **INDICATOR_KWARGS) )
factory = stix2.ObjectFactory(object_marking_refs=[mark_def, stix2.v20.TLP_AMBER])
ind = factory.create(stix2.v20.Indicator, **INDICATOR_KWARGS)
assert mark_def.id in ind.object_marking_refs assert mark_def.id in ind.object_marking_refs
assert stix2.TLP_AMBER.id in ind.object_marking_refs assert stix2.v20.TLP_AMBER.id in ind.object_marking_refs
factory = stix2.ObjectFactory(object_marking_refs=stix2.TLP_RED) factory = stix2.ObjectFactory(object_marking_refs=stix2.v20.TLP_RED)
ind = factory.create(stix2.Indicator, **INDICATOR_KWARGS) ind = factory.create(stix2.v20.Indicator, **INDICATOR_KWARGS)
assert stix2.TLP_RED.id in ind.object_marking_refs assert stix2.v20.TLP_RED.id in ind.object_marking_refs
def test_object_factory_list_append(): def test_object_factory_list_append():
ext_ref = stix2.ExternalReference(source_name="ACME Threat Intel", ext_ref = stix2.v20.ExternalReference(
description="Threat report from ACME") source_name="ACME Threat Intel",
ext_ref2 = stix2.ExternalReference(source_name="Yet Another Threat Report", description="Threat report from ACME",
description="Threat report from YATR") )
ext_ref3 = stix2.ExternalReference(source_name="Threat Report #3", ext_ref2 = stix2.v20.ExternalReference(
description="One more threat report") source_name="Yet Another Threat Report",
description="Threat report from YATR",
)
ext_ref3 = stix2.v20.ExternalReference(
source_name="Threat Report #3",
description="One more threat report",
)
factory = stix2.ObjectFactory(external_references=ext_ref) factory = stix2.ObjectFactory(external_references=ext_ref)
ind = factory.create(stix2.Indicator, external_references=ext_ref2, **INDICATOR_KWARGS) ind = factory.create(stix2.v20.Indicator, external_references=ext_ref2, **INDICATOR_KWARGS)
assert ind.external_references[1].source_name == "Yet Another Threat Report" assert ind.external_references[1].source_name == "Yet Another Threat Report"
ind = factory.create(stix2.Indicator, external_references=[ext_ref2, ext_ref3], **INDICATOR_KWARGS) ind = factory.create(stix2.v20.Indicator, external_references=[ext_ref2, ext_ref3], **INDICATOR_KWARGS)
assert ind.external_references[2].source_name == "Threat Report #3" assert ind.external_references[2].source_name == "Threat Report #3"
def test_object_factory_list_replace(): def test_object_factory_list_replace():
ext_ref = stix2.ExternalReference(source_name="ACME Threat Intel", ext_ref = stix2.v20.ExternalReference(
description="Threat report from ACME") source_name="ACME Threat Intel",
ext_ref2 = stix2.ExternalReference(source_name="Yet Another Threat Report", description="Threat report from ACME",
description="Threat report from YATR") )
ext_ref2 = stix2.v20.ExternalReference(
source_name="Yet Another Threat Report",
description="Threat report from YATR",
)
factory = stix2.ObjectFactory(external_references=ext_ref, list_append=False) factory = stix2.ObjectFactory(external_references=ext_ref, list_append=False)
ind = factory.create(stix2.Indicator, external_references=ext_ref2, **INDICATOR_KWARGS) ind = factory.create(stix2.v20.Indicator, external_references=ext_ref2, **INDICATOR_KWARGS)
assert len(ind.external_references) == 1 assert len(ind.external_references) == 1
assert ind.external_references[0].source_name == "Yet Another Threat Report" assert ind.external_references[0].source_name == "Yet Another Threat Report"
def test_environment_functions(): def test_environment_functions():
env = stix2.Environment(stix2.ObjectFactory(created_by_ref=IDENTITY_ID), env = stix2.Environment(
stix2.MemoryStore()) stix2.ObjectFactory(created_by_ref=IDENTITY_ID),
stix2.MemoryStore(),
)
# Create a STIX object # Create a STIX object
ind = env.create(stix2.Indicator, id=INDICATOR_ID, **INDICATOR_KWARGS) ind = env.create(stix2.v20.Indicator, id=INDICATOR_ID, **INDICATOR_KWARGS)
assert ind.created_by_ref == IDENTITY_ID assert ind.created_by_ref == IDENTITY_ID
# Add objects to datastore # Add objects to datastore
@ -125,23 +143,27 @@ def test_environment_functions():
assert len(resp) == 0 assert len(resp) == 0
# See different results after adding filters to the environment # See different results after adding filters to the environment
env.add_filters([stix2.Filter('type', '=', 'indicator'), env.add_filters([
stix2.Filter('created_by_ref', '=', IDENTITY_ID)]) stix2.Filter('type', '=', 'indicator'),
stix2.Filter('created_by_ref', '=', IDENTITY_ID),
])
env.add_filter(stix2.Filter('labels', '=', 'benign')) # should be 'malicious-activity' env.add_filter(stix2.Filter('labels', '=', 'benign')) # should be 'malicious-activity'
resp = env.get(INDICATOR_ID) resp = env.get(INDICATOR_ID)
assert resp['labels'][0] == 'benign' # should be 'malicious-activity' assert resp['labels'][0] == 'benign' # should be 'malicious-activity'
def test_environment_source_and_sink(): def test_environment_source_and_sink():
ind = stix2.Indicator(id=INDICATOR_ID, **INDICATOR_KWARGS) ind = stix2.v20.Indicator(id=INDICATOR_ID, **INDICATOR_KWARGS)
env = stix2.Environment(source=stix2.MemorySource([ind]), sink=stix2.MemorySink([ind])) env = stix2.Environment(source=stix2.MemorySource([ind]), sink=stix2.MemorySink([ind]))
assert env.get(INDICATOR_ID).labels[0] == 'malicious-activity' assert env.get(INDICATOR_ID).labels[0] == 'malicious-activity'
def test_environment_datastore_and_sink(): def test_environment_datastore_and_sink():
with pytest.raises(ValueError) as excinfo: with pytest.raises(ValueError) as excinfo:
stix2.Environment(factory=stix2.ObjectFactory(), stix2.Environment(
store=stix2.MemoryStore(), sink=stix2.MemorySink) factory=stix2.ObjectFactory(),
store=stix2.MemoryStore(), sink=stix2.MemorySink,
)
assert 'Data store already provided' in str(excinfo.value) assert 'Data store already provided' in str(excinfo.value)
@ -149,7 +171,7 @@ def test_environment_no_datastore():
env = stix2.Environment(factory=stix2.ObjectFactory()) env = stix2.Environment(factory=stix2.ObjectFactory())
with pytest.raises(AttributeError) as excinfo: with pytest.raises(AttributeError) as excinfo:
env.add(stix2.Indicator(**INDICATOR_KWARGS)) env.add(stix2.v20.Indicator(**INDICATOR_KWARGS))
assert 'Environment has no data sink to put objects in' in str(excinfo.value) assert 'Environment has no data sink to put objects in' in str(excinfo.value)
with pytest.raises(AttributeError) as excinfo: with pytest.raises(AttributeError) as excinfo:
@ -182,7 +204,7 @@ def test_environment_add_filters():
def test_environment_datastore_and_no_object_factory(): def test_environment_datastore_and_no_object_factory():
# Uses a default object factory # Uses a default object factory
env = stix2.Environment(store=stix2.MemoryStore()) env = stix2.Environment(store=stix2.MemoryStore())
ind = env.create(stix2.Indicator, id=INDICATOR_ID, **INDICATOR_KWARGS) ind = env.create(stix2.v20.Indicator, id=INDICATOR_ID, **INDICATOR_KWARGS)
assert ind.id == INDICATOR_ID assert ind.id == INDICATOR_ID
@ -198,7 +220,7 @@ def test_parse_malware():
"ransomware" "ransomware"
] ]
}""" }"""
mal = env.parse(data) mal = env.parse(data, version="2.0")
assert mal.type == 'malware' assert mal.type == 'malware'
assert mal.id == MALWARE_ID assert mal.id == MALWARE_ID
@ -209,40 +231,40 @@ def test_parse_malware():
def test_creator_of(): def test_creator_of():
identity = stix2.Identity(**IDENTITY_KWARGS) identity = stix2.v20.Identity(**IDENTITY_KWARGS)
factory = stix2.ObjectFactory(created_by_ref=identity.id) factory = stix2.ObjectFactory(created_by_ref=identity.id)
env = stix2.Environment(store=stix2.MemoryStore(), factory=factory) env = stix2.Environment(store=stix2.MemoryStore(), factory=factory)
env.add(identity) env.add(identity)
ind = env.create(stix2.Indicator, **INDICATOR_KWARGS) ind = env.create(stix2.v20.Indicator, **INDICATOR_KWARGS)
creator = env.creator_of(ind) creator = env.creator_of(ind)
assert creator is identity assert creator is identity
def test_creator_of_no_datasource(): def test_creator_of_no_datasource():
identity = stix2.Identity(**IDENTITY_KWARGS) identity = stix2.v20.Identity(**IDENTITY_KWARGS)
factory = stix2.ObjectFactory(created_by_ref=identity.id) factory = stix2.ObjectFactory(created_by_ref=identity.id)
env = stix2.Environment(factory=factory) env = stix2.Environment(factory=factory)
ind = env.create(stix2.Indicator, **INDICATOR_KWARGS) ind = env.create(stix2.v20.Indicator, **INDICATOR_KWARGS)
with pytest.raises(AttributeError) as excinfo: with pytest.raises(AttributeError) as excinfo:
env.creator_of(ind) env.creator_of(ind)
assert 'Environment has no data source' in str(excinfo.value) assert 'Environment has no data source' in str(excinfo.value)
def test_creator_of_not_found(): def test_creator_of_not_found():
identity = stix2.Identity(**IDENTITY_KWARGS) identity = stix2.v20.Identity(**IDENTITY_KWARGS)
factory = stix2.ObjectFactory(created_by_ref=identity.id) factory = stix2.ObjectFactory(created_by_ref=identity.id)
env = stix2.Environment(store=stix2.MemoryStore(), factory=factory) env = stix2.Environment(store=stix2.MemoryStore(), factory=factory)
ind = env.create(stix2.Indicator, **INDICATOR_KWARGS) ind = env.create(stix2.v20.Indicator, **INDICATOR_KWARGS)
creator = env.creator_of(ind) creator = env.creator_of(ind)
assert creator is None assert creator is None
def test_creator_of_no_created_by_ref(): def test_creator_of_no_created_by_ref():
env = stix2.Environment(store=stix2.MemoryStore()) env = stix2.Environment(store=stix2.MemoryStore())
ind = env.create(stix2.Indicator, **INDICATOR_KWARGS) ind = env.create(stix2.v20.Indicator, **INDICATOR_KWARGS)
creator = env.creator_of(ind) creator = env.creator_of(ind)
assert creator is None assert creator is None
@ -262,7 +284,7 @@ def test_relationships_no_id(ds):
env = stix2.Environment(store=ds) env = stix2.Environment(store=ds)
mal = { mal = {
"type": "malware", "type": "malware",
"name": "some variant" "name": "some variant",
} }
with pytest.raises(ValueError) as excinfo: with pytest.raises(ValueError) as excinfo:
env.relationships(mal) env.relationships(mal)
@ -326,7 +348,7 @@ def test_related_to_no_id(ds):
env = stix2.Environment(store=ds) env = stix2.Environment(store=ds)
mal = { mal = {
"type": "malware", "type": "malware",
"name": "some variant" "name": "some variant",
} }
with pytest.raises(ValueError) as excinfo: with pytest.raises(ValueError) as excinfo:
env.related_to(mal) env.related_to(mal)
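
A condensed sketch of the factory/store workflow these environment tests cover, using the new stix2.v20 namespace (the identity id below is illustrative):

import stix2

factory = stix2.ObjectFactory(
    created_by_ref="identity--311b2d2d-f010-4473-83ec-1edf84858f4c",  # illustrative id
)
env = stix2.Environment(factory=factory, store=stix2.MemoryStore())

# Objects created through the environment pick up the factory defaults.
ind = env.create(
    stix2.v20.Indicator,
    labels=["malicious-activity"],
    pattern="[file:hashes.MD5 = 'd41d8cd98f00b204e9800998ecf8427e']",
)
env.add(ind)
assert env.get(ind.id).created_by_ref == "identity--311b2d2d-f010-4473-83ec-1edf84858f4c"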

View File

@ -17,11 +17,11 @@ VERIS = """{
def test_external_reference_veris(): def test_external_reference_veris():
ref = stix2.ExternalReference( ref = stix2.v20.ExternalReference(
source_name="veris", source_name="veris",
external_id="0001AA7F-C601-424A-B2B8-BE6C9F5164E7", external_id="0001AA7F-C601-424A-B2B8-BE6C9F5164E7",
hashes={ hashes={
"SHA-256": "6db12788c37247f2316052e142f42f4b259d6561751e5f401a1ae2a6df9c674b" "SHA-256": "6db12788c37247f2316052e142f42f4b259d6561751e5f401a1ae2a6df9c674b",
}, },
url="https://github.com/vz-risk/VCDB/blob/master/data/json/0001AA7F-C601-424A-B2B8-BE6C9F5164E7.json", url="https://github.com/vz-risk/VCDB/blob/master/data/json/0001AA7F-C601-424A-B2B8-BE6C9F5164E7.json",
) )
@ -36,7 +36,7 @@ CAPEC = """{
def test_external_reference_capec(): def test_external_reference_capec():
ref = stix2.ExternalReference( ref = stix2.v20.ExternalReference(
source_name="capec", source_name="capec",
external_id="CAPEC-550", external_id="CAPEC-550",
) )
@ -53,7 +53,7 @@ CAPEC_URL = """{
def test_external_reference_capec_url(): def test_external_reference_capec_url():
ref = stix2.ExternalReference( ref = stix2.v20.ExternalReference(
source_name="capec", source_name="capec",
external_id="CAPEC-550", external_id="CAPEC-550",
url="http://capec.mitre.org/data/definitions/550.html", url="http://capec.mitre.org/data/definitions/550.html",
@ -70,7 +70,7 @@ THREAT_REPORT = """{
def test_external_reference_threat_report(): def test_external_reference_threat_report():
ref = stix2.ExternalReference( ref = stix2.v20.ExternalReference(
source_name="ACME Threat Intel", source_name="ACME Threat Intel",
description="Threat report", description="Threat report",
url="http://www.example.com/threat-report.pdf", url="http://www.example.com/threat-report.pdf",
@ -87,7 +87,7 @@ BUGZILLA = """{
def test_external_reference_bugzilla(): def test_external_reference_bugzilla():
ref = stix2.ExternalReference( ref = stix2.v20.ExternalReference(
source_name="ACME Bugzilla", source_name="ACME Bugzilla",
external_id="1370", external_id="1370",
url="https://www.example.com/bugs/1370", url="https://www.example.com/bugs/1370",
@ -103,7 +103,7 @@ OFFLINE = """{
def test_external_reference_offline(): def test_external_reference_offline():
ref = stix2.ExternalReference( ref = stix2.v20.ExternalReference(
source_name="ACME Threat Intel", source_name="ACME Threat Intel",
description="Threat report", description="Threat report",
) )
@ -116,7 +116,7 @@ def test_external_reference_offline():
def test_external_reference_source_required(): def test_external_reference_source_required():
with pytest.raises(stix2.exceptions.MissingPropertiesError) as excinfo: with pytest.raises(stix2.exceptions.MissingPropertiesError) as excinfo:
stix2.ExternalReference() stix2.v20.ExternalReference()
assert excinfo.value.cls == stix2.ExternalReference assert excinfo.value.cls == stix2.v20.ExternalReference
assert excinfo.value.properties == ["source_name"] assert excinfo.value.properties == ["source_name"]
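
These hunks only swap in the stix2.v20.ExternalReference class; for context, a short sketch of attaching such a reference to an SDO (the Campaign is illustrative):

import stix2

ref = stix2.v20.ExternalReference(
    source_name="capec",
    external_id="CAPEC-550",
    url="http://capec.mitre.org/data/definitions/550.html",
)
campaign = stix2.v20.Campaign(
    name="Green Group Attacks Against Finance",  # illustrative name
    external_references=[ref],
)
assert campaign.external_references[0].external_id == "CAPEC-550"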

View File

@ -1,8 +1,9 @@
import pytest import pytest
from stix2 import TLP_RED, Malware, markings from stix2 import markings
from stix2.exceptions import MarkingNotFoundError from stix2.exceptions import MarkingNotFoundError
from stix2.v20 import TLP_RED, Malware
from .constants import MALWARE_MORE_KWARGS as MALWARE_KWARGS_CONST from .constants import MALWARE_MORE_KWARGS as MALWARE_KWARGS_CONST
from .constants import MARKING_IDS from .constants import MARKING_IDS
@ -20,11 +21,11 @@ def test_add_marking_mark_one_selector_multiple_refs():
granular_markings=[ granular_markings=[
{ {
"selectors": ["description"], "selectors": ["description"],
"marking_ref": MARKING_IDS[0] "marking_ref": MARKING_IDS[0],
}, },
{ {
"selectors": ["description"], "selectors": ["description"],
"marking_ref": MARKING_IDS[1] "marking_ref": MARKING_IDS[1],
}, },
], ],
**MALWARE_KWARGS **MALWARE_KWARGS
@ -35,44 +36,49 @@ def test_add_marking_mark_one_selector_multiple_refs():
assert m in after["granular_markings"] assert m in after["granular_markings"]
@pytest.mark.parametrize("data", [ @pytest.mark.parametrize(
( "data", [
Malware(**MALWARE_KWARGS), (
Malware( Malware(**MALWARE_KWARGS),
granular_markings=[ Malware(
{ granular_markings=[
"selectors": ["description", "name"], {
"marking_ref": MARKING_IDS[0] "selectors": ["description", "name"],
}, "marking_ref": MARKING_IDS[0],
], },
**MALWARE_KWARGS), ],
MARKING_IDS[0], **MALWARE_KWARGS
), ),
( MARKING_IDS[0],
MALWARE_KWARGS, ),
dict( (
granular_markings=[ MALWARE_KWARGS,
{ dict(
"selectors": ["description", "name"], granular_markings=[
"marking_ref": MARKING_IDS[0] {
}, "selectors": ["description", "name"],
], "marking_ref": MARKING_IDS[0],
**MALWARE_KWARGS), },
MARKING_IDS[0], ],
), **MALWARE_KWARGS
( ),
Malware(**MALWARE_KWARGS), MARKING_IDS[0],
Malware( ),
granular_markings=[ (
{ Malware(**MALWARE_KWARGS),
"selectors": ["description", "name"], Malware(
"marking_ref": TLP_RED.id, granular_markings=[
}, {
], "selectors": ["description", "name"],
**MALWARE_KWARGS), "marking_ref": TLP_RED.id,
TLP_RED, },
), ],
]) **MALWARE_KWARGS
),
TLP_RED,
),
],
)
def test_add_marking_mark_multiple_selector_one_refs(data): def test_add_marking_mark_multiple_selector_one_refs(data):
before = data[0] before = data[0]
after = data[1] after = data[1]
@ -91,12 +97,12 @@ def test_add_marking_mark_multiple_selector_multiple_refs():
granular_markings=[ granular_markings=[
{ {
"selectors": ["description", "name"], "selectors": ["description", "name"],
"marking_ref": MARKING_IDS[0] "marking_ref": MARKING_IDS[0],
}, },
{ {
"selectors": ["description", "name"], "selectors": ["description", "name"],
"marking_ref": MARKING_IDS[1] "marking_ref": MARKING_IDS[1],
} },
], ],
**MALWARE_KWARGS **MALWARE_KWARGS
) )
@ -111,7 +117,7 @@ def test_add_marking_mark_another_property_same_marking():
granular_markings=[ granular_markings=[
{ {
"selectors": ["description"], "selectors": ["description"],
"marking_ref": MARKING_IDS[0] "marking_ref": MARKING_IDS[0],
}, },
], ],
**MALWARE_KWARGS **MALWARE_KWARGS
@ -120,7 +126,7 @@ def test_add_marking_mark_another_property_same_marking():
granular_markings=[ granular_markings=[
{ {
"selectors": ["description", "name"], "selectors": ["description", "name"],
"marking_ref": MARKING_IDS[0] "marking_ref": MARKING_IDS[0],
}, },
], ],
**MALWARE_KWARGS **MALWARE_KWARGS
@ -136,7 +142,7 @@ def test_add_marking_mark_same_property_same_marking():
granular_markings=[ granular_markings=[
{ {
"selectors": ["description"], "selectors": ["description"],
"marking_ref": MARKING_IDS[0] "marking_ref": MARKING_IDS[0],
}, },
], ],
**MALWARE_KWARGS **MALWARE_KWARGS
@ -145,7 +151,7 @@ def test_add_marking_mark_same_property_same_marking():
granular_markings=[ granular_markings=[
{ {
"selectors": ["description"], "selectors": ["description"],
"marking_ref": MARKING_IDS[0] "marking_ref": MARKING_IDS[0],
}, },
], ],
**MALWARE_KWARGS **MALWARE_KWARGS
@ -156,17 +162,22 @@ def test_add_marking_mark_same_property_same_marking():
assert m in after["granular_markings"] assert m in after["granular_markings"]
@pytest.mark.parametrize("data,marking", [ @pytest.mark.parametrize(
({"description": "test description"}, "data,marking", [
[["title"], ["marking-definition--1", "marking-definition--2"], (
"", ["marking-definition--1", "marking-definition--2"], {"description": "test description"},
[], ["marking-definition--1", "marking-definition--2"], [
[""], ["marking-definition--1", "marking-definition--2"], ["title"], ["marking-definition--1", "marking-definition--2"],
["description"], [""], "", ["marking-definition--1", "marking-definition--2"],
["description"], [], [], ["marking-definition--1", "marking-definition--2"],
["description"], ["marking-definition--1", 456] [""], ["marking-definition--1", "marking-definition--2"],
]) ["description"], [""],
]) ["description"], [],
["description"], ["marking-definition--1", 456],
],
),
],
)
def test_add_marking_bad_selector(data, marking): def test_add_marking_bad_selector(data, marking):
with pytest.raises(AssertionError): with pytest.raises(AssertionError):
markings.add_markings(data, marking[0], marking[1]) markings.add_markings(data, marking[0], marking[1])
@ -180,61 +191,61 @@ GET_MARKINGS_TEST_DATA = {
"list value", "list value",
{ {
"g": "nested", "g": "nested",
"h": 45 "h": 45,
} },
], ],
"x": { "x": {
"y": [ "y": [
"hello", "hello",
88 88,
], ],
"z": { "z": {
"foo1": "bar", "foo1": "bar",
"foo2": 65 "foo2": 65,
} },
}, },
"granular_markings": [ "granular_markings": [
{ {
"marking_ref": "1", "marking_ref": "1",
"selectors": ["a"] "selectors": ["a"],
}, },
{ {
"marking_ref": "2", "marking_ref": "2",
"selectors": ["c"] "selectors": ["c"],
}, },
{ {
"marking_ref": "3", "marking_ref": "3",
"selectors": ["c.[1]"] "selectors": ["c.[1]"],
}, },
{ {
"marking_ref": "4", "marking_ref": "4",
"selectors": ["c.[2]"] "selectors": ["c.[2]"],
}, },
{ {
"marking_ref": "5", "marking_ref": "5",
"selectors": ["c.[2].g"] "selectors": ["c.[2].g"],
}, },
{ {
"marking_ref": "6", "marking_ref": "6",
"selectors": ["x"] "selectors": ["x"],
}, },
{ {
"marking_ref": "7", "marking_ref": "7",
"selectors": ["x.y"] "selectors": ["x.y"],
}, },
{ {
"marking_ref": "8", "marking_ref": "8",
"selectors": ["x.y.[1]"] "selectors": ["x.y.[1]"],
}, },
{ {
"marking_ref": "9", "marking_ref": "9",
"selectors": ["x.z"] "selectors": ["x.z"],
}, },
{ {
"marking_ref": "10", "marking_ref": "10",
"selectors": ["x.z.foo2"] "selectors": ["x.z.foo2"],
}, },
] ],
} }
@ -245,10 +256,12 @@ def test_get_markings_smoke(data):
assert markings.get_markings(data, "a") == ["1"] assert markings.get_markings(data, "a") == ["1"]
@pytest.mark.parametrize("data", [ @pytest.mark.parametrize(
GET_MARKINGS_TEST_DATA, "data", [
{"b": 1234}, GET_MARKINGS_TEST_DATA,
]) {"b": 1234},
],
)
def test_get_markings_not_marked(data): def test_get_markings_not_marked(data):
"""Test selector that is not marked returns empty list.""" """Test selector that is not marked returns empty list."""
results = markings.get_markings(data, "b") results = markings.get_markings(data, "b")
@ -267,21 +280,23 @@ def test_get_markings_multiple_selectors(data):
assert set(xy_markings).union(xz_markings).issuperset(total) assert set(xy_markings).union(xz_markings).issuperset(total)
@pytest.mark.parametrize("data,selector", [ @pytest.mark.parametrize(
(GET_MARKINGS_TEST_DATA, "foo"), "data,selector", [
(GET_MARKINGS_TEST_DATA, ""), (GET_MARKINGS_TEST_DATA, "foo"),
(GET_MARKINGS_TEST_DATA, []), (GET_MARKINGS_TEST_DATA, ""),
(GET_MARKINGS_TEST_DATA, [""]), (GET_MARKINGS_TEST_DATA, []),
(GET_MARKINGS_TEST_DATA, "x.z.[-2]"), (GET_MARKINGS_TEST_DATA, [""]),
(GET_MARKINGS_TEST_DATA, "c.f"), (GET_MARKINGS_TEST_DATA, "x.z.[-2]"),
(GET_MARKINGS_TEST_DATA, "c.[2].i"), (GET_MARKINGS_TEST_DATA, "c.f"),
(GET_MARKINGS_TEST_DATA, "c.[3]"), (GET_MARKINGS_TEST_DATA, "c.[2].i"),
(GET_MARKINGS_TEST_DATA, "d"), (GET_MARKINGS_TEST_DATA, "c.[3]"),
(GET_MARKINGS_TEST_DATA, "x.[0]"), (GET_MARKINGS_TEST_DATA, "d"),
(GET_MARKINGS_TEST_DATA, "z.y.w"), (GET_MARKINGS_TEST_DATA, "x.[0]"),
(GET_MARKINGS_TEST_DATA, "x.z.[1]"), (GET_MARKINGS_TEST_DATA, "z.y.w"),
(GET_MARKINGS_TEST_DATA, "x.z.foo3") (GET_MARKINGS_TEST_DATA, "x.z.[1]"),
]) (GET_MARKINGS_TEST_DATA, "x.z.foo3"),
],
)
def test_get_markings_bad_selector(data, selector): def test_get_markings_bad_selector(data, selector):
"""Test bad selectors raise exception""" """Test bad selectors raise exception"""
with pytest.raises(AssertionError): with pytest.raises(AssertionError):
@ -362,40 +377,42 @@ def test_get_markings_positional_arguments_combinations(data):
assert set(markings.get_markings(data, "x.z.foo2", False, True)) == set(["10"]) assert set(markings.get_markings(data, "x.z.foo2", False, True)) == set(["10"])
@pytest.mark.parametrize("data", [ @pytest.mark.parametrize(
( "data", [
Malware( (
granular_markings=[ Malware(
{ granular_markings=[
"selectors": ["description"], {
"marking_ref": MARKING_IDS[0] "selectors": ["description"],
}, "marking_ref": MARKING_IDS[0],
{ },
"selectors": ["description"], {
"marking_ref": MARKING_IDS[1] "selectors": ["description"],
}, "marking_ref": MARKING_IDS[1],
], },
**MALWARE_KWARGS ],
**MALWARE_KWARGS
),
[MARKING_IDS[0], MARKING_IDS[1]],
), ),
[MARKING_IDS[0], MARKING_IDS[1]], (
), dict(
( granular_markings=[
dict( {
granular_markings=[ "selectors": ["description"],
{ "marking_ref": MARKING_IDS[0],
"selectors": ["description"], },
"marking_ref": MARKING_IDS[0] {
}, "selectors": ["description"],
{ "marking_ref": MARKING_IDS[1],
"selectors": ["description"], },
"marking_ref": MARKING_IDS[1] ],
}, **MALWARE_KWARGS
], ),
**MALWARE_KWARGS [MARKING_IDS[0], MARKING_IDS[1]],
), ),
[MARKING_IDS[0], MARKING_IDS[1]], ],
), )
])
def test_remove_marking_remove_one_selector_with_multiple_refs(data): def test_remove_marking_remove_one_selector_with_multiple_refs(data):
before = markings.remove_markings(data[0], data[1], ["description"]) before = markings.remove_markings(data[0], data[1], ["description"])
assert "granular_markings" not in before assert "granular_markings" not in before
@ -406,8 +423,8 @@ def test_remove_marking_remove_multiple_selector_one_ref():
granular_markings=[ granular_markings=[
{ {
"selectors": ["description", "modified"], "selectors": ["description", "modified"],
"marking_ref": MARKING_IDS[0] "marking_ref": MARKING_IDS[0],
} },
], ],
**MALWARE_KWARGS **MALWARE_KWARGS
) )
@ -420,8 +437,8 @@ def test_remove_marking_mark_one_selector_from_multiple_ones():
granular_markings=[ granular_markings=[
{ {
"selectors": ["description"], "selectors": ["description"],
"marking_ref": MARKING_IDS[0] "marking_ref": MARKING_IDS[0],
} },
], ],
**MALWARE_KWARGS **MALWARE_KWARGS
) )
@ -429,8 +446,8 @@ def test_remove_marking_mark_one_selector_from_multiple_ones():
granular_markings=[ granular_markings=[
{ {
"selectors": ["description", "modified"], "selectors": ["description", "modified"],
"marking_ref": MARKING_IDS[0] "marking_ref": MARKING_IDS[0],
} },
], ],
**MALWARE_KWARGS **MALWARE_KWARGS
) )
@ -444,12 +461,12 @@ def test_remove_marking_mark_one_selector_markings_from_multiple_ones():
granular_markings=[ granular_markings=[
{ {
"selectors": ["description"], "selectors": ["description"],
"marking_ref": MARKING_IDS[0] "marking_ref": MARKING_IDS[0],
}, },
{ {
"selectors": ["description", "modified"], "selectors": ["description", "modified"],
"marking_ref": MARKING_IDS[1] "marking_ref": MARKING_IDS[1],
} },
], ],
**MALWARE_KWARGS **MALWARE_KWARGS
) )
@ -457,12 +474,12 @@ def test_remove_marking_mark_one_selector_markings_from_multiple_ones():
granular_markings=[ granular_markings=[
{ {
"selectors": ["description", "modified"], "selectors": ["description", "modified"],
"marking_ref": MARKING_IDS[0] "marking_ref": MARKING_IDS[0],
}, },
{ {
"selectors": ["description", "modified"], "selectors": ["description", "modified"],
"marking_ref": MARKING_IDS[1] "marking_ref": MARKING_IDS[1],
} },
], ],
**MALWARE_KWARGS **MALWARE_KWARGS
) )
@ -476,12 +493,12 @@ def test_remove_marking_mark_mutilple_selector_multiple_refs():
granular_markings=[ granular_markings=[
{ {
"selectors": ["description", "modified"], "selectors": ["description", "modified"],
"marking_ref": MARKING_IDS[0] "marking_ref": MARKING_IDS[0],
}, },
{ {
"selectors": ["description", "modified"], "selectors": ["description", "modified"],
"marking_ref": MARKING_IDS[1] "marking_ref": MARKING_IDS[1],
} },
], ],
**MALWARE_KWARGS **MALWARE_KWARGS
) )
@ -494,8 +511,8 @@ def test_remove_marking_mark_another_property_same_marking():
granular_markings=[ granular_markings=[
{ {
"selectors": ["description"], "selectors": ["description"],
"marking_ref": MARKING_IDS[0] "marking_ref": MARKING_IDS[0],
} },
], ],
**MALWARE_KWARGS **MALWARE_KWARGS
) )
@ -503,12 +520,12 @@ def test_remove_marking_mark_another_property_same_marking():
granular_markings=[ granular_markings=[
{ {
"selectors": ["description"], "selectors": ["description"],
"marking_ref": MARKING_IDS[0] "marking_ref": MARKING_IDS[0],
}, },
{ {
"selectors": ["modified"], "selectors": ["modified"],
"marking_ref": MARKING_IDS[0] "marking_ref": MARKING_IDS[0],
} },
], ],
**MALWARE_KWARGS **MALWARE_KWARGS
) )
@ -522,8 +539,8 @@ def test_remove_marking_mark_same_property_same_marking():
granular_markings=[ granular_markings=[
{ {
"selectors": ["description"], "selectors": ["description"],
"marking_ref": MARKING_IDS[0] "marking_ref": MARKING_IDS[0],
} },
], ],
**MALWARE_KWARGS **MALWARE_KWARGS
) )
@ -552,8 +569,8 @@ def test_remove_marking_not_present():
granular_markings=[ granular_markings=[
{ {
"selectors": ["description"], "selectors": ["description"],
"marking_ref": MARKING_IDS[0] "marking_ref": MARKING_IDS[0],
} },
], ],
**MALWARE_KWARGS **MALWARE_KWARGS
) )
@ -566,15 +583,15 @@ IS_MARKED_TEST_DATA = [
granular_markings=[ granular_markings=[
{ {
"selectors": ["description"], "selectors": ["description"],
"marking_ref": MARKING_IDS[1] "marking_ref": MARKING_IDS[1],
}, },
{ {
"selectors": ["labels", "description"], "selectors": ["labels", "description"],
"marking_ref": MARKING_IDS[2] "marking_ref": MARKING_IDS[2],
}, },
{ {
"selectors": ["labels", "description"], "selectors": ["labels", "description"],
"marking_ref": MARKING_IDS[3] "marking_ref": MARKING_IDS[3],
}, },
], ],
**MALWARE_KWARGS **MALWARE_KWARGS
@ -583,15 +600,15 @@ IS_MARKED_TEST_DATA = [
granular_markings=[ granular_markings=[
{ {
"selectors": ["description"], "selectors": ["description"],
"marking_ref": MARKING_IDS[1] "marking_ref": MARKING_IDS[1],
}, },
{ {
"selectors": ["labels", "description"], "selectors": ["labels", "description"],
"marking_ref": MARKING_IDS[2] "marking_ref": MARKING_IDS[2],
}, },
{ {
"selectors": ["labels", "description"], "selectors": ["labels", "description"],
"marking_ref": MARKING_IDS[3] "marking_ref": MARKING_IDS[3],
}, },
], ],
**MALWARE_KWARGS **MALWARE_KWARGS
@ -606,21 +623,23 @@ def test_is_marked_smoke(data):
assert markings.is_marked(data, selectors=["modified"]) is False assert markings.is_marked(data, selectors=["modified"]) is False
@pytest.mark.parametrize("data,selector", [ @pytest.mark.parametrize(
(IS_MARKED_TEST_DATA[0], "foo"), "data,selector", [
(IS_MARKED_TEST_DATA[0], ""), (IS_MARKED_TEST_DATA[0], "foo"),
(IS_MARKED_TEST_DATA[0], []), (IS_MARKED_TEST_DATA[0], ""),
(IS_MARKED_TEST_DATA[0], [""]), (IS_MARKED_TEST_DATA[0], []),
(IS_MARKED_TEST_DATA[0], "x.z.[-2]"), (IS_MARKED_TEST_DATA[0], [""]),
(IS_MARKED_TEST_DATA[0], "c.f"), (IS_MARKED_TEST_DATA[0], "x.z.[-2]"),
(IS_MARKED_TEST_DATA[0], "c.[2].i"), (IS_MARKED_TEST_DATA[0], "c.f"),
(IS_MARKED_TEST_DATA[1], "c.[3]"), (IS_MARKED_TEST_DATA[0], "c.[2].i"),
(IS_MARKED_TEST_DATA[1], "d"), (IS_MARKED_TEST_DATA[1], "c.[3]"),
(IS_MARKED_TEST_DATA[1], "x.[0]"), (IS_MARKED_TEST_DATA[1], "d"),
(IS_MARKED_TEST_DATA[1], "z.y.w"), (IS_MARKED_TEST_DATA[1], "x.[0]"),
(IS_MARKED_TEST_DATA[1], "x.z.[1]"), (IS_MARKED_TEST_DATA[1], "z.y.w"),
(IS_MARKED_TEST_DATA[1], "x.z.foo3") (IS_MARKED_TEST_DATA[1], "x.z.[1]"),
]) (IS_MARKED_TEST_DATA[1], "x.z.foo3"),
],
)
def test_is_marked_invalid_selector(data, selector): def test_is_marked_invalid_selector(data, selector):
"""Test invalid selector raises an error.""" """Test invalid selector raises an error."""
with pytest.raises(AssertionError): with pytest.raises(AssertionError):
@ -688,61 +707,61 @@ def test_is_marked_positional_arguments_combinations():
"list value", "list value",
{ {
"g": "nested", "g": "nested",
"h": 45 "h": 45,
} },
], ],
"x": { "x": {
"y": [ "y": [
"hello", "hello",
88 88,
], ],
"z": { "z": {
"foo1": "bar", "foo1": "bar",
"foo2": 65 "foo2": 65,
} },
}, },
"granular_markings": [ "granular_markings": [
{ {
"marking_ref": "1", "marking_ref": "1",
"selectors": ["a"] "selectors": ["a"],
}, },
{ {
"marking_ref": "2", "marking_ref": "2",
"selectors": ["c"] "selectors": ["c"],
}, },
{ {
"marking_ref": "3", "marking_ref": "3",
"selectors": ["c.[1]"] "selectors": ["c.[1]"],
}, },
{ {
"marking_ref": "4", "marking_ref": "4",
"selectors": ["c.[2]"] "selectors": ["c.[2]"],
}, },
{ {
"marking_ref": "5", "marking_ref": "5",
"selectors": ["c.[2].g"] "selectors": ["c.[2].g"],
}, },
{ {
"marking_ref": "6", "marking_ref": "6",
"selectors": ["x"] "selectors": ["x"],
}, },
{ {
"marking_ref": "7", "marking_ref": "7",
"selectors": ["x.y"] "selectors": ["x.y"],
}, },
{ {
"marking_ref": "8", "marking_ref": "8",
"selectors": ["x.y.[1]"] "selectors": ["x.y.[1]"],
}, },
{ {
"marking_ref": "9", "marking_ref": "9",
"selectors": ["x.z"] "selectors": ["x.z"],
}, },
{ {
"marking_ref": "10", "marking_ref": "10",
"selectors": ["x.z.foo2"] "selectors": ["x.z.foo2"],
}, },
] ],
} }
assert markings.is_marked(test_sdo, ["1"], "a", False, False) assert markings.is_marked(test_sdo, ["1"], "a", False, False)
@ -822,8 +841,8 @@ def test_create_sdo_with_invalid_marking():
granular_markings=[ granular_markings=[
{ {
"selectors": ["foo"], "selectors": ["foo"],
"marking_ref": MARKING_IDS[0] "marking_ref": MARKING_IDS[0],
} },
], ],
**MALWARE_KWARGS **MALWARE_KWARGS
) )
@ -838,12 +857,12 @@ def test_set_marking_mark_one_selector_multiple_refs():
granular_markings=[ granular_markings=[
{ {
"selectors": ["description"], "selectors": ["description"],
"marking_ref": MARKING_IDS[0] "marking_ref": MARKING_IDS[0],
}, },
{ {
"selectors": ["description"], "selectors": ["description"],
"marking_ref": MARKING_IDS[1] "marking_ref": MARKING_IDS[1],
} },
], ],
**MALWARE_KWARGS **MALWARE_KWARGS
) )
@ -857,8 +876,8 @@ def test_set_marking_mark_multiple_selector_one_refs():
granular_markings=[ granular_markings=[
{ {
"selectors": ["description", "modified"], "selectors": ["description", "modified"],
"marking_ref": MARKING_IDS[1] "marking_ref": MARKING_IDS[1],
} },
], ],
**MALWARE_KWARGS **MALWARE_KWARGS
) )
@ -866,8 +885,8 @@ def test_set_marking_mark_multiple_selector_one_refs():
granular_markings=[ granular_markings=[
{ {
"selectors": ["description", "modified"], "selectors": ["description", "modified"],
"marking_ref": MARKING_IDS[0] "marking_ref": MARKING_IDS[0],
} },
], ],
**MALWARE_KWARGS **MALWARE_KWARGS
) )
@ -884,12 +903,12 @@ def test_set_marking_mark_multiple_selector_multiple_refs_from_none():
granular_markings=[ granular_markings=[
{ {
"selectors": ["description", "modified"], "selectors": ["description", "modified"],
"marking_ref": MARKING_IDS[0] "marking_ref": MARKING_IDS[0],
}, },
{ {
"selectors": ["description", "modified"], "selectors": ["description", "modified"],
"marking_ref": MARKING_IDS[1] "marking_ref": MARKING_IDS[1],
} },
], ],
**MALWARE_KWARGS **MALWARE_KWARGS
) )
@ -903,8 +922,8 @@ def test_set_marking_mark_another_property_same_marking():
granular_markings=[ granular_markings=[
{ {
"selectors": ["description"], "selectors": ["description"],
"marking_ref": MARKING_IDS[0] "marking_ref": MARKING_IDS[0],
} },
], ],
**MALWARE_KWARGS **MALWARE_KWARGS
) )
@ -912,12 +931,12 @@ def test_set_marking_mark_another_property_same_marking():
granular_markings=[ granular_markings=[
{ {
"selectors": ["description"], "selectors": ["description"],
"marking_ref": MARKING_IDS[1] "marking_ref": MARKING_IDS[1],
}, },
{ {
"selectors": ["description"], "selectors": ["description"],
"marking_ref": MARKING_IDS[2] "marking_ref": MARKING_IDS[2],
} },
], ],
**MALWARE_KWARGS **MALWARE_KWARGS
) )
@ -927,19 +946,21 @@ def test_set_marking_mark_another_property_same_marking():
assert m in after["granular_markings"] assert m in after["granular_markings"]
@pytest.mark.parametrize("marking", [ @pytest.mark.parametrize(
([MARKING_IDS[4], MARKING_IDS[5]], ["foo"]), "marking", [
([MARKING_IDS[4], MARKING_IDS[5]], ""), ([MARKING_IDS[4], MARKING_IDS[5]], ["foo"]),
([MARKING_IDS[4], MARKING_IDS[5]], []), ([MARKING_IDS[4], MARKING_IDS[5]], ""),
([MARKING_IDS[4], MARKING_IDS[5]], [""]), ([MARKING_IDS[4], MARKING_IDS[5]], []),
]) ([MARKING_IDS[4], MARKING_IDS[5]], [""]),
],
)
def test_set_marking_bad_selector(marking): def test_set_marking_bad_selector(marking):
before = Malware( before = Malware(
granular_markings=[ granular_markings=[
{ {
"selectors": ["description"], "selectors": ["description"],
"marking_ref": MARKING_IDS[0] "marking_ref": MARKING_IDS[0],
} },
], ],
**MALWARE_KWARGS **MALWARE_KWARGS
) )
@ -947,8 +968,8 @@ def test_set_marking_bad_selector(marking):
granular_markings=[ granular_markings=[
{ {
"selectors": ["description"], "selectors": ["description"],
"marking_ref": MARKING_IDS[0] "marking_ref": MARKING_IDS[0],
} },
], ],
**MALWARE_KWARGS **MALWARE_KWARGS
) )
@ -964,8 +985,8 @@ def test_set_marking_mark_same_property_same_marking():
granular_markings=[ granular_markings=[
{ {
"selectors": ["description"], "selectors": ["description"],
"marking_ref": MARKING_IDS[0] "marking_ref": MARKING_IDS[0],
} },
], ],
**MALWARE_KWARGS **MALWARE_KWARGS
) )
@ -973,8 +994,8 @@ def test_set_marking_mark_same_property_same_marking():
granular_markings=[ granular_markings=[
{ {
"selectors": ["description"], "selectors": ["description"],
"marking_ref": MARKING_IDS[0] "marking_ref": MARKING_IDS[0],
} },
], ],
**MALWARE_KWARGS **MALWARE_KWARGS
) )
@ -988,15 +1009,15 @@ CLEAR_MARKINGS_TEST_DATA = [
granular_markings=[ granular_markings=[
{ {
"selectors": ["description"], "selectors": ["description"],
"marking_ref": MARKING_IDS[0] "marking_ref": MARKING_IDS[0],
}, },
{ {
"selectors": ["modified", "description"], "selectors": ["modified", "description"],
"marking_ref": MARKING_IDS[1] "marking_ref": MARKING_IDS[1],
}, },
{ {
"selectors": ["modified", "description", "type"], "selectors": ["modified", "description", "type"],
"marking_ref": MARKING_IDS[2] "marking_ref": MARKING_IDS[2],
}, },
], ],
**MALWARE_KWARGS **MALWARE_KWARGS
@ -1005,19 +1026,19 @@ CLEAR_MARKINGS_TEST_DATA = [
granular_markings=[ granular_markings=[
{ {
"selectors": ["description"], "selectors": ["description"],
"marking_ref": MARKING_IDS[0] "marking_ref": MARKING_IDS[0],
}, },
{ {
"selectors": ["modified", "description"], "selectors": ["modified", "description"],
"marking_ref": MARKING_IDS[1] "marking_ref": MARKING_IDS[1],
}, },
{ {
"selectors": ["modified", "description", "type"], "selectors": ["modified", "description", "type"],
"marking_ref": MARKING_IDS[2] "marking_ref": MARKING_IDS[2],
}, },
], ],
**MALWARE_KWARGS **MALWARE_KWARGS
) ),
] ]
@ -1049,12 +1070,14 @@ def test_clear_marking_all_selectors(data):
assert "granular_markings" not in data assert "granular_markings" not in data
@pytest.mark.parametrize("data,selector", [ @pytest.mark.parametrize(
(CLEAR_MARKINGS_TEST_DATA[0], "foo"), "data,selector", [
(CLEAR_MARKINGS_TEST_DATA[0], ""), (CLEAR_MARKINGS_TEST_DATA[0], "foo"),
(CLEAR_MARKINGS_TEST_DATA[1], []), (CLEAR_MARKINGS_TEST_DATA[0], ""),
(CLEAR_MARKINGS_TEST_DATA[1], [""]), (CLEAR_MARKINGS_TEST_DATA[1], []),
]) (CLEAR_MARKINGS_TEST_DATA[1], [""]),
],
)
def test_clear_marking_bad_selector(data, selector): def test_clear_marking_bad_selector(data, selector):
"""Test bad selector raises exception.""" """Test bad selector raises exception."""
with pytest.raises(AssertionError): with pytest.raises(AssertionError):
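
The granular-markings changes above are mostly re-indentation and trailing commas; a brief sketch of the markings helpers these tests call, shown with the v20 TLP_RED definition:

import stix2
from stix2 import markings

mal = stix2.v20.Malware(
    name="Cryptolocker",
    labels=["ransomware"],
    description="Encrypts files and demands payment",
)

# add_markings returns a new object with the granular marking applied;
# STIX objects themselves stay immutable.
marked = markings.add_markings(mal, stix2.v20.TLP_RED.id, ["description"])
assert markings.is_marked(marked, stix2.v20.TLP_RED.id, ["description"])
assert stix2.v20.TLP_RED.id in markings.get_markings(marked, "description")

cleared = markings.clear_markings(marked, ["description"])
assert not markings.is_marked(cleared, selectors=["description"])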

View File

@ -18,7 +18,7 @@ EXPECTED = """{
def test_identity_example(): def test_identity_example():
identity = stix2.Identity( identity = stix2.v20.Identity(
id="identity--311b2d2d-f010-4473-83ec-1edf84858f4c", id="identity--311b2d2d-f010-4473-83ec-1edf84858f4c",
created="2015-12-21T19:59:11.000Z", created="2015-12-21T19:59:11.000Z",
modified="2015-12-21T19:59:11.000Z", modified="2015-12-21T19:59:11.000Z",
@ -29,19 +29,21 @@ def test_identity_example():
assert str(identity) == EXPECTED assert str(identity) == EXPECTED
@pytest.mark.parametrize("data", [ @pytest.mark.parametrize(
EXPECTED, "data", [
{ EXPECTED,
"created": "2015-12-21T19:59:11.000Z", {
"id": "identity--311b2d2d-f010-4473-83ec-1edf84858f4c", "created": "2015-12-21T19:59:11.000Z",
"identity_class": "individual", "id": "identity--311b2d2d-f010-4473-83ec-1edf84858f4c",
"modified": "2015-12-21T19:59:11.000Z", "identity_class": "individual",
"name": "John Smith", "modified": "2015-12-21T19:59:11.000Z",
"type": "identity" "name": "John Smith",
}, "type": "identity",
]) },
],
)
def test_parse_identity(data): def test_parse_identity(data):
identity = stix2.parse(data) identity = stix2.parse(data, version="2.0")
assert identity.type == 'identity' assert identity.type == 'identity'
assert identity.id == IDENTITY_ID assert identity.id == IDENTITY_ID
@ -52,21 +54,23 @@ def test_parse_identity(data):
def test_parse_no_type(): def test_parse_no_type():
with pytest.raises(stix2.exceptions.ParseError): with pytest.raises(stix2.exceptions.ParseError):
stix2.parse(""" stix2.parse(
"""
{ {
"id": "identity--311b2d2d-f010-4473-83ec-1edf84858f4c", "id": "identity--311b2d2d-f010-4473-83ec-1edf84858f4c",
"created": "2015-12-21T19:59:11.000Z", "created": "2015-12-21T19:59:11.000Z",
"modified": "2015-12-21T19:59:11.000Z", "modified": "2015-12-21T19:59:11.000Z",
"name": "John Smith", "name": "John Smith",
"identity_class": "individual" "identity_class": "individual"
}""") }""", version="2.0",
)
def test_identity_with_custom(): def test_identity_with_custom():
identity = stix2.Identity( identity = stix2.v20.Identity(
name="John Smith", name="John Smith",
identity_class="individual", identity_class="individual",
custom_properties={'x_foo': 'bar'} custom_properties={'x_foo': 'bar'},
) )
assert identity.x_foo == "bar" assert identity.x_foo == "bar"
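
The notable change in this file is the explicit version argument to parse(); a minimal sketch of that call:

import stix2

identity = stix2.parse(
    """{
        "type": "identity",
        "id": "identity--311b2d2d-f010-4473-83ec-1edf84858f4c",
        "created": "2015-12-21T19:59:11.000Z",
        "modified": "2015-12-21T19:59:11.000Z",
        "name": "John Smith",
        "identity_class": "individual"
    }""",
    version="2.0",  # select the STIX 2.0 object model explicitly
)
assert identity.identity_class == "individual"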

View File

@ -35,7 +35,7 @@ def test_indicator_with_all_required_properties():
now = dt.datetime(2017, 1, 1, 0, 0, 1, tzinfo=pytz.utc) now = dt.datetime(2017, 1, 1, 0, 0, 1, tzinfo=pytz.utc)
epoch = dt.datetime(1970, 1, 1, 0, 0, 1, tzinfo=pytz.utc) epoch = dt.datetime(1970, 1, 1, 0, 0, 1, tzinfo=pytz.utc)
ind = stix2.Indicator( ind = stix2.v20.Indicator(
type="indicator", type="indicator",
id=INDICATOR_ID, id=INDICATOR_ID,
created=now, created=now,
@ -71,9 +71,9 @@ def test_indicator_autogenerated_properties(indicator):
def test_indicator_type_must_be_indicator(): def test_indicator_type_must_be_indicator():
with pytest.raises(stix2.exceptions.InvalidValueError) as excinfo: with pytest.raises(stix2.exceptions.InvalidValueError) as excinfo:
stix2.Indicator(type='xxx', **INDICATOR_KWARGS) stix2.v20.Indicator(type='xxx', **INDICATOR_KWARGS)
assert excinfo.value.cls == stix2.Indicator assert excinfo.value.cls == stix2.v20.Indicator
assert excinfo.value.prop_name == "type" assert excinfo.value.prop_name == "type"
assert excinfo.value.reason == "must equal 'indicator'." assert excinfo.value.reason == "must equal 'indicator'."
assert str(excinfo.value) == "Invalid value for Indicator 'type': must equal 'indicator'." assert str(excinfo.value) == "Invalid value for Indicator 'type': must equal 'indicator'."
@ -81,9 +81,9 @@ def test_indicator_type_must_be_indicator():
def test_indicator_id_must_start_with_indicator(): def test_indicator_id_must_start_with_indicator():
with pytest.raises(stix2.exceptions.InvalidValueError) as excinfo: with pytest.raises(stix2.exceptions.InvalidValueError) as excinfo:
stix2.Indicator(id='my-prefix--', **INDICATOR_KWARGS) stix2.v20.Indicator(id='my-prefix--', **INDICATOR_KWARGS)
assert excinfo.value.cls == stix2.Indicator assert excinfo.value.cls == stix2.v20.Indicator
assert excinfo.value.prop_name == "id" assert excinfo.value.prop_name == "id"
assert excinfo.value.reason == "must start with 'indicator--'." assert excinfo.value.reason == "must start with 'indicator--'."
assert str(excinfo.value) == "Invalid value for Indicator 'id': must start with 'indicator--'." assert str(excinfo.value) == "Invalid value for Indicator 'id': must start with 'indicator--'."
@ -91,26 +91,26 @@ def test_indicator_id_must_start_with_indicator():
def test_indicator_required_properties(): def test_indicator_required_properties():
with pytest.raises(stix2.exceptions.MissingPropertiesError) as excinfo: with pytest.raises(stix2.exceptions.MissingPropertiesError) as excinfo:
stix2.Indicator() stix2.v20.Indicator()
assert excinfo.value.cls == stix2.Indicator assert excinfo.value.cls == stix2.v20.Indicator
assert excinfo.value.properties == ["labels", "pattern"] assert excinfo.value.properties == ["labels", "pattern"]
assert str(excinfo.value) == "No values for required properties for Indicator: (labels, pattern)." assert str(excinfo.value) == "No values for required properties for Indicator: (labels, pattern)."
def test_indicator_required_property_pattern(): def test_indicator_required_property_pattern():
with pytest.raises(stix2.exceptions.MissingPropertiesError) as excinfo: with pytest.raises(stix2.exceptions.MissingPropertiesError) as excinfo:
stix2.Indicator(labels=['malicious-activity']) stix2.v20.Indicator(labels=['malicious-activity'])
assert excinfo.value.cls == stix2.Indicator assert excinfo.value.cls == stix2.v20.Indicator
assert excinfo.value.properties == ["pattern"] assert excinfo.value.properties == ["pattern"]
def test_indicator_created_ref_invalid_format(): def test_indicator_created_ref_invalid_format():
with pytest.raises(stix2.exceptions.InvalidValueError) as excinfo: with pytest.raises(stix2.exceptions.InvalidValueError) as excinfo:
stix2.Indicator(created_by_ref='myprefix--12345678', **INDICATOR_KWARGS) stix2.v20.Indicator(created_by_ref='myprefix--12345678', **INDICATOR_KWARGS)
assert excinfo.value.cls == stix2.Indicator assert excinfo.value.cls == stix2.v20.Indicator
assert excinfo.value.prop_name == "created_by_ref" assert excinfo.value.prop_name == "created_by_ref"
assert excinfo.value.reason == "must start with 'identity'." assert excinfo.value.reason == "must start with 'identity'."
assert str(excinfo.value) == "Invalid value for Indicator 'created_by_ref': must start with 'identity'." assert str(excinfo.value) == "Invalid value for Indicator 'created_by_ref': must start with 'identity'."
@ -118,9 +118,9 @@ def test_indicator_created_ref_invalid_format():
def test_indicator_revoked_invalid(): def test_indicator_revoked_invalid():
with pytest.raises(stix2.exceptions.InvalidValueError) as excinfo: with pytest.raises(stix2.exceptions.InvalidValueError) as excinfo:
stix2.Indicator(revoked='no', **INDICATOR_KWARGS) stix2.v20.Indicator(revoked='no', **INDICATOR_KWARGS)
assert excinfo.value.cls == stix2.Indicator assert excinfo.value.cls == stix2.v20.Indicator
assert excinfo.value.prop_name == "revoked" assert excinfo.value.prop_name == "revoked"
assert excinfo.value.reason == "must be a boolean value." assert excinfo.value.reason == "must be a boolean value."
@ -134,36 +134,38 @@ def test_cannot_assign_to_indicator_attributes(indicator):
def test_invalid_kwarg_to_indicator(): def test_invalid_kwarg_to_indicator():
with pytest.raises(stix2.exceptions.ExtraPropertiesError) as excinfo: with pytest.raises(stix2.exceptions.ExtraPropertiesError) as excinfo:
stix2.Indicator(my_custom_property="foo", **INDICATOR_KWARGS) stix2.v20.Indicator(my_custom_property="foo", **INDICATOR_KWARGS)
assert excinfo.value.cls == stix2.Indicator assert excinfo.value.cls == stix2.v20.Indicator
assert excinfo.value.properties == ['my_custom_property'] assert excinfo.value.properties == ['my_custom_property']
assert str(excinfo.value) == "Unexpected properties for Indicator: (my_custom_property)." assert str(excinfo.value) == "Unexpected properties for Indicator: (my_custom_property)."
def test_created_modified_time_are_identical_by_default(): def test_created_modified_time_are_identical_by_default():
"""By default, the created and modified times should be the same.""" """By default, the created and modified times should be the same."""
ind = stix2.Indicator(**INDICATOR_KWARGS) ind = stix2.v20.Indicator(**INDICATOR_KWARGS)
assert ind.created == ind.modified assert ind.created == ind.modified
@pytest.mark.parametrize("data", [ @pytest.mark.parametrize(
EXPECTED_INDICATOR, "data", [
{ EXPECTED_INDICATOR,
"type": "indicator", {
"id": "indicator--a740531e-63ff-4e49-a9e1-a0a3eed0e3e7", "type": "indicator",
"created": "2017-01-01T00:00:01Z", "id": "indicator--a740531e-63ff-4e49-a9e1-a0a3eed0e3e7",
"modified": "2017-01-01T00:00:01Z", "created": "2017-01-01T00:00:01Z",
"labels": [ "modified": "2017-01-01T00:00:01Z",
"malicious-activity" "labels": [
], "malicious-activity",
"pattern": "[file:hashes.MD5 = 'd41d8cd98f00b204e9800998ecf8427e']", ],
"valid_from": "1970-01-01T00:00:01Z" "pattern": "[file:hashes.MD5 = 'd41d8cd98f00b204e9800998ecf8427e']",
}, "valid_from": "1970-01-01T00:00:01Z",
]) },
],
)
def test_parse_indicator(data): def test_parse_indicator(data):
idctr = stix2.parse(data) idctr = stix2.parse(data, version="2.0")
assert idctr.type == 'indicator' assert idctr.type == 'indicator'
assert idctr.id == INDICATOR_ID assert idctr.id == INDICATOR_ID
@ -176,19 +178,19 @@ def test_parse_indicator(data):
def test_invalid_indicator_pattern(): def test_invalid_indicator_pattern():
with pytest.raises(stix2.exceptions.InvalidValueError) as excinfo: with pytest.raises(stix2.exceptions.InvalidValueError) as excinfo:
stix2.Indicator( stix2.v20.Indicator(
labels=['malicious-activity'], labels=['malicious-activity'],
pattern="file:hashes.MD5 = 'd41d8cd98f00b204e9800998ecf8427e'", pattern="file:hashes.MD5 = 'd41d8cd98f00b204e9800998ecf8427e'",
) )
assert excinfo.value.cls == stix2.Indicator assert excinfo.value.cls == stix2.v20.Indicator
assert excinfo.value.prop_name == 'pattern' assert excinfo.value.prop_name == 'pattern'
assert 'input is missing square brackets' in excinfo.value.reason assert 'input is missing square brackets' in excinfo.value.reason
with pytest.raises(stix2.exceptions.InvalidValueError) as excinfo: with pytest.raises(stix2.exceptions.InvalidValueError) as excinfo:
stix2.Indicator( stix2.v20.Indicator(
labels=['malicious-activity'], labels=['malicious-activity'],
pattern='[file:hashes.MD5 = "d41d8cd98f00b204e9800998ecf8427e"]', pattern='[file:hashes.MD5 = "d41d8cd98f00b204e9800998ecf8427e"]',
) )
assert excinfo.value.cls == stix2.Indicator assert excinfo.value.cls == stix2.v20.Indicator
assert excinfo.value.prop_name == 'pattern' assert excinfo.value.prop_name == 'pattern'
assert 'mismatched input' in excinfo.value.reason assert 'mismatched input' in excinfo.value.reason
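
Beyond the v20 namespace change, these hunks rely on the built-in pattern validation; a short sketch of a valid indicator next to one that fails it:

import pytest
import stix2

# A well-formed STIX patterning expression passes validation.
ind = stix2.v20.Indicator(
    labels=["malicious-activity"],
    pattern="[file:hashes.MD5 = 'd41d8cd98f00b204e9800998ecf8427e']",
)
assert ind.type == "indicator"

# Dropping the square brackets trips the pattern validator.
with pytest.raises(stix2.exceptions.InvalidValueError) as excinfo:
    stix2.v20.Indicator(
        labels=["malicious-activity"],
        pattern="file:hashes.MD5 = 'd41d8cd98f00b204e9800998ecf8427e'",
    )
assert excinfo.value.prop_name == "pattern"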

View File

@ -27,7 +27,7 @@ EXPECTED = """{
def test_intrusion_set_example(): def test_intrusion_set_example():
intrusion_set = stix2.IntrusionSet( intrusion_set = stix2.v20.IntrusionSet(
id="intrusion-set--4e78f46f-a023-4e5f-bc24-71b3ca22ec29", id="intrusion-set--4e78f46f-a023-4e5f-bc24-71b3ca22ec29",
created_by_ref="identity--f431f809-377b-45e0-aa1c-6a4751cae5ff", created_by_ref="identity--f431f809-377b-45e0-aa1c-6a4751cae5ff",
created="2016-04-06T20:03:48.000Z", created="2016-04-06T20:03:48.000Z",
@ -35,34 +35,36 @@ def test_intrusion_set_example():
name="Bobcat Breakin", name="Bobcat Breakin",
description="Incidents usually feature a shared TTP of a bobcat being released...", description="Incidents usually feature a shared TTP of a bobcat being released...",
aliases=["Zookeeper"], aliases=["Zookeeper"],
goals=["acquisition-theft", "harassment", "damage"] goals=["acquisition-theft", "harassment", "damage"],
) )
assert str(intrusion_set) == EXPECTED assert str(intrusion_set) == EXPECTED
@pytest.mark.parametrize("data", [ @pytest.mark.parametrize(
EXPECTED, "data", [
{ EXPECTED,
"aliases": [ {
"Zookeeper" "aliases": [
], "Zookeeper",
"created": "2016-04-06T20:03:48.000Z", ],
"created_by_ref": "identity--f431f809-377b-45e0-aa1c-6a4751cae5ff", "created": "2016-04-06T20:03:48.000Z",
"description": "Incidents usually feature a shared TTP of a bobcat being released...", "created_by_ref": "identity--f431f809-377b-45e0-aa1c-6a4751cae5ff",
"goals": [ "description": "Incidents usually feature a shared TTP of a bobcat being released...",
"acquisition-theft", "goals": [
"harassment", "acquisition-theft",
"damage" "harassment",
], "damage",
"id": "intrusion-set--4e78f46f-a023-4e5f-bc24-71b3ca22ec29", ],
"modified": "2016-04-06T20:03:48.000Z", "id": "intrusion-set--4e78f46f-a023-4e5f-bc24-71b3ca22ec29",
"name": "Bobcat Breakin", "modified": "2016-04-06T20:03:48.000Z",
"type": "intrusion-set" "name": "Bobcat Breakin",
}, "type": "intrusion-set",
]) },
],
)
def test_parse_intrusion_set(data): def test_parse_intrusion_set(data):
intset = stix2.parse(data) intset = stix2.parse(data, version="2.0")
assert intset.type == "intrusion-set" assert intset.type == "intrusion-set"
assert intset.id == INTRUSION_SET_ID assert intset.id == INTRUSION_SET_ID

View File

@ -11,7 +11,7 @@ LMCO_RECON = """{
def test_lockheed_martin_cyber_kill_chain(): def test_lockheed_martin_cyber_kill_chain():
recon = stix2.KillChainPhase( recon = stix2.v20.KillChainPhase(
kill_chain_name="lockheed-martin-cyber-kill-chain", kill_chain_name="lockheed-martin-cyber-kill-chain",
phase_name="reconnaissance", phase_name="reconnaissance",
) )
@ -26,7 +26,7 @@ FOO_PRE_ATTACK = """{
def test_kill_chain_example(): def test_kill_chain_example():
preattack = stix2.KillChainPhase( preattack = stix2.v20.KillChainPhase(
kill_chain_name="foo", kill_chain_name="foo",
phase_name="pre-attack", phase_name="pre-attack",
) )
@ -37,25 +37,25 @@ def test_kill_chain_example():
def test_kill_chain_required_properties(): def test_kill_chain_required_properties():
with pytest.raises(stix2.exceptions.MissingPropertiesError) as excinfo: with pytest.raises(stix2.exceptions.MissingPropertiesError) as excinfo:
stix2.KillChainPhase() stix2.v20.KillChainPhase()
assert excinfo.value.cls == stix2.KillChainPhase assert excinfo.value.cls == stix2.v20.KillChainPhase
assert excinfo.value.properties == ["kill_chain_name", "phase_name"] assert excinfo.value.properties == ["kill_chain_name", "phase_name"]
def test_kill_chain_required_property_chain_name(): def test_kill_chain_required_property_chain_name():
with pytest.raises(stix2.exceptions.MissingPropertiesError) as excinfo: with pytest.raises(stix2.exceptions.MissingPropertiesError) as excinfo:
stix2.KillChainPhase(phase_name="weaponization") stix2.v20.KillChainPhase(phase_name="weaponization")
assert excinfo.value.cls == stix2.KillChainPhase assert excinfo.value.cls == stix2.v20.KillChainPhase
assert excinfo.value.properties == ["kill_chain_name"] assert excinfo.value.properties == ["kill_chain_name"]
def test_kill_chain_required_property_phase_name(): def test_kill_chain_required_property_phase_name():
with pytest.raises(stix2.exceptions.MissingPropertiesError) as excinfo: with pytest.raises(stix2.exceptions.MissingPropertiesError) as excinfo:
stix2.KillChainPhase(kill_chain_name="lockheed-martin-cyber-kill-chain") stix2.v20.KillChainPhase(kill_chain_name="lockheed-martin-cyber-kill-chain")
assert excinfo.value.cls == stix2.KillChainPhase assert excinfo.value.cls == stix2.v20.KillChainPhase
assert excinfo.value.properties == ["phase_name"] assert excinfo.value.properties == ["phase_name"]
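
KillChainPhase values are rarely used on their own; a sketch of embedding one in an indicator's kill_chain_phases list (the pattern is illustrative):

import stix2

recon = stix2.v20.KillChainPhase(
    kill_chain_name="lockheed-martin-cyber-kill-chain",
    phase_name="reconnaissance",
)
ind = stix2.v20.Indicator(
    labels=["malicious-activity"],
    pattern="[ipv4-addr:value = '198.51.100.1']",
    kill_chain_phases=[recon],
)
assert ind.kill_chain_phases[0].phase_name == "reconnaissance"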

View File

@ -23,7 +23,7 @@ EXPECTED_MALWARE = """{
def test_malware_with_all_required_properties(): def test_malware_with_all_required_properties():
now = dt.datetime(2016, 5, 12, 8, 17, 27, tzinfo=pytz.utc) now = dt.datetime(2016, 5, 12, 8, 17, 27, tzinfo=pytz.utc)
mal = stix2.Malware( mal = stix2.v20.Malware(
type="malware", type="malware",
id=MALWARE_ID, id=MALWARE_ID,
created=now, created=now,
@ -53,9 +53,9 @@ def test_malware_autogenerated_properties(malware):
def test_malware_type_must_be_malware(): def test_malware_type_must_be_malware():
with pytest.raises(stix2.exceptions.InvalidValueError) as excinfo: with pytest.raises(stix2.exceptions.InvalidValueError) as excinfo:
stix2.Malware(type='xxx', **MALWARE_KWARGS) stix2.v20.Malware(type='xxx', **MALWARE_KWARGS)
assert excinfo.value.cls == stix2.Malware assert excinfo.value.cls == stix2.v20.Malware
assert excinfo.value.prop_name == "type" assert excinfo.value.prop_name == "type"
assert excinfo.value.reason == "must equal 'malware'." assert excinfo.value.reason == "must equal 'malware'."
assert str(excinfo.value) == "Invalid value for Malware 'type': must equal 'malware'." assert str(excinfo.value) == "Invalid value for Malware 'type': must equal 'malware'."
@ -63,9 +63,9 @@ def test_malware_type_must_be_malware():
def test_malware_id_must_start_with_malware(): def test_malware_id_must_start_with_malware():
with pytest.raises(stix2.exceptions.InvalidValueError) as excinfo: with pytest.raises(stix2.exceptions.InvalidValueError) as excinfo:
stix2.Malware(id='my-prefix--', **MALWARE_KWARGS) stix2.v20.Malware(id='my-prefix--', **MALWARE_KWARGS)
assert excinfo.value.cls == stix2.Malware assert excinfo.value.cls == stix2.v20.Malware
assert excinfo.value.prop_name == "id" assert excinfo.value.prop_name == "id"
assert excinfo.value.reason == "must start with 'malware--'." assert excinfo.value.reason == "must start with 'malware--'."
assert str(excinfo.value) == "Invalid value for Malware 'id': must start with 'malware--'." assert str(excinfo.value) == "Invalid value for Malware 'id': must start with 'malware--'."
@ -73,17 +73,17 @@ def test_malware_id_must_start_with_malware():
def test_malware_required_properties(): def test_malware_required_properties():
with pytest.raises(stix2.exceptions.MissingPropertiesError) as excinfo: with pytest.raises(stix2.exceptions.MissingPropertiesError) as excinfo:
stix2.Malware() stix2.v20.Malware()
assert excinfo.value.cls == stix2.Malware assert excinfo.value.cls == stix2.v20.Malware
assert excinfo.value.properties == ["labels", "name"] assert excinfo.value.properties == ["labels", "name"]
def test_malware_required_property_name(): def test_malware_required_property_name():
with pytest.raises(stix2.exceptions.MissingPropertiesError) as excinfo: with pytest.raises(stix2.exceptions.MissingPropertiesError) as excinfo:
stix2.Malware(labels=['ransomware']) stix2.v20.Malware(labels=['ransomware'])
assert excinfo.value.cls == stix2.Malware assert excinfo.value.cls == stix2.v20.Malware
assert excinfo.value.properties == ["name"] assert excinfo.value.properties == ["name"]
@ -96,26 +96,28 @@ def test_cannot_assign_to_malware_attributes(malware):
def test_invalid_kwarg_to_malware(): def test_invalid_kwarg_to_malware():
with pytest.raises(stix2.exceptions.ExtraPropertiesError) as excinfo: with pytest.raises(stix2.exceptions.ExtraPropertiesError) as excinfo:
stix2.Malware(my_custom_property="foo", **MALWARE_KWARGS) stix2.v20.Malware(my_custom_property="foo", **MALWARE_KWARGS)
assert excinfo.value.cls == stix2.Malware assert excinfo.value.cls == stix2.v20.Malware
assert excinfo.value.properties == ['my_custom_property'] assert excinfo.value.properties == ['my_custom_property']
assert str(excinfo.value) == "Unexpected properties for Malware: (my_custom_property)." assert str(excinfo.value) == "Unexpected properties for Malware: (my_custom_property)."
@pytest.mark.parametrize("data", [ @pytest.mark.parametrize(
EXPECTED_MALWARE, "data", [
{ EXPECTED_MALWARE,
"type": "malware", {
"id": "malware--9c4638ec-f1de-4ddb-abf4-1b760417654e", "type": "malware",
"created": "2016-05-12T08:17:27.000Z", "id": "malware--9c4638ec-f1de-4ddb-abf4-1b760417654e",
"modified": "2016-05-12T08:17:27.000Z", "created": "2016-05-12T08:17:27.000Z",
"labels": ["ransomware"], "modified": "2016-05-12T08:17:27.000Z",
"name": "Cryptolocker", "labels": ["ransomware"],
}, "name": "Cryptolocker",
]) },
],
)
def test_parse_malware(data): def test_parse_malware(data):
mal = stix2.parse(data) mal = stix2.parse(data, version="2.0")
assert mal.type == 'malware' assert mal.type == 'malware'
assert mal.id == MALWARE_ID assert mal.id == MALWARE_ID
@ -128,7 +130,7 @@ def test_parse_malware(data):
def test_parse_malware_invalid_labels(): def test_parse_malware_invalid_labels():
data = re.compile('\\[.+\\]', re.DOTALL).sub('1', EXPECTED_MALWARE) data = re.compile('\\[.+\\]', re.DOTALL).sub('1', EXPECTED_MALWARE)
with pytest.raises(ValueError) as excinfo: with pytest.raises(ValueError) as excinfo:
stix2.parse(data) stix2.parse(data, version="2.0")
assert "Invalid value for Malware 'labels'" in str(excinfo.value) assert "Invalid value for Malware 'labels'" in str(excinfo.value)
@ -141,7 +143,7 @@ def test_parse_malware_kill_chain_phases():
} }
]""" ]"""
data = EXPECTED_MALWARE.replace('malware"', 'malware",%s' % kill_chain) data = EXPECTED_MALWARE.replace('malware"', 'malware",%s' % kill_chain)
mal = stix2.parse(data) mal = stix2.parse(data, version="2.0")
assert mal.kill_chain_phases[0].kill_chain_name == "lockheed-martin-cyber-kill-chain" assert mal.kill_chain_phases[0].kill_chain_name == "lockheed-martin-cyber-kill-chain"
assert mal.kill_chain_phases[0].phase_name == "reconnaissance" assert mal.kill_chain_phases[0].phase_name == "reconnaissance"
assert mal['kill_chain_phases'][0]['kill_chain_name'] == "lockheed-martin-cyber-kill-chain" assert mal['kill_chain_phases'][0]['kill_chain_name'] == "lockheed-martin-cyber-kill-chain"
@ -157,5 +159,5 @@ def test_parse_malware_clean_kill_chain_phases():
} }
]""" ]"""
data = EXPECTED_MALWARE.replace('malware"', 'malware",%s' % kill_chain) data = EXPECTED_MALWARE.replace('malware"', 'malware",%s' % kill_chain)
mal = stix2.parse(data) mal = stix2.parse(data, version="2.0")
assert mal['kill_chain_phases'][0]['phase_name'] == "1" assert mal['kill_chain_phases'][0]['phase_name'] == "1"
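The parse tests above now pass version="2.0" explicitly, since the library carries separate 2.0 and 2.1 object registries. A minimal sketch of the same call outside pytest (the JSON mirrors the expected-malware fixture; leaving version unset is assumed to fall back to the library's default registry):

import stix2

malware_json = """{
    "type": "malware",
    "id": "malware--9c4638ec-f1de-4ddb-abf4-1b760417654e",
    "created": "2016-05-12T08:17:27.000Z",
    "modified": "2016-05-12T08:17:27.000Z",
    "labels": ["ransomware"],
    "name": "Cryptolocker"
}"""

# Explicitly select the STIX 2.0 registry when parsing.
mal = stix2.parse(malware_json, version="2.0")
assert mal.type == "malware"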


@ -4,7 +4,7 @@ import pytest
import pytz import pytz
import stix2 import stix2
from stix2 import TLP_WHITE from stix2.v20 import TLP_WHITE
from .constants import MARKING_DEFINITION_ID from .constants import MARKING_DEFINITION_ID
@ -75,11 +75,11 @@ def test_marking_def_example_with_tlp():
def test_marking_def_example_with_statement_positional_argument(): def test_marking_def_example_with_statement_positional_argument():
marking_definition = stix2.MarkingDefinition( marking_definition = stix2.v20.MarkingDefinition(
id="marking-definition--613f2e26-407d-48c7-9eca-b8e91df99dc9", id="marking-definition--613f2e26-407d-48c7-9eca-b8e91df99dc9",
created="2017-01-20T00:00:00.000Z", created="2017-01-20T00:00:00.000Z",
definition_type="statement", definition_type="statement",
definition=stix2.StatementMarking(statement="Copyright 2016, Example Corp") definition=stix2.v20.StatementMarking(statement="Copyright 2016, Example Corp"),
) )
assert str(marking_definition) == EXPECTED_STATEMENT_MARKING_DEFINITION assert str(marking_definition) == EXPECTED_STATEMENT_MARKING_DEFINITION
@ -87,11 +87,11 @@ def test_marking_def_example_with_statement_positional_argument():
def test_marking_def_example_with_kwargs_statement(): def test_marking_def_example_with_kwargs_statement():
kwargs = dict(statement="Copyright 2016, Example Corp") kwargs = dict(statement="Copyright 2016, Example Corp")
marking_definition = stix2.MarkingDefinition( marking_definition = stix2.v20.MarkingDefinition(
id="marking-definition--613f2e26-407d-48c7-9eca-b8e91df99dc9", id="marking-definition--613f2e26-407d-48c7-9eca-b8e91df99dc9",
created="2017-01-20T00:00:00.000Z", created="2017-01-20T00:00:00.000Z",
definition_type="statement", definition_type="statement",
definition=stix2.StatementMarking(**kwargs) definition=stix2.v20.StatementMarking(**kwargs),
) )
assert str(marking_definition) == EXPECTED_STATEMENT_MARKING_DEFINITION assert str(marking_definition) == EXPECTED_STATEMENT_MARKING_DEFINITION
@ -99,31 +99,31 @@ def test_marking_def_example_with_kwargs_statement():
def test_marking_def_invalid_type(): def test_marking_def_invalid_type():
with pytest.raises(ValueError): with pytest.raises(ValueError):
stix2.MarkingDefinition( stix2.v20.MarkingDefinition(
id="marking-definition--613f2e26-407d-48c7-9eca-b8e91df99dc9", id="marking-definition--613f2e26-407d-48c7-9eca-b8e91df99dc9",
created="2017-01-20T00:00:00.000Z", created="2017-01-20T00:00:00.000Z",
definition_type="my-definition-type", definition_type="my-definition-type",
definition=stix2.StatementMarking("Copyright 2016, Example Corp") definition=stix2.v20.StatementMarking("Copyright 2016, Example Corp"),
) )
def test_campaign_with_markings_example(): def test_campaign_with_markings_example():
campaign = stix2.Campaign( campaign = stix2.v20.Campaign(
id="campaign--8e2e2d2b-17d4-4cbf-938f-98ee46b3cd3f", id="campaign--8e2e2d2b-17d4-4cbf-938f-98ee46b3cd3f",
created_by_ref="identity--f431f809-377b-45e0-aa1c-6a4751cae5ff", created_by_ref="identity--f431f809-377b-45e0-aa1c-6a4751cae5ff",
created="2016-04-06T20:03:00Z", created="2016-04-06T20:03:00Z",
modified="2016-04-06T20:03:00Z", modified="2016-04-06T20:03:00Z",
name="Green Group Attacks Against Finance", name="Green Group Attacks Against Finance",
description="Campaign by Green Group against a series of targets in the financial services sector.", description="Campaign by Green Group against a series of targets in the financial services sector.",
object_marking_refs=TLP_WHITE object_marking_refs=TLP_WHITE,
) )
assert str(campaign) == EXPECTED_CAMPAIGN_WITH_OBJECT_MARKING assert str(campaign) == EXPECTED_CAMPAIGN_WITH_OBJECT_MARKING
def test_granular_example(): def test_granular_example():
granular_marking = stix2.GranularMarking( granular_marking = stix2.v20.GranularMarking(
marking_ref="marking-definition--613f2e26-407d-48c7-9eca-b8e91df99dc9", marking_ref="marking-definition--613f2e26-407d-48c7-9eca-b8e91df99dc9",
selectors=["abc", "abc.[23]", "abc.def", "abc.[2].efg"] selectors=["abc", "abc.[23]", "abc.def", "abc.[2].efg"],
) )
assert str(granular_marking) == EXPECTED_GRANULAR_MARKING assert str(granular_marking) == EXPECTED_GRANULAR_MARKING
@ -131,19 +131,19 @@ def test_granular_example():
def test_granular_example_with_bad_selector(): def test_granular_example_with_bad_selector():
with pytest.raises(stix2.exceptions.InvalidValueError) as excinfo: with pytest.raises(stix2.exceptions.InvalidValueError) as excinfo:
stix2.GranularMarking( stix2.v20.GranularMarking(
marking_ref="marking-definition--613f2e26-407d-48c7-9eca-b8e91df99dc9", marking_ref="marking-definition--613f2e26-407d-48c7-9eca-b8e91df99dc9",
selectors=["abc[0]"] # missing "." selectors=["abc[0]"], # missing "."
) )
assert excinfo.value.cls == stix2.GranularMarking assert excinfo.value.cls == stix2.v20.GranularMarking
assert excinfo.value.prop_name == "selectors" assert excinfo.value.prop_name == "selectors"
assert excinfo.value.reason == "must adhere to selector syntax." assert excinfo.value.reason == "must adhere to selector syntax."
assert str(excinfo.value) == "Invalid value for GranularMarking 'selectors': must adhere to selector syntax." assert str(excinfo.value) == "Invalid value for GranularMarking 'selectors': must adhere to selector syntax."
def test_campaign_with_granular_markings_example(): def test_campaign_with_granular_markings_example():
campaign = stix2.Campaign( campaign = stix2.v20.Campaign(
id="campaign--8e2e2d2b-17d4-4cbf-938f-98ee46b3cd3f", id="campaign--8e2e2d2b-17d4-4cbf-938f-98ee46b3cd3f",
created_by_ref="identity--f431f809-377b-45e0-aa1c-6a4751cae5ff", created_by_ref="identity--f431f809-377b-45e0-aa1c-6a4751cae5ff",
created="2016-04-06T20:03:00Z", created="2016-04-06T20:03:00Z",
@ -151,27 +151,31 @@ def test_campaign_with_granular_markings_example():
name="Green Group Attacks Against Finance", name="Green Group Attacks Against Finance",
description="Campaign by Green Group against a series of targets in the financial services sector.", description="Campaign by Green Group against a series of targets in the financial services sector.",
granular_markings=[ granular_markings=[
stix2.GranularMarking( stix2.v20.GranularMarking(
marking_ref="marking-definition--613f2e26-407d-48c7-9eca-b8e91df99dc9", marking_ref="marking-definition--613f2e26-407d-48c7-9eca-b8e91df99dc9",
selectors=["description"]) selectors=["description"],
]) ),
],
)
assert str(campaign) == EXPECTED_CAMPAIGN_WITH_GRANULAR_MARKINGS assert str(campaign) == EXPECTED_CAMPAIGN_WITH_GRANULAR_MARKINGS
@pytest.mark.parametrize("data", [ @pytest.mark.parametrize(
EXPECTED_TLP_MARKING_DEFINITION, "data", [
{ EXPECTED_TLP_MARKING_DEFINITION,
"id": "marking-definition--613f2e26-407d-48c7-9eca-b8e91df99dc9", {
"type": "marking-definition", "id": "marking-definition--613f2e26-407d-48c7-9eca-b8e91df99dc9",
"created": "2017-01-20T00:00:00Z", "type": "marking-definition",
"definition": { "created": "2017-01-20T00:00:00Z",
"tlp": "white" "definition": {
"tlp": "white",
},
"definition_type": "tlp",
}, },
"definition_type": "tlp", ],
}, )
])
def test_parse_marking_definition(data): def test_parse_marking_definition(data):
gm = stix2.parse(data) gm = stix2.parse(data, version="2.0")
assert gm.type == 'marking-definition' assert gm.type == 'marking-definition'
assert gm.id == MARKING_DEFINITION_ID assert gm.id == MARKING_DEFINITION_ID
@ -180,10 +184,12 @@ def test_parse_marking_definition(data):
assert gm.definition_type == "tlp" assert gm.definition_type == "tlp"
@stix2.common.CustomMarking('x-new-marking-type', [ @stix2.v20.CustomMarking(
('property1', stix2.properties.StringProperty(required=True)), 'x-new-marking-type', [
('property2', stix2.properties.IntegerProperty()), ('property1', stix2.properties.StringProperty(required=True)),
]) ('property2', stix2.properties.IntegerProperty()),
],
)
class NewMarking(object): class NewMarking(object):
def __init__(self, property2=None, **kwargs): def __init__(self, property2=None, **kwargs):
if "property3" in kwargs and not isinstance(kwargs.get("property3"), int): if "property3" in kwargs and not isinstance(kwargs.get("property3"), int):
@ -193,11 +199,11 @@ class NewMarking(object):
def test_registered_custom_marking(): def test_registered_custom_marking():
nm = NewMarking(property1='something', property2=55) nm = NewMarking(property1='something', property2=55)
marking_def = stix2.MarkingDefinition( marking_def = stix2.v20.MarkingDefinition(
id="marking-definition--00000000-0000-4000-8000-000000000012", id="marking-definition--00000000-0000-4000-8000-000000000012",
created="2017-01-22T00:00:00.000Z", created="2017-01-22T00:00:00.000Z",
definition_type="x-new-marking-type", definition_type="x-new-marking-type",
definition=nm definition=nm,
) )
assert marking_def.type == "marking-definition" assert marking_def.type == "marking-definition"
@ -218,21 +224,23 @@ def test_registered_custom_marking_raises_exception():
def test_not_registered_marking_raises_exception(): def test_not_registered_marking_raises_exception():
with pytest.raises(ValueError) as excinfo: with pytest.raises(ValueError) as excinfo:
# Used custom object on purpose to demonstrate a not-registered marking # Used custom object on purpose to demonstrate a not-registered marking
@stix2.sdo.CustomObject('x-new-marking-type2', [ @stix2.v20.CustomObject(
('property1', stix2.properties.StringProperty(required=True)), 'x-new-marking-type2', [
('property2', stix2.properties.IntegerProperty()), ('property1', stix2.properties.StringProperty(required=True)),
]) ('property2', stix2.properties.IntegerProperty()),
],
)
class NewObject2(object): class NewObject2(object):
def __init__(self, property2=None, **kwargs): def __init__(self, property2=None, **kwargs):
return return
no = NewObject2(property1='something', property2=55) no = NewObject2(property1='something', property2=55)
stix2.MarkingDefinition( stix2.v20.MarkingDefinition(
id="marking-definition--00000000-0000-4000-8000-000000000012", id="marking-definition--00000000-0000-4000-8000-000000000012",
created="2017-01-22T00:00:00.000Z", created="2017-01-22T00:00:00.000Z",
definition_type="x-new-marking-type2", definition_type="x-new-marking-type2",
definition=no definition=no,
) )
assert str(excinfo.value) == "definition_type must be a valid marking type" assert str(excinfo.value) == "definition_type must be a valid marking type"
@ -241,7 +249,7 @@ def test_not_registered_marking_raises_exception():
def test_marking_wrong_type_construction(): def test_marking_wrong_type_construction():
with pytest.raises(ValueError) as excinfo: with pytest.raises(ValueError) as excinfo:
# Test passing wrong type for properties. # Test passing wrong type for properties.
@stix2.CustomMarking('x-new-marking-type2', ("a", "b")) @stix2.v20.CustomMarking('x-new-marking-type2', ("a", "b"))
class NewObject3(object): class NewObject3(object):
pass pass
@ -249,7 +257,7 @@ def test_marking_wrong_type_construction():
def test_campaign_add_markings(): def test_campaign_add_markings():
campaign = stix2.Campaign( campaign = stix2.v20.Campaign(
id="campaign--8e2e2d2b-17d4-4cbf-938f-98ee46b3cd3f", id="campaign--8e2e2d2b-17d4-4cbf-938f-98ee46b3cd3f",
created_by_ref="identity--f431f809-377b-45e0-aa1c-6a4751cae5ff", created_by_ref="identity--f431f809-377b-45e0-aa1c-6a4751cae5ff",
created="2016-04-06T20:03:00Z", created="2016-04-06T20:03:00Z",


@ -1,7 +1,8 @@
import pytest import pytest
from stix2 import TLP_AMBER, Malware, exceptions, markings from stix2 import exceptions, markings
from stix2.v20 import TLP_AMBER, Malware
from .constants import FAKE_TIME, MALWARE_ID from .constants import FAKE_TIME, MALWARE_ID
from .constants import MALWARE_KWARGS as MALWARE_KWARGS_CONST from .constants import MALWARE_KWARGS as MALWARE_KWARGS_CONST
@ -17,26 +18,34 @@ MALWARE_KWARGS.update({
}) })
@pytest.mark.parametrize("data", [ @pytest.mark.parametrize(
( "data", [
Malware(**MALWARE_KWARGS), (
Malware(object_marking_refs=[MARKING_IDS[0]], Malware(**MALWARE_KWARGS),
**MALWARE_KWARGS), Malware(
MARKING_IDS[0], object_marking_refs=[MARKING_IDS[0]],
), **MALWARE_KWARGS
( ),
MALWARE_KWARGS, MARKING_IDS[0],
dict(object_marking_refs=[MARKING_IDS[0]], ),
**MALWARE_KWARGS), (
MARKING_IDS[0], MALWARE_KWARGS,
), dict(
( object_marking_refs=[MARKING_IDS[0]],
Malware(**MALWARE_KWARGS), **MALWARE_KWARGS
Malware(object_marking_refs=[TLP_AMBER.id], ),
**MALWARE_KWARGS), MARKING_IDS[0],
TLP_AMBER, ),
), (
]) Malware(**MALWARE_KWARGS),
Malware(
object_marking_refs=[TLP_AMBER.id],
**MALWARE_KWARGS
),
TLP_AMBER,
),
],
)
def test_add_markings_one_marking(data): def test_add_markings_one_marking(data):
before = data[0] before = data[0]
after = data[1] after = data[1]
@ -72,12 +81,12 @@ def test_add_markings_combination():
granular_markings=[ granular_markings=[
{ {
"selectors": ["labels"], "selectors": ["labels"],
"marking_ref": MARKING_IDS[2] "marking_ref": MARKING_IDS[2],
}, },
{ {
"selectors": ["name"], "selectors": ["name"],
"marking_ref": MARKING_IDS[3] "marking_ref": MARKING_IDS[3],
} },
], ],
**MALWARE_KWARGS **MALWARE_KWARGS
) )
@ -94,12 +103,14 @@ def test_add_markings_combination():
assert m in after["object_marking_refs"] assert m in after["object_marking_refs"]
@pytest.mark.parametrize("data", [ @pytest.mark.parametrize(
([""]), "data", [
(""), ([""]),
([]), (""),
([MARKING_IDS[0], 456]) ([]),
]) ([MARKING_IDS[0], 456]),
],
)
def test_add_markings_bad_markings(data): def test_add_markings_bad_markings(data):
before = Malware( before = Malware(
**MALWARE_KWARGS **MALWARE_KWARGS
@ -119,62 +130,62 @@ GET_MARKINGS_TEST_DATA = \
"list value", "list value",
{ {
"g": "nested", "g": "nested",
"h": 45 "h": 45,
} },
], ],
"x": { "x": {
"y": [ "y": [
"hello", "hello",
88 88,
], ],
"z": { "z": {
"foo1": "bar", "foo1": "bar",
"foo2": 65 "foo2": 65,
} },
}, },
"object_marking_refs": ["11"], "object_marking_refs": ["11"],
"granular_markings": [ "granular_markings": [
{ {
"marking_ref": "1", "marking_ref": "1",
"selectors": ["a"] "selectors": ["a"],
}, },
{ {
"marking_ref": "2", "marking_ref": "2",
"selectors": ["c"] "selectors": ["c"],
}, },
{ {
"marking_ref": "3", "marking_ref": "3",
"selectors": ["c.[1]"] "selectors": ["c.[1]"],
}, },
{ {
"marking_ref": "4", "marking_ref": "4",
"selectors": ["c.[2]"] "selectors": ["c.[2]"],
}, },
{ {
"marking_ref": "5", "marking_ref": "5",
"selectors": ["c.[2].g"] "selectors": ["c.[2].g"],
}, },
{ {
"marking_ref": "6", "marking_ref": "6",
"selectors": ["x"] "selectors": ["x"],
}, },
{ {
"marking_ref": "7", "marking_ref": "7",
"selectors": ["x.y"] "selectors": ["x.y"],
}, },
{ {
"marking_ref": "8", "marking_ref": "8",
"selectors": ["x.y.[1]"] "selectors": ["x.y.[1]"],
}, },
{ {
"marking_ref": "9", "marking_ref": "9",
"selectors": ["x.z"] "selectors": ["x.z"],
}, },
{ {
"marking_ref": "10", "marking_ref": "10",
"selectors": ["x.z.foo2"] "selectors": ["x.z.foo2"],
}, },
] ],
} }
@ -257,18 +268,24 @@ def test_get_markings_object_and_granular_combinations(data):
assert set(markings.get_markings(data, "x.z.foo2", False, True)) == set(["10"]) assert set(markings.get_markings(data, "x.z.foo2", False, True)) == set(["10"])
@pytest.mark.parametrize("data", [ @pytest.mark.parametrize(
( "data", [
Malware(object_marking_refs=[MARKING_IDS[0]], (
**MALWARE_KWARGS), Malware(
Malware(**MALWARE_KWARGS), object_marking_refs=[MARKING_IDS[0]],
), **MALWARE_KWARGS
( ),
dict(object_marking_refs=[MARKING_IDS[0]], Malware(**MALWARE_KWARGS),
**MALWARE_KWARGS), ),
MALWARE_KWARGS, (
), dict(
]) object_marking_refs=[MARKING_IDS[0]],
**MALWARE_KWARGS
),
MALWARE_KWARGS,
),
],
)
def test_remove_markings_object_level(data): def test_remove_markings_object_level(data):
before = data[0] before = data[0]
after = data[1] after = data[1]
@ -283,29 +300,43 @@ def test_remove_markings_object_level(data):
modified == after['modified'] modified == after['modified']
@pytest.mark.parametrize("data", [ @pytest.mark.parametrize(
( "data", [
Malware(object_marking_refs=[MARKING_IDS[0], MARKING_IDS[1], MARKING_IDS[2]], (
**MALWARE_KWARGS), Malware(
Malware(object_marking_refs=[MARKING_IDS[1]], object_marking_refs=[MARKING_IDS[0], MARKING_IDS[1], MARKING_IDS[2]],
**MALWARE_KWARGS), **MALWARE_KWARGS
[MARKING_IDS[0], MARKING_IDS[2]], ),
), Malware(
( object_marking_refs=[MARKING_IDS[1]],
dict(object_marking_refs=[MARKING_IDS[0], MARKING_IDS[1], MARKING_IDS[2]], **MALWARE_KWARGS
**MALWARE_KWARGS), ),
dict(object_marking_refs=[MARKING_IDS[1]], [MARKING_IDS[0], MARKING_IDS[2]],
**MALWARE_KWARGS), ),
[MARKING_IDS[0], MARKING_IDS[2]], (
), dict(
( object_marking_refs=[MARKING_IDS[0], MARKING_IDS[1], MARKING_IDS[2]],
Malware(object_marking_refs=[MARKING_IDS[0], MARKING_IDS[1], TLP_AMBER.id], **MALWARE_KWARGS
**MALWARE_KWARGS), ),
Malware(object_marking_refs=[MARKING_IDS[1]], dict(
**MALWARE_KWARGS), object_marking_refs=[MARKING_IDS[1]],
[MARKING_IDS[0], TLP_AMBER], **MALWARE_KWARGS
), ),
]) [MARKING_IDS[0], MARKING_IDS[2]],
),
(
Malware(
object_marking_refs=[MARKING_IDS[0], MARKING_IDS[1], TLP_AMBER.id],
**MALWARE_KWARGS
),
Malware(
object_marking_refs=[MARKING_IDS[1]],
**MALWARE_KWARGS
),
[MARKING_IDS[0], TLP_AMBER],
),
],
)
def test_remove_markings_multiple(data): def test_remove_markings_multiple(data):
before = data[0] before = data[0]
after = data[1] after = data[1]
@ -325,18 +356,24 @@ def test_remove_markings_bad_markings():
assert str(excinfo.value) == "Marking ['%s'] was not found in Malware!" % MARKING_IDS[4] assert str(excinfo.value) == "Marking ['%s'] was not found in Malware!" % MARKING_IDS[4]
@pytest.mark.parametrize("data", [ @pytest.mark.parametrize(
( "data", [
Malware(object_marking_refs=[MARKING_IDS[0], MARKING_IDS[1], MARKING_IDS[2]], (
**MALWARE_KWARGS), Malware(
Malware(**MALWARE_KWARGS), object_marking_refs=[MARKING_IDS[0], MARKING_IDS[1], MARKING_IDS[2]],
), **MALWARE_KWARGS
( ),
dict(object_marking_refs=[MARKING_IDS[0], MARKING_IDS[1], MARKING_IDS[2]], Malware(**MALWARE_KWARGS),
**MALWARE_KWARGS), ),
MALWARE_KWARGS, (
), dict(
]) object_marking_refs=[MARKING_IDS[0], MARKING_IDS[1], MARKING_IDS[2]],
**MALWARE_KWARGS
),
MALWARE_KWARGS,
),
],
)
def test_clear_markings(data): def test_clear_markings(data):
before = data[0] before = data[0]
after = data[1] after = data[1]
@ -358,62 +395,62 @@ def test_is_marked_object_and_granular_combinations():
"list value", "list value",
{ {
"g": "nested", "g": "nested",
"h": 45 "h": 45,
} },
], ],
"x": { "x": {
"y": [ "y": [
"hello", "hello",
88 88,
], ],
"z": { "z": {
"foo1": "bar", "foo1": "bar",
"foo2": 65 "foo2": 65,
} },
}, },
"object_marking_refs": "11", "object_marking_refs": "11",
"granular_markings": [ "granular_markings": [
{ {
"marking_ref": "1", "marking_ref": "1",
"selectors": ["a"] "selectors": ["a"],
}, },
{ {
"marking_ref": "2", "marking_ref": "2",
"selectors": ["c"] "selectors": ["c"],
}, },
{ {
"marking_ref": "3", "marking_ref": "3",
"selectors": ["c.[1]"] "selectors": ["c.[1]"],
}, },
{ {
"marking_ref": "4", "marking_ref": "4",
"selectors": ["c.[2]"] "selectors": ["c.[2]"],
}, },
{ {
"marking_ref": "5", "marking_ref": "5",
"selectors": ["c.[2].g"] "selectors": ["c.[2].g"],
}, },
{ {
"marking_ref": "6", "marking_ref": "6",
"selectors": ["x"] "selectors": ["x"],
}, },
{ {
"marking_ref": "7", "marking_ref": "7",
"selectors": ["x.y"] "selectors": ["x.y"],
}, },
{ {
"marking_ref": "8", "marking_ref": "8",
"selectors": ["x.y.[1]"] "selectors": ["x.y.[1]"],
}, },
{ {
"marking_ref": "9", "marking_ref": "9",
"selectors": ["x.z"] "selectors": ["x.z"],
}, },
{ {
"marking_ref": "10", "marking_ref": "10",
"selectors": ["x.z.foo2"] "selectors": ["x.z.foo2"],
}, },
] ],
} }
assert markings.is_marked(test_sdo, ["1"], "a", False, False) assert markings.is_marked(test_sdo, ["1"], "a", False, False)
@ -490,18 +527,24 @@ def test_is_marked_object_and_granular_combinations():
assert markings.is_marked(test_sdo, ["2"], None, True, True) is False assert markings.is_marked(test_sdo, ["2"], None, True, True) is False
@pytest.mark.parametrize("data", [ @pytest.mark.parametrize(
( "data", [
Malware(object_marking_refs=[MARKING_IDS[0], MARKING_IDS[1], MARKING_IDS[2]], (
**MALWARE_KWARGS), Malware(
Malware(**MALWARE_KWARGS), object_marking_refs=[MARKING_IDS[0], MARKING_IDS[1], MARKING_IDS[2]],
), **MALWARE_KWARGS
( ),
dict(object_marking_refs=[MARKING_IDS[0], MARKING_IDS[1], MARKING_IDS[2]], Malware(**MALWARE_KWARGS),
**MALWARE_KWARGS), ),
MALWARE_KWARGS, (
), dict(
]) object_marking_refs=[MARKING_IDS[0], MARKING_IDS[1], MARKING_IDS[2]],
**MALWARE_KWARGS
),
MALWARE_KWARGS,
),
],
)
def test_is_marked_no_markings(data): def test_is_marked_no_markings(data):
marked = data[0] marked = data[0]
nonmarked = data[1] nonmarked = data[1]
@ -531,12 +574,14 @@ def test_set_marking():
assert x in after["object_marking_refs"] assert x in after["object_marking_refs"]
@pytest.mark.parametrize("data", [ @pytest.mark.parametrize(
([]), "data", [
([""]), ([]),
(""), ([""]),
([MARKING_IDS[4], 687]) (""),
]) ([MARKING_IDS[4], 687]),
],
)
def test_set_marking_bad_input(data): def test_set_marking_bad_input(data):
before = Malware( before = Malware(
object_marking_refs=[MARKING_IDS[0]], object_marking_refs=[MARKING_IDS[0]],


@ -0,0 +1,525 @@
import datetime
import pytest
import stix2
from stix2.pattern_visitor import create_pattern_object
def test_create_comparison_expression():
exp = stix2.EqualityComparisonExpression(
"file:hashes.'SHA-256'",
stix2.HashConstant("aec070645fe53ee3b3763059376134f058cc337247c978add178b6ccdfb0019f", "SHA-256"),
) # noqa
assert str(exp) == "file:hashes.'SHA-256' = 'aec070645fe53ee3b3763059376134f058cc337247c978add178b6ccdfb0019f'"
def test_boolean_expression():
exp1 = stix2.MatchesComparisonExpression(
"email-message:from_ref.value",
stix2.StringConstant(".+\\@example\\.com$"),
)
exp2 = stix2.MatchesComparisonExpression(
"email-message:body_multipart[*].body_raw_ref.name",
stix2.StringConstant("^Final Report.+\\.exe$"),
)
exp = stix2.AndBooleanExpression([exp1, exp2])
assert str(exp) == "email-message:from_ref.value MATCHES '.+\\\\@example\\\\.com$' AND email-message:body_multipart[*].body_raw_ref.name MATCHES '^Final Report.+\\\\.exe$'" # noqa
def test_boolean_expression_with_parentheses():
exp1 = stix2.MatchesComparisonExpression(
stix2.ObjectPath(
"email-message",
[
stix2.ReferenceObjectPathComponent("from_ref"),
stix2.BasicObjectPathComponent("value", False),
],
),
stix2.StringConstant(".+\\@example\\.com$"),
)
exp2 = stix2.MatchesComparisonExpression(
"email-message:body_multipart[*].body_raw_ref.name",
stix2.StringConstant("^Final Report.+\\.exe$"),
)
exp = stix2.ParentheticalExpression(stix2.AndBooleanExpression([exp1, exp2]))
assert str(exp) == "(email-message:from_ref.value MATCHES '.+\\\\@example\\\\.com$' AND email-message:body_multipart[*].body_raw_ref.name MATCHES '^Final Report.+\\\\.exe$')" # noqa
def test_hash_followed_by_registryKey_expression_python_constant():
hash_exp = stix2.EqualityComparisonExpression(
"file:hashes.MD5",
stix2.HashConstant("79054025255fb1a26e4bc422aef54eb4", "MD5"),
)
o_exp1 = stix2.ObservationExpression(hash_exp)
reg_exp = stix2.EqualityComparisonExpression(
stix2.ObjectPath("windows-registry-key", ["key"]),
stix2.StringConstant("HKEY_LOCAL_MACHINE\\foo\\bar"),
)
o_exp2 = stix2.ObservationExpression(reg_exp)
fb_exp = stix2.FollowedByObservationExpression([o_exp1, o_exp2])
para_exp = stix2.ParentheticalExpression(fb_exp)
qual_exp = stix2.WithinQualifier(300)
exp = stix2.QualifiedObservationExpression(para_exp, qual_exp)
assert str(exp) == "([file:hashes.MD5 = '79054025255fb1a26e4bc422aef54eb4'] FOLLOWEDBY [windows-registry-key:key = 'HKEY_LOCAL_MACHINE\\\\foo\\\\bar']) WITHIN 300 SECONDS" # noqa
def test_hash_followed_by_registryKey_expression():
hash_exp = stix2.EqualityComparisonExpression(
"file:hashes.MD5",
stix2.HashConstant("79054025255fb1a26e4bc422aef54eb4", "MD5"),
)
o_exp1 = stix2.ObservationExpression(hash_exp)
reg_exp = stix2.EqualityComparisonExpression(
stix2.ObjectPath("windows-registry-key", ["key"]),
stix2.StringConstant("HKEY_LOCAL_MACHINE\\foo\\bar"),
)
o_exp2 = stix2.ObservationExpression(reg_exp)
fb_exp = stix2.FollowedByObservationExpression([o_exp1, o_exp2])
para_exp = stix2.ParentheticalExpression(fb_exp)
qual_exp = stix2.WithinQualifier(stix2.IntegerConstant(300))
exp = stix2.QualifiedObservationExpression(para_exp, qual_exp)
assert str(exp) == "([file:hashes.MD5 = '79054025255fb1a26e4bc422aef54eb4'] FOLLOWEDBY [windows-registry-key:key = 'HKEY_LOCAL_MACHINE\\\\foo\\\\bar']) WITHIN 300 SECONDS" # noqa
def test_file_observable_expression():
exp1 = stix2.EqualityComparisonExpression(
"file:hashes.'SHA-256'",
stix2.HashConstant(
"aec070645fe53ee3b3763059376134f058cc337247c978add178b6ccdfb0019f",
'SHA-256',
),
)
exp2 = stix2.EqualityComparisonExpression("file:mime_type", stix2.StringConstant("application/x-pdf"))
bool_exp = stix2.ObservationExpression(stix2.AndBooleanExpression([exp1, exp2]))
assert str(bool_exp) == "[file:hashes.'SHA-256' = 'aec070645fe53ee3b3763059376134f058cc337247c978add178b6ccdfb0019f' AND file:mime_type = 'application/x-pdf']" # noqa
@pytest.mark.parametrize(
"observation_class, op", [
(stix2.AndObservationExpression, 'AND'),
(stix2.OrObservationExpression, 'OR'),
],
)
def test_multiple_file_observable_expression(observation_class, op):
exp1 = stix2.EqualityComparisonExpression(
"file:hashes.'SHA-256'",
stix2.HashConstant(
"bf07a7fbb825fc0aae7bf4a1177b2b31fcf8a3feeaf7092761e18c859ee52a9c",
'SHA-256',
),
)
exp2 = stix2.EqualityComparisonExpression(
"file:hashes.MD5",
stix2.HashConstant("cead3f77f6cda6ec00f57d76c9a6879f", "MD5"),
)
bool1_exp = stix2.OrBooleanExpression([exp1, exp2])
exp3 = stix2.EqualityComparisonExpression(
"file:hashes.'SHA-256'",
stix2.HashConstant(
"aec070645fe53ee3b3763059376134f058cc337247c978add178b6ccdfb0019f",
'SHA-256',
),
)
op1_exp = stix2.ObservationExpression(bool1_exp)
op2_exp = stix2.ObservationExpression(exp3)
exp = observation_class([op1_exp, op2_exp])
assert str(exp) == "[file:hashes.'SHA-256' = 'bf07a7fbb825fc0aae7bf4a1177b2b31fcf8a3feeaf7092761e18c859ee52a9c' OR file:hashes.MD5 = 'cead3f77f6cda6ec00f57d76c9a6879f'] {} [file:hashes.'SHA-256' = 'aec070645fe53ee3b3763059376134f058cc337247c978add178b6ccdfb0019f']".format(op) # noqa
def test_root_types():
ast = stix2.ObservationExpression(
stix2.AndBooleanExpression(
[
stix2.ParentheticalExpression(
stix2.OrBooleanExpression([
stix2.EqualityComparisonExpression("a:b", stix2.StringConstant("1")),
stix2.EqualityComparisonExpression("b:c", stix2.StringConstant("2")),
]),
),
stix2.EqualityComparisonExpression(u"b:d", stix2.StringConstant("3")),
],
),
)
assert str(ast) == "[(a:b = '1' OR b:c = '2') AND b:d = '3']"
def test_artifact_payload():
exp1 = stix2.EqualityComparisonExpression(
"artifact:mime_type",
"application/vnd.tcpdump.pcap",
)
exp2 = stix2.MatchesComparisonExpression(
"artifact:payload_bin",
stix2.StringConstant("\\xd4\\xc3\\xb2\\xa1\\x02\\x00\\x04\\x00"),
)
and_exp = stix2.ObservationExpression(stix2.AndBooleanExpression([exp1, exp2]))
assert str(and_exp) == "[artifact:mime_type = 'application/vnd.tcpdump.pcap' AND artifact:payload_bin MATCHES '\\\\xd4\\\\xc3\\\\xb2\\\\xa1\\\\x02\\\\x00\\\\x04\\\\x00']" # noqa
def test_greater_than_python_constant():
exp1 = stix2.GreaterThanComparisonExpression("file:extensions.'windows-pebinary-ext'.sections[*].entropy", 7.0)
exp = stix2.ObservationExpression(exp1)
assert str(exp) == "[file:extensions.'windows-pebinary-ext'.sections[*].entropy > 7.0]"
def test_greater_than():
exp1 = stix2.GreaterThanComparisonExpression(
"file:extensions.'windows-pebinary-ext'.sections[*].entropy",
stix2.FloatConstant(7.0),
)
exp = stix2.ObservationExpression(exp1)
assert str(exp) == "[file:extensions.'windows-pebinary-ext'.sections[*].entropy > 7.0]"
def test_less_than():
exp = stix2.LessThanComparisonExpression("file:size", 1024)
assert str(exp) == "file:size < 1024"
def test_greater_than_or_equal():
exp = stix2.GreaterThanEqualComparisonExpression(
"file:size",
1024,
)
assert str(exp) == "file:size >= 1024"
def test_less_than_or_equal():
exp = stix2.LessThanEqualComparisonExpression(
"file:size",
1024,
)
assert str(exp) == "file:size <= 1024"
def test_not():
exp = stix2.LessThanComparisonExpression(
"file:size",
1024,
negated=True,
)
assert str(exp) == "file:size NOT < 1024"
def test_and_observable_expression():
exp1 = stix2.AndBooleanExpression([
stix2.EqualityComparisonExpression(
"user-account:account_type",
"unix",
),
stix2.EqualityComparisonExpression(
"user-account:user_id",
stix2.StringConstant("1007"),
),
stix2.EqualityComparisonExpression(
"user-account:account_login",
"Peter",
),
])
exp2 = stix2.AndBooleanExpression([
stix2.EqualityComparisonExpression(
"user-account:account_type",
"unix",
),
stix2.EqualityComparisonExpression(
"user-account:user_id",
stix2.StringConstant("1008"),
),
stix2.EqualityComparisonExpression(
"user-account:account_login",
"Paul",
),
])
exp3 = stix2.AndBooleanExpression([
stix2.EqualityComparisonExpression(
"user-account:account_type",
"unix",
),
stix2.EqualityComparisonExpression(
"user-account:user_id",
stix2.StringConstant("1009"),
),
stix2.EqualityComparisonExpression(
"user-account:account_login",
"Mary",
),
])
exp = stix2.AndObservationExpression([
stix2.ObservationExpression(exp1),
stix2.ObservationExpression(exp2),
stix2.ObservationExpression(exp3),
])
assert str(exp) == "[user-account:account_type = 'unix' AND user-account:user_id = '1007' AND user-account:account_login = 'Peter'] AND [user-account:account_type = 'unix' AND user-account:user_id = '1008' AND user-account:account_login = 'Paul'] AND [user-account:account_type = 'unix' AND user-account:user_id = '1009' AND user-account:account_login = 'Mary']" # noqa
def test_invalid_and_observable_expression():
with pytest.raises(ValueError) as excinfo:
stix2.AndBooleanExpression([
stix2.EqualityComparisonExpression(
"user-account:display_name",
"admin",
),
stix2.EqualityComparisonExpression(
"email-addr:display_name",
stix2.StringConstant("admin"),
),
])
assert "All operands to an 'AND' expression must have the same object type" in str(excinfo)
def test_hex():
exp_and = stix2.AndBooleanExpression([
stix2.EqualityComparisonExpression(
"file:mime_type",
"image/bmp",
),
stix2.EqualityComparisonExpression(
"file:magic_number_hex",
stix2.HexConstant("ffd8"),
),
])
exp = stix2.ObservationExpression(exp_and)
assert str(exp) == "[file:mime_type = 'image/bmp' AND file:magic_number_hex = h'ffd8']"
def test_multiple_qualifiers():
exp_and = stix2.AndBooleanExpression([
stix2.EqualityComparisonExpression(
"network-traffic:dst_ref.type",
"domain-name",
),
stix2.EqualityComparisonExpression(
"network-traffic:dst_ref.value",
"example.com",
),
])
exp_ob = stix2.ObservationExpression(exp_and)
qual_rep = stix2.RepeatQualifier(5)
qual_within = stix2.WithinQualifier(stix2.IntegerConstant(1800))
exp = stix2.QualifiedObservationExpression(stix2.QualifiedObservationExpression(exp_ob, qual_rep), qual_within)
assert str(exp) == "[network-traffic:dst_ref.type = 'domain-name' AND network-traffic:dst_ref.value = 'example.com'] REPEATS 5 TIMES WITHIN 1800 SECONDS" # noqa
def test_set_op():
exp = stix2.ObservationExpression(stix2.IsSubsetComparisonExpression(
"network-traffic:dst_ref.value",
"2001:0db8:dead:beef:0000:0000:0000:0000/64",
))
assert str(exp) == "[network-traffic:dst_ref.value ISSUBSET '2001:0db8:dead:beef:0000:0000:0000:0000/64']"
def test_timestamp():
ts = stix2.TimestampConstant('2014-01-13T07:03:17Z')
assert str(ts) == "t'2014-01-13T07:03:17Z'"
def test_boolean():
exp = stix2.EqualityComparisonExpression(
"email-message:is_multipart",
True,
)
assert str(exp) == "email-message:is_multipart = true"
def test_binary():
const = stix2.BinaryConstant("dGhpcyBpcyBhIHRlc3Q=")
exp = stix2.EqualityComparisonExpression(
"artifact:payload_bin",
const,
)
assert str(exp) == "artifact:payload_bin = b'dGhpcyBpcyBhIHRlc3Q='"
def test_list():
exp = stix2.InComparisonExpression(
"process:name",
['proccy', 'proximus', 'badproc'],
)
assert str(exp) == "process:name IN ('proccy', 'proximus', 'badproc')"
def test_list2():
# alternate way to construct an "IN" Comparison Expression
exp = stix2.EqualityComparisonExpression(
"process:name",
['proccy', 'proximus', 'badproc'],
)
assert str(exp) == "process:name IN ('proccy', 'proximus', 'badproc')"
def test_invalid_constant_type():
with pytest.raises(ValueError) as excinfo:
stix2.EqualityComparisonExpression(
"artifact:payload_bin",
{'foo': 'bar'},
)
assert 'Unable to create a constant' in str(excinfo)
def test_invalid_integer_constant():
with pytest.raises(ValueError) as excinfo:
stix2.IntegerConstant('foo')
assert 'must be an integer' in str(excinfo)
def test_invalid_timestamp_constant():
with pytest.raises(ValueError) as excinfo:
stix2.TimestampConstant('foo')
assert 'Must be a datetime object or timestamp string' in str(excinfo)
def test_invalid_float_constant():
with pytest.raises(ValueError) as excinfo:
stix2.FloatConstant('foo')
assert 'must be a float' in str(excinfo)
@pytest.mark.parametrize(
"data, result", [
(True, True),
(False, False),
('True', True),
('False', False),
('true', True),
('false', False),
('t', True),
('f', False),
('T', True),
('F', False),
(1, True),
(0, False),
],
)
def test_boolean_constant(data, result):
boolean = stix2.BooleanConstant(data)
assert boolean.value == result
def test_invalid_boolean_constant():
with pytest.raises(ValueError) as excinfo:
stix2.BooleanConstant('foo')
assert 'must be a boolean' in str(excinfo)
@pytest.mark.parametrize(
"hashtype, data", [
('MD5', 'zzz'),
('ssdeep', 'zzz=='),
],
)
def test_invalid_hash_constant(hashtype, data):
with pytest.raises(ValueError) as excinfo:
stix2.HashConstant(data, hashtype)
assert 'is not a valid {} hash'.format(hashtype) in str(excinfo)
def test_invalid_hex_constant():
with pytest.raises(ValueError) as excinfo:
stix2.HexConstant('mm')
assert "must contain an even number of hexadecimal characters" in str(excinfo)
def test_invalid_binary_constant():
with pytest.raises(ValueError) as excinfo:
stix2.BinaryConstant('foo')
assert 'must contain a base64' in str(excinfo)
def test_escape_quotes_and_backslashes():
exp = stix2.MatchesComparisonExpression(
"file:name",
"^Final Report.+\\.exe$",
)
assert str(exp) == "file:name MATCHES '^Final Report.+\\\\.exe$'"
def test_like():
exp = stix2.LikeComparisonExpression(
"directory:path",
"C:\\Windows\\%\\foo",
)
assert str(exp) == "directory:path LIKE 'C:\\\\Windows\\\\%\\\\foo'"
def test_issuperset():
exp = stix2.IsSupersetComparisonExpression(
"ipv4-addr:value",
"198.51.100.0/24",
)
assert str(exp) == "ipv4-addr:value ISSUPERSET '198.51.100.0/24'"
def test_repeat_qualifier():
qual = stix2.RepeatQualifier(stix2.IntegerConstant(5))
assert str(qual) == 'REPEATS 5 TIMES'
def test_invalid_repeat_qualifier():
with pytest.raises(ValueError) as excinfo:
stix2.RepeatQualifier('foo')
assert 'is not a valid argument for a Repeat Qualifier' in str(excinfo)
def test_invalid_within_qualifier():
with pytest.raises(ValueError) as excinfo:
stix2.WithinQualifier('foo')
assert 'is not a valid argument for a Within Qualifier' in str(excinfo)
def test_startstop_qualifier():
qual = stix2.StartStopQualifier(
stix2.TimestampConstant('2016-06-01T00:00:00Z'),
datetime.datetime(2017, 3, 12, 8, 30, 0),
)
assert str(qual) == "START t'2016-06-01T00:00:00Z' STOP t'2017-03-12T08:30:00Z'"
qual2 = stix2.StartStopQualifier(
datetime.date(2016, 6, 1),
stix2.TimestampConstant('2016-07-01T00:00:00Z'),
)
assert str(qual2) == "START t'2016-06-01T00:00:00Z' STOP t'2016-07-01T00:00:00Z'"
def test_invalid_startstop_qualifier():
with pytest.raises(ValueError) as excinfo:
stix2.StartStopQualifier(
'foo',
stix2.TimestampConstant('2016-06-01T00:00:00Z'),
)
assert 'is not a valid argument for a Start/Stop Qualifier' in str(excinfo)
with pytest.raises(ValueError) as excinfo:
stix2.StartStopQualifier(
datetime.date(2016, 6, 1),
'foo',
)
assert 'is not a valid argument for a Start/Stop Qualifier' in str(excinfo)
def test_make_constant_already_a_constant():
str_const = stix2.StringConstant('Foo')
result = stix2.patterns.make_constant(str_const)
assert result is str_const
def test_parsing_comparison_expression():
patt_obj = create_pattern_object("[file:hashes.'SHA-256' = 'aec070645fe53ee3b3763059376134f058cc337247c978add178b6ccdfb0019f']")
assert str(patt_obj) == "[file:hashes.'SHA-256' = 'aec070645fe53ee3b3763059376134f058cc337247c978add178b6ccdfb0019f']"
def test_parsing_qualified_expression():
patt_obj = create_pattern_object(
"[network-traffic:dst_ref.type = 'domain-name' AND network-traffic:dst_ref.value = 'example.com'] REPEATS 5 TIMES WITHIN 1800 SECONDS",
)
assert str(
patt_obj,
) == "[network-traffic:dst_ref.type = 'domain-name' AND network-traffic:dst_ref.value = 'example.com'] REPEATS 5 TIMES WITHIN 1800 SECONDS"
def test_list_constant():
patt_obj = create_pattern_object("[network-traffic:src_ref.value IN ('10.0.0.0', '10.0.0.1', '10.0.0.2')]")
assert str(patt_obj) == "[network-traffic:src_ref.value IN ('10.0.0.0', '10.0.0.1', '10.0.0.2')]"
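The expression classes above build an AST whose str() form is the STIX patterning language, and create_pattern_object() parses a pattern string back into that AST. A minimal sketch of feeding a generated pattern into a 2.0 Indicator (the Indicator itself is illustrative and not part of this test file):

import stix2

# Compose a single observation expression and render it as a pattern string.
hash_eq = stix2.EqualityComparisonExpression(
    "file:hashes.MD5",
    stix2.HashConstant("79054025255fb1a26e4bc422aef54eb4", "MD5"),
)
pattern = str(stix2.ObservationExpression(hash_eq))
# pattern == "[file:hashes.MD5 = '79054025255fb1a26e4bc422aef54eb4']"

indicator = stix2.v20.Indicator(
    labels=["malicious-activity"],
    pattern=pattern,
)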


@ -7,11 +7,11 @@ def test_pickling():
""" """
Ensure a pickle/unpickle cycle works okay. Ensure a pickle/unpickle cycle works okay.
""" """
identity = stix2.Identity( identity = stix2.v20.Identity(
id="identity--d66cb89d-5228-4983-958c-fa84ef75c88c", id="identity--d66cb89d-5228-4983-958c-fa84ef75c88c",
name="alice", name="alice",
description="this is a pickle test", description="this is a pickle test",
identity_class="some_class" identity_class="some_class",
) )
pickle.loads(pickle.dumps(identity)) pickle.loads(pickle.dumps(identity))


@ -2,15 +2,16 @@ import uuid
import pytest import pytest
from stix2 import CustomObject, EmailMIMEComponent, ExtensionsProperty, TCPExt import stix2
from stix2.exceptions import AtLeastOnePropertyError, DictionaryKeyError from stix2.exceptions import AtLeastOnePropertyError, DictionaryKeyError
from stix2.properties import (ERROR_INVALID_ID, BinaryProperty, from stix2.properties import (
BooleanProperty, DictionaryProperty, ERROR_INVALID_ID, BinaryProperty, BooleanProperty, DictionaryProperty,
EmbeddedObjectProperty, EnumProperty, EmbeddedObjectProperty, EnumProperty, ExtensionsProperty, FloatProperty,
FloatProperty, HashesProperty, HexProperty, HashesProperty, HexProperty, IDProperty, IntegerProperty, ListProperty,
IDProperty, IntegerProperty, ListProperty, Property, ReferenceProperty, STIXObjectProperty, StringProperty,
Property, ReferenceProperty, StringProperty, TimestampProperty, TypeProperty,
TimestampProperty, TypeProperty) )
from stix2.v20.common import MarkingProperty
from . import constants from . import constants
@ -92,10 +93,12 @@ ID_PROP = IDProperty('my-type')
MY_ID = 'my-type--232c9d3f-49fc-4440-bb01-607f638778e7' MY_ID = 'my-type--232c9d3f-49fc-4440-bb01-607f638778e7'
@pytest.mark.parametrize("value", [ @pytest.mark.parametrize(
MY_ID, "value", [
'my-type--00000000-0000-4000-8000-000000000000', MY_ID,
]) 'my-type--00000000-0000-4000-8000-000000000000',
],
)
def test_id_property_valid(value): def test_id_property_valid(value):
assert ID_PROP.clean(value) == value assert ID_PROP.clean(value) == value
@ -133,14 +136,16 @@ def test_id_property_wrong_type():
assert str(excinfo.value) == "must start with 'my-type--'." assert str(excinfo.value) == "must start with 'my-type--'."
@pytest.mark.parametrize("value", [ @pytest.mark.parametrize(
'my-type--foo', "value", [
# Not a v4 UUID 'my-type--foo',
'my-type--00000000-0000-0000-0000-000000000000', # Not a v4 UUID
'my-type--' + str(uuid.uuid1()), 'my-type--00000000-0000-0000-0000-000000000000',
'my-type--' + str(uuid.uuid3(uuid.NAMESPACE_DNS, "example.org")), 'my-type--' + str(uuid.uuid1()),
'my-type--' + str(uuid.uuid5(uuid.NAMESPACE_DNS, "example.org")), 'my-type--' + str(uuid.uuid3(uuid.NAMESPACE_DNS, "example.org")),
]) 'my-type--' + str(uuid.uuid5(uuid.NAMESPACE_DNS, "example.org")),
],
)
def test_id_property_not_a_valid_hex_uuid(value): def test_id_property_not_a_valid_hex_uuid(value):
with pytest.raises(ValueError) as excinfo: with pytest.raises(ValueError) as excinfo:
ID_PROP.clean(value) ID_PROP.clean(value)
@ -152,77 +157,117 @@ def test_id_property_default():
assert ID_PROP.clean(default) == default assert ID_PROP.clean(default) == default
@pytest.mark.parametrize("value", [ @pytest.mark.parametrize(
2, "value", [
-1, 2,
3.14, -1,
False, 3.14,
]) False,
],
)
def test_integer_property_valid(value): def test_integer_property_valid(value):
int_prop = IntegerProperty() int_prop = IntegerProperty()
assert int_prop.clean(value) is not None assert int_prop.clean(value) is not None
@pytest.mark.parametrize("value", [ @pytest.mark.parametrize(
"something", "value", [
StringProperty(), -1,
]) -100,
-5 * 6,
],
)
def test_integer_property_invalid_min_with_constraints(value):
int_prop = IntegerProperty(min=0, max=180)
with pytest.raises(ValueError) as excinfo:
int_prop.clean(value)
assert "minimum value is" in str(excinfo.value)
@pytest.mark.parametrize(
"value", [
181,
200,
50 * 6,
],
)
def test_integer_property_invalid_max_with_constraints(value):
int_prop = IntegerProperty(min=0, max=180)
with pytest.raises(ValueError) as excinfo:
int_prop.clean(value)
assert "maximum value is" in str(excinfo.value)
@pytest.mark.parametrize(
"value", [
"something",
StringProperty(),
],
)
def test_integer_property_invalid(value): def test_integer_property_invalid(value):
int_prop = IntegerProperty() int_prop = IntegerProperty()
with pytest.raises(ValueError): with pytest.raises(ValueError):
int_prop.clean(value) int_prop.clean(value)
@pytest.mark.parametrize("value", [ @pytest.mark.parametrize(
2, "value", [
-1, 2,
3.14, -1,
False, 3.14,
]) False,
],
)
def test_float_property_valid(value): def test_float_property_valid(value):
int_prop = FloatProperty() int_prop = FloatProperty()
assert int_prop.clean(value) is not None assert int_prop.clean(value) is not None
@pytest.mark.parametrize("value", [ @pytest.mark.parametrize(
"something", "value", [
StringProperty(), "something",
]) StringProperty(),
],
)
def test_float_property_invalid(value): def test_float_property_invalid(value):
int_prop = FloatProperty() int_prop = FloatProperty()
with pytest.raises(ValueError): with pytest.raises(ValueError):
int_prop.clean(value) int_prop.clean(value)
@pytest.mark.parametrize("value", [ @pytest.mark.parametrize(
True, "value", [
False, True,
'True', False,
'False', 'True',
'true', 'False',
'false', 'true',
'TRUE', 'false',
'FALSE', 'TRUE',
'T', 'FALSE',
'F', 'T',
't', 'F',
'f', 't',
1, 'f',
0, 1,
]) 0,
],
)
def test_boolean_property_valid(value): def test_boolean_property_valid(value):
bool_prop = BooleanProperty() bool_prop = BooleanProperty()
assert bool_prop.clean(value) is not None assert bool_prop.clean(value) is not None
@pytest.mark.parametrize("value", [ @pytest.mark.parametrize(
'abc', "value", [
['false'], 'abc',
{'true': 'true'}, ['false'],
2, {'true': 'true'},
-1, 2,
]) -1,
],
)
def test_boolean_property_invalid(value): def test_boolean_property_invalid(value):
bool_prop = BooleanProperty() bool_prop = BooleanProperty()
with pytest.raises(ValueError): with pytest.raises(ValueError):
@ -241,11 +286,13 @@ def test_reference_property():
ref_prop.clean("my-type--00000000-0000-0000-0000-000000000000") ref_prop.clean("my-type--00000000-0000-0000-0000-000000000000")
@pytest.mark.parametrize("value", [ @pytest.mark.parametrize(
'2017-01-01T12:34:56Z', "value", [
'2017-01-01 12:34:56', '2017-01-01T12:34:56Z',
'Jan 1 2017 12:34:56', '2017-01-01 12:34:56',
]) 'Jan 1 2017 12:34:56',
],
)
def test_timestamp_property_valid(value): def test_timestamp_property_valid(value):
ts_prop = TimestampProperty() ts_prop = TimestampProperty()
assert ts_prop.clean(value) == constants.FAKE_TIME assert ts_prop.clean(value) == constants.FAKE_TIME
@ -275,25 +322,33 @@ def test_hex_property():
hex_prop.clean("foobar") hex_prop.clean("foobar")
@pytest.mark.parametrize("d", [ @pytest.mark.parametrize(
{'description': 'something'}, "d", [
[('abc', 1), ('bcd', 2), ('cde', 3)], {'description': 'something'},
]) [('abc', 1), ('bcd', 2), ('cde', 3)],
],
)
def test_dictionary_property_valid(d): def test_dictionary_property_valid(d):
dict_prop = DictionaryProperty() dict_prop = DictionaryProperty()
assert dict_prop.clean(d) assert dict_prop.clean(d)
@pytest.mark.parametrize("d", [ @pytest.mark.parametrize(
[{'a': 'something'}, "Invalid dictionary key a: (shorter than 3 characters)."], "d", [
[{'a'*300: 'something'}, "Invalid dictionary key aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" [{'a': 'something'}, "Invalid dictionary key a: (shorter than 3 characters)."],
"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" [
"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" {'a'*300: 'something'}, "Invalid dictionary key aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
"aaaaaaaaaaaaaaaaaaaaaaa: (longer than 256 characters)."], "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
[{'Hey!': 'something'}, "Invalid dictionary key Hey!: (contains characters other thanlowercase a-z, " "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
"uppercase A-Z, numerals 0-9, hyphen (-), or underscore (_))."], "aaaaaaaaaaaaaaaaaaaaaaa: (longer than 256 characters).",
]) ],
[
{'Hey!': 'something'}, "Invalid dictionary key Hey!: (contains characters other than lowercase a-z, "
"uppercase A-Z, numerals 0-9, hyphen (-), or underscore (_)).",
],
],
)
def test_dictionary_property_invalid_key(d): def test_dictionary_property_invalid_key(d):
dict_prop = DictionaryProperty() dict_prop = DictionaryProperty()
@ -303,18 +358,20 @@ def test_dictionary_property_invalid_key(d):
assert str(excinfo.value) == d[1] assert str(excinfo.value) == d[1]
@pytest.mark.parametrize("d", [ @pytest.mark.parametrize(
({}, "The dictionary property must contain a non-empty dictionary"), "d", [
# TODO: This error message could be made more helpful. The error is caused ({}, "The dictionary property must contain a non-empty dictionary"),
# because `json.loads()` doesn't like the *single* quotes around the key # TODO: This error message could be made more helpful. The error is caused
# name, even though they are valid in a Python dictionary. While technically # because `json.loads()` doesn't like the *single* quotes around the key
# accurate (a string is not a dictionary), if we want to be able to load # name, even though they are valid in a Python dictionary. While technically
# string-encoded "dictionaries" that are, we need a better error message # accurate (a string is not a dictionary), if we want to be able to load
# or an alternative to `json.loads()` ... and preferably *not* `eval()`. :-) # string-encoded "dictionaries" that are, we need a better error message
# Changing the following to `'{"description": "something"}'` does not cause # or an alternative to `json.loads()` ... and preferably *not* `eval()`. :-)
# any ValueError to be raised. # Changing the following to `'{"description": "something"}'` does not cause
("{'description': 'something'}", "The dictionary property must contain a dictionary"), # any ValueError to be raised.
]) ("{'description': 'something'}", "The dictionary property must contain a dictionary"),
],
)
def test_dictionary_property_invalid(d): def test_dictionary_property_invalid(d):
dict_prop = DictionaryProperty() dict_prop = DictionaryProperty()
@ -324,9 +381,11 @@ def test_dictionary_property_invalid(d):
def test_property_list_of_dictionary(): def test_property_list_of_dictionary():
@CustomObject('x-new-obj', [ @stix2.v20.CustomObject(
('property1', ListProperty(DictionaryProperty(), required=True)), 'x-new-obj', [
]) ('property1', ListProperty(DictionaryProperty(), required=True)),
],
)
class NewObj(): class NewObj():
pass pass
@ -334,19 +393,23 @@ def test_property_list_of_dictionary():
assert test_obj.property1[0]['foo'] == 'bar' assert test_obj.property1[0]['foo'] == 'bar'
@pytest.mark.parametrize("value", [ @pytest.mark.parametrize(
{"sha256": "6db12788c37247f2316052e142f42f4b259d6561751e5f401a1ae2a6df9c674b"}, "value", [
[('MD5', '2dfb1bcc980200c6706feee399d41b3f'), ('RIPEMD-160', 'b3a8cd8a27c90af79b3c81754f267780f443dfef')], {"sha256": "6db12788c37247f2316052e142f42f4b259d6561751e5f401a1ae2a6df9c674b"},
]) [('MD5', '2dfb1bcc980200c6706feee399d41b3f'), ('RIPEMD-160', 'b3a8cd8a27c90af79b3c81754f267780f443dfef')],
],
)
def test_hashes_property_valid(value): def test_hashes_property_valid(value):
hash_prop = HashesProperty() hash_prop = HashesProperty()
assert hash_prop.clean(value) assert hash_prop.clean(value)
@pytest.mark.parametrize("value", [ @pytest.mark.parametrize(
{"MD5": "a"}, "value", [
{"SHA-256": "2dfb1bcc980200c6706feee399d41b3f"}, {"MD5": "a"},
]) {"SHA-256": "2dfb1bcc980200c6706feee399d41b3f"},
],
)
def test_hashes_property_invalid(value): def test_hashes_property_invalid(value):
hash_prop = HashesProperty() hash_prop = HashesProperty()
@ -355,11 +418,11 @@ def test_hashes_property_invalid(value):
def test_embedded_property(): def test_embedded_property():
emb_prop = EmbeddedObjectProperty(type=EmailMIMEComponent) emb_prop = EmbeddedObjectProperty(type=stix2.v20.EmailMIMEComponent)
mime = EmailMIMEComponent( mime = stix2.v20.EmailMIMEComponent(
content_type="text/plain; charset=utf-8", content_type="text/plain; charset=utf-8",
content_disposition="inline", content_disposition="inline",
body="Cats are funny!" body="Cats are funny!",
) )
assert emb_prop.clean(mime) assert emb_prop.clean(mime)
@ -367,11 +430,13 @@ def test_embedded_property():
emb_prop.clean("string") emb_prop.clean("string")
@pytest.mark.parametrize("value", [ @pytest.mark.parametrize(
['a', 'b', 'c'], "value", [
('a', 'b', 'c'), ['a', 'b', 'c'],
'b', ('a', 'b', 'c'),
]) 'b',
],
)
def test_enum_property_valid(value): def test_enum_property_valid(value):
enum_prop = EnumProperty(value) enum_prop = EnumProperty(value)
assert enum_prop.clean('b') assert enum_prop.clean('b')
@ -387,17 +452,19 @@ def test_extension_property_valid():
ext_prop = ExtensionsProperty(enclosing_type='file') ext_prop = ExtensionsProperty(enclosing_type='file')
assert ext_prop({ assert ext_prop({
'windows-pebinary-ext': { 'windows-pebinary-ext': {
'pe_type': 'exe' 'pe_type': 'exe',
}, },
}) })
@pytest.mark.parametrize("data", [ @pytest.mark.parametrize(
1, "data", [
{'foobar-ext': { 1,
'pe_type': 'exe' {'foobar-ext': {
}}, 'pe_type': 'exe',
]) }},
],
)
def test_extension_property_invalid(data): def test_extension_property_invalid(data):
ext_prop = ExtensionsProperty(enclosing_type='file') ext_prop = ExtensionsProperty(enclosing_type='file')
with pytest.raises(ValueError): with pytest.raises(ValueError):
@ -407,14 +474,36 @@ def test_extension_property_invalid(data):
def test_extension_property_invalid_type(): def test_extension_property_invalid_type():
ext_prop = ExtensionsProperty(enclosing_type='indicator') ext_prop = ExtensionsProperty(enclosing_type='indicator')
with pytest.raises(ValueError) as excinfo: with pytest.raises(ValueError) as excinfo:
ext_prop.clean({ ext_prop.clean(
'windows-pebinary-ext': { {
'pe_type': 'exe' 'windows-pebinary-ext': {
}} 'pe_type': 'exe',
},
},
) )
assert "Can't parse unknown extension" in str(excinfo.value) assert "Can't parse unknown extension" in str(excinfo.value)
def test_extension_at_least_one_property_constraint(): def test_extension_at_least_one_property_constraint():
with pytest.raises(AtLeastOnePropertyError): with pytest.raises(AtLeastOnePropertyError):
TCPExt() stix2.v20.TCPExt()
def test_marking_property_error():
mark_prop = MarkingProperty()
with pytest.raises(ValueError) as excinfo:
mark_prop.clean('my-marking')
assert str(excinfo.value) == "must be a Statement, TLP Marking or a registered marking."
def test_stix_property_not_compliant_spec():
# This is a 2.0 test only...
indicator = stix2.v20.Indicator(spec_version="2.0", allow_custom=True, **constants.INDICATOR_KWARGS)
stix_prop = STIXObjectProperty(spec_version="2.0")
with pytest.raises(ValueError) as excinfo:
stix_prop.clean(indicator)
assert "Spec version 2.0 bundles don't yet support containing objects of a different spec version." in str(excinfo.value)

Some files were not shown because too many files have changed in this diff Show More