2018-11-28 22:51:00 +01:00
|
|
|
"""Classes for representing properties of STIX Objects and Cyber Observables."""
|
|
|
|
|
2017-05-03 20:10:10 +02:00
|
|
|
import base64
|
|
|
|
import binascii
|
2021-03-31 17:02:05 +02:00
|
|
|
import collections.abc
|
2018-07-10 20:50:03 +02:00
|
|
|
import copy
|
2017-04-25 00:29:56 +02:00
|
|
|
import inspect
|
2017-03-22 00:44:01 +01:00
|
|
|
import re
|
2017-02-24 16:28:53 +01:00
|
|
|
import uuid
|
2017-05-10 00:03:46 +02:00
|
|
|
|
2020-07-10 02:13:53 +02:00
|
|
|
import stix2
|
|
|
|
import stix2.hashes
|
|
|
|
|
2020-01-29 00:13:36 +01:00
|
|
|
from .base import _STIXBase
|
2020-06-28 02:03:33 +02:00
|
|
|
from .exceptions import CustomContentError, DictionaryKeyError, STIXError
|
2021-01-09 04:08:33 +01:00
|
|
|
from .parsing import parse, parse_observable
|
2021-06-26 04:20:02 +02:00
|
|
|
from .registry import class_for_type
|
2021-04-01 02:01:27 +02:00
|
|
|
from .utils import (
|
|
|
|
STIXTypeClass, _get_dict, get_class_hierarchy_names, get_type_from_id,
|
|
|
|
is_object, is_stix_type, parse_into_datetime, to_enum,
|
|
|
|
)
|
2021-01-15 18:34:10 +01:00
|
|
|
from .version import DEFAULT_VERSION
|
2018-06-27 19:27:44 +02:00
|
|
|
|
2019-10-14 12:30:15 +02:00
|
|
|
# UUID pattern accepted in "interoperability" mode: any hex digits in the
# canonical 8-4-4-4-12 layout, with no RFC 4122 variant/version checks.
# NOTE(review): no leading anchor, but it is only used via .match(), which
# implicitly anchors at the start of the string.
ID_REGEX_interoperability = re.compile(r"[0-9a-fA-F]{8}-"
                                       "[0-9a-fA-F]{4}-"
                                       "[0-9a-fA-F]{4}-"
                                       "[0-9a-fA-F]{4}-"
                                       "[0-9a-fA-F]{12}$")


# STIX 2.0 type-name pattern: lowercase alphanumeric runs separated by
# hyphens.  NOTE(review): the optional leading/trailing '-' makes this
# looser than the error message in _validate_type suggests — confirm
# that is intentional before tightening.
TYPE_REGEX = re.compile(r'^-?[a-z0-9]+(-[a-z0-9]+)*-?$')
# STIX 2.1 type-name pattern: additionally must begin with a letter.
TYPE_21_REGEX = re.compile(r'^([a-z][a-z0-9]*)+([a-z0-9-]+)*-?$')
# Shared error message for all malformed STIX identifiers; formatted with
# the offending id string.
ERROR_INVALID_ID = (
    "not a valid STIX identifier, must match <object-type>--<UUID>: {}"
)
|
|
|
|
|
2017-02-24 16:28:53 +01:00
|
|
|
|
2019-10-14 12:30:15 +02:00
|
|
|
def _check_uuid(uuid_str, spec_version, interoperability):
|
2019-06-13 02:19:47 +02:00
|
|
|
"""
|
|
|
|
Check whether the given UUID string is valid with respect to the given STIX
|
|
|
|
spec version. STIX 2.0 requires UUIDv4; 2.1 only requires the RFC 4122
|
|
|
|
variant.
|
|
|
|
|
|
|
|
:param uuid_str: A UUID as a string
|
|
|
|
:param spec_version: The STIX spec version
|
|
|
|
:return: True if the UUID is valid, False if not
|
|
|
|
:raises ValueError: If uuid_str is malformed
|
|
|
|
"""
|
2019-10-14 12:30:15 +02:00
|
|
|
if interoperability:
|
|
|
|
return ID_REGEX_interoperability.match(uuid_str)
|
|
|
|
|
2019-06-13 02:19:47 +02:00
|
|
|
uuid_obj = uuid.UUID(uuid_str)
|
|
|
|
|
|
|
|
ok = uuid_obj.variant == uuid.RFC_4122
|
|
|
|
if ok and spec_version == "2.0":
|
|
|
|
ok = uuid_obj.version == 4
|
|
|
|
|
|
|
|
return ok
|
|
|
|
|
|
|
|
|
2019-10-14 12:30:15 +02:00
|
|
|
def _validate_id(id_, spec_version, required_prefix, interoperability):
    """
    Check the STIX identifier for correctness, raise an exception if there are
    errors.

    :param id_: The STIX identifier
    :param spec_version: The STIX specification version to use
    :param required_prefix: The required prefix on the identifier, if any.
        This function doesn't add a "--" suffix to the prefix, so callers
        must add it if it is important.  Pass None to skip the prefix check.
    :param interoperability: Passed through to the UUID check; relaxes the
        UUID requirements to a simple layout check when truthy.
    :raises ValueError: If there are any errors with the identifier
    """
    # The prefix check raises its own, more specific message and therefore
    # sits outside the try below.
    if required_prefix and not id_.startswith(required_prefix):
        raise ValueError("must start with '{}'.".format(required_prefix))

    try:
        if required_prefix:
            uuid_part = id_[len(required_prefix):]
        else:
            # No prefix given: the UUID is whatever follows the first "--".
            # str.index raises ValueError when "--" is absent.
            uuid_part = id_[id_.index("--") + 2:]

        result = _check_uuid(uuid_part, spec_version, interoperability)
    except ValueError:
        # replace their ValueError with ours
        raise ValueError(ERROR_INVALID_ID.format(id_))

    if not result:
        raise ValueError(ERROR_INVALID_ID.format(id_))
|
|
|
|
|
|
|
|
|
2020-04-02 14:17:34 +02:00
|
|
|
def _validate_type(type_, spec_version):
    """
    Check the STIX type name for correctness, raise an exception if there are
    errors.

    :param type_: The STIX type name
    :param spec_version: The STIX specification version to use
    :raises ValueError: If there are any errors with the identifier
    """
    # Pick the pattern and message for the requested spec version up front,
    # then apply them with a single check.
    if spec_version == "2.0":
        pattern = TYPE_REGEX
        message = (
            "Invalid type name '%s': must only contain the "
            "characters a-z (lowercase ASCII), 0-9, and hyphen (-)." % type_
        )
    else:  # 2.1+
        pattern = TYPE_21_REGEX
        message = (
            "Invalid type name '%s': must only contain the "
            "characters a-z (lowercase ASCII), 0-9, and hyphen (-) "
            "and must begin with an a-z character" % type_
        )

    if not pattern.match(type_):
        raise ValueError(message)

    # Length limits apply regardless of spec version.
    if not 3 <= len(type_) <= 250:
        raise ValueError(
            "Invalid type name '%s': must be between 3 and 250 characters." % type_,
        )
|
|
|
|
|
|
|
|
|
2017-02-24 16:28:53 +01:00
|
|
|
class Property(object):
    """Represent a property of STIX data type.

    Subclasses can define the following attributes as keyword arguments to
    ``__init__()``.

    Args:
        required (bool): If ``True``, the property must be provided when
            creating an object with that property. No default value exists for
            these properties. (Default: ``False``)
        fixed: This provides a constant default value. Users are free to
            provide this value explicitly when constructing an object (which
            allows you to copy **all** values from an existing object to a new
            object), but if the user provides a value other than the ``fixed``
            value, it will raise an error. This is semantically equivalent to
            defining both:

            - a ``clean()`` function that checks if the value matches the
              fixed value, and
            - a ``default()`` function that returns the fixed value.

    Subclasses can also define the following functions:

    - ``def clean(self, value, allow_custom) -> (any, has_custom):``
        - Return a value that is valid for this property, and enforce and
          detect value customization. If ``value`` is not valid for this
          property, you may attempt to transform it first. If ``value`` is not
          valid and no such transformation is possible, it must raise an
          exception. The method is also responsible for enforcing and
          detecting customizations. If allow_custom is False, no
          customizations must be allowed. If any are encountered, an exception
          must be raised (e.g. CustomContentError). If none are encountered,
          False must be returned for has_custom. If allow_custom is True, then
          the clean() method is responsible for detecting any customizations
          in the value (just because the user has elected to allow
          customizations doesn't mean there actually are any). The method must
          return an appropriate value for has_custom. Customization may not be
          applicable/possible for a property. In that case, allow_custom can
          be ignored, and has_custom must be returned as False.

    - ``def default(self):``
        - provide a default value for this property.
        - ``default()`` can return the special value ``NOW`` to use the
          current time. This is useful when several timestamps in the same
          object need to use the same default value, so calling now() for each
          property-- likely several microseconds apart-- does not work.

    Subclasses can instead provide a lambda function for ``default`` as a
    keyword argument. ``clean`` should not be provided as a lambda since
    lambdas cannot raise their own exceptions.

    When instantiating Properties, ``required`` and ``default`` should not be
    used together. ``default`` implies that the property is required in the
    specification so this function will be used to supply a value if none is
    provided. ``required`` means that the user must provide this; it is
    required in the specification and we can't or don't want to create a
    default value.
    """

    def _default_clean(self, value, allow_custom=False):
        # Installed as clean() when a ``fixed`` value was given: only the
        # fixed value is acceptable, and a fixed value is never custom.
        if value != self._fixed_value:
            raise ValueError("must equal '{}'.".format(self._fixed_value))
        return value, False

    def __init__(self, required=False, fixed=None, default=None):
        self.required = required

        if required and default:
            raise STIXError(
                # Bug fix: the two fragments previously joined with no
                # separating space, producing "'required'really means".
                "Can't use 'required' and 'default' together. 'required' "
                "really means 'the user must provide this.'",
            )

        if fixed:
            self._fixed_value = fixed
            # Override the instance's clean/default so the fixed value is
            # both enforced and supplied automatically.
            self.clean = self._default_clean
            self.default = lambda: fixed
        if default:
            self.default = default

    def clean(self, value, allow_custom=False):
        # Base behavior: accept any value unchanged; nothing is custom.
        return value, False
|
2017-04-14 16:42:17 +02:00
|
|
|
|
2017-02-24 16:28:53 +01:00
|
|
|
|
2017-04-07 01:17:32 +02:00
|
|
|
class ListProperty(Property):
    """A property whose value is a non-empty list of elements, each validated
    by (or instantiated from) the ``contained`` type."""

    def __init__(self, contained, **kwargs):
        """
        ``contained`` should be a Property class or instance, or a _STIXBase
        subclass.
        """
        self.contained = None

        if inspect.isclass(contained):
            # Property classes are instantiated; _STIXBase subclasses are left
            # as-is.
            if issubclass(contained, Property):
                self.contained = contained()
            elif issubclass(contained, _STIXBase):
                self.contained = contained

        elif isinstance(contained, Property):
            self.contained = contained

        # Anything else (plain values, unrelated classes) is rejected.
        if not self.contained:
            raise TypeError(
                "Invalid list element type: {}".format(
                    str(contained),
                ),
            )

        super(ListProperty, self).__init__(**kwargs)

    def clean(self, value, allow_custom):
        """Validate each element of ``value`` and aggregate custom detection.

        Returns a (list, has_custom) pair; raises ValueError for
        non-iterables and empty lists, CustomContentError when custom content
        is found but not allowed.
        """
        try:
            iter(value)
        except TypeError:
            raise ValueError("must be an iterable.")

        # A single STIX object or string is wrapped as a one-element list
        # rather than being iterated character-by-character/property-wise.
        if isinstance(value, (_STIXBase, str)):
            value = [value]

        result = []
        has_custom = False
        if isinstance(self.contained, Property):
            # Delegate per-element validation to the contained Property;
            # any element being custom marks the whole list custom.
            for item in value:
                valid, temp_custom = self.contained.clean(item, allow_custom)
                result.append(valid)
                has_custom = has_custom or temp_custom

        else:  # self.contained must be a _STIXBase subclass
            for item in value:
                if isinstance(item, self.contained):
                    valid = item

                elif isinstance(item, collections.abc.Mapping):
                    # attempt a mapping-like usage...
                    valid = self.contained(allow_custom=allow_custom, **item)

                else:
                    raise ValueError(
                        "Can't create a {} out of {}".format(
                            self.contained._type, str(item),
                        ),
                    )

                result.append(valid)
                # _STIXBase instances carry their own custom-content flag.
                has_custom = has_custom or valid.has_custom

        if not allow_custom and has_custom:
            raise CustomContentError("custom content encountered")

        # STIX spec forbids empty lists
        if len(result) < 1:
            raise ValueError("must not be empty.")

        return result, has_custom
|
2017-04-07 01:17:32 +02:00
|
|
|
|
|
|
|
|
|
|
|
class StringProperty(Property):
    """A property holding a string; non-string input is coerced via str()."""

    def __init__(self, **kwargs):
        super(StringProperty, self).__init__(**kwargs)

    def clean(self, value, allow_custom=False):
        # Coerce anything that isn't already a str; strings are never custom.
        cleaned = value if isinstance(value, str) else str(value)
        return cleaned, False
|
2017-04-07 01:17:32 +02:00
|
|
|
|
2017-02-24 16:28:53 +01:00
|
|
|
|
2017-02-24 17:46:21 +01:00
|
|
|
class TypeProperty(Property):
    """Fixed-value property holding a STIX object's ``type`` name."""

    def __init__(self, type, spec_version=DEFAULT_VERSION):
        # Validate the type name against the spec's naming rules up front;
        # the value itself is then enforced via Property's ``fixed`` support.
        _validate_type(type, spec_version)
        self.spec_version = spec_version
        super(TypeProperty, self).__init__(fixed=type)
|
|
|
|
|
|
|
|
|
2017-02-24 16:28:53 +01:00
|
|
|
class IDProperty(Property):
    """A property for STIX identifiers of the form ``<type>--<UUID>``."""

    def __init__(self, type, spec_version=DEFAULT_VERSION):
        self.required_prefix = type + "--"
        self.spec_version = spec_version
        super(IDProperty, self).__init__()

    def clean(self, value, allow_custom=False):
        # ``interoperability`` is an optional attribute that may be set on
        # this instance externally; treat "absent or falsy" as disabled.
        interoperability = getattr(self, 'interoperability', False) or False
        _validate_id(value, self.spec_version, self.required_prefix, interoperability)
        return value, False

    def default(self):
        # Fresh random v4 UUID, prefixed with the object type.
        return self.required_prefix + str(uuid.uuid4())
|
2017-03-22 00:33:43 +01:00
|
|
|
|
|
|
|
|
2017-04-18 15:19:38 +02:00
|
|
|
class IntegerProperty(Property):
    """An integer-valued property with optional inclusive bounds."""

    def __init__(self, min=None, max=None, **kwargs):
        self.min = min
        self.max = max
        super(IntegerProperty, self).__init__(**kwargs)

    def clean(self, value, allow_custom=False):
        # Coerce to int; anything inconvertible is rejected uniformly.
        try:
            converted = int(value)
        except Exception:
            raise ValueError("must be an integer.")

        # Enforce the optional inclusive lower/upper bounds.
        if self.min is not None and converted < self.min:
            raise ValueError("minimum value is {}. received {}".format(self.min, converted))
        if self.max is not None and converted > self.max:
            raise ValueError("maximum value is {}. received {}".format(self.max, converted))

        return converted, False
|
2018-10-15 21:02:59 +02:00
|
|
|
|
2017-04-18 15:19:38 +02:00
|
|
|
|
2017-05-15 19:48:41 +02:00
|
|
|
class FloatProperty(Property):
    """A float-valued property with optional inclusive bounds."""

    def __init__(self, min=None, max=None, **kwargs):
        self.min = min
        self.max = max
        super(FloatProperty, self).__init__(**kwargs)

    def clean(self, value, allow_custom=False):
        # Coerce to float; anything inconvertible is rejected uniformly.
        try:
            converted = float(value)
        except Exception:
            raise ValueError("must be a float.")

        # Enforce the optional inclusive lower/upper bounds.
        if self.min is not None and converted < self.min:
            raise ValueError("minimum value is {}. received {}".format(self.min, converted))
        if self.max is not None and converted > self.max:
            raise ValueError("maximum value is {}. received {}".format(self.max, converted))

        return converted, False
|
2018-10-15 21:02:59 +02:00
|
|
|
|
2017-05-15 19:48:41 +02:00
|
|
|
|
2017-03-22 00:33:43 +01:00
|
|
|
class BooleanProperty(Property):
    """A boolean-valued property accepting common true/false spellings."""

    # Accepted spellings; string comparison happens after lowercasing.
    _trues = ['true', 't', '1', 1, True]
    _falses = ['false', 'f', '0', 0, False]

    def clean(self, value, allow_custom=False):
        # Normalize strings case-insensitively before the table lookup.
        candidate = value.lower() if isinstance(value, str) else value

        if candidate in self._trues:
            return True, False
        if candidate in self._falses:
            return False, False

        raise ValueError("must be a boolean value.")
|
2017-03-22 00:44:01 +01:00
|
|
|
|
|
|
|
|
2017-04-11 18:10:55 +02:00
|
|
|
class TimestampProperty(Property):
    """A timestamp property; values are parsed into datetime objects with
    the configured precision settings."""

    def __init__(self, precision="any", precision_constraint="exact", **kwargs):
        self.precision = precision
        self.precision_constraint = precision_constraint
        super(TimestampProperty, self).__init__(**kwargs)

    def clean(self, value, allow_custom=False):
        # Parsing may raise; a successfully parsed timestamp is never custom.
        parsed = parse_into_datetime(
            value, self.precision, self.precision_constraint,
        )
        return parsed, False
|
2017-04-11 18:10:55 +02:00
|
|
|
|
|
|
|
|
2017-05-03 20:10:10 +02:00
|
|
|
class DictionaryProperty(Property):
    """A property whose value is a STIX Dictionary: a non-empty mapping with
    spec-constrained key names."""

    def __init__(self, spec_version=DEFAULT_VERSION, **kwargs):
        self.spec_version = spec_version
        super(DictionaryProperty, self).__init__(**kwargs)

    def clean(self, value, allow_custom=False):
        try:
            dictified = _get_dict(value)
        except ValueError:
            raise ValueError("The dictionary property must contain a dictionary")

        for k in dictified:
            # Key-length limits differ by spec version.
            if self.spec_version == '2.0':
                if len(k) < 3:
                    raise DictionaryKeyError(k, "shorter than 3 characters")
                if len(k) > 256:
                    raise DictionaryKeyError(k, "longer than 256 characters")
            elif self.spec_version == '2.1':
                if len(k) > 250:
                    raise DictionaryKeyError(k, "longer than 250 characters")

            # Keys may contain only ASCII letters, digits, hyphen, underscore.
            if re.match(r"^[a-zA-Z0-9_-]+$", k) is None:
                raise DictionaryKeyError(
                    k,
                    "contains characters other than lowercase a-z, "
                    "uppercase A-Z, numerals 0-9, hyphen (-), or "
                    "underscore (_)",
                )

        # The STIX spec forbids empty dictionaries.
        if not dictified:
            raise ValueError("must not be empty.")

        return dictified, False
|
2018-07-10 20:50:03 +02:00
|
|
|
|
2017-05-03 20:10:10 +02:00
|
|
|
|
2020-07-10 02:13:53 +02:00
|
|
|
class HashesProperty(DictionaryProperty):
    """A dictionary property of hash-name -> hash-value pairs.

    Hash names recognized by this library are normalized to the
    spec-mandated spelling given in ``spec_hash_names``; unrecognized or
    non-spec names are treated as custom content.
    """

    def __init__(self, spec_hash_names, spec_version=DEFAULT_VERSION, **kwargs):
        super().__init__(spec_version=spec_version, **kwargs)

        # Spec-mandated hash names for this property (exact spellings).
        self.__spec_hash_names = spec_hash_names

        # Map hash algorithm enum to the given spec mandated name, for those
        # names which are recognized as hash algorithms by this library.
        self.__alg_to_spec_name = {}
        for spec_hash_name in spec_hash_names:
            alg = stix2.hashes.infer_hash_algorithm(spec_hash_name)
            if alg:
                self.__alg_to_spec_name[alg] = spec_hash_name

    def clean(self, value, allow_custom):
        """Validate the hash dictionary and normalize recognized hash names
        to their spec spellings.  Returns (dict, has_custom)."""
        # ignore the has_custom return value here; there is no customization
        # of DictionaryProperties.
        clean_dict, _ = super().clean(value, allow_custom)

        # Result dict keyed by spec-normalized hash names.
        spec_dict = {}

        has_custom = False
        for hash_k, hash_v in clean_dict.items():
            hash_alg = stix2.hashes.infer_hash_algorithm(hash_k)

            if hash_alg:
                # Library-supported hash algorithm: sanity check the value.
                if not stix2.hashes.check_hash(hash_alg, hash_v):
                    raise ValueError(
                        "'{0}' is not a valid {1} hash".format(
                            hash_v, hash_alg.name,
                        ),
                    )

                spec_name = self.__alg_to_spec_name.get(hash_alg)
                if not spec_name:
                    # There is library support for the hash algorithm, but
                    # it's not in the spec.  So it's custom.  Just use the
                    # user's name as-is.
                    has_custom = True
                    spec_name = hash_k

            else:
                # Unrecognized hash algorithm; use as-is.  Hash algorithm name
                # must be an exact match from spec, or it will be considered
                # custom.
                spec_name = hash_k
                if spec_name not in self.__spec_hash_names:
                    has_custom = True

            if not allow_custom and has_custom:
                raise CustomContentError(
                    "custom hash algorithm: " + hash_k,
                )

            spec_dict[spec_name] = hash_v

        return spec_dict, has_custom
|
2017-05-03 20:10:10 +02:00
|
|
|
|
|
|
|
|
|
|
|
class BinaryProperty(Property):
    """A property holding base64-encoded data as a string."""

    def clean(self, value, allow_custom=False):
        # Decode only to validate; the value is kept in its encoded form.
        try:
            base64.b64decode(value)
        except (binascii.Error, TypeError):
            raise ValueError("must contain a base64 encoded string")
        else:
            return value, False
|
2017-05-03 20:10:10 +02:00
|
|
|
|
|
|
|
|
|
|
|
class HexProperty(Property):
    """A property holding hex-encoded binary data as a string."""

    # Pairs of hex digits; compiled once and reused across clean() calls.
    _HEX_PAIRS = re.compile(r"^([a-fA-F0-9]{2})+$")

    def clean(self, value, allow_custom=False):
        if self._HEX_PAIRS.match(value) is None:
            raise ValueError("must contain an even number of hexadecimal characters")
        return value, False
|
2017-05-03 20:10:10 +02:00
|
|
|
|
|
|
|
|
2017-03-22 00:44:01 +01:00
|
|
|
class ReferenceProperty(Property):
    """A property holding a STIX identifier that references another object,
    with the referenced object's type constrained by either a whitelist
    (``valid_types``) or a blacklist (``invalid_types``)."""

    # Which way the type constraint cuts: allow-listed vs deny-listed.
    _WHITELIST, _BLACKLIST = range(2)

    def __init__(self, valid_types=None, invalid_types=None, spec_version=DEFAULT_VERSION, **kwargs):
        """
        references sometimes must be to a specific object type

        :param valid_types: type names (or STIXTypeClass values) the
            reference may point to; mutually exclusive with invalid_types
        :param invalid_types: type names (or STIXTypeClass values) the
            reference must not point to
        :param spec_version: The STIX specification version to use
        """
        self.spec_version = spec_version

        # Exactly one of the two constraint styles must be used.
        if (valid_types is not None and invalid_types is not None) or \
                (valid_types is None and invalid_types is None):
            raise ValueError(
                "Exactly one of 'valid_types' and 'invalid_types' must be "
                "given",
            )

        # Normalize a single type to a one-element list.
        if valid_types and not isinstance(valid_types, list):
            valid_types = [valid_types]
        elif invalid_types and not isinstance(invalid_types, list):
            invalid_types = [invalid_types]

        # An empty whitelist would make every reference invalid.
        if valid_types is not None and len(valid_types) == 0:
            raise ValueError("Impossible type constraint: empty whitelist")

        self.auth_type = self._WHITELIST if valid_types else self._BLACKLIST

        # Divide type requirements into generic type classes and specific
        # types.  With respect to strings, values recognized as STIXTypeClass
        # enum names are generic; all else are specifics.
        self.generics = set()
        self.specifics = set()
        types = valid_types or invalid_types
        for type_ in types:
            try:
                enum_value = to_enum(type_, STIXTypeClass)
            except KeyError:
                self.specifics.add(type_)
            else:
                self.generics.add(enum_value)

        super(ReferenceProperty, self).__init__(**kwargs)

    def clean(self, value, allow_custom):
        """Validate a reference value (object or id string) and check the
        referenced type against this property's constraint.  Returns
        (id string, has_custom)."""
        # Accept a full STIX object and reduce it to its id.
        if isinstance(value, _STIXBase):
            value = value.id
        value = str(value)

        # ``interoperability`` may be set on this instance externally;
        # absent/falsy means disabled.
        interoperability = self.interoperability if hasattr(self, 'interoperability') and self.interoperability else False
        _validate_id(value, self.spec_version, None, interoperability)

        obj_type = get_type_from_id(value)

        # Only comes into play when inverting a hybrid whitelist.
        # E.g. if the possible generic categories are A, B, C, then the
        # inversion of whitelist constraint "A or x" (where x is a specific
        # type) is something like "[not (B or C)] or x".  In other words, we
        # invert the generic categories to produce a blacklist, but leave the
        # specific categories alone; they essentially become exceptions to our
        # blacklist.
        blacklist_exceptions = set()

        generics = self.generics
        specifics = self.specifics
        auth_type = self.auth_type
        if allow_custom and auth_type == self._WHITELIST and generics:
            # If allowing customization and using a whitelist, and if generic
            # "category" types were given, we need to allow custom object types
            # of those categories.  Unless registered, it's impossible to know
            # whether a given type is within a given category.  So we take a
            # permissive approach and allow any type which is not known to be
            # in the wrong category.  I.e. flip the whitelist set to a
            # blacklist of a complementary set.
            auth_type = self._BLACKLIST
            generics = set(STIXTypeClass) - generics
            blacklist_exceptions, specifics = specifics, blacklist_exceptions

        if auth_type == self._WHITELIST:
            type_ok = is_stix_type(
                obj_type, self.spec_version, *generics
            ) or obj_type in specifics

        else:
            type_ok = (
                not is_stix_type(
                    obj_type, self.spec_version, *generics
                ) and obj_type not in specifics
            ) or obj_type in blacklist_exceptions

        # We need to figure out whether the referenced object is custom or
        # not.  No good way to do that at present... just check if
        # unregistered and for the "x-" type prefix, for now?
        has_custom = not is_object(obj_type, self.spec_version) \
            or obj_type.startswith("x-")

        if not type_ok:
            # Build a readable list of the constraint types for the error.
            types = self.specifics.union(self.generics)
            types = ", ".join(x.name if isinstance(x, STIXTypeClass) else x for x in types)
            if self.auth_type == self._WHITELIST:
                msg = "not one of the valid types for this property: %s." % types
            else:
                msg = "one of the invalid types for this property: %s." % types
            if not allow_custom and has_custom:
                msg += " A custom object type may be allowed with allow_custom=True."
            raise ValueError(
                "The type-specifying prefix '%s' for this property is %s"
                % (obj_type, msg),
            )

        if not allow_custom and has_custom:
            raise CustomContentError(
                "reference to custom object type: " + obj_type,
            )

        return value, has_custom
|
2019-11-06 16:11:12 +01:00
|
|
|
|
|
|
|
|
2020-06-08 15:27:12 +02:00
|
|
|
# Granular-marking selector syntax: a dotted property path whose steps are
# names or [index] list accesses, or the literal string "id".
SELECTOR_REGEX = re.compile(r"^([a-z0-9_-]{3,250}(\.(\[\d+\]|[a-z0-9_-]{1,250}))*|id)$")
|
2017-03-31 21:52:27 +02:00
|
|
|
|
|
|
|
|
|
|
|
class SelectorProperty(Property):
    """A property for granular-marking selector strings."""

    def clean(self, value, allow_custom=False):
        if SELECTOR_REGEX.match(value) is None:
            raise ValueError("must adhere to selector syntax.")
        return value, False
|
2017-05-04 00:19:30 +02:00
|
|
|
|
|
|
|
|
2017-05-05 18:32:02 +02:00
|
|
|
class ObjectReferenceProperty(StringProperty):
    """A string property referencing another object by key (used by STIX 2.0
    observable object references)."""

    def __init__(self, valid_types=None, **kwargs):
        # Normalize a single type to a one-element list; None stays None.
        if valid_types and type(valid_types) is not list:
            self.valid_types = [valid_types]
        else:
            self.valid_types = valid_types
        super(ObjectReferenceProperty, self).__init__(**kwargs)
|
2017-05-09 17:03:19 +02:00
|
|
|
|
|
|
|
|
|
|
|
class EmbeddedObjectProperty(Property):
    """A property whose value is a sub-object of a particular type."""

    def __init__(self, type, **kwargs):
        self.type = type
        super(EmbeddedObjectProperty, self).__init__(**kwargs)

    def clean(self, value, allow_custom):
        # Dicts are promoted to the embedded type; anything else must
        # already be an instance of it.
        if isinstance(value, dict):
            value = self.type(allow_custom=allow_custom, **value)
        elif not isinstance(value, self.type):
            raise ValueError("must be of type {}.".format(self.type.__name__))

        # _STIXBase instances carry their own custom-content flag.
        has_custom = value.has_custom if isinstance(value, _STIXBase) else False

        if has_custom and not allow_custom:
            raise CustomContentError("custom content encountered")

        return value, has_custom
|
2017-05-10 17:52:59 +02:00
|
|
|
|
|
|
|
|
|
|
|
class EnumProperty(StringProperty):
    """
    Used for enumeration type properties. Properties of this type do not allow
    customization.
    """

    def __init__(self, allowed, **kwargs):
        # Accept a bare string as shorthand for a one-element enumeration.
        self.allowed = [allowed] if isinstance(allowed, str) else allowed
        super(EnumProperty, self).__init__(**kwargs)

    def clean(self, value, allow_custom):
        """Clean as a string, then enforce membership in the enumeration."""
        result, _ = super(EnumProperty, self).clean(value, allow_custom)

        if result not in self.allowed:
            raise ValueError("value '{}' is not valid for this enumeration.".format(result))

        # Enumerations never carry custom content.
        return result, False
|
|
|
|
|
|
|
|
|
|
|
|
class OpenVocabProperty(StringProperty):
    """
    Used for open vocab type properties.
    """

    def __init__(self, allowed, **kwargs):
        super(OpenVocabProperty, self).__init__(**kwargs)

        # Normalize a single suggested value to a one-element list.
        self.allowed = [allowed] if isinstance(allowed, str) else allowed

    def clean(self, value, allow_custom):
        """Clean as a string; values outside the vocab are still accepted."""
        cleaned, _ = super(OpenVocabProperty, self).clean(value, allow_custom)

        # Disabled: it was decided that enforcing this is too strict (might
        # break too much user code). Revisit when we have the capability for
        # more granular config settings when creating objects.
        #
        # has_custom = cleaned not in self.allowed
        #
        # if not allow_custom and has_custom:
        #     raise CustomContentError(
        #         "custom value in open vocab: '{}'".format(cleaned),
        #     )

        # With the above check disabled, open-vocab values never report
        # custom content.
        return cleaned, False
|
2017-08-18 20:22:57 +02:00
|
|
|
|
|
|
|
|
|
|
|
class PatternProperty(StringProperty):
    """A distinct marker subclass used for pattern properties.

    Currently adds no behavior or validation beyond StringProperty.
    """
    pass
|
2018-07-10 20:50:03 +02:00
|
|
|
|
|
|
|
|
|
|
|
class ObservableProperty(Property):
    """Property for holding Cyber Observable Objects.
    """

    def __init__(self, spec_version=DEFAULT_VERSION, *args, **kwargs):
        self.spec_version = spec_version
        super(ObservableProperty, self).__init__(*args, **kwargs)

    def clean(self, value, allow_custom):
        """Parse a mapping of keys to observable objects.

        Returns the parsed mapping and whether any custom content was found.
        """
        try:
            # _get_dict() may hand back the caller's own dict when given a
            # dict, so deep-copy before mutating it below.
            dictified = copy.deepcopy(_get_dict(value))
        except ValueError:
            raise ValueError("The observable property must contain a dictionary")
        if dictified == {}:
            raise ValueError("The observable property must contain a non-empty dictionary")

        # Map each key to its object's declared type, for reference checking
        # during observable parsing.
        valid_refs = {key: obj['type'] for key, obj in dictified.items()}

        has_custom = False
        for key, obj in dictified.items():
            parsed_obj = parse_observable(
                obj,
                valid_refs,
                allow_custom=allow_custom,
                version=self.spec_version,
            )

            # Unregistered custom observables come back as plain dicts.
            if isinstance(parsed_obj, _STIXBase):
                if parsed_obj.has_custom:
                    has_custom = True
            else:
                has_custom = True

            if has_custom and not allow_custom:
                raise CustomContentError(
                    "customized {} observable found".format(
                        parsed_obj["type"],
                    ),
                )

            dictified[key] = parsed_obj

        return dictified, has_custom
|
2018-07-10 20:50:03 +02:00
|
|
|
|
|
|
|
|
|
|
|
class ExtensionsProperty(DictionaryProperty):
    """Property for representing extensions on Observable objects.

    Values are mappings from extension type names (or extension-definition
    IDs) to extension content.  Registered extension types are instantiated
    as their registered classes; unregistered ones are handled per the rules
    in clean() below.
    """

    def __init__(self, spec_version=DEFAULT_VERSION, required=False):
        # spec_version is forwarded to DictionaryProperty; clean() reads it
        # back via self.spec_version (presumably set by the superclass —
        # confirm in DictionaryProperty).
        super(ExtensionsProperty, self).__init__(spec_version=spec_version, required=required)

    def clean(self, value, allow_custom):
        """Parse/validate the extensions mapping.

        Returns the cleaned mapping and a flag indicating whether any custom
        content was encountered.  Raises CustomContentError when custom
        content is found but allow_custom is False.
        """
        try:
            dictified = _get_dict(value)
            # get deep copy since we are going modify the dict and might
            # modify the original dict as _get_dict() does not return new
            # dict when passed a dict
            dictified = copy.deepcopy(dictified)
        except ValueError:
            raise ValueError("The extensions property must contain a dictionary")

        has_custom = False
        for key, subvalue in dictified.items():
            # Look up a registered extension class for this key, if any.
            cls = class_for_type(key, self.spec_version, "extensions")
            if cls:
                if isinstance(subvalue, dict):
                    # Dict content: instantiate the registered class from it.
                    ext = cls(allow_custom=allow_custom, **subvalue)
                elif isinstance(subvalue, cls):
                    # If already an instance of the registered class, assume
                    # it's valid
                    ext = subvalue
                else:
                    raise TypeError(
                        "Can't create extension '{}' from {}.".format(
                            key, type(subvalue),
                        ),
                    )

                # Custom content inside one extension taints the whole value.
                has_custom = has_custom or ext.has_custom

                if not allow_custom and has_custom:
                    raise CustomContentError(
                        "custom content found in {} extension".format(
                            key,
                        ),
                    )

                dictified[key] = ext

            else:
                # If an unregistered "extension-definition--" style extension,
                # we don't know what's supposed to be in it, so we can't
                # determine whether there's anything custom. So, assume there
                # are no customizations. If it's a different type of extension,
                # non-registration implies customization (since all spec-defined
                # extensions should be pre-registered with the library).

                if key.startswith('extension-definition--'):
                    # Only the ID format is checked; content passes through
                    # unmodified.
                    _validate_id(
                        key, self.spec_version, 'extension-definition--',
                    )
                elif allow_custom:
                    has_custom = True
                else:
                    raise CustomContentError("Can't parse unknown extension type: {}".format(key))

                # Unregistered extension content is kept as-is.
                dictified[key] = subvalue

        return dictified, has_custom
|
2018-07-10 20:50:03 +02:00
|
|
|
|
|
|
|
|
|
|
|
class STIXObjectProperty(Property):
    """Property for holding a STIX object (e.g. inside a Bundle's objects).

    Accepts either an already-parsed STIX object or a dict to be parsed.
    """

    def __init__(self, spec_version=DEFAULT_VERSION, interoperability=False, *args, **kwargs):
        # Spec version used when parsing dict values and for the 2.0
        # cross-version restriction in clean().
        self.spec_version = spec_version
        # Forwarded to parse(); relaxes ID checks for interoperability.
        self.interoperability = interoperability
        super(STIXObjectProperty, self).__init__(*args, **kwargs)

    def clean(self, value, allow_custom):
        """Validate/parse ``value``; return (object, has_custom).

        Raises ValueError for structural problems and CustomContentError when
        custom content is found but allow_custom is False.
        """
        # Any STIX Object (SDO, SRO, or Marking Definition) can be added to
        # a bundle with no further checks.
        stix2_classes = {'_DomainObject', '_RelationshipObject', 'MarkingDefinition'}
        if any(
            x in stix2_classes
            for x in get_class_hierarchy_names(value)
        ):
            # A simple "is this a spec version 2.1+ object" test. For now,
            # limit 2.0 bundles to 2.0 objects. It's not possible yet to
            # have validation co-constraints among properties, e.g. have
            # validation here depend on the value of another property
            # (spec_version). So this is a hack, and not technically spec-
            # compliant.
            if 'spec_version' in value and self.spec_version == '2.0':
                raise ValueError(
                    "Spec version 2.0 bundles don't yet support "
                    "containing objects of a different spec "
                    "version.",
                )

            if not allow_custom and value.has_custom:
                raise CustomContentError("custom content encountered")

            # Already-parsed objects are returned unchanged.
            return value, value.has_custom
        try:
            dictified = _get_dict(value)
        except ValueError:
            raise ValueError("This property may only contain a dictionary or object")
        if dictified == {}:
            raise ValueError("This property may only contain a non-empty dictionary or object")
        if 'type' in dictified and dictified['type'] == 'bundle':
            raise ValueError("This property may not contain a Bundle object")
        if 'spec_version' in dictified and self.spec_version == '2.0':
            # See above comment regarding spec_version.
            raise ValueError(
                "Spec version 2.0 bundles don't yet support "
                "containing objects of a different spec version.",
            )

        parsed_obj = parse(dictified, allow_custom=allow_custom, interoperability=self.interoperability)

        if isinstance(parsed_obj, _STIXBase):
            has_custom = parsed_obj.has_custom
        else:
            # we get dicts for unregistered custom objects
            has_custom = True

        if not allow_custom and has_custom:
            # parse() will ignore the caller's allow_custom=False request if
            # the object type is registered and dictified has a
            # "custom_properties" key. So we have to do another check here.
            raise CustomContentError(
                "customized {} object found".format(
                    parsed_obj["type"],
                ),
            )

        return parsed_obj, has_custom
|