Updates to allow existing tests to pass

master
Desai, Kartikey H 2019-08-21 02:00:41 -04:00
parent 46359ead69
commit bf1b8b567d
5 changed files with 50 additions and 5 deletions

View File

@@ -128,6 +128,11 @@ class _STIXBase(collections.Mapping):
                 list_of_properties.remove('type')
         current_properties = self.properties_populated()
         list_of_properties_populated = set(list_of_properties).intersection(current_properties)
+        if list_of_properties_populated == set(['id']) and isinstance(self, _Observable):
+            # Do not count the auto-generated id as a user-specified property
+            list_of_properties_populated = None
         if list_of_properties and (not list_of_properties_populated or list_of_properties_populated == set(['extensions'])):
             raise AtLeastOnePropertyError(self.__class__, list_of_properties)
@@ -369,13 +374,24 @@ class _Observable(_STIXBase):
                         streamlined_object["hashes"] = possible_hash
                 for key in kwargs.keys():
                     if key in properties_to_use and key != "hashes":
-                        streamlined_object[key] = kwargs[key]
+                        if type(kwargs[key]) is dict:
+                            for otherKey in kwargs[key]:
+                                if isinstance(kwargs[key][otherKey], _STIXBase):
+                                    streamlined_object[key] = self._embed_obj_to_json(kwargs[key][otherKey])
+                                else:
+                                    streamlined_object[key] = kwargs[key]
+                        else:
+                            if isinstance(kwargs[key], _STIXBase):
+                                streamlined_object[key] = self._embed_obj_to_json(kwargs[key])
+                            else:
+                                streamlined_object[key] = kwargs[key]
                 if streamlined_object:
-                    data = canonicalize(streamlined_object, utf8=False)
+                    data = canonicalize(str(streamlined_object), utf8=False)
                     return required_prefix + str(uuid.uuid5(namespace, str(data)))
             return None
         except AttributeError:
+            # We ideally end up here if handling a 2.0 SCO
             return None

     def _choose_one_hash(self, hash_dict):
@@ -393,6 +409,12 @@ class _Observable(_STIXBase):
                 break
         return {k: v}

+    def _embed_obj_to_json(self, obj):
+        tmp_obj = dict(copy.deepcopy(obj))
+        for prop_name in obj._defaulted_optional_properties:
+            del tmp_obj[prop_name]
+        return tmp_obj
+

 class _Extension(_STIXBase):
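
The _Observable changes above derive a deterministic SCO id by canonicalizing the ID-contributing properties and hashing them into a UUIDv5. A minimal sketch of that idea, separate from the library code, using the STIX 2.1 SCO namespace UUID and plain sorted-key JSON as a stand-in for canonicalize() (the helper name make_deterministic_id is illustrative):

import json
import uuid

# Namespace defined by STIX 2.1 for deterministic SCO ids
SCO_NAMESPACE = uuid.UUID("00abedb4-aa42-466c-9c01-fed23315a9b7")

def make_deterministic_id(sco_type, contributing_props):
    # Stand-in for the canonicalization step: stable key order, no whitespace
    data = json.dumps(contributing_props, sort_keys=True, separators=(",", ":"))
    return "{}--{}".format(sco_type, uuid.uuid5(SCO_NAMESPACE, data))

# The same contributing properties always produce the same id
assert make_deterministic_id("file", {"name": "foo.exe"}) == make_deterministic_id("file", {"name": "foo.exe"})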

View File

@@ -360,6 +360,7 @@ def _make_iterencode(
                     chunks = _iterencode_dict(value, _current_indent_level)
                 else:
                     chunks = _iterencode(value, _current_indent_level)
+                # Below line commented-out for python2 compatibility
                 # yield from chunks
                 for chunk in chunks:
                     yield chunk
@@ -441,6 +442,7 @@ def _make_iterencode(
                     chunks = _iterencode_dict(value, _current_indent_level)
                 else:
                     chunks = _iterencode(value, _current_indent_level)
+                # Below line commented-out for python2 compatibility
                 # yield from chunks
                 for chunk in chunks:
                     yield chunk
@@ -467,10 +469,12 @@ def _make_iterencode(
             # see comment for int/float in _make_iterencode
             yield convert2Es6Format(o)
         elif isinstance(o, (list, tuple)):
+            # Below line commented-out for python2 compatibility
             # yield from _iterencode_list(o, _current_indent_level)
             for thing in _iterencode_list(o, _current_indent_level):
                 yield thing
         elif isinstance(o, dict):
+            # Below line commented-out for python2 compatibility
             # yield from _iterencode_dict(o, _current_indent_level)
             for thing in _iterencode_dict(o, _current_indent_level):
                 yield thing
@@ -481,6 +485,7 @@ def _make_iterencode(
                 raise ValueError("Circular reference detected")
             markers[markerid] = o
             o = _default(o)
+            # Below line commented-out for python2 compatibility
             # yield from _iterencode(o, _current_indent_level)
             for thing in _iterencode(o, _current_indent_level):
                 yield thing
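
The only additions in this file are comments marking where yield from delegation was unrolled for Python 2 support. A small standalone illustration of that equivalence (not taken from the library):

def delegate_py3(gen):
    yield from gen  # Python 3.3+ syntax only

def delegate_py2(gen):
    # Same behavior, spelled out so it also runs on Python 2
    for item in gen:
        yield item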

View File

@ -86,6 +86,7 @@ stix_objs = [
"objects": { "objects": {
"0": { "0": {
"type": "file", "type": "file",
"id": "file--fa1b868c-5fe2-5c85-8197-9674548379ec",
"name": "HAL 9000.exe", "name": "HAL 9000.exe",
}, },
}, },
@@ -109,8 +110,8 @@ filters = [
     Filter("object_marking_refs", "=", "marking-definition--613f2e26-0000-4000-8000-b8e91df99dc9"),
     Filter("granular_markings.selectors", "in", "description"),
     Filter("external_references.source_name", "=", "CVE"),
-    Filter("objects", "=", {"0": {"type": "file", "name": "HAL 9000.exe"}}),
-    Filter("objects", "contains", {"type": "file", "name": "HAL 9000.exe"}),
+    Filter("objects", "=", {"0": {"type": "file", "name": "HAL 9000.exe", "id": "file--fa1b868c-5fe2-5c85-8197-9674548379ec"}}),
+    Filter("objects", "contains", {"type": "file", "name": "HAL 9000.exe", "id": "file--fa1b868c-5fe2-5c85-8197-9674548379ec"}),
     Filter("labels", "contains", "heartbleed"),
 ]
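
The id is added to the filter values because these filters compare against the whole stored value: once the file SCO in the test data carries its deterministic id, a dict without that id no longer matches. A hedged illustration, assuming Filter is importable from stix2.datastore.filters as in these tests:

from stix2.datastore.filters import Filter

# The comparison dict must mirror every property of the stored object,
# including the auto-generated id, for "=" or "contains" to match.
file_dict = {
    "type": "file",
    "name": "HAL 9000.exe",
    "id": "file--fa1b868c-5fe2-5c85-8197-9674548379ec",
}
objects_filter = Filter("objects", "contains", file_dict)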

View File

@@ -25,6 +25,7 @@ EXPECTED = """{
     "objects": {
         "0": {
             "type": "file",
+            "id": "file--500d9a03-9d03-5c31-82b2-2be8aacec481",
             "name": "foo.exe"
         }
     }
@@ -64,10 +65,12 @@ EXPECTED_WITH_REF = """{
     "objects": {
         "0": {
             "type": "file",
+            "id": "file--500d9a03-9d03-5c31-82b2-2be8aacec481",
             "name": "foo.exe"
         },
         "1": {
             "type": "directory",
+            "id": "directory--ed959127-2df3-5999-99b6-df7614398c1c",
             "path": "/usr/home",
             "contains_refs": [
                 "0"
@@ -1391,3 +1394,17 @@ def test_objects_deprecation():
             },
         },
     )
+
+
+# def test_deterministic_id_same_extra_prop_vals():
+#     email_addr_1 = stix2.v21.EmailAddress(
+#         value="john@example.com",
+#         display_name="Johnny Doe"
+#     )
+#     email_addr_2 = stix2.v21.EmailAddress(
+#         value="john@example.com",
+#         display_name="Johnny Doe"
+#     )
+#     assert email_addr_1.id == email_addr_2.id
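
The ids baked into the EXPECTED strings are deterministic, so constructing the same SCO twice should reproduce them, which is also what the commented-out test above checks for EmailAddress. A rough usage sketch under that assumption:

import stix2.v21

file_1 = stix2.v21.File(name="foo.exe")
file_2 = stix2.v21.File(name="foo.exe")

# Both objects derive their id from the same contributing properties
assert file_1.id == file_2.id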

View File

@@ -41,7 +41,7 @@ class Artifact(_Observable):
     def _check_object_constraints(self):
         super(Artifact, self)._check_object_constraints()
-        self._check_mutually_exclusive_properties(['payload_bin', 'url'])
+        self._check_mutually_exclusive_properties(['payload_bin', 'url'], at_least_one=False)
         self._check_properties_dependency(['hashes'], ['url'])
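
Passing at_least_one=False keeps payload_bin and url mutually exclusive without also requiring that one of them be present. An illustrative sketch of what that flag toggles (not the library's implementation):

def check_mutually_exclusive(populated, exclusive_props, at_least_one=True):
    count = len(set(populated) & set(exclusive_props))
    if count > 1:
        raise ValueError("at most one of {} may be set".format(exclusive_props))
    if at_least_one and count == 0:
        raise ValueError("one of {} must be set".format(exclusive_props))

# With at_least_one=False, an Artifact populated with neither property passes
check_mutually_exclusive([], ["payload_bin", "url"], at_least_one=False)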