Improve autodoc output

- Add markings and sources
- Tidy up some docstrings
stix2.1
Chris Lenk 2017-09-22 10:01:00 -04:00
parent d7efd1f752
commit c0fd740e0a
20 changed files with 170 additions and 97 deletions

View File

@ -0,0 +1,5 @@
granular_markings
================================
.. automodule:: stix2.markings.granular_markings
   :members:

View File

@ -0,0 +1,5 @@
object_markings
==============================
.. automodule:: stix2.markings.object_markings
   :members:

View File

@ -0,0 +1,5 @@
utils
====================
.. automodule:: stix2.markings.utils
   :members:

View File

@ -0,0 +1,5 @@
filesystem
========================
.. automodule:: stix2.sources.filesystem
   :members:

View File

@ -0,0 +1,5 @@
filters
=====================
.. automodule:: stix2.sources.filters
   :members:

View File

@ -0,0 +1,5 @@
memory
====================
.. automodule:: stix2.sources.memory
   :members:

View File

@ -0,0 +1,5 @@
taxii
===================
.. automodule:: stix2.sources.taxii
   :members:

View File

@ -0,0 +1,5 @@
markings
==============
.. automodule:: stix2.markings
   :members:

View File

@ -0,0 +1,5 @@
sources
=============
.. automodule:: stix2.sources
   :members:

View File

@ -9,6 +9,7 @@ extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.autosummary',
    'sphinx.ext.napoleon',
    'sphinx.ext.todo',
]
autodoc_default_flags = [
    'show-inheritance',
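For reference, a minimal docs/conf.py sketch using these autodoc settings; everything beyond the extension list and the 'show-inheritance' flag shown in this hunk (the autosummary and napoleon options) is an assumption, not part of this commit.

    # docs/conf.py -- illustrative sketch; values other than the extension
    # list and 'show-inheritance' are assumptions, not from this commit.
    extensions = [
        'sphinx.ext.autodoc',
        'sphinx.ext.autosummary',
        'sphinx.ext.napoleon',
        'sphinx.ext.todo',
    ]
    autodoc_default_flags = [
        'show-inheritance',
    ]
    autosummary_generate = True          # assumed: generate stub pages for autosummary entries
    napoleon_google_docstring = True     # assumed: parse Google-style docstrings like those below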

View File

@ -8,10 +8,12 @@
    core
    environment
    exceptions
    markings
    observables
    patterns
    properties
    sdo
    sources
    sro
    utils
"""

View File

@ -3,6 +3,18 @@ Python STIX 2.0 Data Markings API.
These high-level functions will operate on both object-level markings and
granular markings unless otherwise noted in each of the functions.
.. autosummary::
   :toctree: api

   granular_markings
   object_markings
   utils

.. raw:: html

   <br/><hr/><br/>
"""
from stix2.markings import granular_markings, object_markings
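A hypothetical usage sketch of the high-level markings API described above; the function names add_markings() and get_markings() and their arguments are assumptions based on the module description, not shown in this diff.

    # Hypothetical sketch -- add_markings()/get_markings() are assumed names.
    from stix2 import Indicator
    from stix2.markings import add_markings, get_markings

    indicator = Indicator(labels=["malicious-activity"],
                          pattern="[file:hashes.MD5 = 'd41d8cd98f00b204e9800998ecf8427e']")
    # Object-level marking: no selectors supplied.
    marked = add_markings(indicator,
                          "marking-definition--613f2e26-407d-48c7-9eca-b8e91df99dc9")
    print(get_markings(marked))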

View File

@ -7,14 +7,14 @@ from stix2 import exceptions
def _evaluate_expression(obj, selector):
"""
Walks an SDO or SRO generating selectors to match against ``selector``. If
a match is found and the the value of this property is present in the
"""Walks an SDO or SRO generating selectors to match against ``selector``.
If a match is found and the value of this property is present in the
object, the matching value of the property will be returned.
Args:
obj: An SDO or SRO object.
selector: A string following the selector syntax.
selector (str): A string following the selector syntax.
Returns:
list: Values contained in matching property. Otherwise empty list.
@ -58,28 +58,26 @@ def convert_to_list(data):
def compress_markings(granular_markings):
"""
Compress granular markings list. If there is more than one marking
identifier matches. It will collapse into a single granular marking.
"""Compress granular markings list.
Examples:
Input:
[
{
"selectors": [
"description"
],
"marking_ref": "marking-definition--613f2e26-407d-48c7-9eca-b8e91df99dc9"
},
{
"selectors": [
"name"
],
"marking_ref": "marking-definition--613f2e26-407d-48c7-9eca-b8e91df99dc9"
}
]
If more than one granular marking shares the same marking identifier, they
will be collapsed into a single granular marking.
Output:
Example:
>>> compress_markings([
... {
... "selectors": [
... "description"
... ],
... "marking_ref": "marking-definition--613f2e26-407d-48c7-9eca-b8e91df99dc9"
... },
... {
... "selectors": [
... "name"
... ],
... "marking_ref": "marking-definition--613f2e26-407d-48c7-9eca-b8e91df99dc9"
... }
... ])
[
{
"selectors": [
@ -117,23 +115,21 @@ def compress_markings(granular_markings):
def expand_markings(granular_markings):
"""
Expands granular markings list. If there is more than one selector per
granular marking. It will be expanded using the same marking_ref.
"""Expands granular markings list.
Examples:
Input:
[
{
"selectors": [
"description",
"name"
],
"marking_ref": "marking-definition--613f2e26-407d-48c7-9eca-b8e91df99dc9"
}
]
If a granular marking has more than one selector, it will be expanded into
multiple markings that use the same marking_ref.
Output:
Example:
>>> expand_markings([
... {
... "selectors": [
... "description",
... "name"
... ],
... "marking_ref": "marking-definition--613f2e26-407d-48c7-9eca-b8e91df99dc9"
... }
... ])
[
{
"selectors": [
@ -174,15 +170,16 @@ def expand_markings(granular_markings):
def build_granular_marking(granular_marking):
"""Returns a dictionary with the required structure for a granular
marking"""
"""Returns a dictionary with the required structure for a granular marking.
"""
return {"granular_markings": expand_markings(granular_marking)}
def iterpath(obj, path=None):
"""
Generator which walks the input ``obj`` model. Each iteration yields a
tuple containing a list of ancestors and the property value.
"""Generator which walks the input ``obj`` model.
Each iteration yields a tuple containing a list of ancestors and the
property value.
Args:
obj: An SDO or SRO object.
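As a rough usage sketch of the iterpath() generator described above (only the yielded (ancestors, value) shape is taken from the docstring; the rest is illustrative):

    from stix2 import Indicator
    from stix2.markings.utils import iterpath

    indicator = Indicator(labels=["malicious-activity"],
                          pattern="[file:hashes.MD5 = 'd41d8cd98f00b204e9800998ecf8427e']")
    # Each iteration yields (list of ancestors, property value), per the docstring.
    for ancestors, value in iterpath(indicator):
        print(ancestors, value)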

View File

@ -1,3 +1,5 @@
"""Classes for representing properties of STIX Objects and Cyber Observables.
"""
import base64
import binascii
import collections

View File

@ -288,30 +288,28 @@ class Vulnerability(_STIXBase):
def CustomObject(type='x-custom-type', properties=None):
"""Custom STIX Object type decorator
"""Custom STIX Object type decorator.
Example 1:
@CustomObject('x-type-name', [
('property1', StringProperty(required=True)),
('property2', IntegerProperty()),
])
class MyNewObjectType():
pass
Example:
>>> @CustomObject('x-type-name', [
... ('property1', StringProperty(required=True)),
... ('property2', IntegerProperty()),
... ])
... class MyNewObjectType():
... pass
Supply an __init__() function to add any special validations to the custom
type. Don't call super().__init__() though - doing so will cause an error.
Example 2:
@CustomObject('x-type-name', [
('property1', StringProperty(required=True)),
('property2', IntegerProperty()),
])
class MyNewObjectType():
def __init__(self, property2=None, **kwargs):
if property2 and property2 < 10:
raise ValueError("'property2' is too small.")
Example:
>>> @CustomObject('x-type-name', [
... ('property1', StringProperty(required=True)),
... ('property2', IntegerProperty()),
... ])
... class MyNewObjectType():
... def __init__(self, property2=None, **kwargs):
... if property2 and property2 < 10:
... raise ValueError("'property2' is too small.")
"""
def custom_builder(cls):

View File

@ -1,5 +1,4 @@
"""
Python STIX 2.0 Sources
"""Python STIX 2.0 Sources
Classes:
DataStore
@ -7,13 +6,26 @@ Classes:
DataSource
CompositeDataSource
TODO:Test everything
TODO:
Test everything
Notes:
Note:
add_filter(), remove_filter(), deduplicate() - if these functions remain
exactly the same for DataSource, DataSink, CompositeDataSource, etc.,
consider making them an interface to inherit.
.. autosummary::
   :toctree: api

   filters
   filesystem
   memory
   taxii

.. raw:: html

   <br/><hr/><br/>
"""
import uuid
@ -29,8 +41,7 @@ def make_id():
class DataStore(object):
"""
An implementer will create a concrete subclass from
"""An implementer will create a concrete subclass from
this abstract class for the specific data store.
Attributes:
@ -47,7 +58,7 @@ class DataStore(object):
def get(self, stix_id):
"""Retrieve the most recent version of a single STIX object by ID.
Notes:
Note:
Translate API get() call to the appropriate DataSource call.
Args:
@ -79,7 +90,7 @@ class DataStore(object):
def query(self, query):
"""Retrieve STIX objects matching a set of filters.
Notes:
Note:
Implement the specific data source API calls, processing,
functionality required for retrieving query from the data source.
@ -97,7 +108,7 @@ class DataStore(object):
def add(self, stix_objs):
"""Store STIX objects.
Notes:
Note:
Translate add() to the appropriate DataSink call().
Args:
@ -109,9 +120,9 @@ class DataStore(object):
class DataSink(object):
"""
Abstract class for defining a data sink. Intended for subclassing into
different sink components.
"""Abstract class for defining a data sink.
Intended for subclassing into different sink components.
Attributes:
id (str): A unique UUIDv4 to identify this DataSink.
@ -123,7 +134,7 @@ class DataSink(object):
def add(self, stix_objs):
"""Store STIX objects.
Notes:
Note:
Implement the specific data sink API calls, processing,
functionality required for adding data to the sink
@ -136,9 +147,9 @@ class DataSink(object):
class DataSource(object):
"""
Abstract class for defining a data source. Intended for subclassing into
different source components.
"""Abstract class for defining a data source.
Intended for subclassing into different source components.
Attributes:
id (str): A unique UUIDv4 to identify this DataSource.
@ -171,7 +182,7 @@ class DataSource(object):
def all_versions(self, stix_id, _composite_filters=None):
"""
Notes:
Note:
Similar to get() except returns list of all object versions of
the specified "id". In addition, implement the specific data
source API calls, processing, functionality required for retrieving
@ -294,7 +305,7 @@ class DataSource(object):
return filtered_stix_objs
def deduplicate(self, stix_obj_list):
"""Deduplicate a list of STIX objects to a unique set
"""Deduplicate a list of STIX objects to a unique set.
Reduces a set of STIX objects to a unique set by looking
at 'id' and 'modified' fields - as a unique object version
@ -348,7 +359,7 @@ class CompositeDataSource(DataSource):
function does a federated retrieval and consolidation of the data
returned from all the STIX data sources.
Notes:
Note:
A composite data source will pass its attached filters to
each configured data source, pushing filtering to them to handle.
@ -387,7 +398,7 @@ class CompositeDataSource(DataSource):
Federated all_versions retrieve method - iterates through all STIX data
sources defined in "data_sources"
Notes:
Note:
A composite data source will pass its attached filters to
each configured data source, pushing filtering to them to handle
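Since DataSource, DataSink, and DataStore are abstract, an implementer subclasses them as the docstrings above describe. The following is only an illustrative sketch: the all_versions() signature mirrors the one shown in this diff, but the constructor behavior and the get()/query() signatures are assumptions.

    # Illustrative only -- a toy DataSource backed by a dict; signatures and
    # base-class behavior beyond what this diff shows are assumptions.
    from stix2.sources import DataSource

    class DictDataSource(DataSource):
        """Toy source that serves STIX objects from a {id: object} dict."""

        def __init__(self, objs=None):
            super(DictDataSource, self).__init__()
            self._objs = objs or {}

        def get(self, stix_id, _composite_filters=None):
            return self._objs.get(stix_id)

        def all_versions(self, stix_id, _composite_filters=None):
            obj = self.get(stix_id, _composite_filters)
            return [obj] if obj else []

        def query(self, query=None, _composite_filters=None):
            return list(self._objs.values())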

View File

@ -6,7 +6,8 @@ Classes:
FileSystemSink
FileSystemSource
TODO: Test everything
TODO:
Test everything
"""
import json
@ -88,7 +89,7 @@ class FileSystemSource(DataSource):
def all_versions(self, stix_id, _composite_filters=None):
"""
Notes:
Note:
Since FileSystem sources/sinks don't handle multiple versions
of a STIX object, this operation is unnecessary. Pass call to get().
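A hypothetical FileSystemSource usage sketch; the stix_dir constructor argument, the directory path, and the placeholder ID are assumptions not shown in this diff.

    # Hypothetical -- constructor argument name, path, and ID are placeholders.
    from stix2.sources.filesystem import FileSystemSource

    fs_source = FileSystemSource(stix_dir="/tmp/stix_data")
    obj = fs_source.get("indicator--11111111-1111-4111-8111-111111111111")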

View File

@ -4,10 +4,11 @@ Filters for Python STIX 2.0 DataSources, DataSinks, DataStores
Classes:
Filter
TODO: The script at the bottom of the module works (to capture
all the callable filter methods), however it causes this module
to be imported by itself twice. Not sure how big of deal that is,
or if cleaner solution possible.
TODO:
The script at the bottom of the module works (to capture
all the callable filter methods), however it causes this module
to be imported by itself twice. Not sure how big of a deal that is,
or if a cleaner solution is possible.
"""
import collections
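A small sketch of constructing a Filter; the (field, op, value) positional signature is an assumption based on the class list above, not shown in this hunk.

    # Assumed Filter(field, op, value) signature.
    from stix2.sources.filters import Filter

    flt = Filter("type", "=", "indicator")
    # A filter like this would typically be passed to a DataSource/DataStore query.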

View File

@ -6,12 +6,14 @@ Classes:
MemorySink
MemorySource
TODO: Test everything.
TODO:
Test everything.
TODO: Use deduplicate() calls only when memory corpus is dirty (been added to)
can save a lot of time for successive queries
TODO:
Use deduplicate() calls only when the memory corpus is dirty (i.e., has been
added to); this can save a lot of time for successive queries.
Notes:
Note:
Not worrying about STIX versioning. The in-memory STIX data at any time
will only hold one version of a STIX object. As such, when save() is called,
the single versions of all the STIX objects are what is written to file.
@ -47,7 +49,7 @@ class MemoryStore(DataStore):
"""
def __init__(self, stix_data=None):
"""
Notes:
Note:
It doesn't make sense to create a MemoryStore by passing
in existing MemorySource and MemorySink because there could
be data concurrency issues. Just as easy to create new MemoryStore.
@ -144,7 +146,7 @@ class MemorySource(DataSource):
def all_versions(self, stix_id, _composite_filters=None):
"""
Notes:
Note:
Since Memory sources/sinks don't handle multiple versions of a
STIX object, this operation is unnecessary. Translate call to get().
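A minimal MemoryStore sketch consistent with the DataStore interface described earlier; the constructor and the exact return type of get() are assumptions beyond that interface.

    # Sketch only -- assumes MemoryStore follows the DataStore add()/get() interface.
    from stix2 import Indicator
    from stix2.sources.memory import MemoryStore

    store = MemoryStore()
    indicator = Indicator(labels=["malicious-activity"],
                          pattern="[file:hashes.MD5 = 'd41d8cd98f00b204e9800998ecf8427e']")
    store.add(indicator)
    assert store.get(indicator.id)["id"] == indicator.id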

View File

@ -6,7 +6,8 @@ Classes:
TAXIICollectionSink
TAXIICollectionSource
TODO: Test everything
TODO:
Test everything
"""
@ -127,7 +128,7 @@ class TAXIICollectionSource(DataSource):
def _parse_taxii_filters(self, query):
"""Parse out TAXII filters that the TAXII server can filter on.
Notes:
Note:
For instance - "?match[type]=indicator,sighting" should be in a
query dict as follows: