From a85c0cfe6bc408799a5b3db68daf4eed4eb37774 Mon Sep 17 00:00:00 2001 From: Tom Clark Date: Thu, 13 Aug 2020 19:58:43 +0100 Subject: [PATCH 01/14] IMP Added analytics to docs to try and see what's helpful --- docs/source/_ext/googleanalytics.py | 29 +++++++++++++++++++++++++++++ docs/source/conf.py | 21 +++++++++++++++++++-- 2 files changed, 48 insertions(+), 2 deletions(-) create mode 100644 docs/source/_ext/googleanalytics.py diff --git a/docs/source/_ext/googleanalytics.py b/docs/source/_ext/googleanalytics.py new file mode 100644 index 0000000..891df1b --- /dev/null +++ b/docs/source/_ext/googleanalytics.py @@ -0,0 +1,29 @@ +from sphinx.errors import ExtensionError + + +def add_ga_javascript(app, pagename, templatename, context, doctree): + if app.config.googleanalytics_enabled: + id = app.config.googleanalytics_id + metatags = context.get('metatags', '') + metatags += "\n" + metatags += f'\n' + metatags += "\n" + context['metatags'] = metatags + + +def check_config(app): + if not app.config.googleanalytics_id: + raise ExtensionError("'googleanalytics_id' config value must be set for ga statistics to function properly.") + + +def setup(app): + app.add_config_value('googleanalytics_id', '', 'html') + app.add_config_value('googleanalytics_enabled', True, 'html') + app.connect('html-page-context', add_ga_javascript) + app.connect('builder-inited', check_config) + return {'version': '0.1'} diff --git a/docs/source/conf.py b/docs/source/conf.py index 8b0371f..a15d42f 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -11,12 +11,21 @@ # serve to show the default. import os +import sys import sphinx_rtd_theme +from distutils.util import strtobool + + +def str2bool(value): + """ Allows for parsing boolean environment variables like 'True' and 'False' correctly + """ + return bool(strtobool(value)) + # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. 
If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. -# sys.path.insert(0, os.path.abspath('.')) +sys.path.insert(0, os.path.abspath("./_ext")) # -- General configuration ----------------------------------------------------- @@ -33,7 +42,8 @@ 'sphinx.ext.mathjax', 'sphinx.ext.ifconfig', 'breathe', - 'exhale' + 'exhale', + 'googleanalytics' ] # Add any paths that contain templates here, relative to this directory. @@ -125,6 +135,13 @@ # Tell sphinx what the pygments highlight language should be highlight_language = 'python' +# -- Google Analytics Configuration -------------------------------------------- + +# Only add google analytics when building on ReadTheDocs, +# to avoid clicks from development pages adding to analytics +googleanalytics_id = "UA-43965341-6" +googleanalytics_enabled = True +# str2bool(os.getenv("READTHEDOCS", 'False')) # -- Options for HTML output --------------------------------------------------- From cd1213e52409307982a19f53d1d383f3abf17512 Mon Sep 17 00:00:00 2001 From: Tom Clark Date: Sat, 15 Aug 2020 15:11:52 +0100 Subject: [PATCH 02/14] IMP Added an accordion extension allowing us to create drowdowns --- docs/source/_ext/sphinx_accordion/README.md | 21 + docs/source/_ext/sphinx_accordion/__init__.py | 0 .../_ext/sphinx_accordion/accordion.css | 11 + .../source/_ext/sphinx_accordion/accordion.js | 46 ++ .../source/_ext/sphinx_accordion/accordion.py | 256 ++++++++ .../semantic-ui-2.4.2/.versions | 3 + .../semantic-ui-2.4.2/accordion.css | 253 ++++++++ .../semantic-ui-2.4.2/accordion.js | 613 ++++++++++++++++++ .../semantic-ui-2.4.2/accordion.min.css | 9 + .../semantic-ui-2.4.2/accordion.min.js | 1 + docs/source/conf.py | 3 +- 11 files changed, 1215 insertions(+), 1 deletion(-) create mode 100644 docs/source/_ext/sphinx_accordion/README.md create mode 100644 docs/source/_ext/sphinx_accordion/__init__.py create mode 100644 docs/source/_ext/sphinx_accordion/accordion.css create 
mode 100644 docs/source/_ext/sphinx_accordion/accordion.js create mode 100644 docs/source/_ext/sphinx_accordion/accordion.py create mode 100755 docs/source/_ext/sphinx_accordion/semantic-ui-2.4.2/.versions create mode 100755 docs/source/_ext/sphinx_accordion/semantic-ui-2.4.2/accordion.css create mode 100755 docs/source/_ext/sphinx_accordion/semantic-ui-2.4.2/accordion.js create mode 100755 docs/source/_ext/sphinx_accordion/semantic-ui-2.4.2/accordion.min.css create mode 100755 docs/source/_ext/sphinx_accordion/semantic-ui-2.4.2/accordion.min.js diff --git a/docs/source/_ext/sphinx_accordion/README.md b/docs/source/_ext/sphinx_accordion/README.md new file mode 100644 index 0000000..a4fc72c --- /dev/null +++ b/docs/source/_ext/sphinx_accordion/README.md @@ -0,0 +1,21 @@ + + +``` +extensions = [ + ... + 'sphinx_accordion.accordion' + ... +] +``` + +``` +.. accordion:: + + .. accordion-row:: The Title + + The Contents + + .. accordion-row:: The Second Title + + The Contents 2 +``` diff --git a/docs/source/_ext/sphinx_accordion/__init__.py b/docs/source/_ext/sphinx_accordion/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/docs/source/_ext/sphinx_accordion/accordion.css b/docs/source/_ext/sphinx_accordion/accordion.css new file mode 100644 index 0000000..861fb88 --- /dev/null +++ b/docs/source/_ext/sphinx_accordion/accordion.css @@ -0,0 +1,11 @@ +.sphinx-accordion.accordion { + margin-bottom: 1.75em; +} + +.sphinx-accordion.title p { + display: inline-block; + margin-top: 8px; + margin-right: 0px; + margin-bottom: 8px; + margin-left: 0px; +} diff --git a/docs/source/_ext/sphinx_accordion/accordion.js b/docs/source/_ext/sphinx_accordion/accordion.js new file mode 100644 index 0000000..580c322 --- /dev/null +++ b/docs/source/_ext/sphinx_accordion/accordion.js @@ -0,0 +1,46 @@ +// if (!String.prototype.startsWith) { +// Object.defineProperty(String.prototype, 'startsWith', { +// value: function(search, pos) { +// pos = !pos || pos < 0 ? 
0 : +pos; +// return this.substring(pos, pos + search.length) === search; +// } +// }); +// } + +$(document).ready(function(){console.log('FFS')}); + +$(function() { + console.log('SOMETHING HAPPENS MAYBE'); + + // We store the data-row values as sphinx-data- + // Add data-row attribute with the extracted value + $('.sphinx-accordion.title').each(function() { + const this1 = $(this); + const prefix = 'sphinx-accordion-title-'; + const classes = this1.attr('class').split(/\s+/); + $.each(classes, function(idx, clazz) { + if (clazz.startsWith(prefix)) { + this1.attr('data-row', clazz.substring(prefix.length)); + } + }); + + const data_row = this1.attr('data-row'); + + this1.on('click', function() { + // Find offset in view + const offset = (this1.offset().top - $(window).scrollTop()); + + // Toggle active class on this subsequent sibling + if (this1.hasClass('active')) { + this1.removeClass('active'); + this1.next().removeClass('active'); + } else { + this1.addClass('active'); + this1.next().addClass('active'); + } + + // Keep tab with the original view offset + $(window).scrollTop(this1.offset().top - offset); + }); + }); +}); \ No newline at end of file diff --git a/docs/source/_ext/sphinx_accordion/accordion.py b/docs/source/_ext/sphinx_accordion/accordion.py new file mode 100644 index 0000000..e3b670f --- /dev/null +++ b/docs/source/_ext/sphinx_accordion/accordion.py @@ -0,0 +1,256 @@ +""" Accordion dropdown for Sphinx, with HTML builder """ + +import json +import posixpath +import os +from docutils import nodes +from docutils.parsers.rst import Directive +from pkg_resources import resource_filename +from pygments.lexers import get_all_lexers +from sphinx.util.osutil import copyfile +from sphinx.util import logging + +FILES = [ + 'semantic-ui-2.4.2/accordion.css', + 'semantic-ui-2.4.2/accordion.js', + 'accordion.css', + 'accordion.js', +] + + +LEXER_MAP = {} +for lexer in get_all_lexers(): + for short_name in lexer[1]: + LEXER_MAP[short_name] = lexer[0] + + +def 
get_compatible_builders(app): + builders = [ + 'html', + 'singlehtml', + 'dirhtml', + 'readthedocs', + 'readthedocsdirhtml', + 'readthedocssinglehtml', + 'readthedocssinglehtmllocalmedia', + 'spelling' + ] + builders.extend(app.config['sphinx_tabs_valid_builders']) + return builders + + +class AccordionDirective(Directive): + """ Top-level accordion directive """ + + has_content = True + + def run(self): + """ Parse an accordion directive """ + self.assert_has_content() + env = self.state.document.settings.env + + node = nodes.container() + node['classes'] = ['sphinx-accordion', 'ui', 'styled', 'fluid', 'accordion'] + + if 'next_accordion_id' not in env.temp_data: + env.temp_data['next_accordion_id'] = 0 + if 'accordion_stack' not in env.temp_data: + env.temp_data['accordion_stack'] = [] + + accordion_id = env.temp_data['next_accordion_id'] + accordion_key = 'accordion_%d' % accordion_id + env.temp_data['next_accordion_id'] += 1 + env.temp_data['accordion_stack'].append(accordion_id) + + env.temp_data[accordion_key] = {} + env.temp_data[accordion_key]['row_ids'] = [] + env.temp_data[accordion_key]['row_titles'] = [] + env.temp_data[accordion_key]['is_first_row'] = True + + self.state.nested_parse(self.content, self.content_offset, node) + + if env.app.builder.name in get_compatible_builders(env.app): + title_nodes = [] + row_ids = env.temp_data[accordion_key]['row_ids'] + row_titles = env.temp_data[accordion_key]['row_titles'] + for idx, [data_row, row_name] in enumerate(row_titles): + title_node = nodes.container() + title_node.tagname = 'div' + title_node['classes'] = ['sphinx-accordion', 'title'] + title_node['classes'].append(f'sphinx-accordion-title-{accordion_id}-{row_ids[idx]}') + title_node += row_name.children + icon_node = nodes.inline() + icon_node.tagname = 'i' + icon_node['classes'] = ['dropdown', 'icon'] + # Access the first child, we don't want the container that somehow gets generated + title_node.children.insert(0, icon_node) + 
title_nodes.append(title_node) + + node.children = [child for pair in zip(title_nodes, node.children) for child in pair] + + env.temp_data['accordion_stack'].pop() + return [node] + + +class AccordionRowDirective(Directive): + """ AccordionRow directive, for adding a row to an accordion """ + + has_content = True + + def run(self): + """ Parse a row directive """ + self.assert_has_content() + env = self.state.document.settings.env + + accordion_id = env.temp_data['accordion_stack'][-1] + accordion_key = 'accordion_%d' % accordion_id + + args = self.content[0].strip() + if args.startswith('{'): + try: + args = json.loads(args) + self.content.trim_start(1) + except ValueError: + args = {} + else: + args = {} + + row_name = nodes.container() + self.state.nested_parse(self.content[:1], self.content_offset, row_name) + args['row_name'] = row_name + + include_accordion_id_in_data_row = False + if 'row_id' not in args: + args['row_id'] = env.new_serialno(accordion_key) + include_accordion_id_in_data_row = True + i = 1 + while args['row_id'] in env.temp_data[accordion_key]['row_ids']: + args['row_id'] = '%s-%d' % (args['row_id'], i) + i += 1 + env.temp_data[accordion_key]['row_ids'].append(args['row_id']) + + data_row = str(args['row_id']) + if include_accordion_id_in_data_row: + data_row = '%d-%s' % (accordion_id, data_row) + data_row = "sphinx-accordion-content-{}".format(data_row) + + env.temp_data[accordion_key]['row_titles'].append( + (data_row, args['row_name']) + ) + + text = '\n'.join(self.content) + node = nodes.container(text) + classes = 'sphinx-accordion content' + node['classes'] = classes.split(' ') + node['classes'].extend(args.get('classes', [])) + node['classes'].append(data_row) + + self.state.nested_parse(self.content[2:], self.content_offset, node) + + if env.app.builder.name not in get_compatible_builders(env.app): + outer_node = nodes.container() + row = nodes.container() + row.tagname = 'a' + row['classes'] = ['item'] + row += row_name + 
outer_node.append(row) + outer_node.append(node) + return [outer_node] + + return [node] + + +class _FindAccordionDirectiveVisitor(nodes.NodeVisitor): + """ Visitor pattern than looks for a sphinx accordion directive in a document """ + def __init__(self, document): + nodes.NodeVisitor.__init__(self, document) + self._found = False + + def unknown_visit(self, node): + if not self._found and isinstance(node, nodes.container) and 'classes' in node and isinstance(node['classes'], list): + self._found = 'sphinx-accordion' in node['classes'] + + @property + def found_accordion_directive(self): + """ Return whether a sphinx accordion directive was found """ + return self._found + + +def update_context(app, pagename, templatename, context, doctree): + """ Remove sphinx-accordion CSS and JS asset files if not used in a page """ + if doctree is None: + return + visitor = _FindAccordionDirectiveVisitor(doctree) + doctree.walk(visitor) + if not visitor.found_accordion_directive: + paths = [posixpath.join('_static', 'sphinx_accordion/' + f) for f in FILES] + if 'css_files' in context: + context['css_files'] = context['css_files'][:] + for path in paths: + if path.endswith('.css') and path in context['css_files']: + context['css_files'].remove(path) + if 'script_files' in context: + context['script_files'] = context['script_files'][:] + for path in paths: + if path.endswith('.js') and path in context['script_files']: + context['script_files'].remove(path) + + +def copy_assets(app, exception): + """ Copy asset files to the output """ + if 'getLogger' in dir(logging): + log = logging.getLogger(__name__).info + warn = logging.getLogger(__name__).warning + else: + log = app.info + warn = app.warning + builders = get_compatible_builders(app) + if exception: + return + if app.builder.name not in builders: + if not app.config['sphinx_accordion_nowarn']: + warn( + 'Not copying accordion assets! 
Not compatible with %s builder' % + app.builder.name) + return + + log('Copying accordion assets') + + installdir = os.path.join(app.builder.outdir, '_static', 'sphinx_accordion') + + for path in FILES: + source = resource_filename('sphinx_accordion', path) + dest = os.path.join(installdir, path) + destdir = os.path.dirname(dest) + if not os.path.exists(destdir): + os.makedirs(destdir) + + copyfile(source, dest) + + +def setup(app): + """ Set up the plugin """ + app.add_config_value('sphinx_accordion_nowarn', False, '') + app.add_config_value('sphinx_accordion_valid_builders', [], '') + app.add_directive('accordion', AccordionDirective) + app.add_directive('accordion-row', AccordionRowDirective) + + for path in ['sphinx_accordion/' + f for f in FILES]: + if path.endswith('.css'): + if 'add_css_file' in dir(app): + app.add_css_file(path) + else: + app.add_stylesheet(path) + if path.endswith('.js'): + if 'add_script_file' in dir(app): + app.add_script_file(path) + else: + app.add_javascript(path) + + app.connect('html-page-context', update_context) + app.connect('build-finished', copy_assets) + + return { + 'parallel_read_safe': True, + 'parallel_write_safe': True, + } diff --git a/docs/source/_ext/sphinx_accordion/semantic-ui-2.4.2/.versions b/docs/source/_ext/sphinx_accordion/semantic-ui-2.4.2/.versions new file mode 100755 index 0000000..01b3d82 --- /dev/null +++ b/docs/source/_ext/sphinx_accordion/semantic-ui-2.4.2/.versions @@ -0,0 +1,3 @@ +meteor@1.1.6 +semantic:ui-accordion@2.1.3 +underscore@1.0.3 diff --git a/docs/source/_ext/sphinx_accordion/semantic-ui-2.4.2/accordion.css b/docs/source/_ext/sphinx_accordion/semantic-ui-2.4.2/accordion.css new file mode 100755 index 0000000..e9b104d --- /dev/null +++ b/docs/source/_ext/sphinx_accordion/semantic-ui-2.4.2/accordion.css @@ -0,0 +1,253 @@ +/*! 
+ * # Semantic UI 2.4.1 - Accordion + * http://github.com/semantic-org/semantic-ui/ + * + * + * Released under the MIT license + * http://opensource.org/licenses/MIT + * + */ + + +/******************************* + Accordion +*******************************/ + +.ui.accordion, +.ui.accordion .accordion { + max-width: 100%; +} +.ui.accordion .accordion { + margin: 1em 0em 0em; + padding: 0em; +} + +/* Title */ +.ui.accordion .title, +.ui.accordion .accordion .title { + cursor: pointer; +} + +/* Default Styling */ +.ui.accordion .title:not(.ui) { + padding: 0.5em 0em; + font-family: 'Lato', 'Helvetica Neue', Arial, Helvetica, sans-serif; + font-size: 1em; + color: rgba(0, 0, 0, 0.87); +} + +/* Content */ +.ui.accordion .title ~ .content, +.ui.accordion .accordion .title ~ .content { + display: none; +} + +/* Default Styling */ +.ui.accordion:not(.styled) .title ~ .content:not(.ui), +.ui.accordion:not(.styled) .accordion .title ~ .content:not(.ui) { + margin: ''; + padding: 0.5em 0em 1em; +} +.ui.accordion:not(.styled) .title ~ .content:not(.ui):last-child { + padding-bottom: 0em; +} + +/* Arrow */ +.ui.accordion .title .dropdown.icon, +.ui.accordion .accordion .title .dropdown.icon { + display: inline-block; + float: none; + opacity: 1; + width: 1.25em; + height: 1em; + margin: 0em 0.25rem 0em 0rem; + padding: 0em; + font-size: 1em; + -webkit-transition: opacity 0.1s ease, -webkit-transform 0.1s ease; + transition: opacity 0.1s ease, -webkit-transform 0.1s ease; + transition: transform 0.1s ease, opacity 0.1s ease; + transition: transform 0.1s ease, opacity 0.1s ease, -webkit-transform 0.1s ease; + vertical-align: baseline; + -webkit-transform: none; + transform: none; +} + +/*-------------- + Coupling +---------------*/ + + +/* Menu */ +.ui.accordion.menu .item .title { + display: block; + padding: 0em; +} +.ui.accordion.menu .item .title > .dropdown.icon { + float: right; + margin: 0.21425em 0em 0em 1em; + -webkit-transform: rotate(180deg); + transform: 
rotate(180deg); +} + +/* Header */ +.ui.accordion .ui.header .dropdown.icon { + font-size: 1em; + margin: 0em 0.25rem 0em 0rem; +} + + +/******************************* + States +*******************************/ + +.ui.accordion .active.title .dropdown.icon, +.ui.accordion .accordion .active.title .dropdown.icon { + -webkit-transform: rotate(90deg); + transform: rotate(90deg); +} +.ui.accordion.menu .item .active.title > .dropdown.icon { + -webkit-transform: rotate(90deg); + transform: rotate(90deg); +} + + +/******************************* + Types +*******************************/ + + +/*-------------- + Styled +---------------*/ + +.ui.styled.accordion { + width: 600px; +} +.ui.styled.accordion, +.ui.styled.accordion .accordion { + border-radius: 0.28571429rem; + background: #FFFFFF; + -webkit-box-shadow: 0px 1px 2px 0 rgba(34, 36, 38, 0.15), 0px 0px 0px 1px rgba(34, 36, 38, 0.15); + box-shadow: 0px 1px 2px 0 rgba(34, 36, 38, 0.15), 0px 0px 0px 1px rgba(34, 36, 38, 0.15); +} +.ui.styled.accordion .title, +.ui.styled.accordion .accordion .title { + margin: 0em; + padding: 0.75em 1em; + color: rgba(0, 0, 0, 0.4); + font-weight: bold; + border-top: 1px solid rgba(34, 36, 38, 0.15); + -webkit-transition: background 0.1s ease, color 0.1s ease; + transition: background 0.1s ease, color 0.1s ease; +} +.ui.styled.accordion > .title:first-child, +.ui.styled.accordion .accordion .title:first-child { + border-top: none; +} + +/* Content */ +.ui.styled.accordion .content, +.ui.styled.accordion .accordion .content { + margin: 0em; + padding: 0.5em 1em 1.5em; +} +.ui.styled.accordion .accordion .content { + padding: 0em; + padding: 0.5em 1em 1.5em; +} + +/* Hover */ +.ui.styled.accordion .title:hover, +.ui.styled.accordion .active.title, +.ui.styled.accordion .accordion .title:hover, +.ui.styled.accordion .accordion .active.title { + background: transparent; + color: rgba(0, 0, 0, 0.87); +} +.ui.styled.accordion .accordion .title:hover, +.ui.styled.accordion .accordion 
.active.title { + background: transparent; + color: rgba(0, 0, 0, 0.87); +} + +/* Active */ +.ui.styled.accordion .active.title { + background: transparent; + color: rgba(0, 0, 0, 0.95); +} +.ui.styled.accordion .accordion .active.title { + background: transparent; + color: rgba(0, 0, 0, 0.95); +} + + +/******************************* + States +*******************************/ + + +/*-------------- + Active +---------------*/ + +.ui.accordion .active.content, +.ui.accordion .accordion .active.content { + display: block; +} + + +/******************************* + Variations +*******************************/ + + +/*-------------- + Fluid +---------------*/ + +.ui.fluid.accordion, +.ui.fluid.accordion .accordion { + width: 100%; +} + +/*-------------- + Inverted +---------------*/ + +.ui.inverted.accordion .title:not(.ui) { + color: rgba(255, 255, 255, 0.9); +} + + +/******************************* + Theme Overrides +*******************************/ + +@font-face { + font-family: 'Accordion'; + src: 
url(data:application/x-font-ttf;charset=utf-8;base64,AAEAAAALAIAAAwAwT1MvMggjB5AAAAC8AAAAYGNtYXAPfOIKAAABHAAAAExnYXNwAAAAEAAAAWgAAAAIZ2x5Zryj6HgAAAFwAAAAyGhlYWT/0IhHAAACOAAAADZoaGVhApkB5wAAAnAAAAAkaG10eAJuABIAAAKUAAAAGGxvY2EAjABWAAACrAAAAA5tYXhwAAgAFgAAArwAAAAgbmFtZfC1n04AAALcAAABPHBvc3QAAwAAAAAEGAAAACAAAwIAAZAABQAAAUwBZgAAAEcBTAFmAAAA9QAZAIQAAAAAAAAAAAAAAAAAAAABEAAAAAAAAAAAAAAAAAAAAABAAADw2gHg/+D/4AHgACAAAAABAAAAAAAAAAAAAAAgAAAAAAACAAAAAwAAABQAAwABAAAAFAAEADgAAAAKAAgAAgACAAEAIPDa//3//wAAAAAAIPDZ//3//wAB/+MPKwADAAEAAAAAAAAAAAAAAAEAAf//AA8AAQAAAAAAAAAAAAIAADc5AQAAAAABAAAAAAAAAAAAAgAANzkBAAAAAAEAAAAAAAAAAAACAAA3OQEAAAAAAQASAEkAtwFuABMAADc0PwE2FzYXFh0BFAcGJwYvASY1EgaABQgHBQYGBQcIBYAG2wcGfwcBAQcECf8IBAcBAQd/BgYAAAAAAQAAAEkApQFuABMAADcRNDc2MzIfARYVFA8BBiMiJyY1AAUGBwgFgAYGgAUIBwYFWwEACAUGBoAFCAcFgAYGBQcAAAABAAAAAQAAqWYls18PPPUACwIAAAAAAM/9o+4AAAAAz/2j7gAAAAAAtwFuAAAACAACAAAAAAAAAAEAAAHg/+AAAAIAAAAAAAC3AAEAAAAAAAAAAAAAAAAAAAAGAAAAAAAAAAAAAAAAAQAAAAC3ABIAtwAAAAAAAAAKABQAHgBCAGQAAAABAAAABgAUAAEAAAAAAAIAAAAAAAAAAAAAAAAAAAAAAAAADgCuAAEAAAAAAAEADAAAAAEAAAAAAAIADgBAAAEAAAAAAAMADAAiAAEAAAAAAAQADABOAAEAAAAAAAUAFgAMAAEAAAAAAAYABgAuAAEAAAAAAAoANABaAAMAAQQJAAEADAAAAAMAAQQJAAIADgBAAAMAAQQJAAMADAAiAAMAAQQJAAQADABOAAMAAQQJAAUAFgAMAAMAAQQJAAYADAA0AAMAAQQJAAoANABaAHIAYQB0AGkAbgBnAFYAZQByAHMAaQBvAG4AIAAxAC4AMAByAGEAdABpAG4AZ3JhdGluZwByAGEAdABpAG4AZwBSAGUAZwB1AGwAYQByAHIAYQB0AGkAbgBnAEYAbwBuAHQAIABnAGUAbgBlAHIAYQB0AGUAZAAgAGIAeQAgAEkAYwBvAE0AbwBvAG4ALgADAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA) format('truetype'), 
url(data:application/font-woff;charset=utf-8;base64,d09GRk9UVE8AAASwAAoAAAAABGgAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAABDRkYgAAAA9AAAAS0AAAEtFpovuE9TLzIAAAIkAAAAYAAAAGAIIweQY21hcAAAAoQAAABMAAAATA984gpnYXNwAAAC0AAAAAgAAAAIAAAAEGhlYWQAAALYAAAANgAAADb/0IhHaGhlYQAAAxAAAAAkAAAAJAKZAedobXR4AAADNAAAABgAAAAYAm4AEm1heHAAAANMAAAABgAAAAYABlAAbmFtZQAAA1QAAAE8AAABPPC1n05wb3N0AAAEkAAAACAAAAAgAAMAAAEABAQAAQEBB3JhdGluZwABAgABADr4HAL4GwP4GAQeCgAZU/+Lix4KABlT/4uLDAeLa/iU+HQFHQAAAHkPHQAAAH4RHQAAAAkdAAABJBIABwEBBw0PERQZHnJhdGluZ3JhdGluZ3UwdTF1MjB1RjBEOXVGMERBAAACAYkABAAGAQEEBwoNVp38lA78lA78lA77lA773Z33bxWLkI2Qj44I9xT3FAWOj5CNkIuQi4+JjoePiI2Gi4YIi/uUBYuGiYeHiIiHh4mGi4aLho2Ijwj7FPcUBYeOiY+LkAgO+92L5hWL95QFi5CNkI6Oj4+PjZCLkIuQiY6HCPcU+xQFj4iNhouGi4aJh4eICPsU+xQFiIeGiYaLhouHjYePiI6Jj4uQCA74lBT4lBWLDAoAAAAAAwIAAZAABQAAAUwBZgAAAEcBTAFmAAAA9QAZAIQAAAAAAAAAAAAAAAAAAAABEAAAAAAAAAAAAAAAAAAAAABAAADw2gHg/+D/4AHgACAAAAABAAAAAAAAAAAAAAAgAAAAAAACAAAAAwAAABQAAwABAAAAFAAEADgAAAAKAAgAAgACAAEAIPDa//3//wAAAAAAIPDZ//3//wAB/+MPKwADAAEAAAAAAAAAAAAAAAEAAf//AA8AAQAAAAEAADfYOJZfDzz1AAsCAAAAAADP/aPuAAAAAM/9o+4AAAAAALcBbgAAAAgAAgAAAAAAAAABAAAB4P/gAAACAAAAAAAAtwABAAAAAAAAAAAAAAAAAAAABgAAAAAAAAAAAAAAAAEAAAAAtwASALcAAAAAUAAABgAAAAAADgCuAAEAAAAAAAEADAAAAAEAAAAAAAIADgBAAAEAAAAAAAMADAAiAAEAAAAAAAQADABOAAEAAAAAAAUAFgAMAAEAAAAAAAYABgAuAAEAAAAAAAoANABaAAMAAQQJAAEADAAAAAMAAQQJAAIADgBAAAMAAQQJAAMADAAiAAMAAQQJAAQADABOAAMAAQQJAAUAFgAMAAMAAQQJAAYADAA0AAMAAQQJAAoANABaAHIAYQB0AGkAbgBnAFYAZQByAHMAaQBvAG4AIAAxAC4AMAByAGEAdABpAG4AZ3JhdGluZwByAGEAdABpAG4AZwBSAGUAZwB1AGwAYQByAHIAYQB0AGkAbgBnAEYAbwBuAHQAIABnAGUAbgBlAHIAYQB0AGUAZAAgAGIAeQAgAEkAYwBvAE0AbwBvAG4ALgADAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA) format('woff'); + font-weight: normal; + font-style: normal; +} + +/* Dropdown Icon */ +.ui.accordion .title .dropdown.icon, +.ui.accordion .accordion .title .dropdown.icon { + font-family: Accordion; + line-height: 1; + -webkit-backface-visibility: hidden; + backface-visibility: hidden; + font-weight: normal; + font-style: normal; + 
text-align: center; +} +.ui.accordion .title .dropdown.icon:before, +.ui.accordion .accordion .title .dropdown.icon:before { + content: '\f0da' /*rtl:'\f0d9'*/; +} + + +/******************************* + User Overrides +*******************************/ + diff --git a/docs/source/_ext/sphinx_accordion/semantic-ui-2.4.2/accordion.js b/docs/source/_ext/sphinx_accordion/semantic-ui-2.4.2/accordion.js new file mode 100755 index 0000000..94e8830 --- /dev/null +++ b/docs/source/_ext/sphinx_accordion/semantic-ui-2.4.2/accordion.js @@ -0,0 +1,613 @@ +/*! + * # Semantic UI 2.4.1 - Accordion + * http://github.com/semantic-org/semantic-ui/ + * + * + * Released under the MIT license + * http://opensource.org/licenses/MIT + * + */ + +;(function ($, window, document, undefined) { + +'use strict'; + +window = (typeof window != 'undefined' && window.Math == Math) + ? window + : (typeof self != 'undefined' && self.Math == Math) + ? self + : Function('return this')() +; + +$.fn.accordion = function(parameters) { + var + $allModules = $(this), + + time = new Date().getTime(), + performance = [], + + query = arguments[0], + methodInvoked = (typeof query == 'string'), + queryArguments = [].slice.call(arguments, 1), + + requestAnimationFrame = window.requestAnimationFrame + || window.mozRequestAnimationFrame + || window.webkitRequestAnimationFrame + || window.msRequestAnimationFrame + || function(callback) { setTimeout(callback, 0); }, + + returnedValue + ; + $allModules + .each(function() { + var + settings = ( $.isPlainObject(parameters) ) + ? $.extend(true, {}, $.fn.accordion.settings, parameters) + : $.extend({}, $.fn.accordion.settings), + + className = settings.className, + namespace = settings.namespace, + selector = settings.selector, + error = settings.error, + + eventNamespace = '.' 
+ namespace, + moduleNamespace = 'module-' + namespace, + moduleSelector = $allModules.selector || '', + + $module = $(this), + $title = $module.find(selector.title), + $content = $module.find(selector.content), + + element = this, + instance = $module.data(moduleNamespace), + observer, + module + ; + + module = { + + initialize: function() { + module.debug('Initializing', $module); + module.bind.events(); + if(settings.observeChanges) { + module.observeChanges(); + } + module.instantiate(); + }, + + instantiate: function() { + instance = module; + $module + .data(moduleNamespace, module) + ; + }, + + destroy: function() { + module.debug('Destroying previous instance', $module); + $module + .off(eventNamespace) + .removeData(moduleNamespace) + ; + }, + + refresh: function() { + $title = $module.find(selector.title); + $content = $module.find(selector.content); + }, + + observeChanges: function() { + if('MutationObserver' in window) { + observer = new MutationObserver(function(mutations) { + module.debug('DOM tree modified, updating selector cache'); + module.refresh(); + }); + observer.observe(element, { + childList : true, + subtree : true + }); + module.debug('Setting up mutation observer', observer); + } + }, + + bind: { + events: function() { + module.debug('Binding delegated events'); + $module + .on(settings.on + eventNamespace, selector.trigger, module.event.click) + ; + } + }, + + event: { + click: function() { + module.toggle.call(this); + } + }, + + toggle: function(query) { + var + $activeTitle = (query !== undefined) + ? (typeof query === 'number') + ? 
$title.eq(query) + : $(query).closest(selector.title) + : $(this).closest(selector.title), + $activeContent = $activeTitle.next($content), + isAnimating = $activeContent.hasClass(className.animating), + isActive = $activeContent.hasClass(className.active), + isOpen = (isActive && !isAnimating), + isOpening = (!isActive && isAnimating) + ; + module.debug('Toggling visibility of content', $activeTitle); + if(isOpen || isOpening) { + if(settings.collapsible) { + module.close.call($activeTitle); + } + else { + module.debug('Cannot close accordion content collapsing is disabled'); + } + } + else { + module.open.call($activeTitle); + } + }, + + open: function(query) { + var + $activeTitle = (query !== undefined) + ? (typeof query === 'number') + ? $title.eq(query) + : $(query).closest(selector.title) + : $(this).closest(selector.title), + $activeContent = $activeTitle.next($content), + isAnimating = $activeContent.hasClass(className.animating), + isActive = $activeContent.hasClass(className.active), + isOpen = (isActive || isAnimating) + ; + if(isOpen) { + module.debug('Accordion already open, skipping', $activeContent); + return; + } + module.debug('Opening accordion content', $activeTitle); + settings.onOpening.call($activeContent); + settings.onChanging.call($activeContent); + if(settings.exclusive) { + module.closeOthers.call($activeTitle); + } + $activeTitle + .addClass(className.active) + ; + $activeContent + .stop(true, true) + .addClass(className.animating) + ; + if(settings.animateChildren) { + if($.fn.transition !== undefined && $module.transition('is supported')) { + $activeContent + .children() + .transition({ + animation : 'fade in', + queue : false, + useFailSafe : true, + debug : settings.debug, + verbose : settings.verbose, + duration : settings.duration + }) + ; + } + else { + $activeContent + .children() + .stop(true, true) + .animate({ + opacity: 1 + }, settings.duration, module.resetOpacity) + ; + } + } + $activeContent + .slideDown(settings.duration, 
settings.easing, function() { + $activeContent + .removeClass(className.animating) + .addClass(className.active) + ; + module.reset.display.call(this); + settings.onOpen.call(this); + settings.onChange.call(this); + }) + ; + }, + + close: function(query) { + var + $activeTitle = (query !== undefined) + ? (typeof query === 'number') + ? $title.eq(query) + : $(query).closest(selector.title) + : $(this).closest(selector.title), + $activeContent = $activeTitle.next($content), + isAnimating = $activeContent.hasClass(className.animating), + isActive = $activeContent.hasClass(className.active), + isOpening = (!isActive && isAnimating), + isClosing = (isActive && isAnimating) + ; + if((isActive || isOpening) && !isClosing) { + module.debug('Closing accordion content', $activeContent); + settings.onClosing.call($activeContent); + settings.onChanging.call($activeContent); + $activeTitle + .removeClass(className.active) + ; + $activeContent + .stop(true, true) + .addClass(className.animating) + ; + if(settings.animateChildren) { + if($.fn.transition !== undefined && $module.transition('is supported')) { + $activeContent + .children() + .transition({ + animation : 'fade out', + queue : false, + useFailSafe : true, + debug : settings.debug, + verbose : settings.verbose, + duration : settings.duration + }) + ; + } + else { + $activeContent + .children() + .stop(true, true) + .animate({ + opacity: 0 + }, settings.duration, module.resetOpacity) + ; + } + } + $activeContent + .slideUp(settings.duration, settings.easing, function() { + $activeContent + .removeClass(className.animating) + .removeClass(className.active) + ; + module.reset.display.call(this); + settings.onClose.call(this); + settings.onChange.call(this); + }) + ; + } + }, + + closeOthers: function(index) { + var + $activeTitle = (index !== undefined) + ? 
$title.eq(index) + : $(this).closest(selector.title), + $parentTitles = $activeTitle.parents(selector.content).prev(selector.title), + $activeAccordion = $activeTitle.closest(selector.accordion), + activeSelector = selector.title + '.' + className.active + ':visible', + activeContent = selector.content + '.' + className.active + ':visible', + $openTitles, + $nestedTitles, + $openContents + ; + if(settings.closeNested) { + $openTitles = $activeAccordion.find(activeSelector).not($parentTitles); + $openContents = $openTitles.next($content); + } + else { + $openTitles = $activeAccordion.find(activeSelector).not($parentTitles); + $nestedTitles = $activeAccordion.find(activeContent).find(activeSelector).not($parentTitles); + $openTitles = $openTitles.not($nestedTitles); + $openContents = $openTitles.next($content); + } + if( ($openTitles.length > 0) ) { + module.debug('Exclusive enabled, closing other content', $openTitles); + $openTitles + .removeClass(className.active) + ; + $openContents + .removeClass(className.animating) + .stop(true, true) + ; + if(settings.animateChildren) { + if($.fn.transition !== undefined && $module.transition('is supported')) { + $openContents + .children() + .transition({ + animation : 'fade out', + useFailSafe : true, + debug : settings.debug, + verbose : settings.verbose, + duration : settings.duration + }) + ; + } + else { + $openContents + .children() + .stop(true, true) + .animate({ + opacity: 0 + }, settings.duration, module.resetOpacity) + ; + } + } + $openContents + .slideUp(settings.duration , settings.easing, function() { + $(this).removeClass(className.active); + module.reset.display.call(this); + }) + ; + } + }, + + reset: { + + display: function() { + module.verbose('Removing inline display from element', this); + $(this).css('display', ''); + if( $(this).attr('style') === '') { + $(this) + .attr('style', '') + .removeAttr('style') + ; + } + }, + + opacity: function() { + module.verbose('Removing inline opacity from element', 
this); + $(this).css('opacity', ''); + if( $(this).attr('style') === '') { + $(this) + .attr('style', '') + .removeAttr('style') + ; + } + }, + + }, + + setting: function(name, value) { + module.debug('Changing setting', name, value); + if( $.isPlainObject(name) ) { + $.extend(true, settings, name); + } + else if(value !== undefined) { + if($.isPlainObject(settings[name])) { + $.extend(true, settings[name], value); + } + else { + settings[name] = value; + } + } + else { + return settings[name]; + } + }, + internal: function(name, value) { + module.debug('Changing internal', name, value); + if(value !== undefined) { + if( $.isPlainObject(name) ) { + $.extend(true, module, name); + } + else { + module[name] = value; + } + } + else { + return module[name]; + } + }, + debug: function() { + if(!settings.silent && settings.debug) { + if(settings.performance) { + module.performance.log(arguments); + } + else { + module.debug = Function.prototype.bind.call(console.info, console, settings.name + ':'); + module.debug.apply(console, arguments); + } + } + }, + verbose: function() { + if(!settings.silent && settings.verbose && settings.debug) { + if(settings.performance) { + module.performance.log(arguments); + } + else { + module.verbose = Function.prototype.bind.call(console.info, console, settings.name + ':'); + module.verbose.apply(console, arguments); + } + } + }, + error: function() { + if(!settings.silent) { + module.error = Function.prototype.bind.call(console.error, console, settings.name + ':'); + module.error.apply(console, arguments); + } + }, + performance: { + log: function(message) { + var + currentTime, + executionTime, + previousTime + ; + if(settings.performance) { + currentTime = new Date().getTime(); + previousTime = time || currentTime; + executionTime = currentTime - previousTime; + time = currentTime; + performance.push({ + 'Name' : message[0], + 'Arguments' : [].slice.call(message, 1) || '', + 'Element' : element, + 'Execution Time' : executionTime + }); 
+ } + clearTimeout(module.performance.timer); + module.performance.timer = setTimeout(module.performance.display, 500); + }, + display: function() { + var + title = settings.name + ':', + totalTime = 0 + ; + time = false; + clearTimeout(module.performance.timer); + $.each(performance, function(index, data) { + totalTime += data['Execution Time']; + }); + title += ' ' + totalTime + 'ms'; + if(moduleSelector) { + title += ' \'' + moduleSelector + '\''; + } + if( (console.group !== undefined || console.table !== undefined) && performance.length > 0) { + console.groupCollapsed(title); + if(console.table) { + console.table(performance); + } + else { + $.each(performance, function(index, data) { + console.log(data['Name'] + ': ' + data['Execution Time']+'ms'); + }); + } + console.groupEnd(); + } + performance = []; + } + }, + invoke: function(query, passedArguments, context) { + var + object = instance, + maxDepth, + found, + response + ; + passedArguments = passedArguments || queryArguments; + context = element || context; + if(typeof query == 'string' && object !== undefined) { + query = query.split(/[\. ]/); + maxDepth = query.length - 1; + $.each(query, function(depth, value) { + var camelCaseValue = (depth != maxDepth) + ? 
value + query[depth + 1].charAt(0).toUpperCase() + query[depth + 1].slice(1) + : query + ; + if( $.isPlainObject( object[camelCaseValue] ) && (depth != maxDepth) ) { + object = object[camelCaseValue]; + } + else if( object[camelCaseValue] !== undefined ) { + found = object[camelCaseValue]; + return false; + } + else if( $.isPlainObject( object[value] ) && (depth != maxDepth) ) { + object = object[value]; + } + else if( object[value] !== undefined ) { + found = object[value]; + return false; + } + else { + module.error(error.method, query); + return false; + } + }); + } + if ( $.isFunction( found ) ) { + response = found.apply(context, passedArguments); + } + else if(found !== undefined) { + response = found; + } + if($.isArray(returnedValue)) { + returnedValue.push(response); + } + else if(returnedValue !== undefined) { + returnedValue = [returnedValue, response]; + } + else if(response !== undefined) { + returnedValue = response; + } + return found; + } + }; + if(methodInvoked) { + if(instance === undefined) { + module.initialize(); + } + module.invoke(query); + } + else { + if(instance !== undefined) { + instance.invoke('destroy'); + } + module.initialize(); + } + }) + ; + return (returnedValue !== undefined) + ? 
returnedValue + : this + ; +}; + +$.fn.accordion.settings = { + + name : 'Accordion', + namespace : 'accordion', + + silent : false, + debug : false, + verbose : false, + performance : true, + + on : 'click', // event on title that opens accordion + + observeChanges : true, // whether accordion should automatically refresh on DOM insertion + + exclusive : true, // whether a single accordion content panel should be open at once + collapsible : true, // whether accordion content can be closed + closeNested : false, // whether nested content should be closed when a panel is closed + animateChildren : true, // whether children opacity should be animated + + duration : 350, // duration of animation + easing : 'easeOutQuad', // easing equation for animation + + onOpening : function(){}, // callback before open animation + onClosing : function(){}, // callback before closing animation + onChanging : function(){}, // callback before closing or opening animation + + onOpen : function(){}, // callback after open animation + onClose : function(){}, // callback after closing animation + onChange : function(){}, // callback after closing or opening animation + + error: { + method : 'The method you called is not defined' + }, + + className : { + active : 'active', + animating : 'animating' + }, + + selector : { + accordion : '.accordion', + title : '.title', + trigger : '.title', + content : '.content' + } + +}; + +// Adds easing +$.extend( $.easing, { + easeOutQuad: function (x, t, b, c, d) { + return -c *(t/=d)*(t-2) + b; + } +}); + +})( jQuery, window, document ); + diff --git a/docs/source/_ext/sphinx_accordion/semantic-ui-2.4.2/accordion.min.css b/docs/source/_ext/sphinx_accordion/semantic-ui-2.4.2/accordion.min.css new file mode 100755 index 0000000..80a8e46 --- /dev/null +++ b/docs/source/_ext/sphinx_accordion/semantic-ui-2.4.2/accordion.min.css @@ -0,0 +1,9 @@ +/*! 
+ * # Semantic UI 2.4.1 - Accordion + * http://github.com/semantic-org/semantic-ui/ + * + * + * Released under the MIT license + * http://opensource.org/licenses/MIT + * + */.ui.accordion,.ui.accordion .accordion{max-width:100%}.ui.accordion .accordion{margin:1em 0 0;padding:0}.ui.accordion .accordion .title,.ui.accordion .title{cursor:pointer}.ui.accordion .title:not(.ui){padding:.5em 0;font-family:Lato,'Helvetica Neue',Arial,Helvetica,sans-serif;font-size:1em;color:rgba(0,0,0,.87)}.ui.accordion .accordion .title~.content,.ui.accordion .title~.content{display:none}.ui.accordion:not(.styled) .accordion .title~.content:not(.ui),.ui.accordion:not(.styled) .title~.content:not(.ui){margin:'';padding:.5em 0 1em}.ui.accordion:not(.styled) .title~.content:not(.ui):last-child{padding-bottom:0}.ui.accordion .accordion .title .dropdown.icon,.ui.accordion .title .dropdown.icon{display:inline-block;float:none;opacity:1;width:1.25em;height:1em;margin:0 .25rem 0 0;padding:0;font-size:1em;-webkit-transition:opacity .1s ease,-webkit-transform .1s ease;transition:opacity .1s ease,-webkit-transform .1s ease;transition:transform .1s ease,opacity .1s ease;transition:transform .1s ease,opacity .1s ease,-webkit-transform .1s ease;vertical-align:baseline;-webkit-transform:none;transform:none}.ui.accordion.menu .item .title{display:block;padding:0}.ui.accordion.menu .item .title>.dropdown.icon{float:right;margin:.21425em 0 0 1em;-webkit-transform:rotate(180deg);transform:rotate(180deg)}.ui.accordion .ui.header .dropdown.icon{font-size:1em;margin:0 .25rem 0 0}.ui.accordion .accordion .active.title .dropdown.icon,.ui.accordion .active.title .dropdown.icon{-webkit-transform:rotate(90deg);transform:rotate(90deg)}.ui.accordion.menu .item .active.title>.dropdown.icon{-webkit-transform:rotate(90deg);transform:rotate(90deg)}.ui.styled.accordion{width:600px}.ui.styled.accordion,.ui.styled.accordion .accordion{border-radius:.28571429rem;background:#fff;-webkit-box-shadow:0 1px 2px 0 
rgba(34,36,38,.15),0 0 0 1px rgba(34,36,38,.15);box-shadow:0 1px 2px 0 rgba(34,36,38,.15),0 0 0 1px rgba(34,36,38,.15)}.ui.styled.accordion .accordion .title,.ui.styled.accordion .title{margin:0;padding:.75em 1em;color:rgba(0,0,0,.4);font-weight:700;border-top:1px solid rgba(34,36,38,.15);-webkit-transition:background .1s ease,color .1s ease;transition:background .1s ease,color .1s ease}.ui.styled.accordion .accordion .title:first-child,.ui.styled.accordion>.title:first-child{border-top:none}.ui.styled.accordion .accordion .content,.ui.styled.accordion .content{margin:0;padding:.5em 1em 1.5em}.ui.styled.accordion .accordion .content{padding:0;padding:.5em 1em 1.5em}.ui.styled.accordion .accordion .active.title,.ui.styled.accordion .accordion .title:hover,.ui.styled.accordion .active.title,.ui.styled.accordion .title:hover{background:0 0;color:rgba(0,0,0,.87)}.ui.styled.accordion .accordion .active.title,.ui.styled.accordion .accordion .title:hover{background:0 0;color:rgba(0,0,0,.87)}.ui.styled.accordion .active.title{background:0 0;color:rgba(0,0,0,.95)}.ui.styled.accordion .accordion .active.title{background:0 0;color:rgba(0,0,0,.95)}.ui.accordion .accordion .active.content,.ui.accordion .active.content{display:block}.ui.fluid.accordion,.ui.fluid.accordion .accordion{width:100%}.ui.inverted.accordion 
.title:not(.ui){color:rgba(255,255,255,.9)}@font-face{font-family:Accordion;src:url(data:application/x-font-ttf;charset=utf-8;base64,AAEAAAALAIAAAwAwT1MvMggjB5AAAAC8AAAAYGNtYXAPfOIKAAABHAAAAExnYXNwAAAAEAAAAWgAAAAIZ2x5Zryj6HgAAAFwAAAAyGhlYWT/0IhHAAACOAAAADZoaGVhApkB5wAAAnAAAAAkaG10eAJuABIAAAKUAAAAGGxvY2EAjABWAAACrAAAAA5tYXhwAAgAFgAAArwAAAAgbmFtZfC1n04AAALcAAABPHBvc3QAAwAAAAAEGAAAACAAAwIAAZAABQAAAUwBZgAAAEcBTAFmAAAA9QAZAIQAAAAAAAAAAAAAAAAAAAABEAAAAAAAAAAAAAAAAAAAAABAAADw2gHg/+D/4AHgACAAAAABAAAAAAAAAAAAAAAgAAAAAAACAAAAAwAAABQAAwABAAAAFAAEADgAAAAKAAgAAgACAAEAIPDa//3//wAAAAAAIPDZ//3//wAB/+MPKwADAAEAAAAAAAAAAAAAAAEAAf//AA8AAQAAAAAAAAAAAAIAADc5AQAAAAABAAAAAAAAAAAAAgAANzkBAAAAAAEAAAAAAAAAAAACAAA3OQEAAAAAAQASAEkAtwFuABMAADc0PwE2FzYXFh0BFAcGJwYvASY1EgaABQgHBQYGBQcIBYAG2wcGfwcBAQcECf8IBAcBAQd/BgYAAAAAAQAAAEkApQFuABMAADcRNDc2MzIfARYVFA8BBiMiJyY1AAUGBwgFgAYGgAUIBwYFWwEACAUGBoAFCAcFgAYGBQcAAAABAAAAAQAAqWYls18PPPUACwIAAAAAAM/9o+4AAAAAz/2j7gAAAAAAtwFuAAAACAACAAAAAAAAAAEAAAHg/+AAAAIAAAAAAAC3AAEAAAAAAAAAAAAAAAAAAAAGAAAAAAAAAAAAAAAAAQAAAAC3ABIAtwAAAAAAAAAKABQAHgBCAGQAAAABAAAABgAUAAEAAAAAAAIAAAAAAAAAAAAAAAAAAAAAAAAADgCuAAEAAAAAAAEADAAAAAEAAAAAAAIADgBAAAEAAAAAAAMADAAiAAEAAAAAAAQADABOAAEAAAAAAAUAFgAMAAEAAAAAAAYABgAuAAEAAAAAAAoANABaAAMAAQQJAAEADAAAAAMAAQQJAAIADgBAAAMAAQQJAAMADAAiAAMAAQQJAAQADABOAAMAAQQJAAUAFgAMAAMAAQQJAAYADAA0AAMAAQQJAAoANABaAHIAYQB0AGkAbgBnAFYAZQByAHMAaQBvAG4AIAAxAC4AMAByAGEAdABpAG4AZ3JhdGluZwByAGEAdABpAG4AZwBSAGUAZwB1AGwAYQByAHIAYQB0AGkAbgBnAEYAbwBuAHQAIABnAGUAbgBlAHIAYQB0AGUAZAAgAGIAeQAgAEkAYwBvAE0AbwBvAG4ALgADAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA) 
format('truetype'),url(data:application/font-woff;charset=utf-8;base64,d09GRk9UVE8AAASwAAoAAAAABGgAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAABDRkYgAAAA9AAAAS0AAAEtFpovuE9TLzIAAAIkAAAAYAAAAGAIIweQY21hcAAAAoQAAABMAAAATA984gpnYXNwAAAC0AAAAAgAAAAIAAAAEGhlYWQAAALYAAAANgAAADb/0IhHaGhlYQAAAxAAAAAkAAAAJAKZAedobXR4AAADNAAAABgAAAAYAm4AEm1heHAAAANMAAAABgAAAAYABlAAbmFtZQAAA1QAAAE8AAABPPC1n05wb3N0AAAEkAAAACAAAAAgAAMAAAEABAQAAQEBB3JhdGluZwABAgABADr4HAL4GwP4GAQeCgAZU/+Lix4KABlT/4uLDAeLa/iU+HQFHQAAAHkPHQAAAH4RHQAAAAkdAAABJBIABwEBBw0PERQZHnJhdGluZ3JhdGluZ3UwdTF1MjB1RjBEOXVGMERBAAACAYkABAAGAQEEBwoNVp38lA78lA78lA77lA773Z33bxWLkI2Qj44I9xT3FAWOj5CNkIuQi4+JjoePiI2Gi4YIi/uUBYuGiYeHiIiHh4mGi4aLho2Ijwj7FPcUBYeOiY+LkAgO+92L5hWL95QFi5CNkI6Oj4+PjZCLkIuQiY6HCPcU+xQFj4iNhouGi4aJh4eICPsU+xQFiIeGiYaLhouHjYePiI6Jj4uQCA74lBT4lBWLDAoAAAAAAwIAAZAABQAAAUwBZgAAAEcBTAFmAAAA9QAZAIQAAAAAAAAAAAAAAAAAAAABEAAAAAAAAAAAAAAAAAAAAABAAADw2gHg/+D/4AHgACAAAAABAAAAAAAAAAAAAAAgAAAAAAACAAAAAwAAABQAAwABAAAAFAAEADgAAAAKAAgAAgACAAEAIPDa//3//wAAAAAAIPDZ//3//wAB/+MPKwADAAEAAAAAAAAAAAAAAAEAAf//AA8AAQAAAAEAADfYOJZfDzz1AAsCAAAAAADP/aPuAAAAAM/9o+4AAAAAALcBbgAAAAgAAgAAAAAAAAABAAAB4P/gAAACAAAAAAAAtwABAAAAAAAAAAAAAAAAAAAABgAAAAAAAAAAAAAAAAEAAAAAtwASALcAAAAAUAAABgAAAAAADgCuAAEAAAAAAAEADAAAAAEAAAAAAAIADgBAAAEAAAAAAAMADAAiAAEAAAAAAAQADABOAAEAAAAAAAUAFgAMAAEAAAAAAAYABgAuAAEAAAAAAAoANABaAAMAAQQJAAEADAAAAAMAAQQJAAIADgBAAAMAAQQJAAMADAAiAAMAAQQJAAQADABOAAMAAQQJAAUAFgAMAAMAAQQJAAYADAA0AAMAAQQJAAoANABaAHIAYQB0AGkAbgBnAFYAZQByAHMAaQBvAG4AIAAxAC4AMAByAGEAdABpAG4AZ3JhdGluZwByAGEAdABpAG4AZwBSAGUAZwB1AGwAYQByAHIAYQB0AGkAbgBnAEYAbwBuAHQAIABnAGUAbgBlAHIAYQB0AGUAZAAgAGIAeQAgAEkAYwBvAE0AbwBvAG4ALgADAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA) format('woff');font-weight:400;font-style:normal}.ui.accordion .accordion .title .dropdown.icon,.ui.accordion .title .dropdown.icon{font-family:Accordion;line-height:1;-webkit-backface-visibility:hidden;backface-visibility:hidden;font-weight:400;font-style:normal;text-align:center}.ui.accordion .accordion .title 
.dropdown.icon:before,.ui.accordion .title .dropdown.icon:before{content:'\f0da'} \ No newline at end of file diff --git a/docs/source/_ext/sphinx_accordion/semantic-ui-2.4.2/accordion.min.js b/docs/source/_ext/sphinx_accordion/semantic-ui-2.4.2/accordion.min.js new file mode 100755 index 0000000..1dd73b8 --- /dev/null +++ b/docs/source/_ext/sphinx_accordion/semantic-ui-2.4.2/accordion.min.js @@ -0,0 +1 @@ +!function(F,A,e,q){"use strict";A=void 0!==A&&A.Math==Math?A:"undefined"!=typeof self&&self.Math==Math?self:Function("return this")(),F.fn.accordion=function(a){var v,s=F(this),b=(new Date).getTime(),y=[],C=a,O="string"==typeof C,x=[].slice.call(arguments,1);A.requestAnimationFrame||A.mozRequestAnimationFrame||A.webkitRequestAnimationFrame||A.msRequestAnimationFrame;return s.each(function(){var e,c,u=F.isPlainObject(a)?F.extend(!0,{},F.fn.accordion.settings,a):F.extend({},F.fn.accordion.settings),d=u.className,n=u.namespace,g=u.selector,l=u.error,t="."+n,i="module-"+n,o=s.selector||"",f=F(this),m=f.find(g.title),p=f.find(g.content),r=this,h=f.data(i);c={initialize:function(){c.debug("Initializing",f),c.bind.events(),u.observeChanges&&c.observeChanges(),c.instantiate()},instantiate:function(){h=c,f.data(i,c)},destroy:function(){c.debug("Destroying previous instance",f),f.off(t).removeData(i)},refresh:function(){m=f.find(g.title),p=f.find(g.content)},observeChanges:function(){"MutationObserver"in A&&((e=new MutationObserver(function(e){c.debug("DOM tree modified, updating selector cache"),c.refresh()})).observe(r,{childList:!0,subtree:!0}),c.debug("Setting up mutation observer",e))},bind:{events:function(){c.debug("Binding delegated events"),f.on(u.on+t,g.trigger,c.event.click)}},event:{click:function(){c.toggle.call(this)}},toggle:function(e){var n=e!==q?"number"==typeof e?m.eq(e):F(e).closest(g.title):F(this).closest(g.title),t=n.next(p),i=t.hasClass(d.animating),o=t.hasClass(d.active),a=o&&!i,s=!o&&i;c.debug("Toggling visibility of 
content",n),a||s?u.collapsible?c.close.call(n):c.debug("Cannot close accordion content collapsing is disabled"):c.open.call(n)},open:function(e){var n=e!==q?"number"==typeof e?m.eq(e):F(e).closest(g.title):F(this).closest(g.title),t=n.next(p),i=t.hasClass(d.animating);t.hasClass(d.active)||i?c.debug("Accordion already open, skipping",t):(c.debug("Opening accordion content",n),u.onOpening.call(t),u.onChanging.call(t),u.exclusive&&c.closeOthers.call(n),n.addClass(d.active),t.stop(!0,!0).addClass(d.animating),u.animateChildren&&(F.fn.transition!==q&&f.transition("is supported")?t.children().transition({animation:"fade in",queue:!1,useFailSafe:!0,debug:u.debug,verbose:u.verbose,duration:u.duration}):t.children().stop(!0,!0).animate({opacity:1},u.duration,c.resetOpacity)),t.slideDown(u.duration,u.easing,function(){t.removeClass(d.animating).addClass(d.active),c.reset.display.call(this),u.onOpen.call(this),u.onChange.call(this)}))},close:function(e){var n=e!==q?"number"==typeof e?m.eq(e):F(e).closest(g.title):F(this).closest(g.title),t=n.next(p),i=t.hasClass(d.animating),o=t.hasClass(d.active);!o&&!(!o&&i)||o&&i||(c.debug("Closing accordion content",t),u.onClosing.call(t),u.onChanging.call(t),n.removeClass(d.active),t.stop(!0,!0).addClass(d.animating),u.animateChildren&&(F.fn.transition!==q&&f.transition("is supported")?t.children().transition({animation:"fade out",queue:!1,useFailSafe:!0,debug:u.debug,verbose:u.verbose,duration:u.duration}):t.children().stop(!0,!0).animate({opacity:0},u.duration,c.resetOpacity)),t.slideUp(u.duration,u.easing,function(){t.removeClass(d.animating).removeClass(d.active),c.reset.display.call(this),u.onClose.call(this),u.onChange.call(this)}))},closeOthers:function(e){var 
n,t,i,o=e!==q?m.eq(e):F(this).closest(g.title),a=o.parents(g.content).prev(g.title),s=o.closest(g.accordion),l=g.title+"."+d.active+":visible",r=g.content+"."+d.active+":visible";i=u.closeNested?(n=s.find(l).not(a)).next(p):(n=s.find(l).not(a),t=s.find(r).find(l).not(a),(n=n.not(t)).next(p)),0 Date: Sat, 15 Aug 2020 15:13:23 +0100 Subject: [PATCH 03/14] VER Bump setup.py --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 761c1b9..be078eb 100644 --- a/setup.py +++ b/setup.py @@ -15,7 +15,7 @@ setup( name="twined", - version="0.0.10", + version="0.0.12", py_modules=[], install_requires=["jsonschema ~= 3.2.0", "python-dotenv"], url="https://www.github.com/octue/twined", From 56f80221142cbc43cc56b22438894b211c79f7d9 Mon Sep 17 00:00:00 2001 From: Tom Clark Date: Sat, 15 Aug 2020 15:13:52 +0100 Subject: [PATCH 04/14] FIX Correct installation of docs requirements --- requirements-dev.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index e48c652..e01469c 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -21,4 +21,4 @@ twine # <---- nothing to do with the twined library! 
# Building documentation # ------------------------------------------------------------------------------ -include docs/requirements.txt +-r docs/requirements.txt From 2e5ee6eca67367cd6f582ab546c0239ef505ce16 Mon Sep 17 00:00:00 2001 From: Tom Clark Date: Sat, 15 Aug 2020 15:15:57 +0100 Subject: [PATCH 05/14] GIT Enforcing branch name convention --- .pre-commit-config.yaml | 19 +++++++++++++++++-- 1 file changed, 17 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 7762cda..3ab5747 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,5 +1,4 @@ exclude: 'build|docs|node_modules|.git|.tox|dist|docs|octue.egg-info|twined.egg-info' -include: 'twined' default_stages: [commit] fail_fast: true default_language_version: @@ -33,7 +32,23 @@ repos: language_version: python3 - repo: https://github.com/thclark/pre-commit-sphinx - rev: 0.0.1 + rev: master hooks: - id: build-docs language_version: python3 + + - repo: https://github.com/windpioneers/pre-commit-hooks + rev: 0.0.5 + hooks: + - id: check-branch-name + args: + - '^master$' + - '^development$' + - '^devops/([a-z][a-z0-9]*)(-[a-z0-9]+)*$' + - '^doc/([a-z][a-z0-9]*)(-[a-z0-9]+)*$' + - '^experiment/([a-z][a-z0-9]*)(-[a-z0-9]+)*$' + - '^feature/([a-z][a-z0-9]*)(-[a-z0-9]+)*$' + - '^fix/([a-z][a-z0-9]*)(-[a-z0-9]+)*$' + - '^hotfix/([a-z][a-z0-9]*)(-[a-z0-9]+)*$' + - '^review/([a-z][a-z0-9]*)(-[a-z0-9]+)*$' + - '^release/(?P0|[1-9]\d*)\.(?P0|[1-9]\d*)\.(?P0|[1-9]\d*)(?:-(?P(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+(?P[0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$' From 39c883dd94ea3d46833c00364e6560f74e856dda Mon Sep 17 00:00:00 2001 From: Tom Clark Date: Sat, 15 Aug 2020 15:18:18 +0100 Subject: [PATCH 06/14] REF Removed unused print line --- docs/source/conf.py | 1 - 1 file changed, 1 deletion(-) diff --git a/docs/source/conf.py b/docs/source/conf.py index 7455762..767fed1 100644 --- a/docs/source/conf.py +++ 
b/docs/source/conf.py @@ -142,7 +142,6 @@ def str2bool(value): # to avoid clicks from development pages adding to analytics googleanalytics_id = "UA-43965341-6" googleanalytics_enabled = True -# str2bool(os.getenv("READTHEDOCS", 'False')) # -- Options for HTML output --------------------------------------------------- From fc54f0d81c907003754e9a5caf2e82e84503a27b Mon Sep 17 00:00:00 2001 From: Tom Clark Date: Sat, 15 Aug 2020 18:14:15 +0100 Subject: [PATCH 07/14] DOC About section rearranged --- docs/source/about.rst | 21 +++ docs/source/about_digital_twins.rst | 57 ++++++++ docs/source/about_introducing_json_schema.rst | 133 ++++++++++++++++++ docs/source/about_other_considerations.rst | 106 ++++++++++++++ docs/source/about_requirements.rst | 30 ++++ 5 files changed, 347 insertions(+) create mode 100644 docs/source/about.rst create mode 100644 docs/source/about_digital_twins.rst create mode 100644 docs/source/about_introducing_json_schema.rst create mode 100644 docs/source/about_other_considerations.rst create mode 100644 docs/source/about_requirements.rst diff --git a/docs/source/about.rst b/docs/source/about.rst new file mode 100644 index 0000000..a39cb2c --- /dev/null +++ b/docs/source/about.rst @@ -0,0 +1,21 @@ +.. _about: + +============ +About Twines +============ + +**Twined** is a framework for describing a digital twin or data service. + +We call these descriptions "twines". To just get started building a *twine*, check out the :ref:`quick_start`. To +get into the detail of what's in a *twine*, see :ref:`anatomy`. + +Here, we look at requirements for the framework, our motivations and background, and some of the decisions made while +developing **twined**. + +.. 
toctree:: + :maxdepth: 1 + + about_digital_twins + about_requirements + about_introducing_json_schema + about_other_considerations diff --git a/docs/source/about_digital_twins.rst b/docs/source/about_digital_twins.rst new file mode 100644 index 0000000..999a3a9 --- /dev/null +++ b/docs/source/about_digital_twins.rst @@ -0,0 +1,57 @@ +.. _digital_twins: + +============= +Digital Twins +============= + +A digital twin is a virtual representation of a real life being - a physical asset like a wind turbine or car - or even +a human. + +There are three reasons why you might want to create a digital twin: + - Monitoring + - Prediction + - Optimisation + +On its own, a digital twin can be quite useful. For example, a twin might embody an AI-based analysis to predict power +output of a turbine. + +.. figure:: images/digital_twin_component_basic.svg + :width: 400px + :align: center + :figclass: align-center + :alt: A digital twin component + + A digital twin consists of some kind of analysis or processing task, which could be run many times per second, or + daily, down to occasionally or sometimes only once (the same as a "normal" analysis). + +Coupling digital twins is generally even more useful. You might wish to couple your turbine twin with a representation +of the local power grid, and a representation of a factory building to determine power demand... enabling you to +optimise your factory plant for lowest energy cost whilst intelligently selling surplus power to the grid. + +.. figure:: images/digital_twin_hierarchy.svg + :width: 350px + :align: center + :figclass: align-center + :alt: Hierarchy of digital twins + + A hierarchy of digital twins. Each blue circle represents a twin, coupled to its neighbours. Yellow nodes are where + schema are used to connect twins. + + +.. _gemini_principles: + +Gemini Principles +================= + +The Gemini Principles have been derived by the +`Centre for Digital Built Britain (CDBB) `_. 
+We strongly recommend you give them a read if embarking on a digital twins project. + +The aim of **twined** is to enable the following principles. In particular: + +#. Openness (open-source project to create schema for twins that can be run anywhere, anywhen) +#. Federation (encouraging a standardised way of connecting twins together) +#. Security (making sure schemas and data can be read safely) +#. Public Good (see our nano-rant about climate change in :ref:`reason_for_being`) + + diff --git a/docs/source/about_introducing_json_schema.rst b/docs/source/about_introducing_json_schema.rst new file mode 100644 index 0000000..8f20858 --- /dev/null +++ b/docs/source/about_introducing_json_schema.rst @@ -0,0 +1,133 @@ +.. _introducing_json_schema: + +======================= +Introducing JSON Schema +======================= + +``JSON`` is a data interchange format that has rapidly taken over as the defacto web-based data communication standard +in recent years. + +``JSONSchema`` is a way of specifying what a ``JSON`` document should contain. The Schema are, themselves, written in +``JSON``! + +Whilst schema can become extremely complicated in some scenarios, they are best designed to be quite succinct. See below +for the schema (and matching ``JSON``) for an integer and a string variable. + +**JSON:** + +.. code-block:: json + + { + "id": 1, + "name": "Tom" + } + + +**Schema:** + +.. code-block:: json + + { + "type": "object", + "title": "An id number and a name", + "properties": { + "id": { + "type": "integer", + "title": "An integer number", + "default": 0 + }, + "name": { + "type": "string", + "title": "A string name", + "default": "" + } + } + } + + +.. _useful_resources: + +Useful resources +================ +.. 
list-table:: + :widths: auto + :header-rows: 1 + + * - Link + - Resource + * - https://jsonschema.net/ + - Useful web tool for inferring schema from existing json + * - https://jsoneditoronline.org + - A powerful online editor for json, allowing manipulation of large documents better than most text editors + * - https://www.json.org/ + - The JSON standard spec + * - https://json-schema.org/ + - The (draft standard) JSONSchema spec + * - https://rjsf-team.github.io/react-jsonschema-form/ + - A front end library for generating webforms directly from a schema + + +.. _human_readbility: + +Human readability +================= + +Back in our :ref:`requirements` section, we noted it was important for humans to read and understand schema. + +The actual documents themselves are pretty easy to read by technical users. But, for non technical users, readability can be +enhanced even further by the ability to turn ``JSONSchema`` into web forms automatically. For our example above, we can +autogenerate a web form straight from the schema: + +.. figure:: images/schema_form_example.png + :width: 500px + :align: center + :figclass: align-center + :alt: Web form from an example schema + + Web form generated from the example schema above. + +Thus, we can take a schema (or a part of a schema) and use it to generate a control form for a digital twin in a web +interface without writing a separate form component - great for ease and maintainability. + + +.. _why_not_xml: + +Why not XML? +============ + +In a truly excellent `three-part blog `_, writer Seva Savris takes us +through the ups and downs of ``JSON`` versus ``XML``; well worth a read if wishing to understand the respective technologies +better. + +In short, both ``JSON`` and ``XML`` are generalised data interchange specifications and can both can do what we want here. +We choose ``JSON`` because: + +#. 
Textual representation is much more concise and easy to understand (very important where non-developers like + engineers and scientists must be expected to interpret schema) + +#. `Attack vectors `_. Because entities in ``XML`` + are not necessarily primitives (unlike in ``JSON``), an ``XML`` document parser in its default state may leave a system + open to XXE injection attacks and DTD validation attacks, and therefore requires hardening. ``JSON`` documents are + similarly afflicted (just like any kind of serialized data) but default parsers, operating on the premise of only + deserializing to primitive types, are safe by default - it is only when non-default parsing or deserialization + techniques (such as ``JSONP``) are used that the application becomes vulnerable. By utilising a default ``JSON`` parser + we can therefore significantly shrink the attack surface of the system. See + `this blog post `_ for further discussion. + +#. ``XML`` is powerful... perhaps too powerful. The standard can be adapted greatly, resulting in high encapsulation + and a high resilience to future unknowns. Both beneficial. However, this requires developers of twins to maintain + interfaces of very high complexity, adaptable to a much wider variety of input. To enable developers to progress, we + suggest handling changes and future unknowns through well-considered versioning, whilst keeping their API simple. + +#. ``XML`` allows baked-in validation of data and attributes. Whilst advantageous in some situations, this is not a + benefit here. We wish validation to be one-sided: validation of data accepted/generated by a digital twin should + occur within (at) the boundaries of that twin. + +#. Required validation capabilities, built into ``XML`` are achievable with ``JSONSchema`` (otherwise missing from the + pure ``JSON`` standard) + +#. ``JSON`` is a more compact expression than XML, significantly reducing memory and bandwidth requirements.
Whilst + not a major issue for most modern PCS, sensors on the edge may have limited memory, and both memory and bandwidth at + scale are extremely expensive. Thus for extremely large networks of interconnected systems there could be significant + speed and cost savings. + diff --git a/docs/source/about_other_considerations.rst b/docs/source/about_other_considerations.rst new file mode 100644 index 0000000..8c2768a --- /dev/null +++ b/docs/source/about_other_considerations.rst @@ -0,0 +1,106 @@ +.. _other_considerations: + +==================== +Other Considerations +==================== + +A variety of thoughts that arose whilst architecting **twined**. + +.. _bash_style_stdio: + +Bash-style stdio +================ + +Some thought was given to using a very old-school-unix approach to piping data between twins, via stdout. + +Whilst attractive (as being a wildly fast way of piping data between twins on the same machine) it was felt this +was insufficiently general, eg: + + - where twins don't exist on the same machine or container, making it cumbersome to engineer common iostreams + - where slight differences between different shells might lead to incompatibilities or changes in behaviour + +And also unfriendly, eg: + + - engineers or scientists unfamiliar with subtleties of bash shell scripting encounter difficulty piping data around + - difficult to build friendly web based tools to introspect the data and configuration + - bound to be headaches on windows platforms, even though windows now supports bash + - easy to corrupt using third party libraries (e.g. which print to stdout) + + +.. _Units: + +Units +===== + +Being used (mostly) for engineering and scientific analysis, it was tempting to add in a specified sub-schema for units. +For example, mandating that where values can be given in units, they be specified in a certain way, like: + +.. code-block:: javascript + + { + "wind_speed": { + "value": 10.2, + "units": "mph" + } + } + +or (more succinct): + +.. 
code-block:: javascript + + { + "wind_speed": 10.2, + "wind_speed_units": "mph" + } + +It's still extremely tempting to provide this facility; or at least provide some way of specifying in the schema +what units a value should be provided in. Thinking about it but don't have time right now. +If anybody wants to start crafting a PR with an extension or update to **twined** that facilitates this; please raise an +issue to start progressing it. + + +.. _variable_style: + +Variable Style +============== + +A preemptive stamp on the whinging... + +Note that in the ``JSON`` descriptions above, all variables are named in ``snake_case`` rather than ``camelCase``. This +decision, more likely than even Brexit to divide opinions, is based on: + +- The languages we anticipate being most popular for building twins seem to trend toward snake case (eg + `python `_, `c++ `_) + although to be fair we might've woefully misjudged which languages start emerging. + +- The reservation of snake case for the schema spec has the subtle advantage that in future, we might be able to use + camelCase within the spec to denote class types in some useful way, just like in python. Not sure yet; just mulling. + +- The :ref:`requirements` mention human-readability as a must; + `this paper `_ + suggests a 20% slower comprehension of camel case than snake, although to be fair that's probably arguable. + +- We're starting in Python so are taking a lead from PEP8, which is bar none the most successful style guide on the + planet, because it got everybody on the same page really early on. + +If existing code that you're dropping in uses camelCase, please don't file that as an issue... converting property +names automatically after schema validation generation is trivial, there are tons of libraries (like +`humps `_) to do it. + +We'd also consider a pull request for a built-in utility converting `to `_ and +`from `_ that does this following validation and prior to returning results.
+Suggest your proposed approach on the `issues board `_. + + +.. _language_choice: + +Language Choice +=============== + +**twined** is presently released in python only. It won't be too hard to replicate functionality in other languages, and +we're considering other languages at present, so might be easily persuadable ;) + +If you require implementation of **twined** in a different language, +and are willing to consider sponsorship of development and maintenance of that library, please +`file an issue `_. + diff --git a/docs/source/about_requirements.rst b/docs/source/about_requirements.rst new file mode 100644 index 0000000..6869ee6 --- /dev/null +++ b/docs/source/about_requirements.rst @@ -0,0 +1,30 @@ +.. _requirements: + +Requirements of the framework +=================================== + +A *twine* must describe a digital twin, and have multiple roles. It must: + +#. Define what data is required by a digital twin, in order to run +#. Define what data will be returned by the twin following a successful run +#. Define the formats of these data, in such a way that incoming data can be validated +#. Define what other (1st or 3rd party) twins / services are required by this one in order for it to run. + +If this weren't enough, the description: + +#. Must be trustable (i.e. a *twine* from an untrusted, corrupt or malicious third party should be safe to at least read) +#. Must be machine-readable *and machine-understandable* [1]_ +#. Must be human-readable *and human-understandable* [1]_ +#. Must be discoverable (that is, searchable/indexable) otherwise people won't know it's there in orer to use it. + +Fortunately for digital twin developers, several of these requirements have already been seen for data interchange +formats developed for the web. **twined** uses ``JSON`` and ``JSONSchema`` to help interchange data. 
+ +If you're not already familiar with ``JSONSchema`` (or wish to know why **twined** uses ``JSON`` over the seemingly more +appropriate ``XML`` standard), see :ref:`introducing_json_schema`. + + +.. Footnotes: + +.. [1] *Understandable* essentially means that, once read, the machine or human knows what it actually means and what to do with it. + From 9d808a139f1c8253b7a3908914c5d1aae0d22360 Mon Sep 17 00:00:00 2001 From: Tom Clark Date: Sat, 15 Aug 2020 18:15:21 +0100 Subject: [PATCH 08/14] DOC Added section on deployment --- docs/source/deployment.rst | 66 ++++++++++++++++++++++++++++++++++++++ 1 file changed, 66 insertions(+) create mode 100644 docs/source/deployment.rst diff --git a/docs/source/deployment.rst b/docs/source/deployment.rst new file mode 100644 index 0000000..0cec073 --- /dev/null +++ b/docs/source/deployment.rst @@ -0,0 +1,66 @@ +.. _deployment: + +========== +Deployment +========== + + +.. _deploying_with_octue: + +Deploying with Octue +==================== + +`Octue `_ provides automated deployment to a cloud provider (like GCP or Azure), along with +permissions and user management, monitoring, logging and data storage management out of the box. + +There are also a whole bunch of collaborative helper tools, like the graphical +`twine builder `_ and manifesting tools, designed to speed up the process of building +and using twines. + +The full set of services is in early beta, `get in touch `_ and we can help you +architect systems - from small data services to large networks of :ref:`digital_twins`. + + +.. _deploying_with_doctue: + +Coming Soon - Deploying with doctue +=================================== + +Once we've bedded down our services internally at Octue, we'll be open-sourcing more parts of our build/deploy process, +including docker containers with pre-configured servers to run and monitor twine-based services and digital twins. 
+ +This will allow services to be easily spun up on GCP, Azure Digital Ocean etc., and be a nice halfway house between +fully managed system on Octue and running your own webserver. Of course, +without all the collaborative and data management features that Octue provides ;) + +We're looking for commercial sponsors for this part of the process - if that could be you, please +`get in touch `_ + + +.. _deploying_as_a_cli: + +Deploying as a command-line application +======================================= + +Use the open-source `octue app template `_ as a guide. Write your new +python code (or call your existing tools/libraries) within it. It's set up to wrap and check configuration, inputs and +outputs using twined. Follow the instructions there to set up your inputs, and your files, and run an analysis. + + +.. _deployment_with_a_web_server: + +Deploying with your own web server +================================== + +You can use any python based web server (need another language? see :ref:`language_choice`): + +- Add ``configuration_values_data`` to your webserver config +- Set up an endpoint to allow. +- Set up an endpoint to handle incoming requests / socket messages - these will be ``input_values_data``. +- Treat these requests / messages as events which trigger a task. +- In your task framework (e.g. your celery task), either: + - Use **twined** directly to validate the ``input_values_data``/``output_values_data`` (and, on startup, the + ``configuration_values_data``) and handle running any required analysis yourself, or + - import your analysis app (as built in :ref:`deploying_as_a_cli`) and call it with the configuration and input + data in your task framework. +- Return the result to the client. 
From 3c095359d7a07898e8af3628b18a7745fa21d711 Mon Sep 17 00:00:00 2001 From: Tom Clark Date: Sat, 15 Aug 2020 18:18:35 +0100 Subject: [PATCH 09/14] DOC Updated quick start guide --- docs/source/quick_start.rst | 114 +----------------- .../quick_start_create_your_first_twine.rst | 111 +++++++++++++++++ docs/source/quick_start_installation.rst | 41 +++++++ 3 files changed, 156 insertions(+), 110 deletions(-) create mode 100644 docs/source/quick_start_create_your_first_twine.rst create mode 100644 docs/source/quick_start_installation.rst diff --git a/docs/source/quick_start.rst b/docs/source/quick_start.rst index 9d89d1c..3428b7b 100644 --- a/docs/source/quick_start.rst +++ b/docs/source/quick_start.rst @@ -4,114 +4,8 @@ Quick Start ============ -.. _create_a_twine: +.. toctree:: + :maxdepth: 2 -Create your first twine -======================= - -Let's say we want a digital twin that accepts two values, uses them to make a calculation, then gives the result. Anyone connecting to the twin will need to know what values it requires, and what it responds with. - -First, create a blank text file, call it `twine.json`. We'll give the twin a title and description. -Paste in the following: - -.. code-block:: javascript - - { - "title": "My first digital twin... of an atomising discombobulator", - "description": "A simple example... estimates the `foz` value of an atomising discombobulator." - } - -Now, let's define an input values strand, to specify what values are required by the twin. For this we use a json schema -(you can read more about them in :ref:`introducing_json_schema`). Add the ``input_values`` field, so your twine looks like this: - -.. code-block:: javascript - - { - "title": "My first digital twin", - "description": "A simple example to build on..." 
- "input_values_schema": { - "$schema": "http://json-schema.org/2019-09/schema#", - "title": "Input Values schema for my first digital twin", - "description": "These values are supplied to the twin by another program (often over a websocket, depending on your integration provider). So as these values change, the twin can reply with an update.", - "type": "object", - "properties": { - "foo": { - "description": "The foo value... speed of the discombobulator's input bobulation module, in m/s", - "type": "number", - "minimum": 10, - "maximum": 500 - }, - "baz": { - "description": "The baz value... period of the discombobulator's recombulation unit, in s", - "type": "number", - "minimum": 0, - "maximum": 1000 - } - } - } - } - -Finally, let's define an output values strand, to define what kind of data is returned by the twin: - -.. code-block:: javascript - - "output_values_schema": { - "$schema": "http://json-schema.org/2019-09/schema#", - "title": "Output Values schema for my first digital twin", - "description": "The twin will output data that matches this schema", - "type": "object", - "properties": { - "foz": { - "description": "Estimate of the foz value... efficiency of the discombobulator in %", - "type": "number", - "minimum": 10, - "maximum": 500 - } - } - } - - -.. _load_the_twine: - -Load the twine -============== - -**twined** provides a `Twine()` class to load a twine (from a file or a json string). -The loading process checks the twine itself is valid. It's as simple as: - -.. code-block:: py - - from twined import Twine - - my_twine = Twine(file='twine.json') - - -.. _validate_some_inputs: - -Validate some inputs -==================== - -Say we have some json that we want to parse and validate, to make sure it matches what's required for input values. - -.. code-block:: py - - my_input_values = my_twine.validate_input_values(json='{"foo": 30, "baz": 500}') - -You can read the values from a file too. 
Paste the following into a file named ``input_values.json``: - -.. code-block:: javascript - - { - "foo": 30, - "baz": 500 - } - -Then parse and validate directly from the file: - -.. code-block:: py - - my_input_values = my_twine.validate_input_values(file="input_values.json") - - -.. ATTENTION:: - LIBRARY IS UNDER CONSTRUCTION! WATCH THIS SPACE FOR MORE! \ No newline at end of file + quick_start_installation + quick_start_create_your_first_twine diff --git a/docs/source/quick_start_create_your_first_twine.rst b/docs/source/quick_start_create_your_first_twine.rst new file mode 100644 index 0000000..bbfbad6 --- /dev/null +++ b/docs/source/quick_start_create_your_first_twine.rst @@ -0,0 +1,111 @@ +.. _create_your_first_twine: + +Create your first twine +======================= + +Let's say we want a digital twin that accepts two values, uses them to make a calculation, then gives the result. Anyone connecting to the twin will need to know what values it requires, and what it responds with. + +First, create a blank text file, call it `twine.json`. We'll give the twin a title and description. +Paste in the following: + +.. code-block:: javascript + + { + "title": "My first digital twin... of an atomising discombobulator", + "description": "A simple example... estimates the `foz` value of an atomising discombobulator." + } + +Now, let's define an input values strand, to specify what values are required by the twin. For this we use a json schema +(you can read more about them in :ref:`introducing_json_schema`). Add the ``input_values`` field, so your twine looks like this: + +.. code-block:: javascript + + { + "title": "My first digital twin", + "description": "A simple example to build on..." 
+ "input_values_schema": { + "$schema": "http://json-schema.org/2019-09/schema#", + "title": "Input Values schema for my first digital twin", + "description": "These values are supplied to the twin by another program (often over a websocket, depending on your integration provider). So as these values change, the twin can reply with an update.", + "type": "object", + "properties": { + "foo": { + "description": "The foo value... speed of the discombobulator's input bobulation module, in m/s", + "type": "number", + "minimum": 10, + "maximum": 500 + }, + "baz": { + "description": "The baz value... period of the discombobulator's recombulation unit, in s", + "type": "number", + "minimum": 0, + "maximum": 1000 + } + } + } + } + +Finally, let's define an output values strand, to define what kind of data is returned by the twin: + +.. code-block:: javascript + + "output_values_schema": { + "$schema": "http://json-schema.org/2019-09/schema#", + "title": "Output Values schema for my first digital twin", + "description": "The twin will output data that matches this schema", + "type": "object", + "properties": { + "foz": { + "description": "Estimate of the foz value... efficiency of the discombobulator in %", + "type": "number", + "minimum": 10, + "maximum": 500 + } + } + } + + +.. _load_the_twine: + +Load the twine +============== + +**twined** provides a `Twine()` class to load a twine (from a file or a json string). +The loading process checks the twine itself is valid. It's as simple as: + +.. code-block:: py + + from twined import Twine + + my_twine = Twine(file='twine.json') + + +.. _validate_some_inputs: + +Validate some inputs +==================== + +Say we have some json that we want to parse and validate, to make sure it matches what's required for input values. + +.. code-block:: py + + my_input_values = my_twine.validate_input_values(json='{"foo": 30, "baz": 500}') + +You can read the values from a file too. 
Paste the following into a file named ``input_values.json``: + +.. code-block:: javascript + + { + "foo": 30, + "baz": 500 + } + +Then parse and validate directly from the file: + +.. code-block:: py + + my_input_values = my_twine.validate_input_values(file="input_values.json") + + +.. ATTENTION:: + LIBRARY IS UNDER CONSTRUCTION! WATCH THIS SPACE FOR MORE! \ No newline at end of file diff --git a/docs/source/quick_start_installation.rst b/docs/source/quick_start_installation.rst new file mode 100644 index 0000000..0cbc136 --- /dev/null +++ b/docs/source/quick_start_installation.rst @@ -0,0 +1,41 @@ +.. _installation: + +============ +Installation +============ + +**twined** is available on `pypi `_, so installation into your python virtual environment is dead +simple: + +.. code-block:: py + + pip install twined + +Don't have a virtual environment with pip? You probably should! ``pyenv`` is your friend. Google it. + + +.. _compilation: + +Compilation +============ + +There is presently no need to compile **twined**, as it's written entirely in python. + + +.. _third_party_library_installation: + +Third party library installation +================================ + +**twined** is for python >= 3.6 so expects that. Other dependencies can be checked in ``setup.py``, and will +automatically installed during the installation above. + + +.. _third_party_build_requirements: + +Third party build requirements +============================== + +.. ATTENTION:: + Woohoo! There are no crazy dependencies that you have to compile and build for your particular system. + (you know the ones... they never *actually* compile, right?). We aim to keep it this way. 
From 99448101d8e99f3849c70512a9fbab253b2880d0 Mon Sep 17 00:00:00 2001 From: Tom Clark Date: Sat, 15 Aug 2020 18:19:23 +0100 Subject: [PATCH 10/14] DOC Placeholder for lifecycle section --- docs/source/lifecycle.rst | 30 ++++++++++++++++++++++++++++++ 1 file changed, 30 insertions(+) create mode 100644 docs/source/lifecycle.rst diff --git a/docs/source/lifecycle.rst b/docs/source/lifecycle.rst new file mode 100644 index 0000000..404bd3c --- /dev/null +++ b/docs/source/lifecycle.rst @@ -0,0 +1,30 @@ + +.. + + Data matching the ``configuration_values_schema`` is supplied to the digital twin / data service at + startup. + + It's generally used to define control parameters relating to what the service should do, or how it should operate. + For example, should it produce output images as low resolution PNGs or as SVGs? How many iterations of a fluid + flow solver should be used? What is the acceptable error level on an classifier algorithm? + + Input Values + + Once configuration data supplied to a service has been validated, it can accept inputs and run analyses + using them. + + Depending on the way it's deployed (see :ref:`deployment`), the ``input_values`` might come in from a web request, + over a websocket or called directly from the command line or another library. + + However it comes, new ``input_values``, which are in ``JSON`` format, are checked against the + ``input_values_schema`` strand of the twine. If they match, then analysis can proceed. + + Output Values + + Once a service has Data matching the ``output_values_schema`` is supplied to the service while it's running. Depending on the way + it's deployed, the values might come in from a web request, over a websocket or called directly from + another library + + Input For example current rotor speed, or forecast wind direction. + + Values might be passed at instantiation of a twin (typical application-like process) or via a socket. 
From a828de18e18f3e485836d8ef9fecde7c998ad670 Mon Sep 17 00:00:00 2001 From: Tom Clark Date: Sat, 15 Aug 2020 18:21:24 +0100 Subject: [PATCH 11/14] DOC Updated structure and anatomy sections --- docs/source/anatomy.rst | 101 ++++++++++++++ docs/source/anatomy_children.rst | 9 ++ docs/source/anatomy_credentials.rst | 79 +++++++++++ docs/source/anatomy_manifest.rst | 202 ++++++++++++++++++++++++++++ docs/source/anatomy_monitors.rst | 44 ++++++ docs/source/anatomy_values.rst | 128 ++++++++++++++++++ docs/source/index.rst | 58 +++----- 7 files changed, 583 insertions(+), 38 deletions(-) create mode 100644 docs/source/anatomy.rst create mode 100644 docs/source/anatomy_children.rst create mode 100644 docs/source/anatomy_credentials.rst create mode 100644 docs/source/anatomy_manifest.rst create mode 100644 docs/source/anatomy_monitors.rst create mode 100644 docs/source/anatomy_values.rst diff --git a/docs/source/anatomy.rst b/docs/source/anatomy.rst new file mode 100644 index 0000000..c4533af --- /dev/null +++ b/docs/source/anatomy.rst @@ -0,0 +1,101 @@ +.. _anatomy: + +========================= +Anatomy Of The Twine File +========================= + +The main point of **twined** is to enable engineers and scientists to easily (and rigorously) define a digital twin +or data service. + +This is done by adding a ``twine.json`` file to the repository containing your code. Adding a *twine* means you can: + +- communicate (to you or a colleague) what data is required by this service +- communicate (to another service / machine) what data is required +- deploy services automatically with a provider like `Octue `_. + +To just get started building a *twine*, check out the :ref:`quick_start`. To learn more about twines in general, +see :ref:`about`. Here, we describe the parts of a *twine* ("strands") and what they mean. + +.. _strands: + +Strands +======= + +A *twine* has several sections, called *strands*. 
Each defines a different kind of data required (or produced) by the +twin. + +.. list-table:: + :widths: 30 70 + :header-rows: 1 + + * - Strand + - Describes the twin's requirements for... + * - :ref:`Configuration Values ` + - Data, in JSON form, used for configuration of the twin/service. + * - :ref:`Configuration Manifest ` + - Files/datasets required by the twin at configuration/startup + * - :ref:`Input Values ` + - Data, in JSON form, passed to the twin in order to trigger an analysis + * - :ref:`Input Manifest ` + - Files/datasets passed with Input Values to trigger an analysis + * - :ref:`Output Values ` + - Data, in JSON form, that will be produced by the twin (in response to inputs) + * - :ref:`Output Manifest ` + - Files/datasets that will be produced by the twin (in response to inputs) + * - :ref:`Credentials ` + - Credentials that are required by the twin in order to access third party services + * - :ref:`Children ` + - Other twins, access to which are required for this twin to function + * - :ref:`Monitors ` + - Visual and progress outputs from an analysis + + +.. toctree:: + :maxdepth: 1 + :hidden: + + anatomy_values + anatomy_manifest + anatomy_credentials + anatomy_monitors + anatomy_children + + +.. _twine_file_schema: + +Twine File Schema +================= + +Because the ``twine.json`` file itself is in ``JSON`` format with a strict structure, **twined** uses a schema to make +that twine files are correctly written (a "schema-schema", if you will, since a twine already contains schema). Try not +to think about it. But if you must, the *twine* schema is +`here `_. + +The first thing **twined** always does is check that the ``twine.json`` file itself is valid, and give you a +descriptive error if it isn't. + + +.. 
_other_external_io: + +Other External I/O +================== + +A twin might: + +- GET/POST data from/to an external API, +- query/update a database, +- upload files to an object store, +- trigger events in another network, or +- perform pretty much any interaction you can think of with other applications over the web. + +However, such data exchange may not be controllable by **twined** (which is intended to operate at the boundaries of the +twin) unless the resulting data is returned from the twin (and must therefore be compliant with the schema). + +So, there's nothing for **twined** to do here, and no need for a strand in the *twine* file. However, interacting with +third party APIs or databases might require some credentials. See :ref:`credentials_strand` for help with that. + +.. NOTE:: + This is actually a very common scenario. For example, the purpose of the twin might be to fetch data (like a weather + forecast) from some external API then return it in the ``output_values`` for use in a network of digital twins. + But its the twin developer's job to do the fetchin' and make sure the resulting data is compliant with the + ``output_values_schema`` (see :ref:`values_based_strands`). \ No newline at end of file diff --git a/docs/source/anatomy_children.rst b/docs/source/anatomy_children.rst new file mode 100644 index 0000000..8f17237 --- /dev/null +++ b/docs/source/anatomy_children.rst @@ -0,0 +1,9 @@ +.. _children_strand: + +=============== +Children Strand +=============== + +.. ATTENTION:: + + Coming Soon! diff --git a/docs/source/anatomy_credentials.rst b/docs/source/anatomy_credentials.rst new file mode 100644 index 0000000..857d5b7 --- /dev/null +++ b/docs/source/anatomy_credentials.rst @@ -0,0 +1,79 @@ +.. 
_credentials_strand: + +================== +Credentials Strand +================== + +In order to: + +- GET/POST data from/to an API, +- query a database, or +- connect to a socket (for receiving Values or emitting Values, Monitors or Logs), + +A digital twin must have *access* to it. API keys, database URIs, etc must be supplied to the digital twin but +treated with best practice with respect to security considerations. The purpose of the ``credentials`` strand is to +dictate what credentials the twin requires in order to function. + +.. _defining_the_credentials_strand: + +Defining the Credentials Strand +=============================== + +This is the simplest of the strands, containing a list of credentials (whose ``NAMES_SHOULD_BE_SHOUTY_SNAKE_CASE``) with +a reminder of the purpose. Defaults can also be provided, useful for running on local or closed networks. + +.. code-block:: javascript + + { + "credentials": [ + { + "name": "SECRET_THE_FIRST", + "purpose": "Token for accessing a 3rd party API service" + }, + { + "name": "SECRET_THE_SECOND", + "purpose": "Token for accessing a 3rd party API service" + }, + { + "name": "SECRET_THE_THIRD", + "purpose": "Usually a big secret but sometimes has a convenient non-secret default, like a sandbox or local database", + "default": "postgres://pguser:pgpassword@localhost:5432/pgdb" + } + ] + } + +.. _supplying_credentials: + +Supplying Credentials +===================== + +.. ATTENTION:: + + *Credentials should never be hard-coded into application code* + + Do you trust the twin code? If you insert credentials to your own database into a digital twin + provided by a third party, you better be very sure that twin isn't going to scrape all that data out then send + it elsewhere! + + Alternatively, if you're building a twin requiring such credentials, it's your responsibility to give the end + users confidence that you're not abusing their access. 
+ + There'll be a lot more discussion on these issues, but it's outside the scope of **twined** - all we do here is + make sure a twin has the credentials it requires. + +Credentials should be securely managed by whatever system is managing the twin, then made accessible to the twin +in the form of environment variables: + +.. code-block:: javascript + + SERVICE_API_KEY=someLongTokenTHatYouProbablyHaveToPayTheThirdPartyProviderLoadsOfMoneyFor + +Credentials may also reside in a ``.env`` file in the current directory, either in the format above +(with a new line for each variable) or, for convenience, as bash exports like: + +.. code-block:: javascript + + export SERVICE_API_KEY=someLongTokenTHatYouProbablyHaveToPayTheThirdPartyProviderLoadsOfMoneyFor + +The ``validate_credentials()`` method of the ``Twine`` class checks for their presence and, where contained in a +``.env`` file, ensures they are loaded into the environment. diff --git a/docs/source/anatomy_manifest.rst b/docs/source/anatomy_manifest.rst new file mode 100644 index 0000000..ba98dd9 --- /dev/null +++ b/docs/source/anatomy_manifest.rst @@ -0,0 +1,202 @@ +.. _manifest_strands: + +====================== +Manifest-based Strands +====================== + +Frequently, twins operate on files containing some kind of data. These files need to be made accessible to the code +running in the twin, in order that their contents can be read and processed. Conversely, a twin might produce an output +dataset which must be understood by users. + +The ``configuration_manifest``, ``input_manifest`` and ``output_manifest`` strands describe what kind of datasets (and +associated files) are required / produced. + +.. NOTE:: + + Files are always contained in datasets, even if there's only one file. It's so that we can keep nitty-gritty file + metadata separate from the more meaningful, higher level metadata like what a dataset is for. + +.. tabs:: + + .. 
group-tab:: Configuration Manifest Strand + + This describes datasets/files that are required at startup of the twin / service. They typically contain a + resource that the twin might use across many analyses. + + For example, a twin might predict failure for a particular component, given an image. It will require a trained + ML model (saved in a ``*.pickle`` or ``*.json``). While many thousands of predictions might be done over the + period that the twin is deployed, all predictions are done using this version of the model - so the model file is + supplied at startup. + + .. group-tab:: Input Manifest Strand + + These files are made available for the twin to run a particular analysis with. Each analysis will likely have + different input datasets. + + For example, a twin might be passed a dataset of LiDAR ``*.scn`` files and be expected to compute atmospheric flow + properties as a timeseries (which might be returned in the :ref:`output values ` for onward + processing and storage). + + .. group-tab:: Output Manifest Strand + + Files are created by the twin during an analysis, tagged and stored as datasets for some onward purpose. + This strand is not used for sourcing data; it enables users or other services to understand appropriate search + terms to retrieve datasets produced. + + +.. _describing_manifests: + +Describing Manifests +==================== + +Manifest-based strands are a **description of what files are needed**, NOT a list of specific files or datasets. This is +a tricky concept, but important, since services should be reusable and applicable to a range of similar datasets. + +The purpose of the manifest strands is to provide a helper to a wider system providing datafiles to digital twins. + +The manifest strands therefore use **tagging** - they contain a ``filters`` field, which should be valid +`Apache Lucene `_ search syntax. 
This is a powerful syntax, whose tagging features allow +us to specify incredibly broad, or extremely narrow searches (even down to a known unique result). See the tabs below +for examples. + + +.. NOTE:: + + Tagging syntax is extremely powerful. Below, you'll see how this enables a digital twin to specify things like: + + *"OK, I need this digital twin to always have access to a model file for a particular system, containing trained model data"* + + *"Uh, so I need an ordered sequence of files, that are CSV files from a meteorological mast."* + + This allows **twined** to check that the input files contain what is needed, enables quick and easy + extraction of subgroups or particular sequences of files within a dataset, and enables management systems + to map candidate datasets to twins that might be used to process them. + + + +.. tabs:: + + .. group-tab:: Configuration Manifest Strand + + Here we construct an extremely tight filter, which connects this digital twin to a specific + system. + + .. accordion:: + + .. accordion-row:: Show twine containing this strand + + .. literalinclude:: ../../examples/damage_classifier_service/twine.json + :language: javascript + + .. accordion-row:: Show a matching file manifest + + .. literalinclude:: ../../examples/damage_classifier_service/data/configuration_manifest.json + :language: javascript + + + .. group-tab:: Input Manifest Strand + + Here we specify that two datasets (and all or some of the files associated with them) are + required, for a service that cross-checks meteorological mast data and power output data for a wind farm. + + .. accordion:: + + .. accordion-row:: Show twine containing this strand + + .. literalinclude:: ../../examples/met_mast_scada_service/strands/input_manifest_filters.json + :language: javascript + + .. accordion-row:: Show a matching file manifest + + .. literalinclude:: ../../examples/met_mast_scada_service/data/input_manifest.json + :language: javascript + + .. 
group-tab:: Output Manifest Strand + + Here we specify that two datasets (and all or some of the files associated with them) are + required, for a service that cross-checks meteorological mast data and power output data for a wind farm. + + .. accordion:: + + .. accordion-row:: Show twine containing this strand + + .. literalinclude:: ../../examples/met_mast_scada_service/strands/output_manifest_filters.json + :language: javascript + + .. accordion-row:: Show a matching file manifest + + .. literalinclude:: ../../examples/met_mast_scada_service/data/output_manifest.json + :language: javascript + +.. + + TODO - clean up or remove this section + + .. _how_filtering_works: + + How Filtering Works + =================== + + It's the job of **twined** to make sure of two things: + + 1. make sure the *twine* file itself is valid, + + + **File data (input, output)** + + Files are not streamed directly to the digital twin (this would require extreme bandwidth in whatever system is + orchestrating all the twins). Instead, files should be made available on the local storage system; i.e. a volume + mounted to whatever container or VM the digital twin runs in. + + Groups of files are described by a ``manifest``, where a manifest is (in essence) a catalogue of files in a + dataset. + + A digital twin might receive multiple manifests, if it uses multiple datasets. For example, it could use a 3D + point cloud LiDAR dataset, and a meteorological dataset. + + .. 
code-block:: javascript + + { + "manifests": [ + { + "type": "dataset", + "id": "3c15c2ba-6a32-87e0-11e9-3baa66a632fe", // UUID of the manifest + "files": [ + { + "id": "abff07bc-7c19-4ed5-be6d-a6546eae8e86", // UUID of that file + "sha1": "askjnkdfoisdnfkjnkjsnd" // for quality control to check correctness of file contents + "name": "Lidar - 4 to 10 Dec.csv", + "path": "local/file/path/to/folder/containing/it/", + "type": "csv", + "metadata": { + }, + "size_bytes": 59684813, + "tags": "lidar, helpful, information, like, sequence:1", // Searchable, parsable and filterable + }, + { + "id": "abff07bc-7c19-4ed5-be6d-a6546eae8e86", + "name": "Lidar - 11 to 18 Dec.csv", + "path": "local/file/path/to/folder/containing/it/", + "type": "csv", + "metadata": { + }, + "size_bytes": 59684813, + "tags": "lidar, helpful, information, like, sequence:2", // Searchable, parsable and filterable + }, + { + "id": "abff07bc-7c19-4ed5-be6d-a6546eae8e86", + "name": "Lidar report.pdf", + "path": "local/file/path/to/folder/containing/it/", + "type": "pdf", + "metadata": { + }, + "size_bytes": 484813, + "tags": "report", // Searchable, parsable and filterable + } + ] + }, + { + // ... another dataset manifest ... + } + ] + } diff --git a/docs/source/anatomy_monitors.rst b/docs/source/anatomy_monitors.rst new file mode 100644 index 0000000..8704877 --- /dev/null +++ b/docs/source/anatomy_monitors.rst @@ -0,0 +1,44 @@ +.. _monitors_strand: + +=============== +Monitors Strand +=============== + +The ``configuration_values_schema``, ``input_values_schema`` and ``output_values_schema`` strands are *values-based*, +meaning the data that matches these strands is in JSON form. + +Each of these strands is a *json schema* which describes that data. + +.. tabs:: + + .. group-tab:: Monitors Strand + + There are two kinds of monitoring data required from a digital twin. 
+ + **Monitor data (output)** + + Values for health and progress monitoring of the twin, for example percentage progress, iteration number and + status - perhaps even residuals graphs for a converging calculation. Broadly speaking, this should be user-facing + information. + + *This kind of monitoring data can be in a suitable form for display on a dashboard* + + **Log data (output)** + + Logged statements, typically in iostream form, produced by the twin (e.g. via python's ``logging`` module) must be + capturable as an output for debugging and monitoring purposes. Broadly speaking, this should be developer-facing + information. + + + +Let's look at basic examples for twines containing each of these strands: + + +.. tabs:: + + .. group-tab:: Monitors Strand + + **Monitor data (output)** + + **Log data (output)** + diff --git a/docs/source/anatomy_values.rst b/docs/source/anatomy_values.rst new file mode 100644 index 0000000..71c0724 --- /dev/null +++ b/docs/source/anatomy_values.rst @@ -0,0 +1,128 @@ +.. _values_based_strands: + +==================== +Values-based Strands +==================== + +The ``configuration_values_schema``, ``input_values_schema`` and ``output_values_schema`` strands are *values-based*, +meaning the data that matches these strands is in JSON form. + +Each of these strands is a *json schema* which describes that data. + +.. tabs:: + + .. group-tab:: Configuration Values Strand + + This strand is a ``configuration_values_schema``, that is used to check validity of any + ``configuration_values`` data supplied to the twin at startup. + + The Configuration Values Strand is generally used to define control parameters relating to what the twin should + do, or how it should operate. + + For example, should it produce output images as low resolution PNGs or as SVGs? How many iterations of a fluid + flow solver should be used? What is the acceptable error level on an classifier algorithm? + + .. 
group-tab:: Input Values Strand + + This strand is an ``input_values_schema``, that is used to check validity of ``input_values`` data supplied to the + twin at the beginning of an analysis task. + + The Input Values Strand is generally used to define actual data which will be processed by the twin. Sometimes, it + may be used to define control parameters specific to an analysis. + + For example, if a twin cleans and detects anomalies in a 10-minute timeseries of 1Hz data, the ``input_values`` + might contain an array of data and a list of corresponding timestamps. It may also contain a control parameter + specifying which algorithm is used to do the detection. + + .. NOTE:: + Depending on the way the twin is deployed (see :ref:`deployment`), the ``input_values`` might come in from a + web request, over a websocket or called directly from the command line or another library. + + However they come, if the new ``input_values`` validate against the ``input_values_schema`` strand, + then analysis can proceed. + + .. group-tab:: Output Values Strand + + This strand is an ``output_values_schema``, that is used to check results (``output_values``) computed during an + analysis. This ensures that the application wrapped up within the *twine* is operating correctly, and + enables other twins/services or the end users to see what outputs they will get. + + For example, if a twin cleans and detects anomalies in a 10-minute timeseries of 1Hz data, the ``output_values`` + might contain an array of data interpolated onto regular timestamps, with missing values filled in and a list of + warnings where anomalies were found. + + +Let's look at basic examples for twines containing each of these strands: + + +.. tabs:: + + .. group-tab:: Configuration Values Strand + + This *twine* contains an example ``configuration_values_schema`` with one control parameter. + + `Many more detailed and specialised examples are available in the GitHub repository `_ + + ..
code-block:: javascript + + { + "configuration_values_schema": { + "title": "The example configuration form", + "description": "The Configuration Values Strand of an example twine", + "type": "object", + "properties": { + "n_iterations": { + "description": "An example of an integer configuration variable, called 'n_iterations'.", + "type": "integer", + "minimum": 1, + "maximum": 10, + "default": 5 + } + } + } + } + + Matching ``configuration_values`` data could look like this: + + .. code-block:: javascript + + { + "n_iterations": 8, + } + + + .. group-tab:: Input Values Strand + + This *twine* contains an example ``input_values_schema`` with one input value, which is marked as required. + + Many more detailed and specialised examples are available in :ref:`examples`. + + .. code-block:: javascript + + { + "input_values_schema": { + "title": "Input Values", + "description": "The input values strand of an example twine, with a required height value", + "type": "object", + "properties": { + "height": { + "description": "An example of an integer value called 'height'", + "type": "integer", + "minimum": 2 + } + }, + "required": ["height"] + }, + + Matching ``input_values`` data could look like this: + + .. code-block:: javascript + + { + "height": 13, + } + + + .. group-tab:: Output Values Strand + + Stuff \ No newline at end of file diff --git a/docs/source/index.rst b/docs/source/index.rst index f35c0a4..4e68c89 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -4,20 +4,11 @@ `issues board `_ and `roadmap `_. -.. image:: https://codecov.io/gh/octue/twined/branch/master/graph/badge.svg - :target: https://codecov.io/gh/octue/twined - :alt: Code coverage - :align: right -.. image:: https://readthedocs.org/projects/twined/badge/?version=latest - :target: https://twined.readthedocs.io/en/latest/?badge=latest - :alt: Documentation Status - :align: right - ====== Twined ====== -**twined** is a library to help :ref:`digital_twins` talk to one another.
+**twined** is a library to help create and connect :ref:`digital_twins` and data services. .. epigraph:: *"Twined" [t-why-nd] ~ encircled, twisted together, interwoven* @@ -26,10 +17,11 @@ A digital twin is a virtual representation of a real life being - a physical ass a human. Like real things, digital twins need to interact, so can be connected together, but need a common communication framework to do so. -**twined** helps you to define a single file, a "twine", that defines a digital twin, specifying its data -interfaces, connections to other twins, and other requirements. +**twined** helps you to define a single file, a "twine", that defines a digital twin / data service. It specifies +its data interfaces, connections to other twins, and other requirements. -Any person, or any computer, can read a twine and understand *what-goes-in* and *what-comes-out*. +Any person, or any computer, can read a twine and understand *what-goes-in* and *what-comes-out*. That makes it easy to +collaborate with other teams, since everybody is crystal clear about what's needed. .. figure:: images/digital_twin_hierarchy.svg :width: 350px @@ -37,7 +29,7 @@ Any person, or any computer, can read a twine and understand *what-goes-in* and :figclass: align-center :alt: Hierarchy of digital twins - Digital twins connected in a hierarchy. Each blue circle represents a twin, coupled to its neighbours. + Digital twins / data services connected in a hierarchy. Each blue circle represents a twin, coupled to its neighbours. Yellow nodes are where schema are used to connect twins. @@ -49,13 +41,13 @@ Aims **twined** provides a toolkit to help create and validate "twines" - descriptions of a digital twin, what data it requires, what it does and how it works.
-The goals of **twined** are as follows: - - Provide a clear framework for what a digital twin schema can and/or must contain - - Provide functions to validate incoming data against a known schema - - Provide functions to check that a schema itself is valid - - Provide (or direct you to) tools to create schema describing what you require +The goals of this **twined** library are as follows: + - Provide a clear framework for what a *twine* can and/or must contain + - Provide functions to validate incoming data against a known *twine* + - Provide functions to check that a *twine* itself is valid + - Provide (or direct you to) tools to create *twines* describing what you require -In :ref:`schema`, we describe the different parts of a twine (examining how digital twins connect and interact... +In :ref:`anatomy`, we describe the different parts of a twine (examining how digital twins connect and interact... building them together in hierarchies and networks). But you may prefer to dive straight in with the :ref:`quick_start` guide. @@ -78,6 +70,10 @@ it is necessary for everyone to be on the same page - the :ref:`gemini_principle which is why we've released this part of our technology stack as open source, to support those principles and help develop a wider ecosystem. +The main goal is to **help engineers and scientists focus on doing engineering and science** - instead of apis, data +cleaning/management, and all this cloud-pipeline-devops-test-ci-ml BS that takes up 90% of a scientist's time, when they +should be spending their valuable time researching migratory patterns of birds, or cell structures, or wind turbine +performance, or whatever excites them. .. 
_uses: @@ -86,6 +82,7 @@ Uses At `Octue `_, **twined** is used as a core part of our application creation process: + * As a format to communicate requirements to our partners in research projects * As a tool to validate incoming data to digital twins * As a framework to help establish schema when designing digital twins * As a source of information on digital twins in our network, to help map and connect twins together @@ -97,28 +94,13 @@ Please note, this is not a "general help" forum; we recommend Stack Overflow for issues or for help designing digital twin schema, Octue is able to provide application support services for those building digital twins using **twined**. - -.. _life_choices: - -Life Choices -============ - -**twined** is presently released in python only. It won't be too hard to replicate functionality in other languages, and -we're considering other languages at present, so might be easily persuadable ;) - -If you require implementation of **twined** in a different language, -and are willing to consider sponsorship of development and maintenance of that library, please -`get in touch `_. - - .. 
toctree:: :maxdepth: 2 self - installation quick_start - examples - digital_twins - schema + anatomy + about + deployment license version_history From e7e04f82f5219924816b935ff2fa251a53389324 Mon Sep 17 00:00:00 2001 From: Tom Clark Date: Sat, 15 Aug 2020 18:21:59 +0100 Subject: [PATCH 12/14] FIX Tidyup of accordion --- docs/source/_ext/sphinx_accordion/accordion.py | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/source/_ext/sphinx_accordion/accordion.py b/docs/source/_ext/sphinx_accordion/accordion.py index e3b670f..cada02d 100644 --- a/docs/source/_ext/sphinx_accordion/accordion.py +++ b/docs/source/_ext/sphinx_accordion/accordion.py @@ -10,6 +10,7 @@ from sphinx.util.osutil import copyfile from sphinx.util import logging + FILES = [ 'semantic-ui-2.4.2/accordion.css', 'semantic-ui-2.4.2/accordion.js', From 9ffb7424c69e9b6f20a98061bf52f02cac0e3f2f Mon Sep 17 00:00:00 2001 From: Tom Clark Date: Sat, 15 Aug 2020 18:22:32 +0100 Subject: [PATCH 13/14] REF Removed old docs that got removed into different sections --- docs/source/digital_twins.rst | 57 ---- docs/source/installation.rst | 41 --- docs/source/schema.rst | 333 -------------------- docs/source/schema_introducing_json.rst | 133 -------- docs/source/schema_other_considerations.rst | 88 ------ 5 files changed, 652 deletions(-) delete mode 100644 docs/source/digital_twins.rst delete mode 100644 docs/source/installation.rst delete mode 100644 docs/source/schema.rst delete mode 100644 docs/source/schema_introducing_json.rst delete mode 100644 docs/source/schema_other_considerations.rst diff --git a/docs/source/digital_twins.rst b/docs/source/digital_twins.rst deleted file mode 100644 index 999a3a9..0000000 --- a/docs/source/digital_twins.rst +++ /dev/null @@ -1,57 +0,0 @@ -.. _digital_twins: - -============= -Digital Twins -============= - -A digital twin is a virtual representation of a real life being - a physical asset like a wind turbine or car - or even -a human. 
- -There are three reasons why you might want to create a digital twin: - - Monitoring - - Prediction - - Optimisation - -On its own, a digital twin can be quite useful. For example, a twin might embody an AI-based analysis to predict power -output of a turbine. - -.. figure:: images/digital_twin_component_basic.svg - :width: 400px - :align: center - :figclass: align-center - :alt: A digital twin component - - A digital twin consists of some kind of analysis or processing task, which could be run many times per second, or - daily, down to occasionally or sometimes only once (the same as a "normal" analysis). - -Coupling digital twins is generally even more useful. You might wish to couple your turbine twin with a representation -of the local power grid, and a representation of a factory building to determine power demand... enabling you to -optimise your factory plant for lowest energy cost whilst intelligently selling surplus power to the grid. - -.. figure:: images/digital_twin_hierarchy.svg - :width: 350px - :align: center - :figclass: align-center - :alt: Hierarchy of digital twins - - A hierarchy of digital twins. Each blue circle represents a twin, coupled to its neighbours. Yellow nodes are where - schema are used to connect twins. - - -.. _gemini_principles: - -Gemini Principles -================= - -The Gemini Principles have been derived by the -`Centre for Digital Built Britain (CDBB) `_. -We strongly recommend you give them a read if embarking on a digital twins project. - -The aim of **twined** is to enable the following principles. In particular: - -#. Openness (open-source project to create schema for twins that can be run anywhere, anywhen) -#. Federation (encouraging a standardised way of connecting twins together) -#. Security (making sure schemas and data can be read safely) -#. 
Public Good (see our nano-rant about climate change in :ref:`reason_for_being`) - - diff --git a/docs/source/installation.rst b/docs/source/installation.rst deleted file mode 100644 index 0cbc136..0000000 --- a/docs/source/installation.rst +++ /dev/null @@ -1,41 +0,0 @@ -.. _installation: - -============ -Installation -============ - -**twined** is available on `pypi `_, so installation into your python virtual environment is dead -simple: - -.. code-block:: py - - pip install twined - -Don't have a virtual environment with pip? You probably should! ``pyenv`` is your friend. Google it. - - -.. _compilation: - -Compilation -============ - -There is presently no need to compile **twined**, as it's written entirely in python. - - -.. _third_party_library_installation: - -Third party library installation -================================ - -**twined** is for python >= 3.6 so expects that. Other dependencies can be checked in ``setup.py``, and will -automatically installed during the installation above. - - -.. _third_party_build_requirements: - -Third party build requirements -============================== - -.. ATTENTION:: - Woohoo! There are no crazy dependencies that you have to compile and build for your particular system. - (you know the ones... they never *actually* compile, right?). We aim to keep it this way. diff --git a/docs/source/schema.rst b/docs/source/schema.rst deleted file mode 100644 index 98af867..0000000 --- a/docs/source/schema.rst +++ /dev/null @@ -1,333 +0,0 @@ -.. _schema: - -===================== -About Twines (Schema) -===================== - -The core of **twined** is to provide and use schemas for digital twins. - -Below, we set out requirements and a framework for creating a *schema* to represent a digital twin. -We call these schema "twines". To just get started building a **twine**, check out the :ref:`quick_start`. - - -.. 
_requirements: - -Requirements of digital twin schema -=================================== - -A *schema* defines a digital twin, and has multiple roles. It: - -#. Defines what data is required by a digital twin, in order to run -#. Defines what data will be returned by the twin following a successful run -#. Defines the formats of these data, in such a way that incoming data can be validated - -If this weren't enough, the schema: - -#. Must be trustable (i.e. a schema from an untrusted, corrupt or malicious third party should be safe to at least read) -#. Must be machine-readable *and machine-understandable* [1]_ -#. Must be human-readable *and human-understandable* [1]_ -#. Must be searchable/indexable - -Fortunately for digital twin developers, many of these requirements have already been seen for data interchange formats -developed for the web. **twined** uses ``JSON`` and ``JSONSchema`` to interchange data between digital twins. - -If you're not already familiar with ``JSONSchema`` (or wish to know why **twined** uses ``JSON`` over the seemingly more -appropriate ``XML`` standard), see :ref:`introducing_json_schema`. - -.. toctree:: - :maxdepth: 0 - :hidden: - - schema_introducing_json - - -.. _data_framework: - -Data framework -============== - -We cannot simply expect many developers to create digital twins with some schema, then to be able to connect them all -together - even if those schema are all fully valid (*readable*). **twined** makes things slightly more specific. - -**twined** has an opinionated view on how incoming data is organised. This results in a top-level schema that is -extremely prescriptive (*understandable*), allowing digital twins to be introspected and connected. - - -.. _data_types: - -Data types ----------- - -Let us review the classes of data i/o undertaken a digital twin: - -.. tabs:: - - .. group-tab:: Config - - **Configuration data (input)** - - Control parameters relating to what the twin should do, or how it should operate. 
For example, should a twin produce - output images as low resolution PNGs or as SVGs? How many iterations of a fluid flow solver should be used? What is - the acceptable error level on an classifier algorithm? - - *These values should always have defaults.* - - .. group-tab:: Values - - **Value data (input, output)** - - Raw values passed directly to/from a twin. For example current rotor speed, or forecast wind direction. - - Values might be passed at instantiation of a twin (typical application-like process) or via a socket. - - *These values should never have defaults.* - - .. group-tab:: Files - - **File data (input, output)** - - Twins frequently operate on file content - eg files on disc or objects in a cloud data store. For example, - groups of ``.csv`` files can contain data to train a machine learning algorithm. There are four subclasses of file i/o - that may be undertaken by digital twins: - - #. Input file (read) - eg to read input data from a csv file - #. Temporary file (read-write, disposable) - eg to save intermediate results to disk, reducing memory use - #. Cache file (read-write, persistent) - eg to save a trained classifier for later use in prediction - #. Output file (write) - eg to write postprocessed csv data ready for the next twin, or save generated images etc. - - .. group-tab:: External - - **External service data (input, output)** - - A digital twin might: - - GET/POST data from/to an external API, - - query/update a database. - - Such data exchange may not be controllable by **twined** (which is intended to operate at the boundaries of the - twin) unless the resulting data is returned from the twin and must therefore be schema-compliant. - - .. group-tab:: Credentials - - **Credentials (input)** - - In order to: - - GET/POST data from/to an API, - - query a database, or - - connect to a socket (for receiving Values or emitting Values, Monitors or Logs) - - a digital twin must have *access* to it. 
API keys, database URIs, etc must be supplied to the digital twin but - treated with best practice with respect to security considerations. - - *Credentials should never be hard-coded into application code, always passed in* - - .. group-tab:: Monitors/Logs - - There are two kinds of monitoring data required from a digital twin. - - **Monitor data (output)** - - Values for health and progress monitoring of the twin, for example percentage progress, iteration number and - status - perhaps even residuals graphs for a converging calculation. Broadly speaking, this should be user-facing - information. - - *This kind of monitoring data can be in a suitable form for display on a dashboard* - - **Log data (output)** - - Logged statements, typically in iostream form, produced by the twin (e.g. via python's ``logging`` module) must be - capturable as an output for debugging and monitoring purposes. Broadly speaking, this should be developer-facing - information. - - -.. _data_descriptions: - -Data descriptions ------------------ - -Here, we describe how each of these data classes is described by **twined**. - - -.. tabs:: - - .. group-tab:: Config - - **Configuration data** - - Configuration data is supplied as a simple object, which of course can be nested (although we don't encourage deep - nesting). The following is a totally hypothetical configuration... - - .. code-block:: javascript - - { - "max_iterations": 0, - "compute_vectors": True, - "cache_mode": "extended", - "initial_conditions": { - "intensity": 0.0, - "direction", 0.0 - } - } - - .. group-tab:: Values - - **Value data (input, output)** - - For Values data, a twin will accept and/or respond with raw JSON (this could originate over a socket, be read from - a file or API depending exactly on the twin) containing variables of importance: - - .. code-block:: javascript - - { - "rotor_speed": 13.2, - "wind_direction": 179.4 - } - - .. 
group-tab:: Files - - **File data (input, output)** - - Files are not streamed directly to the digital twin (this would require extreme bandwidth in whatever system is - orchestrating all the twins). Instead, files should be made available on the local storage system; i.e. a volume - mounted to whatever container or VM the digital twin runs in. - - Groups of files are described by a ``manifest``, where a manifest is (in essence) a catalogue of files in a - dataset. - - A digital twin might receive multiple manifests, if it uses multiple datasets. For example, it could use a 3D - point cloud LiDAR dataset, and a meteorological dataset. - - .. code-block:: javascript - - { - "manifests": [ - { - "type": "dataset", - "id": "3c15c2ba-6a32-87e0-11e9-3baa66a632fe", // UUID of the manifest - "files": [ - { - "id": "abff07bc-7c19-4ed5-be6d-a6546eae8e86", // UUID of that file - "sha1": "askjnkdfoisdnfkjnkjsnd" // for quality control to check correctness of file contents - "name": "Lidar - 4 to 10 Dec.csv", - "path": "local/file/path/to/folder/containing/it/", - "type": "csv", - "metadata": { - }, - "size_bytes": 59684813, - "tags": "lidar, helpful, information, like, sequence:1", // Searchable, parsable and filterable - }, - { - "id": "abff07bc-7c19-4ed5-be6d-a6546eae8e86", - "name": "Lidar - 11 to 18 Dec.csv", - "path": "local/file/path/to/folder/containing/it/", - "type": "csv", - "metadata": { - }, - "size_bytes": 59684813, - "tags": "lidar, helpful, information, like, sequence:2", // Searchable, parsable and filterable - }, - { - "id": "abff07bc-7c19-4ed5-be6d-a6546eae8e86", - "name": "Lidar report.pdf", - "path": "local/file/path/to/folder/containing/it/", - "type": "pdf", - "metadata": { - }, - "size_bytes": 484813, - "tags": "report", // Searchable, parsable and filterable - } - ] - }, - { - // ... another dataset manifest ... - } - ] - } - - .. NOTE:: - - Tagging syntax is extremely powerful. 
Below, you'll see how this enables a digital twin to specify things like: - - *"Uh, so I need an ordered sequence of files, that are CSV files, and are tagged as lidar."* - - This allows **twined** to check that the input files contain what is needed, enables quick and easy - extraction of subgroups or particular sequences of files within a dataset, and enables management systems - to map candidate datasets to twins that might be used to process them. - - - .. group-tab:: External - - **External service data (input, output)** - - There's nothing for **twined** to do here! - - If the purpose of the twin (and this is a common scenario!) is simply - to fetch data from some service then return it as values from the twin, that's perfect. But its - the twin developer's job to do the fetchin', not ours ;) - - However, fetching from your API or database might require some credentials. See the following tab for help with - that. - - .. group-tab:: Credentials - - **Credentials (input)** - - Credentials should be securely managed by whatever system is managing the twin, then made accessible to the twin - in the form of environment variables: - - .. code-block:: javascript - - SERVICE_API_KEY=someLongTokenTHatYouProbablyHaveToPayTheThirdPartyProviderLoadsOfMoneyFor - - Credentials may also reside in a ``.env`` file in the current directory, either in the format above - (with a new line for each variable) or, for convenience, as bash exports like: - - .. code-block:: javascript - - export SERVICE_API_KEY=someLongTokenTHatYouProbablyHaveToPayTheThirdPartyProviderLoadsOfMoneyFor - - The ``validate_credentials()`` method of the ``Twine class checks for their presence and, where contained in a - ``.env`` file, ensures they are loaded into the environment. - - .. ATTENTION:: - - Do you trust the twin code? 
If you insert credentials to your own database into a digital twin - provided by a third party, you better be very sure that twin isn't going to scrape all that data out then send - it elsewhere! - - Alternatively, if you're building a twin requiring such credentials, it's your responsibility to give the end - users confidence that you're not abusing their access. - - There'll be a lot more discussion on these issues, but it's outside the scope of **twined** - all we do here is - make sure a twin has the credentials it requires. - - .. group-tab:: Monitors/Logs - - **Monitor data (output)** - - **Log data (output)** - - -.. ATTENTION:: - *What's the difference between Configuration and Values data? Isn't it the same?* - - No. Configuration data is supplied to a twin to initialise it, and always has defaults. Values data is ingested by a - twin, maybe at startup but maybe also later (if the twin is working like a live server). In complex cases, which - Values are required may also depend on the Configuration of the twin! - - Values data can also be returned from a twin whereas configuration data is not. - - Don't get hung up on this yet - in simple (most) cases, they are effectively the same. For a twin which is run as a - straightforward analysis, both the Configuration and Values are processed at startup. - - - -.. Footnotes: - -.. [1] *Understandable* essentially means that, once read, the machine or human knows what it actually means and what to do with it. - - -.. toctree:: - :maxdepth: 0 - :hidden: - - schema_other_considerations diff --git a/docs/source/schema_introducing_json.rst b/docs/source/schema_introducing_json.rst deleted file mode 100644 index 8f20858..0000000 --- a/docs/source/schema_introducing_json.rst +++ /dev/null @@ -1,133 +0,0 @@ -.. 
_introducing_json_schema: - -======================= -Introducing JSON Schema -======================= - -``JSON`` is a data interchange format that has rapidly taken over as the defacto web-based data communication standard -in recent years. - -``JSONSchema`` is a way of specifying what a ``JSON`` document should contain. The Schema are, themselves, written in -``JSON``! - -Whilst schema can become extremely complicated in some scenarios, they are best designed to be quite succinct. See below -for the schema (and matching ``JSON``) for an integer and a string variable. - -**JSON:** - -.. code-block:: json - - { - "id": 1, - "name": "Tom" - } - - -**Schema:** - -.. code-block:: json - - { - "type": "object", - "title": "An id number and a name", - "properties": { - "id": { - "type": "integer", - "title": "An integer number", - "default": 0 - }, - "name": { - "type": "string", - "title": "A string name", - "default": "" - } - } - } - - -.. _useful_resources: - -Useful resources -================ -.. list-table:: - :widths: auto - :header-rows: 1 - - * - Link - - Resource - * - https://jsonschema.net/ - - Useful web tool for inferring schema from existing json - * - https://jsoneditoronline.org - - A powerful online editor for json, allowing manipulation of large documents better than most text editors - * - https://www.json.org/ - - The JSON standard spec - * - https://json-schema.org/ - - The (draft standard) JSONSchema spec - * - https://rjsf-team.github.io/react-jsonschema-form/ - - A front end library for generating webforms directly from a schema - - -.. _human_readbility: - -Human readability -================= - -Back in our :ref:`requirements` section, we noted it was important for humans to read and understand schema. - -The actual documents themselves are pretty easy to read by technical users. But, for non technical users, readability can be -enhanced even further by the ability to turn ``JSONSchema`` into web forms automatically. 
For our example above, we can -autogenerate a web form straight from the schema: - -.. figure:: images/schema_form_example.png - :width: 500px - :align: center - :figclass: align-center - :alt: Web form from an example schema - - Web form generated from the example schema above. - -Thus, we can take a schema (or a part of a schema) and use it to generate a control form for a digital twin in a web -interface without writing a separate form component - great for ease and maintainability. - - -.. _why_not_xml: - -Why not XML? -============ - -In a truly excellent `three-part blog `_, writer Seva Savris takes us -through the ups and downs of ``JSON`` versus ``XML``; well worth a read if wishing to understand the respective technologies -better. - -In short, both ``JSON`` and ``XML`` are generalised data interchange specifications and can both can do what we want here. -We choose ``JSON`` because: - -#. Textual representation is much more concise and easy to understand (very important where non-developers like - engineers and scientists must be expected to interpret schema) - -#. `Attack vectors `_. Because entities in ``XML`` - are not necessarily primitives (unlike in ``JSON``), an ``XML`` document parser in its default state may leave a system - open to XXE injection attacks and DTD validation attacks, and therefore requires hardening. ``JSON`` documents are - similarly afflicated (just like any kind of serialized data) but default parsers, operating on the premise of only - deserializing to primitive types, are safe by default - it is only when nondefault parsering or deserialization - techniques (such as ``JSONP``) are used that the application becomes vulnerable. By utilising a default ``JSON`` parser - we can therefore significantly shrink the attack surface of the system. See - `this blog post `_ for further discussion. - -#. ``XML`` is powerful... perhaps too powerful. 
The standard can be adapted greatly, resulting in high encapsulation - and a high resilience to future unknowns. Both beneficial. However, this requires developers of twins to maintain - interfaces of very high complexity, adaptable to a much wider variety of input. To enable developers to progress, we - suggest handling changes and future unknowns through well-considered versioning, whilst keeping their API simple. - -#. ``XML`` allows baked-in validation of data and attributes. Whilst advantageous in some situations, this is not a - benefit here. We wish validation to be one-sided: validation of data accepted/generated by a digital twin should be - occur within (at) the boundaries of that twin. - -#. Required validation capabilities, built into ``XML`` are achievable with ``JSONSchema`` (otherwise missing from the - pure ``JSON`` standard) - -#. ``JSON`` is a more compact expression than XML, significantly reducing memory and bandwidth requirements. Whilst - not a major issue for most modern PCS, sensors on the edge may have limited memory, and both memory and bandwidth at - scale are extremely expensive. Thus for extremely large networks of interconnected systems there could be significant - speed and cost savings. - diff --git a/docs/source/schema_other_considerations.rst b/docs/source/schema_other_considerations.rst deleted file mode 100644 index b07aeed..0000000 --- a/docs/source/schema_other_considerations.rst +++ /dev/null @@ -1,88 +0,0 @@ -.. _other_considerations: - -==================== -Other Considerations -==================== - -A variety of thoughts that arose whilst architecting **twined**. - -.. _bash_style_stdio: - -Bash-style stdio ----------------- - -Some thought was given to using a very old-school-unix approach to piping data between twins, via stdout. 
- -Whilst attractive (as being a wildly fast way of piping data between twins on the same machine) it was felt this -was insufficiently general, eg: - - - where twins don't exist on the same machine or container, making it cumbersome to engineer common iostreams - - where slight differences between different shells might lead to incompatibilities or changes in behaviour - -And also unfriendly, eg: - - - engineers or scientists unfamiliar with subtleties of bash shell scripting encounter difficulty piping data around - - difficult to build friendly web based tools to introspect the data and configuration - - bound to be headaches on windows platforms, even though windows now supports bash - - easy to corrupt using third party libraries (e.g. which print to stdout) - - -.. _Units: - -Units ------ - -Being used (mostly) for engineering and scientific analysis, it was tempting to add in a specified sub-schema for units. -For example, mandating that where values can be given in units, they be specified in a certain way, like: - -.. code-block:: javascript - - { - "wind_speed": { - "value": 10.2, - "units": "mph" - } - } - -or (more succinct): - -.. code-block:: javascript - - { - "wind_speed": 10.2, - "wind_speed_units": "mph" - } - -It's still extremely tempting to provide this facility; or at least provide some way of specifying in the schema -what units a value should be provided in. Thinking about it but don't have time right now. -If anybody wants to start crafting a PR with an extension or update to **twined** that facilitates this; please raise an -issue to start progressing it. - - -.. _variable_style: - -Variable Style --------------- - -A pre-emptive stamp on the whinging... - -Note that in the ``JSON`` descriptions above, all variables are named in ``snake_case`` rather than ``camelCase``. 
This -decision, more likely than even Brexit to divide opinions, is based on: - - The reservation of snake case for the schema spec has the subtle advantage that in future, we might be able to use - camelCase within the spec to denote class types in some useful way, just like in python. Not sure yet; just mulling. - - The :ref:`requirements` mention human-readability as a must; - `this paper `_ - suggests a 20% slower comprehension of camel case than snake. - - The languages we anticipate being most popular for building twins seem to trend toward snake case (eg - `python `_, `c++ `_) - although to be fair we might've woefully misjudged which languages start emerging. - - We're starting in Python so are taking a lead from PEP8, which is bar none the most successful style guide on the - planet, because it got everybody on the same page really early on. - -If existing code that you're dropping in uses camelCase, please don't file that as an issue... converting property -names automatically after schema validation generation is trivial, there are tons of libraries (like -`humps `_) to do it. - -We'd also consider a pull request for a built-in utility converting `to `_ and -`from <>`_ that does this following validation and prior to returning results. Suggest your proposed approach on the -issues board. 
From bb7b9eadabf30a5551a9fac86dfdc2e9ff3e631a Mon Sep 17 00:00:00 2001 From: Tom Clark Date: Sat, 15 Aug 2020 18:23:46 +0100 Subject: [PATCH 14/14] DOC Added examples which are literally included in the docs and able to be tested --- .../data/configuration_manifest.json | 26 +++++++++++++++++++ examples/damage_classifier_service/twine.json | 14 ++++++++++ .../data/input_manifest.json | 26 +++++++++++++++++++ .../data/output_manifest.json | 26 +++++++++++++++++++ .../strands/input_manifest_filters.json | 20 ++++++++++++++ .../strands/output_manifest_filters.json | 12 +++++++++ 6 files changed, 124 insertions(+) create mode 100644 examples/damage_classifier_service/data/configuration_manifest.json create mode 100644 examples/damage_classifier_service/twine.json create mode 100644 examples/met_mast_scada_service/data/input_manifest.json create mode 100644 examples/met_mast_scada_service/data/output_manifest.json create mode 100644 examples/met_mast_scada_service/strands/input_manifest_filters.json create mode 100644 examples/met_mast_scada_service/strands/output_manifest_filters.json diff --git a/examples/damage_classifier_service/data/configuration_manifest.json b/examples/damage_classifier_service/data/configuration_manifest.json new file mode 100644 index 0000000..8acf4c2 --- /dev/null +++ b/examples/damage_classifier_service/data/configuration_manifest.json @@ -0,0 +1,26 @@ +{ + "id": "8ead7669-8162-4f64-8cd5-4abe92509e17", + "datasets": [ + { + "id": "7ead7669-8162-4f64-8cd5-4abe92509e17", + "name": "training data for system abc123", + "organisation": "megacorp", + "tags": "classifier, damage, system:abc123", + "files": [ + { + "path": "datasets/7ead7669/blade_damage.mdl", + "cluster": 0, + "sequence": 0, + "extension": "csv", + "tags": "", + "posix_timestamp": 0, + "id": "abff07bc-7c19-4ed5-be6d-a6546eae8e86", + "last_modified": "2019-02-28T22:40:30.533005Z", + "name": "blade_damage.mdl", + "size_bytes": 59684813, + "sha-512/256": "somesha" + } + ] + } + ] +} 
diff --git a/examples/damage_classifier_service/twine.json b/examples/damage_classifier_service/twine.json new file mode 100644 index 0000000..e6e5f52 --- /dev/null +++ b/examples/damage_classifier_service/twine.json @@ -0,0 +1,14 @@ +{ + // Manifest strands contain lists, with one entry for each required dataset + "configuration_manifest_filters": [ + { + // Once the inputs are validated, your analysis program can use this key to access the dataset + "key": "trained_model", + // General notes, which are helpful as a reminder to users of the service + "purpose": "The trained classifier", + // Issues a strict search for data provided by megacorp, containing *.mdl files tagged as + // classifiers for blade damage on system abc123 + "filters": "organisation: megacorp AND tags:(classifier AND damage AND system:abc123) AND files:(extension:mdl)" + } + ], +} \ No newline at end of file diff --git a/examples/met_mast_scada_service/data/input_manifest.json b/examples/met_mast_scada_service/data/input_manifest.json new file mode 100644 index 0000000..8acf4c2 --- /dev/null +++ b/examples/met_mast_scada_service/data/input_manifest.json @@ -0,0 +1,26 @@ +{ + "id": "8ead7669-8162-4f64-8cd5-4abe92509e17", + "datasets": [ + { + "id": "7ead7669-8162-4f64-8cd5-4abe92509e17", + "name": "training data for system abc123", + "organisation": "megacorp", + "tags": "classifier, damage, system:abc123", + "files": [ + { + "path": "datasets/7ead7669/blade_damage.mdl", + "cluster": 0, + "sequence": 0, + "extension": "csv", + "tags": "", + "posix_timestamp": 0, + "id": "abff07bc-7c19-4ed5-be6d-a6546eae8e86", + "last_modified": "2019-02-28T22:40:30.533005Z", + "name": "blade_damage.mdl", + "size_bytes": 59684813, + "sha-512/256": "somesha" + } + ] + } + ] +} diff --git a/examples/met_mast_scada_service/data/output_manifest.json b/examples/met_mast_scada_service/data/output_manifest.json new file mode 100644 index 0000000..8acf4c2 --- /dev/null +++ 
b/examples/met_mast_scada_service/data/output_manifest.json @@ -0,0 +1,26 @@ +{ + "id": "8ead7669-8162-4f64-8cd5-4abe92509e17", + "datasets": [ + { + "id": "7ead7669-8162-4f64-8cd5-4abe92509e17", + "name": "training data for system abc123", + "organisation": "megacorp", + "tags": "classifier, damage, system:abc123", + "files": [ + { + "path": "datasets/7ead7669/blade_damage.mdl", + "cluster": 0, + "sequence": 0, + "extension": "csv", + "tags": "", + "posix_timestamp": 0, + "id": "abff07bc-7c19-4ed5-be6d-a6546eae8e86", + "last_modified": "2019-02-28T22:40:30.533005Z", + "name": "blade_damage.mdl", + "size_bytes": 59684813, + "sha-512/256": "somesha" + } + ] + } + ] +} diff --git a/examples/met_mast_scada_service/strands/input_manifest_filters.json b/examples/met_mast_scada_service/strands/input_manifest_filters.json new file mode 100644 index 0000000..fed513b --- /dev/null +++ b/examples/met_mast_scada_service/strands/input_manifest_filters.json @@ -0,0 +1,20 @@ +{ + // Manifest strands contain lists, with one entry for each required dataset + "input_manifest_filters": [ + { + // Once the inputs are validated, your analysis program can use this key to access the dataset + "key": "met_mast_data", + // General notes, which are helpful as a reminder to users of the service + "purpose": "A dataset containing meteorological mast data", + // Searches datasets which are tagged "met*" (allowing for "met" and "meterological"), whose + // files are CSVs in a numbered sequence, and which occur at a particular location + "filters": "tags:(met* AND mast) AND files:(extension:csv AND sequence:>=0) AND location:10" + }, + { + "key": "scada_data", + "purpose": "A dataset containing scada data", + // The organisation: filter refines search to datasets owned by a particular organisation handle + "filters": "organisation: megacorp AND tags:(scada AND mast) AND files:(extension:csv AND sequence:>=0)" + } + ], +} \ No newline at end of file diff --git 
a/examples/met_mast_scada_service/strands/output_manifest_filters.json b/examples/met_mast_scada_service/strands/output_manifest_filters.json new file mode 100644 index 0000000..43b7a08 --- /dev/null +++ b/examples/met_mast_scada_service/strands/output_manifest_filters.json @@ -0,0 +1,12 @@ +{ + "output_manifest_filters": [ + { + // Twined will prepare a manifest with this key, which you can add to during the analysis or once it's complete + "key": "met_scada_checks", + // General notes, which are helpful as a reminder to users of the service + "purpose": "A dataset containing figures (in json format) showing correlations between mast and scada data", + // Twined will check that the output file manifest has tags appropriate to the filters + "filters": "tags:(met* AND scada AND correlation) AND files:(extension:json) AND location:10" + } + ] +} \ No newline at end of file