diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 7762cda..3ab5747 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,5 +1,4 @@ exclude: 'build|docs|node_modules|.git|.tox|dist|docs|octue.egg-info|twined.egg-info' -include: 'twined' default_stages: [commit] fail_fast: true default_language_version: @@ -33,7 +32,23 @@ repos: language_version: python3 - repo: https://github.com/thclark/pre-commit-sphinx - rev: 0.0.1 + rev: master hooks: - id: build-docs language_version: python3 + + - repo: https://github.com/windpioneers/pre-commit-hooks + rev: 0.0.5 + hooks: + - id: check-branch-name + args: + - '^master$' + - '^development$' + - '^devops/([a-z][a-z0-9]*)(-[a-z0-9]+)*$' + - '^doc/([a-z][a-z0-9]*)(-[a-z0-9]+)*$' + - '^experiment/([a-z][a-z0-9]*)(-[a-z0-9]+)*$' + - '^feature/([a-z][a-z0-9]*)(-[a-z0-9]+)*$' + - '^fix/([a-z][a-z0-9]*)(-[a-z0-9]+)*$' + - '^hotfix/([a-z][a-z0-9]*)(-[a-z0-9]+)*$' + - '^review/([a-z][a-z0-9]*)(-[a-z0-9]+)*$' + - '^release/(?P0|[1-9]\d*)\.(?P0|[1-9]\d*)\.(?P0|[1-9]\d*)(?:-(?P(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+(?P[0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$' diff --git a/docs/source/_ext/googleanalytics.py b/docs/source/_ext/googleanalytics.py new file mode 100644 index 0000000..891df1b --- /dev/null +++ b/docs/source/_ext/googleanalytics.py @@ -0,0 +1,29 @@ +from sphinx.errors import ExtensionError + + +def add_ga_javascript(app, pagename, templatename, context, doctree): + if app.config.googleanalytics_enabled: + id = app.config.googleanalytics_id + metatags = context.get('metatags', '') + metatags += "\n" + metatags += f'\n' + metatags += "\n" + context['metatags'] = metatags + + +def check_config(app): + if not app.config.googleanalytics_id: + raise ExtensionError("'googleanalytics_id' config value must be set for ga statistics to function properly.") + + +def setup(app): + app.add_config_value('googleanalytics_id', '', 'html') + 
app.add_config_value('googleanalytics_enabled', True, 'html') + app.connect('html-page-context', add_ga_javascript) + app.connect('builder-inited', check_config) + return {'version': '0.1'} diff --git a/docs/source/_ext/sphinx_accordion/README.md b/docs/source/_ext/sphinx_accordion/README.md new file mode 100644 index 0000000..a4fc72c --- /dev/null +++ b/docs/source/_ext/sphinx_accordion/README.md @@ -0,0 +1,21 @@ + + +``` +extensions = [ + ... + 'sphinx_accordion.accordion' + ... +] +``` + +``` +.. accordion:: + + .. accordion-row:: The Title + + The Contents + + .. accordion-row:: The Second Title + + The Contents 2 +``` diff --git a/docs/source/_ext/sphinx_accordion/__init__.py b/docs/source/_ext/sphinx_accordion/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/docs/source/_ext/sphinx_accordion/accordion.css b/docs/source/_ext/sphinx_accordion/accordion.css new file mode 100644 index 0000000..861fb88 --- /dev/null +++ b/docs/source/_ext/sphinx_accordion/accordion.css @@ -0,0 +1,11 @@ +.sphinx-accordion.accordion { + margin-bottom: 1.75em; +} + +.sphinx-accordion.title p { + display: inline-block; + margin-top: 8px; + margin-right: 0px; + margin-bottom: 8px; + margin-left: 0px; +} diff --git a/docs/source/_ext/sphinx_accordion/accordion.js b/docs/source/_ext/sphinx_accordion/accordion.js new file mode 100644 index 0000000..580c322 --- /dev/null +++ b/docs/source/_ext/sphinx_accordion/accordion.js @@ -0,0 +1,46 @@ +// if (!String.prototype.startsWith) { +// Object.defineProperty(String.prototype, 'startsWith', { +// value: function(search, pos) { +// pos = !pos || pos < 0 ? 
0 : +pos; +// return this.substring(pos, pos + search.length) === search; +// } +// }); +// } + +$(document).ready(function(){console.log('FFS')}); + +$(function() { + console.log('SOMETHING HAPPENS MAYBE'); + + // We store the data-row values as sphinx-data- + // Add data-row attribute with the extracted value + $('.sphinx-accordion.title').each(function() { + const this1 = $(this); + const prefix = 'sphinx-accordion-title-'; + const classes = this1.attr('class').split(/\s+/); + $.each(classes, function(idx, clazz) { + if (clazz.startsWith(prefix)) { + this1.attr('data-row', clazz.substring(prefix.length)); + } + }); + + const data_row = this1.attr('data-row'); + + this1.on('click', function() { + // Find offset in view + const offset = (this1.offset().top - $(window).scrollTop()); + + // Toggle active class on this subsequent sibling + if (this1.hasClass('active')) { + this1.removeClass('active'); + this1.next().removeClass('active'); + } else { + this1.addClass('active'); + this1.next().addClass('active'); + } + + // Keep tab with the original view offset + $(window).scrollTop(this1.offset().top - offset); + }); + }); +}); \ No newline at end of file diff --git a/docs/source/_ext/sphinx_accordion/accordion.py b/docs/source/_ext/sphinx_accordion/accordion.py new file mode 100644 index 0000000..cada02d --- /dev/null +++ b/docs/source/_ext/sphinx_accordion/accordion.py @@ -0,0 +1,257 @@ +""" Accordion dropdown for Sphinx, with HTML builder """ + +import json +import posixpath +import os +from docutils import nodes +from docutils.parsers.rst import Directive +from pkg_resources import resource_filename +from pygments.lexers import get_all_lexers +from sphinx.util.osutil import copyfile +from sphinx.util import logging + + +FILES = [ + 'semantic-ui-2.4.2/accordion.css', + 'semantic-ui-2.4.2/accordion.js', + 'accordion.css', + 'accordion.js', +] + + +LEXER_MAP = {} +for lexer in get_all_lexers(): + for short_name in lexer[1]: + LEXER_MAP[short_name] = lexer[0] + + 
+def get_compatible_builders(app): + builders = [ + 'html', + 'singlehtml', + 'dirhtml', + 'readthedocs', + 'readthedocsdirhtml', + 'readthedocssinglehtml', + 'readthedocssinglehtmllocalmedia', + 'spelling' + ] + builders.extend(app.config['sphinx_tabs_valid_builders']) + return builders + + +class AccordionDirective(Directive): + """ Top-level accordion directive """ + + has_content = True + + def run(self): + """ Parse an accordion directive """ + self.assert_has_content() + env = self.state.document.settings.env + + node = nodes.container() + node['classes'] = ['sphinx-accordion', 'ui', 'styled', 'fluid', 'accordion'] + + if 'next_accordion_id' not in env.temp_data: + env.temp_data['next_accordion_id'] = 0 + if 'accordion_stack' not in env.temp_data: + env.temp_data['accordion_stack'] = [] + + accordion_id = env.temp_data['next_accordion_id'] + accordion_key = 'accordion_%d' % accordion_id + env.temp_data['next_accordion_id'] += 1 + env.temp_data['accordion_stack'].append(accordion_id) + + env.temp_data[accordion_key] = {} + env.temp_data[accordion_key]['row_ids'] = [] + env.temp_data[accordion_key]['row_titles'] = [] + env.temp_data[accordion_key]['is_first_row'] = True + + self.state.nested_parse(self.content, self.content_offset, node) + + if env.app.builder.name in get_compatible_builders(env.app): + title_nodes = [] + row_ids = env.temp_data[accordion_key]['row_ids'] + row_titles = env.temp_data[accordion_key]['row_titles'] + for idx, [data_row, row_name] in enumerate(row_titles): + title_node = nodes.container() + title_node.tagname = 'div' + title_node['classes'] = ['sphinx-accordion', 'title'] + title_node['classes'].append(f'sphinx-accordion-title-{accordion_id}-{row_ids[idx]}') + title_node += row_name.children + icon_node = nodes.inline() + icon_node.tagname = 'i' + icon_node['classes'] = ['dropdown', 'icon'] + # Access the first child, we don't want the container that somehow gets generated + title_node.children.insert(0, icon_node) + 
title_nodes.append(title_node) + + node.children = [child for pair in zip(title_nodes, node.children) for child in pair] + + env.temp_data['accordion_stack'].pop() + return [node] + + +class AccordionRowDirective(Directive): + """ AccordionRow directive, for adding a row to an accordion """ + + has_content = True + + def run(self): + """ Parse a row directive """ + self.assert_has_content() + env = self.state.document.settings.env + + accordion_id = env.temp_data['accordion_stack'][-1] + accordion_key = 'accordion_%d' % accordion_id + + args = self.content[0].strip() + if args.startswith('{'): + try: + args = json.loads(args) + self.content.trim_start(1) + except ValueError: + args = {} + else: + args = {} + + row_name = nodes.container() + self.state.nested_parse(self.content[:1], self.content_offset, row_name) + args['row_name'] = row_name + + include_accordion_id_in_data_row = False + if 'row_id' not in args: + args['row_id'] = env.new_serialno(accordion_key) + include_accordion_id_in_data_row = True + i = 1 + while args['row_id'] in env.temp_data[accordion_key]['row_ids']: + args['row_id'] = '%s-%d' % (args['row_id'], i) + i += 1 + env.temp_data[accordion_key]['row_ids'].append(args['row_id']) + + data_row = str(args['row_id']) + if include_accordion_id_in_data_row: + data_row = '%d-%s' % (accordion_id, data_row) + data_row = "sphinx-accordion-content-{}".format(data_row) + + env.temp_data[accordion_key]['row_titles'].append( + (data_row, args['row_name']) + ) + + text = '\n'.join(self.content) + node = nodes.container(text) + classes = 'sphinx-accordion content' + node['classes'] = classes.split(' ') + node['classes'].extend(args.get('classes', [])) + node['classes'].append(data_row) + + self.state.nested_parse(self.content[2:], self.content_offset, node) + + if env.app.builder.name not in get_compatible_builders(env.app): + outer_node = nodes.container() + row = nodes.container() + row.tagname = 'a' + row['classes'] = ['item'] + row += row_name + 
outer_node.append(row) + outer_node.append(node) + return [outer_node] + + return [node] + + +class _FindAccordionDirectiveVisitor(nodes.NodeVisitor): + """ Visitor pattern than looks for a sphinx accordion directive in a document """ + def __init__(self, document): + nodes.NodeVisitor.__init__(self, document) + self._found = False + + def unknown_visit(self, node): + if not self._found and isinstance(node, nodes.container) and 'classes' in node and isinstance(node['classes'], list): + self._found = 'sphinx-accordion' in node['classes'] + + @property + def found_accordion_directive(self): + """ Return whether a sphinx accordion directive was found """ + return self._found + + +def update_context(app, pagename, templatename, context, doctree): + """ Remove sphinx-accordion CSS and JS asset files if not used in a page """ + if doctree is None: + return + visitor = _FindAccordionDirectiveVisitor(doctree) + doctree.walk(visitor) + if not visitor.found_accordion_directive: + paths = [posixpath.join('_static', 'sphinx_accordion/' + f) for f in FILES] + if 'css_files' in context: + context['css_files'] = context['css_files'][:] + for path in paths: + if path.endswith('.css') and path in context['css_files']: + context['css_files'].remove(path) + if 'script_files' in context: + context['script_files'] = context['script_files'][:] + for path in paths: + if path.endswith('.js') and path in context['script_files']: + context['script_files'].remove(path) + + +def copy_assets(app, exception): + """ Copy asset files to the output """ + if 'getLogger' in dir(logging): + log = logging.getLogger(__name__).info + warn = logging.getLogger(__name__).warning + else: + log = app.info + warn = app.warning + builders = get_compatible_builders(app) + if exception: + return + if app.builder.name not in builders: + if not app.config['sphinx_accordion_nowarn']: + warn( + 'Not copying accordion assets! 
Not compatible with %s builder' % + app.builder.name) + return + + log('Copying accordion assets') + + installdir = os.path.join(app.builder.outdir, '_static', 'sphinx_accordion') + + for path in FILES: + source = resource_filename('sphinx_accordion', path) + dest = os.path.join(installdir, path) + destdir = os.path.dirname(dest) + if not os.path.exists(destdir): + os.makedirs(destdir) + + copyfile(source, dest) + + +def setup(app): + """ Set up the plugin """ + app.add_config_value('sphinx_accordion_nowarn', False, '') + app.add_config_value('sphinx_accordion_valid_builders', [], '') + app.add_directive('accordion', AccordionDirective) + app.add_directive('accordion-row', AccordionRowDirective) + + for path in ['sphinx_accordion/' + f for f in FILES]: + if path.endswith('.css'): + if 'add_css_file' in dir(app): + app.add_css_file(path) + else: + app.add_stylesheet(path) + if path.endswith('.js'): + if 'add_script_file' in dir(app): + app.add_script_file(path) + else: + app.add_javascript(path) + + app.connect('html-page-context', update_context) + app.connect('build-finished', copy_assets) + + return { + 'parallel_read_safe': True, + 'parallel_write_safe': True, + } diff --git a/docs/source/_ext/sphinx_accordion/semantic-ui-2.4.2/.versions b/docs/source/_ext/sphinx_accordion/semantic-ui-2.4.2/.versions new file mode 100755 index 0000000..01b3d82 --- /dev/null +++ b/docs/source/_ext/sphinx_accordion/semantic-ui-2.4.2/.versions @@ -0,0 +1,3 @@ +meteor@1.1.6 +semantic:ui-accordion@2.1.3 +underscore@1.0.3 diff --git a/docs/source/_ext/sphinx_accordion/semantic-ui-2.4.2/accordion.css b/docs/source/_ext/sphinx_accordion/semantic-ui-2.4.2/accordion.css new file mode 100755 index 0000000..e9b104d --- /dev/null +++ b/docs/source/_ext/sphinx_accordion/semantic-ui-2.4.2/accordion.css @@ -0,0 +1,253 @@ +/*! 
+ * # Semantic UI 2.4.1 - Accordion + * http://github.com/semantic-org/semantic-ui/ + * + * + * Released under the MIT license + * http://opensource.org/licenses/MIT + * + */ + + +/******************************* + Accordion +*******************************/ + +.ui.accordion, +.ui.accordion .accordion { + max-width: 100%; +} +.ui.accordion .accordion { + margin: 1em 0em 0em; + padding: 0em; +} + +/* Title */ +.ui.accordion .title, +.ui.accordion .accordion .title { + cursor: pointer; +} + +/* Default Styling */ +.ui.accordion .title:not(.ui) { + padding: 0.5em 0em; + font-family: 'Lato', 'Helvetica Neue', Arial, Helvetica, sans-serif; + font-size: 1em; + color: rgba(0, 0, 0, 0.87); +} + +/* Content */ +.ui.accordion .title ~ .content, +.ui.accordion .accordion .title ~ .content { + display: none; +} + +/* Default Styling */ +.ui.accordion:not(.styled) .title ~ .content:not(.ui), +.ui.accordion:not(.styled) .accordion .title ~ .content:not(.ui) { + margin: ''; + padding: 0.5em 0em 1em; +} +.ui.accordion:not(.styled) .title ~ .content:not(.ui):last-child { + padding-bottom: 0em; +} + +/* Arrow */ +.ui.accordion .title .dropdown.icon, +.ui.accordion .accordion .title .dropdown.icon { + display: inline-block; + float: none; + opacity: 1; + width: 1.25em; + height: 1em; + margin: 0em 0.25rem 0em 0rem; + padding: 0em; + font-size: 1em; + -webkit-transition: opacity 0.1s ease, -webkit-transform 0.1s ease; + transition: opacity 0.1s ease, -webkit-transform 0.1s ease; + transition: transform 0.1s ease, opacity 0.1s ease; + transition: transform 0.1s ease, opacity 0.1s ease, -webkit-transform 0.1s ease; + vertical-align: baseline; + -webkit-transform: none; + transform: none; +} + +/*-------------- + Coupling +---------------*/ + + +/* Menu */ +.ui.accordion.menu .item .title { + display: block; + padding: 0em; +} +.ui.accordion.menu .item .title > .dropdown.icon { + float: right; + margin: 0.21425em 0em 0em 1em; + -webkit-transform: rotate(180deg); + transform: 
rotate(180deg); +} + +/* Header */ +.ui.accordion .ui.header .dropdown.icon { + font-size: 1em; + margin: 0em 0.25rem 0em 0rem; +} + + +/******************************* + States +*******************************/ + +.ui.accordion .active.title .dropdown.icon, +.ui.accordion .accordion .active.title .dropdown.icon { + -webkit-transform: rotate(90deg); + transform: rotate(90deg); +} +.ui.accordion.menu .item .active.title > .dropdown.icon { + -webkit-transform: rotate(90deg); + transform: rotate(90deg); +} + + +/******************************* + Types +*******************************/ + + +/*-------------- + Styled +---------------*/ + +.ui.styled.accordion { + width: 600px; +} +.ui.styled.accordion, +.ui.styled.accordion .accordion { + border-radius: 0.28571429rem; + background: #FFFFFF; + -webkit-box-shadow: 0px 1px 2px 0 rgba(34, 36, 38, 0.15), 0px 0px 0px 1px rgba(34, 36, 38, 0.15); + box-shadow: 0px 1px 2px 0 rgba(34, 36, 38, 0.15), 0px 0px 0px 1px rgba(34, 36, 38, 0.15); +} +.ui.styled.accordion .title, +.ui.styled.accordion .accordion .title { + margin: 0em; + padding: 0.75em 1em; + color: rgba(0, 0, 0, 0.4); + font-weight: bold; + border-top: 1px solid rgba(34, 36, 38, 0.15); + -webkit-transition: background 0.1s ease, color 0.1s ease; + transition: background 0.1s ease, color 0.1s ease; +} +.ui.styled.accordion > .title:first-child, +.ui.styled.accordion .accordion .title:first-child { + border-top: none; +} + +/* Content */ +.ui.styled.accordion .content, +.ui.styled.accordion .accordion .content { + margin: 0em; + padding: 0.5em 1em 1.5em; +} +.ui.styled.accordion .accordion .content { + padding: 0em; + padding: 0.5em 1em 1.5em; +} + +/* Hover */ +.ui.styled.accordion .title:hover, +.ui.styled.accordion .active.title, +.ui.styled.accordion .accordion .title:hover, +.ui.styled.accordion .accordion .active.title { + background: transparent; + color: rgba(0, 0, 0, 0.87); +} +.ui.styled.accordion .accordion .title:hover, +.ui.styled.accordion .accordion 
.active.title { + background: transparent; + color: rgba(0, 0, 0, 0.87); +} + +/* Active */ +.ui.styled.accordion .active.title { + background: transparent; + color: rgba(0, 0, 0, 0.95); +} +.ui.styled.accordion .accordion .active.title { + background: transparent; + color: rgba(0, 0, 0, 0.95); +} + + +/******************************* + States +*******************************/ + + +/*-------------- + Active +---------------*/ + +.ui.accordion .active.content, +.ui.accordion .accordion .active.content { + display: block; +} + + +/******************************* + Variations +*******************************/ + + +/*-------------- + Fluid +---------------*/ + +.ui.fluid.accordion, +.ui.fluid.accordion .accordion { + width: 100%; +} + +/*-------------- + Inverted +---------------*/ + +.ui.inverted.accordion .title:not(.ui) { + color: rgba(255, 255, 255, 0.9); +} + + +/******************************* + Theme Overrides +*******************************/ + +@font-face { + font-family: 'Accordion'; + src: 
url(data:application/x-font-ttf;charset=utf-8;base64,AAEAAAALAIAAAwAwT1MvMggjB5AAAAC8AAAAYGNtYXAPfOIKAAABHAAAAExnYXNwAAAAEAAAAWgAAAAIZ2x5Zryj6HgAAAFwAAAAyGhlYWT/0IhHAAACOAAAADZoaGVhApkB5wAAAnAAAAAkaG10eAJuABIAAAKUAAAAGGxvY2EAjABWAAACrAAAAA5tYXhwAAgAFgAAArwAAAAgbmFtZfC1n04AAALcAAABPHBvc3QAAwAAAAAEGAAAACAAAwIAAZAABQAAAUwBZgAAAEcBTAFmAAAA9QAZAIQAAAAAAAAAAAAAAAAAAAABEAAAAAAAAAAAAAAAAAAAAABAAADw2gHg/+D/4AHgACAAAAABAAAAAAAAAAAAAAAgAAAAAAACAAAAAwAAABQAAwABAAAAFAAEADgAAAAKAAgAAgACAAEAIPDa//3//wAAAAAAIPDZ//3//wAB/+MPKwADAAEAAAAAAAAAAAAAAAEAAf//AA8AAQAAAAAAAAAAAAIAADc5AQAAAAABAAAAAAAAAAAAAgAANzkBAAAAAAEAAAAAAAAAAAACAAA3OQEAAAAAAQASAEkAtwFuABMAADc0PwE2FzYXFh0BFAcGJwYvASY1EgaABQgHBQYGBQcIBYAG2wcGfwcBAQcECf8IBAcBAQd/BgYAAAAAAQAAAEkApQFuABMAADcRNDc2MzIfARYVFA8BBiMiJyY1AAUGBwgFgAYGgAUIBwYFWwEACAUGBoAFCAcFgAYGBQcAAAABAAAAAQAAqWYls18PPPUACwIAAAAAAM/9o+4AAAAAz/2j7gAAAAAAtwFuAAAACAACAAAAAAAAAAEAAAHg/+AAAAIAAAAAAAC3AAEAAAAAAAAAAAAAAAAAAAAGAAAAAAAAAAAAAAAAAQAAAAC3ABIAtwAAAAAAAAAKABQAHgBCAGQAAAABAAAABgAUAAEAAAAAAAIAAAAAAAAAAAAAAAAAAAAAAAAADgCuAAEAAAAAAAEADAAAAAEAAAAAAAIADgBAAAEAAAAAAAMADAAiAAEAAAAAAAQADABOAAEAAAAAAAUAFgAMAAEAAAAAAAYABgAuAAEAAAAAAAoANABaAAMAAQQJAAEADAAAAAMAAQQJAAIADgBAAAMAAQQJAAMADAAiAAMAAQQJAAQADABOAAMAAQQJAAUAFgAMAAMAAQQJAAYADAA0AAMAAQQJAAoANABaAHIAYQB0AGkAbgBnAFYAZQByAHMAaQBvAG4AIAAxAC4AMAByAGEAdABpAG4AZ3JhdGluZwByAGEAdABpAG4AZwBSAGUAZwB1AGwAYQByAHIAYQB0AGkAbgBnAEYAbwBuAHQAIABnAGUAbgBlAHIAYQB0AGUAZAAgAGIAeQAgAEkAYwBvAE0AbwBvAG4ALgADAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA) format('truetype'), 
url(data:application/font-woff;charset=utf-8;base64,d09GRk9UVE8AAASwAAoAAAAABGgAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAABDRkYgAAAA9AAAAS0AAAEtFpovuE9TLzIAAAIkAAAAYAAAAGAIIweQY21hcAAAAoQAAABMAAAATA984gpnYXNwAAAC0AAAAAgAAAAIAAAAEGhlYWQAAALYAAAANgAAADb/0IhHaGhlYQAAAxAAAAAkAAAAJAKZAedobXR4AAADNAAAABgAAAAYAm4AEm1heHAAAANMAAAABgAAAAYABlAAbmFtZQAAA1QAAAE8AAABPPC1n05wb3N0AAAEkAAAACAAAAAgAAMAAAEABAQAAQEBB3JhdGluZwABAgABADr4HAL4GwP4GAQeCgAZU/+Lix4KABlT/4uLDAeLa/iU+HQFHQAAAHkPHQAAAH4RHQAAAAkdAAABJBIABwEBBw0PERQZHnJhdGluZ3JhdGluZ3UwdTF1MjB1RjBEOXVGMERBAAACAYkABAAGAQEEBwoNVp38lA78lA78lA77lA773Z33bxWLkI2Qj44I9xT3FAWOj5CNkIuQi4+JjoePiI2Gi4YIi/uUBYuGiYeHiIiHh4mGi4aLho2Ijwj7FPcUBYeOiY+LkAgO+92L5hWL95QFi5CNkI6Oj4+PjZCLkIuQiY6HCPcU+xQFj4iNhouGi4aJh4eICPsU+xQFiIeGiYaLhouHjYePiI6Jj4uQCA74lBT4lBWLDAoAAAAAAwIAAZAABQAAAUwBZgAAAEcBTAFmAAAA9QAZAIQAAAAAAAAAAAAAAAAAAAABEAAAAAAAAAAAAAAAAAAAAABAAADw2gHg/+D/4AHgACAAAAABAAAAAAAAAAAAAAAgAAAAAAACAAAAAwAAABQAAwABAAAAFAAEADgAAAAKAAgAAgACAAEAIPDa//3//wAAAAAAIPDZ//3//wAB/+MPKwADAAEAAAAAAAAAAAAAAAEAAf//AA8AAQAAAAEAADfYOJZfDzz1AAsCAAAAAADP/aPuAAAAAM/9o+4AAAAAALcBbgAAAAgAAgAAAAAAAAABAAAB4P/gAAACAAAAAAAAtwABAAAAAAAAAAAAAAAAAAAABgAAAAAAAAAAAAAAAAEAAAAAtwASALcAAAAAUAAABgAAAAAADgCuAAEAAAAAAAEADAAAAAEAAAAAAAIADgBAAAEAAAAAAAMADAAiAAEAAAAAAAQADABOAAEAAAAAAAUAFgAMAAEAAAAAAAYABgAuAAEAAAAAAAoANABaAAMAAQQJAAEADAAAAAMAAQQJAAIADgBAAAMAAQQJAAMADAAiAAMAAQQJAAQADABOAAMAAQQJAAUAFgAMAAMAAQQJAAYADAA0AAMAAQQJAAoANABaAHIAYQB0AGkAbgBnAFYAZQByAHMAaQBvAG4AIAAxAC4AMAByAGEAdABpAG4AZ3JhdGluZwByAGEAdABpAG4AZwBSAGUAZwB1AGwAYQByAHIAYQB0AGkAbgBnAEYAbwBuAHQAIABnAGUAbgBlAHIAYQB0AGUAZAAgAGIAeQAgAEkAYwBvAE0AbwBvAG4ALgADAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA) format('woff'); + font-weight: normal; + font-style: normal; +} + +/* Dropdown Icon */ +.ui.accordion .title .dropdown.icon, +.ui.accordion .accordion .title .dropdown.icon { + font-family: Accordion; + line-height: 1; + -webkit-backface-visibility: hidden; + backface-visibility: hidden; + font-weight: normal; + font-style: normal; + 
text-align: center; +} +.ui.accordion .title .dropdown.icon:before, +.ui.accordion .accordion .title .dropdown.icon:before { + content: '\f0da' /*rtl:'\f0d9'*/; +} + + +/******************************* + User Overrides +*******************************/ + diff --git a/docs/source/_ext/sphinx_accordion/semantic-ui-2.4.2/accordion.js b/docs/source/_ext/sphinx_accordion/semantic-ui-2.4.2/accordion.js new file mode 100755 index 0000000..94e8830 --- /dev/null +++ b/docs/source/_ext/sphinx_accordion/semantic-ui-2.4.2/accordion.js @@ -0,0 +1,613 @@ +/*! + * # Semantic UI 2.4.1 - Accordion + * http://github.com/semantic-org/semantic-ui/ + * + * + * Released under the MIT license + * http://opensource.org/licenses/MIT + * + */ + +;(function ($, window, document, undefined) { + +'use strict'; + +window = (typeof window != 'undefined' && window.Math == Math) + ? window + : (typeof self != 'undefined' && self.Math == Math) + ? self + : Function('return this')() +; + +$.fn.accordion = function(parameters) { + var + $allModules = $(this), + + time = new Date().getTime(), + performance = [], + + query = arguments[0], + methodInvoked = (typeof query == 'string'), + queryArguments = [].slice.call(arguments, 1), + + requestAnimationFrame = window.requestAnimationFrame + || window.mozRequestAnimationFrame + || window.webkitRequestAnimationFrame + || window.msRequestAnimationFrame + || function(callback) { setTimeout(callback, 0); }, + + returnedValue + ; + $allModules + .each(function() { + var + settings = ( $.isPlainObject(parameters) ) + ? $.extend(true, {}, $.fn.accordion.settings, parameters) + : $.extend({}, $.fn.accordion.settings), + + className = settings.className, + namespace = settings.namespace, + selector = settings.selector, + error = settings.error, + + eventNamespace = '.' 
+ namespace, + moduleNamespace = 'module-' + namespace, + moduleSelector = $allModules.selector || '', + + $module = $(this), + $title = $module.find(selector.title), + $content = $module.find(selector.content), + + element = this, + instance = $module.data(moduleNamespace), + observer, + module + ; + + module = { + + initialize: function() { + module.debug('Initializing', $module); + module.bind.events(); + if(settings.observeChanges) { + module.observeChanges(); + } + module.instantiate(); + }, + + instantiate: function() { + instance = module; + $module + .data(moduleNamespace, module) + ; + }, + + destroy: function() { + module.debug('Destroying previous instance', $module); + $module + .off(eventNamespace) + .removeData(moduleNamespace) + ; + }, + + refresh: function() { + $title = $module.find(selector.title); + $content = $module.find(selector.content); + }, + + observeChanges: function() { + if('MutationObserver' in window) { + observer = new MutationObserver(function(mutations) { + module.debug('DOM tree modified, updating selector cache'); + module.refresh(); + }); + observer.observe(element, { + childList : true, + subtree : true + }); + module.debug('Setting up mutation observer', observer); + } + }, + + bind: { + events: function() { + module.debug('Binding delegated events'); + $module + .on(settings.on + eventNamespace, selector.trigger, module.event.click) + ; + } + }, + + event: { + click: function() { + module.toggle.call(this); + } + }, + + toggle: function(query) { + var + $activeTitle = (query !== undefined) + ? (typeof query === 'number') + ? 
$title.eq(query) + : $(query).closest(selector.title) + : $(this).closest(selector.title), + $activeContent = $activeTitle.next($content), + isAnimating = $activeContent.hasClass(className.animating), + isActive = $activeContent.hasClass(className.active), + isOpen = (isActive && !isAnimating), + isOpening = (!isActive && isAnimating) + ; + module.debug('Toggling visibility of content', $activeTitle); + if(isOpen || isOpening) { + if(settings.collapsible) { + module.close.call($activeTitle); + } + else { + module.debug('Cannot close accordion content collapsing is disabled'); + } + } + else { + module.open.call($activeTitle); + } + }, + + open: function(query) { + var + $activeTitle = (query !== undefined) + ? (typeof query === 'number') + ? $title.eq(query) + : $(query).closest(selector.title) + : $(this).closest(selector.title), + $activeContent = $activeTitle.next($content), + isAnimating = $activeContent.hasClass(className.animating), + isActive = $activeContent.hasClass(className.active), + isOpen = (isActive || isAnimating) + ; + if(isOpen) { + module.debug('Accordion already open, skipping', $activeContent); + return; + } + module.debug('Opening accordion content', $activeTitle); + settings.onOpening.call($activeContent); + settings.onChanging.call($activeContent); + if(settings.exclusive) { + module.closeOthers.call($activeTitle); + } + $activeTitle + .addClass(className.active) + ; + $activeContent + .stop(true, true) + .addClass(className.animating) + ; + if(settings.animateChildren) { + if($.fn.transition !== undefined && $module.transition('is supported')) { + $activeContent + .children() + .transition({ + animation : 'fade in', + queue : false, + useFailSafe : true, + debug : settings.debug, + verbose : settings.verbose, + duration : settings.duration + }) + ; + } + else { + $activeContent + .children() + .stop(true, true) + .animate({ + opacity: 1 + }, settings.duration, module.resetOpacity) + ; + } + } + $activeContent + .slideDown(settings.duration, 
settings.easing, function() { + $activeContent + .removeClass(className.animating) + .addClass(className.active) + ; + module.reset.display.call(this); + settings.onOpen.call(this); + settings.onChange.call(this); + }) + ; + }, + + close: function(query) { + var + $activeTitle = (query !== undefined) + ? (typeof query === 'number') + ? $title.eq(query) + : $(query).closest(selector.title) + : $(this).closest(selector.title), + $activeContent = $activeTitle.next($content), + isAnimating = $activeContent.hasClass(className.animating), + isActive = $activeContent.hasClass(className.active), + isOpening = (!isActive && isAnimating), + isClosing = (isActive && isAnimating) + ; + if((isActive || isOpening) && !isClosing) { + module.debug('Closing accordion content', $activeContent); + settings.onClosing.call($activeContent); + settings.onChanging.call($activeContent); + $activeTitle + .removeClass(className.active) + ; + $activeContent + .stop(true, true) + .addClass(className.animating) + ; + if(settings.animateChildren) { + if($.fn.transition !== undefined && $module.transition('is supported')) { + $activeContent + .children() + .transition({ + animation : 'fade out', + queue : false, + useFailSafe : true, + debug : settings.debug, + verbose : settings.verbose, + duration : settings.duration + }) + ; + } + else { + $activeContent + .children() + .stop(true, true) + .animate({ + opacity: 0 + }, settings.duration, module.resetOpacity) + ; + } + } + $activeContent + .slideUp(settings.duration, settings.easing, function() { + $activeContent + .removeClass(className.animating) + .removeClass(className.active) + ; + module.reset.display.call(this); + settings.onClose.call(this); + settings.onChange.call(this); + }) + ; + } + }, + + closeOthers: function(index) { + var + $activeTitle = (index !== undefined) + ? 
$title.eq(index) + : $(this).closest(selector.title), + $parentTitles = $activeTitle.parents(selector.content).prev(selector.title), + $activeAccordion = $activeTitle.closest(selector.accordion), + activeSelector = selector.title + '.' + className.active + ':visible', + activeContent = selector.content + '.' + className.active + ':visible', + $openTitles, + $nestedTitles, + $openContents + ; + if(settings.closeNested) { + $openTitles = $activeAccordion.find(activeSelector).not($parentTitles); + $openContents = $openTitles.next($content); + } + else { + $openTitles = $activeAccordion.find(activeSelector).not($parentTitles); + $nestedTitles = $activeAccordion.find(activeContent).find(activeSelector).not($parentTitles); + $openTitles = $openTitles.not($nestedTitles); + $openContents = $openTitles.next($content); + } + if( ($openTitles.length > 0) ) { + module.debug('Exclusive enabled, closing other content', $openTitles); + $openTitles + .removeClass(className.active) + ; + $openContents + .removeClass(className.animating) + .stop(true, true) + ; + if(settings.animateChildren) { + if($.fn.transition !== undefined && $module.transition('is supported')) { + $openContents + .children() + .transition({ + animation : 'fade out', + useFailSafe : true, + debug : settings.debug, + verbose : settings.verbose, + duration : settings.duration + }) + ; + } + else { + $openContents + .children() + .stop(true, true) + .animate({ + opacity: 0 + }, settings.duration, module.resetOpacity) + ; + } + } + $openContents + .slideUp(settings.duration , settings.easing, function() { + $(this).removeClass(className.active); + module.reset.display.call(this); + }) + ; + } + }, + + reset: { + + display: function() { + module.verbose('Removing inline display from element', this); + $(this).css('display', ''); + if( $(this).attr('style') === '') { + $(this) + .attr('style', '') + .removeAttr('style') + ; + } + }, + + opacity: function() { + module.verbose('Removing inline opacity from element', 
this); + $(this).css('opacity', ''); + if( $(this).attr('style') === '') { + $(this) + .attr('style', '') + .removeAttr('style') + ; + } + }, + + }, + + setting: function(name, value) { + module.debug('Changing setting', name, value); + if( $.isPlainObject(name) ) { + $.extend(true, settings, name); + } + else if(value !== undefined) { + if($.isPlainObject(settings[name])) { + $.extend(true, settings[name], value); + } + else { + settings[name] = value; + } + } + else { + return settings[name]; + } + }, + internal: function(name, value) { + module.debug('Changing internal', name, value); + if(value !== undefined) { + if( $.isPlainObject(name) ) { + $.extend(true, module, name); + } + else { + module[name] = value; + } + } + else { + return module[name]; + } + }, + debug: function() { + if(!settings.silent && settings.debug) { + if(settings.performance) { + module.performance.log(arguments); + } + else { + module.debug = Function.prototype.bind.call(console.info, console, settings.name + ':'); + module.debug.apply(console, arguments); + } + } + }, + verbose: function() { + if(!settings.silent && settings.verbose && settings.debug) { + if(settings.performance) { + module.performance.log(arguments); + } + else { + module.verbose = Function.prototype.bind.call(console.info, console, settings.name + ':'); + module.verbose.apply(console, arguments); + } + } + }, + error: function() { + if(!settings.silent) { + module.error = Function.prototype.bind.call(console.error, console, settings.name + ':'); + module.error.apply(console, arguments); + } + }, + performance: { + log: function(message) { + var + currentTime, + executionTime, + previousTime + ; + if(settings.performance) { + currentTime = new Date().getTime(); + previousTime = time || currentTime; + executionTime = currentTime - previousTime; + time = currentTime; + performance.push({ + 'Name' : message[0], + 'Arguments' : [].slice.call(message, 1) || '', + 'Element' : element, + 'Execution Time' : executionTime + }); 
+ } + clearTimeout(module.performance.timer); + module.performance.timer = setTimeout(module.performance.display, 500); + }, + display: function() { + var + title = settings.name + ':', + totalTime = 0 + ; + time = false; + clearTimeout(module.performance.timer); + $.each(performance, function(index, data) { + totalTime += data['Execution Time']; + }); + title += ' ' + totalTime + 'ms'; + if(moduleSelector) { + title += ' \'' + moduleSelector + '\''; + } + if( (console.group !== undefined || console.table !== undefined) && performance.length > 0) { + console.groupCollapsed(title); + if(console.table) { + console.table(performance); + } + else { + $.each(performance, function(index, data) { + console.log(data['Name'] + ': ' + data['Execution Time']+'ms'); + }); + } + console.groupEnd(); + } + performance = []; + } + }, + invoke: function(query, passedArguments, context) { + var + object = instance, + maxDepth, + found, + response + ; + passedArguments = passedArguments || queryArguments; + context = element || context; + if(typeof query == 'string' && object !== undefined) { + query = query.split(/[\. ]/); + maxDepth = query.length - 1; + $.each(query, function(depth, value) { + var camelCaseValue = (depth != maxDepth) + ? 
value + query[depth + 1].charAt(0).toUpperCase() + query[depth + 1].slice(1) + : query + ; + if( $.isPlainObject( object[camelCaseValue] ) && (depth != maxDepth) ) { + object = object[camelCaseValue]; + } + else if( object[camelCaseValue] !== undefined ) { + found = object[camelCaseValue]; + return false; + } + else if( $.isPlainObject( object[value] ) && (depth != maxDepth) ) { + object = object[value]; + } + else if( object[value] !== undefined ) { + found = object[value]; + return false; + } + else { + module.error(error.method, query); + return false; + } + }); + } + if ( $.isFunction( found ) ) { + response = found.apply(context, passedArguments); + } + else if(found !== undefined) { + response = found; + } + if($.isArray(returnedValue)) { + returnedValue.push(response); + } + else if(returnedValue !== undefined) { + returnedValue = [returnedValue, response]; + } + else if(response !== undefined) { + returnedValue = response; + } + return found; + } + }; + if(methodInvoked) { + if(instance === undefined) { + module.initialize(); + } + module.invoke(query); + } + else { + if(instance !== undefined) { + instance.invoke('destroy'); + } + module.initialize(); + } + }) + ; + return (returnedValue !== undefined) + ? 
returnedValue + : this + ; +}; + +$.fn.accordion.settings = { + + name : 'Accordion', + namespace : 'accordion', + + silent : false, + debug : false, + verbose : false, + performance : true, + + on : 'click', // event on title that opens accordion + + observeChanges : true, // whether accordion should automatically refresh on DOM insertion + + exclusive : true, // whether a single accordion content panel should be open at once + collapsible : true, // whether accordion content can be closed + closeNested : false, // whether nested content should be closed when a panel is closed + animateChildren : true, // whether children opacity should be animated + + duration : 350, // duration of animation + easing : 'easeOutQuad', // easing equation for animation + + onOpening : function(){}, // callback before open animation + onClosing : function(){}, // callback before closing animation + onChanging : function(){}, // callback before closing or opening animation + + onOpen : function(){}, // callback after open animation + onClose : function(){}, // callback after closing animation + onChange : function(){}, // callback after closing or opening animation + + error: { + method : 'The method you called is not defined' + }, + + className : { + active : 'active', + animating : 'animating' + }, + + selector : { + accordion : '.accordion', + title : '.title', + trigger : '.title', + content : '.content' + } + +}; + +// Adds easing +$.extend( $.easing, { + easeOutQuad: function (x, t, b, c, d) { + return -c *(t/=d)*(t-2) + b; + } +}); + +})( jQuery, window, document ); + diff --git a/docs/source/_ext/sphinx_accordion/semantic-ui-2.4.2/accordion.min.css b/docs/source/_ext/sphinx_accordion/semantic-ui-2.4.2/accordion.min.css new file mode 100755 index 0000000..80a8e46 --- /dev/null +++ b/docs/source/_ext/sphinx_accordion/semantic-ui-2.4.2/accordion.min.css @@ -0,0 +1,9 @@ +/*! 
+ * # Semantic UI 2.4.1 - Accordion + * http://github.com/semantic-org/semantic-ui/ + * + * + * Released under the MIT license + * http://opensource.org/licenses/MIT + * + */.ui.accordion,.ui.accordion .accordion{max-width:100%}.ui.accordion .accordion{margin:1em 0 0;padding:0}.ui.accordion .accordion .title,.ui.accordion .title{cursor:pointer}.ui.accordion .title:not(.ui){padding:.5em 0;font-family:Lato,'Helvetica Neue',Arial,Helvetica,sans-serif;font-size:1em;color:rgba(0,0,0,.87)}.ui.accordion .accordion .title~.content,.ui.accordion .title~.content{display:none}.ui.accordion:not(.styled) .accordion .title~.content:not(.ui),.ui.accordion:not(.styled) .title~.content:not(.ui){margin:'';padding:.5em 0 1em}.ui.accordion:not(.styled) .title~.content:not(.ui):last-child{padding-bottom:0}.ui.accordion .accordion .title .dropdown.icon,.ui.accordion .title .dropdown.icon{display:inline-block;float:none;opacity:1;width:1.25em;height:1em;margin:0 .25rem 0 0;padding:0;font-size:1em;-webkit-transition:opacity .1s ease,-webkit-transform .1s ease;transition:opacity .1s ease,-webkit-transform .1s ease;transition:transform .1s ease,opacity .1s ease;transition:transform .1s ease,opacity .1s ease,-webkit-transform .1s ease;vertical-align:baseline;-webkit-transform:none;transform:none}.ui.accordion.menu .item .title{display:block;padding:0}.ui.accordion.menu .item .title>.dropdown.icon{float:right;margin:.21425em 0 0 1em;-webkit-transform:rotate(180deg);transform:rotate(180deg)}.ui.accordion .ui.header .dropdown.icon{font-size:1em;margin:0 .25rem 0 0}.ui.accordion .accordion .active.title .dropdown.icon,.ui.accordion .active.title .dropdown.icon{-webkit-transform:rotate(90deg);transform:rotate(90deg)}.ui.accordion.menu .item .active.title>.dropdown.icon{-webkit-transform:rotate(90deg);transform:rotate(90deg)}.ui.styled.accordion{width:600px}.ui.styled.accordion,.ui.styled.accordion .accordion{border-radius:.28571429rem;background:#fff;-webkit-box-shadow:0 1px 2px 0 
rgba(34,36,38,.15),0 0 0 1px rgba(34,36,38,.15);box-shadow:0 1px 2px 0 rgba(34,36,38,.15),0 0 0 1px rgba(34,36,38,.15)}.ui.styled.accordion .accordion .title,.ui.styled.accordion .title{margin:0;padding:.75em 1em;color:rgba(0,0,0,.4);font-weight:700;border-top:1px solid rgba(34,36,38,.15);-webkit-transition:background .1s ease,color .1s ease;transition:background .1s ease,color .1s ease}.ui.styled.accordion .accordion .title:first-child,.ui.styled.accordion>.title:first-child{border-top:none}.ui.styled.accordion .accordion .content,.ui.styled.accordion .content{margin:0;padding:.5em 1em 1.5em}.ui.styled.accordion .accordion .content{padding:0;padding:.5em 1em 1.5em}.ui.styled.accordion .accordion .active.title,.ui.styled.accordion .accordion .title:hover,.ui.styled.accordion .active.title,.ui.styled.accordion .title:hover{background:0 0;color:rgba(0,0,0,.87)}.ui.styled.accordion .accordion .active.title,.ui.styled.accordion .accordion .title:hover{background:0 0;color:rgba(0,0,0,.87)}.ui.styled.accordion .active.title{background:0 0;color:rgba(0,0,0,.95)}.ui.styled.accordion .accordion .active.title{background:0 0;color:rgba(0,0,0,.95)}.ui.accordion .accordion .active.content,.ui.accordion .active.content{display:block}.ui.fluid.accordion,.ui.fluid.accordion .accordion{width:100%}.ui.inverted.accordion 
.title:not(.ui){color:rgba(255,255,255,.9)}@font-face{font-family:Accordion;src:url(data:application/x-font-ttf;charset=utf-8;base64,AAEAAAALAIAAAwAwT1MvMggjB5AAAAC8AAAAYGNtYXAPfOIKAAABHAAAAExnYXNwAAAAEAAAAWgAAAAIZ2x5Zryj6HgAAAFwAAAAyGhlYWT/0IhHAAACOAAAADZoaGVhApkB5wAAAnAAAAAkaG10eAJuABIAAAKUAAAAGGxvY2EAjABWAAACrAAAAA5tYXhwAAgAFgAAArwAAAAgbmFtZfC1n04AAALcAAABPHBvc3QAAwAAAAAEGAAAACAAAwIAAZAABQAAAUwBZgAAAEcBTAFmAAAA9QAZAIQAAAAAAAAAAAAAAAAAAAABEAAAAAAAAAAAAAAAAAAAAABAAADw2gHg/+D/4AHgACAAAAABAAAAAAAAAAAAAAAgAAAAAAACAAAAAwAAABQAAwABAAAAFAAEADgAAAAKAAgAAgACAAEAIPDa//3//wAAAAAAIPDZ//3//wAB/+MPKwADAAEAAAAAAAAAAAAAAAEAAf//AA8AAQAAAAAAAAAAAAIAADc5AQAAAAABAAAAAAAAAAAAAgAANzkBAAAAAAEAAAAAAAAAAAACAAA3OQEAAAAAAQASAEkAtwFuABMAADc0PwE2FzYXFh0BFAcGJwYvASY1EgaABQgHBQYGBQcIBYAG2wcGfwcBAQcECf8IBAcBAQd/BgYAAAAAAQAAAEkApQFuABMAADcRNDc2MzIfARYVFA8BBiMiJyY1AAUGBwgFgAYGgAUIBwYFWwEACAUGBoAFCAcFgAYGBQcAAAABAAAAAQAAqWYls18PPPUACwIAAAAAAM/9o+4AAAAAz/2j7gAAAAAAtwFuAAAACAACAAAAAAAAAAEAAAHg/+AAAAIAAAAAAAC3AAEAAAAAAAAAAAAAAAAAAAAGAAAAAAAAAAAAAAAAAQAAAAC3ABIAtwAAAAAAAAAKABQAHgBCAGQAAAABAAAABgAUAAEAAAAAAAIAAAAAAAAAAAAAAAAAAAAAAAAADgCuAAEAAAAAAAEADAAAAAEAAAAAAAIADgBAAAEAAAAAAAMADAAiAAEAAAAAAAQADABOAAEAAAAAAAUAFgAMAAEAAAAAAAYABgAuAAEAAAAAAAoANABaAAMAAQQJAAEADAAAAAMAAQQJAAIADgBAAAMAAQQJAAMADAAiAAMAAQQJAAQADABOAAMAAQQJAAUAFgAMAAMAAQQJAAYADAA0AAMAAQQJAAoANABaAHIAYQB0AGkAbgBnAFYAZQByAHMAaQBvAG4AIAAxAC4AMAByAGEAdABpAG4AZ3JhdGluZwByAGEAdABpAG4AZwBSAGUAZwB1AGwAYQByAHIAYQB0AGkAbgBnAEYAbwBuAHQAIABnAGUAbgBlAHIAYQB0AGUAZAAgAGIAeQAgAEkAYwBvAE0AbwBvAG4ALgADAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA) 
format('truetype'),url(data:application/font-woff;charset=utf-8;base64,d09GRk9UVE8AAASwAAoAAAAABGgAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAABDRkYgAAAA9AAAAS0AAAEtFpovuE9TLzIAAAIkAAAAYAAAAGAIIweQY21hcAAAAoQAAABMAAAATA984gpnYXNwAAAC0AAAAAgAAAAIAAAAEGhlYWQAAALYAAAANgAAADb/0IhHaGhlYQAAAxAAAAAkAAAAJAKZAedobXR4AAADNAAAABgAAAAYAm4AEm1heHAAAANMAAAABgAAAAYABlAAbmFtZQAAA1QAAAE8AAABPPC1n05wb3N0AAAEkAAAACAAAAAgAAMAAAEABAQAAQEBB3JhdGluZwABAgABADr4HAL4GwP4GAQeCgAZU/+Lix4KABlT/4uLDAeLa/iU+HQFHQAAAHkPHQAAAH4RHQAAAAkdAAABJBIABwEBBw0PERQZHnJhdGluZ3JhdGluZ3UwdTF1MjB1RjBEOXVGMERBAAACAYkABAAGAQEEBwoNVp38lA78lA78lA77lA773Z33bxWLkI2Qj44I9xT3FAWOj5CNkIuQi4+JjoePiI2Gi4YIi/uUBYuGiYeHiIiHh4mGi4aLho2Ijwj7FPcUBYeOiY+LkAgO+92L5hWL95QFi5CNkI6Oj4+PjZCLkIuQiY6HCPcU+xQFj4iNhouGi4aJh4eICPsU+xQFiIeGiYaLhouHjYePiI6Jj4uQCA74lBT4lBWLDAoAAAAAAwIAAZAABQAAAUwBZgAAAEcBTAFmAAAA9QAZAIQAAAAAAAAAAAAAAAAAAAABEAAAAAAAAAAAAAAAAAAAAABAAADw2gHg/+D/4AHgACAAAAABAAAAAAAAAAAAAAAgAAAAAAACAAAAAwAAABQAAwABAAAAFAAEADgAAAAKAAgAAgACAAEAIPDa//3//wAAAAAAIPDZ//3//wAB/+MPKwADAAEAAAAAAAAAAAAAAAEAAf//AA8AAQAAAAEAADfYOJZfDzz1AAsCAAAAAADP/aPuAAAAAM/9o+4AAAAAALcBbgAAAAgAAgAAAAAAAAABAAAB4P/gAAACAAAAAAAAtwABAAAAAAAAAAAAAAAAAAAABgAAAAAAAAAAAAAAAAEAAAAAtwASALcAAAAAUAAABgAAAAAADgCuAAEAAAAAAAEADAAAAAEAAAAAAAIADgBAAAEAAAAAAAMADAAiAAEAAAAAAAQADABOAAEAAAAAAAUAFgAMAAEAAAAAAAYABgAuAAEAAAAAAAoANABaAAMAAQQJAAEADAAAAAMAAQQJAAIADgBAAAMAAQQJAAMADAAiAAMAAQQJAAQADABOAAMAAQQJAAUAFgAMAAMAAQQJAAYADAA0AAMAAQQJAAoANABaAHIAYQB0AGkAbgBnAFYAZQByAHMAaQBvAG4AIAAxAC4AMAByAGEAdABpAG4AZ3JhdGluZwByAGEAdABpAG4AZwBSAGUAZwB1AGwAYQByAHIAYQB0AGkAbgBnAEYAbwBuAHQAIABnAGUAbgBlAHIAYQB0AGUAZAAgAGIAeQAgAEkAYwBvAE0AbwBvAG4ALgADAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA) format('woff');font-weight:400;font-style:normal}.ui.accordion .accordion .title .dropdown.icon,.ui.accordion .title .dropdown.icon{font-family:Accordion;line-height:1;-webkit-backface-visibility:hidden;backface-visibility:hidden;font-weight:400;font-style:normal;text-align:center}.ui.accordion .accordion .title 
.dropdown.icon:before,.ui.accordion .title .dropdown.icon:before{content:'\f0da'} \ No newline at end of file diff --git a/docs/source/_ext/sphinx_accordion/semantic-ui-2.4.2/accordion.min.js b/docs/source/_ext/sphinx_accordion/semantic-ui-2.4.2/accordion.min.js new file mode 100755 index 0000000..1dd73b8 --- /dev/null +++ b/docs/source/_ext/sphinx_accordion/semantic-ui-2.4.2/accordion.min.js @@ -0,0 +1 @@ +!function(F,A,e,q){"use strict";A=void 0!==A&&A.Math==Math?A:"undefined"!=typeof self&&self.Math==Math?self:Function("return this")(),F.fn.accordion=function(a){var v,s=F(this),b=(new Date).getTime(),y=[],C=a,O="string"==typeof C,x=[].slice.call(arguments,1);A.requestAnimationFrame||A.mozRequestAnimationFrame||A.webkitRequestAnimationFrame||A.msRequestAnimationFrame;return s.each(function(){var e,c,u=F.isPlainObject(a)?F.extend(!0,{},F.fn.accordion.settings,a):F.extend({},F.fn.accordion.settings),d=u.className,n=u.namespace,g=u.selector,l=u.error,t="."+n,i="module-"+n,o=s.selector||"",f=F(this),m=f.find(g.title),p=f.find(g.content),r=this,h=f.data(i);c={initialize:function(){c.debug("Initializing",f),c.bind.events(),u.observeChanges&&c.observeChanges(),c.instantiate()},instantiate:function(){h=c,f.data(i,c)},destroy:function(){c.debug("Destroying previous instance",f),f.off(t).removeData(i)},refresh:function(){m=f.find(g.title),p=f.find(g.content)},observeChanges:function(){"MutationObserver"in A&&((e=new MutationObserver(function(e){c.debug("DOM tree modified, updating selector cache"),c.refresh()})).observe(r,{childList:!0,subtree:!0}),c.debug("Setting up mutation observer",e))},bind:{events:function(){c.debug("Binding delegated events"),f.on(u.on+t,g.trigger,c.event.click)}},event:{click:function(){c.toggle.call(this)}},toggle:function(e){var n=e!==q?"number"==typeof e?m.eq(e):F(e).closest(g.title):F(this).closest(g.title),t=n.next(p),i=t.hasClass(d.animating),o=t.hasClass(d.active),a=o&&!i,s=!o&&i;c.debug("Toggling visibility of 
content",n),a||s?u.collapsible?c.close.call(n):c.debug("Cannot close accordion content collapsing is disabled"):c.open.call(n)},open:function(e){var n=e!==q?"number"==typeof e?m.eq(e):F(e).closest(g.title):F(this).closest(g.title),t=n.next(p),i=t.hasClass(d.animating);t.hasClass(d.active)||i?c.debug("Accordion already open, skipping",t):(c.debug("Opening accordion content",n),u.onOpening.call(t),u.onChanging.call(t),u.exclusive&&c.closeOthers.call(n),n.addClass(d.active),t.stop(!0,!0).addClass(d.animating),u.animateChildren&&(F.fn.transition!==q&&f.transition("is supported")?t.children().transition({animation:"fade in",queue:!1,useFailSafe:!0,debug:u.debug,verbose:u.verbose,duration:u.duration}):t.children().stop(!0,!0).animate({opacity:1},u.duration,c.resetOpacity)),t.slideDown(u.duration,u.easing,function(){t.removeClass(d.animating).addClass(d.active),c.reset.display.call(this),u.onOpen.call(this),u.onChange.call(this)}))},close:function(e){var n=e!==q?"number"==typeof e?m.eq(e):F(e).closest(g.title):F(this).closest(g.title),t=n.next(p),i=t.hasClass(d.animating),o=t.hasClass(d.active);!o&&!(!o&&i)||o&&i||(c.debug("Closing accordion content",t),u.onClosing.call(t),u.onChanging.call(t),n.removeClass(d.active),t.stop(!0,!0).addClass(d.animating),u.animateChildren&&(F.fn.transition!==q&&f.transition("is supported")?t.children().transition({animation:"fade out",queue:!1,useFailSafe:!0,debug:u.debug,verbose:u.verbose,duration:u.duration}):t.children().stop(!0,!0).animate({opacity:0},u.duration,c.resetOpacity)),t.slideUp(u.duration,u.easing,function(){t.removeClass(d.animating).removeClass(d.active),c.reset.display.call(this),u.onClose.call(this),u.onChange.call(this)}))},closeOthers:function(e){var 
n,t,i,o=e!==q?m.eq(e):F(this).closest(g.title),a=o.parents(g.content).prev(g.title),s=o.closest(g.accordion),l=g.title+"."+d.active+":visible",r=g.content+"."+d.active+":visible";i=u.closeNested?(n=s.find(l).not(a)).next(p):(n=s.find(l).not(a),t=s.find(r).find(l).not(a),(n=n.not(t)).next(p)),0`_, `c++ `_) + although to be fair we might've woefully misjudged which languages start emerging. + +- The reservation of snake case for the schema spec has the subtle advantage that in future, we might be able to use camelCase within the spec to denote class types in some useful way, just like in python. Not sure yet; just mulling. - - The :ref:`requirements` mention human-readability as a must; + +- The :ref:`requirements` mention human-readability as a must; `this paper `_ - suggests a 20% slower comprehension of camel case than snake. - - The languages we anticipate being most popular for building twins seem to trend toward snake case (eg - `python `_, `c++ `_) - although to be fair we might've woefully misjudged which languages start emerging. - - We're starting in Python so are taking a lead from PEP8, which is bar none the most successful style guide on the + suggests a 20% slower comprehension of camel case than snake, although to be fair that's probably arguable. + +- We're starting in Python so are taking a lead from PEP8, which is bar none the most successful style guide on the planet, because it got everybody on the same page really early on. If existing code that you're dropping in uses camelCase, please don't file that as an issue... converting property @@ -84,5 +88,19 @@ names automatically after schema validation generation is trivial, there are ton `humps `_) to do it. We'd also consider a pull request for a built-in utility converting `to `_ and -`from <>`_ that does this following validation and prior to returning results. Suggest your proposed approach on the -issues board. +`from `_ that does this following validation and prior to returning results. 
+Suggest your proposed approach on the `issues board `_. + + +.. _language_choice: + +Language Choice +=============== + +**twined** is presently released in python only. It won't be too hard to replicate functionality in other languages, and +we're considering other languages at present, so might be easily persuadable ;) + +If you require implementation of **twined** in a different language, +and are willing to consider sponsorship of development and maintenance of that library, please +`file an issue `_. + diff --git a/docs/source/about_requirements.rst b/docs/source/about_requirements.rst new file mode 100644 index 0000000..6869ee6 --- /dev/null +++ b/docs/source/about_requirements.rst @@ -0,0 +1,30 @@ +.. _requirements: + +Requirements of the framework +=================================== + +A *twine* must describe a digital twin, and have multiple roles. It must: + +#. Define what data is required by a digital twin, in order to run +#. Define what data will be returned by the twin following a successful run +#. Define the formats of these data, in such a way that incoming data can be validated +#. Define what other (1st or 3rd party) twins / services are required by this one in order for it to run. + +If this weren't enough, the description: + +#. Must be trustable (i.e. a *twine* from an untrusted, corrupt or malicious third party should be safe to at least read) +#. Must be machine-readable *and machine-understandable* [1]_ +#. Must be human-readable *and human-understandable* [1]_ +#. Must be discoverable (that is, searchable/indexable) otherwise people won't know it's there in order to use it. + +Fortunately for digital twin developers, several of these requirements have already been seen for data interchange +formats developed for the web. **twined** uses ``JSON`` and ``JSONSchema`` to help interchange data.
+ +If you're not already familiar with ``JSONSchema`` (or wish to know why **twined** uses ``JSON`` over the seemingly more +appropriate ``XML`` standard), see :ref:`introducing_json_schema`. + + +.. Footnotes: + +.. [1] *Understandable* essentially means that, once read, the machine or human knows what it actually means and what to do with it. + diff --git a/docs/source/anatomy.rst b/docs/source/anatomy.rst new file mode 100644 index 0000000..c4533af --- /dev/null +++ b/docs/source/anatomy.rst @@ -0,0 +1,101 @@ +.. _anatomy: + +========================= +Anatomy Of The Twine File +========================= + +The main point of **twined** is to enable engineers and scientists to easily (and rigorously) define a digital twin +or data service. + +This is done by adding a ``twine.json`` file to the repository containing your code. Adding a *twine* means you can: + +- communicate (to you or a colleague) what data is required by this service +- communicate (to another service / machine) what data is required +- deploy services automatically with a provider like `Octue `_. + +To just get started building a *twine*, check out the :ref:`quick_start`. To learn more about twines in general, +see :ref:`about`. Here, we describe the parts of a *twine* ("strands") and what they mean. + +.. _strands: + +Strands +======= + +A *twine* has several sections, called *strands*. Each defines a different kind of data required (or produced) by the +twin. + +.. list-table:: + :widths: 30 70 + :header-rows: 1 + + * - Strand + - Describes the twin's requirements for... + * - :ref:`Configuration Values ` + - Data, in JSON form, used for configuration of the twin/service. 
+ * - :ref:`Configuration Manifest ` + - Files/datasets required by the twin at configuration/startup + * - :ref:`Input Values ` + - Data, in JSON form, passed to the twin in order to trigger an analysis + * - :ref:`Input Manifest ` + - Files/datasets passed with Input Values to trigger an analysis + * - :ref:`Output Values ` + - Data, in JSON form, that will be produced by the twin (in response to inputs) + * - :ref:`Output Manifest ` + - Files/datasets that will be produced by the twin (in response to inputs) + * - :ref:`Credentials ` + - Credentials that are required by the twin in order to access third party services + * - :ref:`Children ` + - Other twins, access to which are required for this twin to function + * - :ref:`Monitors ` + - Visual and progress outputs from an analysis + + +.. toctree:: + :maxdepth: 1 + :hidden: + + anatomy_values + anatomy_manifest + anatomy_credentials + anatomy_monitors + anatomy_children + + +.. _twine_file_schema: + +Twine File Schema +================= + +Because the ``twine.json`` file itself is in ``JSON`` format with a strict structure, **twined** uses a schema to make +sure that twine files are correctly written (a "schema-schema", if you will, since a twine already contains schema). Try not +to think about it. But if you must, the *twine* schema is +`here `_. + +The first thing **twined** always does is check that the ``twine.json`` file itself is valid, and give you a +descriptive error if it isn't. + + +.. _other_external_io: + +Other External I/O +================== + +A twin might: + +- GET/POST data from/to an external API, +- query/update a database, +- upload files to an object store, +- trigger events in another network, or +- perform pretty much any interaction you can think of with other applications over the web.
+ +However, such data exchange may not be controllable by **twined** (which is intended to operate at the boundaries of the +twin) unless the resulting data is returned from the twin (and must therefore be compliant with the schema). + +So, there's nothing for **twined** to do here, and no need for a strand in the *twine* file. However, interacting with +third party APIs or databases might require some credentials. See :ref:`credentials_strand` for help with that. + +.. NOTE:: + This is actually a very common scenario. For example, the purpose of the twin might be to fetch data (like a weather + forecast) from some external API then return it in the ``output_values`` for use in a network of digital twins. + But it's the twin developer's job to do the fetchin' and make sure the resulting data is compliant with the + ``output_values_schema`` (see :ref:`values_based_strands`). \ No newline at end of file diff --git a/docs/source/anatomy_children.rst b/docs/source/anatomy_children.rst new file mode 100644 index 0000000..8f17237 --- /dev/null +++ b/docs/source/anatomy_children.rst @@ -0,0 +1,9 @@ +.. _children_strand: + +=============== +Children Strand +=============== + +.. ATTENTION:: + + Coming Soon! diff --git a/docs/source/anatomy_credentials.rst b/docs/source/anatomy_credentials.rst new file mode 100644 index 0000000..857d5b7 --- /dev/null +++ b/docs/source/anatomy_credentials.rst @@ -0,0 +1,79 @@ +.. _credentials_strand: + +================== +Credentials Strand +================== + +In order to: + +- GET/POST data from/to an API, +- query a database, or +- connect to a socket (for receiving Values or emitting Values, Monitors or Logs), + +A digital twin must have *access* to it. API keys, database URIs, etc must be supplied to the digital twin but +treated with best practice with respect to security considerations. The purpose of the ``credentials`` strand is to +dictate what credentials the twin requires in order to function. + +..
_defining_the_credentials_strand: + +Defining the Credentials Strand +=============================== + +This is the simplest of the strands, containing a list of credentials (whose ``NAMES_SHOULD_BE_SHOUTY_SNAKE_CASE``) with +a reminder of the purpose. Defaults can also be provided, useful for running on local or closed networks. + +.. code-block:: javascript + + { + "credentials": [ + { + "name": "SECRET_THE_FIRST", + "purpose": "Token for accessing a 3rd party API service" + }, + { + "name": "SECRET_THE_SECOND", + "purpose": "Token for accessing a 3rd party API service" + }, + { + "name": "SECRET_THE_THIRD", + "purpose": "Usually a big secret but sometimes has a convenient non-secret default, like a sandbox or local database", + "default": "postgres://pguser:pgpassword@localhost:5432/pgdb" + } + ] + } + +.. _supplying_credentials: + +Supplying Credentials +===================== + +.. ATTENTION:: + + *Credentials should never be hard-coded into application code* + + Do you trust the twin code? If you insert credentials to your own database into a digital twin + provided by a third party, you better be very sure that twin isn't going to scrape all that data out then send + it elsewhere! + + Alternatively, if you're building a twin requiring such credentials, it's your responsibility to give the end + users confidence that you're not abusing their access. + + There'll be a lot more discussion on these issues, but it's outside the scope of **twined** - all we do here is + make sure a twin has the credentials it requires. + +Credentials should be securely managed by whatever system is managing the twin, then made accessible to the twin +in the form of environment variables: + +.. 
code-block:: javascript + + SERVICE_API_KEY=someLongTokenTHatYouProbablyHaveToPayTheThirdPartyProviderLoadsOfMoneyFor + +Credentials may also reside in a ``.env`` file in the current directory, either in the format above +(with a new line for each variable) or, for convenience, as bash exports like: + +.. code-block:: javascript + + export SERVICE_API_KEY=someLongTokenTHatYouProbablyHaveToPayTheThirdPartyProviderLoadsOfMoneyFor + +The ``validate_credentials()`` method of the ``Twine`` class checks for their presence and, where contained in a +``.env`` file, ensures they are loaded into the environment. diff --git a/docs/source/anatomy_manifest.rst b/docs/source/anatomy_manifest.rst new file mode 100644 index 0000000..ba98dd9 --- /dev/null +++ b/docs/source/anatomy_manifest.rst @@ -0,0 +1,202 @@ +.. _manifest_strands: + +====================== +Manifest-based Strands +====================== + +Frequently, twins operate on files containing some kind of data. These files need to be made accessible to the code +running in the twin, in order that their contents can be read and processed. Conversely, a twin might produce an output +dataset which must be understood by users. + +The ``configuration_manifest``, ``input_manifest`` and ``output_manifest`` strands describe what kind of datasets (and +associated files) are required / produced. + +.. NOTE:: + + Files are always contained in datasets, even if there's only one file. It's so that we can keep nitty-gritty file + metadata separate from the more meaningful, higher level metadata like what a dataset is for. + +.. tabs:: + + .. group-tab:: Configuration Manifest Strand + + This describes datasets/files that are required at startup of the twin / service. They typically contain a + resource that the twin might use across many analyses. + + For example, a twin might predict failure for a particular component, given an image. It will require a trained + ML model (saved in a ``*.pickle`` or ``*.json``). 
While many thousands of predictions might be done over the + period that the twin is deployed, all predictions are done using this version of the model - so the model file is + supplied at startup. + + .. group-tab:: Input Manifest Strand + + These files are made available for the twin to run a particular analysis with. Each analysis will likely have + different input datasets. + + For example, a twin might be passed a dataset of LiDAR ``*.scn`` files and be expected to compute atmospheric flow + properties as a timeseries (which might be returned in the :ref:`output values ` for onward + processing and storage). + + .. group-tab:: Output Manifest Strand + + Files are created by the twin during an analysis, tagged and stored as datasets for some onward purpose. + This strand is not used for sourcing data; it enables users or other services to understand appropriate search + terms to retrieve datasets produced. + + +.. _describing_manifests: + +Describing Manifests +==================== + +Manifest-based strands are a **description of what files are needed**, NOT a list of specific files or datasets. This is +a tricky concept, but important, since services should be reusable and applicable to a range of similar datasets. + +The purpose of the manifest strands is to provide a helper to a wider system providing datafiles to digital twins. + +The manifest strands therefore use **tagging** - they contain a ``filters`` field, which should be valid +`Apache Lucene `_ search syntax. This is a powerful syntax, whose tagging features allow +us to specify incredibly broad, or extremely narrow searches (even down to a known unique result). See the tabs below +for examples. + + +.. NOTE:: + + Tagging syntax is extremely powerful. 
Below, you'll see how this enables a digital twin to specify things like: + + *"OK, I need this digital twin to always have access to a model file for a particular system, containing trained model data"* + + *"Uh, so I need an ordered sequence of files, that are CSV files from a meteorological mast."* + + This allows **twined** to check that the input files contain what is needed, enables quick and easy + extraction of subgroups or particular sequences of files within a dataset, and enables management systems + to map candidate datasets to twins that might be used to process them. + + + +.. tabs:: + + .. group-tab:: Configuration Manifest Strand + + Here we construct an extremely tight filter, which connects this digital twin to a specific + system. + + .. accordion:: + + .. accordion-row:: Show twine containing this strand + + .. literalinclude:: ../../examples/damage_classifier_service/twine.json + :language: javascript + + .. accordion-row:: Show a matching file manifest + + .. literalinclude:: ../../examples/damage_classifier_service/data/configuration_manifest.json + :language: javascript + + + .. group-tab:: Input Manifest Strand + + Here we specify that two datasets (and all or some of the files associated with them) are + required, for a service that cross-checks meteorological mast data and power output data for a wind farm. + + .. accordion:: + + .. accordion-row:: Show twine containing this strand + + .. literalinclude:: ../../examples/met_mast_scada_service/strands/input_manifest_filters.json + :language: javascript + + .. accordion-row:: Show a matching file manifest + + .. literalinclude:: ../../examples/met_mast_scada_service/data/input_manifest.json + :language: javascript + + .. group-tab:: Output Manifest Strand + + Here we specify that two datasets (and all or some of the files associated with them) are + required, for a service that cross-checks meteorological mast data and power output data for a wind farm. + + .. accordion:: + + .. 
accordion-row:: Show twine containing this strand + + .. literalinclude:: ../../examples/met_mast_scada_service/strands/output_manifest_filters.json + :language: javascript + + .. accordion-row:: Show a matching file manifest + + .. literalinclude:: ../../examples/met_mast_scada_service/data/output_manifest.json + :language: javascript + +.. + + TODO - clean up or remove this section + + .. _how_filtering_works: + + How Filtering Works + =================== + + It's the job of **twined** to make sure of two things: + + 1. make sure the *twine* file itself is valid, + + + **File data (input, output)** + + Files are not streamed directly to the digital twin (this would require extreme bandwidth in whatever system is + orchestrating all the twins). Instead, files should be made available on the local storage system; i.e. a volume + mounted to whatever container or VM the digital twin runs in. + + Groups of files are described by a ``manifest``, where a manifest is (in essence) a catalogue of files in a + dataset. + + A digital twin might receive multiple manifests, if it uses multiple datasets. For example, it could use a 3D + point cloud LiDAR dataset, and a meteorological dataset. + + .. 
code-block:: javascript + + { + "manifests": [ + { + "type": "dataset", + "id": "3c15c2ba-6a32-87e0-11e9-3baa66a632fe", // UUID of the manifest + "files": [ + { + "id": "abff07bc-7c19-4ed5-be6d-a6546eae8e86", // UUID of that file + "sha1": "askjnkdfoisdnfkjnkjsnd" // for quality control to check correctness of file contents + "name": "Lidar - 4 to 10 Dec.csv", + "path": "local/file/path/to/folder/containing/it/", + "type": "csv", + "metadata": { + }, + "size_bytes": 59684813, + "tags": "lidar, helpful, information, like, sequence:1", // Searchable, parsable and filterable + }, + { + "id": "abff07bc-7c19-4ed5-be6d-a6546eae8e86", + "name": "Lidar - 11 to 18 Dec.csv", + "path": "local/file/path/to/folder/containing/it/", + "type": "csv", + "metadata": { + }, + "size_bytes": 59684813, + "tags": "lidar, helpful, information, like, sequence:2", // Searchable, parsable and filterable + }, + { + "id": "abff07bc-7c19-4ed5-be6d-a6546eae8e86", + "name": "Lidar report.pdf", + "path": "local/file/path/to/folder/containing/it/", + "type": "pdf", + "metadata": { + }, + "size_bytes": 484813, + "tags": "report", // Searchable, parsable and filterable + } + ] + }, + { + // ... another dataset manifest ... + } + ] + } diff --git a/docs/source/anatomy_monitors.rst b/docs/source/anatomy_monitors.rst new file mode 100644 index 0000000..8704877 --- /dev/null +++ b/docs/source/anatomy_monitors.rst @@ -0,0 +1,44 @@ +.. _monitors_strand: + +=============== +Monitors Strand +=============== + +The ``configuration_values_schema``, ``input_values_schema`` and ``output_values_schema`` strands are *values-based*, +meaning the data that matches these strands is in JSON form. + +Each of these strands is a *json schema* which describes that data. + +.. tabs:: + + .. group-tab:: Monitors Strand + + There are two kinds of monitoring data required from a digital twin. 
+ + **Monitor data (output)** + + Values for health and progress monitoring of the twin, for example percentage progress, iteration number and + status - perhaps even residuals graphs for a converging calculation. Broadly speaking, this should be user-facing + information. + + *This kind of monitoring data can be in a suitable form for display on a dashboard* + + **Log data (output)** + + Logged statements, typically in iostream form, produced by the twin (e.g. via python's ``logging`` module) must be + capturable as an output for debugging and monitoring purposes. Broadly speaking, this should be developer-facing + information. + + + +Let's look at basic examples for twines containing each of these strands: + + +.. tabs:: + + .. group-tab:: Monitors Strand + + **Monitor data (output)** + + **Log data (output)** + diff --git a/docs/source/anatomy_values.rst b/docs/source/anatomy_values.rst new file mode 100644 index 0000000..71c0724 --- /dev/null +++ b/docs/source/anatomy_values.rst @@ -0,0 +1,128 @@ +.. _values_based_strands: + +==================== +Values-based Strands +==================== + +The ``configuration_values_schema``, ``input_values_schema`` and ``output_values_schema`` strands are *values-based*, +meaning the data that matches these strands is in JSON form. + +Each of these strands is a *json schema* which describes that data. + +.. tabs:: + + .. group-tab:: Configuration Values Strand + + This strand is a ``configuration_values_schema``, that is used to check validity of any + ``configuration_values`` data supplied to the twin at startup. + + The Configuration Values Strand is generally used to define control parameters relating to what the twin should + do, or how it should operate. + + For example, should it produce output images as low resolution PNGs or as SVGs? How many iterations of a fluid + flow solver should be used? What is the acceptable error level on an classifier algorithm? + + .. 
group-tab:: Input Values Strand + + This strand is an ``input_values_schema``, that is used to check validity of ``input_values`` data supplied to the + twin at the beginning of an analysis task. + + The Input Values Strand is generally used to define actual data which will be processed by the twin. Sometimes, it + may be used to define control parameters specific to an analysis. + + For example, if a twin cleans and detects anomalies in a 10-minute timeseries of 1Hz data, the ``input_values`` + might contain an array of data and a list of corresponding timestamps. It may also contain a control parameter + specifying which algorithm is used to do the detection. + + .. NOTE:: + Depending on the way the twin is deployed (see :ref:`deployment`), the ``input_values`` might come in from a + web request, over a websocket or called directly from the command line or another library. + + However they come, if the new ``input_values`` validate against the ``input_values_schema`` strand, + then analysis can proceed. + + .. group-tab:: Output Values Strand + + This strand is an ``output_values_schema``, that is used to check results (``output_values``) computed during an + analysis. This ensures that the application wrapped up within the *twine* is operating correctly, and + enables other twins/services or the end users to see what outputs they will get. + + For example, if a twin cleans and detects anomalies in a 10-minute timeseries of 1Hz data, the ``output_values`` + might contain an array of data interpolated onto regular timestamps, with missing values filled in and a list of + warnings where anomalies were found. + + +Let's look at basic examples for twines containing each of these strands: + + +.. tabs:: + + .. group-tab:: Configuration Values Strand + + This *twine* contains an example ``configuration_values_schema`` with one control parameter. + + `Many more detailed and specialised examples are available in the GitHub repository `_ + + ..
code-block:: javascript + + { + "configuration_values_schema": { + "title": "The example configuration form", + "description": "The Configuration Values Strand of an example twine", + "type": "object", + "properties": { + "n_iterations": { + "description": "An example of an integer configuration variable, called 'n_iterations'.", + "type": "integer", + "minimum": 1, + "maximum": 10, + "default": 5 + } + } + } + } + + Matching ``configuration_values`` data could look like this: + + .. code-block:: javascript + + { + "n_iterations": 8, + } + + + .. group-tab:: Input Values Strand + + This *twine* contains an example ``input_values_schema`` with one input value, which marked as required. + + Many more detailed and specialised examples are available in :ref:`examples`. + + .. code-block:: javascript + + { + "input_values_schema": { + "title": "Input Values", + "description": "The input values strand of an example twine, with a required height value", + "type": "object", + "properties": { + "height": { + "description": "An example of an integer value called 'height'", + "type": "integer", + "minimum": 2 + } + }, + "required": ["height"] + }, + + Matching ``input_values`` data could look like this: + + .. code-block:: javascript + + { + "height": 13, + } + + + .. group-tab:: Output Values Strand + + Stuff \ No newline at end of file diff --git a/docs/source/conf.py b/docs/source/conf.py index 8b0371f..767fed1 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -11,12 +11,21 @@ # serve to show the default. import os +import sys import sphinx_rtd_theme +from distutils.util import strtobool + + +def str2bool(value): + """ Allows for parsing boolean environment variables like 'True' and 'False' correctly + """ + return bool(strtobool(value)) + # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. 
If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. -# sys.path.insert(0, os.path.abspath('.')) +sys.path.insert(0, os.path.abspath("./_ext")) # -- General configuration ----------------------------------------------------- @@ -33,7 +42,9 @@ 'sphinx.ext.mathjax', 'sphinx.ext.ifconfig', 'breathe', - 'exhale' + 'exhale', + 'googleanalytics', + 'sphinx_accordion.accordion' ] # Add any paths that contain templates here, relative to this directory. @@ -125,6 +136,12 @@ # Tell sphinx what the pygments highlight language should be highlight_language = 'python' +# -- Google Analytics Configuration -------------------------------------------- + +# Only add google analytics when building on ReadTheDocs, +# to avoid clicks from development pages adding to analytics +googleanalytics_id = "UA-43965341-6" +googleanalytics_enabled = True # -- Options for HTML output --------------------------------------------------- diff --git a/docs/source/deployment.rst b/docs/source/deployment.rst new file mode 100644 index 0000000..0cec073 --- /dev/null +++ b/docs/source/deployment.rst @@ -0,0 +1,66 @@ +.. _deployment: + +========== +Deployment +========== + + +.. _deploying_with_octue: + +Deploying with Octue +==================== + +`Octue `_ provides automated deployment to a cloud provider (like GCP or Azure), along with +permissions and user management, monitoring, logging and data storage management out of the box. + +There are also a whole bunch of collaborative helper tools, like the graphical +`twine builder `_ and manifesting tools, designed to speed up the process of building +and using twines. + +The full set of services is in early beta, `get in touch `_ and we can help you +architect systems - from small data services to large networks of :ref:`digital_twins`. + + +.. 
_deploying_with_doctue: + +Coming Soon - Deploying with doctue +=================================== + +Once we've bedded down our services internally at Octue, we'll be open-sourcing more parts of our build/deploy process, +including docker containers with pre-configured servers to run and monitor twine-based services and digital twins. + +This will allow services to be easily spun up on GCP, Azure Digital Ocean etc., and be a nice halfway house between +fully managed system on Octue and running your own webserver. Of course, +without all the collaborative and data management features that Octue provides ;) + +We're looking for commercial sponsors for this part of the process - if that could be you, please +`get in touch `_ + + +.. _deploying_as_a_cli: + +Deploying as a command-line application +======================================= + +Use the open-source `octue app template `_ as a guide. Write your new +python code (or call your existing tools/libraries) within it. It's set up to wrap and check configuration, inputs and +outputs using twined. Follow the instructions there to set up your inputs, and your files, and run an analysis. + + +.. _deployment_with_a_web_server: + +Deploying with your own web server +================================== + +You can use any python based web server (need another language? see :ref:`language_choice`): + +- Add ``configuration_values_data`` to your webserver config +- Set up an endpoint to allow. +- Set up an endpoint to handle incoming requests / socket messages - these will be ``input_values_data``. +- Treat these requests / messages as events which trigger a task. +- In your task framework (e.g. 
your celery task), either: + - Use **twined** directly to validate the ``input_values_data``/``output_values_data`` (and, on startup, the + ``configuration_values_data``) and handle running any required analysis yourself, or + - import your analysis app (as built in :ref:`deploying_as_a_cli`) and call it with the configuration and input + data in your task framework. +- Return the result to the client. diff --git a/docs/source/index.rst b/docs/source/index.rst index f35c0a4..4e68c89 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -4,20 +4,11 @@ `issues board `_ and `roadmap `_. -.. image:: https://codecov.io/gh/octue/twined/branch/master/graph/badge.svg - :target: https://codecov.io/gh/octue/twined - :alt: Code coverage - :align: right -.. image:: https://readthedocs.org/projects/twined/badge/?version=latest - :target: https://twined.readthedocs.io/en/latest/?badge=latest - :alt: Documentation Status - :align: right - ====== Twined ====== -**twined** is a library to help :ref:`digital_twins` talk to one another. +**twined** is a library to help create and connect :ref:`digital_twins` and data services. .. epigraph:: *"Twined" [t-why-nd] ~ encircled, twisted together, interwoven* @@ -26,10 +17,11 @@ A digital twin is a virtual representation of a real life being - a physical ass a human. Like real things, digital twins need to interact, so can be connected together, but need a common communication framework to do so. -**twined** helps you to define a single file, a "twine", that defines a digital twin, specifying its data -interfaces, connections to other twins, and other requirements. +**twined** helps you to define a single file, a "twine", that defines a digital twin / data service. It specifies +its data interfaces, connections to other twins, and other requirements. -Any person, or any computer, can read a twine and understand *what-goes-in* and *what-comes-out*.
+Any person, or any computer, can read a twine and understand *what-goes-in* and *what-comes-out*. That makes it easy to +collaborate with other teams, since everybody is crystal clear about what's needed. .. figure:: images/digital_twin_hierarchy.svg :width: 350px @@ -37,7 +29,7 @@ Any person, or any computer, can read a twine and understand *what-goes-in* and :figclass: align-center :alt: Hierarchy of digital twins - Digital twins connected in a hierarchy. Each blue circle represents a twin, coupled to its neighbours. + Digital twins / data services connected in a hierarchy. Each blue circle represents a twin, coupled to its neighbours. Yellow nodes are where schema are used to connect twins. @@ -49,13 +41,13 @@ Aims **twined** provides a toolkit to help create and validate "twines" - descriptions of a digital twin, what data it requires, what it does and how it works. -The goals of **twined** are as follows: - - Provide a clear framework for what a digital twin schema can and/or must contain - - Provide functions to validate incoming data against a known schema - - Provide functions to check that a schema itself is valid - - Provide (or direct you to) tools to create schema describing what you require +The goals of this **twined** library are as follows: + - Provide a clear framework for what a *twine* can and/or must contain + - Provide functions to validate incoming data against a known *twine* + - Provide functions to check that a *twine* itself is valid + - Provide (or direct you to) tools to create *twines* describing what you require -In :ref:`schema`, we describe the different parts of a twine (examining how digital twins connect and interact... +In :ref:`anatomy`, we describe the different parts of a twine (examining how digital twins connect and interact... building them together in hierarchies and networks). But you may prefer to dive straight in with the :ref:`quick_start` guide. 
@@ -78,6 +70,10 @@ it is necessary for everyone to be on the same page - the :ref:`gemini_principle which is why we've released this part of our technology stack as open source, to support those principles and help develop a wider ecosystem. +The main goal is to **help engineers and scientists focus on doing engineering and science** - instead of apis, data +cleaning/management, and all this cloud-pipeline-devops-test-ci-ml BS that takes up 90% of a scientist's time, when they +should be spending their valuable time researching migratory patterns of birds, or cell structures, or wind turbine +performance, or whatever excites them. .. _uses: @@ -86,6 +82,7 @@ Uses At `Octue `_, **twined** is used as a core part of our application creation process: + * As a format to communicate requirements to our partners in research projects * As a tool to validate incoming data to digital twins * As a framework to help establish schema when designing digital twins * As a source of information on digital twins in our network, to help map and connect twins together @@ -97,28 +94,13 @@ Please note, this is not a "general help" forum; we recommend Stack Overflow for issues or for help designing digital twin schema, Octue is able to provide application support services for those building digital twins using **twined**. - -.. _life_choices: - -Life Choices -============ - -**twined** is presently released in python only. It won't be too hard to replicate functionality in other languages, and -we're considering other languages at present, so might be easily persuadable ;) - -If you require implementation of **twined** in a different language, -and are willing to consider sponsorship of development and maintenance of that library, please -`get in touch `_. - - .. 
toctree:: :maxdepth: 2 self - installation quick_start - examples - digital_twins - schema + anatomy + about + deployment license version_history diff --git a/docs/source/lifecycle.rst b/docs/source/lifecycle.rst new file mode 100644 index 0000000..404bd3c --- /dev/null +++ b/docs/source/lifecycle.rst @@ -0,0 +1,30 @@ + +.. + + Data matching the ``configuration_values_schema`` is supplied to the digital twin / data service at + startup. + + It's generally used to define control parameters relating to what the service should do, or how it should operate. + For example, should it produce output images as low resolution PNGs or as SVGs? How many iterations of a fluid + flow solver should be used? What is the acceptable error level on an classifier algorithm? + + Input Values + + Once configuration data supplied to a service has been validated, it can accept inputs and run analyses + using them. + + Depending on the way it's deployed (see :ref:`deployment`), the ``input_values`` might come in from a web request, + over a websocket or called directly from the command line or another library. + + However it comes, new ``input_values``, which are in ``JSON`` format, are checked against the + ``input_values_schema`` strand of the twine. If they match, then analysis can proceed. + + Output Values + + Once a service has Data matching the ``output_values_schema`` is supplied to the service while it's running. Depending on the way + it's deployed, the values might come in from a web request, over a websocket or called directly from + another library + + Input For example current rotor speed, or forecast wind direction. + + Values might be passed at instantiation of a twin (typical application-like process) or via a socket. diff --git a/docs/source/quick_start.rst b/docs/source/quick_start.rst index 9d89d1c..3428b7b 100644 --- a/docs/source/quick_start.rst +++ b/docs/source/quick_start.rst @@ -4,114 +4,8 @@ Quick Start ============ -.. _create_a_twine: +.. 
toctree:: + :maxdepth: 2 -Create your first twine -======================= - -Let's say we want a digital twin that accepts two values, uses them to make a calculation, then gives the result. Anyone connecting to the twin will need to know what values it requires, and what it responds with. - -First, create a blank text file, call it `twine.json`. We'll give the twin a title and description. -Paste in the following: - -.. code-block:: javascript - - { - "title": "My first digital twin... of an atomising discombobulator", - "description": "A simple example... estimates the `foz` value of an atomising discombobulator." - } - -Now, let's define an input values strand, to specify what values are required by the twin. For this we use a json schema -(you can read more about them in :ref:`introducing_json_schema`). Add the ``input_values`` field, so your twine looks like this: - -.. code-block:: javascript - - { - "title": "My first digital twin", - "description": "A simple example to build on..." - "input_values_schema": { - "$schema": "http://json-schema.org/2019-09/schema#", - "title": "Input Values schema for my first digital twin", - "description": "These values are supplied to the twin by another program (often over a websocket, depending on your integration provider). So as these values change, the twin can reply with an update.", - "type": "object", - "properties": { - "foo": { - "description": "The foo value... speed of the discombobulator's input bobulation module, in m/s", - "type": "number", - "minimum": 10, - "maximum": 500 - }, - "baz": { - "description": "The baz value... period of the discombobulator's recombulation unit, in s", - "type": "number", - "minimum": 0, - "maximum": 1000 - } - } - } - } - -Finally, let's define an output values strand, to define what kind of data is returned by the twin: - -.. 
code-block:: javascript - - "output_values_schema": { - "$schema": "http://json-schema.org/2019-09/schema#", - "title": "Output Values schema for my first digital twin", - "description": "The twin will output data that matches this schema", - "type": "object", - "properties": { - "foz": { - "description": "Estimate of the foz value... efficiency of the discombobulator in %", - "type": "number", - "minimum": 10, - "maximum": 500 - } - } - } - - -.. _load_the_twine: - -Load the twine -============== - -**twined** provides a `Twine()` class to load a twine (from a file or a json string). -The loading process checks the twine itself is valid. It's as simple as: - -.. code-block:: py - - from twined import Twine - - my_twine = Twine(file='twine.json') - - -.. _validate_some_inputs: - -Validate some inputs -==================== - -Say we have some json that we want to parse and validate, to make sure it matches what's required for input values. - -.. code-block:: py - - my_input_values = my_twine.validate_input_values(json='{"foo": 30, "baz": 500}') - -You can read the values from a file too. Paste the following into a file named ``input_values.json``: - -.. code-block:: javascript - - { - "foo": 30, - "baz": 500 - } - -Then parse and validate directly from the file: - -.. code-block:: py - - my_input_values = my_twine.validate_input_values(file="input_values.json") - - -.. ATTENTION:: - LIBRARY IS UNDER CONSTRUCTION! WATCH THIS SPACE FOR MORE! \ No newline at end of file + quick_start_installation + quick_start_create_your_first_twine diff --git a/docs/source/quick_start_create_your_first_twine.rst b/docs/source/quick_start_create_your_first_twine.rst new file mode 100644 index 0000000..bbfbad6 --- /dev/null +++ b/docs/source/quick_start_create_your_first_twine.rst @@ -0,0 +1,111 @@ +.. 
_create_your_first_twine: + +Create your first twine +======================= + +Let's say we want a digital twin that accepts two values, uses them to make a calculation, then gives the result. Anyone connecting to the twin will need to know what values it requires, and what it responds with. + +First, create a blank text file, call it `twine.json`. We'll give the twin a title and description. +Paste in the following: + +.. code-block:: javascript + + { + "title": "My first digital twin... of an atomising discombobulator", + "description": "A simple example... estimates the `foz` value of an atomising discombobulator." + } + +Now, let's define an input values strand, to specify what values are required by the twin. For this we use a json schema +(you can read more about them in :ref:`introducing_json_schema`). Add the ``input_values`` field, so your twine looks like this: + +.. code-block:: javascript + + { + "title": "My first digital twin", + "description": "A simple example to build on..." + "input_values_schema": { + "$schema": "http://json-schema.org/2019-09/schema#", + "title": "Input Values schema for my first digital twin", + "description": "These values are supplied to the twin by another program (often over a websocket, depending on your integration provider). So as these values change, the twin can reply with an update.", + "type": "object", + "properties": { + "foo": { + "description": "The foo value... speed of the discombobulator's input bobulation module, in m/s", + "type": "number", + "minimum": 10, + "maximum": 500 + }, + "baz": { + "description": "The baz value... period of the discombobulator's recombulation unit, in s", + "type": "number", + "minimum": 0, + "maximum": 1000 + } + } + } + } + +Finally, let's define an output values strand, to define what kind of data is returned by the twin: + +.. 
code-block:: javascript + + "output_values_schema": { + "$schema": "http://json-schema.org/2019-09/schema#", + "title": "Output Values schema for my first digital twin", + "description": "The twin will output data that matches this schema", + "type": "object", + "properties": { + "foz": { + "description": "Estimate of the foz value... efficiency of the discombobulator in %", + "type": "number", + "minimum": 10, + "maximum": 500 + } + } + } + + +.. _load_the_twine: + +Load the twine +============== + +**twined** provides a `Twine()` class to load a twine (from a file or a json string). +The loading process checks the twine itself is valid. It's as simple as: + +.. code-block:: py + + from twined import Twine + + my_twine = Twine(file='twine.json') + + +.. _validate_some_inputs: + +Validate some inputs +==================== + +Say we have some json that we want to parse and validate, to make sure it matches what's required for input values. + +.. code-block:: py + + my_input_values = my_twine.validate_input_values(json='{"foo": 30, "baz": 500}') + +You can read the values from a file too. Paste the following into a file named ``input_values.json``: + +.. code-block:: javascript + + { + "foo": 30, + "baz": 500 + } + +Then parse and validate directly from the file: + +.. code-block:: py + + my_input_values = my_twine.validate_input_values(file="input_values.json") + + +.. ATTENTION:: + LIBRARY IS UNDER CONSTRUCTION! WATCH THIS SPACE FOR MORE! \ No newline at end of file diff --git a/docs/source/installation.rst b/docs/source/quick_start_installation.rst similarity index 100% rename from docs/source/installation.rst rename to docs/source/quick_start_installation.rst diff --git a/docs/source/schema.rst b/docs/source/schema.rst deleted file mode 100644 index 98af867..0000000 --- a/docs/source/schema.rst +++ /dev/null @@ -1,333 +0,0 @@ -.. 
_schema: - -===================== -About Twines (Schema) -===================== - -The core of **twined** is to provide and use schemas for digital twins. - -Below, we set out requirements and a framework for creating a *schema* to represent a digital twin. -We call these schema "twines". To just get started building a **twine**, check out the :ref:`quick_start`. - - -.. _requirements: - -Requirements of digital twin schema -=================================== - -A *schema* defines a digital twin, and has multiple roles. It: - -#. Defines what data is required by a digital twin, in order to run -#. Defines what data will be returned by the twin following a successful run -#. Defines the formats of these data, in such a way that incoming data can be validated - -If this weren't enough, the schema: - -#. Must be trustable (i.e. a schema from an untrusted, corrupt or malicious third party should be safe to at least read) -#. Must be machine-readable *and machine-understandable* [1]_ -#. Must be human-readable *and human-understandable* [1]_ -#. Must be searchable/indexable - -Fortunately for digital twin developers, many of these requirements have already been seen for data interchange formats -developed for the web. **twined** uses ``JSON`` and ``JSONSchema`` to interchange data between digital twins. - -If you're not already familiar with ``JSONSchema`` (or wish to know why **twined** uses ``JSON`` over the seemingly more -appropriate ``XML`` standard), see :ref:`introducing_json_schema`. - -.. toctree:: - :maxdepth: 0 - :hidden: - - schema_introducing_json - - -.. _data_framework: - -Data framework -============== - -We cannot simply expect many developers to create digital twins with some schema, then to be able to connect them all -together - even if those schema are all fully valid (*readable*). **twined** makes things slightly more specific. - -**twined** has an opinionated view on how incoming data is organised. 
This results in a top-level schema that is -extremely prescriptive (*understandable*), allowing digital twins to be introspected and connected. - - -.. _data_types: - -Data types ----------- - -Let us review the classes of data i/o undertaken a digital twin: - -.. tabs:: - - .. group-tab:: Config - - **Configuration data (input)** - - Control parameters relating to what the twin should do, or how it should operate. For example, should a twin produce - output images as low resolution PNGs or as SVGs? How many iterations of a fluid flow solver should be used? What is - the acceptable error level on an classifier algorithm? - - *These values should always have defaults.* - - .. group-tab:: Values - - **Value data (input, output)** - - Raw values passed directly to/from a twin. For example current rotor speed, or forecast wind direction. - - Values might be passed at instantiation of a twin (typical application-like process) or via a socket. - - *These values should never have defaults.* - - .. group-tab:: Files - - **File data (input, output)** - - Twins frequently operate on file content - eg files on disc or objects in a cloud data store. For example, - groups of ``.csv`` files can contain data to train a machine learning algorithm. There are four subclasses of file i/o - that may be undertaken by digital twins: - - #. Input file (read) - eg to read input data from a csv file - #. Temporary file (read-write, disposable) - eg to save intermediate results to disk, reducing memory use - #. Cache file (read-write, persistent) - eg to save a trained classifier for later use in prediction - #. Output file (write) - eg to write postprocessed csv data ready for the next twin, or save generated images etc. - - .. group-tab:: External - - **External service data (input, output)** - - A digital twin might: - - GET/POST data from/to an external API, - - query/update a database. 
- - Such data exchange may not be controllable by **twined** (which is intended to operate at the boundaries of the - twin) unless the resulting data is returned from the twin and must therefore be schema-compliant. - - .. group-tab:: Credentials - - **Credentials (input)** - - In order to: - - GET/POST data from/to an API, - - query a database, or - - connect to a socket (for receiving Values or emitting Values, Monitors or Logs) - - a digital twin must have *access* to it. API keys, database URIs, etc must be supplied to the digital twin but - treated with best practice with respect to security considerations. - - *Credentials should never be hard-coded into application code, always passed in* - - .. group-tab:: Monitors/Logs - - There are two kinds of monitoring data required from a digital twin. - - **Monitor data (output)** - - Values for health and progress monitoring of the twin, for example percentage progress, iteration number and - status - perhaps even residuals graphs for a converging calculation. Broadly speaking, this should be user-facing - information. - - *This kind of monitoring data can be in a suitable form for display on a dashboard* - - **Log data (output)** - - Logged statements, typically in iostream form, produced by the twin (e.g. via python's ``logging`` module) must be - capturable as an output for debugging and monitoring purposes. Broadly speaking, this should be developer-facing - information. - - -.. _data_descriptions: - -Data descriptions ------------------ - -Here, we describe how each of these data classes is described by **twined**. - - -.. tabs:: - - .. group-tab:: Config - - **Configuration data** - - Configuration data is supplied as a simple object, which of course can be nested (although we don't encourage deep - nesting). The following is a totally hypothetical configuration... - - .. 
code-block:: javascript - - { - "max_iterations": 0, - "compute_vectors": True, - "cache_mode": "extended", - "initial_conditions": { - "intensity": 0.0, - "direction", 0.0 - } - } - - .. group-tab:: Values - - **Value data (input, output)** - - For Values data, a twin will accept and/or respond with raw JSON (this could originate over a socket, be read from - a file or API depending exactly on the twin) containing variables of importance: - - .. code-block:: javascript - - { - "rotor_speed": 13.2, - "wind_direction": 179.4 - } - - .. group-tab:: Files - - **File data (input, output)** - - Files are not streamed directly to the digital twin (this would require extreme bandwidth in whatever system is - orchestrating all the twins). Instead, files should be made available on the local storage system; i.e. a volume - mounted to whatever container or VM the digital twin runs in. - - Groups of files are described by a ``manifest``, where a manifest is (in essence) a catalogue of files in a - dataset. - - A digital twin might receive multiple manifests, if it uses multiple datasets. For example, it could use a 3D - point cloud LiDAR dataset, and a meteorological dataset. - - .. 
code-block:: javascript - - { - "manifests": [ - { - "type": "dataset", - "id": "3c15c2ba-6a32-87e0-11e9-3baa66a632fe", // UUID of the manifest - "files": [ - { - "id": "abff07bc-7c19-4ed5-be6d-a6546eae8e86", // UUID of that file - "sha1": "askjnkdfoisdnfkjnkjsnd", // for quality control to check correctness of file contents - "name": "Lidar - 4 to 10 Dec.csv", - "path": "local/file/path/to/folder/containing/it/", - "type": "csv", - "metadata": { - }, - "size_bytes": 59684813, - "tags": "lidar, helpful, information, like, sequence:1", // Searchable, parsable and filterable - }, - { - "id": "abff07bc-7c19-4ed5-be6d-a6546eae8e86", - "name": "Lidar - 11 to 18 Dec.csv", - "path": "local/file/path/to/folder/containing/it/", - "type": "csv", - "metadata": { - }, - "size_bytes": 59684813, - "tags": "lidar, helpful, information, like, sequence:2", // Searchable, parsable and filterable - }, - { - "id": "abff07bc-7c19-4ed5-be6d-a6546eae8e86", - "name": "Lidar report.pdf", - "path": "local/file/path/to/folder/containing/it/", - "type": "pdf", - "metadata": { - }, - "size_bytes": 484813, - "tags": "report", // Searchable, parsable and filterable - } - ] - }, - { - // ... another dataset manifest ... - } - ] - } - - .. NOTE:: - - Tagging syntax is extremely powerful. Below, you'll see how this enables a digital twin to specify things like: - - *"Uh, so I need an ordered sequence of files, that are CSV files, and are tagged as lidar."* - - This allows **twined** to check that the input files contain what is needed, enables quick and easy - extraction of subgroups or particular sequences of files within a dataset, and enables management systems - to map candidate datasets to twins that might be used to process them. - - - .. group-tab:: External - - **External service data (input, output)** - - There's nothing for **twined** to do here! - - If the purpose of the twin (and this is a common scenario!)
is simply - to fetch data from some service then return it as values from the twin, that's perfect. But it's - the twin developer's job to do the fetchin', not ours ;) - - However, fetching from your API or database might require some credentials. See the following tab for help with - that. - - .. group-tab:: Credentials - - **Credentials (input)** - - Credentials should be securely managed by whatever system is managing the twin, then made accessible to the twin - in the form of environment variables: - - .. code-block:: javascript - - SERVICE_API_KEY=someLongTokenTHatYouProbablyHaveToPayTheThirdPartyProviderLoadsOfMoneyFor - - Credentials may also reside in a ``.env`` file in the current directory, either in the format above - (with a new line for each variable) or, for convenience, as bash exports like: - - .. code-block:: javascript - - export SERVICE_API_KEY=someLongTokenTHatYouProbablyHaveToPayTheThirdPartyProviderLoadsOfMoneyFor - - The ``validate_credentials()`` method of the ``Twine`` class checks for their presence and, where contained in a - ``.env`` file, ensures they are loaded into the environment. - - .. ATTENTION:: - - Do you trust the twin code? If you insert credentials to your own database into a digital twin - provided by a third party, you better be very sure that twin isn't going to scrape all that data out then send - it elsewhere! - - Alternatively, if you're building a twin requiring such credentials, it's your responsibility to give the end - users confidence that you're not abusing their access. - - There'll be a lot more discussion on these issues, but it's outside the scope of **twined** - all we do here is - make sure a twin has the credentials it requires. - - .. group-tab:: Monitors/Logs - - **Monitor data (output)** - - **Log data (output)** - - -.. ATTENTION:: - *What's the difference between Configuration and Values data? Isn't it the same?* - - No. Configuration data is supplied to a twin to initialise it, and always has defaults.
Values data is ingested by a - twin, maybe at startup but maybe also later (if the twin is working like a live server). In complex cases, which - Values are required may also depend on the Configuration of the twin! - - Values data can also be returned from a twin whereas configuration data is not. - - Don't get hung up on this yet - in simple (most) cases, they are effectively the same. For a twin which is run as a - straightforward analysis, both the Configuration and Values are processed at startup. - - - -.. Footnotes: - -.. [1] *Understandable* essentially means that, once read, the machine or human knows what it actually means and what to do with it. - - -.. toctree:: - :maxdepth: 0 - :hidden: - - schema_other_considerations diff --git a/examples/damage_classifier_service/data/configuration_manifest.json b/examples/damage_classifier_service/data/configuration_manifest.json new file mode 100644 index 0000000..8acf4c2 --- /dev/null +++ b/examples/damage_classifier_service/data/configuration_manifest.json @@ -0,0 +1,26 @@ +{ + "id": "8ead7669-8162-4f64-8cd5-4abe92509e17", + "datasets": [ + { + "id": "7ead7669-8162-4f64-8cd5-4abe92509e17", + "name": "training data for system abc123", + "organisation": "megacorp", + "tags": "classifier, damage, system:abc123", + "files": [ + { + "path": "datasets/7ead7669/blade_damage.mdl", + "cluster": 0, + "sequence": 0, + "extension": "csv", + "tags": "", + "posix_timestamp": 0, + "id": "abff07bc-7c19-4ed5-be6d-a6546eae8e86", + "last_modified": "2019-02-28T22:40:30.533005Z", + "name": "blade_damage.mdl", + "size_bytes": 59684813, + "sha-512/256": "somesha" + } + ] + } + ] +} diff --git a/examples/damage_classifier_service/twine.json b/examples/damage_classifier_service/twine.json new file mode 100644 index 0000000..e6e5f52 --- /dev/null +++ b/examples/damage_classifier_service/twine.json @@ -0,0 +1,14 @@ +{ + // Manifest strands contain lists, with one entry for each required dataset + "configuration_manifest_filters": [ + { + 
// Once the inputs are validated, your analysis program can use this key to access the dataset + "key": "trained_model", + // General notes, which are helpful as a reminder to users of the service + "purpose": "The trained classifier", + // Issues a strict search for data provided by megacorp, containing *.mdl files tagged as + // classifiers for blade damage on system abc123 + "filters": "organisation: megacorp AND tags:(classifier AND damage AND system:abc123) AND files:(extension:mdl)" + } + ], +} \ No newline at end of file diff --git a/examples/met_mast_scada_service/data/input_manifest.json b/examples/met_mast_scada_service/data/input_manifest.json new file mode 100644 index 0000000..8acf4c2 --- /dev/null +++ b/examples/met_mast_scada_service/data/input_manifest.json @@ -0,0 +1,26 @@ +{ + "id": "8ead7669-8162-4f64-8cd5-4abe92509e17", + "datasets": [ + { + "id": "7ead7669-8162-4f64-8cd5-4abe92509e17", + "name": "training data for system abc123", + "organisation": "megacorp", + "tags": "classifier, damage, system:abc123", + "files": [ + { + "path": "datasets/7ead7669/blade_damage.mdl", + "cluster": 0, + "sequence": 0, + "extension": "csv", + "tags": "", + "posix_timestamp": 0, + "id": "abff07bc-7c19-4ed5-be6d-a6546eae8e86", + "last_modified": "2019-02-28T22:40:30.533005Z", + "name": "blade_damage.mdl", + "size_bytes": 59684813, + "sha-512/256": "somesha" + } + ] + } + ] +} diff --git a/examples/met_mast_scada_service/data/output_manifest.json b/examples/met_mast_scada_service/data/output_manifest.json new file mode 100644 index 0000000..8acf4c2 --- /dev/null +++ b/examples/met_mast_scada_service/data/output_manifest.json @@ -0,0 +1,26 @@ +{ + "id": "8ead7669-8162-4f64-8cd5-4abe92509e17", + "datasets": [ + { + "id": "7ead7669-8162-4f64-8cd5-4abe92509e17", + "name": "training data for system abc123", + "organisation": "megacorp", + "tags": "classifier, damage, system:abc123", + "files": [ + { + "path": "datasets/7ead7669/blade_damage.mdl", + "cluster": 0, + 
"sequence": 0, + "extension": "csv", + "tags": "", + "posix_timestamp": 0, + "id": "abff07bc-7c19-4ed5-be6d-a6546eae8e86", + "last_modified": "2019-02-28T22:40:30.533005Z", + "name": "blade_damage.mdl", + "size_bytes": 59684813, + "sha-512/256": "somesha" + } + ] + } + ] +} diff --git a/examples/met_mast_scada_service/strands/input_manifest_filters.json b/examples/met_mast_scada_service/strands/input_manifest_filters.json new file mode 100644 index 0000000..fed513b --- /dev/null +++ b/examples/met_mast_scada_service/strands/input_manifest_filters.json @@ -0,0 +1,20 @@ +{ + // Manifest strands contain lists, with one entry for each required dataset + "input_manifest_filters": [ + { + // Once the inputs are validated, your analysis program can use this key to access the dataset + "key": "met_mast_data", + // General notes, which are helpful as a reminder to users of the service + "purpose": "A dataset containing meteorological mast data", + // Searches datasets which are tagged "met*" (allowing for "met" and "meterological"), whose + // files are CSVs in a numbered sequence, and which occur at a particular location + "filters": "tags:(met* AND mast) AND files:(extension:csv AND sequence:>=0) AND location:10" + }, + { + "key": "scada_data", + "purpose": "A dataset containing scada data", + // The organisation: filter refines search to datasets owned by a particular organisation handle + "filters": "organisation: megacorp AND tags:(scada AND mast) AND files:(extension:csv AND sequence:>=0)" + } + ], +} \ No newline at end of file diff --git a/examples/met_mast_scada_service/strands/output_manifest_filters.json b/examples/met_mast_scada_service/strands/output_manifest_filters.json new file mode 100644 index 0000000..43b7a08 --- /dev/null +++ b/examples/met_mast_scada_service/strands/output_manifest_filters.json @@ -0,0 +1,12 @@ +{ + "output_manifest_filters": [ + { + // Twined will prepare a manifest with this key, which you can add to during the analysis or once its 
complete + "key": "met_scada_checks", + // General notes, which are helpful as a reminder to users of the service + "purpose": "A dataset containing figures (in json format) chowing correlations between mast and scada data", + // Twined will check that the output file manifest has tags appropriate to the filters + "filters": "tags:(met* AND scada AND correlation) AND files:(extension:json) AND location:10" + } + ] +} \ No newline at end of file diff --git a/requirements-dev.txt b/requirements-dev.txt index e48c652..e01469c 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -21,4 +21,4 @@ twine # <---- nothing to do with the twined library! # Building documentation # ------------------------------------------------------------------------------ -include docs/requirements.txt +-r docs/requirements.txt diff --git a/setup.py b/setup.py index 518f7eb..731ce5e 100644 --- a/setup.py +++ b/setup.py @@ -15,7 +15,7 @@ setup( name="twined", - version="0.0.11", + version="0.0.12-alpha", py_modules=[], install_requires=["jsonschema ~= 3.2.0", "python-dotenv"], url="https://www.github.com/octue/twined",