diff --git a/.coveragerc b/.coveragerc index 4602a609f..da13860e7 100644 --- a/.coveragerc +++ b/.coveragerc @@ -1,8 +1,12 @@ # .coveragerc to control coverage.py [run] -concurrency=thread,greenlet +concurrency=multiprocessing +parallel=true +sigterm=true [report] +omit = + dff/utils/testing/parser_test_generators.py # Regexes for lines to exclude from consideration exclude_lines = # Have to re-enable the standard pragma @@ -13,3 +17,16 @@ exclude_lines = # Don't complain if tests don't cover error handling: except .* + + # Don't complain if tests don't cover unreachable code: + if tp.TYPE_CHECKING: + if TYPE_CHECKING: + if typing.TYPE_CHECKING: + + # Don't complain if tests don't cover abstract methods: + @abstractmethod + + # Don't complain if tests don't cover type overload: + @tp.overload + @overload + @typing.overload diff --git a/.github/workflows/test_coverage.yml b/.github/workflows/test_coverage.yml index 5737716ba..4505095c1 100644 --- a/.github/workflows/test_coverage.yml +++ b/.github/workflows/test_coverage.yml @@ -35,6 +35,7 @@ jobs: - name: build & install package run: | + sudo apt install graphviz python -m pip install build python -m build find . -name "*.whl" -exec python -m pip install {}[test_full] ';' diff --git a/dff/utils/parser/base_parser_object.py b/dff/utils/parser/base_parser_object.py new file mode 100644 index 000000000..ed2103303 --- /dev/null +++ b/dff/utils/parser/base_parser_object.py @@ -0,0 +1,1312 @@ +""" +Base Parser Objects +-------------------- +This module defines parser objects. + +Each class defined here is either an interface for other classes or represents a subset of :py:mod:`ast` classes. +The base interface for every parser object is :py:class:`~.BaseParserObject`. + +Instances of its subclasses form a parser tree. +For example, an instance representing a statement `module.object = 6` is a node in the parser tree, and it has two +child nodes: `module.object` and `6`. 
+This tree structure allows to assign IDs to various objects (such as transition conditions) as a path to that object +from the tree root. +""" +import typing as tp +from abc import ABC, abstractmethod +from collections import defaultdict +import ast +import logging +from inspect import Signature, Parameter +from enum import Enum + +try: + from functools import cached_property +except ImportError: + try: + from cached_property import cached_property # type: ignore + except ImportError: + raise ModuleNotFoundError( + "Module `cached_property` is not installed. Install it with `pip install dff[parser]`." + ) +# todo: remove this when python3.7 support is dropped + +try: + from ast import unparse +except ImportError: + try: + from astunparse import unparse # type: ignore + except ImportError: + raise ModuleNotFoundError("Module `astunparse` is not installed. Install it with `pip install dff[parser]`.") +# todo: remove this when python3.8 support is dropped + + +if tp.TYPE_CHECKING: + from dff.utils.parser.namespace import Namespace + from dff.utils.parser.dff_project import DFFProject +from dff.utils.parser.exceptions import StarError, ParsingError +from dff.utils.parser.utils import is_instance + + +logger = logging.getLogger(__name__) + + +class BaseParserObject(ABC): + """ + An interface for other parser objects specifying methods that all parser objects should define: + - :py:meth:`~.BaseParserObject.dump` + - :py:meth:`~.BaseParserObject.from_ast` + + This class also implements some useful methods for any parser object. + """ + + def __init__(self): + self.parent: tp.Optional[BaseParserObject] = None + "Parent node." + self._name: tp.Optional[str] = None + "Name of the node: `path = parent.path + _name`." + self.children: tp.MutableMapping[str, BaseParserObject] = {} + "Mapping from child names to child nodes." + + def dependencies(self) -> tp.Dict[str, tp.Set[str]]: + """A list of objects defined in :py:class:`.Namespace`\\s that are used inside current node. 
+ + :return: A mapping from :py:class:`.Namespace`\\s names to sets of object names. + """ + result: tp.DefaultDict[str, tp.Set[str]] = defaultdict(set) + if len(self.path) >= 2: + result[self.path[0]].add(self.path[1]) + else: # self is a Namespace, return no dependencies + return result + + if isinstance(self, ReferenceObject): + resolved = self._resolve_once + if resolved is not None: + for namespace, objects in resolved.dependencies().items(): + result[namespace].update(objects) + + for child in self.children.values(): + for namespace, objects in child.dependencies().items(): + result[namespace].update(objects) + return dict(result) + + def add_child(self, child: "BaseParserObject", asname: str): + """Add a child node `child` by the name `asname`. + + :param child: Child node to add. + :param asname: Name of the child node. + """ + child.parent = self + child._name = asname + self.children[asname] = child + + def resolve_path(self, path: tp.Tuple[str, ...]) -> "BaseParserObject": + """Resolve tree path relative to this node. + + :param path: A tuple of child names. + :raises KeyError: + If a key in `path` cannot be found in children. + :return: A child path[-1] of a child path[-2] of .. a child path[0] of this object. 
+ """ + if len(path) == 0: + return self + child = self.children.get(path[0]) + if child is None: + raise KeyError(f"Not found key {path[0]} in {str(self)}") + return child.resolve_path(path[1:]) + + @cached_property + def path(self) -> tp.Tuple[str, ...]: + """Path to this node from the tree root node.""" + if self._name is None: + raise RuntimeError(f"Name is not set: {str(self)}") + if self.parent is None: + raise RuntimeError(f"Parent is not set: {str(self)}") + return self.parent.path + (self._name,) + + @cached_property + def namespace(self) -> "Namespace": + """Namespace this object belongs to.""" + if self.parent is None: + raise RuntimeError(f"Parent is not set: {str(self)}") + return self.parent.namespace + + @cached_property + def dff_project(self) -> "DFFProject": + """DFFProject this object belongs to.""" + return self.namespace.dff_project + + @abstractmethod + def dump(self, current_indent: int = 0, indent: tp.Optional[int] = 4) -> str: + """ + Dump object as string. `current_indent` should already be applied to the current line by the node's parent. + `current_indent` is supposed to be used only when creating new lines. + + :param current_indent: Current indentation level (in whitespace number), defaults to 0. + :param indent: + Indentation increment (in whitespace number), defaults to 4. + If set to None, an object is dumped in one line. + :return: Representation of the object as a string. 
+ """ + + def __repr__(self) -> str: + return self.__class__.__name__ + "(" + self.true_value() + ")" + + def __str__(self) -> str: + return self.dump() + + def true_value(self) -> str: + """Return the true value of the object that is used to compare objects and compute hash.""" + return self.dump(indent=None) + + def __hash__(self): + return hash(self.true_value()) + + def __eq__(self, other): + if isinstance(other, BaseParserObject): + return self.true_value() == other.true_value() + if isinstance(other, str): + return self.true_value() == other + return NotImplemented + + @classmethod + @abstractmethod + def from_ast(cls, node: tp.Union[ast.stmt, ast.expr], **kwargs): + """Construct an object from an :py:class:`ast.stmt` or :py:class:`ast.expr`. + + :param node: AST node to construct the object from. + :return: Constructed object(s) or None if an object cannot be constructed from `node`. + """ + + +class Statement(BaseParserObject, ABC): + """ + This class is for nodes that represent :py:class:`ast.stmt`. + """ + + def __init__(self): + BaseParserObject.__init__(self) + self.parent: tp.Optional[Namespace] = None + self.children: tp.MutableMapping[str, Expression] = {} + + @classmethod + @abstractmethod + def from_ast( + cls, node: tp.Union[ast.stmt, ast.expr], **kwargs + ) -> tp.Optional[tp.Union[tp.Mapping[str, "Statement"], "Python"]]: + """ + Extract statements from ast node. + + :return: + - None, if type of the `node` is not compatible with the current class. + - For non-:py:class:`~.Python` classes + return a mapping from names of defined objects inside the statement to their definitions. + - :py:class:`~.Python` should return an instance of itself. + """ + ... + + @classmethod + @tp.overload + def auto(cls, node: ast.stmt, **kwargs) -> tp.Union[tp.Mapping[str, "Statement"], "Python"]: # type: ignore + ... + + @classmethod + @tp.overload + def auto(cls, node: tp.Union[ast.stmt, ast.expr], **kwargs) -> None: + ... 
+ + @classmethod + def auto(cls, node, **kwargs): + """Construct a statement automatically choosing the correct type.""" + if not isinstance(node, ast.stmt): + return None + for _cls in (Import, ImportFrom, Assignment, Python): + _cls = tp.cast(tp.Type[Statement], _cls) + obj = _cls.from_ast(node, **kwargs) + if obj is not None: + return obj + raise RuntimeError(node) + + +class Expression(BaseParserObject, ABC): + """ + This class is for nodes that represent :py:class:`ast.expr`. + """ + + def __init__(self): + BaseParserObject.__init__(self) + self.parent: tp.Optional[tp.Union[Statement, Expression]] = None + self.children: tp.MutableMapping[str, Expression] = {} + + @classmethod + @abstractmethod + def from_ast(cls, node: tp.Union[ast.stmt, ast.expr], **kwargs) -> tp.Optional["Expression"]: + ... + + @classmethod + def from_str(cls, string: str) -> "Expression": + """ + Construct an expression from a string representing it. + + :raises ParsingError: + - If a string represents anything but a single expression (:py:class:`ast.Expr`). + """ + body = ast.parse(string).body + if len(body) != 1: + raise ParsingError(f"Body should contain only one expression: {string}") + statement = body[0] + if not isinstance(statement, ast.Expr): + raise ParsingError(f"Body should contain only expressions: {string}") + return cls.auto(statement.value) + + @classmethod + def from_obj(cls, obj: object) -> "Expression": + """Construct an expression representing an object.""" + return cls.from_str(repr(obj)) + + @classmethod + @tp.overload + def auto(cls, node: ast.expr, **kwargs) -> "Expression": # type: ignore + ... + + @classmethod + @tp.overload + def auto(cls, node: tp.Union[ast.stmt, ast.expr], **kwargs) -> None: + ... 
+ + @classmethod + def auto(cls, node, **kwargs): + """Construct an expression automatically choosing the correct type.""" + if not isinstance(node, ast.expr): + return None + for _cls in (Comprehension, Call, Iterable, Subscript, Name, Attribute, Dict, String, Python): + _cls = tp.cast(tp.Type[Expression], _cls) + obj = _cls.from_ast(node, **kwargs) + if obj is not None: + return obj + raise RuntimeError(node) + + +class ReferenceObject(BaseParserObject, ABC): + """ + An interface for reference objects. Reference objects are objects that reference other objects, + e.g. Name, Import, Subscript. + """ + + def __init__(self): + BaseParserObject.__init__(self) + + @cached_property + @abstractmethod + def _resolve_once(self) -> tp.Optional[BaseParserObject]: + """Try to find the object being referenced by the object. + + :return: Referenced object or None if it can't be resolved. + """ + ... + + @cached_property + def absolute(self) -> tp.Optional[BaseParserObject]: # todo: handle recursion + """An absolute object -- if the current object is a reference to another reference, that reference will + be resolved as well. + + :return: A final object that is not :py:class:`.ReferenceObject` or None if any object cannot be resolved. + """ + resolved = self._resolve_once + if isinstance(resolved, ReferenceObject): + return resolved.absolute + return resolved + + @cached_property + @abstractmethod + def referenced_object(self) -> str: + """ + Return a path of a referenced object (as well as modifiers such as indexes or attribute references). + + So if `ReferenceObject` is `from dff import pipeline as pl referenced_object` for `pl` is `dff.pipeline`. + However, if `ReferencedObject` is `pl.Pipeline` or `pl.dictionary[pl.number][5]` then their + `referenced_object`\\s are, respectively, `dff.pipeline.Pipeline` and + `dff.pipeline.dictionary[dff.pipeline.number][5]`. 
+ """ + + def __repr__(self): + if self.dump(indent=None) == self.true_value(): + return BaseParserObject.__repr__(self) + return self.__class__.__name__ + "(dump=" + self.dump(indent=None) + "; true_value=" + self.true_value() + ")" + + def true_value(self) -> str: + if self.absolute is not None: + return self.absolute.true_value() + return self.referenced_object + + @staticmethod + def resolve_absolute(obj: BaseParserObject) -> BaseParserObject: + """ + Process an object and return its absolute value if possible. + + :param obj: An object to process. + :return: + `obj.absolute` if `obj` is `ReferenceObject` and `absolute` is not None. + Return `obj` otherwise. + """ + if isinstance(obj, ReferenceObject): + return obj.absolute or obj + return obj + + @staticmethod + def resolve_expression(obj: Expression) -> Expression: + """ + Process an object and return its absolute value of :py:class:`~.Expression` if possible. + + :param obj: An object to process. + :return: + `obj.absolute` if `obj` is `ReferenceObject` and `absolute` has :py:class:`~.Expression` type. + Return `obj` otherwise. + """ + if isinstance(obj, ReferenceObject): + absolute = obj.absolute + if isinstance(absolute, Expression): + return absolute + return obj + + +class Import(Statement, ReferenceObject): + """ + This class if for nodes that represent :py:class:`ast.Import`. 
+ """ + + def __init__(self, module: str, alias: tp.Optional[str] = None): + ReferenceObject.__init__(self) + Statement.__init__(self) + self.module = module + self.alias = alias + + def dump(self, current_indent: int = 0, indent: tp.Optional[int] = 4) -> str: + return f"import {self.module}" + (f" as {self.alias}" if self.alias else "") + + @cached_property + def _resolve_once(self) -> tp.Optional[BaseParserObject]: + namespace_name = self.namespace.resolve_relative_import(self.module) + namespace = self.dff_project.get_namespace(namespace_name) + if namespace is None: + logger.debug( + f"{self.__class__.__name__} did not resolve: {str(self)}\nNamespace {namespace_name} not found" + ) + return None + return namespace + + @cached_property + def referenced_object(self) -> str: + return self.namespace.resolve_relative_import(self.module) + + @classmethod + @tp.overload + def from_ast(cls, node: ast.Import, **kwargs) -> tp.Dict[str, "Import"]: # type: ignore + ... + + @classmethod + @tp.overload + def from_ast(cls, node: tp.Union[ast.stmt, ast.expr], **kwargs) -> None: + ... + + @classmethod + def from_ast(cls, node, **kwargs): + """ + Extract imports from ast node. + + :return: A dictionary of statements contained in the node. + The keys are names under which an object is imported, and the values are instances of this class. + For example an import statement `import obj_1 as obj, obj_2, obj_3 as obj_3` + will produce a dictionary with the following items: + + - `(obj, Import(import obj_1 as obj))` + - `(obj_2, Import(import obj_2))` + - `(obj_3, Import(import obj_3 as obj_3))` + """ + if not isinstance(node, ast.Import): + return None + result = {} + for name in node.names: + result[name.asname or name.name] = cls(name.name, name.asname) + return result + + +class ImportFrom(Statement, ReferenceObject): + """ + This class if for nodes that represent :py:class:`ast.ImportFrom`. 
+ """ + + def __init__(self, module: str, level: int, obj: str, alias: tp.Optional[str] = None): + ReferenceObject.__init__(self) + Statement.__init__(self) + self.module = module + self.level = level + self.obj = obj + self.alias = alias + + def dump(self, current_indent: int = 0, indent: tp.Optional[int] = 4) -> str: + return f"from {self.level * '.' + self.module} import {self.obj}" + (f" as {self.alias}" if self.alias else "") + + @cached_property + def _resolve_once(self) -> tp.Optional[BaseParserObject]: + namespace_name = self.namespace.resolve_relative_import(self.module, self.level) + namespace = self.dff_project.get_namespace(namespace_name) + if namespace is None: + logger.debug( + f"{self.__class__.__name__} did not resolve: {str(self)}\nNamespace {namespace_name} not found" + ) + return None + if not is_instance(namespace, "dff.utils.parser.namespace.Namespace"): + raise RuntimeError(namespace) + + obj = namespace.get_object(self.obj) + if obj is None: + logger.debug( + f"{self.__class__.__name__} did not resolve: {str(self)}\n" + f"Object {self.obj} not found in namespace {namespace}" + ) + return None + + return obj + + @cached_property + def referenced_object(self) -> str: + resolved = self._resolve_once + if isinstance(resolved, ReferenceObject): + return resolved.referenced_object + if self.level > 0: + substitute_module_name = self.namespace.resolve_relative_import(self.module, self.level) + "." + self.obj + else: + substitute_module_name = self.module + "." + self.obj + return substitute_module_name + + @classmethod + @tp.overload + def from_ast(cls, node: ast.ImportFrom, **kwargs) -> tp.Dict[str, "ImportFrom"]: # type: ignore + ... + + @classmethod + @tp.overload + def from_ast(cls, node: tp.Union[ast.stmt, ast.expr], **kwargs) -> None: + ... + + @classmethod + def from_ast(cls, node, **kwargs): + """ + Extract from-imports from ast node. + + :return: + A dictionary of statements contained in the node. 
+ The keys are names under which an object is imported, and the values are instances of this class. + For example an import statement `from module import obj_1 as obj, obj_2, obj_3 as obj_3` + will produce a dictionary with the following items: + + - `(obj, ImportFrom(from module import obj_1 as obj))` + - `(obj_2, ImportFrom(from module import obj_2))` + - `(obj_3, ImportFrom(from module import obj_3 as obj_3))` + """ + if not isinstance(node, ast.ImportFrom): + return None + result = {} + for name in node.names: + if name.name == "*": + raise StarError(f"Starred import is not supported: {unparse(node)}") + result[name.asname or name.name] = cls(node.module or "", node.level, name.name, name.asname) + return result + + +class Assignment(Statement): + """ + This class if for nodes that represent :py:class:`ast.Assign` or :py:class:`ast.AnnAssign`. + """ + + def __init__(self, target: Expression, value: Expression): + super().__init__() + self.add_child(target, "target") + self.add_child(value, "value") + + def dump(self, current_indent: int = 0, indent: tp.Optional[int] = 4) -> str: + return ( + f"{self.children['target'].dump(current_indent, indent)} =" + f" {self.children['value'].dump(current_indent, indent)}" + ) + + @classmethod + @tp.overload + def from_ast( # type: ignore + cls, node: tp.Union[ast.Assign, ast.AnnAssign], **kwargs + ) -> tp.Dict[str, "Assignment"]: + ... + + @classmethod + @tp.overload + def from_ast(cls, node: tp.Union[ast.stmt, ast.expr], **kwargs) -> None: + ... + + @classmethod + def from_ast(cls, node, **kwargs): + """ + Extract assignments from ast node. + + :return: + A dictionary of statements contained in the node. + The keys are names of declared object, and the values are instances of this class. 
+ For example an assignment statement `a = b = c = 1` + will produce a dictionary with the following items: + + - `(c, Assignment(c = 1))` + - `(a, Assignment(a = c))` + - `(b, Assignment(b = c))` + """ + if isinstance(node, ast.Assign): + result = {} + target = Expression.auto(node.targets[-1]) + value = Expression.auto(node.value) + result[str(target)] = cls(target=target, value=value) + for new_target in map(Expression.auto, node.targets[:-1]): + # todo: add support for tuple targets + result[str(new_target)] = cls(target=new_target, value=target) + return result + if isinstance(node, ast.AnnAssign): + result = {} + if node.value is None: + logger.warning(f"Assignment has no value: {unparse(node)}") + return None + target = Expression.auto(node.target) + value = Expression.auto(node.value) + result[str(target)] = cls(target=target, value=value) + return result + return None + + +class String(Expression): + """ + This class is for nodes that represent :py:class:`ast.Str` (for python 3.7) + or :py:class:`ast.Constant` with str value. + """ + + def __init__(self, string: str): + super().__init__() + self.string = string + + def dump(self, current_indent: int = 0, indent: tp.Optional[int] = 4) -> str: + return repr(self.string) + + @classmethod + @tp.overload + def from_ast(cls, node: tp.Union[ast.Str, ast.Constant], **kwargs) -> "String": # type: ignore + ... + + @classmethod + @tp.overload + def from_ast(cls, node: tp.Union[ast.stmt, ast.expr], **kwargs) -> None: + ... + + @classmethod + def from_ast(cls, node, **kwargs): + if isinstance(node, ast.Str): # todo: remove this when python3.7 support is dropped + return cls(node.s) + elif isinstance(node, ast.Constant): + if isinstance(node.value, str): + return cls(node.value) + return None + + +class Python(Expression, Statement): # type: ignore + """ + This class is for nodes that cannot be represented by any other classes. It's children contain direct children + as well as children inside iterable fields. 
+ """ + + def __init__(self, node: tp.Union[ast.expr, ast.stmt]): + Expression.__init__(self) + Statement.__init__(self) + self.parent: tp.Optional[tp.Union[Namespace, Statement, Expression]] = None # type: ignore + for key, value in node.__dict__.items(): + if isinstance(value, ast.expr): + self.add_child(Expression.auto(value), key) + elif isinstance(value, tp.Iterable): + for index, child in enumerate(value): + if isinstance(child, ast.expr): + self.add_child(Expression.auto(child), key + "_" + str(index)) + if unparse.__module__ == "astunparse": + self.string = unparse(node).strip() + # astunparse.unparse adds "\n" + # todo: remove this when python3.8 support is dropped + else: + self.string = unparse(node) + self.type = node.__class__.__name__ + + def dump(self, current_indent: int = 0, indent: tp.Optional[int] = 4) -> str: + return self.string + + @classmethod + def from_str(cls, string: str) -> "Python": + parsed = ast.parse(string).body + if len(parsed) != 1: + raise RuntimeError(f"String {string} should contain only one statement or expression") + statement = parsed[0] + if isinstance(statement, ast.stmt): + return cls(statement) + elif isinstance(statement, ast.Expr): + return cls(statement.value) + else: + raise RuntimeError(statement) + + @classmethod + def from_ast(cls, node: tp.Union[ast.stmt, ast.expr], **kwargs) -> "Python": # type: ignore + return cls(node) + + +class Dict(Expression): + """ + This class if for nodes that represent :py:class:`ast.Dict`. + """ + + def __init__(self, keys: tp.List[Expression], values: tp.List[Expression]): + super().__init__() + self.__keys: tp.List[tp.Tuple[Expression, str]] = [] + for key, value in zip(keys, values): + self.__keys.append((key, str(key))) + self.add_child(key, self._key(key)) + self.add_child(value, self._value(key)) + + @staticmethod + def _key(str_key: tp.Union[Expression, str]) -> str: + """Get a name which is used to store a child that is a key in the dictionary. 
+ + :param str_key: An object or a string representation of an object. + The object represents a key in the dictionary. + :return: Name of a child-key. + """ + if not isinstance(str_key, str): + str_key = str(str_key) + return "key_" + str_key + + @staticmethod + def _value(str_value: tp.Union[Expression, str]) -> str: + """Get a name which is used to store a child that is a value in the dictionary. + + :param str_value: An object or a string representation of an object. + The object represents a value in the dictionary. + :return: Name of a child-value. + """ + if not isinstance(str_value, str): + str_value = str(str_value) + return "value_" + str_value + + @staticmethod + def _clear(child_name: str) -> str: + """Get a string representation of a key that is associated with a child under the name `child_name`. + + :param child_name: A name of a child. + :return: A string representation of the corresponding key. + """ + if child_name.startswith("value_"): + return child_name[len("value_") :] # noqa: E203 + if child_name.startswith("key_"): + return child_name[len("key_") :] # noqa: E203 + return child_name + + def key_by_value(self, value: Expression) -> Expression: + """Get a key by the value. + + :param value: Value stored in a dictionary. + :return: A key that is associated with the value. 
+ """ + child_name = value._name + if child_name is None: + raise RuntimeError(f"Value does not have a parent: {value}") + return self.children[self._key(self._clear(child_name))] + + def keys(self) -> tp.Iterator[Expression]: + """An iterator over keys in the dictionary.""" + for _, key_str in self.__keys: + yield self.children[self._key(key_str)] + + def values(self) -> tp.Iterator[Expression]: + """An iterator over values in the dictionary.""" + for _, key_str in self.__keys: + yield self.children[self._value(key_str)] + + def items(self) -> tp.Iterator[tp.Tuple[Expression, Expression]]: + """An iterator over tuples of keys and values in the dictionary.""" + for _, key_str in self.__keys: + yield self.children[self._key(key_str)], self.children[self._value(key_str)] + + @cached_property + def _keys(self) -> tp.Dict[Expression, str]: + """A mapping from dictionary keys to their string representations.""" + result = {} + for key, value in self.__keys: + result[key] = value + return result + + def dump(self, current_indent: int = 0, indent: tp.Optional[int] = 4) -> str: + items = [ + (indent * " " if indent else "") + + self.children[self._key(key)].dump( + current_indent=0 if indent is None else (current_indent + indent), indent=indent + ) + + ": " + + self.children[self._value(key)].dump( + current_indent=0 if indent is None else (current_indent + indent), indent=indent + ) + + "," + for _, key in self.__keys + ] + if indent is None: + return "{" + " ".join(items) + "}" + else: + return ("\n" + current_indent * " ").join(["{", *items, "}"]) + + def __getitem__(self, item: tp.Union[Expression, str]) -> Expression: + """Get dictionary value based on a key. + + :param item: Either a key or a string representation of a key. + :return: Dictionary value. + :raises TypeError: + If the type of `item` is not :py:class:`.BaseParserObject` nor `str`. + :raises KeyError: + If the key is not in the dictionary. 
+ """ + if isinstance(item, Expression): + key = self._keys[item] + return self.children[self._value(key)] + elif isinstance(item, str): + return self.children[self._value(item)] + else: + raise TypeError(f"Item {repr(item)} is not `BaseParserObject` nor `str`") + + def get(self, item: tp.Union[Expression, str], default=None) -> Expression: + """Get dictionary value based on a key. + + :param item: Either a key or a string representation of a key. + :param default: Value to return if the dictionary does not have the `item` key. + :return: Dictionary value. + :raises TypeError: + If the type of `item` is not :py:class:`.BaseParserObject` nor `str`. + """ + if isinstance(item, Expression): + key = self._keys.get(item) + if key is None: + return default + return self.children.get(self._value(key), default) + elif isinstance(item, str): + return self.children.get(self._value(item), default) + else: + raise TypeError(f"Item {repr(item)} is not `BaseParserObject` nor `str`") + + @classmethod + @tp.overload + def from_ast(cls, node: ast.Dict, **kwargs) -> "Dict": # type: ignore + ... + + @classmethod + @tp.overload + def from_ast(cls, node: tp.Union[ast.stmt, ast.expr], **kwargs) -> None: + ... + + @classmethod + def from_ast(cls, node, **kwargs): + if not isinstance(node, ast.Dict): + return None + keys, values = [], [] + for key, value in zip(node.keys, node.values): + if key is None: + raise StarError(f"Dict unpacking is not supported: {unparse(node)}") + keys.append(Expression.auto(key)) + values.append(Expression.auto(value)) + return cls(keys, values) + + +class Name(Expression, ReferenceObject): + """ + This class if for nodes that represent :py:class:`ast.Name`. 
+ """ + + def __init__(self, name: str): + Expression.__init__(self) + ReferenceObject.__init__(self) + self.name = name + + @cached_property + def _resolve_once(self) -> tp.Optional[BaseParserObject]: + result = self.namespace.get_object(self.name) + if result is None: + logger.debug( + f"{self.__class__.__name__} did not resolve: {str(self)}\n" + f"Object {self.name} not found in {self.namespace}" + ) + return result + + @cached_property + def referenced_object(self) -> str: + resolved = self._resolve_once + if isinstance(resolved, ReferenceObject): + return resolved.referenced_object + return ".".join([*self.namespace.location, self.name]) + + def dump(self, current_indent: int = 0, indent: tp.Optional[int] = 4) -> str: + return self.name + + @classmethod + @tp.overload + def from_ast(cls, node: ast.Name, **kwargs) -> "Name": # type: ignore + ... + + @classmethod + @tp.overload + def from_ast(cls, node: tp.Union[ast.stmt, ast.expr], **kwargs) -> None: + ... + + @classmethod + def from_ast(cls, node, **kwargs): + if not isinstance(node, ast.Name): + return None + return cls(node.id) + + +class Attribute(Expression, ReferenceObject): + """ + This class if for nodes that represent :py:class:`ast.Attribute`. 
+ """ + + def __init__(self, value: Expression, attr: str): + Expression.__init__(self) + ReferenceObject.__init__(self) + self.add_child(value, "value") + self.attr = attr + + @cached_property + def _resolve_once(self) -> tp.Optional[BaseParserObject]: + value: tp.Optional[tp.Union[BaseParserObject, "Namespace"]] = ReferenceObject.resolve_absolute( + self.children["value"] + ) + if is_instance(value, "dff.utils.parser.namespace.Namespace"): + value = tp.cast("Namespace", value) + obj = value.get_object(self.attr) + if obj is None: + logger.debug( + f"{self.__class__.__name__} did not resolve: {str(self)}\nKey {self.attr} does not exist in {value}" + ) + return obj + return None + + @cached_property + def referenced_object(self) -> str: + resolved = self._resolve_once + if isinstance(resolved, ReferenceObject): + return resolved.referenced_object + value = self.children["value"] + return value.true_value() + "." + self.attr + + def dump(self, current_indent: int = 0, indent: tp.Optional[int] = 4) -> str: + return self.children["value"].dump(current_indent, indent) + "." + self.attr + + @classmethod + @tp.overload + def from_ast(cls, node: ast.Attribute, **kwargs) -> "Attribute": # type: ignore + ... + + @classmethod + @tp.overload + def from_ast(cls, node: tp.Union[ast.stmt, ast.expr], **kwargs) -> None: + ... + + @classmethod + def from_ast(cls, node, **kwargs): + if not isinstance(node, ast.Attribute): + return None + return cls(Expression.auto(node.value), node.attr) + + +class Subscript(Expression, ReferenceObject): + """ + This class if for nodes that represent :py:class:`ast.Subscript`. 
+ """ + + def __init__(self, value: Expression, index: Expression): + Expression.__init__(self) + ReferenceObject.__init__(self) + self.add_child(value, "value") + self.add_child(index, "index") + + @cached_property + def _resolve_once(self) -> tp.Optional[BaseParserObject]: + value: tp.Optional[BaseParserObject] = ReferenceObject.resolve_absolute(self.children["value"]) + index: tp.Optional[BaseParserObject] = ReferenceObject.resolve_absolute(self.children["index"]) + + debug_message = f"{self.__class__.__name__} did not resolve: {str(self)}" + + if value is None: + logger.debug(f"{debug_message}\nValue did not resolve: {self.children['value']}") + return None + if index is None: + logger.debug(f"{debug_message}\nIndex did not resolve: {self.children['index']}") + return None + if not isinstance(value, (Dict, Iterable)): + logger.debug(f"{debug_message}\nValue is not a `Dict`: {value}") + return None + if not isinstance(index, Expression): + logger.debug(f"{debug_message}\nIndex is not an `Expression`: {index}") + return None + result = value.get(index) + if result is None: + logger.debug(f"{debug_message}\nKey not found.\nKey: {index}\nDict: {value}") + return None + return result + + @cached_property + def referenced_object(self) -> str: + resolved = self._resolve_once + if isinstance(resolved, ReferenceObject): + return resolved.referenced_object + value = self.children["value"] + index = self.children["index"] + return value.true_value() + "[" + index.true_value() + "]" + + def dump(self, current_indent: int = 0, indent: tp.Optional[int] = 4) -> str: + return ( + self.children["value"].dump(current_indent, indent) + + "[" + + self.children["index"].dump(current_indent, indent) + + "]" + ) + + @classmethod + @tp.overload + def from_ast(cls, node: ast.Subscript, **kwargs) -> "Subscript": # type: ignore + ... + + @classmethod + @tp.overload + def from_ast(cls, node: tp.Union[ast.stmt, ast.expr], **kwargs) -> None: + ... 
+ + @classmethod + def from_ast(cls, node, **kwargs): + if not isinstance(node, ast.Subscript): + return None + value = Expression.auto(node.value) + # todo: remove the right part when python3.8 support is dropped + if isinstance(node.slice, ast.Slice) or is_instance(node.slice, "_ast.ExtSlice"): + raise RuntimeError(f"Slices are not supported: {unparse(node)}") + index = node.slice + # todo: remove this when python3.8 support is dropped + if is_instance(index, "_ast.Index"): + index = index.value # type: ignore + return cls(value, Expression.auto(index)) + + +class Iterable(Expression): + """ + This class if for nodes that represent :py:class:`ast.Tuple`, :py:class:`ast.List` or :py:class:`ast.Set`. + """ + + class Type(tuple, Enum): + LIST = ("[", "]") + TUPLE = ("(", ")") + SET = ("{", "}") + + def __init__(self, iterable: tp.Iterable[Expression], iterable_type: Type): + Expression.__init__(self) + self.children: tp.Dict[str, Expression] + self.type: Iterable.Type = iterable_type + """Type of the iterable""" + for index, value in enumerate(iterable): + self.add_child(value, str(index)) + + def __iter__(self): + yield from self.children.values() + + def __len__(self): + return len(self.children) + + def __getitem__(self, item: tp.Union[Expression, str, int]) -> Expression: + if isinstance(item, str): + return self.children[item] + elif isinstance(item, int): + return self.children[str(item)] + else: + return self.children[str(item)] + + def get(self, item: tp.Union[Expression, str, int], default=None) -> Expression: + if isinstance(item, str): + return self.children.get(item, default) + elif isinstance(item, int): + return self.children.get(str(item), default) + else: + return self.children.get(str(item), default) + + def dump(self, current_indent: int = 0, indent: tp.Optional[int] = 4) -> str: + return ( + self.type.value[0] + + ", ".join([child.dump(current_indent, indent) for child in self.children.values()]) + + ("," if (len(self.children) == 1 and self.type 
== Iterable.Type.TUPLE) else "") + + self.type.value[1] + ) + + @classmethod + @tp.overload + def from_ast(cls, node: tp.Union[ast.Tuple, ast.List, ast.Set], **kwargs) -> "Iterable": # type: ignore + ... + + @classmethod + @tp.overload + def from_ast(cls, node: tp.Union[ast.stmt, ast.expr], **kwargs) -> None: + ... + + @classmethod + def from_ast(cls, node, **kwargs): + if not isinstance(node, (ast.Tuple, ast.List, ast.Set)): + return None + result = [] + for item in node.elts: + result.append(Expression.auto(item)) + if isinstance(node, ast.Tuple): + iterable_type = Iterable.Type.TUPLE + elif isinstance(node, ast.List): + iterable_type = Iterable.Type.LIST + else: + iterable_type = Iterable.Type.SET + return cls(result, iterable_type) + + +class Call(Expression): + """ + This class if for nodes that represent :py:class:`ast.Call`. + """ + + def __init__(self, func: Expression, args: tp.List[Expression], keywords: tp.Dict[str, Expression]): + Expression.__init__(self) + self.add_child(func, "func") + self.args: tp.List[Expression] = args + self.keywords: tp.Dict[str, Expression] = keywords + for index, arg in enumerate(args): + self.add_child(arg, "arg_" + str(index)) + for key, value in keywords.items(): + self.add_child(value, "keyword_" + key) + + def get_args(self, func_sig: Signature) -> tp.Dict[str, Expression]: + """ + Return a dictionary of pairs `{arg_name: arg_value}`. + If `arg_name` corresponds to a collection of unbound arguments (such as `args` in `def func(*args, **kwargs):`), + `arg_value` has type :py:class:`~.Iterable` (for a tuple of positional unbound arguments) + or :py:class:`~.Dict`(for a dict of unbound keyword arguments). + Note: alternative names for collections of unbound arguments are supported + (i.e. if a function is defined as `def func(*func_args, **func_kwargs):`). + + :param func_sig: Function signature. + :return: A mapping from argument names to their values (represented by :py:class:`.Expression`). 
+ :raises TypeError:
+ If `self.args` and `self.keywords` do not match function signature.
+ """
+ first_arg = list(func_sig.parameters.keys())[0]
+ if first_arg in ("self", "cls"):
+ stub = [None]
+ else:
+ stub = []
+
+ params = func_sig.bind(*stub, *self.args, **self.keywords)
+ params.apply_defaults()
+
+ result: tp.Dict[str, Expression] = {}
+ for key, value in params.arguments.items():
+ if key not in ("self", "cls"):
+ if func_sig.parameters[key].kind == Parameter.VAR_POSITIONAL: # *args processing
+ result[key] = Iterable(value, Iterable.Type.TUPLE)
+ elif func_sig.parameters[key].kind == Parameter.VAR_KEYWORD: # **kwargs processing
+ result[key] = Dict(list(map(Expression.from_obj, value.keys())), list(value.values()))
+ else:
+ result[key] = value if isinstance(value, Expression) else Expression.from_obj(value)
+ return result
+
+ @cached_property
+ def func_name(self) -> str:
+ """Name of the function being called. If function being called is a lambda function, its body is returned."""
+ func = self.children["func"]
+ if isinstance(func, ReferenceObject):
+ return func.referenced_object
+ return str(func)
+
+ def dump(self, current_indent: int = 0, indent: tp.Optional[int] = 4) -> str:
+ return (
+ self.children["func"].dump(current_indent, indent)
+ + "("
+ + ", ".join(
+ [
+ self.children[arg].dump(current_indent, indent)
+ for arg in self.children.keys()
+ if arg.startswith("arg_")
+ ]
+ + [
+ f"{keyword[len('keyword_'):]}={self.children[keyword].dump(current_indent, indent)}"
+ for keyword in self.children.keys()
+ if keyword.startswith("keyword_")
+ ]
+ )
+ + ")"
+ )
+
+ @classmethod
+ @tp.overload
+ def from_ast(cls, node: ast.Call, **kwargs) -> "Call": # type: ignore
+ ...
+
+ @classmethod
+ @tp.overload
+ def from_ast(cls, node: tp.Union[ast.stmt, ast.expr], **kwargs) -> None:
+ ...
+
+ @classmethod
+ def from_ast(cls, node, **kwargs):
+ if not isinstance(node, ast.Call):
+ return None
+ func = Expression.auto(node.func)
+ args = []
+ keywords = {}
+ for arg in node.args:
+ if isinstance(arg, ast.Starred):
+ raise StarError(f"Starred calls are not supported: {unparse(node)}")
+ args.append(Expression.auto(arg))
+ for keyword in node.keywords:
+ if keyword.arg is None:
+ raise StarError(f"Starred calls are not supported: {unparse(node)}")
+ keywords[str(keyword.arg)] = Expression.auto(keyword.value)
+ return cls(func, args, keywords)
+
+
+class Generator(BaseParserObject):
+ """
+ This class is for nodes that represent :py:class:`ast.comprehension`.
+ """
+
+ def __init__(self, target: Expression, iterator: Expression, ifs: tp.List[Expression], is_async: bool):
+ BaseParserObject.__init__(self)
+ self.add_child(target, "target")
+ self.add_child(iterator, "iter")
+ for index, if_expr in enumerate(ifs):
+ self.add_child(if_expr, "if_" + str(index))
+ self.is_async = is_async
+
+ def dump(self, current_indent: int = 0, indent: tp.Optional[int] = 4) -> str:
+ ifs = [
+ f"if {expression.dump(current_indent, indent)}"
+ for key, expression in self.children.items()
+ if key.startswith("if_")
+ ]
+ return (
+ ("async " if self.is_async else "")
+ + f"for {self.children['target'].dump(current_indent, indent)}"
+ + f" in {self.children['iter'].dump(current_indent, indent)}"
+ + (" " if ifs else "")
+ + " ".join(ifs)
+ )
+
+ @classmethod
+ @tp.overload
+ def from_ast(cls, node: ast.comprehension, **kwargs) -> "Generator": # type: ignore
+ ...
+
+ @classmethod
+ @tp.overload
+ def from_ast(cls, node: tp.Union[ast.stmt, ast.expr], **kwargs) -> None:
+ ...
+ + @classmethod + def from_ast(cls, node, **kwargs): + if not isinstance(node, ast.comprehension): + return None + return cls( + target=Expression.auto(node.target), + iterator=Expression.auto(node.iter), + ifs=[Expression.auto(if_expr) for if_expr in node.ifs], + is_async=node.is_async == 1, + ) + + +class Comprehension(Expression): + """ + This class if for nodes that represent :py:class:`ast.DictComp`, :py:class:`ast.ListComp`, + :py:class:`ast.SetComp` or :py:class:`ast.GeneratorExp`. + """ + + class Type(tuple, Enum): + LIST = ("[", "]") + GEN = ("(", ")") + SET = ("{", "}") + DICT = (None, None) + + def __init__( + self, + element: tp.Union[Expression, tp.Tuple[Expression, Expression]], + generators: tp.List[Generator], + comp_type: Type, + ): + Expression.__init__(self) + if isinstance(element, tuple): + if comp_type is not Comprehension.Type.DICT: + raise RuntimeError(comp_type) + self.add_child(element[0], "key") + self.add_child(element[1], "value") + else: + if comp_type is Comprehension.Type.DICT: + raise RuntimeError(comp_type) + self.add_child(element, "element") + + self.comp_type: Comprehension.Type = comp_type + """Type of comprehension""" + for index, generator in enumerate(generators): + self.add_child(generator, "gens_" + str(index)) + + def dump(self, current_indent: int = 0, indent: tp.Optional[int] = 4) -> str: + gens = [gen.dump(current_indent, indent) for key, gen in self.children.items() if key.startswith("gens_")] + if self.comp_type is Comprehension.Type.DICT: + return ( + f"{{{self.children['key'].dump(current_indent, indent)}: " + f"{self.children['value'].dump(current_indent, indent)}" + (" " if gens else "") + " ".join(gens) + "}" + ) + else: + return ( + self.comp_type.value[0] + + self.children["element"].dump(current_indent, indent) + + (" " if gens else "") + + " ".join(gens) + + self.comp_type.value[1] + ) + + @classmethod + @tp.overload + def from_ast( # type: ignore + cls, node: tp.Union[ast.ListComp, ast.SetComp, 
ast.GeneratorExp], **kwargs + ) -> "Comprehension": + ... + + @classmethod + @tp.overload + def from_ast(cls, node: tp.Union[ast.stmt, ast.expr], **kwargs) -> None: + ... + + @classmethod + def from_ast(cls, node, **kwargs): + if not isinstance(node, (ast.DictComp, ast.ListComp, ast.SetComp, ast.GeneratorExp)): + return None + gens = [Generator.from_ast(gen) for gen in node.generators] + if isinstance(node, ast.DictComp): + return cls( + (Expression.auto(node.key), Expression.auto(node.value)), + gens, + Comprehension.Type.DICT, + ) + elif isinstance(node, ast.ListComp): + comp_type = Comprehension.Type.LIST + elif isinstance(node, ast.SetComp): + comp_type = Comprehension.Type.SET + elif isinstance(node, ast.GeneratorExp): + comp_type = Comprehension.Type.GEN + return cls( + Expression.auto(node.elt), + gens, + comp_type, + ) diff --git a/dff/utils/parser/dff_project.py b/dff/utils/parser/dff_project.py new file mode 100644 index 000000000..59ce1bc31 --- /dev/null +++ b/dff/utils/parser/dff_project.py @@ -0,0 +1,864 @@ +""" +DFF Project +----------- +This module defines a class that represents a DFF project -- +a collection of python source files that define a script and a Pipeline. + +Glossary +^^^^^^^^ +Script Initializer -- A function that takes a DFF script and uses it to initialize an object to store and process it. +""" +from pathlib import Path +import json +import typing as tp +import logging +from collections import defaultdict +import ast +import inspect +from typing_extensions import TypeAlias + +try: + import networkx as nx +except ImportError: + raise ImportError("Module `networkx` is not installed. 
Install it with `pip install dff[parser]`.") + +from dff.utils.parser.base_parser_object import ( + cached_property, + BaseParserObject, + Call, + ReferenceObject, + Import, + ImportFrom, + Assignment, + Expression, + Dict, + String, + Iterable, + Statement, + Python, +) +from dff.utils.parser.namespace import Namespace +from dff.utils.parser.exceptions import ScriptValidationError, ParsingError +from dff.utils.parser.yaml import yaml +from dff.pipeline.pipeline.pipeline import Actor, Pipeline +from dff.script.core.keywords import Keywords +import dff.script.labels as labels + +logger = logging.getLogger(__name__) + +script_initializers: tp.Dict[str, inspect.Signature] = { + **{ + actor_name: inspect.signature(Actor.__init__) + for actor_name in ( + "dff.pipeline.pipeline.actor.Actor", + "dff.pipeline.pipeline.pipeline.Actor", + ) + }, + **{ + pipeline_name: inspect.signature(Pipeline.from_script) + for pipeline_name in ( + "dff.pipeline.Pipeline.from_script", + "dff.pipeline.pipeline.pipeline.Pipeline.from_script", + ) + }, +} +""" +A mapping from names of script initializers to their signatures. + +:meta hide-value: +""" + +label_prefixes = ( + "dff.script.labels.std_labels.", + "dff.script.labels.", +) +"""A tuple of possible prefixes for label names.""" + +label_args: tp.Dict[str, inspect.Signature] = { + label_prefix + label.__name__: inspect.signature(label) + for label in ( + getattr(labels, lbl) + for lbl in ( + "backward", + "forward", + "previous", + "repeat", + "to_fallback", + "to_start", + ) + ) + for label_prefix in label_prefixes +} +""" +A mapping from label names to their signatures. 
+ +:meta hide-value: +""" + +keyword_prefixes = ( + "dff.script.core.keywords.Keywords.", + "dff.script.core.keywords.", + "dff.script.", + "dff.script.Keywords.", +) +"""A tuple of possible keyword name prefixes.""" + +keyword_dict = {k: [keyword_prefix + k for keyword_prefix in keyword_prefixes] for k in Keywords.__members__} +""" +A mapping from short names of keywords to all their full names. +(e.g. GLOBAL -> [dff.script.GLOBAL, dff.script.Keywords.GLOBAL, ...]) + +:meta hide-value: +""" + +keyword_list = [keyword_prefix + k for keyword_prefix in keyword_prefixes for k in Keywords.__members__] +""" +A list of all keyword full names. + +:meta hide-value: +""" + +reversed_keyword_dict = {keyword_prefix + k: k for k in Keywords.__members__ for keyword_prefix in keyword_prefixes} +""" +A mapping from full keyword names to their short names. + +:meta hide-value: +""" + +RecursiveDictValue: TypeAlias = tp.Union[str, tp.Dict[str, "RecursiveDictValue"]] +RecursiveDict: TypeAlias = tp.Dict[str, "RecursiveDictValue"] +DFFProjectDict: TypeAlias = tp.Dict[str, "RecursiveDict"] + + +class DFFProject(BaseParserObject): + """ + A collection of files that define a script and a script initializer. + """ + + def __init__( + self, + namespaces: tp.List["Namespace"], + validate: bool = True, + script_initializer: tp.Optional[str] = None, + ): + """ + + :param namespaces: A list of Namespaces that comprise a DFF project. + :param validate: + Whether to perform validation -- check for a script initializer and validate its arguments. + Defaults to True. + :param script_initializer: + A colon-separated string that points to a script initializer call. + The first part of the string should be the name of the namespace. + The second part of the string should be the name of the object that is a result of script initializer call. + Defaults to None. 
+ """ + BaseParserObject.__init__(self) + self.children: tp.MutableMapping[str, Namespace] = {} + self.script_initializer = script_initializer + """ + A colon-separated string that points to a script initializer call. + The first part of the string should be the name of the namespace. + The second part of the string should be the name of the object that is a result of script initializer call. + """ + if script_initializer is not None and len(script_initializer.split(":")) != 2: + raise ValueError( + f"`script_initializer` should be a string of two parts separated by `:`: {script_initializer}" + ) + for namespace in namespaces: + self.add_child(namespace, namespace.name) + if validate: + _ = self.graph + + def get_namespace(self, namespace_name: str) -> tp.Optional[Namespace]: + """Get a namespace by its name. Return None if it does not exist.""" + return self.children.get(namespace_name) or self.children.get(namespace_name + ".__init__") + + @cached_property + def script_initializer_call(self) -> Call: + """ + Return a Script Initializer call. + If `self.script_initializer` is specified during `__init__` return the call it points to and verify it. + Otherwise, search for a Script Initializer call in `self.namespaces`. + + :raises ScriptValidationError: + This exception is called under these conditions: + + - If `self.script_initializer` is specified during `__init__` and any of the following is true: + - Namespace specified by the first part of the `script_initializer` + does not exist in `self.namespaces`. + - Object specified by the second part of the `script_initializer` does not exist in namespace. + - Object specified by the second part of the `script_initializer` is not an assignment. + - Object specified by the second part of the `script_initializer` + assigns an object other than :py:class:`~.Call`. + - Object specified by the second part of the `script_initializer` + assigns an object other than any specified in :py:data:`~.script_initializers`. 
+ - If `self.script_initializer` is not specified and a search found multiple or no Script Initializer calls. + """ + call = None + if self.script_initializer is not None: + namespace_name, obj_name = self.script_initializer.split(":") + namespace = self.children.get(namespace_name) + if namespace is None: + raise ScriptValidationError(f"Namespace {namespace_name} not found.") + obj = namespace.children.get(obj_name) + if obj is None: + raise ScriptValidationError(f"Object {obj_name} not found in namespace {namespace_name}.") + if not isinstance(obj, Assignment): + raise ScriptValidationError(f"Object {obj_name} is not `Assignment`: {obj}") + value = obj.children["value"] + if not isinstance(value, Call): + raise ScriptValidationError(f"Object {obj_name} is not `Call`: {value}") + if value.func_name not in script_initializers.keys(): + raise ScriptValidationError(f"Object {obj_name} is not a Script Initializer: {value.func_name}") + return value + for namespace in self.children.values(): + for statement in namespace.children.values(): + if isinstance(statement, Assignment): + value = statement.children["value"] + if isinstance(value, Call): + func_name = value.func_name + if func_name in script_initializers.keys(): + if call is None: + call = value + else: + raise ScriptValidationError( + f"Found two Script Initializer calls\nFirst: {str(call)}\nSecond:{str(value)}" + ) + if call is not None: + return call + raise ScriptValidationError( + "Script Initialization call is not found (use either `Actor` or `Pipeline.from_script`" + ) + + @cached_property + def script_initializer_dependencies(self) -> tp.Dict[str, tp.Set[str]]: + """Dependencies of script initializer.""" + return self.script_initializer_call.dependencies() + + @cached_property + def script(self) -> tp.Tuple[Expression, Expression, Expression]: + """ + Extract objects representing script, start label and fallback label from Script Initializer call. 
+ If fallback label is not specified in that call, return start label instead.
+
+ :raises ScriptValidationError:
+ If Script Initializer call does not include `script` or `start_label` parameters.
+ """
+ call = self.script_initializer_call
+ args: tp.Dict[str, Expression] = call.get_args(script_initializers[call.func_name])
+ script = args.get("script")
+ start_label = args.get("start_label")
+ fallback_label = args.get("fallback_label")
+
+ # script validation
+ if script is None or script == "None":
+ raise ScriptValidationError(f"Actor argument `script` is not found: {str(call)}")
+
+ # start_label validation
+ if start_label is None or start_label == "None":
+ raise ScriptValidationError(f"Actor argument `start_label` is not found: {str(call)}")
+
+ # fallback_label validation
+ if fallback_label is None or fallback_label == "None":
+ fallback_label = start_label
+
+ return script, start_label, fallback_label
+
+ @cached_property
+ def resolved_script(
+ self,
+ ) -> tp.Tuple[
+ tp.Dict[Expression, tp.Dict[tp.Optional[Expression], tp.Dict[str, Expression]]],
+ tp.Tuple[str, str],
+ tp.Tuple[str, str],
+ ]:
+ """
+ Resolve values of :py:attr:`~.DFFProject.script`.
+ The first value (script) is resolved in the following way:
+
+ 1. For each (`flow_name`, `flow`) pair in the script resulting dict has a
+ (`resolved_flow_name`, `resolved_flow`) pair where `resolved_flow_name` is the result of
+ :py:meth:`~.ReferenceObject.resolve_expression` applied to `flow_name`;
+ and `resolved_flow` is a dictionary constructed in the following way:
+
+ 2. If `resolved_flow_name` is `GLOBAL`, `resolved_flow` is a dictionary with a single pair
+ (`None`, `resolved_node`) where `resolved_node` is the result of processing `flow` in the same way nodes are
+ processed (see step 3).
+ If `resolved_flow_name` is not `GLOBAL`, for each (`node_name`, `node`) pair in `flow`
+ resulting dict `resolved_flow` contains a pair (`resolved_node_name`, `resolved_node`) where
+ `resolved_node_name` is the result of :py:meth:`~.ReferenceObject.resolve_expression` applied to `node_name`;
+ and `resolved_node` is a dictionary constructed in the following way:
+
+ 3. For each (`key`, `value`) pair in `node` resulting dict has a
+ (`keyword`, `resolved_value`) pair where `resolved_value` is the result of
+ :py:meth:`~.ReferenceObject.resolve_expression` applied to `value`; and `keyword` is one of the keys of
+ :py:data:`~.keyword_dict`. If `key` is not a keyword, :py:exc:`~.ScriptValidationError`
+ is raised. Additionally the result contains a (__node__, `resolved_node`) pair
+ where __node__ is a literal string and `resolved_node` is the result of
+ :py:meth:`~.ReferenceObject.resolve_expression` applied to `node`.
+
+ The second and third values (start label and fallback label) are resolved in the following way:
+
+ If a label resolves to :py:class:`~.Iterable` of length 2 both elements of which resolve to
+ :py:class:`~.String` a tuple of their values is returned. Otherwise, :py:exc:`~.ScriptValidationError`
+ is raised.
+
+ Labels are also validated (checking that label keys exist in the script).
+
+ :return: A tuple (resolved_script, resolved_start_label, resolved_fallback_label).
+ :raises ScriptValidationError:
+ During script resolution if:
+ - The first element of :py:attr:`~.DFFProject.script` does not resolve to :py:class:`~.Dict`.
+ - If `resolved_flow_name` is not `GLOBAL` and `flow` does not resolve to :py:class:`~.Dict`.
+ - Here `node` refers to both `node` and, if `resolved_flow_name` is `GLOBAL`, `flow`:
+
+ - If `node` does not resolve to :py:class:`~.Dict`.
+ - If any key in `node` is not a keyword (is not in :py:data:`~.keyword_list`).
+ - If any key in `node` is a `GLOBAL` or `LOCAL` keyword.
+ - If any key is found twice inside the `node` dictionary. + + During label resolution if: + - Label does not resolve to :py:class:`~.Iterable`. + - Number of elements in label is not 2. + - Label elements do not resolve to :py:class:`~.String`. + + During label validation if a node referenced by a label does not exist in resolved script. + """ + script: tp.DefaultDict[Expression, tp.Dict[tp.Optional[Expression], tp.Dict[str, Expression]]] = defaultdict( + dict + ) + + def resolve_label(label: Expression) -> tp.Tuple[str, str]: + label = ReferenceObject.resolve_expression(label) + if not isinstance(label, Iterable): + raise ScriptValidationError(f"Label {label} is not iterable.") + if len(label) != 2: + raise ScriptValidationError(f"Length of label should be 2: {label}") + resolved_flow_name = ReferenceObject.resolve_absolute(label[0]) + resolved_node_name = ReferenceObject.resolve_absolute(label[1]) + if not isinstance(resolved_flow_name, String) or not isinstance(resolved_node_name, String): + raise ScriptValidationError(f"Label elements should be strings: {label}") + return str(resolved_flow_name), str(resolved_node_name) + + def resolve_node(node_info: Expression) -> tp.Dict[str, Expression]: + result: tp.Dict[str, Expression] = {} + node_info = ReferenceObject.resolve_expression(node_info) + if not isinstance(node_info, Dict): + raise ScriptValidationError(f"Node {str(node_info)} is not a Dict") + result["__node__"] = node_info + for key, value in node_info.items(): + str_key = key.true_value() + if str_key not in keyword_list: + raise ScriptValidationError(f"Node key {str_key} is not a keyword") + if str_key in keyword_dict["GLOBAL"]: + raise ScriptValidationError(f"Node key is a GLOBAL keyword: {str_key}") + if str_key in keyword_dict["LOCAL"]: + raise ScriptValidationError(f"Node key is a LOCAL keyword: {str_key}") + + keyword = reversed_keyword_dict[str_key] + + if result.get(keyword) is not None: # duplicate found + raise ScriptValidationError(f"Keyword 
{str_key} is used twice in one node: {str(node_info)}") + + result[keyword] = ReferenceObject.resolve_expression(value) + return result + + flows = ReferenceObject.resolve_absolute(self.script[0]) + if not isinstance(flows, Dict): + raise ScriptValidationError(f"{str(self.script[0])} is not a Dict: {str(flows)}") + for flow, nodes in flows.items(): + resolved_flow = ReferenceObject.resolve_expression(flow) + if flow in keyword_dict["GLOBAL"]: + script[resolved_flow][None] = resolve_node(nodes) + else: + resolved_nodes = ReferenceObject.resolve_expression(nodes) + if not isinstance(resolved_nodes, Dict): + raise ScriptValidationError(f"{str(nodes)} is not a Dict: {str(resolved_nodes)}") + for node, info in resolved_nodes.items(): + script[resolved_flow][ReferenceObject.resolve_expression(node)] = resolve_node(info) + + resolved_start_label = resolve_label(self.script[1]) + resolved_fallback_label = resolve_label(self.script[2]) + + # validate labels + for resolved_label in (resolved_start_label, resolved_fallback_label): + flow = script.get(resolved_label[0]) # type: ignore + if flow is None: + raise ScriptValidationError( + f"Not found flow {str(resolved_label[0])} in {[str(key) for key in script.keys()]}" + ) + else: + if flow.get(resolved_label[1]) is None: # type: ignore + raise ScriptValidationError( + f"Not found node {str(resolved_label[1])} in {[str(key) for key in script.keys()]}" + ) + + return script, resolved_start_label, resolved_fallback_label + + @cached_property + def graph(self) -> nx.MultiDiGraph: + """ + Export DFF project as a networkx graph and validate transitions. + + Resulting graph contains the following fields: + - full_script: Stores dictionary exported via :py:meth:`~.DFFProject.to_dict`. + - start_label: A tuple of two strings (second element of :py:attr:`~.DFFProject.resolved_script`). + - fallback_label: A tuple of two strings (third element of :py:attr:`~.DFFProject.resolved_script`). 
+ + All nodes of the resulting graph are represented by a single value -- a tuple of strings or lists of strings. + + For each node in the script there is a node in the resulting graph which has a value of: + - `("GLOBAL_NODE", flow_name)` if the node belongs to the `GLOBAL` flow. + - `("LOCAL_NODE", flow_name, node_name)` if the node is a `LOCAL` node. + - `("NODE", flow_name, node_name)` otherwise. + + where `flow_name` and `node_name` are results of `str` applied to `resolved_flow_name` and `resolved_node_name` + respectively (see documentation of :py:attr:`~.DFFProject.resolved_script`). + + Additionally, nodes representing script nodes contain the following field: + - ref: Path to the :py:class:`~.Expression` representing the node (see :py:attr:`~.BaseParserObject.path`). + + Graph has other nodes: + - `("NONE",)` -- empty node. + - Label nodes. The first element of their value is `"LABEL"`, + the second element is the name of the label used (e.g. `"to_fallback"`). + The rest of the elements are tuples of two strings with the first element being a name + of a function argument, and the second element being its `true_value`. + + For each transition between script nodes there is an edge in the graph: + The first node of the edge is always a node representing a node in the script. + The second node is either a node in the script (if transition directly specifies it), a label node + (if one of the labels from :py:mod:`~.dff.script.labels` is used) and an empty node otherwise. + + All edges have 4 fields: + - label_ref: Path to the object defining transition label. + - label: `str` of either absolute value of the label or the label itself. + - condition_ref: Path to the object defining transition condition. + - condition: `str` of either absolute value of the condition or the condition itself. + + :raises ScriptValidationError: + - If `TRANSITION` keyword does not refer to a :py:class:`~.Dict`. 
+ - If any of the first two elements of any transition label is not :py:class:`~.String`. + """ + + def resolve_label(label: Expression, current_flow: Expression) -> tuple: + if isinstance(label, ReferenceObject): # label did not resolve (possibly due to a missing func def) + return ("NONE",) + if isinstance(label, String): + return "NODE", str(current_flow), str(label) + if isinstance(label, Iterable): + resolved_flow_name = ReferenceObject.resolve_absolute(label[0]) + resolved_node_name = ReferenceObject.resolve_absolute(label[1]) + if not isinstance(resolved_flow_name, String): + raise ScriptValidationError(f"First argument of label is not str: {label}") + if len(label) == 2 and not isinstance(resolved_node_name, String): # second element is priority + return "NODE", str(current_flow), str(resolved_flow_name) + if len(label) == 2: + return "NODE", str(resolved_flow_name), str(resolved_node_name) + if len(label) == 3: + if not isinstance(resolved_node_name, String): + raise ScriptValidationError(f"Second argument of label is not str: {label}") + return "NODE", str(resolved_flow_name), str(resolved_node_name) + if isinstance(label, Call): + if label.func_name in label_args: + return ( + "LABEL", + label.func_name.rpartition(".")[2], + *[ + (key, value.true_value()) + for key, value in label.get_args(label_args[label.func_name]).items() + ], + ) + logger.warning(f"Label did not resolve: {label}") + return ("NONE",) + + graph = nx.MultiDiGraph( + full_script=self.to_dict(), + start_label=self.resolved_script[1], + fallback_label=self.resolved_script[2], + ) + for flow_name, flow in self.resolved_script[0].items(): + for node_name, node_info in flow.items(): + if node_name is None: + current_label = ("GLOBAL_NODE", str(flow_name)) + elif node_name in keyword_dict["LOCAL"]: + current_label = ("LOCAL_NODE", str(flow_name), str(node_name)) + else: + current_label = ("NODE", str(flow_name), str(node_name)) + + graph.add_node( + current_label, + 
ref=node_info["__node__"].path, + ) + transitions = node_info.get("TRANSITIONS") + if transitions is None: + continue + if not isinstance(transitions, Dict): + raise ScriptValidationError(f"TRANSITIONS keyword should point to a dictionary: {transitions}") + for transition_label, transition_condition in transitions.items(): + graph.add_edge( + current_label, + resolve_label(ReferenceObject.resolve_expression(transition_label), flow_name), + label_ref=ReferenceObject.resolve_absolute(transition_label).path, + label=str(ReferenceObject.resolve_absolute(transition_label)), + condition_ref=ReferenceObject.resolve_absolute(transition_condition).path, + condition=str(ReferenceObject.resolve_absolute(transition_condition)), + ) + return graph + + def to_dict( + self, + object_filter: tp.Optional[tp.Dict[str, tp.Set[str]]] = None, + ) -> DFFProjectDict: + """ + Export DFF Project as a dictionary. + + First-level keys in the dictionary are the names of the namespaces in the project. + Second-level keys in the dictionary are the names of the objects in the namespaces. + + Values in the dictionary are results of applying the `process` function to the objects + in the namespaces. The function works in the following way: + + - If the object is :py:class:`~.Assignment`, the result is the same as the one that + assignment value would have. + + `process(target=value) = process(value)` + - If the object is :py:class:`~.Import` or :py:class:`~.ImportFrom`, the result is the string + representation of the object without import alias. + + `process(import a as b) = process(import a) = "import a"` + - If the object is :py:class:`~.Dict`, the result is also a dictionary in which keys and values are results + of applying the `process` function to keys and values of the dictionary being processed. + + `process({a: b}) = {process(a): process(b)}` + - Otherwise, the result is the string representation of the object (:py:meth:`~.BaseParserObject.__str__`). 
    @classmethod
    def from_dict(
        cls,
        dictionary: DFFProjectDict,
        validate: bool = True,
        script_initializer: tp.Optional[str] = None,
    ):
        """
        Construct DFF Project from a dictionary.

        :param dictionary:
            A dictionary with the same characteristics as one returned by :py:meth:`~.DFFProject.to_dict`.
        :param validate:
            Whether to perform validation -- check for a script initializer and validate its arguments.
            Defaults to True.
        :param script_initializer:
            A colon-separated string that points to a script initializer call.
            The first part of the string should be the name of the namespace.
            The second part of the string should be the name of the object that is a result of script initializer call.
            Defaults to None.

        :raises ParsingError:
            - If a dictionary object is an Import statement that contains an import alias.
            - If a dictionary object is an Assignment statement.
        """

        def process_dict(d):
            # Serialize a nested dict into a python dict-literal string; leaf values
            # are inserted verbatim (they are already code strings).
            return (
                "{" + ", ".join([f"{k}: {process_dict(v) if isinstance(v, dict) else v}" for k, v in d.items()]) + "}"
            )

        namespaces = []
        for namespace_name, namespace in dictionary.items():
            # Rebuild each namespace as python source lines, then parse them back
            # into a Namespace via ast.
            objects = []
            for obj_name, obj in namespace.items():
                if isinstance(obj, str):
                    split = obj.split(" ")
                    if split[0] == "import":
                        # "import module" -- the dict key is the import alias.
                        if len(split) != 2:
                            raise ParsingError(
                                f"Import statement should contain 2 words. AsName is set via key.\n{obj}"
                            )
                        objects.append(obj if split[1] == obj_name else obj + " as " + obj_name)
                    elif split[0] == "from":
                        # "from module import obj" -- the dict key is the import alias.
                        if len(split) != 4:
                            raise ParsingError(
                                f"ImportFrom statement should contain 4 words. AsName is set via key.\n{obj}"
                            )
                        objects.append(obj if split[3] == obj_name else obj + " as " + obj_name)
                    elif obj_name.isnumeric():
                        # Numeric keys mark statements unsupported by BPOs (e.g. func def);
                        # they are emitted verbatim, without an assignment.
                        objects.append(obj)
                    else:
                        # Plain values become assignments; explicit assignments inside
                        # the value string are rejected to avoid double "name = name = ...".
                        if isinstance(ast.parse(obj).body[0], (ast.Assign, ast.AnnAssign)):
                            raise ParsingError(f"Assignment statement should not be used in the dictionary: {obj}")
                        objects.append(f"{obj_name} = {obj}")
                else:
                    # Nested dict value -- serialize it into a dict literal and assign.
                    objects.append(f"{obj_name} = {str(process_dict(obj))}")
            namespaces.append(Namespace.from_ast(ast.parse("\n".join(objects)), location=namespace_name.split(".")))
        return cls(namespaces, validate, script_initializer)
    def to_python(self, project_root_dir: Path, object_filter: tp.Optional[tp.Dict[str, tp.Set[str]]] = None):
        """
        Export DFF Project as a directory of python files.

        For each namespace in the project if a file corresponding to that namespace does not exist in the directory,
        a new file will be created and the namespace will be dumped into it.
        If a file already exists, it will be edited in the following way:

        All statements are extracted from the file the same way :py:meth:`~.Namespace.from_file` extracts them.

        Extracted statements are then edited:
        Starting from the top of the namespace its statements are inserted into the list of extracted statements
        as late as possible (always inserts before the last inserted statement and if a statement with the same name
        already exists it will be replaced; if a statement with the same name already exists but comes after the last
        inserted statement a warning is given and a new statement is inserted before the last inserted statement
        resulting in two statements with the same name -- one before the last inserted statement and one after).

        This is done with the following in mind:
        If two statements come in the namespace one after another, their order likely matters (either because of direct
        referencing or some actions done in the first statement that affect the result of the second statement).

        :param project_root_dir:
            A directory to extract the project to.
        :param object_filter:
            An optional dictionary from namespace names to sets of object names. Only objects specified in the filter
            will be written into files.
            Defaults to :py:attr:`~.DFFProject.script_initializer_dependencies`.
        """
        logger.info(f"Executing `to_python` with project_root_dir={project_root_dir}")
        if object_filter is None:
            object_filter = self.script_initializer_dependencies

        for namespace in self.children.values():
            namespace_object_filter = object_filter.get(namespace.name)

            # Namespaces absent from the filter are not written at all.
            if namespace_object_filter is None:
                continue

            file = project_root_dir.joinpath(*namespace.name.split(".")).with_suffix(".py")
            if file.exists():
                # Merge into the existing file: re-parse it, then splice our
                # statements in while preserving the file's statement order.
                objects: tp.List[Statement] = []  # statements in file order
                names: tp.Dict[str, int] = {}  # reverse index of names

                with open(file, "r", encoding="utf-8") as fd:
                    parsed_file = ast.parse(fd.read())
                for statement in parsed_file.body:
                    statements = Statement.auto(statement)
                    if isinstance(statements, dict):
                        for obj_name, obj in statements.items():
                            if names.get(obj_name) is not None:
                                # NOTE(review): message interpolates the stored index of the
                                # first occurrence, not its source text -- confirm intent.
                                logger.warning(
                                    f"The same name is used twice:\n{str(names.get(obj_name))}\n{str(obj)}\nfile:{file}"
                                )
                            names[obj_name] = len(objects)
                            objects.append(obj)
                    elif isinstance(statements, Python):
                        objects.append(statements)
                    else:
                        raise RuntimeError(statement)

                # Walk our namespace bottom-up so each statement is inserted
                # "as late as possible" (see docstring).
                last_insertion_index = len(objects)
                for replaced_obj_name, replaced_obj in reversed(list(namespace.children.items())):
                    if replaced_obj_name in namespace_object_filter:
                        obj_index = names.get(replaced_obj_name)
                        if obj_index is not None:
                            if obj_index > last_insertion_index:
                                # Existing statement with this name is below the last
                                # insertion point: keep it, insert a duplicate above.
                                logger.warning(
                                    f"Object order was changed. This might cause issues.\n"
                                    f"Inserting object: {str(replaced_obj)}\n"
                                    f"New script places it below: {str(objects[last_insertion_index])}"
                                )
                                objects.insert(last_insertion_index, replaced_obj)
                            else:
                                # Replace the existing statement in place.
                                objects.pop(obj_index)
                                objects.insert(obj_index, replaced_obj)
                                last_insertion_index = obj_index
                        else:
                            objects.insert(last_insertion_index, replaced_obj)

                with open(file, "w", encoding="utf-8") as fd:
                    fd.write(Namespace.dump_statements(objects))
            else:
                logger.warning(f"File {file} is not found. It will be created.")
                file.parent.mkdir(parents=True, exist_ok=True)
                file.touch()
                with open(file, "w", encoding="utf-8") as fd:
                    fd.write(namespace.dump(object_filter=namespace_object_filter))
class BaseParserException(BaseException):
    """
    Root of the parser exception hierarchy.
    """


class ScriptValidationError(BaseParserException):
    """
    Raised during script validation.
    """


class ParsingError(BaseParserException):
    """
    Raised during parsing.
    """


class ResolutionError(BaseParserException):
    """
    Raised during name resolution.
    """


# todo: add support for star notation
class StarError(BaseParserException):
    """
    Raised when star notation is used.
    """
class Namespace(BaseParserObject):
    """
    This class represents a python file.
    It stores all the statements / expressions defined in a file as well as the location of that file relative to
    `project_root_dir`.
    """

    def __init__(self, location: tp.List[str], names: tp.Dict[str, Statement]):
        BaseParserObject.__init__(self)
        self.children: tp.MutableMapping[str, Statement] = {}
        self.location: tp.List[str] = location
        """Location of the file (as a list of path extensions from `project_root_dir`)"""
        self.name: str = ".".join(location)
        """A name of the file as it would be imported in python (except `__init__` files -- they end with `.__init__`)"""
        # Register every statement as a child node under its name.
        for key, value in names.items():
            self.add_child(value, key)

    def resolve_relative_import(self, module: str, level: int = 0) -> str:
        """
        Find a location of a namespace referenced by `level * "." + module` in this namespace.

        :param module: Name of the module.
        :param level: Relative position of the module.
        :return: A location of the module (a string representing path to the module separated by dots).
        """
        stripped_module = module.lstrip(".")
        leading_dots = len(module) - len(stripped_module)
        # Leading dots inside `module` and an explicit `level` are two encodings of
        # the same information; having both is contradictory.
        if leading_dots != 0:
            if level == 0:
                level = leading_dots
            else:
                raise RuntimeError(f"Level is set but module contains leading dots: module={module}, level={level}")
        if level == 0:
            # No relativity specified: resolve as a sibling of the current file.
            level = 1
        if level > len(self.location):
            raise ImportError(
                f"Cannot import file outside the project_root_dir\n"
                f"Current file location={self.location}\nAttempted import of {module} at level {level}"
            )
        return ".".join(self.location[:-level] + ([stripped_module] if stripped_module else []))

    @cached_property
    def namespace(self) -> "Namespace":
        # A namespace is its own namespace (terminates the parent chain lookup).
        return self

    @cached_property
    def dff_project(self) -> "DFFProject":
        # Delegates to the parent (expected to be a DFFProject).
        if self.parent is None:
            raise RuntimeError(f"Parent is not set: {repr(self)}")
        return self.parent.dff_project

    def get_object(self, item: str):
        """Return an object by its name. If the object is of type `Assignment` return its value.

        Returns None if the object is not found.
        """
        obj = self.children.get(item)
        if isinstance(obj, Assignment):
            return obj.children["value"]
        return obj

    def __getitem__(self, item: str):
        """
        Return an object by its name. If the object is of type `Assignment` return its value.
        :raises KeyError:
            Object not found.
        """
        obj = self.children[item]
        if isinstance(obj, Assignment):
            return obj.children["value"]
        return obj

    @staticmethod
    def dump_statements(statements: tp.List[Statement], current_indent: int = 0, indent: tp.Optional[int] = 4) -> str:
        """
        A method for dumping a list of statements. Inserts newlines between statements in the following amount:

        - If any of the two neighboring statements is `Def` -- 3 new lines.
        - If both neighboring statements are :py:class:`~.Import` or :py:class:`.~ImportFrom` -- 1 new line.
        - Otherwise, 2 new lines.

        :param statements: A list of statements to dump.
        :param current_indent: Current indentation level (in whitespace number), defaults to 0.
        :param indent:
            Indentation increment (in whitespace number), defaults to 4.
            If set to None, all statements will be printed in one line (except for unsupported statements).
        :return: Dumps of the statements separated by an appropriate amount of new lines.
        """

        def get_newline_count(statement: Statement):
            # Number of newlines this statement wants around itself.
            if isinstance(statement, (Import, ImportFrom)):
                return 1
            if isinstance(statement, Python) and statement.type.endswith("Def"):  # function and class defs
                return 3
            return 2

        if len(statements) == 0:
            return "\n"

        result = [statements[0].dump(current_indent, indent)]
        previous_stmt = statements[0]
        for current_stmt in statements[1:]:
            # The larger of the two neighbors' newline counts wins.
            result.append(max(get_newline_count(previous_stmt), get_newline_count(current_stmt)) * "\n")
            result.append(current_indent * " " + current_stmt.dump(current_indent, indent))
            previous_stmt = current_stmt
        return "".join(result) + "\n"

    def dump(
        self, current_indent: int = 0, indent: tp.Optional[int] = 4, object_filter: tp.Optional[tp.Set[str]] = None
    ) -> str:
        """
        Dump all statements in the namespace.

        :param current_indent: Current indentation level (in whitespace number), defaults to 0.
        :param indent:
            Indentation increment (in whitespace number), defaults to 4.
            If set to None, all statements will be printed in one line (except for unsupported statements).
        :param object_filter:
            A set of object names. If specified, only objects specified in the filter will be dumped.
            Defaults to None.
        :return: Representation of the namespace as a string.
        """
        return self.dump_statements(
            [value for key, value in self.children.items() if object_filter is None or key in object_filter],
            current_indent,
            indent,
        )

    def get_imports(self) -> tp.List[str]:
        """Return a list of imported modules (represented by their locations)."""
        imports = []
        for statement in self.children.values():
            if isinstance(statement, Import):
                imports.append(self.resolve_relative_import(statement.module))
            if isinstance(statement, ImportFrom):
                imports.append(self.resolve_relative_import(statement.module, statement.level))
        return imports

    @classmethod
    def from_ast(cls, node: ast.Module, **kwargs) -> "Namespace":  # type: ignore
        """
        Construct Namespace from :py:class:`ast.Module`.

        For each statement in the module:

        - If it is supported by any :py:class:`~.Statement`, all objects extracted from the statement will be added to
          the namespace under their names.
        - Otherwise a :py:class:`~.Python` object is added under a string representation of the count of python
          objects added to the namespace so far.

        For example, there is currently no :py:class:`~.Statement` that supports function definitions and
        if one is present in `node`, its (key, value) pair will be `("0", Python(def ...))`.
        """
        children = {}
        python_counter = 0
        for statement in node.body:
            statements = Statement.auto(statement)
            if isinstance(statements, dict):
                children.update(statements)
            elif isinstance(statements, Python):
                # Unsupported statements are keyed by a running counter.
                children[str(python_counter)] = statements
                python_counter += 1
        return cls(names=children, **kwargs)

    @classmethod
    def from_file(cls, project_root_dir: Path, file: Path):
        """
        Construct a Namespace from a python file.

        For each statement in the file:

        - If it is supported by any :py:class:`~.Statement`, all objects extracted from the statement will be added to
          the namespace under their names.
        - Otherwise a :py:class:`~.Python` object is added under a string representation of the count of python
          objects added to the namespace so far.

        For example, there is currently no :py:class:`~.Statement` that supports function definitions and
        if one is present in `file`, its (key, value) pair will be `("0", Python(def ...))`.

        :param project_root_dir: A root dir of the dff project. All project files should be inside this dir.
        :param file: A `.py` file to construct the namespace from.
        :return: A Namespace of the file.
        """
        location = list(file.with_suffix("").relative_to(project_root_dir).parts)
        with open(file, "r", encoding="utf-8") as fd:
            return Namespace.from_ast(ast.parse(fd.read()), location=location)
def create_app(fig: go.Figure):
    """Wrap a plotly figure into a minimal single-page Dash application.

    Hides axis tick labels and sets the page title before embedding the figure.

    :param fig: The figure to serve.
    :return: A configured :py:class:`dash.Dash` instance.
    """
    fig.update_layout(title="Script Graph View")
    fig.update_xaxes(showticklabels=False)
    fig.update_yaxes(showticklabels=False)
    graph_style = {"width": "160vh", "height": "120vh", "margin": "auto"}
    app = dash.Dash()
    app.layout = html.Div([dcc.Graph(id="script", figure=fig, style=graph_style)])
    return app
def get_ideogram_ends(ideogram_len, gap):
    """Return [start, end] angular coordinates for each ideogram arc.

    Arcs are laid out consecutively: each arc spans its own length and is
    separated from the next arc by `gap`.

    :param ideogram_len: Sequence of arc lengths.
    :param gap: Gap inserted after each arc.
    :return: List of [start, end] pairs, one per arc.
    """
    ends = []
    cursor = 0
    for length in ideogram_len:
        ends.append([cursor, cursor + length])
        cursor += length + gap
    return ends
def make_q_bezier(b):
    """Return an SVG path command for a quadratic Bezier curve.

    :param b: Control polygon -- exactly three (x, y) points.
    :return: An SVG "M ... Q ..." path string.
    :raises ValueError: If `b` does not contain exactly 3 points.
    """
    if len(b) != 3:
        raise ValueError("Control polygon must have 3 points")
    start, control, end = b
    return f"M {start[0]},{start[1]} Q {control[0]}, {control[1]} {end[0]}, {end[1]}"
def invPerm(perm):
    """Return the inverse of permutation `perm`.

    If ``perm[i] == s`` then ``result[s] == i``.
    """
    inverse = [0] * len(perm)
    for position, value in enumerate(perm):
        inverse[value] = position
    return inverse
k: + layout["shapes"].append(make_self_rel(left, "rgb(175,175,175)", ideo_colors[k], radius=radii_sribb[k])) + z = 0.9 * np.exp(1j * (left[0] + left[1]) / 2) + text = labels[k] + " {0} transitions to ".format(adjacency_df.iloc[k, k]) + ribbon_info.append( + go.Scatter( + x=[z.real], + y=[z.imag], + mode="markers", + text=text, + hoverinfo="text", + marker=dict(size=0.5, color=ideo_colors[k]), + ) + ) + else: + right = ribbon_ends[j][eta_inv[k]] + zi = 0.9 * np.exp(1j * (left[0] + left[1]) / 2) + zf = 0.9 * np.exp(1j * (right[0] + right[1]) / 2) + + texti = labels[k] + " {0} transitions to ".format(matrix[k][j]) + labels[j] + textf = labels[j] + " {0} transitions to ".format(matrix[j][k]) + labels[k] + + ribbon_info.append( + go.Scatter( + x=[zi.real], + y=[zi.imag], + mode="markers", + text=texti, + hoverinfo="text", + marker=dict(size=0.5, color=ribbon_color[k][j]), + ) + ) + ribbon_info.append( + go.Scatter( + x=[zf.real], + y=[zf.imag], + mode="markers", + text=textf, + hoverinfo="text", + marker=dict(size=0.5, color=ribbon_color[j][k]), + ) + ) + right = (right[1], right[0]) + if matrix[k][j] > matrix[j][k]: + color_of_highest = ribbon_color[k][j] + else: + color_of_highest = ribbon_color[j][k] + layout["shapes"].append(make_ribbon(left, right, "rgb(175, 175, 175)", color_of_highest)) + ideograms = [] + + for k in range(len(ideo_ends)): + node_z = make_ideogram_arc(1.1, ideo_ends[k]) + node_zi = make_ideogram_arc(1.0, ideo_ends[k]) + flow_z = make_ideogram_arc(1.1 + 0.2, ideo_ends[k]) + flow_zi = make_ideogram_arc(1.1, ideo_ends[k]) + for z, zi, label, meta, color in [ + (node_z, node_zi, labels[k], "node", ideo_colors[k]), + (flow_z, flow_zi, labels[k].split(", ")[1], "flow", flow_colors[k]), + ]: + m = len(z) + n = len(zi) + ideograms.append( + go.Scatter( + x=z.real, + y=z.imag, + mode="lines", + name=label, + line=dict(color=color, shape="spline", width=0.25), + text=label, + hoverinfo="text", + meta=[meta], + ) + ) + path = "M " + for s in range(m): + path 
+= str(z.real[s]) + ", " + str(z.imag[s]) + " L " + Zi = np.array(zi.tolist()[::-1]) + for s in range(m): + path += str(Zi.real[s]) + ", " + str(Zi.imag[s]) + " L " + path += str(z.real[0]) + " ," + str(z.imag[0]) + layout["shapes"].append(make_ideo_shape(path, "rgb(150,150,150)", color)) + + layout["width"] = width + layout["height"] = height + data = ideograms + ribbon_info + fig = {"data": data, "layout": layout} + return fig + + +def add_annotations(figure: go.Figure): + def add_annotations_inner(trace): + if not trace.meta or "node" not in trace.meta: + return () + rand = 1.6 + anno = figure.add_annotation( + x=trace.x[len(trace.x) // 2] * rand, + y=trace.y[len(trace.y) // 2] * rand, + text=trace.name.replace(", ", "
def is_dir(arg: Optional[str]) -> Optional[Path]:
    """Check that the passed argument is a directory.

    :param arg: Argument to check (a path string, a Path, or None).
    :return: :py:class:`.Path` instance created from arg if it is a directory; None if arg is None.
    :raises argparse.ArgumentTypeError: If arg does not point to an existing directory.
    """
    if arg is None:
        return None
    # Bug fix: previously only `str` arguments were converted to Path, so any other
    # non-None value (e.g. an actual Path) fell through to the raise statement with
    # the local `path` unbound, producing a NameError instead of ArgumentTypeError.
    path = Path(arg)
    if path.is_dir():
        return path
    raise argparse.ArgumentTypeError(f"Not a directory: {path}")
files required to run ROOT_FILE", + type=is_dir, +) +py2file_parser.add_argument( + "-t", + "--type", + choices=["graph", "chord"], + default="graph", + help="Plot type: graph plot or chord plot.", +) +py2file_parser.add_argument( + "-r", "--show_response", "--show-response", action="store_true", help="Show node response values." +) +py2file_parser.add_argument("-m", "--show_misc", "--show-misc", action="store_true", help="Show node misc values.") +py2file_parser.add_argument("-l", "--show_local", "--show-local", action="store_true", help="Show local transitions.") +py2file_parser.add_argument( + "-p", "--show_processing", "--show-processing", action="store_true", help="Show processing functions." +) +py2file_parser.add_argument( + "-g", "--show_global", "--show-global", action="store_true", help="Show global transitions." +) +py2file_parser.add_argument( + "-i", "--show_isolates", "--show-isolates", action="store_true", help="Show isolated nodes." +) +py2file_parser.add_argument( + "-u", "--show_unresolved", "--show-unresolved", action="store_true", help="Show unresolved transitions" +) +py2file_parser.add_argument( + "-rs", + "--random_seed", + "--random-seed", + required=False, + type=int, + default=1, + help="Random seed to control color generation.", +) + +server_parser = argparse.ArgumentParser(add_help=False) +server_parser.add_argument( + "-H", "--host", required=False, metavar="HOST", type=str, default="127.0.0.1", help="Dash application host." +) +server_parser.add_argument( + "-P", "--port", required=False, metavar="PORT", type=int, default=5000, help="Dash application port." 
+) + + +def make_server(args=sys.argv[1:]): + server_praser = argparse.ArgumentParser(parents=[py2file_parser, server_parser], add_help=True) + parsed_args: argparse.Namespace = server_praser.parse_args(args) + processed_graph = preprocess(get_graph(**vars(parsed_args)), **vars(parsed_args)) + if parsed_args.type == "graph": + plotly_fig = graphviz_to_plotly(graph_plot.get_plot(processed_graph, **vars(parsed_args))) + elif parsed_args.type == "chord": + plotly_fig = chord_plot.get_plot(processed_graph, **vars(parsed_args)) + else: + raise argparse.ArgumentError("Invalid value for argument `type`") + app = create_app(plotly_fig) + reloader = hupper.start_reloader("dff.utils.viewer.cli.make_server") + reloader.watch_files([str(i) for i in parsed_args.entry_point.parent.absolute().glob("./**/*.py")]) + app.run(host=parsed_args.host, port=parsed_args.port, debug=True, dev_tools_hot_reload=True) + + +def make_image(args=sys.argv[1:]): + image_parser = argparse.ArgumentParser(parents=[py2file_parser], add_help=True) + image_parser.add_argument( + "-f", + "--format", + metavar="FORMAT", + help="Plot output format", + default="png", + choices=["png", "jpeg", "pdf", "svg", "gif", "bmp", "dot"], + type=str, + ) + image_parser.add_argument( + "-o", + "--output_file", + "--output-file", + metavar="OUTPUT_FILE", + help="Image file", + type=str, + ) + parsed_args: argparse.Namespace = image_parser.parse_args(args) + args_dict = vars(parsed_args) + graph = get_graph(**args_dict) + processed_graph = preprocess(graph, **args_dict) + if args_dict["type"] == "graph": + plot = graph_plot.get_plot(processed_graph, **args_dict) + image.graphviz_image(plot, parsed_args.output_file, format=parsed_args.format) + elif args_dict["type"] == "chord": + plot = chord_plot.get_plot(processed_graph, **args_dict) + image.plotly_image(plot, parsed_args.output_file, format=parsed_args.format) + else: + raise argparse.ArgumentError("Invalid value for argument `type`") diff --git 
a/dff/utils/viewer/graph.py b/dff/utils/viewer/graph.py new file mode 100644 index 000000000..b000dc7b4 --- /dev/null +++ b/dff/utils/viewer/graph.py @@ -0,0 +1,18 @@ +from typing import Union +from pathlib import Path + +import networkx as nx + +from dff.utils.parser.dff_project import DFFProject + + +def get_graph( + entry_point: Union[str, Path], project_root_dir: Union[str, Path, None] = None, **kwargs # For cli integration +) -> nx.Graph: + if not isinstance(entry_point, Path): + entry_point = Path(entry_point) + project_root_dir = project_root_dir or entry_point.parent + project: DFFProject = DFFProject.from_python( + project_root_dir=project_root_dir.absolute(), entry_point=entry_point.absolute() + ) + return project.graph diff --git a/dff/utils/viewer/graph_plot.py b/dff/utils/viewer/graph_plot.py new file mode 100644 index 000000000..a8e33382c --- /dev/null +++ b/dff/utils/viewer/graph_plot.py @@ -0,0 +1,131 @@ +import random +from typing import Dict + +import networkx as nx +import graphviz + +from .utils import get_random_colors +from .preprocessing import get_script, UNRESOLVED_KEY + + +NODE_ATTRS = { + "fontname": "Helvetica,Arial,sans-serif", + "shape": "box", + "style": "rounded, filled", + "fillcolor": "#ffffffbf", +} + + +def format_name(name: str): + """ + Format node name as graphviz html code. + If the node name is 'NONE', replace it with the UNRESOLVED_KEY constant. + """ + name_value = name.upper().strip("'") + return f'
{name_value}
' + + +def format_title(title: str): # second
to the left for spacing + return f'

{title}
' # noqa: E501 + + +def format_lines(lines: list): + return f'
{"
".join(lines)}
' + + +def format_port(name: str, port: str) -> str: + return f'(
{name})
' + + +def format_as_table(rows: list) -> str: + return "".join(['<', *rows, "
>"]) + + +def get_node_struct(node: tuple) -> Dict: + """ + Get a formatted node structure. + """ + return { + "name": str(node), + "label": [format_name(node[-1])], + "transitions": {}, + "ports": [], + "full_label": None, + } + + +def get_script_data(script_node: dict, key: str, show_flag: bool) -> list: + if not show_flag or key not in script_node: # add response data + return [] + return ["
", format_title(key.title()), format_lines([str(script_node[key])])] + + +def get_plot( + nx_graph: nx.Graph, + show_response: bool = False, + show_processing: bool = False, + show_misc: bool = False, + random_seed: int = 1, + **requirements, # for cli integration +) -> graphviz.Digraph: + random.seed(random_seed) + + graph = graphviz.Digraph() + graph.attr(compound="true", splines="spline", overlap="ipsep", fontname="Helvetica,Arial,sans-serif") + graph.node_attr.update(**NODE_ATTRS) + + nodes: Dict[str, Dict] = {} + + for node, node_data in nx_graph.nodes.items(): + if node not in nodes: + node_struct = get_node_struct(node) + nodes[node] = node_struct + + if node[-1] == UNRESOLVED_KEY: + continue + + # get script data if necessary, add to node struct + script_node = get_script(nx_graph, node_data) + node_copy = list(node[1:]) # skip the initial NODE identifier + while node_copy: # recursively get node + label_part = node_copy.pop(0) + script_node = script_node.get(label_part, {}) + nodes[node]["label"].extend(get_script_data(script_node, "RESPONSE", show_response)) + nodes[node]["label"].extend(get_script_data(script_node, "PRE_RESPONSE_PROCESSING", show_processing)) + nodes[node]["label"].extend(get_script_data(script_node, "PRE_TRANSITIONS_PROCESSING", show_processing)) + nodes[node]["label"].extend(get_script_data(script_node, "MISC", show_misc)) + + # add edge data to node structs + for edge, edge_data in nx_graph.edges.items(): + edge_source_node, edge_target_node, _ = edge + if edge_target_node not in nodes: + continue # ignore expelled nodes + + nodes[edge_source_node]["ports"] += [format_port(edge_data["condition"], edge_data["label"])] + # port id mapped to the target node + nodes[edge_source_node]["transitions"][edge_data["label"]] = str(edge_target_node) + + flows: dict = {} + + # add flows, nodes, edges to graphviz graph + for key in nodes.keys(): + _, flow, _ = key + if flow not in flows: + flows[flow] = graphviz.Digraph(name=f"cluster_{flow}") + 
flows[flow].attr(label=flow.upper().strip("'"), style="rounded, filled") + + if len(nodes[key]["ports"]) > 0: + nodes[key]["label"].extend(["
", format_title("Transitions"), *nodes[key]["ports"]]) + + nodes[key]["full_label"] = format_as_table(nodes[key]["label"]) + flows[flow].node(name=nodes[key]["name"], label=nodes[key]["full_label"]) + + for transition, dest in nodes[key]["transitions"].items(): + graph.edge(f"{key}:{transition}", dest) + + for color, subgraph in zip(get_random_colors(), flows.values()): + subgraph.attr(color=color.lower()) + graph.subgraph(subgraph) + + graph = graph.unflatten(stagger=5, fanout=True) + return graph diff --git a/dff/utils/viewer/image.py b/dff/utils/viewer/image.py new file mode 100644 index 000000000..209c4b3cd --- /dev/null +++ b/dff/utils/viewer/image.py @@ -0,0 +1,17 @@ +from graphviz import Digraph +import plotly.graph_objects as go +from plotly.io import write_image + + +def graphviz_image(plot: Digraph, output_file: str, format: str = "png") -> None: + if format == "dot": + plot.render(filename=output_file) + return + + _bytes = plot.pipe(format=format) + with open(output_file, "wb+") as file: + file.write(_bytes) + + +def plotly_image(plot: go.Figure, output_file: str, format: str = "png") -> None: + write_image(plot, output_file, format=format) diff --git a/dff/utils/viewer/preprocessing.py b/dff/utils/viewer/preprocessing.py new file mode 100644 index 000000000..eced9e670 --- /dev/null +++ b/dff/utils/viewer/preprocessing.py @@ -0,0 +1,117 @@ +import networkx as nx +import pandas as pd + +VIRTUAL_FLOW_KEY = "virtual" +UNRESOLVED_KEY = "UNRESOLVED" +RESTRICTED_NODE_TYPES = {"LABEL", "NONE", "GLOBAL_NODE", "LOCAL_NODE"} + + +def get_script(nx_graph: nx.Graph, node_data: dict) -> dict: + namespace, script_name, *_ = node_data["ref"] # get namespace from ref + script = nx_graph.graph["full_script"].get(namespace, {}).get(script_name, {}) + return script + + +def get_label_by_index_shifting( + nx_graph: nx.Graph, node_id: tuple, increment_flag: bool = True, cyclicality_flag: bool = True +) -> tuple: + script = get_script(nx_graph, nx_graph.nodes[node_id]) + _, 
flow, *info = node_id + labels = list(script[flow]) + if "LOCAL" in labels: + labels.remove("LOCAL") # cannot transition into LOCAL + node_label = info[0] + label_index = labels.index(node_label) + label_index = label_index + 1 if increment_flag else label_index - 1 + if not (cyclicality_flag or (0 <= label_index < len(labels))): + return ("NODE", *nx_graph.graph["fallback_label"]) + label_index %= len(labels) + return ("NODE", flow, labels[label_index]) + + +def resolve_labels(nx_graph: nx.Graph, edge: tuple, edge_data: dict) -> dict: + source_node, target_node, *edge_info = edge + _, label_type, *_ = target_node + source_type, *source_info = source_node + + # get label transition sources + if source_type == "GLOBAL_NODE": + sources = [node for node in nx_graph.nodes.keys() if node[0] not in RESTRICTED_NODE_TYPES] + elif source_type == "LOCAL_NODE": + sources = [ + node for node in nx_graph.nodes.keys() if node[1] == source_info[0] and node[0] not in RESTRICTED_NODE_TYPES + ] + else: + sources = [source_node] + # get label transiton targets + if label_type == "repeat": + targets = sources + elif label_type == "to_fallback": + targets = [("NODE", *nx_graph.graph["fallback_label"])] * len(sources) + elif label_type == "to_start": + targets = [("NODE", *nx_graph.graph["start_label"])] * len(sources) + elif label_type == "forward": + targets = [get_label_by_index_shifting(nx_graph, node_id, increment_flag=True) for node_id in sources] + elif label_type == "backward": + targets = [get_label_by_index_shifting(nx_graph, node_id, increment_flag=False) for node_id in sources] + else: + return {} + new_data = edge_data.copy() + new_data["label"] = label_type + return [(s, t, new_data) for s, t in zip(sources, targets)] + + +def transform_virtual(node: tuple): + """ + Put special nodes to virtual flow. Replace NONE with unresolved key constant. 
+ """ + if node == ("GLOBAL_NODE", "GLOBAL"): + return ("NODE", VIRTUAL_FLOW_KEY, node[-1]) + elif node == ("NONE",): + return ("NODE", VIRTUAL_FLOW_KEY, UNRESOLVED_KEY) + return node + + +def preprocess( + nx_graph: nx.Graph, + show_global: bool = False, + show_local: bool = False, + show_unresolved: bool = False, + show_isolates: bool = True, + **kwargs, # for cli integration +) -> nx.Graph: + + label_edges = [] + for edge, edge_data in nx_graph.edges.items(): + if edge[1][0] != "LABEL": + continue + label_edges += resolve_labels(nx_graph, edge, edge_data) + nx_graph.add_edges_from(label_edges) + + nx_graph.remove_nodes_from(list(node for node in nx_graph.nodes if node[0] == "LABEL")) + + if not show_global and ("GLOBAL_NODE", "GLOBAL") in nx_graph.nodes: + nx_graph.remove_nodes_from([("GLOBAL_NODE", "GLOBAL")]) + + if not show_local: + nx_graph.remove_nodes_from(list(node for node in nx_graph.nodes if node[-1] == "LOCAL")) + + if not show_unresolved and ("NONE",) in nx_graph.nodes: + nx_graph.remove_nodes_from([("NONE",)]) + + if not show_isolates: + nx_graph.remove_nodes_from(list(nx.isolates(nx_graph))) + + nx_graph = nx.relabel_nodes(nx_graph, {name: transform_virtual(name) for name in nx_graph.nodes.keys()}) + + return nx_graph + + +def get_adjacency_dataframe(nx_graph: nx.Graph) -> pd.DataFrame: + matrix = nx.adjacency_matrix(nx_graph).toarray() + df = pd.DataFrame( + matrix, index=[str(x[:3]) for x in nx_graph.nodes.keys()], columns=[str(x[:3]) for x in nx_graph.nodes.keys()] + ) + df = df.loc[(df != 0).any(axis=1), (df != 0).any(axis=0)] + df = df.reindex(sorted(df.columns), axis=1).reindex(sorted(df.index), axis=0) + return df diff --git a/dff/utils/viewer/utils.py b/dff/utils/viewer/utils.py new file mode 100644 index 000000000..608d95179 --- /dev/null +++ b/dff/utils/viewer/utils.py @@ -0,0 +1,51 @@ +import random +from base64 import b64encode +from io import BytesIO + +from graphviz import Digraph +import plotly.graph_objects as go + + +def 
graphviz_to_plotly(plot: Digraph) -> go.Figure: + _bytes = plot.pipe(format="png") + prefix = "data:image/png;base64," + with BytesIO(_bytes) as stream: + base64 = prefix + b64encode(stream.getvalue()).decode("utf-8") + fig = go.Figure(go.Image(source=base64)) + return fig + + +def get_random_colors(): + target_colors = ["#96B0AF", "#C6AE82", "#F78378", "#FF7B9C", "#D289AB", "#86ACD5", "#86ACD5", "#F8D525", "#F6AE2D"] + reserve = [] + for element in target_colors: + yield element + reserve.append(random.choice(target_colors)) + while reserve: + yield reserve.pop(0) + + +def get_spaced_colors(n): + colors = [ + f"rgb({int(color[1:3], base=16)}, {int(color[3:5], base=16)}, {int(color[5:7], base=16)})" + for _, color in zip(range(n), get_random_colors()) + ] + return colors or None + + +def normalize_color(color: str, level: str = "node"): + opacity_value = 95 + if level == "node": + r, g, b = color.strip("rgb()").split(", ") + r, g, b = ( + (int(r) + random.randint(-25, 25)), + (int(g) + random.randint(-25, 25)), + (int(b) + random.randint(-25, 25)), + ) + r = 0 if r < 0 else 255 if r > 255 else r + g = 0 if g < 0 else 255 if g > 255 else g + b = 0 if b < 0 else 255 if b > 255 else b + color = f"rgb({r}, {g}, {b})" + opacity_value = 70 + normalized = color[:3] + "a" + color[3:-1] + f",.{opacity_value}" + color[-1] + return normalized diff --git a/docs/source/conf.py b/docs/source/conf.py index 00da310a9..d44e0707e 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -168,6 +168,7 @@ def setup(_): ("dff.messengers", "Messenger Interfaces"), ("dff.pipeline", "Pipeline"), ("dff.script", "Script"), + ("dff.utils.parser", "Parser"), ("dff.utils.testing", "Utils"), ] ) diff --git a/examples/viewer/1_server.py b/examples/viewer/1_server.py new file mode 100644 index 000000000..74a575287 --- /dev/null +++ b/examples/viewer/1_server.py @@ -0,0 +1,38 @@ +# %% [markdown] +""" +# 1. 
Server + +## View the graph on a Dash server + +```bash +dff.viewer.server -e "./python_files/main.py" -d "./python_files/" -H localhost -P 8000 +``` + +## CLI parameters reference + +--entry_point: Python file to start parsing with. +--project_root_dir: Directory that contains all the local files required to run ROOT_FILE. +--show_response: Show node response values. +--show_misc: Show node misc values. +--show_local: Show local transitions. +--show_processing: Show processing functions. +--show_global: Show global transitions. +--show_isolates: Show isolated nodes. +--random_seed: Random seed to control color generation. +--host: Dash application host. +--port: Dash application port. + +""" +# %% +from pathlib import Path +import sys +from dff.utils.viewer import make_server +from dff.utils.testing.common import is_interactive_mode + +if is_interactive_mode(): + entry_point = "../../tests/viewer/TEST_CASES/main.py" +else: + entry_point = Path(__file__).parent.parent / "tests" / "viewer" / "TEST_CASES" / "main.py" + +sys.argv = ["", f"--entry_point={entry_point}"] +make_server(sys.argv[1:]) diff --git a/examples/viewer/2_image.py b/examples/viewer/2_image.py new file mode 100644 index 000000000..5b84f4f5d --- /dev/null +++ b/examples/viewer/2_image.py @@ -0,0 +1,38 @@ +# %% [markdown] +""" +# 2. Image + +## View the graph as a static image. + +```bash +dff.viewer.image -e ./python_files/main.py -d ./python_files/ -o ./plot.png +``` + +## CLI parameters reference + +--entry_point: Python file to start parsing with. +--project_root_dir: Directory that contains all the local files required to run ROOT_FILE. +--show_response: Show node response values. +--show_misc: Show node misc values. +--show_local: Show local transitions. +--show_processing: Show processing functions. +--show_global: Show global transitions. +--show_isolates: Show isolated nodes. +--random_seed: Random seed to control color generation. +--format: Graphviz output format. 
+--output_file: Image file. + +""" +# %% +from pathlib import Path +from dff.utils.viewer import make_image +from dff.utils.testing.common import is_interactive_mode + +if is_interactive_mode(): + entry_point = "../../tests/viewer/TEST_CASES/main.py" +else: + entry_point = Path(__file__).parent.parent / "tests" / "viewer" / "TEST_CASES" / "main.py" + +make_image([f"--entry_point={entry_point}", "--output_file=plot.png"]) + +# %% diff --git a/makefile b/makefile index b84e14fde..c5c0fc5d9 100644 --- a/makefile +++ b/makefile @@ -29,20 +29,20 @@ venv: pip install -e .[devel_full] format: venv - black --line-length=120 --exclude='venv|build|tutorials' . + black --line-length=120 --exclude='venv|build|tutorials|tests/parser/TEST_CASES' . black --line-length=100 tutorials .PHONY: format lint: venv - flake8 --max-line-length=120 --exclude venv,build,tutorials . + flake8 --max-line-length=120 --exclude venv,build,tutorials,tests/parser/TEST_CASES . flake8 --max-line-length=100 tutorials - @set -e && black --line-length=120 --check --exclude='venv|build|tutorials' . && black --line-length=100 --check tutorials || ( \ + @set -e && black --line-length=120 --check --exclude='venv|build|tutorials|tests/parser/TEST_CASES' . && black --line-length=100 --check tutorials || ( \ echo "================================"; \ echo "Bad formatting? Run: make format"; \ echo "================================"; \ false) # TODO: Add mypy testing - # @mypy . --exclude venv*,build + # mypy . 
.PHONY: lint docker_up: diff --git a/mypy.ini b/mypy.ini new file mode 100644 index 000000000..5e8d7006e --- /dev/null +++ b/mypy.ini @@ -0,0 +1,7 @@ +[mypy] +exclude = (?x)( + ^venv + | ^build + | ^tests/parser/TEST_CASES + | ^docs/examples + ) \ No newline at end of file diff --git a/setup.py b/setup.py index ed6f1f58c..2a6727ad4 100644 --- a/setup.py +++ b/setup.py @@ -80,6 +80,28 @@ def merge_req_lists(*req_lists: List[str]) -> List[str]: "pytelegrambotapi==4.5.1", ] +parser_dependencies = [ + "cached-property==1.5.2; python_version<'3.8'", + "astunparse==1.6.3; python_version<'3.9'", + "ruamel.yaml", + "networkx", +] + +script_viewer_dependencies = merge_req_lists( + parser_dependencies, + [ + "graphviz==0.17", + "dash==2.6.2", + "hupper==1.11", + "watchdog==3.0.0", + "plotly<=5.10.0", + "numpy<=1.24.2", + "scipy<=1.10.1", + "pandas<=1.5.3", + "kaleido==0.2.1", + ], +) + full = merge_req_lists( core, async_files_dependencies, @@ -90,6 +112,8 @@ def merge_req_lists(*req_lists: List[str]) -> List[str]: postgresql_dependencies, ydb_dependencies, telegram_dependencies, + parser_dependencies, + script_viewer_dependencies, ) requests_requirements = [ @@ -101,7 +125,10 @@ def merge_req_lists(*req_lists: List[str]) -> List[str]: "pytest >=7.2.1,<8.0.0", "pytest-cov >=4.0.0,<5.0.0", "pytest-asyncio >=0.14.0,<0.15.0", - "flake8 >=3.8.3,<4.0.0", + "flake8==6.0.0; python_version>'3.7'", + "flake8<=5.0.4; python_version=='3.7'", + "pyflakes==3.0.1; python_version>'3.7'", + "pyflakes<=2.5.0; python_version=='3.7'", "click<=8.0.4", "black ==20.8b1", "isort >=5.0.6,<6.0.0", @@ -142,7 +169,8 @@ def merge_req_lists(*req_lists: List[str]) -> List[str]: ] mypy_dependencies = [ - "mypy==0.950", + "mypy==1.1.1", + "networkx-stubs==0.0.1", ] devel_full = merge_req_lists( @@ -163,6 +191,8 @@ def merge_req_lists(*req_lists: List[str]) -> List[str]: "postgresql": postgresql_dependencies, # dependencies for using PostgreSQL "ydb": ydb_dependencies, # dependencies for using Yandex 
Database "telegram": telegram_dependencies, # dependencies for using Telegram + "parser": parser_dependencies, # dependencies for using parser + "viewer": script_viewer_dependencies, # dependencies for script viewer "full": full, # full dependencies including all options above "tests": test_requirements, # dependencies for running tests "test_full": tests_full, # full dependencies for running all tests (all options above) @@ -203,4 +233,10 @@ def merge_req_lists(*req_lists: List[str]) -> List[str]: install_requires=core, # Optional test_suite="tests", extras_require=EXTRA_DEPENDENCIES, + entry_points={ + "console_scripts": [ + "dff.viewer.server=dff.utils.viewer:make_server", + "dff.viewer.image=dff.utils.viewer:make_image", + ] + }, ) diff --git a/tests/parser/TEST_CASES/complex_cases/just_works/graph.json b/tests/parser/TEST_CASES/complex_cases/just_works/graph.json new file mode 100644 index 000000000..2b9bf0fc4 --- /dev/null +++ b/tests/parser/TEST_CASES/complex_cases/just_works/graph.json @@ -0,0 +1,1761 @@ +{ + "directed": true, + "multigraph": true, + "graph": { + "full_script": { + "main": { + "re": "import re", + "TRANSITIONS": "from dff.script.core.keywords import TRANSITIONS", + "RESPONSE": "from dff.script.core.keywords import RESPONSE", + "GLOBAL": "from dff.script.core.keywords import GLOBAL", + "MISC": "from dff.script.core.keywords import MISC", + "cnd": "import dff.script.conditions", + "lbl": "import dff.script.labels", + "rsp": "import dff.script.responses", + "Pipeline": "from dff.pipeline import Pipeline", + "transitions": "import transitions", + "global_flow": "from flow import global_flow", + "script": { + "GLOBAL": { + "TRANSITIONS": { + "('greeting_flow', 'node1', 1.1)": "cnd.regexp('\\\\b(hi|hello)\\\\b', re.I)", + "('music_flow', 'node1', 1.1)": "cnd.regexp('talk about music')", + "lbl.to_fallback(0.1)": "cnd.true()", + "lbl.forward()": "cnd.all([cnd.regexp('next\\\\b'), cnd.has_last_labels(labels=[('music_flow', i) for i in ['node2', 
'node3']])])", + "lbl.repeat(0.2)": "cnd.all([cnd.regexp('repeat', re.I), cnd.negation(cnd.has_last_labels(flow_labels=['global_flow']))])" + }, + "MISC": { + "'var1'": "'global_data'", + "'var2'": "'global_data'", + "'var3'": "'global_data'" + }, + "RESPONSE": "\"''\"" + }, + "'global_flow'": "global_flow", + "'greeting_flow'": { + "'node1'": { + "RESPONSE": "rsp.choice(['Hi, what is up?', 'Hello, how are you?'])", + "TRANSITIONS": { + "('global_flow', 'fallback_node', 0.1)": "cnd.true()", + "'node2'": "cnd.regexp('how are you')" + }, + "MISC": { + "'var3'": "'info_of_step_1'" + } + }, + "'node2'": { + "RESPONSE": "'Good. What do you want to talk about?'", + "TRANSITIONS": { + "lbl.to_fallback(0.1)": "cnd.true()", + "lbl.forward(0.5)": "cnd.regexp('talk about')", + "('music_flow', 'node1')": "cnd.regexp('talk about music')", + "lbl.previous()": "cnd.regexp('previous', re.IGNORECASE)" + } + }, + "'node3'": { + "RESPONSE": "foo", + "TRANSITIONS": { + "lbl.forward()": "cnd.regexp('bye')" + } + }, + "'node4'": { + "RESPONSE": "bar('bye')", + "TRANSITIONS": { + "'node1'": "cnd.regexp('hi|hello', re.IGNORECASE)", + "lbl.to_fallback()": "cnd.true()" + } + } + }, + "'music_flow'": { + "'node1'": { + "RESPONSE": "'I love `System of a Down` group, would you like to tell about it? 
'", + "TRANSITIONS": { + "lbl.forward()": "cnd.regexp('yes|yep|ok', re.IGNORECASE)", + "lbl.to_fallback()": "cnd.true()" + } + }, + "'node2'": { + "RESPONSE": "'System of a Down is an Armenian-American heavy metal band formed in in 1994.'", + "TRANSITIONS": { + "lbl.forward()": "cnd.regexp('next', re.IGNORECASE)", + "lbl.repeat()": "cnd.regexp('repeat', re.IGNORECASE)", + "lbl.to_fallback()": "cnd.true()" + } + }, + "'node3'": { + "RESPONSE": "'The band achieved commercial success with the release of five studio albums.'", + "TRANSITIONS": { + "lbl.forward()": "cnd.regexp('next', re.IGNORECASE)", + "lbl.backward()": "cnd.regexp('back', re.IGNORECASE)", + "lbl.repeat()": "cnd.regexp('repeat', re.IGNORECASE)", + "lbl.to_fallback()": "cnd.true()" + } + }, + "'node4'": { + "RESPONSE": "\"That's all what I know\"", + "TRANSITIONS": { + "transitions.greeting_flow_n2_transition": "cnd.regexp('next', re.IGNORECASE)", + "transitions.high_priority_node_transition('greeting_flow', 'node4')": "cnd.regexp('next time', re.IGNORECASE)", + "lbl.to_fallback()": "cnd.true()" + } + } + } + }, + "pipeline": "Pipeline.from_script(fallback_label=('global_flow', 'fallback_node'), script=script, start_label=('global_flow', 'start_node'))" + }, + "flow": { + "TRANSITIONS": "from dff.script.core.keywords import TRANSITIONS", + "RESPONSE": "from dff.script.core.keywords import RESPONSE", + "cnd": "import dff.script.conditions", + "lbl": "import dff.script.labels.std_labels", + "re": "import re", + "global_flow": { + "'start_node'": { + "RESPONSE": "''", + "TRANSITIONS": { + "('music_flow', 'node1')": "cnd.regexp('talk about music')", + "('greeting_flow', 'node1')": "cnd.regexp('hi|hello', re.IGNORECASE)", + "'fallback_node'": "cnd.true()" + } + }, + "'fallback_node'": { + "RESPONSE": "'Ooops'", + "TRANSITIONS": { + "('music_flow', 'node1')": "cnd.regexp('talk about music')", + "('greeting_flow', 'node1')": "cnd.regexp('hi|hello', re.IGNORECASE)", + "lbl.previous()": "cnd.regexp('previous', 
re.IGNORECASE)", + "lbl.repeat()": "cnd.true()" + } + } + } + } + }, + "start_label": [ + "'global_flow'", + "'start_node'" + ], + "fallback_label": [ + "'global_flow'", + "'fallback_node'" + ] + }, + "nodes": [ + { + "ref": [ + "main", + "script", + "value", + "value_GLOBAL" + ], + "id": [ + "GLOBAL_NODE", + "GLOBAL" + ] + }, + { + "ref": [ + "main", + "script", + "value", + "value_'greeting_flow'", + "value_'node1'" + ], + "id": [ + "NODE", + "'greeting_flow'", + "'node1'" + ] + }, + { + "ref": [ + "main", + "script", + "value", + "value_'music_flow'", + "value_'node1'" + ], + "id": [ + "NODE", + "'music_flow'", + "'node1'" + ] + }, + { + "id": [ + "LABEL", + "to_fallback", + [ + "priority", + "0.1" + ], + [ + "args", + "()" + ], + [ + "kwargs", + "{}" + ] + ] + }, + { + "id": [ + "LABEL", + "forward", + [ + "priority", + "None" + ], + [ + "cyclicality_flag", + "True" + ], + [ + "args", + "()" + ], + [ + "kwargs", + "{}" + ] + ] + }, + { + "id": [ + "LABEL", + "repeat", + [ + "priority", + "0.2" + ], + [ + "args", + "()" + ], + [ + "kwargs", + "{}" + ] + ] + }, + { + "ref": [ + "flow", + "global_flow", + "value", + "value_'start_node'" + ], + "id": [ + "NODE", + "'global_flow'", + "'start_node'" + ] + }, + { + "ref": [ + "flow", + "global_flow", + "value", + "value_'fallback_node'" + ], + "id": [ + "NODE", + "'global_flow'", + "'fallback_node'" + ] + }, + { + "id": [ + "LABEL", + "previous", + [ + "priority", + "None" + ], + [ + "args", + "()" + ], + [ + "kwargs", + "{}" + ] + ] + }, + { + "id": [ + "LABEL", + "repeat", + [ + "priority", + "None" + ], + [ + "args", + "()" + ], + [ + "kwargs", + "{}" + ] + ] + }, + { + "ref": [ + "main", + "script", + "value", + "value_'greeting_flow'", + "value_'node2'" + ], + "id": [ + "NODE", + "'greeting_flow'", + "'node2'" + ] + }, + { + "id": [ + "LABEL", + "forward", + [ + "priority", + "0.5" + ], + [ + "cyclicality_flag", + "True" + ], + [ + "args", + "()" + ], + [ + "kwargs", + "{}" + ] + ] + }, + { + "ref": [ + "main", + 
"script", + "value", + "value_'greeting_flow'", + "value_'node3'" + ], + "id": [ + "NODE", + "'greeting_flow'", + "'node3'" + ] + }, + { + "ref": [ + "main", + "script", + "value", + "value_'greeting_flow'", + "value_'node4'" + ], + "id": [ + "NODE", + "'greeting_flow'", + "'node4'" + ] + }, + { + "id": [ + "LABEL", + "to_fallback", + [ + "priority", + "None" + ], + [ + "args", + "()" + ], + [ + "kwargs", + "{}" + ] + ] + }, + { + "ref": [ + "main", + "script", + "value", + "value_'music_flow'", + "value_'node2'" + ], + "id": [ + "NODE", + "'music_flow'", + "'node2'" + ] + }, + { + "ref": [ + "main", + "script", + "value", + "value_'music_flow'", + "value_'node3'" + ], + "id": [ + "NODE", + "'music_flow'", + "'node3'" + ] + }, + { + "id": [ + "LABEL", + "backward", + [ + "priority", + "None" + ], + [ + "cyclicality_flag", + "True" + ], + [ + "args", + "()" + ], + [ + "kwargs", + "{}" + ] + ] + }, + { + "ref": [ + "main", + "script", + "value", + "value_'music_flow'", + "value_'node4'" + ], + "id": [ + "NODE", + "'music_flow'", + "'node4'" + ] + }, + { + "id": [ + "NONE" + ] + } + ], + "links": [ + { + "label_ref": [ + "main", + "script", + "value", + "value_GLOBAL", + "value_TRANSITIONS", + "key_('greeting_flow', 'node1', 1.1)" + ], + "label": "('greeting_flow', 'node1', 1.1)", + "condition_ref": [ + "main", + "script", + "value", + "value_GLOBAL", + "value_TRANSITIONS", + "value_('greeting_flow', 'node1', 1.1)" + ], + "condition": "cnd.regexp('\\\\b(hi|hello)\\\\b', re.I)", + "source": [ + "GLOBAL_NODE", + "GLOBAL" + ], + "target": [ + "NODE", + "'greeting_flow'", + "'node1'" + ], + "key": 0 + }, + { + "label_ref": [ + "main", + "script", + "value", + "value_GLOBAL", + "value_TRANSITIONS", + "key_('music_flow', 'node1', 1.1)" + ], + "label": "('music_flow', 'node1', 1.1)", + "condition_ref": [ + "main", + "script", + "value", + "value_GLOBAL", + "value_TRANSITIONS", + "value_('music_flow', 'node1', 1.1)" + ], + "condition": "cnd.regexp('talk about music')", + 
"source": [ + "GLOBAL_NODE", + "GLOBAL" + ], + "target": [ + "NODE", + "'music_flow'", + "'node1'" + ], + "key": 0 + }, + { + "label_ref": [ + "main", + "script", + "value", + "value_GLOBAL", + "value_TRANSITIONS", + "key_lbl.to_fallback(0.1)" + ], + "label": "lbl.to_fallback(0.1)", + "condition_ref": [ + "main", + "script", + "value", + "value_GLOBAL", + "value_TRANSITIONS", + "value_lbl.to_fallback(0.1)" + ], + "condition": "cnd.true()", + "source": [ + "GLOBAL_NODE", + "GLOBAL" + ], + "target": [ + "LABEL", + "to_fallback", + [ + "priority", + "0.1" + ], + [ + "args", + "()" + ], + [ + "kwargs", + "{}" + ] + ], + "key": 0 + }, + { + "label_ref": [ + "main", + "script", + "value", + "value_GLOBAL", + "value_TRANSITIONS", + "key_lbl.forward()" + ], + "label": "lbl.forward()", + "condition_ref": [ + "main", + "script", + "value", + "value_GLOBAL", + "value_TRANSITIONS", + "value_lbl.forward()" + ], + "condition": "cnd.all([cnd.regexp('next\\\\b'), cnd.has_last_labels(labels=[('music_flow', i) for i in ['node2', 'node3']])])", + "source": [ + "GLOBAL_NODE", + "GLOBAL" + ], + "target": [ + "LABEL", + "forward", + [ + "priority", + "None" + ], + [ + "cyclicality_flag", + "True" + ], + [ + "args", + "()" + ], + [ + "kwargs", + "{}" + ] + ], + "key": 0 + }, + { + "label_ref": [ + "main", + "script", + "value", + "value_GLOBAL", + "value_TRANSITIONS", + "key_lbl.repeat(0.2)" + ], + "label": "lbl.repeat(0.2)", + "condition_ref": [ + "main", + "script", + "value", + "value_GLOBAL", + "value_TRANSITIONS", + "value_lbl.repeat(0.2)" + ], + "condition": "cnd.all([cnd.regexp('repeat', re.I), cnd.negation(cnd.has_last_labels(flow_labels=['global_flow']))])", + "source": [ + "GLOBAL_NODE", + "GLOBAL" + ], + "target": [ + "LABEL", + "repeat", + [ + "priority", + "0.2" + ], + [ + "args", + "()" + ], + [ + "kwargs", + "{}" + ] + ], + "key": 0 + }, + { + "label_ref": [ + "main", + "script", + "value", + "value_'greeting_flow'", + "value_'node1'", + "value_TRANSITIONS", + 
"key_('global_flow', 'fallback_node', 0.1)" + ], + "label": "('global_flow', 'fallback_node', 0.1)", + "condition_ref": [ + "main", + "script", + "value", + "value_'greeting_flow'", + "value_'node1'", + "value_TRANSITIONS", + "value_('global_flow', 'fallback_node', 0.1)" + ], + "condition": "cnd.true()", + "source": [ + "NODE", + "'greeting_flow'", + "'node1'" + ], + "target": [ + "NODE", + "'global_flow'", + "'fallback_node'" + ], + "key": 0 + }, + { + "label_ref": [ + "main", + "script", + "value", + "value_'greeting_flow'", + "value_'node1'", + "value_TRANSITIONS", + "key_'node2'" + ], + "label": "'node2'", + "condition_ref": [ + "main", + "script", + "value", + "value_'greeting_flow'", + "value_'node1'", + "value_TRANSITIONS", + "value_'node2'" + ], + "condition": "cnd.regexp('how are you')", + "source": [ + "NODE", + "'greeting_flow'", + "'node1'" + ], + "target": [ + "NODE", + "'greeting_flow'", + "'node2'" + ], + "key": 0 + }, + { + "label_ref": [ + "main", + "script", + "value", + "value_'music_flow'", + "value_'node1'", + "value_TRANSITIONS", + "key_lbl.forward()" + ], + "label": "lbl.forward()", + "condition_ref": [ + "main", + "script", + "value", + "value_'music_flow'", + "value_'node1'", + "value_TRANSITIONS", + "value_lbl.forward()" + ], + "condition": "cnd.regexp('yes|yep|ok', re.IGNORECASE)", + "source": [ + "NODE", + "'music_flow'", + "'node1'" + ], + "target": [ + "LABEL", + "forward", + [ + "priority", + "None" + ], + [ + "cyclicality_flag", + "True" + ], + [ + "args", + "()" + ], + [ + "kwargs", + "{}" + ] + ], + "key": 0 + }, + { + "label_ref": [ + "main", + "script", + "value", + "value_'music_flow'", + "value_'node1'", + "value_TRANSITIONS", + "key_lbl.to_fallback()" + ], + "label": "lbl.to_fallback()", + "condition_ref": [ + "main", + "script", + "value", + "value_'music_flow'", + "value_'node1'", + "value_TRANSITIONS", + "value_lbl.to_fallback()" + ], + "condition": "cnd.true()", + "source": [ + "NODE", + "'music_flow'", + "'node1'" + ], + 
"target": [ + "LABEL", + "to_fallback", + [ + "priority", + "None" + ], + [ + "args", + "()" + ], + [ + "kwargs", + "{}" + ] + ], + "key": 0 + }, + { + "label_ref": [ + "flow", + "global_flow", + "value", + "value_'start_node'", + "value_TRANSITIONS", + "key_('music_flow', 'node1')" + ], + "label": "('music_flow', 'node1')", + "condition_ref": [ + "flow", + "global_flow", + "value", + "value_'start_node'", + "value_TRANSITIONS", + "value_('music_flow', 'node1')" + ], + "condition": "cnd.regexp('talk about music')", + "source": [ + "NODE", + "'global_flow'", + "'start_node'" + ], + "target": [ + "NODE", + "'music_flow'", + "'node1'" + ], + "key": 0 + }, + { + "label_ref": [ + "flow", + "global_flow", + "value", + "value_'start_node'", + "value_TRANSITIONS", + "key_('greeting_flow', 'node1')" + ], + "label": "('greeting_flow', 'node1')", + "condition_ref": [ + "flow", + "global_flow", + "value", + "value_'start_node'", + "value_TRANSITIONS", + "value_('greeting_flow', 'node1')" + ], + "condition": "cnd.regexp('hi|hello', re.IGNORECASE)", + "source": [ + "NODE", + "'global_flow'", + "'start_node'" + ], + "target": [ + "NODE", + "'greeting_flow'", + "'node1'" + ], + "key": 0 + }, + { + "label_ref": [ + "flow", + "global_flow", + "value", + "value_'start_node'", + "value_TRANSITIONS", + "key_'fallback_node'" + ], + "label": "'fallback_node'", + "condition_ref": [ + "flow", + "global_flow", + "value", + "value_'start_node'", + "value_TRANSITIONS", + "value_'fallback_node'" + ], + "condition": "cnd.true()", + "source": [ + "NODE", + "'global_flow'", + "'start_node'" + ], + "target": [ + "NODE", + "'global_flow'", + "'fallback_node'" + ], + "key": 0 + }, + { + "label_ref": [ + "flow", + "global_flow", + "value", + "value_'fallback_node'", + "value_TRANSITIONS", + "key_('music_flow', 'node1')" + ], + "label": "('music_flow', 'node1')", + "condition_ref": [ + "flow", + "global_flow", + "value", + "value_'fallback_node'", + "value_TRANSITIONS", + "value_('music_flow', 
'node1')" + ], + "condition": "cnd.regexp('talk about music')", + "source": [ + "NODE", + "'global_flow'", + "'fallback_node'" + ], + "target": [ + "NODE", + "'music_flow'", + "'node1'" + ], + "key": 0 + }, + { + "label_ref": [ + "flow", + "global_flow", + "value", + "value_'fallback_node'", + "value_TRANSITIONS", + "key_('greeting_flow', 'node1')" + ], + "label": "('greeting_flow', 'node1')", + "condition_ref": [ + "flow", + "global_flow", + "value", + "value_'fallback_node'", + "value_TRANSITIONS", + "value_('greeting_flow', 'node1')" + ], + "condition": "cnd.regexp('hi|hello', re.IGNORECASE)", + "source": [ + "NODE", + "'global_flow'", + "'fallback_node'" + ], + "target": [ + "NODE", + "'greeting_flow'", + "'node1'" + ], + "key": 0 + }, + { + "label_ref": [ + "flow", + "global_flow", + "value", + "value_'fallback_node'", + "value_TRANSITIONS", + "key_lbl.previous()" + ], + "label": "lbl.previous()", + "condition_ref": [ + "flow", + "global_flow", + "value", + "value_'fallback_node'", + "value_TRANSITIONS", + "value_lbl.previous()" + ], + "condition": "cnd.regexp('previous', re.IGNORECASE)", + "source": [ + "NODE", + "'global_flow'", + "'fallback_node'" + ], + "target": [ + "LABEL", + "previous", + [ + "priority", + "None" + ], + [ + "args", + "()" + ], + [ + "kwargs", + "{}" + ] + ], + "key": 0 + }, + { + "label_ref": [ + "flow", + "global_flow", + "value", + "value_'fallback_node'", + "value_TRANSITIONS", + "key_lbl.repeat()" + ], + "label": "lbl.repeat()", + "condition_ref": [ + "flow", + "global_flow", + "value", + "value_'fallback_node'", + "value_TRANSITIONS", + "value_lbl.repeat()" + ], + "condition": "cnd.true()", + "source": [ + "NODE", + "'global_flow'", + "'fallback_node'" + ], + "target": [ + "LABEL", + "repeat", + [ + "priority", + "None" + ], + [ + "args", + "()" + ], + [ + "kwargs", + "{}" + ] + ], + "key": 0 + }, + { + "label_ref": [ + "main", + "script", + "value", + "value_'greeting_flow'", + "value_'node2'", + "value_TRANSITIONS", + 
"key_lbl.to_fallback(0.1)" + ], + "label": "lbl.to_fallback(0.1)", + "condition_ref": [ + "main", + "script", + "value", + "value_'greeting_flow'", + "value_'node2'", + "value_TRANSITIONS", + "value_lbl.to_fallback(0.1)" + ], + "condition": "cnd.true()", + "source": [ + "NODE", + "'greeting_flow'", + "'node2'" + ], + "target": [ + "LABEL", + "to_fallback", + [ + "priority", + "0.1" + ], + [ + "args", + "()" + ], + [ + "kwargs", + "{}" + ] + ], + "key": 0 + }, + { + "label_ref": [ + "main", + "script", + "value", + "value_'greeting_flow'", + "value_'node2'", + "value_TRANSITIONS", + "key_lbl.forward(0.5)" + ], + "label": "lbl.forward(0.5)", + "condition_ref": [ + "main", + "script", + "value", + "value_'greeting_flow'", + "value_'node2'", + "value_TRANSITIONS", + "value_lbl.forward(0.5)" + ], + "condition": "cnd.regexp('talk about')", + "source": [ + "NODE", + "'greeting_flow'", + "'node2'" + ], + "target": [ + "LABEL", + "forward", + [ + "priority", + "0.5" + ], + [ + "cyclicality_flag", + "True" + ], + [ + "args", + "()" + ], + [ + "kwargs", + "{}" + ] + ], + "key": 0 + }, + { + "label_ref": [ + "main", + "script", + "value", + "value_'greeting_flow'", + "value_'node2'", + "value_TRANSITIONS", + "key_('music_flow', 'node1')" + ], + "label": "('music_flow', 'node1')", + "condition_ref": [ + "main", + "script", + "value", + "value_'greeting_flow'", + "value_'node2'", + "value_TRANSITIONS", + "value_('music_flow', 'node1')" + ], + "condition": "cnd.regexp('talk about music')", + "source": [ + "NODE", + "'greeting_flow'", + "'node2'" + ], + "target": [ + "NODE", + "'music_flow'", + "'node1'" + ], + "key": 0 + }, + { + "label_ref": [ + "main", + "script", + "value", + "value_'greeting_flow'", + "value_'node2'", + "value_TRANSITIONS", + "key_lbl.previous()" + ], + "label": "lbl.previous()", + "condition_ref": [ + "main", + "script", + "value", + "value_'greeting_flow'", + "value_'node2'", + "value_TRANSITIONS", + "value_lbl.previous()" + ], + "condition": 
"cnd.regexp('previous', re.IGNORECASE)", + "source": [ + "NODE", + "'greeting_flow'", + "'node2'" + ], + "target": [ + "LABEL", + "previous", + [ + "priority", + "None" + ], + [ + "args", + "()" + ], + [ + "kwargs", + "{}" + ] + ], + "key": 0 + }, + { + "label_ref": [ + "main", + "script", + "value", + "value_'greeting_flow'", + "value_'node3'", + "value_TRANSITIONS", + "key_lbl.forward()" + ], + "label": "lbl.forward()", + "condition_ref": [ + "main", + "script", + "value", + "value_'greeting_flow'", + "value_'node3'", + "value_TRANSITIONS", + "value_lbl.forward()" + ], + "condition": "cnd.regexp('bye')", + "source": [ + "NODE", + "'greeting_flow'", + "'node3'" + ], + "target": [ + "LABEL", + "forward", + [ + "priority", + "None" + ], + [ + "cyclicality_flag", + "True" + ], + [ + "args", + "()" + ], + [ + "kwargs", + "{}" + ] + ], + "key": 0 + }, + { + "label_ref": [ + "main", + "script", + "value", + "value_'greeting_flow'", + "value_'node4'", + "value_TRANSITIONS", + "key_'node1'" + ], + "label": "'node1'", + "condition_ref": [ + "main", + "script", + "value", + "value_'greeting_flow'", + "value_'node4'", + "value_TRANSITIONS", + "value_'node1'" + ], + "condition": "cnd.regexp('hi|hello', re.IGNORECASE)", + "source": [ + "NODE", + "'greeting_flow'", + "'node4'" + ], + "target": [ + "NODE", + "'greeting_flow'", + "'node1'" + ], + "key": 0 + }, + { + "label_ref": [ + "main", + "script", + "value", + "value_'greeting_flow'", + "value_'node4'", + "value_TRANSITIONS", + "key_lbl.to_fallback()" + ], + "label": "lbl.to_fallback()", + "condition_ref": [ + "main", + "script", + "value", + "value_'greeting_flow'", + "value_'node4'", + "value_TRANSITIONS", + "value_lbl.to_fallback()" + ], + "condition": "cnd.true()", + "source": [ + "NODE", + "'greeting_flow'", + "'node4'" + ], + "target": [ + "LABEL", + "to_fallback", + [ + "priority", + "None" + ], + [ + "args", + "()" + ], + [ + "kwargs", + "{}" + ] + ], + "key": 0 + }, + { + "label_ref": [ + "main", + "script", + 
"value", + "value_'music_flow'", + "value_'node2'", + "value_TRANSITIONS", + "key_lbl.forward()" + ], + "label": "lbl.forward()", + "condition_ref": [ + "main", + "script", + "value", + "value_'music_flow'", + "value_'node2'", + "value_TRANSITIONS", + "value_lbl.forward()" + ], + "condition": "cnd.regexp('next', re.IGNORECASE)", + "source": [ + "NODE", + "'music_flow'", + "'node2'" + ], + "target": [ + "LABEL", + "forward", + [ + "priority", + "None" + ], + [ + "cyclicality_flag", + "True" + ], + [ + "args", + "()" + ], + [ + "kwargs", + "{}" + ] + ], + "key": 0 + }, + { + "label_ref": [ + "main", + "script", + "value", + "value_'music_flow'", + "value_'node2'", + "value_TRANSITIONS", + "key_lbl.repeat()" + ], + "label": "lbl.repeat()", + "condition_ref": [ + "main", + "script", + "value", + "value_'music_flow'", + "value_'node2'", + "value_TRANSITIONS", + "value_lbl.repeat()" + ], + "condition": "cnd.regexp('repeat', re.IGNORECASE)", + "source": [ + "NODE", + "'music_flow'", + "'node2'" + ], + "target": [ + "LABEL", + "repeat", + [ + "priority", + "None" + ], + [ + "args", + "()" + ], + [ + "kwargs", + "{}" + ] + ], + "key": 0 + }, + { + "label_ref": [ + "main", + "script", + "value", + "value_'music_flow'", + "value_'node2'", + "value_TRANSITIONS", + "key_lbl.to_fallback()" + ], + "label": "lbl.to_fallback()", + "condition_ref": [ + "main", + "script", + "value", + "value_'music_flow'", + "value_'node2'", + "value_TRANSITIONS", + "value_lbl.to_fallback()" + ], + "condition": "cnd.true()", + "source": [ + "NODE", + "'music_flow'", + "'node2'" + ], + "target": [ + "LABEL", + "to_fallback", + [ + "priority", + "None" + ], + [ + "args", + "()" + ], + [ + "kwargs", + "{}" + ] + ], + "key": 0 + }, + { + "label_ref": [ + "main", + "script", + "value", + "value_'music_flow'", + "value_'node3'", + "value_TRANSITIONS", + "key_lbl.forward()" + ], + "label": "lbl.forward()", + "condition_ref": [ + "main", + "script", + "value", + "value_'music_flow'", + "value_'node3'", + 
"value_TRANSITIONS", + "value_lbl.forward()" + ], + "condition": "cnd.regexp('next', re.IGNORECASE)", + "source": [ + "NODE", + "'music_flow'", + "'node3'" + ], + "target": [ + "LABEL", + "forward", + [ + "priority", + "None" + ], + [ + "cyclicality_flag", + "True" + ], + [ + "args", + "()" + ], + [ + "kwargs", + "{}" + ] + ], + "key": 0 + }, + { + "label_ref": [ + "main", + "script", + "value", + "value_'music_flow'", + "value_'node3'", + "value_TRANSITIONS", + "key_lbl.backward()" + ], + "label": "lbl.backward()", + "condition_ref": [ + "main", + "script", + "value", + "value_'music_flow'", + "value_'node3'", + "value_TRANSITIONS", + "value_lbl.backward()" + ], + "condition": "cnd.regexp('back', re.IGNORECASE)", + "source": [ + "NODE", + "'music_flow'", + "'node3'" + ], + "target": [ + "LABEL", + "backward", + [ + "priority", + "None" + ], + [ + "cyclicality_flag", + "True" + ], + [ + "args", + "()" + ], + [ + "kwargs", + "{}" + ] + ], + "key": 0 + }, + { + "label_ref": [ + "main", + "script", + "value", + "value_'music_flow'", + "value_'node3'", + "value_TRANSITIONS", + "key_lbl.repeat()" + ], + "label": "lbl.repeat()", + "condition_ref": [ + "main", + "script", + "value", + "value_'music_flow'", + "value_'node3'", + "value_TRANSITIONS", + "value_lbl.repeat()" + ], + "condition": "cnd.regexp('repeat', re.IGNORECASE)", + "source": [ + "NODE", + "'music_flow'", + "'node3'" + ], + "target": [ + "LABEL", + "repeat", + [ + "priority", + "None" + ], + [ + "args", + "()" + ], + [ + "kwargs", + "{}" + ] + ], + "key": 0 + }, + { + "label_ref": [ + "main", + "script", + "value", + "value_'music_flow'", + "value_'node3'", + "value_TRANSITIONS", + "key_lbl.to_fallback()" + ], + "label": "lbl.to_fallback()", + "condition_ref": [ + "main", + "script", + "value", + "value_'music_flow'", + "value_'node3'", + "value_TRANSITIONS", + "value_lbl.to_fallback()" + ], + "condition": "cnd.true()", + "source": [ + "NODE", + "'music_flow'", + "'node3'" + ], + "target": [ + "LABEL", + 
"to_fallback", + [ + "priority", + "None" + ], + [ + "args", + "()" + ], + [ + "kwargs", + "{}" + ] + ], + "key": 0 + }, + { + "label_ref": [ + "main", + "script", + "value", + "value_'music_flow'", + "value_'node4'", + "value_TRANSITIONS", + "key_transitions.greeting_flow_n2_transition" + ], + "label": "transitions.greeting_flow_n2_transition", + "condition_ref": [ + "main", + "script", + "value", + "value_'music_flow'", + "value_'node4'", + "value_TRANSITIONS", + "value_transitions.greeting_flow_n2_transition" + ], + "condition": "cnd.regexp('next', re.IGNORECASE)", + "source": [ + "NODE", + "'music_flow'", + "'node4'" + ], + "target": [ + "NONE" + ], + "key": 0 + }, + { + "label_ref": [ + "main", + "script", + "value", + "value_'music_flow'", + "value_'node4'", + "value_TRANSITIONS", + "key_transitions.high_priority_node_transition('greeting_flow', 'node4')" + ], + "label": "transitions.high_priority_node_transition('greeting_flow', 'node4')", + "condition_ref": [ + "main", + "script", + "value", + "value_'music_flow'", + "value_'node4'", + "value_TRANSITIONS", + "value_transitions.high_priority_node_transition('greeting_flow', 'node4')" + ], + "condition": "cnd.regexp('next time', re.IGNORECASE)", + "source": [ + "NODE", + "'music_flow'", + "'node4'" + ], + "target": [ + "NONE" + ], + "key": 1 + }, + { + "label_ref": [ + "main", + "script", + "value", + "value_'music_flow'", + "value_'node4'", + "value_TRANSITIONS", + "key_lbl.to_fallback()" + ], + "label": "lbl.to_fallback()", + "condition_ref": [ + "main", + "script", + "value", + "value_'music_flow'", + "value_'node4'", + "value_TRANSITIONS", + "value_lbl.to_fallback()" + ], + "condition": "cnd.true()", + "source": [ + "NODE", + "'music_flow'", + "'node4'" + ], + "target": [ + "LABEL", + "to_fallback", + [ + "priority", + "None" + ], + [ + "args", + "()" + ], + [ + "kwargs", + "{}" + ] + ], + "key": 0 + } + ] +} \ No newline at end of file diff --git 
a/tests/parser/TEST_CASES/complex_cases/just_works/new_script.yaml b/tests/parser/TEST_CASES/complex_cases/just_works/new_script.yaml new file mode 100644 index 000000000..817102512 --- /dev/null +++ b/tests/parser/TEST_CASES/complex_cases/just_works/new_script.yaml @@ -0,0 +1,97 @@ +main: + re: import re + TRANSITIONS: from dff.script.core.keywords import TRANSITIONS + RESPONSE: from dff.script.core.keywords import RESPONSE + GLOBAL: from dff.script.core.keywords import GLOBAL + MISC: from dff.script.core.keywords import MISC + cnd: import dff.script.conditions + lbl: import dff.script.labels + rsp: import dff.script.responses + Pipeline: from dff.pipeline import Pipeline + transitions: import transitions + global_flow: from flow import global_flow + global_flow_1: global_flow + script: + GLOBAL: + TRANSITIONS: + ('greeting_flow', 'node1', 1.1): cnd.regexp('\\b(hi|hello)\\b', re.I) + ('music_flow', 'node1', 1.1): cnd.regexp('talk about music') + lbl.to_fallback(0.1): cnd.true() + lbl.forward(): cnd.all([cnd.regexp('next\\b'), cnd.has_last_labels(labels=[('music_flow', i) for i in ['node2', 'node3']])]) + lbl.repeat(0.2): cnd.all([cnd.regexp('repeat', re.I), cnd.negation(cnd.has_last_labels(flow_labels=['global_flow']))]) + MISC: + "'var1'": "'global_data'" + "'var2'": "'global_data'" + "'var3'": "'global_data'" + RESPONSE: "\"''\"" + "'global_flow'": global_flow_1 + "'greeting_flow'": + "'node1'": + RESPONSE: rsp.choice(['Hi, what is up?', 'Hello, how are you?']) + TRANSITIONS: + ('global_flow', 'fallback_node', 0.1): cnd.true() + "'node2'": cnd.regexp('how are you') + MISC: + "'var3'": "'info_of_step_1'" + "'node2'": + RESPONSE: "'Good. 
What do you want to talk about?'" + TRANSITIONS: + lbl.to_fallback(0.1): cnd.true() + lbl.forward(0.5): cnd.regexp('talk about') + ('music_flow', 'node1'): cnd.regexp('talk about music') + lbl.previous(): cnd.regexp('previous', re.IGNORECASE) + "'node3'": + RESPONSE: foo + TRANSITIONS: + lbl.forward(): cnd.regexp('bye') + "'node4'": + RESPONSE: bar('bye') + TRANSITIONS: + "'node1'": cnd.regexp('hi|hello', re.IGNORECASE) + lbl.to_fallback(): cnd.true() + "'music_flow'": + "'node1'": + RESPONSE: "'I love `System of a Down` group, would you like to tell about it? '" + TRANSITIONS: + lbl.forward(): cnd.regexp('yes|yep|ok', re.IGNORECASE) + lbl.to_fallback(): cnd.true() + "'node2'": + RESPONSE: "'System of a Down is an Armenian-American heavy metal band formed in in 1994.'" + TRANSITIONS: + lbl.forward(): cnd.regexp('next', re.IGNORECASE) + lbl.repeat(): cnd.regexp('repeat', re.IGNORECASE) + lbl.to_fallback(): cnd.true() + "'node3'": + RESPONSE: "'The band achieved commercial success with the release of five studio albums.'" + TRANSITIONS: + lbl.forward(): cnd.regexp('next', re.IGNORECASE) + lbl.backward(): cnd.regexp('back', re.IGNORECASE) + lbl.repeat(): cnd.regexp('repeat', re.IGNORECASE) + lbl.to_fallback(): cnd.true() + "'node4'": + RESPONSE: "\"That's all what I know\"" + TRANSITIONS: + transitions.greeting_flow_n2_transition: cnd.regexp('next', re.IGNORECASE) + transitions.high_priority_node_transition('greeting_flow', 'node4'): cnd.regexp('next time', re.IGNORECASE) + lbl.to_fallback(): cnd.true() + pipeline: Pipeline.from_script(fallback_label=('global_flow', 'fallback_node'), script=script, start_label=('global_flow', 'start_node')) +flow: + TRANSITIONS: from dff.script.core.keywords import TRANSITIONS + RESPONSE: from dff.script.core.keywords import RESPONSE + cnd: import dff.script.conditions + re: import re + lbl: import dff.script.labels.std_labels + global_flow: + "'start_node'": + RESPONSE: "''" + TRANSITIONS: + ('music_flow', 'node1'): cnd.regexp('talk 
about music') + ('greeting_flow', 'node1'): cnd.regexp('hi|hello', re.IGNORECASE) + "'fallback_node'": cnd.true() + "'fallback_node'": + RESPONSE: "'Ooops'" + TRANSITIONS: + ('music_flow', 'node1'): cnd.regexp('talk about music') + ('greeting_flow', 'node1'): cnd.regexp('hi|hello', re.IGNORECASE) + lbl.previous(): cnd.regexp('previous', re.IGNORECASE) + lbl.repeat(): cnd.true() diff --git a/tests/parser/TEST_CASES/complex_cases/just_works/python_files/flow.py b/tests/parser/TEST_CASES/complex_cases/just_works/python_files/flow.py new file mode 100755 index 000000000..b2dde633f --- /dev/null +++ b/tests/parser/TEST_CASES/complex_cases/just_works/python_files/flow.py @@ -0,0 +1,33 @@ +from dff.script.core.keywords import TRANSITIONS, RESPONSE +import dff.script.conditions as cnd +import dff.script.labels.std_labels as lbl +import re + + +global_flow = { + "start_node": { # This is an initial node, it doesn't need an `RESPONSE` + RESPONSE: "", + TRANSITIONS: { + ("music_flow", "node1"): cnd.regexp(r"talk about music"), # first check + ("greeting_flow", "node1"): cnd.regexp( + r"hi|hello", re.IGNORECASE + ), # second check + # ("global_flow", "fallback_node"): cnd.true(), # third check + "fallback_node": cnd.true(), # third check + # "fallback_node" is equivalent to ("global_flow", "fallback_node") + }, + }, + "fallback_node": { # We get to this node if an error occurred while the agent was running + RESPONSE: "Ooops", + TRANSITIONS: { + ("music_flow", "node1"): cnd.regexp(r"talk about music"), # first check + ("greeting_flow", "node1"): cnd.regexp( + r"hi|hello", re.IGNORECASE + ), # second check + lbl.previous(): cnd.regexp(r"previous", re.IGNORECASE), # third check + # lbl.previous() is equivalent to ("PREVIOUS_flow", "PREVIOUS_node") + lbl.repeat(): cnd.true(), # fourth check + # lbl.repeat() is equivalent to ("global_flow", "fallback_node") + }, + }, +} diff --git a/tests/parser/TEST_CASES/complex_cases/just_works/python_files/main.py 
b/tests/parser/TEST_CASES/complex_cases/just_works/python_files/main.py new file mode 100755 index 000000000..1cfaa54cc --- /dev/null +++ b/tests/parser/TEST_CASES/complex_cases/just_works/python_files/main.py @@ -0,0 +1,124 @@ +import re + +from dff.script.core.keywords import ( + TRANSITIONS, + RESPONSE, + GLOBAL, + MISC, +) +import dff.script.conditions as cnd +import dff.script.labels as lbl +import dff.script.responses as rsp +from dff.pipeline import Pipeline + +import transitions +from flow import global_flow + + +script = { + GLOBAL: { + TRANSITIONS: { + ("greeting_flow", "node1", 1.1): cnd.regexp(r"\b(hi|hello)\b", re.I), + ("music_flow", "node1", 1.1): cnd.regexp(r"talk about music"), + lbl.to_fallback(0.1): cnd.true(), + lbl.forward(): cnd.all( + [ + cnd.regexp(r"next\b"), + cnd.has_last_labels( + labels=[("music_flow", i) for i in ["node2", "node3"]] + ), + ] + ), + lbl.repeat(0.2): cnd.all( + [ + cnd.regexp(r"repeat", re.I), + cnd.negation(cnd.has_last_labels(flow_labels=["global_flow"])), + ] + ), + }, + MISC: { + "var1": "global_data", + "var2": "global_data", + "var3": "global_data", + }, + RESPONSE: """''""", + }, + "global_flow": global_flow, + "greeting_flow": { + "node1": { + RESPONSE: rsp.choice( + ["Hi, what is up?", "Hello, how are you?"] + ), # When the agent goes to node1, we return "Hi, how are you?" + TRANSITIONS: { + ("global_flow", "fallback_node", 0.1): cnd.true(), # second check + "node2": cnd.regexp(r"how are you"), # first check + # "node2" is equivalent to ("greeting_flow", "node2", 1.0) + }, + MISC: {"var3": "info_of_step_1"}, + }, + "node2": { + RESPONSE: "Good. 
What do you want to talk about?", + TRANSITIONS: { + lbl.to_fallback(0.1): cnd.true(), # third check + # lbl.to_fallback(0.1) is equivalent to ("global_flow", "fallback_node", 0.1) + lbl.forward(0.5): cnd.regexp(r"talk about"), # second check + # lbl.forward(0.5) is equivalent to ("greeting_flow", "node3", 0.5) + ("music_flow", "node1"): cnd.regexp(r"talk about music"), # first check + lbl.previous(): cnd.regexp(r"previous", re.IGNORECASE), # third check + # ("music_flow", "node1") is equivalent to ("music_flow", "node1", 1.0) + }, + }, + "node3": {RESPONSE: foo, TRANSITIONS: {lbl.forward(): cnd.regexp(r"bye")}}, + "node4": { + RESPONSE: bar("bye"), + TRANSITIONS: { + "node1": cnd.regexp(r"hi|hello", re.IGNORECASE), # first check + lbl.to_fallback(): cnd.true(), # second check + }, + }, + }, + "music_flow": { + "node1": { + RESPONSE: "I love `System of a Down` group, would you like to tell about it? ", + TRANSITIONS: { + lbl.forward(): cnd.regexp(r"yes|yep|ok", re.IGNORECASE), + lbl.to_fallback(): cnd.true(), + }, + }, + "node2": { + RESPONSE: "System of a Down is an Armenian-American heavy metal band formed in in 1994.", + TRANSITIONS: { + lbl.forward(): cnd.regexp(r"next", re.IGNORECASE), + lbl.repeat(): cnd.regexp(r"repeat", re.IGNORECASE), + lbl.to_fallback(): cnd.true(), + }, + }, + "node3": { + RESPONSE: "The band achieved commercial success with the release of five studio albums.", + TRANSITIONS: { + lbl.forward(): cnd.regexp(r"next", re.IGNORECASE), + lbl.backward(): cnd.regexp(r"back", re.IGNORECASE), + lbl.repeat(): cnd.regexp(r"repeat", re.IGNORECASE), + lbl.to_fallback(): cnd.true(), + }, + }, + "node4": { + RESPONSE: "That's all what I know", + TRANSITIONS: { + transitions.greeting_flow_n2_transition: cnd.regexp( + r"next", re.IGNORECASE + ), + transitions.high_priority_node_transition( + "greeting_flow", "node4" + ): cnd.regexp(r"next time", re.IGNORECASE), + lbl.to_fallback(): cnd.true(), + }, + }, + }, +} + +pipeline = Pipeline.from_script( + 
fallback_label=("global_flow", "fallback_node"), + script=script, + start_label=("global_flow", "start_node"), +) diff --git a/tests/parser/TEST_CASES/complex_cases/just_works/python_files/transitions.py b/tests/parser/TEST_CASES/complex_cases/just_works/python_files/transitions.py new file mode 100755 index 000000000..15dca2a14 --- /dev/null +++ b/tests/parser/TEST_CASES/complex_cases/just_works/python_files/transitions.py @@ -0,0 +1,19 @@ +from dff.script.core.types import NodeLabel3Type +from dff.script import Context +from dff.pipeline import Pipeline +import typing as tp + + +def greeting_flow_n2_transition( + _: Context, __: Pipeline, *args, **kwargs +) -> NodeLabel3Type: + return "greeting_flow", "node2", 1.0 + + +def high_priority_node_transition( + flow_label: str, label: str +) -> tp.Callable[..., NodeLabel3Type]: + def transition(_: Context, __: Pipeline, *args, **kwargs) -> NodeLabel3Type: + return flow_label, label, 2.0 + + return transition diff --git a/tests/parser/TEST_CASES/complex_cases/just_works/result_creating/flow.py b/tests/parser/TEST_CASES/complex_cases/just_works/result_creating/flow.py new file mode 100644 index 000000000..e01f9f687 --- /dev/null +++ b/tests/parser/TEST_CASES/complex_cases/just_works/result_creating/flow.py @@ -0,0 +1,25 @@ +from dff.script.core.keywords import TRANSITIONS +from dff.script.core.keywords import RESPONSE +import dff.script.conditions as cnd +import re +import dff.script.labels.std_labels as lbl + +global_flow = { + 'start_node': { + RESPONSE: '', + TRANSITIONS: { + ('music_flow', 'node1'): cnd.regexp('talk about music'), + ('greeting_flow', 'node1'): cnd.regexp('hi|hello', re.IGNORECASE), + 'fallback_node': cnd.true(), + }, + }, + 'fallback_node': { + RESPONSE: 'Ooops', + TRANSITIONS: { + ('music_flow', 'node1'): cnd.regexp('talk about music'), + ('greeting_flow', 'node1'): cnd.regexp('hi|hello', re.IGNORECASE), + lbl.previous(): cnd.regexp('previous', re.IGNORECASE), + lbl.repeat(): cnd.true(), + }, + }, 
+} diff --git a/tests/parser/TEST_CASES/complex_cases/just_works/result_creating/main.py b/tests/parser/TEST_CASES/complex_cases/just_works/result_creating/main.py new file mode 100644 index 000000000..b90a3b0b4 --- /dev/null +++ b/tests/parser/TEST_CASES/complex_cases/just_works/result_creating/main.py @@ -0,0 +1,102 @@ +import re +from dff.script.core.keywords import TRANSITIONS +from dff.script.core.keywords import RESPONSE +from dff.script.core.keywords import GLOBAL +from dff.script.core.keywords import MISC +import dff.script.conditions as cnd +import dff.script.labels as lbl +import dff.script.responses as rsp +from dff.pipeline import Pipeline +import transitions +from flow import global_flow + +global_flow_1 = global_flow + +script = { + GLOBAL: { + TRANSITIONS: { + ('greeting_flow', 'node1', 1.1): cnd.regexp('\\b(hi|hello)\\b', re.I), + ('music_flow', 'node1', 1.1): cnd.regexp('talk about music'), + lbl.to_fallback(0.1): cnd.true(), + lbl.forward(): cnd.all([cnd.regexp('next\\b'), cnd.has_last_labels(labels=[('music_flow', i) for i in ['node2', 'node3']])]), + lbl.repeat(0.2): cnd.all([cnd.regexp('repeat', re.I), cnd.negation(cnd.has_last_labels(flow_labels=['global_flow']))]), + }, + MISC: { + 'var1': 'global_data', + 'var2': 'global_data', + 'var3': 'global_data', + }, + RESPONSE: "''", + }, + 'global_flow': global_flow_1, + 'greeting_flow': { + 'node1': { + RESPONSE: rsp.choice(['Hi, what is up?', 'Hello, how are you?']), + TRANSITIONS: { + ('global_flow', 'fallback_node', 0.1): cnd.true(), + 'node2': cnd.regexp('how are you'), + }, + MISC: { + 'var3': 'info_of_step_1', + }, + }, + 'node2': { + RESPONSE: 'Good. 
What do you want to talk about?', + TRANSITIONS: { + lbl.to_fallback(0.1): cnd.true(), + lbl.forward(0.5): cnd.regexp('talk about'), + ('music_flow', 'node1'): cnd.regexp('talk about music'), + lbl.previous(): cnd.regexp('previous', re.IGNORECASE), + }, + }, + 'node3': { + RESPONSE: foo, + TRANSITIONS: { + lbl.forward(): cnd.regexp('bye'), + }, + }, + 'node4': { + RESPONSE: bar('bye'), + TRANSITIONS: { + 'node1': cnd.regexp('hi|hello', re.IGNORECASE), + lbl.to_fallback(): cnd.true(), + }, + }, + }, + 'music_flow': { + 'node1': { + RESPONSE: 'I love `System of a Down` group, would you like to tell about it? ', + TRANSITIONS: { + lbl.forward(): cnd.regexp('yes|yep|ok', re.IGNORECASE), + lbl.to_fallback(): cnd.true(), + }, + }, + 'node2': { + RESPONSE: 'System of a Down is an Armenian-American heavy metal band formed in in 1994.', + TRANSITIONS: { + lbl.forward(): cnd.regexp('next', re.IGNORECASE), + lbl.repeat(): cnd.regexp('repeat', re.IGNORECASE), + lbl.to_fallback(): cnd.true(), + }, + }, + 'node3': { + RESPONSE: 'The band achieved commercial success with the release of five studio albums.', + TRANSITIONS: { + lbl.forward(): cnd.regexp('next', re.IGNORECASE), + lbl.backward(): cnd.regexp('back', re.IGNORECASE), + lbl.repeat(): cnd.regexp('repeat', re.IGNORECASE), + lbl.to_fallback(): cnd.true(), + }, + }, + 'node4': { + RESPONSE: "That's all what I know", + TRANSITIONS: { + transitions.greeting_flow_n2_transition: cnd.regexp('next', re.IGNORECASE), + transitions.high_priority_node_transition('greeting_flow', 'node4'): cnd.regexp('next time', re.IGNORECASE), + lbl.to_fallback(): cnd.true(), + }, + }, + }, +} + +pipeline = Pipeline.from_script(fallback_label=('global_flow', 'fallback_node'), script=script, start_label=('global_flow', 'start_node')) diff --git a/tests/parser/TEST_CASES/complex_cases/just_works/result_editing/flow.py b/tests/parser/TEST_CASES/complex_cases/just_works/result_editing/flow.py new file mode 100755 index 000000000..5356c39a2 --- /dev/null 
+++ b/tests/parser/TEST_CASES/complex_cases/just_works/result_editing/flow.py @@ -0,0 +1,26 @@ +from dff.script.core.keywords import TRANSITIONS +from dff.script.core.keywords import RESPONSE +import dff.script.conditions as cnd +import re +import dff.script.labels.std_labels as lbl +import re + +global_flow = { + 'start_node': { + RESPONSE: '', + TRANSITIONS: { + ('music_flow', 'node1'): cnd.regexp('talk about music'), + ('greeting_flow', 'node1'): cnd.regexp('hi|hello', re.IGNORECASE), + 'fallback_node': cnd.true(), + }, + }, + 'fallback_node': { + RESPONSE: 'Ooops', + TRANSITIONS: { + ('music_flow', 'node1'): cnd.regexp('talk about music'), + ('greeting_flow', 'node1'): cnd.regexp('hi|hello', re.IGNORECASE), + lbl.previous(): cnd.regexp('previous', re.IGNORECASE), + lbl.repeat(): cnd.true(), + }, + }, +} diff --git a/tests/parser/TEST_CASES/complex_cases/just_works/result_editing/main.py b/tests/parser/TEST_CASES/complex_cases/just_works/result_editing/main.py new file mode 100755 index 000000000..b90a3b0b4 --- /dev/null +++ b/tests/parser/TEST_CASES/complex_cases/just_works/result_editing/main.py @@ -0,0 +1,102 @@ +import re +from dff.script.core.keywords import TRANSITIONS +from dff.script.core.keywords import RESPONSE +from dff.script.core.keywords import GLOBAL +from dff.script.core.keywords import MISC +import dff.script.conditions as cnd +import dff.script.labels as lbl +import dff.script.responses as rsp +from dff.pipeline import Pipeline +import transitions +from flow import global_flow + +global_flow_1 = global_flow + +script = { + GLOBAL: { + TRANSITIONS: { + ('greeting_flow', 'node1', 1.1): cnd.regexp('\\b(hi|hello)\\b', re.I), + ('music_flow', 'node1', 1.1): cnd.regexp('talk about music'), + lbl.to_fallback(0.1): cnd.true(), + lbl.forward(): cnd.all([cnd.regexp('next\\b'), cnd.has_last_labels(labels=[('music_flow', i) for i in ['node2', 'node3']])]), + lbl.repeat(0.2): cnd.all([cnd.regexp('repeat', re.I), 
cnd.negation(cnd.has_last_labels(flow_labels=['global_flow']))]), + }, + MISC: { + 'var1': 'global_data', + 'var2': 'global_data', + 'var3': 'global_data', + }, + RESPONSE: "''", + }, + 'global_flow': global_flow_1, + 'greeting_flow': { + 'node1': { + RESPONSE: rsp.choice(['Hi, what is up?', 'Hello, how are you?']), + TRANSITIONS: { + ('global_flow', 'fallback_node', 0.1): cnd.true(), + 'node2': cnd.regexp('how are you'), + }, + MISC: { + 'var3': 'info_of_step_1', + }, + }, + 'node2': { + RESPONSE: 'Good. What do you want to talk about?', + TRANSITIONS: { + lbl.to_fallback(0.1): cnd.true(), + lbl.forward(0.5): cnd.regexp('talk about'), + ('music_flow', 'node1'): cnd.regexp('talk about music'), + lbl.previous(): cnd.regexp('previous', re.IGNORECASE), + }, + }, + 'node3': { + RESPONSE: foo, + TRANSITIONS: { + lbl.forward(): cnd.regexp('bye'), + }, + }, + 'node4': { + RESPONSE: bar('bye'), + TRANSITIONS: { + 'node1': cnd.regexp('hi|hello', re.IGNORECASE), + lbl.to_fallback(): cnd.true(), + }, + }, + }, + 'music_flow': { + 'node1': { + RESPONSE: 'I love `System of a Down` group, would you like to tell about it? 
', + TRANSITIONS: { + lbl.forward(): cnd.regexp('yes|yep|ok', re.IGNORECASE), + lbl.to_fallback(): cnd.true(), + }, + }, + 'node2': { + RESPONSE: 'System of a Down is an Armenian-American heavy metal band formed in in 1994.', + TRANSITIONS: { + lbl.forward(): cnd.regexp('next', re.IGNORECASE), + lbl.repeat(): cnd.regexp('repeat', re.IGNORECASE), + lbl.to_fallback(): cnd.true(), + }, + }, + 'node3': { + RESPONSE: 'The band achieved commercial success with the release of five studio albums.', + TRANSITIONS: { + lbl.forward(): cnd.regexp('next', re.IGNORECASE), + lbl.backward(): cnd.regexp('back', re.IGNORECASE), + lbl.repeat(): cnd.regexp('repeat', re.IGNORECASE), + lbl.to_fallback(): cnd.true(), + }, + }, + 'node4': { + RESPONSE: "That's all what I know", + TRANSITIONS: { + transitions.greeting_flow_n2_transition: cnd.regexp('next', re.IGNORECASE), + transitions.high_priority_node_transition('greeting_flow', 'node4'): cnd.regexp('next time', re.IGNORECASE), + lbl.to_fallback(): cnd.true(), + }, + }, + }, +} + +pipeline = Pipeline.from_script(fallback_label=('global_flow', 'fallback_node'), script=script, start_label=('global_flow', 'start_node')) diff --git a/tests/parser/TEST_CASES/complex_cases/just_works/result_editing/transitions.py b/tests/parser/TEST_CASES/complex_cases/just_works/result_editing/transitions.py new file mode 100755 index 000000000..15dca2a14 --- /dev/null +++ b/tests/parser/TEST_CASES/complex_cases/just_works/result_editing/transitions.py @@ -0,0 +1,19 @@ +from dff.script.core.types import NodeLabel3Type +from dff.script import Context +from dff.pipeline import Pipeline +import typing as tp + + +def greeting_flow_n2_transition( + _: Context, __: Pipeline, *args, **kwargs +) -> NodeLabel3Type: + return "greeting_flow", "node2", 1.0 + + +def high_priority_node_transition( + flow_label: str, label: str +) -> tp.Callable[..., NodeLabel3Type]: + def transition(_: Context, __: Pipeline, *args, **kwargs) -> NodeLabel3Type: + return flow_label, label, 
2.0 + + return transition diff --git a/tests/parser/TEST_CASES/complex_cases/just_works/script.yaml b/tests/parser/TEST_CASES/complex_cases/just_works/script.yaml new file mode 100644 index 000000000..f543ac542 --- /dev/null +++ b/tests/parser/TEST_CASES/complex_cases/just_works/script.yaml @@ -0,0 +1,96 @@ +main: + re: import re + TRANSITIONS: from dff.script.core.keywords import TRANSITIONS + RESPONSE: from dff.script.core.keywords import RESPONSE + GLOBAL: from dff.script.core.keywords import GLOBAL + MISC: from dff.script.core.keywords import MISC + cnd: import dff.script.conditions + lbl: import dff.script.labels + rsp: import dff.script.responses + Pipeline: from dff.pipeline import Pipeline + transitions: import transitions + global_flow: from flow import global_flow + script: + GLOBAL: + TRANSITIONS: + ('greeting_flow', 'node1', 1.1): cnd.regexp('\\b(hi|hello)\\b', re.I) + ('music_flow', 'node1', 1.1): cnd.regexp('talk about music') + lbl.to_fallback(0.1): cnd.true() + lbl.forward(): cnd.all([cnd.regexp('next\\b'), cnd.has_last_labels(labels=[('music_flow', i) for i in ['node2', 'node3']])]) + lbl.repeat(0.2): cnd.all([cnd.regexp('repeat', re.I), cnd.negation(cnd.has_last_labels(flow_labels=['global_flow']))]) + MISC: + "'var1'": "'global_data'" + "'var2'": "'global_data'" + "'var3'": "'global_data'" + RESPONSE: "\"''\"" + "'global_flow'": global_flow + "'greeting_flow'": + "'node1'": + RESPONSE: rsp.choice(['Hi, what is up?', 'Hello, how are you?']) + TRANSITIONS: + ('global_flow', 'fallback_node', 0.1): cnd.true() + "'node2'": cnd.regexp('how are you') + MISC: + "'var3'": "'info_of_step_1'" + "'node2'": + RESPONSE: "'Good. 
What do you want to talk about?'" + TRANSITIONS: + lbl.to_fallback(0.1): cnd.true() + lbl.forward(0.5): cnd.regexp('talk about') + ('music_flow', 'node1'): cnd.regexp('talk about music') + lbl.previous(): cnd.regexp('previous', re.IGNORECASE) + "'node3'": + RESPONSE: foo + TRANSITIONS: + lbl.forward(): cnd.regexp('bye') + "'node4'": + RESPONSE: bar('bye') + TRANSITIONS: + "'node1'": cnd.regexp('hi|hello', re.IGNORECASE) + lbl.to_fallback(): cnd.true() + "'music_flow'": + "'node1'": + RESPONSE: "'I love `System of a Down` group, would you like to tell about it? '" + TRANSITIONS: + lbl.forward(): cnd.regexp('yes|yep|ok', re.IGNORECASE) + lbl.to_fallback(): cnd.true() + "'node2'": + RESPONSE: "'System of a Down is an Armenian-American heavy metal band formed in in 1994.'" + TRANSITIONS: + lbl.forward(): cnd.regexp('next', re.IGNORECASE) + lbl.repeat(): cnd.regexp('repeat', re.IGNORECASE) + lbl.to_fallback(): cnd.true() + "'node3'": + RESPONSE: "'The band achieved commercial success with the release of five studio albums.'" + TRANSITIONS: + lbl.forward(): cnd.regexp('next', re.IGNORECASE) + lbl.backward(): cnd.regexp('back', re.IGNORECASE) + lbl.repeat(): cnd.regexp('repeat', re.IGNORECASE) + lbl.to_fallback(): cnd.true() + "'node4'": + RESPONSE: "\"That's all what I know\"" + TRANSITIONS: + transitions.greeting_flow_n2_transition: cnd.regexp('next', re.IGNORECASE) + transitions.high_priority_node_transition('greeting_flow', 'node4'): cnd.regexp('next time', re.IGNORECASE) + lbl.to_fallback(): cnd.true() + pipeline: Pipeline.from_script(fallback_label=('global_flow', 'fallback_node'), script=script, start_label=('global_flow', 'start_node')) +flow: + TRANSITIONS: from dff.script.core.keywords import TRANSITIONS + RESPONSE: from dff.script.core.keywords import RESPONSE + cnd: import dff.script.conditions + lbl: import dff.script.labels.std_labels + re: import re + global_flow: + "'start_node'": + RESPONSE: "''" + TRANSITIONS: + ('music_flow', 'node1'): cnd.regexp('talk 
about music') + ('greeting_flow', 'node1'): cnd.regexp('hi|hello', re.IGNORECASE) + "'fallback_node'": cnd.true() + "'fallback_node'": + RESPONSE: "'Ooops'" + TRANSITIONS: + ('music_flow', 'node1'): cnd.regexp('talk about music') + ('greeting_flow', 'node1'): cnd.regexp('hi|hello', re.IGNORECASE) + lbl.previous(): cnd.regexp('previous', re.IGNORECASE) + lbl.repeat(): cnd.true() diff --git a/tests/parser/TEST_CASES/complex_cases/just_works/script.yaml.diff b/tests/parser/TEST_CASES/complex_cases/just_works/script.yaml.diff new file mode 100644 index 000000000..9eae9a82a --- /dev/null +++ b/tests/parser/TEST_CASES/complex_cases/just_works/script.yaml.diff @@ -0,0 +1,100 @@ + main: + re: import re + TRANSITIONS: from dff.script.core.keywords import TRANSITIONS + RESPONSE: from dff.script.core.keywords import RESPONSE + GLOBAL: from dff.script.core.keywords import GLOBAL + MISC: from dff.script.core.keywords import MISC + cnd: import dff.script.conditions + lbl: import dff.script.labels + rsp: import dff.script.responses + Pipeline: from dff.pipeline import Pipeline + transitions: import transitions + global_flow: from flow import global_flow ++ global_flow_1: global_flow + script: + GLOBAL: + TRANSITIONS: + ('greeting_flow', 'node1', 1.1): cnd.regexp('\\b(hi|hello)\\b', re.I) + ('music_flow', 'node1', 1.1): cnd.regexp('talk about music') + lbl.to_fallback(0.1): cnd.true() + lbl.forward(): cnd.all([cnd.regexp('next\\b'), cnd.has_last_labels(labels=[('music_flow', i) for i in ['node2', 'node3']])]) + lbl.repeat(0.2): cnd.all([cnd.regexp('repeat', re.I), cnd.negation(cnd.has_last_labels(flow_labels=['global_flow']))]) + MISC: + "'var1'": "'global_data'" + "'var2'": "'global_data'" + "'var3'": "'global_data'" + RESPONSE: "\"''\"" +- "'global_flow'": global_flow ++ "'global_flow'": global_flow_1 +? 
++ + "'greeting_flow'": + "'node1'": + RESPONSE: rsp.choice(['Hi, what is up?', 'Hello, how are you?']) + TRANSITIONS: + ('global_flow', 'fallback_node', 0.1): cnd.true() + "'node2'": cnd.regexp('how are you') + MISC: + "'var3'": "'info_of_step_1'" + "'node2'": + RESPONSE: "'Good. What do you want to talk about?'" + TRANSITIONS: + lbl.to_fallback(0.1): cnd.true() + lbl.forward(0.5): cnd.regexp('talk about') + ('music_flow', 'node1'): cnd.regexp('talk about music') + lbl.previous(): cnd.regexp('previous', re.IGNORECASE) + "'node3'": + RESPONSE: foo + TRANSITIONS: + lbl.forward(): cnd.regexp('bye') + "'node4'": + RESPONSE: bar('bye') + TRANSITIONS: + "'node1'": cnd.regexp('hi|hello', re.IGNORECASE) + lbl.to_fallback(): cnd.true() + "'music_flow'": + "'node1'": + RESPONSE: "'I love `System of a Down` group, would you like to tell about it? '" + TRANSITIONS: + lbl.forward(): cnd.regexp('yes|yep|ok', re.IGNORECASE) + lbl.to_fallback(): cnd.true() + "'node2'": + RESPONSE: "'System of a Down is an Armenian-American heavy metal band formed in in 1994.'" + TRANSITIONS: + lbl.forward(): cnd.regexp('next', re.IGNORECASE) + lbl.repeat(): cnd.regexp('repeat', re.IGNORECASE) + lbl.to_fallback(): cnd.true() + "'node3'": + RESPONSE: "'The band achieved commercial success with the release of five studio albums.'" + TRANSITIONS: + lbl.forward(): cnd.regexp('next', re.IGNORECASE) + lbl.backward(): cnd.regexp('back', re.IGNORECASE) + lbl.repeat(): cnd.regexp('repeat', re.IGNORECASE) + lbl.to_fallback(): cnd.true() + "'node4'": + RESPONSE: "\"That's all what I know\"" + TRANSITIONS: + transitions.greeting_flow_n2_transition: cnd.regexp('next', re.IGNORECASE) + transitions.high_priority_node_transition('greeting_flow', 'node4'): cnd.regexp('next time', re.IGNORECASE) + lbl.to_fallback(): cnd.true() + pipeline: Pipeline.from_script(fallback_label=('global_flow', 'fallback_node'), script=script, start_label=('global_flow', 'start_node')) + flow: + TRANSITIONS: from 
dff.script.core.keywords import TRANSITIONS + RESPONSE: from dff.script.core.keywords import RESPONSE + cnd: import dff.script.conditions ++ re: import re + lbl: import dff.script.labels.std_labels +- re: import re + global_flow: + "'start_node'": + RESPONSE: "''" + TRANSITIONS: + ('music_flow', 'node1'): cnd.regexp('talk about music') + ('greeting_flow', 'node1'): cnd.regexp('hi|hello', re.IGNORECASE) + "'fallback_node'": cnd.true() + "'fallback_node'": + RESPONSE: "'Ooops'" + TRANSITIONS: + ('music_flow', 'node1'): cnd.regexp('talk about music') + ('greeting_flow', 'node1'): cnd.regexp('hi|hello', re.IGNORECASE) + lbl.previous(): cnd.regexp('previous', re.IGNORECASE) + lbl.repeat(): cnd.true() diff --git a/tests/parser/TEST_CASES/complex_cases/modular/graph.json b/tests/parser/TEST_CASES/complex_cases/modular/graph.json new file mode 100644 index 000000000..0b6df4f37 --- /dev/null +++ b/tests/parser/TEST_CASES/complex_cases/modular/graph.json @@ -0,0 +1,48 @@ +{ + "directed": true, + "multigraph": true, + "graph": { + "full_script": { + "script": { + "flow": "from flows.start import flow", + "Pipeline": "from dff.pipeline import Pipeline", + "pipeline": "Pipeline.from_script(script={\n 'start_flow': flow,\n}, start_label=('start_flow', 'start_node'))" + }, + "flows.start": { + "node": "from ..nodes.node_1 import node", + "flow": { + "'start_node'": "node" + } + }, + "nodes.node_1": { + "kw": "from dff.script import Keywords", + "node": { + "kw.RESPONSE": "'hey'" + } + } + }, + "start_label": [ + "'start_flow'", + "'start_node'" + ], + "fallback_label": [ + "'start_flow'", + "'start_node'" + ] + }, + "nodes": [ + { + "ref": [ + "nodes.node_1", + "node", + "value" + ], + "id": [ + "NODE", + "'start_flow'", + "'start_node'" + ] + } + ], + "links": [] +} \ No newline at end of file diff --git a/tests/parser/TEST_CASES/complex_cases/modular/new_script.yaml b/tests/parser/TEST_CASES/complex_cases/modular/new_script.yaml new file mode 100644 index 000000000..49d670982 
--- /dev/null +++ b/tests/parser/TEST_CASES/complex_cases/modular/new_script.yaml @@ -0,0 +1,13 @@ +script: + flow: from flows.start import flow + Pipeline: from dff.pipeline import Pipeline + start_label: "('start_flow', 'start_node')" + pipeline: "Pipeline.from_script(script={'start_flow': flow}, start_label=start_label)" +flows.start: + node: from ..nodes.node_1 import node + flow: + "'start_node'": node +nodes.node_1: + kw: from dff.script import Keywords + node: + kw.RESPONSE: "'hey!'" diff --git a/tests/parser/TEST_CASES/complex_cases/modular/python_files/flows/__init__.py b/tests/parser/TEST_CASES/complex_cases/modular/python_files/flows/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/parser/TEST_CASES/complex_cases/modular/python_files/flows/start.py b/tests/parser/TEST_CASES/complex_cases/modular/python_files/flows/start.py new file mode 100644 index 000000000..415e2115c --- /dev/null +++ b/tests/parser/TEST_CASES/complex_cases/modular/python_files/flows/start.py @@ -0,0 +1,3 @@ +from ..nodes.node_1 import node + +flow = {"start_node": node} diff --git a/tests/parser/TEST_CASES/complex_cases/modular/python_files/main.py b/tests/parser/TEST_CASES/complex_cases/modular/python_files/main.py new file mode 100644 index 000000000..9e23b9e7e --- /dev/null +++ b/tests/parser/TEST_CASES/complex_cases/modular/python_files/main.py @@ -0,0 +1 @@ +from script import pipeline diff --git a/tests/parser/TEST_CASES/complex_cases/modular/python_files/nodes/__init__.py b/tests/parser/TEST_CASES/complex_cases/modular/python_files/nodes/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/parser/TEST_CASES/complex_cases/modular/python_files/nodes/node_1.py b/tests/parser/TEST_CASES/complex_cases/modular/python_files/nodes/node_1.py new file mode 100644 index 000000000..ec2679651 --- /dev/null +++ b/tests/parser/TEST_CASES/complex_cases/modular/python_files/nodes/node_1.py @@ -0,0 +1,3 @@ +from dff.script import Keywords 
as kw + +node = {kw.RESPONSE: "hey"} diff --git a/tests/parser/TEST_CASES/complex_cases/modular/python_files/script.py b/tests/parser/TEST_CASES/complex_cases/modular/python_files/script.py new file mode 100644 index 000000000..5c74d3c76 --- /dev/null +++ b/tests/parser/TEST_CASES/complex_cases/modular/python_files/script.py @@ -0,0 +1,5 @@ +from flows.start import flow +from dff.pipeline import Pipeline + + +pipeline = Pipeline.from_script(script={"start_flow": flow}, start_label=("start_flow", "start_node")) diff --git a/tests/parser/TEST_CASES/complex_cases/modular/result_creating/flows/start.py b/tests/parser/TEST_CASES/complex_cases/modular/result_creating/flows/start.py new file mode 100644 index 000000000..ebd705ef8 --- /dev/null +++ b/tests/parser/TEST_CASES/complex_cases/modular/result_creating/flows/start.py @@ -0,0 +1,5 @@ +from ..nodes.node_1 import node + +flow = { + 'start_node': node, +} diff --git a/tests/parser/TEST_CASES/complex_cases/modular/result_creating/nodes/node_1.py b/tests/parser/TEST_CASES/complex_cases/modular/result_creating/nodes/node_1.py new file mode 100644 index 000000000..71b6cd04a --- /dev/null +++ b/tests/parser/TEST_CASES/complex_cases/modular/result_creating/nodes/node_1.py @@ -0,0 +1,5 @@ +from dff.script import Keywords as kw + +node = { + kw.RESPONSE: 'hey!', +} diff --git a/tests/parser/TEST_CASES/complex_cases/modular/result_creating/script.py b/tests/parser/TEST_CASES/complex_cases/modular/result_creating/script.py new file mode 100644 index 000000000..dce8cb6a3 --- /dev/null +++ b/tests/parser/TEST_CASES/complex_cases/modular/result_creating/script.py @@ -0,0 +1,8 @@ +from flows.start import flow +from dff.pipeline import Pipeline + +start_label = ('start_flow', 'start_node') + +pipeline = Pipeline.from_script(script={ + 'start_flow': flow, +}, start_label=start_label) diff --git a/tests/parser/TEST_CASES/complex_cases/modular/result_editing/flows/__init__.py 
b/tests/parser/TEST_CASES/complex_cases/modular/result_editing/flows/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/parser/TEST_CASES/complex_cases/modular/result_editing/flows/start.py b/tests/parser/TEST_CASES/complex_cases/modular/result_editing/flows/start.py new file mode 100644 index 000000000..ebd705ef8 --- /dev/null +++ b/tests/parser/TEST_CASES/complex_cases/modular/result_editing/flows/start.py @@ -0,0 +1,5 @@ +from ..nodes.node_1 import node + +flow = { + 'start_node': node, +} diff --git a/tests/parser/TEST_CASES/complex_cases/modular/result_editing/main.py b/tests/parser/TEST_CASES/complex_cases/modular/result_editing/main.py new file mode 100644 index 000000000..9e23b9e7e --- /dev/null +++ b/tests/parser/TEST_CASES/complex_cases/modular/result_editing/main.py @@ -0,0 +1 @@ +from script import pipeline diff --git a/tests/parser/TEST_CASES/complex_cases/modular/result_editing/nodes/__init__.py b/tests/parser/TEST_CASES/complex_cases/modular/result_editing/nodes/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/parser/TEST_CASES/complex_cases/modular/result_editing/nodes/node_1.py b/tests/parser/TEST_CASES/complex_cases/modular/result_editing/nodes/node_1.py new file mode 100644 index 000000000..71b6cd04a --- /dev/null +++ b/tests/parser/TEST_CASES/complex_cases/modular/result_editing/nodes/node_1.py @@ -0,0 +1,5 @@ +from dff.script import Keywords as kw + +node = { + kw.RESPONSE: 'hey!', +} diff --git a/tests/parser/TEST_CASES/complex_cases/modular/result_editing/script.py b/tests/parser/TEST_CASES/complex_cases/modular/result_editing/script.py new file mode 100644 index 000000000..dce8cb6a3 --- /dev/null +++ b/tests/parser/TEST_CASES/complex_cases/modular/result_editing/script.py @@ -0,0 +1,8 @@ +from flows.start import flow +from dff.pipeline import Pipeline + +start_label = ('start_flow', 'start_node') + +pipeline = Pipeline.from_script(script={ + 'start_flow': flow, +}, 
start_label=start_label) diff --git a/tests/parser/TEST_CASES/complex_cases/modular/script.yaml b/tests/parser/TEST_CASES/complex_cases/modular/script.yaml new file mode 100644 index 000000000..eb95158f7 --- /dev/null +++ b/tests/parser/TEST_CASES/complex_cases/modular/script.yaml @@ -0,0 +1,12 @@ +script: + flow: from flows.start import flow + Pipeline: from dff.pipeline import Pipeline + pipeline: "Pipeline.from_script(script={\n 'start_flow': flow,\n}, start_label=('start_flow', 'start_node'))" +flows.start: + node: from ..nodes.node_1 import node + flow: + "'start_node'": node +nodes.node_1: + kw: from dff.script import Keywords + node: + kw.RESPONSE: "'hey'" diff --git a/tests/parser/TEST_CASES/complex_cases/modular/script.yaml.diff b/tests/parser/TEST_CASES/complex_cases/modular/script.yaml.diff new file mode 100644 index 000000000..5873c8f60 --- /dev/null +++ b/tests/parser/TEST_CASES/complex_cases/modular/script.yaml.diff @@ -0,0 +1,18 @@ + script: + flow: from flows.start import flow + Pipeline: from dff.pipeline import Pipeline ++ start_label: "('start_flow', 'start_node')" +- pipeline: "Pipeline.from_script(script={\n 'start_flow': flow,\n}, start_label=('start_flow', 'start_node'))" +? ------ --- -- - -------- ^^^^^^ ^^ ++ pipeline: "Pipeline.from_script(script={'start_flow': flow}, start_label=start_label)" +? ^ ^ + flows.start: + node: from ..nodes.node_1 import node + flow: + "'start_node'": node + nodes.node_1: + kw: from dff.script import Keywords + node: +- kw.RESPONSE: "'hey'" ++ kw.RESPONSE: "'hey!'" +? 
+ diff --git a/tests/parser/TEST_CASES/core_tutorials/1_basics.json b/tests/parser/TEST_CASES/core_tutorials/1_basics.json new file mode 100644 index 000000000..639827e28 --- /dev/null +++ b/tests/parser/TEST_CASES/core_tutorials/1_basics.json @@ -0,0 +1,350 @@ +{ + "directed": true, + "multigraph": true, + "graph": { + "full_script": { + "1_basics": { + "TRANSITIONS": "from dff.script import TRANSITIONS", + "RESPONSE": "from dff.script import RESPONSE", + "Message": "from dff.script import Message", + "Pipeline": "from dff.pipeline import Pipeline", + "cnd": "import dff.script.conditions", + "toy_script": { + "'greeting_flow'": { + "'start_node'": { + "RESPONSE": "Message()", + "TRANSITIONS": { + "'node1'": "cnd.exact_match(Message(text='Hi'))" + } + }, + "'node1'": { + "RESPONSE": "Message(text='Hi, how are you?')", + "TRANSITIONS": { + "'node2'": "cnd.exact_match(Message(text=\"I'm fine, how are you?\"))" + } + }, + "'node2'": { + "RESPONSE": "Message(text='Good. What do you want to talk about?')", + "TRANSITIONS": { + "'node3'": "cnd.exact_match(Message(text=\"Let's talk about music.\"))" + } + }, + "'node3'": { + "RESPONSE": "Message(text='Sorry, I can not talk about music now.')", + "TRANSITIONS": { + "'node4'": "cnd.exact_match(Message(text='Ok, goodbye.'))" + } + }, + "'node4'": { + "RESPONSE": "Message(text='Bye')", + "TRANSITIONS": { + "'node1'": "cnd.exact_match(Message(text='Hi'))" + } + }, + "'fallback_node'": { + "RESPONSE": "Message(text='Ooops')", + "TRANSITIONS": { + "'node1'": "cnd.exact_match(Message(text='Hi'))" + } + } + } + }, + "pipeline": "Pipeline.from_script(toy_script, start_label=('greeting_flow', 'start_node'), fallback_label=('greeting_flow', 'fallback_node'))" + } + }, + "start_label": [ + "'greeting_flow'", + "'start_node'" + ], + "fallback_label": [ + "'greeting_flow'", + "'fallback_node'" + ] + }, + "nodes": [ + { + "ref": [ + "1_basics", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'start_node'" + ], + "id": [ + 
"NODE", + "'greeting_flow'", + "'start_node'" + ] + }, + { + "ref": [ + "1_basics", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'node1'" + ], + "id": [ + "NODE", + "'greeting_flow'", + "'node1'" + ] + }, + { + "ref": [ + "1_basics", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'node2'" + ], + "id": [ + "NODE", + "'greeting_flow'", + "'node2'" + ] + }, + { + "ref": [ + "1_basics", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'node3'" + ], + "id": [ + "NODE", + "'greeting_flow'", + "'node3'" + ] + }, + { + "ref": [ + "1_basics", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'node4'" + ], + "id": [ + "NODE", + "'greeting_flow'", + "'node4'" + ] + }, + { + "ref": [ + "1_basics", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'fallback_node'" + ], + "id": [ + "NODE", + "'greeting_flow'", + "'fallback_node'" + ] + } + ], + "links": [ + { + "label_ref": [ + "1_basics", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'start_node'", + "value_TRANSITIONS", + "key_'node1'" + ], + "label": "'node1'", + "condition_ref": [ + "1_basics", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'start_node'", + "value_TRANSITIONS", + "value_'node1'" + ], + "condition": "cnd.exact_match(Message(text='Hi'))", + "source": [ + "NODE", + "'greeting_flow'", + "'start_node'" + ], + "target": [ + "NODE", + "'greeting_flow'", + "'node1'" + ], + "key": 0 + }, + { + "label_ref": [ + "1_basics", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'node1'", + "value_TRANSITIONS", + "key_'node2'" + ], + "label": "'node2'", + "condition_ref": [ + "1_basics", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'node1'", + "value_TRANSITIONS", + "value_'node2'" + ], + "condition": "cnd.exact_match(Message(text=\"I'm fine, how are you?\"))", + "source": [ + "NODE", + "'greeting_flow'", + "'node1'" + ], + "target": [ + "NODE", + "'greeting_flow'", + "'node2'" + ], + 
"key": 0 + }, + { + "label_ref": [ + "1_basics", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'node2'", + "value_TRANSITIONS", + "key_'node3'" + ], + "label": "'node3'", + "condition_ref": [ + "1_basics", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'node2'", + "value_TRANSITIONS", + "value_'node3'" + ], + "condition": "cnd.exact_match(Message(text=\"Let's talk about music.\"))", + "source": [ + "NODE", + "'greeting_flow'", + "'node2'" + ], + "target": [ + "NODE", + "'greeting_flow'", + "'node3'" + ], + "key": 0 + }, + { + "label_ref": [ + "1_basics", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'node3'", + "value_TRANSITIONS", + "key_'node4'" + ], + "label": "'node4'", + "condition_ref": [ + "1_basics", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'node3'", + "value_TRANSITIONS", + "value_'node4'" + ], + "condition": "cnd.exact_match(Message(text='Ok, goodbye.'))", + "source": [ + "NODE", + "'greeting_flow'", + "'node3'" + ], + "target": [ + "NODE", + "'greeting_flow'", + "'node4'" + ], + "key": 0 + }, + { + "label_ref": [ + "1_basics", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'node4'", + "value_TRANSITIONS", + "key_'node1'" + ], + "label": "'node1'", + "condition_ref": [ + "1_basics", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'node4'", + "value_TRANSITIONS", + "value_'node1'" + ], + "condition": "cnd.exact_match(Message(text='Hi'))", + "source": [ + "NODE", + "'greeting_flow'", + "'node4'" + ], + "target": [ + "NODE", + "'greeting_flow'", + "'node1'" + ], + "key": 0 + }, + { + "label_ref": [ + "1_basics", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'fallback_node'", + "value_TRANSITIONS", + "key_'node1'" + ], + "label": "'node1'", + "condition_ref": [ + "1_basics", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'fallback_node'", + "value_TRANSITIONS", + "value_'node1'" + ], + "condition": 
"cnd.exact_match(Message(text='Hi'))", + "source": [ + "NODE", + "'greeting_flow'", + "'fallback_node'" + ], + "target": [ + "NODE", + "'greeting_flow'", + "'node1'" + ], + "key": 0 + } + ] +} \ No newline at end of file diff --git a/tests/parser/TEST_CASES/core_tutorials/1_basics.py b/tests/parser/TEST_CASES/core_tutorials/1_basics.py new file mode 100644 index 000000000..ba20877eb --- /dev/null +++ b/tests/parser/TEST_CASES/core_tutorials/1_basics.py @@ -0,0 +1,48 @@ +from dff.script import TRANSITIONS +from dff.script import RESPONSE +from dff.script import Message +from dff.pipeline import Pipeline +import dff.script.conditions as cnd + +toy_script = { + 'greeting_flow': { + 'start_node': { + RESPONSE: Message(), + TRANSITIONS: { + 'node1': cnd.exact_match(Message(text='Hi')), + }, + }, + 'node1': { + RESPONSE: Message(text='Hi, how are you?'), + TRANSITIONS: { + 'node2': cnd.exact_match(Message(text="I'm fine, how are you?")), + }, + }, + 'node2': { + RESPONSE: Message(text='Good. What do you want to talk about?'), + TRANSITIONS: { + 'node3': cnd.exact_match(Message(text="Let's talk about music.")), + }, + }, + 'node3': { + RESPONSE: Message(text='Sorry, I can not talk about music now.'), + TRANSITIONS: { + 'node4': cnd.exact_match(Message(text='Ok, goodbye.')), + }, + }, + 'node4': { + RESPONSE: Message(text='Bye'), + TRANSITIONS: { + 'node1': cnd.exact_match(Message(text='Hi')), + }, + }, + 'fallback_node': { + RESPONSE: Message(text='Ooops'), + TRANSITIONS: { + 'node1': cnd.exact_match(Message(text='Hi')), + }, + }, + }, +} + +pipeline = Pipeline.from_script(toy_script, start_label=('greeting_flow', 'start_node'), fallback_label=('greeting_flow', 'fallback_node')) diff --git a/tests/parser/TEST_CASES/core_tutorials/1_basics.yaml b/tests/parser/TEST_CASES/core_tutorials/1_basics.yaml new file mode 100644 index 000000000..5d3cf8cb4 --- /dev/null +++ b/tests/parser/TEST_CASES/core_tutorials/1_basics.yaml @@ -0,0 +1,33 @@ +1_basics: + TRANSITIONS: from 
dff.script import TRANSITIONS + RESPONSE: from dff.script import RESPONSE + Message: from dff.script import Message + Pipeline: from dff.pipeline import Pipeline + cnd: import dff.script.conditions + toy_script: + "'greeting_flow'": + "'start_node'": + RESPONSE: Message() + TRANSITIONS: + "'node1'": cnd.exact_match(Message(text='Hi')) + "'node1'": + RESPONSE: Message(text='Hi, how are you?') + TRANSITIONS: + "'node2'": cnd.exact_match(Message(text="I'm fine, how are you?")) + "'node2'": + RESPONSE: Message(text='Good. What do you want to talk about?') + TRANSITIONS: + "'node3'": cnd.exact_match(Message(text="Let's talk about music.")) + "'node3'": + RESPONSE: Message(text='Sorry, I can not talk about music now.') + TRANSITIONS: + "'node4'": cnd.exact_match(Message(text='Ok, goodbye.')) + "'node4'": + RESPONSE: Message(text='Bye') + TRANSITIONS: + "'node1'": cnd.exact_match(Message(text='Hi')) + "'fallback_node'": + RESPONSE: Message(text='Ooops') + TRANSITIONS: + "'node1'": cnd.exact_match(Message(text='Hi')) + pipeline: Pipeline.from_script(toy_script, start_label=('greeting_flow', 'start_node'), fallback_label=('greeting_flow', 'fallback_node')) diff --git a/tests/parser/TEST_CASES/core_tutorials/2_conditions.json b/tests/parser/TEST_CASES/core_tutorials/2_conditions.json new file mode 100644 index 000000000..94188df91 --- /dev/null +++ b/tests/parser/TEST_CASES/core_tutorials/2_conditions.json @@ -0,0 +1,385 @@ +{ + "directed": true, + "multigraph": true, + "graph": { + "full_script": { + "2_conditions": { + "re": "import re", + "TRANSITIONS": "from dff.script import TRANSITIONS", + "RESPONSE": "from dff.script import RESPONSE", + "Message": "from dff.script import Message", + "cnd": "import dff.script.conditions", + "Pipeline": "from dff.pipeline import Pipeline", + "toy_script": { + "'greeting_flow'": { + "'start_node'": { + "RESPONSE": "Message()", + "TRANSITIONS": { + "'node1'": "cnd.exact_match(Message(text='Hi'))" + } + }, + "'node1'": { + "RESPONSE": 
"Message(text='Hi, how are you?')", + "TRANSITIONS": { + "'node2'": "cnd.regexp('.*how are you', re.IGNORECASE)" + } + }, + "'node2'": { + "RESPONSE": "Message(text='Good. What do you want to talk about?')", + "TRANSITIONS": { + "'node3'": "cnd.all([cnd.regexp('talk'), cnd.regexp('about.*music')])" + } + }, + "'node3'": { + "RESPONSE": "Message(text='Sorry, I can not talk about music now.')", + "TRANSITIONS": { + "'node4'": "cnd.regexp(re.compile('Ok, goodbye.'))" + } + }, + "'node4'": { + "RESPONSE": "Message(text='bye')", + "TRANSITIONS": { + "'node1'": "cnd.any([hi_lower_case_condition, cnd.exact_match(Message(text='hello'))])" + } + }, + "'fallback_node'": { + "RESPONSE": "Message(text='Ooops')", + "TRANSITIONS": { + "'node1'": "complex_user_answer_condition", + "'fallback_node'": "predetermined_condition(True)" + } + } + } + }, + "pipeline": "Pipeline.from_script(toy_script, start_label=('greeting_flow', 'start_node'), fallback_label=('greeting_flow', 'fallback_node'))" + } + }, + "start_label": [ + "'greeting_flow'", + "'start_node'" + ], + "fallback_label": [ + "'greeting_flow'", + "'fallback_node'" + ] + }, + "nodes": [ + { + "ref": [ + "2_conditions", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'start_node'" + ], + "id": [ + "NODE", + "'greeting_flow'", + "'start_node'" + ] + }, + { + "ref": [ + "2_conditions", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'node1'" + ], + "id": [ + "NODE", + "'greeting_flow'", + "'node1'" + ] + }, + { + "ref": [ + "2_conditions", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'node2'" + ], + "id": [ + "NODE", + "'greeting_flow'", + "'node2'" + ] + }, + { + "ref": [ + "2_conditions", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'node3'" + ], + "id": [ + "NODE", + "'greeting_flow'", + "'node3'" + ] + }, + { + "ref": [ + "2_conditions", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'node4'" + ], + "id": [ + "NODE", + "'greeting_flow'", + 
"'node4'" + ] + }, + { + "ref": [ + "2_conditions", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'fallback_node'" + ], + "id": [ + "NODE", + "'greeting_flow'", + "'fallback_node'" + ] + } + ], + "links": [ + { + "label_ref": [ + "2_conditions", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'start_node'", + "value_TRANSITIONS", + "key_'node1'" + ], + "label": "'node1'", + "condition_ref": [ + "2_conditions", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'start_node'", + "value_TRANSITIONS", + "value_'node1'" + ], + "condition": "cnd.exact_match(Message(text='Hi'))", + "source": [ + "NODE", + "'greeting_flow'", + "'start_node'" + ], + "target": [ + "NODE", + "'greeting_flow'", + "'node1'" + ], + "key": 0 + }, + { + "label_ref": [ + "2_conditions", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'node1'", + "value_TRANSITIONS", + "key_'node2'" + ], + "label": "'node2'", + "condition_ref": [ + "2_conditions", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'node1'", + "value_TRANSITIONS", + "value_'node2'" + ], + "condition": "cnd.regexp('.*how are you', re.IGNORECASE)", + "source": [ + "NODE", + "'greeting_flow'", + "'node1'" + ], + "target": [ + "NODE", + "'greeting_flow'", + "'node2'" + ], + "key": 0 + }, + { + "label_ref": [ + "2_conditions", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'node2'", + "value_TRANSITIONS", + "key_'node3'" + ], + "label": "'node3'", + "condition_ref": [ + "2_conditions", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'node2'", + "value_TRANSITIONS", + "value_'node3'" + ], + "condition": "cnd.all([cnd.regexp('talk'), cnd.regexp('about.*music')])", + "source": [ + "NODE", + "'greeting_flow'", + "'node2'" + ], + "target": [ + "NODE", + "'greeting_flow'", + "'node3'" + ], + "key": 0 + }, + { + "label_ref": [ + "2_conditions", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'node3'", + "value_TRANSITIONS", + 
"key_'node4'" + ], + "label": "'node4'", + "condition_ref": [ + "2_conditions", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'node3'", + "value_TRANSITIONS", + "value_'node4'" + ], + "condition": "cnd.regexp(re.compile('Ok, goodbye.'))", + "source": [ + "NODE", + "'greeting_flow'", + "'node3'" + ], + "target": [ + "NODE", + "'greeting_flow'", + "'node4'" + ], + "key": 0 + }, + { + "label_ref": [ + "2_conditions", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'node4'", + "value_TRANSITIONS", + "key_'node1'" + ], + "label": "'node1'", + "condition_ref": [ + "2_conditions", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'node4'", + "value_TRANSITIONS", + "value_'node1'" + ], + "condition": "cnd.any([hi_lower_case_condition, cnd.exact_match(Message(text='hello'))])", + "source": [ + "NODE", + "'greeting_flow'", + "'node4'" + ], + "target": [ + "NODE", + "'greeting_flow'", + "'node1'" + ], + "key": 0 + }, + { + "label_ref": [ + "2_conditions", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'fallback_node'", + "value_TRANSITIONS", + "key_'node1'" + ], + "label": "'node1'", + "condition_ref": [ + "2_conditions", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'fallback_node'", + "value_TRANSITIONS", + "value_'node1'" + ], + "condition": "complex_user_answer_condition", + "source": [ + "NODE", + "'greeting_flow'", + "'fallback_node'" + ], + "target": [ + "NODE", + "'greeting_flow'", + "'node1'" + ], + "key": 0 + }, + { + "label_ref": [ + "2_conditions", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'fallback_node'", + "value_TRANSITIONS", + "key_'fallback_node'" + ], + "label": "'fallback_node'", + "condition_ref": [ + "2_conditions", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'fallback_node'", + "value_TRANSITIONS", + "value_'fallback_node'" + ], + "condition": "predetermined_condition(True)", + "source": [ + "NODE", + "'greeting_flow'", + 
"'fallback_node'" + ], + "target": [ + "NODE", + "'greeting_flow'", + "'fallback_node'" + ], + "key": 0 + } + ] +} \ No newline at end of file diff --git a/tests/parser/TEST_CASES/core_tutorials/2_conditions.py b/tests/parser/TEST_CASES/core_tutorials/2_conditions.py new file mode 100644 index 000000000..d4b8aa322 --- /dev/null +++ b/tests/parser/TEST_CASES/core_tutorials/2_conditions.py @@ -0,0 +1,50 @@ +import re +from dff.script import TRANSITIONS +from dff.script import RESPONSE +from dff.script import Message +import dff.script.conditions as cnd +from dff.pipeline import Pipeline + +toy_script = { + 'greeting_flow': { + 'start_node': { + RESPONSE: Message(), + TRANSITIONS: { + 'node1': cnd.exact_match(Message(text='Hi')), + }, + }, + 'node1': { + RESPONSE: Message(text='Hi, how are you?'), + TRANSITIONS: { + 'node2': cnd.regexp('.*how are you', re.IGNORECASE), + }, + }, + 'node2': { + RESPONSE: Message(text='Good. What do you want to talk about?'), + TRANSITIONS: { + 'node3': cnd.all([cnd.regexp('talk'), cnd.regexp('about.*music')]), + }, + }, + 'node3': { + RESPONSE: Message(text='Sorry, I can not talk about music now.'), + TRANSITIONS: { + 'node4': cnd.regexp(re.compile('Ok, goodbye.')), + }, + }, + 'node4': { + RESPONSE: Message(text='bye'), + TRANSITIONS: { + 'node1': cnd.any([hi_lower_case_condition, cnd.exact_match(Message(text='hello'))]), + }, + }, + 'fallback_node': { + RESPONSE: Message(text='Ooops'), + TRANSITIONS: { + 'node1': complex_user_answer_condition, + 'fallback_node': predetermined_condition(True), + }, + }, + }, +} + +pipeline = Pipeline.from_script(toy_script, start_label=('greeting_flow', 'start_node'), fallback_label=('greeting_flow', 'fallback_node')) diff --git a/tests/parser/TEST_CASES/core_tutorials/2_conditions.yaml b/tests/parser/TEST_CASES/core_tutorials/2_conditions.yaml new file mode 100644 index 000000000..a2112f441 --- /dev/null +++ b/tests/parser/TEST_CASES/core_tutorials/2_conditions.yaml @@ -0,0 +1,35 @@ +2_conditions: + 
re: import re + TRANSITIONS: from dff.script import TRANSITIONS + RESPONSE: from dff.script import RESPONSE + Message: from dff.script import Message + cnd: import dff.script.conditions + Pipeline: from dff.pipeline import Pipeline + toy_script: + "'greeting_flow'": + "'start_node'": + RESPONSE: Message() + TRANSITIONS: + "'node1'": cnd.exact_match(Message(text='Hi')) + "'node1'": + RESPONSE: Message(text='Hi, how are you?') + TRANSITIONS: + "'node2'": cnd.regexp('.*how are you', re.IGNORECASE) + "'node2'": + RESPONSE: Message(text='Good. What do you want to talk about?') + TRANSITIONS: + "'node3'": cnd.all([cnd.regexp('talk'), cnd.regexp('about.*music')]) + "'node3'": + RESPONSE: Message(text='Sorry, I can not talk about music now.') + TRANSITIONS: + "'node4'": cnd.regexp(re.compile('Ok, goodbye.')) + "'node4'": + RESPONSE: Message(text='bye') + TRANSITIONS: + "'node1'": cnd.any([hi_lower_case_condition, cnd.exact_match(Message(text='hello'))]) + "'fallback_node'": + RESPONSE: Message(text='Ooops') + TRANSITIONS: + "'node1'": complex_user_answer_condition + "'fallback_node'": predetermined_condition(True) + pipeline: Pipeline.from_script(toy_script, start_label=('greeting_flow', 'start_node'), fallback_label=('greeting_flow', 'fallback_node')) diff --git a/tests/parser/TEST_CASES/core_tutorials/3_responses.json b/tests/parser/TEST_CASES/core_tutorials/3_responses.json new file mode 100644 index 000000000..92300c565 --- /dev/null +++ b/tests/parser/TEST_CASES/core_tutorials/3_responses.json @@ -0,0 +1,351 @@ +{ + "directed": true, + "multigraph": true, + "graph": { + "full_script": { + "3_responses": { + "TRANSITIONS": "from dff.script import TRANSITIONS", + "RESPONSE": "from dff.script import RESPONSE", + "Message": "from dff.script import Message", + "rsp": "import dff.script.responses", + "cnd": "import dff.script.conditions", + "Pipeline": "from dff.pipeline import Pipeline", + "toy_script": { + "'greeting_flow'": { + "'start_node'": { + "RESPONSE": 
"Message()", + "TRANSITIONS": { + "'node1'": "cnd.exact_match(Message(text='Hi'))" + } + }, + "'node1'": { + "RESPONSE": "rsp.choice([Message(text='Hi, what is up?'), Message(text='Hello, how are you?')])", + "TRANSITIONS": { + "'node2'": "cnd.exact_match(Message(text=\"I'm fine, how are you?\"))" + } + }, + "'node2'": { + "RESPONSE": "Message(text='Good. What do you want to talk about?')", + "TRANSITIONS": { + "'node3'": "cnd.exact_match(Message(text=\"Let's talk about music.\"))" + } + }, + "'node3'": { + "RESPONSE": "cannot_talk_about_topic_response", + "TRANSITIONS": { + "'node4'": "cnd.exact_match(Message(text='Ok, goodbye.'))" + } + }, + "'node4'": { + "RESPONSE": "upper_case_response(Message(text='bye'))", + "TRANSITIONS": { + "'node1'": "cnd.exact_match(Message(text='Hi'))" + } + }, + "'fallback_node'": { + "RESPONSE": "fallback_trace_response", + "TRANSITIONS": { + "'node1'": "cnd.exact_match(Message(text='Hi'))" + } + } + } + }, + "pipeline": "Pipeline.from_script(toy_script, start_label=('greeting_flow', 'start_node'), fallback_label=('greeting_flow', 'fallback_node'))" + } + }, + "start_label": [ + "'greeting_flow'", + "'start_node'" + ], + "fallback_label": [ + "'greeting_flow'", + "'fallback_node'" + ] + }, + "nodes": [ + { + "ref": [ + "3_responses", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'start_node'" + ], + "id": [ + "NODE", + "'greeting_flow'", + "'start_node'" + ] + }, + { + "ref": [ + "3_responses", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'node1'" + ], + "id": [ + "NODE", + "'greeting_flow'", + "'node1'" + ] + }, + { + "ref": [ + "3_responses", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'node2'" + ], + "id": [ + "NODE", + "'greeting_flow'", + "'node2'" + ] + }, + { + "ref": [ + "3_responses", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'node3'" + ], + "id": [ + "NODE", + "'greeting_flow'", + "'node3'" + ] + }, + { + "ref": [ + "3_responses", + "toy_script", + 
"value", + "value_'greeting_flow'", + "value_'node4'" + ], + "id": [ + "NODE", + "'greeting_flow'", + "'node4'" + ] + }, + { + "ref": [ + "3_responses", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'fallback_node'" + ], + "id": [ + "NODE", + "'greeting_flow'", + "'fallback_node'" + ] + } + ], + "links": [ + { + "label_ref": [ + "3_responses", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'start_node'", + "value_TRANSITIONS", + "key_'node1'" + ], + "label": "'node1'", + "condition_ref": [ + "3_responses", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'start_node'", + "value_TRANSITIONS", + "value_'node1'" + ], + "condition": "cnd.exact_match(Message(text='Hi'))", + "source": [ + "NODE", + "'greeting_flow'", + "'start_node'" + ], + "target": [ + "NODE", + "'greeting_flow'", + "'node1'" + ], + "key": 0 + }, + { + "label_ref": [ + "3_responses", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'node1'", + "value_TRANSITIONS", + "key_'node2'" + ], + "label": "'node2'", + "condition_ref": [ + "3_responses", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'node1'", + "value_TRANSITIONS", + "value_'node2'" + ], + "condition": "cnd.exact_match(Message(text=\"I'm fine, how are you?\"))", + "source": [ + "NODE", + "'greeting_flow'", + "'node1'" + ], + "target": [ + "NODE", + "'greeting_flow'", + "'node2'" + ], + "key": 0 + }, + { + "label_ref": [ + "3_responses", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'node2'", + "value_TRANSITIONS", + "key_'node3'" + ], + "label": "'node3'", + "condition_ref": [ + "3_responses", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'node2'", + "value_TRANSITIONS", + "value_'node3'" + ], + "condition": "cnd.exact_match(Message(text=\"Let's talk about music.\"))", + "source": [ + "NODE", + "'greeting_flow'", + "'node2'" + ], + "target": [ + "NODE", + "'greeting_flow'", + "'node3'" + ], + "key": 0 + }, + { + "label_ref": [ + 
"3_responses", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'node3'", + "value_TRANSITIONS", + "key_'node4'" + ], + "label": "'node4'", + "condition_ref": [ + "3_responses", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'node3'", + "value_TRANSITIONS", + "value_'node4'" + ], + "condition": "cnd.exact_match(Message(text='Ok, goodbye.'))", + "source": [ + "NODE", + "'greeting_flow'", + "'node3'" + ], + "target": [ + "NODE", + "'greeting_flow'", + "'node4'" + ], + "key": 0 + }, + { + "label_ref": [ + "3_responses", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'node4'", + "value_TRANSITIONS", + "key_'node1'" + ], + "label": "'node1'", + "condition_ref": [ + "3_responses", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'node4'", + "value_TRANSITIONS", + "value_'node1'" + ], + "condition": "cnd.exact_match(Message(text='Hi'))", + "source": [ + "NODE", + "'greeting_flow'", + "'node4'" + ], + "target": [ + "NODE", + "'greeting_flow'", + "'node1'" + ], + "key": 0 + }, + { + "label_ref": [ + "3_responses", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'fallback_node'", + "value_TRANSITIONS", + "key_'node1'" + ], + "label": "'node1'", + "condition_ref": [ + "3_responses", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'fallback_node'", + "value_TRANSITIONS", + "value_'node1'" + ], + "condition": "cnd.exact_match(Message(text='Hi'))", + "source": [ + "NODE", + "'greeting_flow'", + "'fallback_node'" + ], + "target": [ + "NODE", + "'greeting_flow'", + "'node1'" + ], + "key": 0 + } + ] +} \ No newline at end of file diff --git a/tests/parser/TEST_CASES/core_tutorials/3_responses.py b/tests/parser/TEST_CASES/core_tutorials/3_responses.py new file mode 100644 index 000000000..e4007142b --- /dev/null +++ b/tests/parser/TEST_CASES/core_tutorials/3_responses.py @@ -0,0 +1,49 @@ +from dff.script import TRANSITIONS +from dff.script import RESPONSE +from dff.script import Message +import 
dff.script.responses as rsp +import dff.script.conditions as cnd +from dff.pipeline import Pipeline + +toy_script = { + 'greeting_flow': { + 'start_node': { + RESPONSE: Message(), + TRANSITIONS: { + 'node1': cnd.exact_match(Message(text='Hi')), + }, + }, + 'node1': { + RESPONSE: rsp.choice([Message(text='Hi, what is up?'), Message(text='Hello, how are you?')]), + TRANSITIONS: { + 'node2': cnd.exact_match(Message(text="I'm fine, how are you?")), + }, + }, + 'node2': { + RESPONSE: Message(text='Good. What do you want to talk about?'), + TRANSITIONS: { + 'node3': cnd.exact_match(Message(text="Let's talk about music.")), + }, + }, + 'node3': { + RESPONSE: cannot_talk_about_topic_response, + TRANSITIONS: { + 'node4': cnd.exact_match(Message(text='Ok, goodbye.')), + }, + }, + 'node4': { + RESPONSE: upper_case_response(Message(text='bye')), + TRANSITIONS: { + 'node1': cnd.exact_match(Message(text='Hi')), + }, + }, + 'fallback_node': { + RESPONSE: fallback_trace_response, + TRANSITIONS: { + 'node1': cnd.exact_match(Message(text='Hi')), + }, + }, + }, +} + +pipeline = Pipeline.from_script(toy_script, start_label=('greeting_flow', 'start_node'), fallback_label=('greeting_flow', 'fallback_node')) diff --git a/tests/parser/TEST_CASES/core_tutorials/3_responses.yaml b/tests/parser/TEST_CASES/core_tutorials/3_responses.yaml new file mode 100644 index 000000000..6ed610dce --- /dev/null +++ b/tests/parser/TEST_CASES/core_tutorials/3_responses.yaml @@ -0,0 +1,34 @@ +3_responses: + TRANSITIONS: from dff.script import TRANSITIONS + RESPONSE: from dff.script import RESPONSE + Message: from dff.script import Message + rsp: import dff.script.responses + cnd: import dff.script.conditions + Pipeline: from dff.pipeline import Pipeline + toy_script: + "'greeting_flow'": + "'start_node'": + RESPONSE: Message() + TRANSITIONS: + "'node1'": cnd.exact_match(Message(text='Hi')) + "'node1'": + RESPONSE: rsp.choice([Message(text='Hi, what is up?'), Message(text='Hello, how are you?')]) + 
TRANSITIONS: + "'node2'": cnd.exact_match(Message(text="I'm fine, how are you?")) + "'node2'": + RESPONSE: Message(text='Good. What do you want to talk about?') + TRANSITIONS: + "'node3'": cnd.exact_match(Message(text="Let's talk about music.")) + "'node3'": + RESPONSE: cannot_talk_about_topic_response + TRANSITIONS: + "'node4'": cnd.exact_match(Message(text='Ok, goodbye.')) + "'node4'": + RESPONSE: upper_case_response(Message(text='bye')) + TRANSITIONS: + "'node1'": cnd.exact_match(Message(text='Hi')) + "'fallback_node'": + RESPONSE: fallback_trace_response + TRANSITIONS: + "'node1'": cnd.exact_match(Message(text='Hi')) + pipeline: Pipeline.from_script(toy_script, start_label=('greeting_flow', 'start_node'), fallback_label=('greeting_flow', 'fallback_node')) diff --git a/tests/parser/TEST_CASES/core_tutorials/4_transitions.json b/tests/parser/TEST_CASES/core_tutorials/4_transitions.json new file mode 100644 index 000000000..a26dd2cfe --- /dev/null +++ b/tests/parser/TEST_CASES/core_tutorials/4_transitions.json @@ -0,0 +1,1530 @@ +{ + "directed": true, + "multigraph": true, + "graph": { + "full_script": { + "4_transitions": { + "re": "import re", + "TRANSITIONS": "from dff.script import TRANSITIONS", + "RESPONSE": "from dff.script import RESPONSE", + "Message": "from dff.script import Message", + "cnd": "import dff.script.conditions", + "lbl": "import dff.script.labels", + "Pipeline": "from dff.pipeline import Pipeline", + "toy_script": { + "'global_flow'": { + "'start_node'": { + "RESPONSE": "Message()", + "TRANSITIONS": { + "('music_flow', 'node1')": "cnd.regexp('talk about music')", + "('greeting_flow', 'node1')": "cnd.regexp('hi|hello', re.IGNORECASE)", + "'fallback_node'": "cnd.true()" + } + }, + "'fallback_node'": { + "RESPONSE": "Message(text='Ooops')", + "TRANSITIONS": { + "('music_flow', 'node1')": "cnd.regexp('talk about music')", + "('greeting_flow', 'node1')": "cnd.regexp('hi|hello', re.IGNORECASE)", + "lbl.previous()": "cnd.regexp('previous', 
re.IGNORECASE)", + "lbl.repeat()": "cnd.true()" + } + } + }, + "'greeting_flow'": { + "'node1'": { + "RESPONSE": "Message(text='Hi, how are you?')", + "TRANSITIONS": { + "('global_flow', 'fallback_node', 0.1)": "cnd.true()", + "'node2'": "cnd.regexp('how are you')" + } + }, + "'node2'": { + "RESPONSE": "Message(text='Good. What do you want to talk about?')", + "TRANSITIONS": { + "lbl.to_fallback(0.1)": "cnd.true()", + "lbl.forward(0.5)": "cnd.regexp('talk about')", + "('music_flow', 'node1')": "cnd.regexp('talk about music')", + "lbl.previous()": "cnd.regexp('previous', re.IGNORECASE)" + } + }, + "'node3'": { + "RESPONSE": "Message(text='Sorry, I can not talk about that now.')", + "TRANSITIONS": { + "lbl.forward()": "cnd.regexp('bye')" + } + }, + "'node4'": { + "RESPONSE": "Message(text='Bye')", + "TRANSITIONS": { + "'node1'": "cnd.regexp('hi|hello', re.IGNORECASE)", + "lbl.to_fallback()": "cnd.true()" + } + } + }, + "'music_flow'": { + "'node1'": { + "RESPONSE": "Message(text='I love `System of a Down` group, would you like to talk about it?')", + "TRANSITIONS": { + "lbl.forward()": "cnd.regexp('yes|yep|ok', re.IGNORECASE)", + "lbl.to_fallback()": "cnd.true()" + } + }, + "'node2'": { + "RESPONSE": "Message(text='System of a Down is an Armenian-American heavy metal band formed in 1994.')", + "TRANSITIONS": { + "lbl.forward()": "cnd.regexp('next', re.IGNORECASE)", + "lbl.repeat()": "cnd.regexp('repeat', re.IGNORECASE)", + "lbl.to_fallback()": "cnd.true()" + } + }, + "'node3'": { + "RESPONSE": "Message(text='The band achieved commercial success with the release of five studio albums.')", + "TRANSITIONS": { + "lbl.forward()": "cnd.regexp('next', re.IGNORECASE)", + "lbl.backward()": "cnd.regexp('back', re.IGNORECASE)", + "lbl.repeat()": "cnd.regexp('repeat', re.IGNORECASE)", + "lbl.to_fallback()": "cnd.true()" + } + }, + "'node4'": { + "RESPONSE": "Message(text=\"That's all what I know.\")", + "TRANSITIONS": { + "greeting_flow_n2_transition": "cnd.regexp('next', 
re.IGNORECASE)", + "high_priority_node_transition('greeting_flow', 'node4')": "cnd.regexp('next time', re.IGNORECASE)", + "lbl.to_fallback()": "cnd.true()" + } + } + } + }, + "pipeline": "Pipeline.from_script(toy_script, start_label=('global_flow', 'start_node'), fallback_label=('global_flow', 'fallback_node'))" + } + }, + "start_label": [ + "'global_flow'", + "'start_node'" + ], + "fallback_label": [ + "'global_flow'", + "'fallback_node'" + ] + }, + "nodes": [ + { + "ref": [ + "4_transitions", + "toy_script", + "value", + "value_'global_flow'", + "value_'start_node'" + ], + "id": [ + "NODE", + "'global_flow'", + "'start_node'" + ] + }, + { + "ref": [ + "4_transitions", + "toy_script", + "value", + "value_'music_flow'", + "value_'node1'" + ], + "id": [ + "NODE", + "'music_flow'", + "'node1'" + ] + }, + { + "ref": [ + "4_transitions", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'node1'" + ], + "id": [ + "NODE", + "'greeting_flow'", + "'node1'" + ] + }, + { + "ref": [ + "4_transitions", + "toy_script", + "value", + "value_'global_flow'", + "value_'fallback_node'" + ], + "id": [ + "NODE", + "'global_flow'", + "'fallback_node'" + ] + }, + { + "id": [ + "LABEL", + "previous", + [ + "priority", + "None" + ], + [ + "args", + "()" + ], + [ + "kwargs", + "{}" + ] + ] + }, + { + "id": [ + "LABEL", + "repeat", + [ + "priority", + "None" + ], + [ + "args", + "()" + ], + [ + "kwargs", + "{}" + ] + ] + }, + { + "ref": [ + "4_transitions", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'node2'" + ], + "id": [ + "NODE", + "'greeting_flow'", + "'node2'" + ] + }, + { + "id": [ + "LABEL", + "to_fallback", + [ + "priority", + "0.1" + ], + [ + "args", + "()" + ], + [ + "kwargs", + "{}" + ] + ] + }, + { + "id": [ + "LABEL", + "forward", + [ + "priority", + "0.5" + ], + [ + "cyclicality_flag", + "True" + ], + [ + "args", + "()" + ], + [ + "kwargs", + "{}" + ] + ] + }, + { + "ref": [ + "4_transitions", + "toy_script", + "value", + 
"value_'greeting_flow'", + "value_'node3'" + ], + "id": [ + "NODE", + "'greeting_flow'", + "'node3'" + ] + }, + { + "id": [ + "LABEL", + "forward", + [ + "priority", + "None" + ], + [ + "cyclicality_flag", + "True" + ], + [ + "args", + "()" + ], + [ + "kwargs", + "{}" + ] + ] + }, + { + "ref": [ + "4_transitions", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'node4'" + ], + "id": [ + "NODE", + "'greeting_flow'", + "'node4'" + ] + }, + { + "id": [ + "LABEL", + "to_fallback", + [ + "priority", + "None" + ], + [ + "args", + "()" + ], + [ + "kwargs", + "{}" + ] + ] + }, + { + "ref": [ + "4_transitions", + "toy_script", + "value", + "value_'music_flow'", + "value_'node2'" + ], + "id": [ + "NODE", + "'music_flow'", + "'node2'" + ] + }, + { + "ref": [ + "4_transitions", + "toy_script", + "value", + "value_'music_flow'", + "value_'node3'" + ], + "id": [ + "NODE", + "'music_flow'", + "'node3'" + ] + }, + { + "id": [ + "LABEL", + "backward", + [ + "priority", + "None" + ], + [ + "cyclicality_flag", + "True" + ], + [ + "args", + "()" + ], + [ + "kwargs", + "{}" + ] + ] + }, + { + "ref": [ + "4_transitions", + "toy_script", + "value", + "value_'music_flow'", + "value_'node4'" + ], + "id": [ + "NODE", + "'music_flow'", + "'node4'" + ] + }, + { + "id": [ + "NONE" + ] + } + ], + "links": [ + { + "label_ref": [ + "4_transitions", + "toy_script", + "value", + "value_'global_flow'", + "value_'start_node'", + "value_TRANSITIONS", + "key_('music_flow', 'node1')" + ], + "label": "('music_flow', 'node1')", + "condition_ref": [ + "4_transitions", + "toy_script", + "value", + "value_'global_flow'", + "value_'start_node'", + "value_TRANSITIONS", + "value_('music_flow', 'node1')" + ], + "condition": "cnd.regexp('talk about music')", + "source": [ + "NODE", + "'global_flow'", + "'start_node'" + ], + "target": [ + "NODE", + "'music_flow'", + "'node1'" + ], + "key": 0 + }, + { + "label_ref": [ + "4_transitions", + "toy_script", + "value", + "value_'global_flow'", + 
"value_'start_node'", + "value_TRANSITIONS", + "key_('greeting_flow', 'node1')" + ], + "label": "('greeting_flow', 'node1')", + "condition_ref": [ + "4_transitions", + "toy_script", + "value", + "value_'global_flow'", + "value_'start_node'", + "value_TRANSITIONS", + "value_('greeting_flow', 'node1')" + ], + "condition": "cnd.regexp('hi|hello', re.IGNORECASE)", + "source": [ + "NODE", + "'global_flow'", + "'start_node'" + ], + "target": [ + "NODE", + "'greeting_flow'", + "'node1'" + ], + "key": 0 + }, + { + "label_ref": [ + "4_transitions", + "toy_script", + "value", + "value_'global_flow'", + "value_'start_node'", + "value_TRANSITIONS", + "key_'fallback_node'" + ], + "label": "'fallback_node'", + "condition_ref": [ + "4_transitions", + "toy_script", + "value", + "value_'global_flow'", + "value_'start_node'", + "value_TRANSITIONS", + "value_'fallback_node'" + ], + "condition": "cnd.true()", + "source": [ + "NODE", + "'global_flow'", + "'start_node'" + ], + "target": [ + "NODE", + "'global_flow'", + "'fallback_node'" + ], + "key": 0 + }, + { + "label_ref": [ + "4_transitions", + "toy_script", + "value", + "value_'music_flow'", + "value_'node1'", + "value_TRANSITIONS", + "key_lbl.forward()" + ], + "label": "lbl.forward()", + "condition_ref": [ + "4_transitions", + "toy_script", + "value", + "value_'music_flow'", + "value_'node1'", + "value_TRANSITIONS", + "value_lbl.forward()" + ], + "condition": "cnd.regexp('yes|yep|ok', re.IGNORECASE)", + "source": [ + "NODE", + "'music_flow'", + "'node1'" + ], + "target": [ + "LABEL", + "forward", + [ + "priority", + "None" + ], + [ + "cyclicality_flag", + "True" + ], + [ + "args", + "()" + ], + [ + "kwargs", + "{}" + ] + ], + "key": 0 + }, + { + "label_ref": [ + "4_transitions", + "toy_script", + "value", + "value_'music_flow'", + "value_'node1'", + "value_TRANSITIONS", + "key_lbl.to_fallback()" + ], + "label": "lbl.to_fallback()", + "condition_ref": [ + "4_transitions", + "toy_script", + "value", + "value_'music_flow'", + 
"value_'node1'", + "value_TRANSITIONS", + "value_lbl.to_fallback()" + ], + "condition": "cnd.true()", + "source": [ + "NODE", + "'music_flow'", + "'node1'" + ], + "target": [ + "LABEL", + "to_fallback", + [ + "priority", + "None" + ], + [ + "args", + "()" + ], + [ + "kwargs", + "{}" + ] + ], + "key": 0 + }, + { + "label_ref": [ + "4_transitions", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'node1'", + "value_TRANSITIONS", + "key_('global_flow', 'fallback_node', 0.1)" + ], + "label": "('global_flow', 'fallback_node', 0.1)", + "condition_ref": [ + "4_transitions", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'node1'", + "value_TRANSITIONS", + "value_('global_flow', 'fallback_node', 0.1)" + ], + "condition": "cnd.true()", + "source": [ + "NODE", + "'greeting_flow'", + "'node1'" + ], + "target": [ + "NODE", + "'global_flow'", + "'fallback_node'" + ], + "key": 0 + }, + { + "label_ref": [ + "4_transitions", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'node1'", + "value_TRANSITIONS", + "key_'node2'" + ], + "label": "'node2'", + "condition_ref": [ + "4_transitions", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'node1'", + "value_TRANSITIONS", + "value_'node2'" + ], + "condition": "cnd.regexp('how are you')", + "source": [ + "NODE", + "'greeting_flow'", + "'node1'" + ], + "target": [ + "NODE", + "'greeting_flow'", + "'node2'" + ], + "key": 0 + }, + { + "label_ref": [ + "4_transitions", + "toy_script", + "value", + "value_'global_flow'", + "value_'fallback_node'", + "value_TRANSITIONS", + "key_('music_flow', 'node1')" + ], + "label": "('music_flow', 'node1')", + "condition_ref": [ + "4_transitions", + "toy_script", + "value", + "value_'global_flow'", + "value_'fallback_node'", + "value_TRANSITIONS", + "value_('music_flow', 'node1')" + ], + "condition": "cnd.regexp('talk about music')", + "source": [ + "NODE", + "'global_flow'", + "'fallback_node'" + ], + "target": [ + "NODE", + "'music_flow'", + 
"'node1'" + ], + "key": 0 + }, + { + "label_ref": [ + "4_transitions", + "toy_script", + "value", + "value_'global_flow'", + "value_'fallback_node'", + "value_TRANSITIONS", + "key_('greeting_flow', 'node1')" + ], + "label": "('greeting_flow', 'node1')", + "condition_ref": [ + "4_transitions", + "toy_script", + "value", + "value_'global_flow'", + "value_'fallback_node'", + "value_TRANSITIONS", + "value_('greeting_flow', 'node1')" + ], + "condition": "cnd.regexp('hi|hello', re.IGNORECASE)", + "source": [ + "NODE", + "'global_flow'", + "'fallback_node'" + ], + "target": [ + "NODE", + "'greeting_flow'", + "'node1'" + ], + "key": 0 + }, + { + "label_ref": [ + "4_transitions", + "toy_script", + "value", + "value_'global_flow'", + "value_'fallback_node'", + "value_TRANSITIONS", + "key_lbl.previous()" + ], + "label": "lbl.previous()", + "condition_ref": [ + "4_transitions", + "toy_script", + "value", + "value_'global_flow'", + "value_'fallback_node'", + "value_TRANSITIONS", + "value_lbl.previous()" + ], + "condition": "cnd.regexp('previous', re.IGNORECASE)", + "source": [ + "NODE", + "'global_flow'", + "'fallback_node'" + ], + "target": [ + "LABEL", + "previous", + [ + "priority", + "None" + ], + [ + "args", + "()" + ], + [ + "kwargs", + "{}" + ] + ], + "key": 0 + }, + { + "label_ref": [ + "4_transitions", + "toy_script", + "value", + "value_'global_flow'", + "value_'fallback_node'", + "value_TRANSITIONS", + "key_lbl.repeat()" + ], + "label": "lbl.repeat()", + "condition_ref": [ + "4_transitions", + "toy_script", + "value", + "value_'global_flow'", + "value_'fallback_node'", + "value_TRANSITIONS", + "value_lbl.repeat()" + ], + "condition": "cnd.true()", + "source": [ + "NODE", + "'global_flow'", + "'fallback_node'" + ], + "target": [ + "LABEL", + "repeat", + [ + "priority", + "None" + ], + [ + "args", + "()" + ], + [ + "kwargs", + "{}" + ] + ], + "key": 0 + }, + { + "label_ref": [ + "4_transitions", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'node2'", + 
"value_TRANSITIONS", + "key_lbl.to_fallback(0.1)" + ], + "label": "lbl.to_fallback(0.1)", + "condition_ref": [ + "4_transitions", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'node2'", + "value_TRANSITIONS", + "value_lbl.to_fallback(0.1)" + ], + "condition": "cnd.true()", + "source": [ + "NODE", + "'greeting_flow'", + "'node2'" + ], + "target": [ + "LABEL", + "to_fallback", + [ + "priority", + "0.1" + ], + [ + "args", + "()" + ], + [ + "kwargs", + "{}" + ] + ], + "key": 0 + }, + { + "label_ref": [ + "4_transitions", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'node2'", + "value_TRANSITIONS", + "key_lbl.forward(0.5)" + ], + "label": "lbl.forward(0.5)", + "condition_ref": [ + "4_transitions", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'node2'", + "value_TRANSITIONS", + "value_lbl.forward(0.5)" + ], + "condition": "cnd.regexp('talk about')", + "source": [ + "NODE", + "'greeting_flow'", + "'node2'" + ], + "target": [ + "LABEL", + "forward", + [ + "priority", + "0.5" + ], + [ + "cyclicality_flag", + "True" + ], + [ + "args", + "()" + ], + [ + "kwargs", + "{}" + ] + ], + "key": 0 + }, + { + "label_ref": [ + "4_transitions", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'node2'", + "value_TRANSITIONS", + "key_('music_flow', 'node1')" + ], + "label": "('music_flow', 'node1')", + "condition_ref": [ + "4_transitions", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'node2'", + "value_TRANSITIONS", + "value_('music_flow', 'node1')" + ], + "condition": "cnd.regexp('talk about music')", + "source": [ + "NODE", + "'greeting_flow'", + "'node2'" + ], + "target": [ + "NODE", + "'music_flow'", + "'node1'" + ], + "key": 0 + }, + { + "label_ref": [ + "4_transitions", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'node2'", + "value_TRANSITIONS", + "key_lbl.previous()" + ], + "label": "lbl.previous()", + "condition_ref": [ + "4_transitions", + "toy_script", + "value", + 
"value_'greeting_flow'", + "value_'node2'", + "value_TRANSITIONS", + "value_lbl.previous()" + ], + "condition": "cnd.regexp('previous', re.IGNORECASE)", + "source": [ + "NODE", + "'greeting_flow'", + "'node2'" + ], + "target": [ + "LABEL", + "previous", + [ + "priority", + "None" + ], + [ + "args", + "()" + ], + [ + "kwargs", + "{}" + ] + ], + "key": 0 + }, + { + "label_ref": [ + "4_transitions", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'node3'", + "value_TRANSITIONS", + "key_lbl.forward()" + ], + "label": "lbl.forward()", + "condition_ref": [ + "4_transitions", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'node3'", + "value_TRANSITIONS", + "value_lbl.forward()" + ], + "condition": "cnd.regexp('bye')", + "source": [ + "NODE", + "'greeting_flow'", + "'node3'" + ], + "target": [ + "LABEL", + "forward", + [ + "priority", + "None" + ], + [ + "cyclicality_flag", + "True" + ], + [ + "args", + "()" + ], + [ + "kwargs", + "{}" + ] + ], + "key": 0 + }, + { + "label_ref": [ + "4_transitions", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'node4'", + "value_TRANSITIONS", + "key_'node1'" + ], + "label": "'node1'", + "condition_ref": [ + "4_transitions", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'node4'", + "value_TRANSITIONS", + "value_'node1'" + ], + "condition": "cnd.regexp('hi|hello', re.IGNORECASE)", + "source": [ + "NODE", + "'greeting_flow'", + "'node4'" + ], + "target": [ + "NODE", + "'greeting_flow'", + "'node1'" + ], + "key": 0 + }, + { + "label_ref": [ + "4_transitions", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'node4'", + "value_TRANSITIONS", + "key_lbl.to_fallback()" + ], + "label": "lbl.to_fallback()", + "condition_ref": [ + "4_transitions", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'node4'", + "value_TRANSITIONS", + "value_lbl.to_fallback()" + ], + "condition": "cnd.true()", + "source": [ + "NODE", + "'greeting_flow'", + "'node4'" + ], + 
"target": [ + "LABEL", + "to_fallback", + [ + "priority", + "None" + ], + [ + "args", + "()" + ], + [ + "kwargs", + "{}" + ] + ], + "key": 0 + }, + { + "label_ref": [ + "4_transitions", + "toy_script", + "value", + "value_'music_flow'", + "value_'node2'", + "value_TRANSITIONS", + "key_lbl.forward()" + ], + "label": "lbl.forward()", + "condition_ref": [ + "4_transitions", + "toy_script", + "value", + "value_'music_flow'", + "value_'node2'", + "value_TRANSITIONS", + "value_lbl.forward()" + ], + "condition": "cnd.regexp('next', re.IGNORECASE)", + "source": [ + "NODE", + "'music_flow'", + "'node2'" + ], + "target": [ + "LABEL", + "forward", + [ + "priority", + "None" + ], + [ + "cyclicality_flag", + "True" + ], + [ + "args", + "()" + ], + [ + "kwargs", + "{}" + ] + ], + "key": 0 + }, + { + "label_ref": [ + "4_transitions", + "toy_script", + "value", + "value_'music_flow'", + "value_'node2'", + "value_TRANSITIONS", + "key_lbl.repeat()" + ], + "label": "lbl.repeat()", + "condition_ref": [ + "4_transitions", + "toy_script", + "value", + "value_'music_flow'", + "value_'node2'", + "value_TRANSITIONS", + "value_lbl.repeat()" + ], + "condition": "cnd.regexp('repeat', re.IGNORECASE)", + "source": [ + "NODE", + "'music_flow'", + "'node2'" + ], + "target": [ + "LABEL", + "repeat", + [ + "priority", + "None" + ], + [ + "args", + "()" + ], + [ + "kwargs", + "{}" + ] + ], + "key": 0 + }, + { + "label_ref": [ + "4_transitions", + "toy_script", + "value", + "value_'music_flow'", + "value_'node2'", + "value_TRANSITIONS", + "key_lbl.to_fallback()" + ], + "label": "lbl.to_fallback()", + "condition_ref": [ + "4_transitions", + "toy_script", + "value", + "value_'music_flow'", + "value_'node2'", + "value_TRANSITIONS", + "value_lbl.to_fallback()" + ], + "condition": "cnd.true()", + "source": [ + "NODE", + "'music_flow'", + "'node2'" + ], + "target": [ + "LABEL", + "to_fallback", + [ + "priority", + "None" + ], + [ + "args", + "()" + ], + [ + "kwargs", + "{}" + ] + ], + "key": 0 + }, + { + 
"label_ref": [ + "4_transitions", + "toy_script", + "value", + "value_'music_flow'", + "value_'node3'", + "value_TRANSITIONS", + "key_lbl.forward()" + ], + "label": "lbl.forward()", + "condition_ref": [ + "4_transitions", + "toy_script", + "value", + "value_'music_flow'", + "value_'node3'", + "value_TRANSITIONS", + "value_lbl.forward()" + ], + "condition": "cnd.regexp('next', re.IGNORECASE)", + "source": [ + "NODE", + "'music_flow'", + "'node3'" + ], + "target": [ + "LABEL", + "forward", + [ + "priority", + "None" + ], + [ + "cyclicality_flag", + "True" + ], + [ + "args", + "()" + ], + [ + "kwargs", + "{}" + ] + ], + "key": 0 + }, + { + "label_ref": [ + "4_transitions", + "toy_script", + "value", + "value_'music_flow'", + "value_'node3'", + "value_TRANSITIONS", + "key_lbl.backward()" + ], + "label": "lbl.backward()", + "condition_ref": [ + "4_transitions", + "toy_script", + "value", + "value_'music_flow'", + "value_'node3'", + "value_TRANSITIONS", + "value_lbl.backward()" + ], + "condition": "cnd.regexp('back', re.IGNORECASE)", + "source": [ + "NODE", + "'music_flow'", + "'node3'" + ], + "target": [ + "LABEL", + "backward", + [ + "priority", + "None" + ], + [ + "cyclicality_flag", + "True" + ], + [ + "args", + "()" + ], + [ + "kwargs", + "{}" + ] + ], + "key": 0 + }, + { + "label_ref": [ + "4_transitions", + "toy_script", + "value", + "value_'music_flow'", + "value_'node3'", + "value_TRANSITIONS", + "key_lbl.repeat()" + ], + "label": "lbl.repeat()", + "condition_ref": [ + "4_transitions", + "toy_script", + "value", + "value_'music_flow'", + "value_'node3'", + "value_TRANSITIONS", + "value_lbl.repeat()" + ], + "condition": "cnd.regexp('repeat', re.IGNORECASE)", + "source": [ + "NODE", + "'music_flow'", + "'node3'" + ], + "target": [ + "LABEL", + "repeat", + [ + "priority", + "None" + ], + [ + "args", + "()" + ], + [ + "kwargs", + "{}" + ] + ], + "key": 0 + }, + { + "label_ref": [ + "4_transitions", + "toy_script", + "value", + "value_'music_flow'", + 
"value_'node3'", + "value_TRANSITIONS", + "key_lbl.to_fallback()" + ], + "label": "lbl.to_fallback()", + "condition_ref": [ + "4_transitions", + "toy_script", + "value", + "value_'music_flow'", + "value_'node3'", + "value_TRANSITIONS", + "value_lbl.to_fallback()" + ], + "condition": "cnd.true()", + "source": [ + "NODE", + "'music_flow'", + "'node3'" + ], + "target": [ + "LABEL", + "to_fallback", + [ + "priority", + "None" + ], + [ + "args", + "()" + ], + [ + "kwargs", + "{}" + ] + ], + "key": 0 + }, + { + "label_ref": [ + "4_transitions", + "toy_script", + "value", + "value_'music_flow'", + "value_'node4'", + "value_TRANSITIONS", + "key_greeting_flow_n2_transition" + ], + "label": "greeting_flow_n2_transition", + "condition_ref": [ + "4_transitions", + "toy_script", + "value", + "value_'music_flow'", + "value_'node4'", + "value_TRANSITIONS", + "value_greeting_flow_n2_transition" + ], + "condition": "cnd.regexp('next', re.IGNORECASE)", + "source": [ + "NODE", + "'music_flow'", + "'node4'" + ], + "target": [ + "NONE" + ], + "key": 0 + }, + { + "label_ref": [ + "4_transitions", + "toy_script", + "value", + "value_'music_flow'", + "value_'node4'", + "value_TRANSITIONS", + "key_high_priority_node_transition('greeting_flow', 'node4')" + ], + "label": "high_priority_node_transition('greeting_flow', 'node4')", + "condition_ref": [ + "4_transitions", + "toy_script", + "value", + "value_'music_flow'", + "value_'node4'", + "value_TRANSITIONS", + "value_high_priority_node_transition('greeting_flow', 'node4')" + ], + "condition": "cnd.regexp('next time', re.IGNORECASE)", + "source": [ + "NODE", + "'music_flow'", + "'node4'" + ], + "target": [ + "NONE" + ], + "key": 1 + }, + { + "label_ref": [ + "4_transitions", + "toy_script", + "value", + "value_'music_flow'", + "value_'node4'", + "value_TRANSITIONS", + "key_lbl.to_fallback()" + ], + "label": "lbl.to_fallback()", + "condition_ref": [ + "4_transitions", + "toy_script", + "value", + "value_'music_flow'", + "value_'node4'", + 
"value_TRANSITIONS", + "value_lbl.to_fallback()" + ], + "condition": "cnd.true()", + "source": [ + "NODE", + "'music_flow'", + "'node4'" + ], + "target": [ + "LABEL", + "to_fallback", + [ + "priority", + "None" + ], + [ + "args", + "()" + ], + [ + "kwargs", + "{}" + ] + ], + "key": 0 + } + ] +} \ No newline at end of file diff --git a/tests/parser/TEST_CASES/core_tutorials/4_transitions.py b/tests/parser/TEST_CASES/core_tutorials/4_transitions.py new file mode 100644 index 000000000..04a781004 --- /dev/null +++ b/tests/parser/TEST_CASES/core_tutorials/4_transitions.py @@ -0,0 +1,96 @@ +import re +from dff.script import TRANSITIONS +from dff.script import RESPONSE +from dff.script import Message +import dff.script.conditions as cnd +import dff.script.labels as lbl +from dff.pipeline import Pipeline + +toy_script = { + 'global_flow': { + 'start_node': { + RESPONSE: Message(), + TRANSITIONS: { + ('music_flow', 'node1'): cnd.regexp('talk about music'), + ('greeting_flow', 'node1'): cnd.regexp('hi|hello', re.IGNORECASE), + 'fallback_node': cnd.true(), + }, + }, + 'fallback_node': { + RESPONSE: Message(text='Ooops'), + TRANSITIONS: { + ('music_flow', 'node1'): cnd.regexp('talk about music'), + ('greeting_flow', 'node1'): cnd.regexp('hi|hello', re.IGNORECASE), + lbl.previous(): cnd.regexp('previous', re.IGNORECASE), + lbl.repeat(): cnd.true(), + }, + }, + }, + 'greeting_flow': { + 'node1': { + RESPONSE: Message(text='Hi, how are you?'), + TRANSITIONS: { + ('global_flow', 'fallback_node', 0.1): cnd.true(), + 'node2': cnd.regexp('how are you'), + }, + }, + 'node2': { + RESPONSE: Message(text='Good. 
What do you want to talk about?'), + TRANSITIONS: { + lbl.to_fallback(0.1): cnd.true(), + lbl.forward(0.5): cnd.regexp('talk about'), + ('music_flow', 'node1'): cnd.regexp('talk about music'), + lbl.previous(): cnd.regexp('previous', re.IGNORECASE), + }, + }, + 'node3': { + RESPONSE: Message(text='Sorry, I can not talk about that now.'), + TRANSITIONS: { + lbl.forward(): cnd.regexp('bye'), + }, + }, + 'node4': { + RESPONSE: Message(text='Bye'), + TRANSITIONS: { + 'node1': cnd.regexp('hi|hello', re.IGNORECASE), + lbl.to_fallback(): cnd.true(), + }, + }, + }, + 'music_flow': { + 'node1': { + RESPONSE: Message(text='I love `System of a Down` group, would you like to talk about it?'), + TRANSITIONS: { + lbl.forward(): cnd.regexp('yes|yep|ok', re.IGNORECASE), + lbl.to_fallback(): cnd.true(), + }, + }, + 'node2': { + RESPONSE: Message(text='System of a Down is an Armenian-American heavy metal band formed in 1994.'), + TRANSITIONS: { + lbl.forward(): cnd.regexp('next', re.IGNORECASE), + lbl.repeat(): cnd.regexp('repeat', re.IGNORECASE), + lbl.to_fallback(): cnd.true(), + }, + }, + 'node3': { + RESPONSE: Message(text='The band achieved commercial success with the release of five studio albums.'), + TRANSITIONS: { + lbl.forward(): cnd.regexp('next', re.IGNORECASE), + lbl.backward(): cnd.regexp('back', re.IGNORECASE), + lbl.repeat(): cnd.regexp('repeat', re.IGNORECASE), + lbl.to_fallback(): cnd.true(), + }, + }, + 'node4': { + RESPONSE: Message(text="That's all what I know."), + TRANSITIONS: { + greeting_flow_n2_transition: cnd.regexp('next', re.IGNORECASE), + high_priority_node_transition('greeting_flow', 'node4'): cnd.regexp('next time', re.IGNORECASE), + lbl.to_fallback(): cnd.true(), + }, + }, + }, +} + +pipeline = Pipeline.from_script(toy_script, start_label=('global_flow', 'start_node'), fallback_label=('global_flow', 'fallback_node')) diff --git a/tests/parser/TEST_CASES/core_tutorials/4_transitions.yaml b/tests/parser/TEST_CASES/core_tutorials/4_transitions.yaml new 
file mode 100644 index 000000000..cda5b426f --- /dev/null +++ b/tests/parser/TEST_CASES/core_tutorials/4_transitions.yaml @@ -0,0 +1,71 @@ +4_transitions: + re: import re + TRANSITIONS: from dff.script import TRANSITIONS + RESPONSE: from dff.script import RESPONSE + Message: from dff.script import Message + cnd: import dff.script.conditions + lbl: import dff.script.labels + Pipeline: from dff.pipeline import Pipeline + toy_script: + "'global_flow'": + "'start_node'": + RESPONSE: Message() + TRANSITIONS: + ('music_flow', 'node1'): cnd.regexp('talk about music') + ('greeting_flow', 'node1'): cnd.regexp('hi|hello', re.IGNORECASE) + "'fallback_node'": cnd.true() + "'fallback_node'": + RESPONSE: Message(text='Ooops') + TRANSITIONS: + ('music_flow', 'node1'): cnd.regexp('talk about music') + ('greeting_flow', 'node1'): cnd.regexp('hi|hello', re.IGNORECASE) + lbl.previous(): cnd.regexp('previous', re.IGNORECASE) + lbl.repeat(): cnd.true() + "'greeting_flow'": + "'node1'": + RESPONSE: Message(text='Hi, how are you?') + TRANSITIONS: + ('global_flow', 'fallback_node', 0.1): cnd.true() + "'node2'": cnd.regexp('how are you') + "'node2'": + RESPONSE: Message(text='Good. 
What do you want to talk about?') + TRANSITIONS: + lbl.to_fallback(0.1): cnd.true() + lbl.forward(0.5): cnd.regexp('talk about') + ('music_flow', 'node1'): cnd.regexp('talk about music') + lbl.previous(): cnd.regexp('previous', re.IGNORECASE) + "'node3'": + RESPONSE: Message(text='Sorry, I can not talk about that now.') + TRANSITIONS: + lbl.forward(): cnd.regexp('bye') + "'node4'": + RESPONSE: Message(text='Bye') + TRANSITIONS: + "'node1'": cnd.regexp('hi|hello', re.IGNORECASE) + lbl.to_fallback(): cnd.true() + "'music_flow'": + "'node1'": + RESPONSE: Message(text='I love `System of a Down` group, would you like to talk about it?') + TRANSITIONS: + lbl.forward(): cnd.regexp('yes|yep|ok', re.IGNORECASE) + lbl.to_fallback(): cnd.true() + "'node2'": + RESPONSE: Message(text='System of a Down is an Armenian-American heavy metal band formed in 1994.') + TRANSITIONS: + lbl.forward(): cnd.regexp('next', re.IGNORECASE) + lbl.repeat(): cnd.regexp('repeat', re.IGNORECASE) + lbl.to_fallback(): cnd.true() + "'node3'": + RESPONSE: Message(text='The band achieved commercial success with the release of five studio albums.') + TRANSITIONS: + lbl.forward(): cnd.regexp('next', re.IGNORECASE) + lbl.backward(): cnd.regexp('back', re.IGNORECASE) + lbl.repeat(): cnd.regexp('repeat', re.IGNORECASE) + lbl.to_fallback(): cnd.true() + "'node4'": + RESPONSE: Message(text="That's all what I know.") + TRANSITIONS: + greeting_flow_n2_transition: cnd.regexp('next', re.IGNORECASE) + high_priority_node_transition('greeting_flow', 'node4'): cnd.regexp('next time', re.IGNORECASE) + lbl.to_fallback(): cnd.true() + pipeline: Pipeline.from_script(toy_script, start_label=('global_flow', 'start_node'), fallback_label=('global_flow', 'fallback_node')) diff --git a/tests/parser/TEST_CASES/core_tutorials/5_global_transitions.json b/tests/parser/TEST_CASES/core_tutorials/5_global_transitions.json new file mode 100644 index 000000000..bd7d87a61 --- /dev/null +++ 
b/tests/parser/TEST_CASES/core_tutorials/5_global_transitions.json @@ -0,0 +1,939 @@ +{ + "directed": true, + "multigraph": true, + "graph": { + "full_script": { + "5_global_transitions": { + "re": "import re", + "GLOBAL": "from dff.script import GLOBAL", + "TRANSITIONS": "from dff.script import TRANSITIONS", + "RESPONSE": "from dff.script import RESPONSE", + "Message": "from dff.script import Message", + "cnd": "import dff.script.conditions", + "lbl": "import dff.script.labels", + "Pipeline": "from dff.pipeline import Pipeline", + "toy_script": { + "GLOBAL": { + "TRANSITIONS": { + "('greeting_flow', 'node1', 1.1)": "cnd.regexp('\\\\b(hi|hello)\\\\b', re.I)", + "('music_flow', 'node1', 1.1)": "cnd.regexp('talk about music')", + "lbl.to_fallback(0.1)": "cnd.true()", + "lbl.forward()": "cnd.all([cnd.regexp('next\\\\b'), cnd.has_last_labels(labels=[('music_flow', i) for i in ['node2', 'node3']])])", + "lbl.repeat(0.2)": "cnd.all([cnd.regexp('repeat', re.I), cnd.negation(cnd.has_last_labels(flow_labels=['global_flow']))])" + } + }, + "'global_flow'": { + "'start_node'": { + "RESPONSE": "Message()" + }, + "'fallback_node'": { + "RESPONSE": "Message(text='Ooops')", + "TRANSITIONS": { + "lbl.previous()": "cnd.regexp('previous', re.I)" + } + } + }, + "'greeting_flow'": { + "'node1'": { + "RESPONSE": "Message(text='Hi, how are you?')", + "TRANSITIONS": { + "'node2'": "cnd.regexp('how are you')" + } + }, + "'node2'": { + "RESPONSE": "Message(text='Good. 
What do you want to talk about?')", + "TRANSITIONS": { + "lbl.forward(0.5)": "cnd.regexp('talk about')", + "lbl.previous()": "cnd.regexp('previous', re.I)" + } + }, + "'node3'": { + "RESPONSE": "Message(text='Sorry, I can not talk about that now.')", + "TRANSITIONS": { + "lbl.forward()": "cnd.regexp('bye')" + } + }, + "'node4'": { + "RESPONSE": "Message(text='bye')" + } + }, + "'music_flow'": { + "'node1'": { + "RESPONSE": "Message(text='I love `System of a Down` group, would you like to talk about it?')", + "TRANSITIONS": { + "lbl.forward()": "cnd.regexp('yes|yep|ok', re.I)" + } + }, + "'node2'": { + "RESPONSE": "Message(text='System of a Down is an Armenian-American heavy metal band formed in 1994.')" + }, + "'node3'": { + "RESPONSE": "Message(text='The band achieved commercial success with the release of five studio albums.')", + "TRANSITIONS": { + "lbl.backward()": "cnd.regexp('back', re.I)" + } + }, + "'node4'": { + "RESPONSE": "Message(text=\"That's all what I know.\")", + "TRANSITIONS": { + "('greeting_flow', 'node4')": "cnd.regexp('next time', re.I)", + "('greeting_flow', 'node2')": "cnd.regexp('next', re.I)" + } + } + } + }, + "pipeline": "Pipeline.from_script(toy_script, start_label=('global_flow', 'start_node'), fallback_label=('global_flow', 'fallback_node'))" + } + }, + "start_label": [ + "'global_flow'", + "'start_node'" + ], + "fallback_label": [ + "'global_flow'", + "'fallback_node'" + ] + }, + "nodes": [ + { + "ref": [ + "5_global_transitions", + "toy_script", + "value", + "value_GLOBAL" + ], + "id": [ + "GLOBAL_NODE", + "GLOBAL" + ] + }, + { + "ref": [ + "5_global_transitions", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'node1'" + ], + "id": [ + "NODE", + "'greeting_flow'", + "'node1'" + ] + }, + { + "ref": [ + "5_global_transitions", + "toy_script", + "value", + "value_'music_flow'", + "value_'node1'" + ], + "id": [ + "NODE", + "'music_flow'", + "'node1'" + ] + }, + { + "id": [ + "LABEL", + "to_fallback", + [ + "priority", + 
"0.1" + ], + [ + "args", + "()" + ], + [ + "kwargs", + "{}" + ] + ] + }, + { + "id": [ + "LABEL", + "forward", + [ + "priority", + "None" + ], + [ + "cyclicality_flag", + "True" + ], + [ + "args", + "()" + ], + [ + "kwargs", + "{}" + ] + ] + }, + { + "id": [ + "LABEL", + "repeat", + [ + "priority", + "0.2" + ], + [ + "args", + "()" + ], + [ + "kwargs", + "{}" + ] + ] + }, + { + "ref": [ + "5_global_transitions", + "toy_script", + "value", + "value_'global_flow'", + "value_'start_node'" + ], + "id": [ + "NODE", + "'global_flow'", + "'start_node'" + ] + }, + { + "ref": [ + "5_global_transitions", + "toy_script", + "value", + "value_'global_flow'", + "value_'fallback_node'" + ], + "id": [ + "NODE", + "'global_flow'", + "'fallback_node'" + ] + }, + { + "id": [ + "LABEL", + "previous", + [ + "priority", + "None" + ], + [ + "args", + "()" + ], + [ + "kwargs", + "{}" + ] + ] + }, + { + "ref": [ + "5_global_transitions", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'node2'" + ], + "id": [ + "NODE", + "'greeting_flow'", + "'node2'" + ] + }, + { + "id": [ + "LABEL", + "forward", + [ + "priority", + "0.5" + ], + [ + "cyclicality_flag", + "True" + ], + [ + "args", + "()" + ], + [ + "kwargs", + "{}" + ] + ] + }, + { + "ref": [ + "5_global_transitions", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'node3'" + ], + "id": [ + "NODE", + "'greeting_flow'", + "'node3'" + ] + }, + { + "ref": [ + "5_global_transitions", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'node4'" + ], + "id": [ + "NODE", + "'greeting_flow'", + "'node4'" + ] + }, + { + "ref": [ + "5_global_transitions", + "toy_script", + "value", + "value_'music_flow'", + "value_'node2'" + ], + "id": [ + "NODE", + "'music_flow'", + "'node2'" + ] + }, + { + "ref": [ + "5_global_transitions", + "toy_script", + "value", + "value_'music_flow'", + "value_'node3'" + ], + "id": [ + "NODE", + "'music_flow'", + "'node3'" + ] + }, + { + "id": [ + "LABEL", + "backward", + [ + 
"priority", + "None" + ], + [ + "cyclicality_flag", + "True" + ], + [ + "args", + "()" + ], + [ + "kwargs", + "{}" + ] + ] + }, + { + "ref": [ + "5_global_transitions", + "toy_script", + "value", + "value_'music_flow'", + "value_'node4'" + ], + "id": [ + "NODE", + "'music_flow'", + "'node4'" + ] + } + ], + "links": [ + { + "label_ref": [ + "5_global_transitions", + "toy_script", + "value", + "value_GLOBAL", + "value_TRANSITIONS", + "key_('greeting_flow', 'node1', 1.1)" + ], + "label": "('greeting_flow', 'node1', 1.1)", + "condition_ref": [ + "5_global_transitions", + "toy_script", + "value", + "value_GLOBAL", + "value_TRANSITIONS", + "value_('greeting_flow', 'node1', 1.1)" + ], + "condition": "cnd.regexp('\\\\b(hi|hello)\\\\b', re.I)", + "source": [ + "GLOBAL_NODE", + "GLOBAL" + ], + "target": [ + "NODE", + "'greeting_flow'", + "'node1'" + ], + "key": 0 + }, + { + "label_ref": [ + "5_global_transitions", + "toy_script", + "value", + "value_GLOBAL", + "value_TRANSITIONS", + "key_('music_flow', 'node1', 1.1)" + ], + "label": "('music_flow', 'node1', 1.1)", + "condition_ref": [ + "5_global_transitions", + "toy_script", + "value", + "value_GLOBAL", + "value_TRANSITIONS", + "value_('music_flow', 'node1', 1.1)" + ], + "condition": "cnd.regexp('talk about music')", + "source": [ + "GLOBAL_NODE", + "GLOBAL" + ], + "target": [ + "NODE", + "'music_flow'", + "'node1'" + ], + "key": 0 + }, + { + "label_ref": [ + "5_global_transitions", + "toy_script", + "value", + "value_GLOBAL", + "value_TRANSITIONS", + "key_lbl.to_fallback(0.1)" + ], + "label": "lbl.to_fallback(0.1)", + "condition_ref": [ + "5_global_transitions", + "toy_script", + "value", + "value_GLOBAL", + "value_TRANSITIONS", + "value_lbl.to_fallback(0.1)" + ], + "condition": "cnd.true()", + "source": [ + "GLOBAL_NODE", + "GLOBAL" + ], + "target": [ + "LABEL", + "to_fallback", + [ + "priority", + "0.1" + ], + [ + "args", + "()" + ], + [ + "kwargs", + "{}" + ] + ], + "key": 0 + }, + { + "label_ref": [ + 
"5_global_transitions", + "toy_script", + "value", + "value_GLOBAL", + "value_TRANSITIONS", + "key_lbl.forward()" + ], + "label": "lbl.forward()", + "condition_ref": [ + "5_global_transitions", + "toy_script", + "value", + "value_GLOBAL", + "value_TRANSITIONS", + "value_lbl.forward()" + ], + "condition": "cnd.all([cnd.regexp('next\\\\b'), cnd.has_last_labels(labels=[('music_flow', i) for i in ['node2', 'node3']])])", + "source": [ + "GLOBAL_NODE", + "GLOBAL" + ], + "target": [ + "LABEL", + "forward", + [ + "priority", + "None" + ], + [ + "cyclicality_flag", + "True" + ], + [ + "args", + "()" + ], + [ + "kwargs", + "{}" + ] + ], + "key": 0 + }, + { + "label_ref": [ + "5_global_transitions", + "toy_script", + "value", + "value_GLOBAL", + "value_TRANSITIONS", + "key_lbl.repeat(0.2)" + ], + "label": "lbl.repeat(0.2)", + "condition_ref": [ + "5_global_transitions", + "toy_script", + "value", + "value_GLOBAL", + "value_TRANSITIONS", + "value_lbl.repeat(0.2)" + ], + "condition": "cnd.all([cnd.regexp('repeat', re.I), cnd.negation(cnd.has_last_labels(flow_labels=['global_flow']))])", + "source": [ + "GLOBAL_NODE", + "GLOBAL" + ], + "target": [ + "LABEL", + "repeat", + [ + "priority", + "0.2" + ], + [ + "args", + "()" + ], + [ + "kwargs", + "{}" + ] + ], + "key": 0 + }, + { + "label_ref": [ + "5_global_transitions", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'node1'", + "value_TRANSITIONS", + "key_'node2'" + ], + "label": "'node2'", + "condition_ref": [ + "5_global_transitions", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'node1'", + "value_TRANSITIONS", + "value_'node2'" + ], + "condition": "cnd.regexp('how are you')", + "source": [ + "NODE", + "'greeting_flow'", + "'node1'" + ], + "target": [ + "NODE", + "'greeting_flow'", + "'node2'" + ], + "key": 0 + }, + { + "label_ref": [ + "5_global_transitions", + "toy_script", + "value", + "value_'music_flow'", + "value_'node1'", + "value_TRANSITIONS", + "key_lbl.forward()" + ], + "label": 
"lbl.forward()", + "condition_ref": [ + "5_global_transitions", + "toy_script", + "value", + "value_'music_flow'", + "value_'node1'", + "value_TRANSITIONS", + "value_lbl.forward()" + ], + "condition": "cnd.regexp('yes|yep|ok', re.I)", + "source": [ + "NODE", + "'music_flow'", + "'node1'" + ], + "target": [ + "LABEL", + "forward", + [ + "priority", + "None" + ], + [ + "cyclicality_flag", + "True" + ], + [ + "args", + "()" + ], + [ + "kwargs", + "{}" + ] + ], + "key": 0 + }, + { + "label_ref": [ + "5_global_transitions", + "toy_script", + "value", + "value_'global_flow'", + "value_'fallback_node'", + "value_TRANSITIONS", + "key_lbl.previous()" + ], + "label": "lbl.previous()", + "condition_ref": [ + "5_global_transitions", + "toy_script", + "value", + "value_'global_flow'", + "value_'fallback_node'", + "value_TRANSITIONS", + "value_lbl.previous()" + ], + "condition": "cnd.regexp('previous', re.I)", + "source": [ + "NODE", + "'global_flow'", + "'fallback_node'" + ], + "target": [ + "LABEL", + "previous", + [ + "priority", + "None" + ], + [ + "args", + "()" + ], + [ + "kwargs", + "{}" + ] + ], + "key": 0 + }, + { + "label_ref": [ + "5_global_transitions", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'node2'", + "value_TRANSITIONS", + "key_lbl.forward(0.5)" + ], + "label": "lbl.forward(0.5)", + "condition_ref": [ + "5_global_transitions", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'node2'", + "value_TRANSITIONS", + "value_lbl.forward(0.5)" + ], + "condition": "cnd.regexp('talk about')", + "source": [ + "NODE", + "'greeting_flow'", + "'node2'" + ], + "target": [ + "LABEL", + "forward", + [ + "priority", + "0.5" + ], + [ + "cyclicality_flag", + "True" + ], + [ + "args", + "()" + ], + [ + "kwargs", + "{}" + ] + ], + "key": 0 + }, + { + "label_ref": [ + "5_global_transitions", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'node2'", + "value_TRANSITIONS", + "key_lbl.previous()" + ], + "label": "lbl.previous()", + 
"condition_ref": [ + "5_global_transitions", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'node2'", + "value_TRANSITIONS", + "value_lbl.previous()" + ], + "condition": "cnd.regexp('previous', re.I)", + "source": [ + "NODE", + "'greeting_flow'", + "'node2'" + ], + "target": [ + "LABEL", + "previous", + [ + "priority", + "None" + ], + [ + "args", + "()" + ], + [ + "kwargs", + "{}" + ] + ], + "key": 0 + }, + { + "label_ref": [ + "5_global_transitions", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'node3'", + "value_TRANSITIONS", + "key_lbl.forward()" + ], + "label": "lbl.forward()", + "condition_ref": [ + "5_global_transitions", + "toy_script", + "value", + "value_'greeting_flow'", + "value_'node3'", + "value_TRANSITIONS", + "value_lbl.forward()" + ], + "condition": "cnd.regexp('bye')", + "source": [ + "NODE", + "'greeting_flow'", + "'node3'" + ], + "target": [ + "LABEL", + "forward", + [ + "priority", + "None" + ], + [ + "cyclicality_flag", + "True" + ], + [ + "args", + "()" + ], + [ + "kwargs", + "{}" + ] + ], + "key": 0 + }, + { + "label_ref": [ + "5_global_transitions", + "toy_script", + "value", + "value_'music_flow'", + "value_'node3'", + "value_TRANSITIONS", + "key_lbl.backward()" + ], + "label": "lbl.backward()", + "condition_ref": [ + "5_global_transitions", + "toy_script", + "value", + "value_'music_flow'", + "value_'node3'", + "value_TRANSITIONS", + "value_lbl.backward()" + ], + "condition": "cnd.regexp('back', re.I)", + "source": [ + "NODE", + "'music_flow'", + "'node3'" + ], + "target": [ + "LABEL", + "backward", + [ + "priority", + "None" + ], + [ + "cyclicality_flag", + "True" + ], + [ + "args", + "()" + ], + [ + "kwargs", + "{}" + ] + ], + "key": 0 + }, + { + "label_ref": [ + "5_global_transitions", + "toy_script", + "value", + "value_'music_flow'", + "value_'node4'", + "value_TRANSITIONS", + "key_('greeting_flow', 'node4')" + ], + "label": "('greeting_flow', 'node4')", + "condition_ref": [ + "5_global_transitions", 
+ "toy_script", + "value", + "value_'music_flow'", + "value_'node4'", + "value_TRANSITIONS", + "value_('greeting_flow', 'node4')" + ], + "condition": "cnd.regexp('next time', re.I)", + "source": [ + "NODE", + "'music_flow'", + "'node4'" + ], + "target": [ + "NODE", + "'greeting_flow'", + "'node4'" + ], + "key": 0 + }, + { + "label_ref": [ + "5_global_transitions", + "toy_script", + "value", + "value_'music_flow'", + "value_'node4'", + "value_TRANSITIONS", + "key_('greeting_flow', 'node2')" + ], + "label": "('greeting_flow', 'node2')", + "condition_ref": [ + "5_global_transitions", + "toy_script", + "value", + "value_'music_flow'", + "value_'node4'", + "value_TRANSITIONS", + "value_('greeting_flow', 'node2')" + ], + "condition": "cnd.regexp('next', re.I)", + "source": [ + "NODE", + "'music_flow'", + "'node4'" + ], + "target": [ + "NODE", + "'greeting_flow'", + "'node2'" + ], + "key": 0 + } + ] +} \ No newline at end of file diff --git a/tests/parser/TEST_CASES/core_tutorials/5_global_transitions.py b/tests/parser/TEST_CASES/core_tutorials/5_global_transitions.py new file mode 100644 index 000000000..dec540811 --- /dev/null +++ b/tests/parser/TEST_CASES/core_tutorials/5_global_transitions.py @@ -0,0 +1,81 @@ +import re +from dff.script import GLOBAL +from dff.script import TRANSITIONS +from dff.script import RESPONSE +from dff.script import Message +import dff.script.conditions as cnd +import dff.script.labels as lbl +from dff.pipeline import Pipeline + +toy_script = { + GLOBAL: { + TRANSITIONS: { + ('greeting_flow', 'node1', 1.1): cnd.regexp('\\b(hi|hello)\\b', re.I), + ('music_flow', 'node1', 1.1): cnd.regexp('talk about music'), + lbl.to_fallback(0.1): cnd.true(), + lbl.forward(): cnd.all([cnd.regexp('next\\b'), cnd.has_last_labels(labels=[('music_flow', i) for i in ['node2', 'node3']])]), + lbl.repeat(0.2): cnd.all([cnd.regexp('repeat', re.I), cnd.negation(cnd.has_last_labels(flow_labels=['global_flow']))]), + }, + }, + 'global_flow': { + 'start_node': { + 
RESPONSE: Message(), + }, + 'fallback_node': { + RESPONSE: Message(text='Ooops'), + TRANSITIONS: { + lbl.previous(): cnd.regexp('previous', re.I), + }, + }, + }, + 'greeting_flow': { + 'node1': { + RESPONSE: Message(text='Hi, how are you?'), + TRANSITIONS: { + 'node2': cnd.regexp('how are you'), + }, + }, + 'node2': { + RESPONSE: Message(text='Good. What do you want to talk about?'), + TRANSITIONS: { + lbl.forward(0.5): cnd.regexp('talk about'), + lbl.previous(): cnd.regexp('previous', re.I), + }, + }, + 'node3': { + RESPONSE: Message(text='Sorry, I can not talk about that now.'), + TRANSITIONS: { + lbl.forward(): cnd.regexp('bye'), + }, + }, + 'node4': { + RESPONSE: Message(text='bye'), + }, + }, + 'music_flow': { + 'node1': { + RESPONSE: Message(text='I love `System of a Down` group, would you like to talk about it?'), + TRANSITIONS: { + lbl.forward(): cnd.regexp('yes|yep|ok', re.I), + }, + }, + 'node2': { + RESPONSE: Message(text='System of a Down is an Armenian-American heavy metal band formed in 1994.'), + }, + 'node3': { + RESPONSE: Message(text='The band achieved commercial success with the release of five studio albums.'), + TRANSITIONS: { + lbl.backward(): cnd.regexp('back', re.I), + }, + }, + 'node4': { + RESPONSE: Message(text="That's all what I know."), + TRANSITIONS: { + ('greeting_flow', 'node4'): cnd.regexp('next time', re.I), + ('greeting_flow', 'node2'): cnd.regexp('next', re.I), + }, + }, + }, +} + +pipeline = Pipeline.from_script(toy_script, start_label=('global_flow', 'start_node'), fallback_label=('global_flow', 'fallback_node')) diff --git a/tests/parser/TEST_CASES/core_tutorials/5_global_transitions.yaml b/tests/parser/TEST_CASES/core_tutorials/5_global_transitions.yaml new file mode 100644 index 000000000..d951fedd9 --- /dev/null +++ b/tests/parser/TEST_CASES/core_tutorials/5_global_transitions.yaml @@ -0,0 +1,57 @@ +5_global_transitions: + re: import re + GLOBAL: from dff.script import GLOBAL + TRANSITIONS: from dff.script import 
TRANSITIONS + RESPONSE: from dff.script import RESPONSE + Message: from dff.script import Message + cnd: import dff.script.conditions + lbl: import dff.script.labels + Pipeline: from dff.pipeline import Pipeline + toy_script: + GLOBAL: + TRANSITIONS: + ('greeting_flow', 'node1', 1.1): cnd.regexp('\\b(hi|hello)\\b', re.I) + ('music_flow', 'node1', 1.1): cnd.regexp('talk about music') + lbl.to_fallback(0.1): cnd.true() + lbl.forward(): cnd.all([cnd.regexp('next\\b'), cnd.has_last_labels(labels=[('music_flow', i) for i in ['node2', 'node3']])]) + lbl.repeat(0.2): cnd.all([cnd.regexp('repeat', re.I), cnd.negation(cnd.has_last_labels(flow_labels=['global_flow']))]) + "'global_flow'": + "'start_node'": + RESPONSE: Message() + "'fallback_node'": + RESPONSE: Message(text='Ooops') + TRANSITIONS: + lbl.previous(): cnd.regexp('previous', re.I) + "'greeting_flow'": + "'node1'": + RESPONSE: Message(text='Hi, how are you?') + TRANSITIONS: + "'node2'": cnd.regexp('how are you') + "'node2'": + RESPONSE: Message(text='Good. 
What do you want to talk about?') + TRANSITIONS: + lbl.forward(0.5): cnd.regexp('talk about') + lbl.previous(): cnd.regexp('previous', re.I) + "'node3'": + RESPONSE: Message(text='Sorry, I can not talk about that now.') + TRANSITIONS: + lbl.forward(): cnd.regexp('bye') + "'node4'": + RESPONSE: Message(text='bye') + "'music_flow'": + "'node1'": + RESPONSE: Message(text='I love `System of a Down` group, would you like to talk about it?') + TRANSITIONS: + lbl.forward(): cnd.regexp('yes|yep|ok', re.I) + "'node2'": + RESPONSE: Message(text='System of a Down is an Armenian-American heavy metal band formed in 1994.') + "'node3'": + RESPONSE: Message(text='The band achieved commercial success with the release of five studio albums.') + TRANSITIONS: + lbl.backward(): cnd.regexp('back', re.I) + "'node4'": + RESPONSE: Message(text="That's all what I know.") + TRANSITIONS: + ('greeting_flow', 'node4'): cnd.regexp('next time', re.I) + ('greeting_flow', 'node2'): cnd.regexp('next', re.I) + pipeline: Pipeline.from_script(toy_script, start_label=('global_flow', 'start_node'), fallback_label=('global_flow', 'fallback_node')) diff --git a/tests/parser/TEST_CASES/core_tutorials/6_context_serialization.json b/tests/parser/TEST_CASES/core_tutorials/6_context_serialization.json new file mode 100644 index 000000000..3c2e5c88a --- /dev/null +++ b/tests/parser/TEST_CASES/core_tutorials/6_context_serialization.json @@ -0,0 +1,84 @@ +{ + "directed": true, + "multigraph": true, + "graph": { + "full_script": { + "6_context_serialization": { + "TRANSITIONS": "from dff.script import TRANSITIONS", + "RESPONSE": "from dff.script import RESPONSE", + "cnd": "import dff.script.conditions", + "Pipeline": "from dff.pipeline import Pipeline", + "toy_script": { + "'flow_start'": { + "'node_start'": { + "RESPONSE": "response_handler", + "TRANSITIONS": { + "('flow_start', 'node_start')": "cnd.true()" + } + } + } + }, + "pipeline": "Pipeline.from_script(toy_script, start_label=('flow_start', 'node_start'), 
post_services=[process_response])" + } + }, + "start_label": [ + "'flow_start'", + "'node_start'" + ], + "fallback_label": [ + "'flow_start'", + "'node_start'" + ] + }, + "nodes": [ + { + "ref": [ + "6_context_serialization", + "toy_script", + "value", + "value_'flow_start'", + "value_'node_start'" + ], + "id": [ + "NODE", + "'flow_start'", + "'node_start'" + ] + } + ], + "links": [ + { + "label_ref": [ + "6_context_serialization", + "toy_script", + "value", + "value_'flow_start'", + "value_'node_start'", + "value_TRANSITIONS", + "key_('flow_start', 'node_start')" + ], + "label": "('flow_start', 'node_start')", + "condition_ref": [ + "6_context_serialization", + "toy_script", + "value", + "value_'flow_start'", + "value_'node_start'", + "value_TRANSITIONS", + "value_('flow_start', 'node_start')" + ], + "condition": "cnd.true()", + "source": [ + "NODE", + "'flow_start'", + "'node_start'" + ], + "target": [ + "NODE", + "'flow_start'", + "'node_start'" + ], + "key": 0 + } + ] +} \ No newline at end of file diff --git a/tests/parser/TEST_CASES/core_tutorials/6_context_serialization.py b/tests/parser/TEST_CASES/core_tutorials/6_context_serialization.py new file mode 100644 index 000000000..43dfebf76 --- /dev/null +++ b/tests/parser/TEST_CASES/core_tutorials/6_context_serialization.py @@ -0,0 +1,17 @@ +from dff.script import TRANSITIONS +from dff.script import RESPONSE +import dff.script.conditions as cnd +from dff.pipeline import Pipeline + +toy_script = { + 'flow_start': { + 'node_start': { + RESPONSE: response_handler, + TRANSITIONS: { + ('flow_start', 'node_start'): cnd.true(), + }, + }, + }, +} + +pipeline = Pipeline.from_script(toy_script, start_label=('flow_start', 'node_start'), post_services=[process_response]) diff --git a/tests/parser/TEST_CASES/core_tutorials/6_context_serialization.yaml b/tests/parser/TEST_CASES/core_tutorials/6_context_serialization.yaml new file mode 100644 index 000000000..d66575ee4 --- /dev/null +++ 
b/tests/parser/TEST_CASES/core_tutorials/6_context_serialization.yaml @@ -0,0 +1,12 @@ +6_context_serialization: + TRANSITIONS: from dff.script import TRANSITIONS + RESPONSE: from dff.script import RESPONSE + cnd: import dff.script.conditions + Pipeline: from dff.pipeline import Pipeline + toy_script: + "'flow_start'": + "'node_start'": + RESPONSE: response_handler + TRANSITIONS: + ('flow_start', 'node_start'): cnd.true() + pipeline: Pipeline.from_script(toy_script, start_label=('flow_start', 'node_start'), post_services=[process_response]) diff --git a/tests/parser/TEST_CASES/core_tutorials/7_pre_response_processing.json b/tests/parser/TEST_CASES/core_tutorials/7_pre_response_processing.json new file mode 100644 index 000000000..ada966616 --- /dev/null +++ b/tests/parser/TEST_CASES/core_tutorials/7_pre_response_processing.json @@ -0,0 +1,505 @@ +{ + "directed": true, + "multigraph": true, + "graph": { + "full_script": { + "7_pre_response_processing": { + "GLOBAL": "from dff.script import GLOBAL", + "LOCAL": "from dff.script import LOCAL", + "RESPONSE": "from dff.script import RESPONSE", + "TRANSITIONS": "from dff.script import TRANSITIONS", + "PRE_RESPONSE_PROCESSING": "from dff.script import PRE_RESPONSE_PROCESSING", + "Message": "from dff.script import Message", + "lbl": "import dff.script.labels", + "cnd": "import dff.script.conditions", + "Pipeline": "from dff.pipeline import Pipeline", + "toy_script": { + "'root'": { + "'start'": { + "RESPONSE": "Message()", + "TRANSITIONS": { + "('flow', 'step_0')": "cnd.true()" + } + }, + "'fallback'": { + "RESPONSE": "Message(text='the end')" + } + }, + "GLOBAL": { + "PRE_RESPONSE_PROCESSING": { + "'proc_name_1'": "add_prefix('l1_global')", + "'proc_name_2'": "add_prefix('l2_global')" + } + }, + "'flow'": { + "LOCAL": { + "PRE_RESPONSE_PROCESSING": { + "'proc_name_2'": "add_prefix('l2_local')", + "'proc_name_3'": "add_prefix('l3_local')" + } + }, + "'step_0'": { + "RESPONSE": "Message(text='first')", + "TRANSITIONS": { + 
"lbl.forward()": "cnd.true()" + } + }, + "'step_1'": { + "PRE_RESPONSE_PROCESSING": { + "'proc_name_1'": "add_prefix('l1_step_1')" + }, + "RESPONSE": "Message(text='second')", + "TRANSITIONS": { + "lbl.forward()": "cnd.true()" + } + }, + "'step_2'": { + "PRE_RESPONSE_PROCESSING": { + "'proc_name_2'": "add_prefix('l2_step_2')" + }, + "RESPONSE": "Message(text='third')", + "TRANSITIONS": { + "lbl.forward()": "cnd.true()" + } + }, + "'step_3'": { + "PRE_RESPONSE_PROCESSING": { + "'proc_name_3'": "add_prefix('l3_step_3')" + }, + "RESPONSE": "Message(text='fourth')", + "TRANSITIONS": { + "lbl.forward()": "cnd.true()" + } + }, + "'step_4'": { + "PRE_RESPONSE_PROCESSING": { + "'proc_name_4'": "add_prefix('l4_step_4')" + }, + "RESPONSE": "Message(text='fifth')", + "TRANSITIONS": { + "'step_0'": "cnd.true()" + } + } + } + }, + "pipeline": "Pipeline.from_script(toy_script, start_label=('root', 'start'), fallback_label=('root', 'fallback'))" + } + }, + "start_label": [ + "'root'", + "'start'" + ], + "fallback_label": [ + "'root'", + "'fallback'" + ] + }, + "nodes": [ + { + "ref": [ + "7_pre_response_processing", + "toy_script", + "value", + "value_'root'", + "value_'start'" + ], + "id": [ + "NODE", + "'root'", + "'start'" + ] + }, + { + "ref": [ + "7_pre_response_processing", + "toy_script", + "value", + "value_'flow'", + "value_'step_0'" + ], + "id": [ + "NODE", + "'flow'", + "'step_0'" + ] + }, + { + "ref": [ + "7_pre_response_processing", + "toy_script", + "value", + "value_'root'", + "value_'fallback'" + ], + "id": [ + "NODE", + "'root'", + "'fallback'" + ] + }, + { + "ref": [ + "7_pre_response_processing", + "toy_script", + "value", + "value_GLOBAL" + ], + "id": [ + "GLOBAL_NODE", + "GLOBAL" + ] + }, + { + "ref": [ + "7_pre_response_processing", + "toy_script", + "value", + "value_'flow'", + "value_LOCAL" + ], + "id": [ + "LOCAL_NODE", + "'flow'", + "LOCAL" + ] + }, + { + "id": [ + "LABEL", + "forward", + [ + "priority", + "None" + ], + [ + "cyclicality_flag", + "True" + 
], + [ + "args", + "()" + ], + [ + "kwargs", + "{}" + ] + ] + }, + { + "ref": [ + "7_pre_response_processing", + "toy_script", + "value", + "value_'flow'", + "value_'step_1'" + ], + "id": [ + "NODE", + "'flow'", + "'step_1'" + ] + }, + { + "ref": [ + "7_pre_response_processing", + "toy_script", + "value", + "value_'flow'", + "value_'step_2'" + ], + "id": [ + "NODE", + "'flow'", + "'step_2'" + ] + }, + { + "ref": [ + "7_pre_response_processing", + "toy_script", + "value", + "value_'flow'", + "value_'step_3'" + ], + "id": [ + "NODE", + "'flow'", + "'step_3'" + ] + }, + { + "ref": [ + "7_pre_response_processing", + "toy_script", + "value", + "value_'flow'", + "value_'step_4'" + ], + "id": [ + "NODE", + "'flow'", + "'step_4'" + ] + } + ], + "links": [ + { + "label_ref": [ + "7_pre_response_processing", + "toy_script", + "value", + "value_'root'", + "value_'start'", + "value_TRANSITIONS", + "key_('flow', 'step_0')" + ], + "label": "('flow', 'step_0')", + "condition_ref": [ + "7_pre_response_processing", + "toy_script", + "value", + "value_'root'", + "value_'start'", + "value_TRANSITIONS", + "value_('flow', 'step_0')" + ], + "condition": "cnd.true()", + "source": [ + "NODE", + "'root'", + "'start'" + ], + "target": [ + "NODE", + "'flow'", + "'step_0'" + ], + "key": 0 + }, + { + "label_ref": [ + "7_pre_response_processing", + "toy_script", + "value", + "value_'flow'", + "value_'step_0'", + "value_TRANSITIONS", + "key_lbl.forward()" + ], + "label": "lbl.forward()", + "condition_ref": [ + "7_pre_response_processing", + "toy_script", + "value", + "value_'flow'", + "value_'step_0'", + "value_TRANSITIONS", + "value_lbl.forward()" + ], + "condition": "cnd.true()", + "source": [ + "NODE", + "'flow'", + "'step_0'" + ], + "target": [ + "LABEL", + "forward", + [ + "priority", + "None" + ], + [ + "cyclicality_flag", + "True" + ], + [ + "args", + "()" + ], + [ + "kwargs", + "{}" + ] + ], + "key": 0 + }, + { + "label_ref": [ + "7_pre_response_processing", + "toy_script", + "value", + 
"value_'flow'", + "value_'step_1'", + "value_TRANSITIONS", + "key_lbl.forward()" + ], + "label": "lbl.forward()", + "condition_ref": [ + "7_pre_response_processing", + "toy_script", + "value", + "value_'flow'", + "value_'step_1'", + "value_TRANSITIONS", + "value_lbl.forward()" + ], + "condition": "cnd.true()", + "source": [ + "NODE", + "'flow'", + "'step_1'" + ], + "target": [ + "LABEL", + "forward", + [ + "priority", + "None" + ], + [ + "cyclicality_flag", + "True" + ], + [ + "args", + "()" + ], + [ + "kwargs", + "{}" + ] + ], + "key": 0 + }, + { + "label_ref": [ + "7_pre_response_processing", + "toy_script", + "value", + "value_'flow'", + "value_'step_2'", + "value_TRANSITIONS", + "key_lbl.forward()" + ], + "label": "lbl.forward()", + "condition_ref": [ + "7_pre_response_processing", + "toy_script", + "value", + "value_'flow'", + "value_'step_2'", + "value_TRANSITIONS", + "value_lbl.forward()" + ], + "condition": "cnd.true()", + "source": [ + "NODE", + "'flow'", + "'step_2'" + ], + "target": [ + "LABEL", + "forward", + [ + "priority", + "None" + ], + [ + "cyclicality_flag", + "True" + ], + [ + "args", + "()" + ], + [ + "kwargs", + "{}" + ] + ], + "key": 0 + }, + { + "label_ref": [ + "7_pre_response_processing", + "toy_script", + "value", + "value_'flow'", + "value_'step_3'", + "value_TRANSITIONS", + "key_lbl.forward()" + ], + "label": "lbl.forward()", + "condition_ref": [ + "7_pre_response_processing", + "toy_script", + "value", + "value_'flow'", + "value_'step_3'", + "value_TRANSITIONS", + "value_lbl.forward()" + ], + "condition": "cnd.true()", + "source": [ + "NODE", + "'flow'", + "'step_3'" + ], + "target": [ + "LABEL", + "forward", + [ + "priority", + "None" + ], + [ + "cyclicality_flag", + "True" + ], + [ + "args", + "()" + ], + [ + "kwargs", + "{}" + ] + ], + "key": 0 + }, + { + "label_ref": [ + "7_pre_response_processing", + "toy_script", + "value", + "value_'flow'", + "value_'step_4'", + "value_TRANSITIONS", + "key_'step_0'" + ], + "label": "'step_0'", + 
"condition_ref": [ + "7_pre_response_processing", + "toy_script", + "value", + "value_'flow'", + "value_'step_4'", + "value_TRANSITIONS", + "value_'step_0'" + ], + "condition": "cnd.true()", + "source": [ + "NODE", + "'flow'", + "'step_4'" + ], + "target": [ + "NODE", + "'flow'", + "'step_0'" + ], + "key": 0 + } + ] +} \ No newline at end of file diff --git a/tests/parser/TEST_CASES/core_tutorials/7_pre_response_processing.py b/tests/parser/TEST_CASES/core_tutorials/7_pre_response_processing.py new file mode 100644 index 000000000..952cf55a1 --- /dev/null +++ b/tests/parser/TEST_CASES/core_tutorials/7_pre_response_processing.py @@ -0,0 +1,81 @@ +from dff.script import GLOBAL +from dff.script import LOCAL +from dff.script import RESPONSE +from dff.script import TRANSITIONS +from dff.script import PRE_RESPONSE_PROCESSING +from dff.script import Message +import dff.script.labels as lbl +import dff.script.conditions as cnd +from dff.pipeline import Pipeline + +toy_script = { + 'root': { + 'start': { + RESPONSE: Message(), + TRANSITIONS: { + ('flow', 'step_0'): cnd.true(), + }, + }, + 'fallback': { + RESPONSE: Message(text='the end'), + }, + }, + GLOBAL: { + PRE_RESPONSE_PROCESSING: { + 'proc_name_1': add_prefix('l1_global'), + 'proc_name_2': add_prefix('l2_global'), + }, + }, + 'flow': { + LOCAL: { + PRE_RESPONSE_PROCESSING: { + 'proc_name_2': add_prefix('l2_local'), + 'proc_name_3': add_prefix('l3_local'), + }, + }, + 'step_0': { + RESPONSE: Message(text='first'), + TRANSITIONS: { + lbl.forward(): cnd.true(), + }, + }, + 'step_1': { + PRE_RESPONSE_PROCESSING: { + 'proc_name_1': add_prefix('l1_step_1'), + }, + RESPONSE: Message(text='second'), + TRANSITIONS: { + lbl.forward(): cnd.true(), + }, + }, + 'step_2': { + PRE_RESPONSE_PROCESSING: { + 'proc_name_2': add_prefix('l2_step_2'), + }, + RESPONSE: Message(text='third'), + TRANSITIONS: { + lbl.forward(): cnd.true(), + }, + }, + 'step_3': { + PRE_RESPONSE_PROCESSING: { + 'proc_name_3': add_prefix('l3_step_3'), + }, + 
RESPONSE: Message(text='fourth'), + TRANSITIONS: { + lbl.forward(): cnd.true(), + }, + }, + 'step_4': { + PRE_RESPONSE_PROCESSING: { + 'proc_name_4': add_prefix('l4_step_4'), + }, + RESPONSE: Message(text='fifth'), + TRANSITIONS: { + 'step_0': cnd.true(), + }, + }, + }, +} + +pipeline = Pipeline.from_script(toy_script, start_label=('root', 'start'), fallback_label=('root', 'fallback')) diff --git a/tests/parser/TEST_CASES/core_tutorials/7_pre_response_processing.yaml b/tests/parser/TEST_CASES/core_tutorials/7_pre_response_processing.yaml new file mode 100644 index 000000000..4b3449839 --- /dev/null +++ b/tests/parser/TEST_CASES/core_tutorials/7_pre_response_processing.yaml @@ -0,0 +1,56 @@ +7_pre_response_processing: + GLOBAL: from dff.script import GLOBAL + LOCAL: from dff.script import LOCAL + RESPONSE: from dff.script import RESPONSE + TRANSITIONS: from dff.script import TRANSITIONS + PRE_RESPONSE_PROCESSING: from dff.script import PRE_RESPONSE_PROCESSING + Message: from dff.script import Message + lbl: import dff.script.labels + cnd: import dff.script.conditions + Pipeline: from dff.pipeline import Pipeline + toy_script: + "'root'": + "'start'": + RESPONSE: Message() + TRANSITIONS: + ('flow', 'step_0'): cnd.true() + "'fallback'": + RESPONSE: Message(text='the end') + GLOBAL: + PRE_RESPONSE_PROCESSING: + "'proc_name_1'": add_prefix('l1_global') + "'proc_name_2'": add_prefix('l2_global') + "'flow'": + LOCAL: + PRE_RESPONSE_PROCESSING: + "'proc_name_2'": add_prefix('l2_local') + "'proc_name_3'": add_prefix('l3_local') + "'step_0'": + RESPONSE: Message(text='first') + TRANSITIONS: + lbl.forward(): cnd.true() + "'step_1'": + PRE_RESPONSE_PROCESSING: + "'proc_name_1'": add_prefix('l1_step_1') + RESPONSE: Message(text='second') + TRANSITIONS: + lbl.forward(): cnd.true() + "'step_2'": + PRE_RESPONSE_PROCESSING: + "'proc_name_2'": add_prefix('l2_step_2') + RESPONSE: Message(text='third') + TRANSITIONS: + lbl.forward(): cnd.true() + "'step_3'": + PRE_RESPONSE_PROCESSING: 
+ "'proc_name_3'": add_prefix('l3_step_3') + RESPONSE: Message(text='fourth') + TRANSITIONS: + lbl.forward(): cnd.true() + "'step_4'": + PRE_RESPONSE_PROCESSING: + "'proc_name_4'": add_prefix('l4_step_4') + RESPONSE: Message(text='fifth') + TRANSITIONS: + "'step_0'": cnd.true() + pipeline: Pipeline.from_script(toy_script, start_label=('root', 'start'), fallback_label=('root', 'fallback')) diff --git a/tests/parser/TEST_CASES/core_tutorials/8_misc.json b/tests/parser/TEST_CASES/core_tutorials/8_misc.json new file mode 100644 index 000000000..e09003313 --- /dev/null +++ b/tests/parser/TEST_CASES/core_tutorials/8_misc.json @@ -0,0 +1,509 @@ +{ + "directed": true, + "multigraph": true, + "graph": { + "full_script": { + "8_misc": { + "GLOBAL": "from dff.script import GLOBAL", + "LOCAL": "from dff.script import LOCAL", + "RESPONSE": "from dff.script import RESPONSE", + "TRANSITIONS": "from dff.script import TRANSITIONS", + "MISC": "from dff.script import MISC", + "Message": "from dff.script import Message", + "lbl": "import dff.script.labels", + "cnd": "import dff.script.conditions", + "Pipeline": "from dff.pipeline import Pipeline", + "toy_script": { + "'root'": { + "'start'": { + "RESPONSE": "Message()", + "TRANSITIONS": { + "('flow', 'step_0')": "cnd.true()" + } + }, + "'fallback'": { + "RESPONSE": "Message(text='the end')" + } + }, + "GLOBAL": { + "MISC": { + "'var1'": "'global_data'", + "'var2'": "'global_data'", + "'var3'": "'global_data'" + } + }, + "'flow'": { + "LOCAL": { + "MISC": { + "'var2'": "'rewrite_by_local'", + "'var3'": "'rewrite_by_local'" + } + }, + "'step_0'": { + "MISC": { + "'var3'": "'info_of_step_0'" + }, + "RESPONSE": "custom_response", + "TRANSITIONS": { + "lbl.forward()": "cnd.true()" + } + }, + "'step_1'": { + "MISC": { + "'var3'": "'info_of_step_1'" + }, + "RESPONSE": "custom_response", + "TRANSITIONS": { + "lbl.forward()": "cnd.true()" + } + }, + "'step_2'": { + "MISC": { + "'var3'": "'info_of_step_2'" + }, + "RESPONSE": "custom_response", 
+ "TRANSITIONS": { + "lbl.forward()": "cnd.true()" + } + }, + "'step_3'": { + "MISC": { + "'var3'": "'info_of_step_3'" + }, + "RESPONSE": "custom_response", + "TRANSITIONS": { + "lbl.forward()": "cnd.true()" + } + }, + "'step_4'": { + "MISC": { + "'var3'": "'info_of_step_4'" + }, + "RESPONSE": "custom_response", + "TRANSITIONS": { + "'step_0'": "cnd.true()" + } + } + } + }, + "pipeline": "Pipeline.from_script(toy_script, start_label=('root', 'start'), fallback_label=('root', 'fallback'))" + } + }, + "start_label": [ + "'root'", + "'start'" + ], + "fallback_label": [ + "'root'", + "'fallback'" + ] + }, + "nodes": [ + { + "ref": [ + "8_misc", + "toy_script", + "value", + "value_'root'", + "value_'start'" + ], + "id": [ + "NODE", + "'root'", + "'start'" + ] + }, + { + "ref": [ + "8_misc", + "toy_script", + "value", + "value_'flow'", + "value_'step_0'" + ], + "id": [ + "NODE", + "'flow'", + "'step_0'" + ] + }, + { + "ref": [ + "8_misc", + "toy_script", + "value", + "value_'root'", + "value_'fallback'" + ], + "id": [ + "NODE", + "'root'", + "'fallback'" + ] + }, + { + "ref": [ + "8_misc", + "toy_script", + "value", + "value_GLOBAL" + ], + "id": [ + "GLOBAL_NODE", + "GLOBAL" + ] + }, + { + "ref": [ + "8_misc", + "toy_script", + "value", + "value_'flow'", + "value_LOCAL" + ], + "id": [ + "LOCAL_NODE", + "'flow'", + "LOCAL" + ] + }, + { + "id": [ + "LABEL", + "forward", + [ + "priority", + "None" + ], + [ + "cyclicality_flag", + "True" + ], + [ + "args", + "()" + ], + [ + "kwargs", + "{}" + ] + ] + }, + { + "ref": [ + "8_misc", + "toy_script", + "value", + "value_'flow'", + "value_'step_1'" + ], + "id": [ + "NODE", + "'flow'", + "'step_1'" + ] + }, + { + "ref": [ + "8_misc", + "toy_script", + "value", + "value_'flow'", + "value_'step_2'" + ], + "id": [ + "NODE", + "'flow'", + "'step_2'" + ] + }, + { + "ref": [ + "8_misc", + "toy_script", + "value", + "value_'flow'", + "value_'step_3'" + ], + "id": [ + "NODE", + "'flow'", + "'step_3'" + ] + }, + { + "ref": [ + "8_misc", + 
"toy_script", + "value", + "value_'flow'", + "value_'step_4'" + ], + "id": [ + "NODE", + "'flow'", + "'step_4'" + ] + } + ], + "links": [ + { + "label_ref": [ + "8_misc", + "toy_script", + "value", + "value_'root'", + "value_'start'", + "value_TRANSITIONS", + "key_('flow', 'step_0')" + ], + "label": "('flow', 'step_0')", + "condition_ref": [ + "8_misc", + "toy_script", + "value", + "value_'root'", + "value_'start'", + "value_TRANSITIONS", + "value_('flow', 'step_0')" + ], + "condition": "cnd.true()", + "source": [ + "NODE", + "'root'", + "'start'" + ], + "target": [ + "NODE", + "'flow'", + "'step_0'" + ], + "key": 0 + }, + { + "label_ref": [ + "8_misc", + "toy_script", + "value", + "value_'flow'", + "value_'step_0'", + "value_TRANSITIONS", + "key_lbl.forward()" + ], + "label": "lbl.forward()", + "condition_ref": [ + "8_misc", + "toy_script", + "value", + "value_'flow'", + "value_'step_0'", + "value_TRANSITIONS", + "value_lbl.forward()" + ], + "condition": "cnd.true()", + "source": [ + "NODE", + "'flow'", + "'step_0'" + ], + "target": [ + "LABEL", + "forward", + [ + "priority", + "None" + ], + [ + "cyclicality_flag", + "True" + ], + [ + "args", + "()" + ], + [ + "kwargs", + "{}" + ] + ], + "key": 0 + }, + { + "label_ref": [ + "8_misc", + "toy_script", + "value", + "value_'flow'", + "value_'step_1'", + "value_TRANSITIONS", + "key_lbl.forward()" + ], + "label": "lbl.forward()", + "condition_ref": [ + "8_misc", + "toy_script", + "value", + "value_'flow'", + "value_'step_1'", + "value_TRANSITIONS", + "value_lbl.forward()" + ], + "condition": "cnd.true()", + "source": [ + "NODE", + "'flow'", + "'step_1'" + ], + "target": [ + "LABEL", + "forward", + [ + "priority", + "None" + ], + [ + "cyclicality_flag", + "True" + ], + [ + "args", + "()" + ], + [ + "kwargs", + "{}" + ] + ], + "key": 0 + }, + { + "label_ref": [ + "8_misc", + "toy_script", + "value", + "value_'flow'", + "value_'step_2'", + "value_TRANSITIONS", + "key_lbl.forward()" + ], + "label": "lbl.forward()", + 
"condition_ref": [ + "8_misc", + "toy_script", + "value", + "value_'flow'", + "value_'step_2'", + "value_TRANSITIONS", + "value_lbl.forward()" + ], + "condition": "cnd.true()", + "source": [ + "NODE", + "'flow'", + "'step_2'" + ], + "target": [ + "LABEL", + "forward", + [ + "priority", + "None" + ], + [ + "cyclicality_flag", + "True" + ], + [ + "args", + "()" + ], + [ + "kwargs", + "{}" + ] + ], + "key": 0 + }, + { + "label_ref": [ + "8_misc", + "toy_script", + "value", + "value_'flow'", + "value_'step_3'", + "value_TRANSITIONS", + "key_lbl.forward()" + ], + "label": "lbl.forward()", + "condition_ref": [ + "8_misc", + "toy_script", + "value", + "value_'flow'", + "value_'step_3'", + "value_TRANSITIONS", + "value_lbl.forward()" + ], + "condition": "cnd.true()", + "source": [ + "NODE", + "'flow'", + "'step_3'" + ], + "target": [ + "LABEL", + "forward", + [ + "priority", + "None" + ], + [ + "cyclicality_flag", + "True" + ], + [ + "args", + "()" + ], + [ + "kwargs", + "{}" + ] + ], + "key": 0 + }, + { + "label_ref": [ + "8_misc", + "toy_script", + "value", + "value_'flow'", + "value_'step_4'", + "value_TRANSITIONS", + "key_'step_0'" + ], + "label": "'step_0'", + "condition_ref": [ + "8_misc", + "toy_script", + "value", + "value_'flow'", + "value_'step_4'", + "value_TRANSITIONS", + "value_'step_0'" + ], + "condition": "cnd.true()", + "source": [ + "NODE", + "'flow'", + "'step_4'" + ], + "target": [ + "NODE", + "'flow'", + "'step_0'" + ], + "key": 0 + } + ] +} \ No newline at end of file diff --git a/tests/parser/TEST_CASES/core_tutorials/8_misc.py b/tests/parser/TEST_CASES/core_tutorials/8_misc.py new file mode 100644 index 000000000..995a53593 --- /dev/null +++ b/tests/parser/TEST_CASES/core_tutorials/8_misc.py @@ -0,0 +1,85 @@ +from dff.script import GLOBAL +from dff.script import LOCAL +from dff.script import RESPONSE +from dff.script import TRANSITIONS +from dff.script import MISC +from dff.script import Message +import dff.script.labels as lbl +import 
dff.script.conditions as cnd +from dff.pipeline import Pipeline + +toy_script = { + 'root': { + 'start': { + RESPONSE: Message(), + TRANSITIONS: { + ('flow', 'step_0'): cnd.true(), + }, + }, + 'fallback': { + RESPONSE: Message(text='the end'), + }, + }, + GLOBAL: { + MISC: { + 'var1': 'global_data', + 'var2': 'global_data', + 'var3': 'global_data', + }, + }, + 'flow': { + LOCAL: { + MISC: { + 'var2': 'rewrite_by_local', + 'var3': 'rewrite_by_local', + }, + }, + 'step_0': { + MISC: { + 'var3': 'info_of_step_0', + }, + RESPONSE: custom_response, + TRANSITIONS: { + lbl.forward(): cnd.true(), + }, + }, + 'step_1': { + MISC: { + 'var3': 'info_of_step_1', + }, + RESPONSE: custom_response, + TRANSITIONS: { + lbl.forward(): cnd.true(), + }, + }, + 'step_2': { + MISC: { + 'var3': 'info_of_step_2', + }, + RESPONSE: custom_response, + TRANSITIONS: { + lbl.forward(): cnd.true(), + }, + }, + 'step_3': { + MISC: { + 'var3': 'info_of_step_3', + }, + RESPONSE: custom_response, + TRANSITIONS: { + lbl.forward(): cnd.true(), + }, + }, + 'step_4': { + MISC: { + 'var3': 'info_of_step_4', + }, + RESPONSE: custom_response, + TRANSITIONS: { + 'step_0': cnd.true(), + }, + }, + }, +} + +pipeline = Pipeline.from_script(toy_script, start_label=('root', 'start'), fallback_label=('root', 'fallback')) diff --git a/tests/parser/TEST_CASES/core_tutorials/8_misc.yaml b/tests/parser/TEST_CASES/core_tutorials/8_misc.yaml new file mode 100644 index 000000000..8a79af833 --- /dev/null +++ b/tests/parser/TEST_CASES/core_tutorials/8_misc.yaml @@ -0,0 +1,59 @@ +8_misc: + GLOBAL: from dff.script import GLOBAL + LOCAL: from dff.script import LOCAL + RESPONSE: from dff.script import RESPONSE + TRANSITIONS: from dff.script import TRANSITIONS + MISC: from dff.script import MISC + Message: from dff.script import Message + lbl: import dff.script.labels + cnd: import dff.script.conditions + Pipeline: from dff.pipeline import Pipeline + toy_script: + "'root'": + "'start'": + RESPONSE: Message() + TRANSITIONS: + 
('flow', 'step_0'): cnd.true() + "'fallback'": + RESPONSE: Message(text='the end') + GLOBAL: + MISC: + "'var1'": "'global_data'" + "'var2'": "'global_data'" + "'var3'": "'global_data'" + "'flow'": + LOCAL: + MISC: + "'var2'": "'rewrite_by_local'" + "'var3'": "'rewrite_by_local'" + "'step_0'": + MISC: + "'var3'": "'info_of_step_0'" + RESPONSE: custom_response + TRANSITIONS: + lbl.forward(): cnd.true() + "'step_1'": + MISC: + "'var3'": "'info_of_step_1'" + RESPONSE: custom_response + TRANSITIONS: + lbl.forward(): cnd.true() + "'step_2'": + MISC: + "'var3'": "'info_of_step_2'" + RESPONSE: custom_response + TRANSITIONS: + lbl.forward(): cnd.true() + "'step_3'": + MISC: + "'var3'": "'info_of_step_3'" + RESPONSE: custom_response + TRANSITIONS: + lbl.forward(): cnd.true() + "'step_4'": + MISC: + "'var3'": "'info_of_step_4'" + RESPONSE: custom_response + TRANSITIONS: + "'step_0'": cnd.true() + pipeline: Pipeline.from_script(toy_script, start_label=('root', 'start'), fallback_label=('root', 'fallback')) diff --git a/tests/parser/TEST_CASES/core_tutorials/9_pre_transitions_processing.json b/tests/parser/TEST_CASES/core_tutorials/9_pre_transitions_processing.json new file mode 100644 index 000000000..0c7d201f4 --- /dev/null +++ b/tests/parser/TEST_CASES/core_tutorials/9_pre_transitions_processing.json @@ -0,0 +1,283 @@ +{ + "directed": true, + "multigraph": true, + "graph": { + "full_script": { + "9_pre_transitions_processing": { + "GLOBAL": "from dff.script import GLOBAL", + "RESPONSE": "from dff.script import RESPONSE", + "TRANSITIONS": "from dff.script import TRANSITIONS", + "PRE_RESPONSE_PROCESSING": "from dff.script import PRE_RESPONSE_PROCESSING", + "PRE_TRANSITIONS_PROCESSING": "from dff.script import PRE_TRANSITIONS_PROCESSING", + "Message": "from dff.script import Message", + "lbl": "import dff.script.labels", + "cnd": "import dff.script.conditions", + "Pipeline": "from dff.pipeline import Pipeline", + "toy_script": { + "'root'": { + "'start'": { + "RESPONSE": 
"Message()", + "TRANSITIONS": { + "('flow', 'step_0')": "cnd.true()" + } + }, + "'fallback'": { + "RESPONSE": "Message(text='the end')" + } + }, + "GLOBAL": { + "PRE_RESPONSE_PROCESSING": { + "'proc_name_1'": "get_previous_node_response_for_response_processing" + }, + "PRE_TRANSITIONS_PROCESSING": { + "'proc_name_1'": "save_previous_node_response_to_ctx_processing" + }, + "TRANSITIONS": { + "lbl.forward(0.1)": "cnd.true()" + } + }, + "'flow'": { + "'step_0'": { + "RESPONSE": "Message(text='first')" + }, + "'step_1'": { + "RESPONSE": "Message(text='second')" + }, + "'step_2'": { + "RESPONSE": "Message(text='third')" + }, + "'step_3'": { + "RESPONSE": "Message(text='fourth')" + }, + "'step_4'": { + "RESPONSE": "Message(text='fifth')" + } + } + }, + "pipeline": "Pipeline.from_script(toy_script, start_label=('root', 'start'), fallback_label=('root', 'fallback'))" + } + }, + "start_label": [ + "'root'", + "'start'" + ], + "fallback_label": [ + "'root'", + "'fallback'" + ] + }, + "nodes": [ + { + "ref": [ + "9_pre_transitions_processing", + "toy_script", + "value", + "value_'root'", + "value_'start'" + ], + "id": [ + "NODE", + "'root'", + "'start'" + ] + }, + { + "ref": [ + "9_pre_transitions_processing", + "toy_script", + "value", + "value_'flow'", + "value_'step_0'" + ], + "id": [ + "NODE", + "'flow'", + "'step_0'" + ] + }, + { + "ref": [ + "9_pre_transitions_processing", + "toy_script", + "value", + "value_'root'", + "value_'fallback'" + ], + "id": [ + "NODE", + "'root'", + "'fallback'" + ] + }, + { + "ref": [ + "9_pre_transitions_processing", + "toy_script", + "value", + "value_GLOBAL" + ], + "id": [ + "GLOBAL_NODE", + "GLOBAL" + ] + }, + { + "id": [ + "LABEL", + "forward", + [ + "priority", + "0.1" + ], + [ + "cyclicality_flag", + "True" + ], + [ + "args", + "()" + ], + [ + "kwargs", + "{}" + ] + ] + }, + { + "ref": [ + "9_pre_transitions_processing", + "toy_script", + "value", + "value_'flow'", + "value_'step_1'" + ], + "id": [ + "NODE", + "'flow'", + "'step_1'" + 
] + }, + { + "ref": [ + "9_pre_transitions_processing", + "toy_script", + "value", + "value_'flow'", + "value_'step_2'" + ], + "id": [ + "NODE", + "'flow'", + "'step_2'" + ] + }, + { + "ref": [ + "9_pre_transitions_processing", + "toy_script", + "value", + "value_'flow'", + "value_'step_3'" + ], + "id": [ + "NODE", + "'flow'", + "'step_3'" + ] + }, + { + "ref": [ + "9_pre_transitions_processing", + "toy_script", + "value", + "value_'flow'", + "value_'step_4'" + ], + "id": [ + "NODE", + "'flow'", + "'step_4'" + ] + } + ], + "links": [ + { + "label_ref": [ + "9_pre_transitions_processing", + "toy_script", + "value", + "value_'root'", + "value_'start'", + "value_TRANSITIONS", + "key_('flow', 'step_0')" + ], + "label": "('flow', 'step_0')", + "condition_ref": [ + "9_pre_transitions_processing", + "toy_script", + "value", + "value_'root'", + "value_'start'", + "value_TRANSITIONS", + "value_('flow', 'step_0')" + ], + "condition": "cnd.true()", + "source": [ + "NODE", + "'root'", + "'start'" + ], + "target": [ + "NODE", + "'flow'", + "'step_0'" + ], + "key": 0 + }, + { + "label_ref": [ + "9_pre_transitions_processing", + "toy_script", + "value", + "value_GLOBAL", + "value_TRANSITIONS", + "key_lbl.forward(0.1)" + ], + "label": "lbl.forward(0.1)", + "condition_ref": [ + "9_pre_transitions_processing", + "toy_script", + "value", + "value_GLOBAL", + "value_TRANSITIONS", + "value_lbl.forward(0.1)" + ], + "condition": "cnd.true()", + "source": [ + "GLOBAL_NODE", + "GLOBAL" + ], + "target": [ + "LABEL", + "forward", + [ + "priority", + "0.1" + ], + [ + "cyclicality_flag", + "True" + ], + [ + "args", + "()" + ], + [ + "kwargs", + "{}" + ] + ], + "key": 0 + } + ] +} \ No newline at end of file diff --git a/tests/parser/TEST_CASES/core_tutorials/9_pre_transitions_processing.py b/tests/parser/TEST_CASES/core_tutorials/9_pre_transitions_processing.py new file mode 100644 index 000000000..d760ba199 --- /dev/null +++ 
b/tests/parser/TEST_CASES/core_tutorials/9_pre_transitions_processing.py @@ -0,0 +1,53 @@ +from dff.script import GLOBAL +from dff.script import RESPONSE +from dff.script import TRANSITIONS +from dff.script import PRE_RESPONSE_PROCESSING +from dff.script import PRE_TRANSITIONS_PROCESSING +from dff.script import Message +import dff.script.labels as lbl +import dff.script.conditions as cnd +from dff.pipeline import Pipeline + +toy_script = { + 'root': { + 'start': { + RESPONSE: Message(), + TRANSITIONS: { + ('flow', 'step_0'): cnd.true(), + }, + }, + 'fallback': { + RESPONSE: Message(text='the end'), + }, + }, + GLOBAL: { + PRE_RESPONSE_PROCESSING: { + 'proc_name_1': get_previous_node_response_for_response_processing, + }, + PRE_TRANSITIONS_PROCESSING: { + 'proc_name_1': save_previous_node_response_to_ctx_processing, + }, + TRANSITIONS: { + lbl.forward(0.1): cnd.true(), + }, + }, + 'flow': { + 'step_0': { + RESPONSE: Message(text='first'), + }, + 'step_1': { + RESPONSE: Message(text='second'), + }, + 'step_2': { + RESPONSE: Message(text='third'), + }, + 'step_3': { + RESPONSE: Message(text='fourth'), + }, + 'step_4': { + RESPONSE: Message(text='fifth'), + }, + }, +} + +pipeline = Pipeline.from_script(toy_script, start_label=('root', 'start'), fallback_label=('root', 'fallback')) diff --git a/tests/parser/TEST_CASES/core_tutorials/9_pre_transitions_processing.yaml b/tests/parser/TEST_CASES/core_tutorials/9_pre_transitions_processing.yaml new file mode 100644 index 000000000..c8cc678b5 --- /dev/null +++ b/tests/parser/TEST_CASES/core_tutorials/9_pre_transitions_processing.yaml @@ -0,0 +1,37 @@ +9_pre_transitions_processing: + GLOBAL: from dff.script import GLOBAL + RESPONSE: from dff.script import RESPONSE + TRANSITIONS: from dff.script import TRANSITIONS + PRE_RESPONSE_PROCESSING: from dff.script import PRE_RESPONSE_PROCESSING + PRE_TRANSITIONS_PROCESSING: from dff.script import PRE_TRANSITIONS_PROCESSING + Message: from dff.script import Message + lbl: import 
dff.script.labels + cnd: import dff.script.conditions + Pipeline: from dff.pipeline import Pipeline + toy_script: + "'root'": + "'start'": + RESPONSE: Message() + TRANSITIONS: + ('flow', 'step_0'): cnd.true() + "'fallback'": + RESPONSE: Message(text='the end') + GLOBAL: + PRE_RESPONSE_PROCESSING: + "'proc_name_1'": get_previous_node_response_for_response_processing + PRE_TRANSITIONS_PROCESSING: + "'proc_name_1'": save_previous_node_response_to_ctx_processing + TRANSITIONS: + lbl.forward(0.1): cnd.true() + "'flow'": + "'step_0'": + RESPONSE: Message(text='first') + "'step_1'": + RESPONSE: Message(text='second') + "'step_2'": + RESPONSE: Message(text='third') + "'step_3'": + RESPONSE: Message(text='fourth') + "'step_4'": + RESPONSE: Message(text='fifth') + pipeline: Pipeline.from_script(toy_script, start_label=('root', 'start'), fallback_label=('root', 'fallback')) diff --git a/tests/parser/TEST_CASES/test_directory/__init__.py b/tests/parser/TEST_CASES/test_directory/__init__.py new file mode 100755 index 000000000..e69de29bb diff --git a/tests/parser/TEST_CASES/test_directory/another_package/__init__.py b/tests/parser/TEST_CASES/test_directory/another_package/__init__.py new file mode 100755 index 000000000..e69de29bb diff --git a/tests/parser/TEST_CASES/test_directory/another_package/file.py b/tests/parser/TEST_CASES/test_directory/another_package/file.py new file mode 100755 index 000000000..e69de29bb diff --git a/tests/parser/TEST_CASES/test_directory/dir/file.py b/tests/parser/TEST_CASES/test_directory/dir/file.py new file mode 100755 index 000000000..e69de29bb diff --git a/tests/parser/TEST_CASES/test_directory/file.py b/tests/parser/TEST_CASES/test_directory/file.py new file mode 100755 index 000000000..e69de29bb diff --git a/tests/parser/TEST_CASES/test_graph2py/complex_tests/test_1/graph_files/graph.json b/tests/parser/TEST_CASES/test_graph2py/complex_tests/test_1/graph_files/graph.json new file mode 100644 index 000000000..0f68ecd3c --- /dev/null +++ 
b/tests/parser/TEST_CASES/test_graph2py/complex_tests/test_1/graph_files/graph.json @@ -0,0 +1,1227 @@ +{ + "directed": true, + "multigraph": true, + "graph": { + "script": { + "namespaces": { + "main": { + "TRANSITIONS": "!from dff.core.engine.core.keywords TRANSITIONS", + "RESPONSE": "!from dff.core.engine.core.keywords RESPONSE", + "PROCESSING": "!from dff.core.engine.core.keywords PROCESSING", + "GLOBAL": "!from dff.core.engine.core.keywords GLOBAL", + "MISC": "!from dff.core.engine.core.keywords MISC", + "LOCAL": "!from dff.core.engine.core.keywords LOCAL", + "cnd": "!import dff.core.engine.conditions", + "lbl": "!import dff.core.engine.labels", + "Act": "!from dff.core.engine.core Actor", + "Context": "!from dff.core.engine.core Context", + "rsp": "!import dff.core.engine.responses", + "add_prefix": "!from functions add_prefix", + "tp": "!import typing", + "re": "!import re", + "transitions": "!import transitions", + "global_flow": "!from flow global_flow", + "script": { + "GLOBAL": { + "TRANSITIONS": { + "(\"greeting_flow\", \"node1\", 1.1)": "cnd.regexp(r\"\\b(hi|hello)\\b\", re.I)", + "(\"music_flow\", \"node1\", 1.1)": "cnd.regexp(r\"talk about music\")", + "lbl.to_fallback(0.1)": "cnd.true()", + "lbl.forward()": "cnd.all([cnd.regexp(r\"next\\b\"),cnd.has_last_labels(labels=[(\"music_flow\", i) for i in [\"node2\", \"node3\"]]),])", + "lbl.repeat(0.2)": "cnd.all([cnd.regexp(r\"repeat\", re.I),cnd.negation(cnd.has_last_labels(flow_labels=[\"global_flow\"])),])" + }, + "PROCESSING": { + "'1'\n": "add_prefix(\"l1_global\")", + "'2'\n": "add_prefix(\"l2_global\")" + }, + "MISC": { + "var1": "global_data", + "var2": "global_data", + "var3": "global_data" + }, + "RESPONSE": "!str" + }, + "!str global_flow": { + "LOCAL": { + "PROCESSING": { + "'2'\n": "add_prefix(\"l2_local\")", + "'3'\n": "add_prefix(\"l3_local\")" + } + }, + "start_node": { + "RESPONSE": "!str", + "TRANSITIONS": { + "(\"music_flow\", \"node1\")": "cnd.regexp(r\"talk about music\")", + 
"(\"greeting_flow\", \"node1\")": "cnd.regexp(r\"hi|hello\", re.IGNORECASE)", + "fallback_node": "cnd.true()" + } + }, + "fallback_node": { + "RESPONSE": "Ooops", + "TRANSITIONS": { + "(\"music_flow\", \"node1\")": "cnd.regexp(r\"talk about music\")", + "(\"greeting_flow\", \"node1\")": "cnd.regexp(r\"hi|hello\", re.IGNORECASE)", + "lbl.previous()": "cnd.regexp(r\"previous\", re.IGNORECASE)", + "lbl.repeat()": "cnd.true()" + } + } + }, + "greeting_flow": { + "node1": { + "RESPONSE": "rsp.choice([\"Hi, what is up?\", \"Hello, how are you?\"])", + "TRANSITIONS": { + "(\"global_flow\", \"fallback_node\", 0.1)": "cnd.true()", + "node2": "cnd.regexp(r\"how are you\")" + }, + "MISC": { + "var3": "info_of_step_1" + } + }, + "node2": { + "RESPONSE": "Good. What do you want to talk about?", + "TRANSITIONS": { + "lbl.to_fallback(0.1)": "cnd.true()", + "lbl.forward(0.5)": "cnd.regexp(r\"talk about\")", + "(\"music_flow\", \"node1\")": "cnd.regexp(r\"talk about music\")", + "lbl.previous()": "cnd.regexp(r\"previous\", re.IGNORECASE)" + } + }, + "node3": { + "RESPONSE": "!py foo", + "TRANSITIONS": { + "lbl.forward()": "cnd.regexp(r\"bye\")" + } + }, + "node4": { + "RESPONSE": "!py bar(\"bye\")", + "TRANSITIONS": { + "node1": "cnd.regexp(r\"hi|hello\", re.IGNORECASE)", + "lbl.to_fallback()": "cnd.true()" + } + } + }, + "music_flow": { + "node1": { + "RESPONSE": "'I love `System of a Down` group, would you like to tell about it? 
'\n", + "TRANSITIONS": { + "lbl.forward()": "cnd.regexp(r\"yes|yep|ok\", re.IGNORECASE)", + "lbl.to_fallback()": "cnd.true()" + } + }, + "node2": { + "RESPONSE": "System of a Down is an Armenian-American heavy metal band formed in in 1994.", + "TRANSITIONS": { + "lbl.forward()": "cnd.regexp(r\"next\", re.IGNORECASE)", + "lbl.repeat()": "cnd.regexp(r\"repeat\", re.IGNORECASE)", + "lbl.to_fallback()": "cnd.true()" + } + }, + "node3": { + "RESPONSE": "The band achieved commercial success with the release of five studio albums.", + "TRANSITIONS": { + "lbl.forward()": "cnd.regexp(r\"next\", re.IGNORECASE)", + "lbl.backward()": "cnd.regexp(r\"back\", re.IGNORECASE)", + "lbl.repeat()": "cnd.regexp(r\"repeat\", re.IGNORECASE)", + "lbl.to_fallback()": "cnd.true()" + } + }, + "node4": { + "RESPONSE": "That's all what I know", + "TRANSITIONS": { + "transitions.greeting_flow_n2_transition": "cnd.regexp(r\"next\", re.IGNORECASE)", + "transitions.high_priority_node_transition(\"greeting_flow\", \"node4\")": "cnd.regexp(r\"next time\", re.IGNORECASE)", + "lbl.to_fallback()": "cnd.true()" + } + } + } + }, + "actor": "!call\nname: Act\nargs:\n fallback_label:\n - !str global_flow\n - fallback_node\n script: script\n start_label:\n - !str global_flow\n - start_node\n" + }, + "flow": { + "TRANSITIONS": "!from dff.core.engine.core.keywords TRANSITIONS", + "RESPONSE": "!from dff.core.engine.core.keywords RESPONSE", + "PROCESSING": "!from dff.core.engine.core.keywords PROCESSING", + "LOCAL": "!from dff.core.engine.core.keywords LOCAL", + "cnd": "!import dff.core.engine.conditions", + "lbl": "!import dff.core.engine.labels", + "re": "!import re", + "add_prefix": "!from functions add_prefix", + "global_flow": { + "LOCAL": { + "PROCESSING": { + "'2'\n": "add_prefix(\"l2_local\")", + "'3'\n": "add_prefix(\"l3_local\")" + } + }, + "start_node": { + "RESPONSE": "!str", + "TRANSITIONS": { + "(\"music_flow\", \"node1\")": "cnd.regexp(r\"talk about music\")", + "(\"greeting_flow\", \"node1\")": 
"cnd.regexp(r\"hi|hello\", re.IGNORECASE)", + "fallback_node": "cnd.true()" + } + }, + "fallback_node": { + "RESPONSE": "Ooops", + "TRANSITIONS": { + "(\"music_flow\", \"node1\")": "cnd.regexp(r\"talk about music\")", + "(\"greeting_flow\", \"node1\")": "cnd.regexp(r\"hi|hello\", re.IGNORECASE)", + "lbl.previous()": "cnd.regexp(r\"previous\", re.IGNORECASE)", + "lbl.repeat()": "cnd.true()" + } + } + } + } + } + } + }, + "nodes": [ + { + "ref": [ + "main", + "script", + "GLOBAL" + ], + "local": false, + "id": [ + "GLOBAL" + ] + }, + { + "ref": [ + "main", + "script", + "greeting_flow", + "node1" + ], + "local": false, + "id": [ + "greeting_flow", + "node1" + ] + }, + { + "ref": [ + "main", + "script", + "music_flow", + "node1" + ], + "local": false, + "id": [ + "music_flow", + "node1" + ] + }, + { + "id": [ + "NONE" + ] + }, + { + "ref": [ + "main", + "script", + "global_flow", + "LOCAL" + ], + "local": true, + "id": [ + "global_flow", + "dff.core.engine.core.keywords.LOCAL" + ] + }, + { + "ref": [ + "main", + "script", + "global_flow", + "start_node" + ], + "local": false, + "start_label": true, + "id": [ + "global_flow", + "start_node" + ] + }, + { + "ref": [ + "main", + "script", + "global_flow", + "fallback_node" + ], + "local": false, + "fallback_label": true, + "id": [ + "global_flow", + "fallback_node" + ] + }, + { + "ref": [ + "main", + "script", + "greeting_flow", + "node2" + ], + "local": false, + "id": [ + "greeting_flow", + "node2" + ] + }, + { + "ref": [ + "main", + "script", + "greeting_flow", + "node3" + ], + "local": false, + "id": [ + "greeting_flow", + "node3" + ] + }, + { + "ref": [ + "main", + "script", + "greeting_flow", + "node4" + ], + "local": false, + "id": [ + "greeting_flow", + "node4" + ] + }, + { + "ref": [ + "main", + "script", + "music_flow", + "node2" + ], + "local": false, + "id": [ + "music_flow", + "node2" + ] + }, + { + "ref": [ + "main", + "script", + "music_flow", + "node3" + ], + "local": false, + "id": [ + "music_flow", + 
"node3" + ] + }, + { + "ref": [ + "main", + "script", + "music_flow", + "node4" + ], + "local": false, + "id": [ + "music_flow", + "node4" + ] + } + ], + "links": [ + { + "label_ref": [ + "main", + "script", + "GLOBAL", + "TRANSITIONS" + ], + "label": "(\"greeting_flow\", \"node1\", 1.1)", + "condition_ref": [ + "main", + "script", + "GLOBAL", + "TRANSITIONS", + "(\"greeting_flow\", \"node1\", 1.1)" + ], + "condition": "cnd.regexp(r\"\\b(hi|hello)\\b\", re.I)", + "source": [ + "GLOBAL" + ], + "target": [ + "greeting_flow", + "node1" + ], + "key": 0 + }, + { + "label_ref": [ + "main", + "script", + "GLOBAL", + "TRANSITIONS" + ], + "label": "(\"music_flow\", \"node1\", 1.1)", + "condition_ref": [ + "main", + "script", + "GLOBAL", + "TRANSITIONS", + "(\"music_flow\", \"node1\", 1.1)" + ], + "condition": "cnd.regexp(r\"talk about music\")", + "source": [ + "GLOBAL" + ], + "target": [ + "music_flow", + "node1" + ], + "key": 0 + }, + { + "label_ref": [ + "main", + "script", + "GLOBAL", + "TRANSITIONS" + ], + "label": "lbl.to_fallback(0.1)", + "condition_ref": [ + "main", + "script", + "GLOBAL", + "TRANSITIONS", + "lbl.to_fallback(0.1)" + ], + "condition": "cnd.true()", + "source": [ + "GLOBAL" + ], + "target": [ + "NONE" + ], + "key": 0 + }, + { + "label_ref": [ + "main", + "script", + "GLOBAL", + "TRANSITIONS" + ], + "label": "lbl.forward()", + "condition_ref": [ + "main", + "script", + "GLOBAL", + "TRANSITIONS", + "lbl.forward()" + ], + "condition": "cnd.all([cnd.regexp(r\"next\\b\"),cnd.has_last_labels(labels=[(\"music_flow\", i) for i in [\"node2\", \"node3\"]]),])", + "source": [ + "GLOBAL" + ], + "target": [ + "NONE" + ], + "key": 1 + }, + { + "label_ref": [ + "main", + "script", + "GLOBAL", + "TRANSITIONS" + ], + "label": "lbl.repeat(0.2)", + "condition_ref": [ + "main", + "script", + "GLOBAL", + "TRANSITIONS", + "lbl.repeat(0.2)" + ], + "condition": "cnd.all([cnd.regexp(r\"repeat\", re.I),cnd.negation(cnd.has_last_labels(flow_labels=[\"global_flow\"])),])", + 
"source": [ + "GLOBAL" + ], + "target": [ + "NONE" + ], + "key": 2 + }, + { + "label_ref": [ + "main", + "script", + "greeting_flow", + "node1", + "TRANSITIONS" + ], + "label": "(\"global_flow\", \"fallback_node\", 0.1)", + "condition_ref": [ + "main", + "script", + "greeting_flow", + "node1", + "TRANSITIONS", + "(\"global_flow\", \"fallback_node\", 0.1)" + ], + "condition": "cnd.true()", + "source": [ + "greeting_flow", + "node1" + ], + "target": [ + "global_flow", + "fallback_node" + ], + "key": 0 + }, + { + "label_ref": [ + "main", + "script", + "greeting_flow", + "node1", + "TRANSITIONS" + ], + "label": "node2", + "condition_ref": [ + "main", + "script", + "greeting_flow", + "node1", + "TRANSITIONS", + "node2" + ], + "condition": "cnd.regexp(r\"how are you\")", + "source": [ + "greeting_flow", + "node1" + ], + "target": [ + "NONE" + ], + "key": 0 + }, + { + "label_ref": [ + "main", + "script", + "music_flow", + "node1", + "TRANSITIONS" + ], + "label": "lbl.forward()", + "condition_ref": [ + "main", + "script", + "music_flow", + "node1", + "TRANSITIONS", + "lbl.forward()" + ], + "condition": "cnd.regexp(r\"yes|yep|ok\", re.IGNORECASE)", + "source": [ + "music_flow", + "node1" + ], + "target": [ + "NONE" + ], + "key": 0 + }, + { + "label_ref": [ + "main", + "script", + "music_flow", + "node1", + "TRANSITIONS" + ], + "label": "lbl.to_fallback()", + "condition_ref": [ + "main", + "script", + "music_flow", + "node1", + "TRANSITIONS", + "lbl.to_fallback()" + ], + "condition": "cnd.true()", + "source": [ + "music_flow", + "node1" + ], + "target": [ + "NONE" + ], + "key": 1 + }, + { + "label_ref": [ + "main", + "script", + "global_flow", + "start_node", + "TRANSITIONS" + ], + "label": "(\"music_flow\", \"node1\")", + "condition_ref": [ + "main", + "script", + "global_flow", + "start_node", + "TRANSITIONS", + "(\"music_flow\", \"node1\")" + ], + "condition": "cnd.regexp(r\"talk about music\")", + "source": [ + "global_flow", + "start_node" + ], + "target": [ + 
"music_flow", + "node1" + ], + "key": 0 + }, + { + "label_ref": [ + "main", + "script", + "global_flow", + "start_node", + "TRANSITIONS" + ], + "label": "(\"greeting_flow\", \"node1\")", + "condition_ref": [ + "main", + "script", + "global_flow", + "start_node", + "TRANSITIONS", + "(\"greeting_flow\", \"node1\")" + ], + "condition": "cnd.regexp(r\"hi|hello\", re.IGNORECASE)", + "source": [ + "global_flow", + "start_node" + ], + "target": [ + "greeting_flow", + "node1" + ], + "key": 0 + }, + { + "label_ref": [ + "main", + "script", + "global_flow", + "start_node", + "TRANSITIONS" + ], + "label": "fallback_node", + "condition_ref": [ + "main", + "script", + "global_flow", + "start_node", + "TRANSITIONS", + "fallback_node" + ], + "condition": "cnd.true()", + "source": [ + "global_flow", + "start_node" + ], + "target": [ + "NONE" + ], + "key": 0 + }, + { + "label_ref": [ + "main", + "script", + "global_flow", + "fallback_node", + "TRANSITIONS" + ], + "label": "(\"music_flow\", \"node1\")", + "condition_ref": [ + "main", + "script", + "global_flow", + "fallback_node", + "TRANSITIONS", + "(\"music_flow\", \"node1\")" + ], + "condition": "cnd.regexp(r\"talk about music\")", + "source": [ + "global_flow", + "fallback_node" + ], + "target": [ + "music_flow", + "node1" + ], + "key": 0 + }, + { + "label_ref": [ + "main", + "script", + "global_flow", + "fallback_node", + "TRANSITIONS" + ], + "label": "(\"greeting_flow\", \"node1\")", + "condition_ref": [ + "main", + "script", + "global_flow", + "fallback_node", + "TRANSITIONS", + "(\"greeting_flow\", \"node1\")" + ], + "condition": "cnd.regexp(r\"hi|hello\", re.IGNORECASE)", + "source": [ + "global_flow", + "fallback_node" + ], + "target": [ + "greeting_flow", + "node1" + ], + "key": 0 + }, + { + "label_ref": [ + "main", + "script", + "global_flow", + "fallback_node", + "TRANSITIONS" + ], + "label": "lbl.previous()", + "condition_ref": [ + "main", + "script", + "global_flow", + "fallback_node", + "TRANSITIONS", + 
"lbl.previous()" + ], + "condition": "cnd.regexp(r\"previous\", re.IGNORECASE)", + "source": [ + "global_flow", + "fallback_node" + ], + "target": [ + "NONE" + ], + "key": 0 + }, + { + "label_ref": [ + "main", + "script", + "global_flow", + "fallback_node", + "TRANSITIONS" + ], + "label": "lbl.repeat()", + "condition_ref": [ + "main", + "script", + "global_flow", + "fallback_node", + "TRANSITIONS", + "lbl.repeat()" + ], + "condition": "cnd.true()", + "source": [ + "global_flow", + "fallback_node" + ], + "target": [ + "NONE" + ], + "key": 1 + }, + { + "label_ref": [ + "main", + "script", + "greeting_flow", + "node2", + "TRANSITIONS" + ], + "label": "lbl.to_fallback(0.1)", + "condition_ref": [ + "main", + "script", + "greeting_flow", + "node2", + "TRANSITIONS", + "lbl.to_fallback(0.1)" + ], + "condition": "cnd.true()", + "source": [ + "greeting_flow", + "node2" + ], + "target": [ + "NONE" + ], + "key": 0 + }, + { + "label_ref": [ + "main", + "script", + "greeting_flow", + "node2", + "TRANSITIONS" + ], + "label": "lbl.forward(0.5)", + "condition_ref": [ + "main", + "script", + "greeting_flow", + "node2", + "TRANSITIONS", + "lbl.forward(0.5)" + ], + "condition": "cnd.regexp(r\"talk about\")", + "source": [ + "greeting_flow", + "node2" + ], + "target": [ + "NONE" + ], + "key": 1 + }, + { + "label_ref": [ + "main", + "script", + "greeting_flow", + "node2", + "TRANSITIONS" + ], + "label": "lbl.previous()", + "condition_ref": [ + "main", + "script", + "greeting_flow", + "node2", + "TRANSITIONS", + "lbl.previous()" + ], + "condition": "cnd.regexp(r\"previous\", re.IGNORECASE)", + "source": [ + "greeting_flow", + "node2" + ], + "target": [ + "NONE" + ], + "key": 2 + }, + { + "label_ref": [ + "main", + "script", + "greeting_flow", + "node2", + "TRANSITIONS" + ], + "label": "(\"music_flow\", \"node1\")", + "condition_ref": [ + "main", + "script", + "greeting_flow", + "node2", + "TRANSITIONS", + "(\"music_flow\", \"node1\")" + ], + "condition": "cnd.regexp(r\"talk about 
music\")", + "source": [ + "greeting_flow", + "node2" + ], + "target": [ + "music_flow", + "node1" + ], + "key": 0 + }, + { + "label_ref": [ + "main", + "script", + "greeting_flow", + "node3", + "TRANSITIONS" + ], + "label": "lbl.forward()", + "condition_ref": [ + "main", + "script", + "greeting_flow", + "node3", + "TRANSITIONS", + "lbl.forward()" + ], + "condition": "cnd.regexp(r\"bye\")", + "source": [ + "greeting_flow", + "node3" + ], + "target": [ + "NONE" + ], + "key": 0 + }, + { + "label_ref": [ + "main", + "script", + "greeting_flow", + "node4", + "TRANSITIONS" + ], + "label": "node1", + "condition_ref": [ + "main", + "script", + "greeting_flow", + "node4", + "TRANSITIONS", + "node1" + ], + "condition": "cnd.regexp(r\"hi|hello\", re.IGNORECASE)", + "source": [ + "greeting_flow", + "node4" + ], + "target": [ + "NONE" + ], + "key": 0 + }, + { + "label_ref": [ + "main", + "script", + "greeting_flow", + "node4", + "TRANSITIONS" + ], + "label": "lbl.to_fallback()", + "condition_ref": [ + "main", + "script", + "greeting_flow", + "node4", + "TRANSITIONS", + "lbl.to_fallback()" + ], + "condition": "cnd.true()", + "source": [ + "greeting_flow", + "node4" + ], + "target": [ + "NONE" + ], + "key": 1 + }, + { + "label_ref": [ + "main", + "script", + "music_flow", + "node2", + "TRANSITIONS" + ], + "label": "lbl.forward()", + "condition_ref": [ + "main", + "script", + "music_flow", + "node2", + "TRANSITIONS", + "lbl.forward()" + ], + "condition": "cnd.regexp(r\"next\", re.IGNORECASE)", + "source": [ + "music_flow", + "node2" + ], + "target": [ + "NONE" + ], + "key": 0 + }, + { + "label_ref": [ + "main", + "script", + "music_flow", + "node2", + "TRANSITIONS" + ], + "label": "lbl.repeat()", + "condition_ref": [ + "main", + "script", + "music_flow", + "node2", + "TRANSITIONS", + "lbl.repeat()" + ], + "condition": "cnd.regexp(r\"repeat\", re.IGNORECASE)", + "source": [ + "music_flow", + "node2" + ], + "target": [ + "NONE" + ], + "key": 1 + }, + { + "label_ref": [ + "main", + 
"script", + "music_flow", + "node2", + "TRANSITIONS" + ], + "label": "lbl.to_fallback()", + "condition_ref": [ + "main", + "script", + "music_flow", + "node2", + "TRANSITIONS", + "lbl.to_fallback()" + ], + "condition": "cnd.true()", + "source": [ + "music_flow", + "node2" + ], + "target": [ + "NONE" + ], + "key": 2 + }, + { + "label_ref": [ + "main", + "script", + "music_flow", + "node3", + "TRANSITIONS" + ], + "label": "lbl.forward()", + "condition_ref": [ + "main", + "script", + "music_flow", + "node3", + "TRANSITIONS", + "lbl.forward()" + ], + "condition": "cnd.regexp(r\"next\", re.IGNORECASE)", + "source": [ + "music_flow", + "node3" + ], + "target": [ + "NONE" + ], + "key": 0 + }, + { + "label_ref": [ + "main", + "script", + "music_flow", + "node3", + "TRANSITIONS" + ], + "label": "lbl.backward()", + "condition_ref": [ + "main", + "script", + "music_flow", + "node3", + "TRANSITIONS", + "lbl.backward()" + ], + "condition": "cnd.regexp(r\"back\", re.IGNORECASE)", + "source": [ + "music_flow", + "node3" + ], + "target": [ + "NONE" + ], + "key": 1 + }, + { + "label_ref": [ + "main", + "script", + "music_flow", + "node3", + "TRANSITIONS" + ], + "label": "lbl.repeat()", + "condition_ref": [ + "main", + "script", + "music_flow", + "node3", + "TRANSITIONS", + "lbl.repeat()" + ], + "condition": "cnd.regexp(r\"repeat\", re.IGNORECASE)", + "source": [ + "music_flow", + "node3" + ], + "target": [ + "NONE" + ], + "key": 2 + }, + { + "label_ref": [ + "main", + "script", + "music_flow", + "node3", + "TRANSITIONS" + ], + "label": "lbl.to_fallback()", + "condition_ref": [ + "main", + "script", + "music_flow", + "node3", + "TRANSITIONS", + "lbl.to_fallback()" + ], + "condition": "cnd.true()", + "source": [ + "music_flow", + "node3" + ], + "target": [ + "NONE" + ], + "key": 3 + }, + { + "label_ref": [ + "main", + "script", + "music_flow", + "node4", + "TRANSITIONS" + ], + "label": "transitions.greeting_flow_n2_transition", + "condition_ref": [ + "main", + "script", + 
"music_flow", + "node4", + "TRANSITIONS", + "transitions.greeting_flow_n2_transition" + ], + "condition": "cnd.regexp(r\"next\", re.IGNORECASE)", + "source": [ + "music_flow", + "node4" + ], + "target": [ + "NONE" + ], + "key": 0 + }, + { + "label_ref": [ + "main", + "script", + "music_flow", + "node4", + "TRANSITIONS" + ], + "label": "transitions.high_priority_node_transition(\"greeting_flow\", \"node4\")", + "condition_ref": [ + "main", + "script", + "music_flow", + "node4", + "TRANSITIONS", + "transitions.high_priority_node_transition(\"greeting_flow\", \"node4\")" + ], + "condition": "cnd.regexp(r\"next time\", re.IGNORECASE)", + "source": [ + "music_flow", + "node4" + ], + "target": [ + "NONE" + ], + "key": 1 + }, + { + "label_ref": [ + "main", + "script", + "music_flow", + "node4", + "TRANSITIONS" + ], + "label": "lbl.to_fallback()", + "condition_ref": [ + "main", + "script", + "music_flow", + "node4", + "TRANSITIONS", + "lbl.to_fallback()" + ], + "condition": "cnd.true()", + "source": [ + "music_flow", + "node4" + ], + "target": [ + "NONE" + ], + "key": 2 + } + ] +} \ No newline at end of file diff --git a/tests/parser/TEST_CASES/test_graph2py/complex_tests/test_1/python_files/flow.py b/tests/parser/TEST_CASES/test_graph2py/complex_tests/test_1/python_files/flow.py new file mode 100644 index 000000000..32f691479 --- /dev/null +++ b/tests/parser/TEST_CASES/test_graph2py/complex_tests/test_1/python_files/flow.py @@ -0,0 +1,29 @@ +from dff.core.engine.core.keywords import TRANSITIONS as TRANSITIONS +from dff.core.engine.core.keywords import RESPONSE as RESPONSE +from dff.core.engine.core.keywords import PROCESSING as PROCESSING +from dff.core.engine.core.keywords import LOCAL as LOCAL +import dff.core.engine.conditions as cnd +import dff.core.engine.labels as lbl +import re as re +from functions import add_prefix as add_prefix + +global_flow = { + LOCAL: {PROCESSING: {2: add_prefix("l2_local"), 3: add_prefix("l3_local")}}, + "start_node": { + RESPONSE: "", + 
TRANSITIONS: { + ("music_flow", "node1"): cnd.regexp(r"talk about music"), + ("greeting_flow", "node1"): cnd.regexp(r"hi|hello", re.IGNORECASE), + "fallback_node": cnd.true(), + }, + }, + "fallback_node": { + RESPONSE: "Ooops", + TRANSITIONS: { + ("music_flow", "node1"): cnd.regexp(r"talk about music"), + ("greeting_flow", "node1"): cnd.regexp(r"hi|hello", re.IGNORECASE), + lbl.previous(): cnd.regexp(r"previous", re.IGNORECASE), + lbl.repeat(): cnd.true(), + }, + }, +} diff --git a/tests/parser/TEST_CASES/test_graph2py/complex_tests/test_1/python_files/main.py b/tests/parser/TEST_CASES/test_graph2py/complex_tests/test_1/python_files/main.py new file mode 100644 index 000000000..268995796 --- /dev/null +++ b/tests/parser/TEST_CASES/test_graph2py/complex_tests/test_1/python_files/main.py @@ -0,0 +1,133 @@ +from dff.core.engine.core.keywords import TRANSITIONS as TRANSITIONS +from dff.core.engine.core.keywords import RESPONSE as RESPONSE +from dff.core.engine.core.keywords import PROCESSING as PROCESSING +from dff.core.engine.core.keywords import GLOBAL as GLOBAL +from dff.core.engine.core.keywords import MISC as MISC +from dff.core.engine.core.keywords import LOCAL as LOCAL +import dff.core.engine.conditions as cnd +import dff.core.engine.labels as lbl +from dff.core.engine.core import Actor as Act +from dff.core.engine.core import Context as Context +import dff.core.engine.responses as rsp +from functions import add_prefix as add_prefix +import typing as tp +import re as re +import transitions as transitions +from flow import global_flow as global_flow + +script = { + GLOBAL: { + TRANSITIONS: { + ("greeting_flow", "node1", 1.1): cnd.regexp(r"\b(hi|hello)\b", re.I), + ("music_flow", "node1", 1.1): cnd.regexp(r"talk about music"), + lbl.to_fallback(0.1): cnd.true(), + lbl.forward(): cnd.all( + [ + cnd.regexp(r"next\b"), + cnd.has_last_labels( + labels=[("music_flow", i) for i in ["node2", "node3"]] + ), + ] + ), + lbl.repeat(0.2): cnd.all( + [ + cnd.regexp(r"repeat", 
re.I), + cnd.negation(cnd.has_last_labels(flow_labels=["global_flow"])), + ] + ), + }, + PROCESSING: {1: add_prefix("l1_global"), 2: add_prefix("l2_global")}, + MISC: {"var1": "global_data", "var2": "global_data", "var3": "global_data"}, + RESPONSE: "", + }, + "global_flow": { + LOCAL: {PROCESSING: {2: add_prefix("l2_local"), 3: add_prefix("l3_local")}}, + "start_node": { + RESPONSE: "", + TRANSITIONS: { + ("music_flow", "node1"): cnd.regexp(r"talk about music"), + ("greeting_flow", "node1"): cnd.regexp(r"hi|hello", re.IGNORECASE), + "fallback_node": cnd.true(), + }, + }, + "fallback_node": { + RESPONSE: "Ooops", + TRANSITIONS: { + ("music_flow", "node1"): cnd.regexp(r"talk about music"), + ("greeting_flow", "node1"): cnd.regexp(r"hi|hello", re.IGNORECASE), + lbl.previous(): cnd.regexp(r"previous", re.IGNORECASE), + lbl.repeat(): cnd.true(), + }, + }, + }, + "greeting_flow": { + "node1": { + RESPONSE: rsp.choice(["Hi, what is up?", "Hello, how are you?"]), + TRANSITIONS: { + ("global_flow", "fallback_node", 0.1): cnd.true(), + "node2": cnd.regexp(r"how are you"), + }, + MISC: {"var3": "info_of_step_1"}, + }, + "node2": { + RESPONSE: "Good. What do you want to talk about?", + TRANSITIONS: { + lbl.to_fallback(0.1): cnd.true(), + lbl.forward(0.5): cnd.regexp(r"talk about"), + ("music_flow", "node1"): cnd.regexp(r"talk about music"), + lbl.previous(): cnd.regexp(r"previous", re.IGNORECASE), + }, + }, + "node3": {RESPONSE: foo, TRANSITIONS: {lbl.forward(): cnd.regexp(r"bye")}}, + "node4": { + RESPONSE: bar("bye"), + TRANSITIONS: { + "node1": cnd.regexp(r"hi|hello", re.IGNORECASE), + lbl.to_fallback(): cnd.true(), + }, + }, + }, + "music_flow": { + "node1": { + RESPONSE: "I love `System of a Down` group, would you like to tell about it? 
", + TRANSITIONS: { + lbl.forward(): cnd.regexp(r"yes|yep|ok", re.IGNORECASE), + lbl.to_fallback(): cnd.true(), + }, + }, + "node2": { + RESPONSE: "System of a Down is an Armenian-American heavy metal band formed in in 1994.", + TRANSITIONS: { + lbl.forward(): cnd.regexp(r"next", re.IGNORECASE), + lbl.repeat(): cnd.regexp(r"repeat", re.IGNORECASE), + lbl.to_fallback(): cnd.true(), + }, + }, + "node3": { + RESPONSE: "The band achieved commercial success with the release of five studio albums.", + TRANSITIONS: { + lbl.forward(): cnd.regexp(r"next", re.IGNORECASE), + lbl.backward(): cnd.regexp(r"back", re.IGNORECASE), + lbl.repeat(): cnd.regexp(r"repeat", re.IGNORECASE), + lbl.to_fallback(): cnd.true(), + }, + }, + "node4": { + RESPONSE: "That's all what I know", + TRANSITIONS: { + transitions.greeting_flow_n2_transition: cnd.regexp( + r"next", re.IGNORECASE + ), + transitions.high_priority_node_transition( + "greeting_flow", "node4" + ): cnd.regexp(r"next time", re.IGNORECASE), + lbl.to_fallback(): cnd.true(), + }, + }, + }, +} +actor = Act( + fallback_label=("global_flow", "fallback_node"), + script=script, + start_label=("global_flow", "start_node"), +) diff --git a/tests/parser/TEST_CASES/test_py2graph/complex_tests/test_1/graph_files/graph.json b/tests/parser/TEST_CASES/test_py2graph/complex_tests/test_1/graph_files/graph.json new file mode 100644 index 000000000..229823587 --- /dev/null +++ b/tests/parser/TEST_CASES/test_py2graph/complex_tests/test_1/graph_files/graph.json @@ -0,0 +1,342 @@ +{ + "directed": true, + "multigraph": true, + "graph": { + "script": { + "namespaces": { + "main": { + "module": "!import module", + "TRANSITIONS": "!from dff.core.engine.core.keywords TRANSITIONS", + "GLOBAL": "!from dff.core.engine.core.keywords GLOBAL", + "Actor": "!from dff.core.engine.core.actor Actor", + "glob": { + "glob": "GLOBAL" + }, + "flow": { + "flow": "flow" + }, + "act": "!call\nname: Actor\nargs:\n script:\n glob[\"glob\"]:\n TRANSITIONS:\n 
module.consts.there[module.node1.ints[module.node1.ints[3]]]: module.proxy.node1.ints[3]\n flow[\"flow\"]:\n node1: module.proxy.node1.node\n module.proxy.node1.d[2][3]: module.node1.node\n flow2:\n node1: module.proxy.node1.node\n start_label:\n - !str flow\n - node1\n" + }, + "module.__init__": { + "proxy": "!import module.proxy" + }, + "module.proxy": { + "node1": "!import module.node1" + }, + "module.node1": { + "tr": "!from dff.core.engine.core.keywords TRANSITIONS", + "consts": "!import module.consts", + "d": { + "'1'\n": "flow", + "'2'\n": { + "'3'\n": "node2" + } + }, + "ints": { + "'3'\n": "'3'\n" + }, + "here": { + "(\"flow2\", \"node1\")": "print(\"cond\")" + }, + "node": { + "tr": { + "(\"flow\", \"node1\")": "print(\"cnd\")", + "(d[1], d[2][ints[3]])": "consts.conds[1]", + "consts.there[ints[3]]": "here[consts.there[ints[3]]]" + } + } + }, + "module.consts": { + "conds": { + "'1'\n": "print(\"2\")" + }, + "there": { + "'3'\n": "(\"flow2\", \"node1\")" + } + } + } + } + }, + "nodes": [ + { + "ref": [ + "main", + "act", + "script", + "glob[\"glob\"]" + ], + "local": false, + "id": [ + "GLOBAL" + ] + }, + { + "ref": [ + "module.node1", + "node" + ], + "local": false, + "id": [ + "flow2", + "node1" + ] + }, + { + "ref": [ + "module.node1", + "node" + ], + "local": false, + "start_label": true, + "id": [ + "flow", + "node1" + ] + }, + { + "ref": [ + "module.node1", + "node" + ], + "local": false, + "id": [ + "flow", + "node2" + ] + } + ], + "links": [ + { + "label_ref": [ + "module.consts", + "there", + "3" + ], + "label": "(\"flow2\", \"node1\")", + "condition_ref": [ + "module.node1", + "ints", + "3" + ], + "condition": "3", + "source": [ + "GLOBAL" + ], + "target": [ + "flow2", + "node1" + ], + "key": 0 + }, + { + "label_ref": [ + "module.node1", + "node", + "tr" + ], + "label": "(\"flow\", \"node1\")", + "condition_ref": [ + "module.node1", + "node", + "tr", + "(\"flow\", \"node1\")" + ], + "condition": "print(\"cnd\")", + "source": [ + "flow2", + 
"node1" + ], + "target": [ + "flow", + "node1" + ], + "key": 0 + }, + { + "label_ref": [ + "module.node1", + "node", + "tr" + ], + "label": "(d[1], d[2][ints[3]])", + "condition_ref": [ + "module.consts", + "conds", + "1" + ], + "condition": "print(\"2\")", + "source": [ + "flow2", + "node1" + ], + "target": [ + "flow", + "node2" + ], + "key": 0 + }, + { + "label_ref": [ + "module.consts", + "there", + "3" + ], + "label": "(\"flow2\", \"node1\")", + "condition_ref": [ + "module.node1", + "here", + "(\"flow2\", \"node1\")" + ], + "condition": "print(\"cond\")", + "source": [ + "flow2", + "node1" + ], + "target": [ + "flow2", + "node1" + ], + "key": 0 + }, + { + "label_ref": [ + "module.node1", + "node", + "tr" + ], + "label": "(\"flow\", \"node1\")", + "condition_ref": [ + "module.node1", + "node", + "tr", + "(\"flow\", \"node1\")" + ], + "condition": "print(\"cnd\")", + "source": [ + "flow", + "node1" + ], + "target": [ + "flow", + "node1" + ], + "key": 0 + }, + { + "label_ref": [ + "module.node1", + "node", + "tr" + ], + "label": "(d[1], d[2][ints[3]])", + "condition_ref": [ + "module.consts", + "conds", + "1" + ], + "condition": "print(\"2\")", + "source": [ + "flow", + "node1" + ], + "target": [ + "flow", + "node2" + ], + "key": 0 + }, + { + "label_ref": [ + "module.consts", + "there", + "3" + ], + "label": "(\"flow2\", \"node1\")", + "condition_ref": [ + "module.node1", + "here", + "(\"flow2\", \"node1\")" + ], + "condition": "print(\"cond\")", + "source": [ + "flow", + "node1" + ], + "target": [ + "flow2", + "node1" + ], + "key": 0 + }, + { + "label_ref": [ + "module.node1", + "node", + "tr" + ], + "label": "(\"flow\", \"node1\")", + "condition_ref": [ + "module.node1", + "node", + "tr", + "(\"flow\", \"node1\")" + ], + "condition": "print(\"cnd\")", + "source": [ + "flow", + "node2" + ], + "target": [ + "flow", + "node1" + ], + "key": 0 + }, + { + "label_ref": [ + "module.node1", + "node", + "tr" + ], + "label": "(d[1], d[2][ints[3]])", + "condition_ref": [ + 
"module.consts", + "conds", + "1" + ], + "condition": "print(\"2\")", + "source": [ + "flow", + "node2" + ], + "target": [ + "flow", + "node2" + ], + "key": 0 + }, + { + "label_ref": [ + "module.consts", + "there", + "3" + ], + "label": "(\"flow2\", \"node1\")", + "condition_ref": [ + "module.node1", + "here", + "(\"flow2\", \"node1\")" + ], + "condition": "print(\"cond\")", + "source": [ + "flow", + "node2" + ], + "target": [ + "flow2", + "node1" + ], + "key": 0 + } + ] +} \ No newline at end of file diff --git a/tests/parser/TEST_CASES/test_py2graph/complex_tests/test_1/python_files/main.py b/tests/parser/TEST_CASES/test_py2graph/complex_tests/test_1/python_files/main.py new file mode 100644 index 000000000..89f31d0c2 --- /dev/null +++ b/tests/parser/TEST_CASES/test_py2graph/complex_tests/test_1/python_files/main.py @@ -0,0 +1,21 @@ +import module +from dff.core.engine.core.keywords import TRANSITIONS, GLOBAL +from dff.core.engine.core.actor import Actor + +glob = {"glob": GLOBAL} + +flow = {"flow": "flow"} + +act = Actor( + { + glob["glob"]: { + TRANSITIONS: {module.consts.there[module.node1.ints[module.node1.ints[3]]]: module.proxy.node1.ints[3]} + }, + flow["flow"]: { + "node1": module.proxy.node1.node, + module.proxy.node1.d[2][3]: module.node1.node, + }, + "flow2": {"node1": module.proxy.node1.node}, + }, + start_label=("flow", "node1"), +) diff --git a/tests/parser/TEST_CASES/test_py2graph/complex_tests/test_1/python_files/module/__init__.py b/tests/parser/TEST_CASES/test_py2graph/complex_tests/test_1/python_files/module/__init__.py new file mode 100644 index 000000000..f62115a6d --- /dev/null +++ b/tests/parser/TEST_CASES/test_py2graph/complex_tests/test_1/python_files/module/__init__.py @@ -0,0 +1 @@ +import proxy diff --git a/tests/parser/TEST_CASES/test_py2graph/complex_tests/test_1/python_files/module/consts.py b/tests/parser/TEST_CASES/test_py2graph/complex_tests/test_1/python_files/module/consts.py new file mode 100644 index 000000000..5d14ef76d --- 
/dev/null +++ b/tests/parser/TEST_CASES/test_py2graph/complex_tests/test_1/python_files/module/consts.py @@ -0,0 +1,3 @@ +conds = {1: print("2")} + +there = {3: ("flow2", "node1")} diff --git a/tests/parser/TEST_CASES/test_py2graph/complex_tests/test_1/python_files/module/node1.py b/tests/parser/TEST_CASES/test_py2graph/complex_tests/test_1/python_files/module/node1.py new file mode 100644 index 000000000..70955e5e9 --- /dev/null +++ b/tests/parser/TEST_CASES/test_py2graph/complex_tests/test_1/python_files/module/node1.py @@ -0,0 +1,21 @@ +from dff.core.engine.core.keywords import TRANSITIONS as tr +import consts + +d = { + 1: "flow", + 2: { + 3: "node2", + }, +} + +ints = {3: 3} + +here = {("flow2", "node1"): print("cond")} + +node = { + tr: { + ("flow", "node1"): print("cnd"), + (d[1], d[2][ints[3]]): consts.conds[1], + consts.there[ints[3]]: here[consts.there[ints[3]]], + } +} diff --git a/tests/parser/TEST_CASES/test_py2graph/complex_tests/test_1/python_files/module/proxy.py b/tests/parser/TEST_CASES/test_py2graph/complex_tests/test_1/python_files/module/proxy.py new file mode 100644 index 000000000..f7aca3613 --- /dev/null +++ b/tests/parser/TEST_CASES/test_py2graph/complex_tests/test_1/python_files/module/proxy.py @@ -0,0 +1 @@ +import node1 diff --git a/tests/parser/TEST_CASES/test_py2graph/complex_tests/test_2/graph_files/graph.json b/tests/parser/TEST_CASES/test_py2graph/complex_tests/test_2/graph_files/graph.json new file mode 100644 index 000000000..0f68ecd3c --- /dev/null +++ b/tests/parser/TEST_CASES/test_py2graph/complex_tests/test_2/graph_files/graph.json @@ -0,0 +1,1227 @@ +{ + "directed": true, + "multigraph": true, + "graph": { + "script": { + "namespaces": { + "main": { + "TRANSITIONS": "!from dff.core.engine.core.keywords TRANSITIONS", + "RESPONSE": "!from dff.core.engine.core.keywords RESPONSE", + "PROCESSING": "!from dff.core.engine.core.keywords PROCESSING", + "GLOBAL": "!from dff.core.engine.core.keywords GLOBAL", + "MISC": "!from 
dff.core.engine.core.keywords MISC", + "LOCAL": "!from dff.core.engine.core.keywords LOCAL", + "cnd": "!import dff.core.engine.conditions", + "lbl": "!import dff.core.engine.labels", + "Act": "!from dff.core.engine.core Actor", + "Context": "!from dff.core.engine.core Context", + "rsp": "!import dff.core.engine.responses", + "add_prefix": "!from functions add_prefix", + "tp": "!import typing", + "re": "!import re", + "transitions": "!import transitions", + "global_flow": "!from flow global_flow", + "script": { + "GLOBAL": { + "TRANSITIONS": { + "(\"greeting_flow\", \"node1\", 1.1)": "cnd.regexp(r\"\\b(hi|hello)\\b\", re.I)", + "(\"music_flow\", \"node1\", 1.1)": "cnd.regexp(r\"talk about music\")", + "lbl.to_fallback(0.1)": "cnd.true()", + "lbl.forward()": "cnd.all([cnd.regexp(r\"next\\b\"),cnd.has_last_labels(labels=[(\"music_flow\", i) for i in [\"node2\", \"node3\"]]),])", + "lbl.repeat(0.2)": "cnd.all([cnd.regexp(r\"repeat\", re.I),cnd.negation(cnd.has_last_labels(flow_labels=[\"global_flow\"])),])" + }, + "PROCESSING": { + "'1'\n": "add_prefix(\"l1_global\")", + "'2'\n": "add_prefix(\"l2_global\")" + }, + "MISC": { + "var1": "global_data", + "var2": "global_data", + "var3": "global_data" + }, + "RESPONSE": "!str" + }, + "!str global_flow": { + "LOCAL": { + "PROCESSING": { + "'2'\n": "add_prefix(\"l2_local\")", + "'3'\n": "add_prefix(\"l3_local\")" + } + }, + "start_node": { + "RESPONSE": "!str", + "TRANSITIONS": { + "(\"music_flow\", \"node1\")": "cnd.regexp(r\"talk about music\")", + "(\"greeting_flow\", \"node1\")": "cnd.regexp(r\"hi|hello\", re.IGNORECASE)", + "fallback_node": "cnd.true()" + } + }, + "fallback_node": { + "RESPONSE": "Ooops", + "TRANSITIONS": { + "(\"music_flow\", \"node1\")": "cnd.regexp(r\"talk about music\")", + "(\"greeting_flow\", \"node1\")": "cnd.regexp(r\"hi|hello\", re.IGNORECASE)", + "lbl.previous()": "cnd.regexp(r\"previous\", re.IGNORECASE)", + "lbl.repeat()": "cnd.true()" + } + } + }, + "greeting_flow": { + "node1": { + 
"RESPONSE": "rsp.choice([\"Hi, what is up?\", \"Hello, how are you?\"])", + "TRANSITIONS": { + "(\"global_flow\", \"fallback_node\", 0.1)": "cnd.true()", + "node2": "cnd.regexp(r\"how are you\")" + }, + "MISC": { + "var3": "info_of_step_1" + } + }, + "node2": { + "RESPONSE": "Good. What do you want to talk about?", + "TRANSITIONS": { + "lbl.to_fallback(0.1)": "cnd.true()", + "lbl.forward(0.5)": "cnd.regexp(r\"talk about\")", + "(\"music_flow\", \"node1\")": "cnd.regexp(r\"talk about music\")", + "lbl.previous()": "cnd.regexp(r\"previous\", re.IGNORECASE)" + } + }, + "node3": { + "RESPONSE": "!py foo", + "TRANSITIONS": { + "lbl.forward()": "cnd.regexp(r\"bye\")" + } + }, + "node4": { + "RESPONSE": "!py bar(\"bye\")", + "TRANSITIONS": { + "node1": "cnd.regexp(r\"hi|hello\", re.IGNORECASE)", + "lbl.to_fallback()": "cnd.true()" + } + } + }, + "music_flow": { + "node1": { + "RESPONSE": "'I love `System of a Down` group, would you like to tell about it? '\n", + "TRANSITIONS": { + "lbl.forward()": "cnd.regexp(r\"yes|yep|ok\", re.IGNORECASE)", + "lbl.to_fallback()": "cnd.true()" + } + }, + "node2": { + "RESPONSE": "System of a Down is an Armenian-American heavy metal band formed in in 1994.", + "TRANSITIONS": { + "lbl.forward()": "cnd.regexp(r\"next\", re.IGNORECASE)", + "lbl.repeat()": "cnd.regexp(r\"repeat\", re.IGNORECASE)", + "lbl.to_fallback()": "cnd.true()" + } + }, + "node3": { + "RESPONSE": "The band achieved commercial success with the release of five studio albums.", + "TRANSITIONS": { + "lbl.forward()": "cnd.regexp(r\"next\", re.IGNORECASE)", + "lbl.backward()": "cnd.regexp(r\"back\", re.IGNORECASE)", + "lbl.repeat()": "cnd.regexp(r\"repeat\", re.IGNORECASE)", + "lbl.to_fallback()": "cnd.true()" + } + }, + "node4": { + "RESPONSE": "That's all what I know", + "TRANSITIONS": { + "transitions.greeting_flow_n2_transition": "cnd.regexp(r\"next\", re.IGNORECASE)", + "transitions.high_priority_node_transition(\"greeting_flow\", \"node4\")": "cnd.regexp(r\"next time\", 
re.IGNORECASE)", + "lbl.to_fallback()": "cnd.true()" + } + } + } + }, + "actor": "!call\nname: Act\nargs:\n fallback_label:\n - !str global_flow\n - fallback_node\n script: script\n start_label:\n - !str global_flow\n - start_node\n" + }, + "flow": { + "TRANSITIONS": "!from dff.core.engine.core.keywords TRANSITIONS", + "RESPONSE": "!from dff.core.engine.core.keywords RESPONSE", + "PROCESSING": "!from dff.core.engine.core.keywords PROCESSING", + "LOCAL": "!from dff.core.engine.core.keywords LOCAL", + "cnd": "!import dff.core.engine.conditions", + "lbl": "!import dff.core.engine.labels", + "re": "!import re", + "add_prefix": "!from functions add_prefix", + "global_flow": { + "LOCAL": { + "PROCESSING": { + "'2'\n": "add_prefix(\"l2_local\")", + "'3'\n": "add_prefix(\"l3_local\")" + } + }, + "start_node": { + "RESPONSE": "!str", + "TRANSITIONS": { + "(\"music_flow\", \"node1\")": "cnd.regexp(r\"talk about music\")", + "(\"greeting_flow\", \"node1\")": "cnd.regexp(r\"hi|hello\", re.IGNORECASE)", + "fallback_node": "cnd.true()" + } + }, + "fallback_node": { + "RESPONSE": "Ooops", + "TRANSITIONS": { + "(\"music_flow\", \"node1\")": "cnd.regexp(r\"talk about music\")", + "(\"greeting_flow\", \"node1\")": "cnd.regexp(r\"hi|hello\", re.IGNORECASE)", + "lbl.previous()": "cnd.regexp(r\"previous\", re.IGNORECASE)", + "lbl.repeat()": "cnd.true()" + } + } + } + } + } + } + }, + "nodes": [ + { + "ref": [ + "main", + "script", + "GLOBAL" + ], + "local": false, + "id": [ + "GLOBAL" + ] + }, + { + "ref": [ + "main", + "script", + "greeting_flow", + "node1" + ], + "local": false, + "id": [ + "greeting_flow", + "node1" + ] + }, + { + "ref": [ + "main", + "script", + "music_flow", + "node1" + ], + "local": false, + "id": [ + "music_flow", + "node1" + ] + }, + { + "id": [ + "NONE" + ] + }, + { + "ref": [ + "main", + "script", + "global_flow", + "LOCAL" + ], + "local": true, + "id": [ + "global_flow", + "dff.core.engine.core.keywords.LOCAL" + ] + }, + { + "ref": [ + "main", + "script", + 
"global_flow", + "start_node" + ], + "local": false, + "start_label": true, + "id": [ + "global_flow", + "start_node" + ] + }, + { + "ref": [ + "main", + "script", + "global_flow", + "fallback_node" + ], + "local": false, + "fallback_label": true, + "id": [ + "global_flow", + "fallback_node" + ] + }, + { + "ref": [ + "main", + "script", + "greeting_flow", + "node2" + ], + "local": false, + "id": [ + "greeting_flow", + "node2" + ] + }, + { + "ref": [ + "main", + "script", + "greeting_flow", + "node3" + ], + "local": false, + "id": [ + "greeting_flow", + "node3" + ] + }, + { + "ref": [ + "main", + "script", + "greeting_flow", + "node4" + ], + "local": false, + "id": [ + "greeting_flow", + "node4" + ] + }, + { + "ref": [ + "main", + "script", + "music_flow", + "node2" + ], + "local": false, + "id": [ + "music_flow", + "node2" + ] + }, + { + "ref": [ + "main", + "script", + "music_flow", + "node3" + ], + "local": false, + "id": [ + "music_flow", + "node3" + ] + }, + { + "ref": [ + "main", + "script", + "music_flow", + "node4" + ], + "local": false, + "id": [ + "music_flow", + "node4" + ] + } + ], + "links": [ + { + "label_ref": [ + "main", + "script", + "GLOBAL", + "TRANSITIONS" + ], + "label": "(\"greeting_flow\", \"node1\", 1.1)", + "condition_ref": [ + "main", + "script", + "GLOBAL", + "TRANSITIONS", + "(\"greeting_flow\", \"node1\", 1.1)" + ], + "condition": "cnd.regexp(r\"\\b(hi|hello)\\b\", re.I)", + "source": [ + "GLOBAL" + ], + "target": [ + "greeting_flow", + "node1" + ], + "key": 0 + }, + { + "label_ref": [ + "main", + "script", + "GLOBAL", + "TRANSITIONS" + ], + "label": "(\"music_flow\", \"node1\", 1.1)", + "condition_ref": [ + "main", + "script", + "GLOBAL", + "TRANSITIONS", + "(\"music_flow\", \"node1\", 1.1)" + ], + "condition": "cnd.regexp(r\"talk about music\")", + "source": [ + "GLOBAL" + ], + "target": [ + "music_flow", + "node1" + ], + "key": 0 + }, + { + "label_ref": [ + "main", + "script", + "GLOBAL", + "TRANSITIONS" + ], + "label": 
"lbl.to_fallback(0.1)", + "condition_ref": [ + "main", + "script", + "GLOBAL", + "TRANSITIONS", + "lbl.to_fallback(0.1)" + ], + "condition": "cnd.true()", + "source": [ + "GLOBAL" + ], + "target": [ + "NONE" + ], + "key": 0 + }, + { + "label_ref": [ + "main", + "script", + "GLOBAL", + "TRANSITIONS" + ], + "label": "lbl.forward()", + "condition_ref": [ + "main", + "script", + "GLOBAL", + "TRANSITIONS", + "lbl.forward()" + ], + "condition": "cnd.all([cnd.regexp(r\"next\\b\"),cnd.has_last_labels(labels=[(\"music_flow\", i) for i in [\"node2\", \"node3\"]]),])", + "source": [ + "GLOBAL" + ], + "target": [ + "NONE" + ], + "key": 1 + }, + { + "label_ref": [ + "main", + "script", + "GLOBAL", + "TRANSITIONS" + ], + "label": "lbl.repeat(0.2)", + "condition_ref": [ + "main", + "script", + "GLOBAL", + "TRANSITIONS", + "lbl.repeat(0.2)" + ], + "condition": "cnd.all([cnd.regexp(r\"repeat\", re.I),cnd.negation(cnd.has_last_labels(flow_labels=[\"global_flow\"])),])", + "source": [ + "GLOBAL" + ], + "target": [ + "NONE" + ], + "key": 2 + }, + { + "label_ref": [ + "main", + "script", + "greeting_flow", + "node1", + "TRANSITIONS" + ], + "label": "(\"global_flow\", \"fallback_node\", 0.1)", + "condition_ref": [ + "main", + "script", + "greeting_flow", + "node1", + "TRANSITIONS", + "(\"global_flow\", \"fallback_node\", 0.1)" + ], + "condition": "cnd.true()", + "source": [ + "greeting_flow", + "node1" + ], + "target": [ + "global_flow", + "fallback_node" + ], + "key": 0 + }, + { + "label_ref": [ + "main", + "script", + "greeting_flow", + "node1", + "TRANSITIONS" + ], + "label": "node2", + "condition_ref": [ + "main", + "script", + "greeting_flow", + "node1", + "TRANSITIONS", + "node2" + ], + "condition": "cnd.regexp(r\"how are you\")", + "source": [ + "greeting_flow", + "node1" + ], + "target": [ + "NONE" + ], + "key": 0 + }, + { + "label_ref": [ + "main", + "script", + "music_flow", + "node1", + "TRANSITIONS" + ], + "label": "lbl.forward()", + "condition_ref": [ + "main", + "script", 
+ "music_flow", + "node1", + "TRANSITIONS", + "lbl.forward()" + ], + "condition": "cnd.regexp(r\"yes|yep|ok\", re.IGNORECASE)", + "source": [ + "music_flow", + "node1" + ], + "target": [ + "NONE" + ], + "key": 0 + }, + { + "label_ref": [ + "main", + "script", + "music_flow", + "node1", + "TRANSITIONS" + ], + "label": "lbl.to_fallback()", + "condition_ref": [ + "main", + "script", + "music_flow", + "node1", + "TRANSITIONS", + "lbl.to_fallback()" + ], + "condition": "cnd.true()", + "source": [ + "music_flow", + "node1" + ], + "target": [ + "NONE" + ], + "key": 1 + }, + { + "label_ref": [ + "main", + "script", + "global_flow", + "start_node", + "TRANSITIONS" + ], + "label": "(\"music_flow\", \"node1\")", + "condition_ref": [ + "main", + "script", + "global_flow", + "start_node", + "TRANSITIONS", + "(\"music_flow\", \"node1\")" + ], + "condition": "cnd.regexp(r\"talk about music\")", + "source": [ + "global_flow", + "start_node" + ], + "target": [ + "music_flow", + "node1" + ], + "key": 0 + }, + { + "label_ref": [ + "main", + "script", + "global_flow", + "start_node", + "TRANSITIONS" + ], + "label": "(\"greeting_flow\", \"node1\")", + "condition_ref": [ + "main", + "script", + "global_flow", + "start_node", + "TRANSITIONS", + "(\"greeting_flow\", \"node1\")" + ], + "condition": "cnd.regexp(r\"hi|hello\", re.IGNORECASE)", + "source": [ + "global_flow", + "start_node" + ], + "target": [ + "greeting_flow", + "node1" + ], + "key": 0 + }, + { + "label_ref": [ + "main", + "script", + "global_flow", + "start_node", + "TRANSITIONS" + ], + "label": "fallback_node", + "condition_ref": [ + "main", + "script", + "global_flow", + "start_node", + "TRANSITIONS", + "fallback_node" + ], + "condition": "cnd.true()", + "source": [ + "global_flow", + "start_node" + ], + "target": [ + "NONE" + ], + "key": 0 + }, + { + "label_ref": [ + "main", + "script", + "global_flow", + "fallback_node", + "TRANSITIONS" + ], + "label": "(\"music_flow\", \"node1\")", + "condition_ref": [ + "main", + 
"script", + "global_flow", + "fallback_node", + "TRANSITIONS", + "(\"music_flow\", \"node1\")" + ], + "condition": "cnd.regexp(r\"talk about music\")", + "source": [ + "global_flow", + "fallback_node" + ], + "target": [ + "music_flow", + "node1" + ], + "key": 0 + }, + { + "label_ref": [ + "main", + "script", + "global_flow", + "fallback_node", + "TRANSITIONS" + ], + "label": "(\"greeting_flow\", \"node1\")", + "condition_ref": [ + "main", + "script", + "global_flow", + "fallback_node", + "TRANSITIONS", + "(\"greeting_flow\", \"node1\")" + ], + "condition": "cnd.regexp(r\"hi|hello\", re.IGNORECASE)", + "source": [ + "global_flow", + "fallback_node" + ], + "target": [ + "greeting_flow", + "node1" + ], + "key": 0 + }, + { + "label_ref": [ + "main", + "script", + "global_flow", + "fallback_node", + "TRANSITIONS" + ], + "label": "lbl.previous()", + "condition_ref": [ + "main", + "script", + "global_flow", + "fallback_node", + "TRANSITIONS", + "lbl.previous()" + ], + "condition": "cnd.regexp(r\"previous\", re.IGNORECASE)", + "source": [ + "global_flow", + "fallback_node" + ], + "target": [ + "NONE" + ], + "key": 0 + }, + { + "label_ref": [ + "main", + "script", + "global_flow", + "fallback_node", + "TRANSITIONS" + ], + "label": "lbl.repeat()", + "condition_ref": [ + "main", + "script", + "global_flow", + "fallback_node", + "TRANSITIONS", + "lbl.repeat()" + ], + "condition": "cnd.true()", + "source": [ + "global_flow", + "fallback_node" + ], + "target": [ + "NONE" + ], + "key": 1 + }, + { + "label_ref": [ + "main", + "script", + "greeting_flow", + "node2", + "TRANSITIONS" + ], + "label": "lbl.to_fallback(0.1)", + "condition_ref": [ + "main", + "script", + "greeting_flow", + "node2", + "TRANSITIONS", + "lbl.to_fallback(0.1)" + ], + "condition": "cnd.true()", + "source": [ + "greeting_flow", + "node2" + ], + "target": [ + "NONE" + ], + "key": 0 + }, + { + "label_ref": [ + "main", + "script", + "greeting_flow", + "node2", + "TRANSITIONS" + ], + "label": "lbl.forward(0.5)", + 
"condition_ref": [ + "main", + "script", + "greeting_flow", + "node2", + "TRANSITIONS", + "lbl.forward(0.5)" + ], + "condition": "cnd.regexp(r\"talk about\")", + "source": [ + "greeting_flow", + "node2" + ], + "target": [ + "NONE" + ], + "key": 1 + }, + { + "label_ref": [ + "main", + "script", + "greeting_flow", + "node2", + "TRANSITIONS" + ], + "label": "lbl.previous()", + "condition_ref": [ + "main", + "script", + "greeting_flow", + "node2", + "TRANSITIONS", + "lbl.previous()" + ], + "condition": "cnd.regexp(r\"previous\", re.IGNORECASE)", + "source": [ + "greeting_flow", + "node2" + ], + "target": [ + "NONE" + ], + "key": 2 + }, + { + "label_ref": [ + "main", + "script", + "greeting_flow", + "node2", + "TRANSITIONS" + ], + "label": "(\"music_flow\", \"node1\")", + "condition_ref": [ + "main", + "script", + "greeting_flow", + "node2", + "TRANSITIONS", + "(\"music_flow\", \"node1\")" + ], + "condition": "cnd.regexp(r\"talk about music\")", + "source": [ + "greeting_flow", + "node2" + ], + "target": [ + "music_flow", + "node1" + ], + "key": 0 + }, + { + "label_ref": [ + "main", + "script", + "greeting_flow", + "node3", + "TRANSITIONS" + ], + "label": "lbl.forward()", + "condition_ref": [ + "main", + "script", + "greeting_flow", + "node3", + "TRANSITIONS", + "lbl.forward()" + ], + "condition": "cnd.regexp(r\"bye\")", + "source": [ + "greeting_flow", + "node3" + ], + "target": [ + "NONE" + ], + "key": 0 + }, + { + "label_ref": [ + "main", + "script", + "greeting_flow", + "node4", + "TRANSITIONS" + ], + "label": "node1", + "condition_ref": [ + "main", + "script", + "greeting_flow", + "node4", + "TRANSITIONS", + "node1" + ], + "condition": "cnd.regexp(r\"hi|hello\", re.IGNORECASE)", + "source": [ + "greeting_flow", + "node4" + ], + "target": [ + "NONE" + ], + "key": 0 + }, + { + "label_ref": [ + "main", + "script", + "greeting_flow", + "node4", + "TRANSITIONS" + ], + "label": "lbl.to_fallback()", + "condition_ref": [ + "main", + "script", + "greeting_flow", + "node4", 
+ "TRANSITIONS", + "lbl.to_fallback()" + ], + "condition": "cnd.true()", + "source": [ + "greeting_flow", + "node4" + ], + "target": [ + "NONE" + ], + "key": 1 + }, + { + "label_ref": [ + "main", + "script", + "music_flow", + "node2", + "TRANSITIONS" + ], + "label": "lbl.forward()", + "condition_ref": [ + "main", + "script", + "music_flow", + "node2", + "TRANSITIONS", + "lbl.forward()" + ], + "condition": "cnd.regexp(r\"next\", re.IGNORECASE)", + "source": [ + "music_flow", + "node2" + ], + "target": [ + "NONE" + ], + "key": 0 + }, + { + "label_ref": [ + "main", + "script", + "music_flow", + "node2", + "TRANSITIONS" + ], + "label": "lbl.repeat()", + "condition_ref": [ + "main", + "script", + "music_flow", + "node2", + "TRANSITIONS", + "lbl.repeat()" + ], + "condition": "cnd.regexp(r\"repeat\", re.IGNORECASE)", + "source": [ + "music_flow", + "node2" + ], + "target": [ + "NONE" + ], + "key": 1 + }, + { + "label_ref": [ + "main", + "script", + "music_flow", + "node2", + "TRANSITIONS" + ], + "label": "lbl.to_fallback()", + "condition_ref": [ + "main", + "script", + "music_flow", + "node2", + "TRANSITIONS", + "lbl.to_fallback()" + ], + "condition": "cnd.true()", + "source": [ + "music_flow", + "node2" + ], + "target": [ + "NONE" + ], + "key": 2 + }, + { + "label_ref": [ + "main", + "script", + "music_flow", + "node3", + "TRANSITIONS" + ], + "label": "lbl.forward()", + "condition_ref": [ + "main", + "script", + "music_flow", + "node3", + "TRANSITIONS", + "lbl.forward()" + ], + "condition": "cnd.regexp(r\"next\", re.IGNORECASE)", + "source": [ + "music_flow", + "node3" + ], + "target": [ + "NONE" + ], + "key": 0 + }, + { + "label_ref": [ + "main", + "script", + "music_flow", + "node3", + "TRANSITIONS" + ], + "label": "lbl.backward()", + "condition_ref": [ + "main", + "script", + "music_flow", + "node3", + "TRANSITIONS", + "lbl.backward()" + ], + "condition": "cnd.regexp(r\"back\", re.IGNORECASE)", + "source": [ + "music_flow", + "node3" + ], + "target": [ + "NONE" + ], + 
"key": 1 + }, + { + "label_ref": [ + "main", + "script", + "music_flow", + "node3", + "TRANSITIONS" + ], + "label": "lbl.repeat()", + "condition_ref": [ + "main", + "script", + "music_flow", + "node3", + "TRANSITIONS", + "lbl.repeat()" + ], + "condition": "cnd.regexp(r\"repeat\", re.IGNORECASE)", + "source": [ + "music_flow", + "node3" + ], + "target": [ + "NONE" + ], + "key": 2 + }, + { + "label_ref": [ + "main", + "script", + "music_flow", + "node3", + "TRANSITIONS" + ], + "label": "lbl.to_fallback()", + "condition_ref": [ + "main", + "script", + "music_flow", + "node3", + "TRANSITIONS", + "lbl.to_fallback()" + ], + "condition": "cnd.true()", + "source": [ + "music_flow", + "node3" + ], + "target": [ + "NONE" + ], + "key": 3 + }, + { + "label_ref": [ + "main", + "script", + "music_flow", + "node4", + "TRANSITIONS" + ], + "label": "transitions.greeting_flow_n2_transition", + "condition_ref": [ + "main", + "script", + "music_flow", + "node4", + "TRANSITIONS", + "transitions.greeting_flow_n2_transition" + ], + "condition": "cnd.regexp(r\"next\", re.IGNORECASE)", + "source": [ + "music_flow", + "node4" + ], + "target": [ + "NONE" + ], + "key": 0 + }, + { + "label_ref": [ + "main", + "script", + "music_flow", + "node4", + "TRANSITIONS" + ], + "label": "transitions.high_priority_node_transition(\"greeting_flow\", \"node4\")", + "condition_ref": [ + "main", + "script", + "music_flow", + "node4", + "TRANSITIONS", + "transitions.high_priority_node_transition(\"greeting_flow\", \"node4\")" + ], + "condition": "cnd.regexp(r\"next time\", re.IGNORECASE)", + "source": [ + "music_flow", + "node4" + ], + "target": [ + "NONE" + ], + "key": 1 + }, + { + "label_ref": [ + "main", + "script", + "music_flow", + "node4", + "TRANSITIONS" + ], + "label": "lbl.to_fallback()", + "condition_ref": [ + "main", + "script", + "music_flow", + "node4", + "TRANSITIONS", + "lbl.to_fallback()" + ], + "condition": "cnd.true()", + "source": [ + "music_flow", + "node4" + ], + "target": [ + "NONE" + ], 
+ "key": 2 + } + ] +} \ No newline at end of file diff --git a/tests/parser/TEST_CASES/test_py2graph/complex_tests/test_2/python_files/flow.py b/tests/parser/TEST_CASES/test_py2graph/complex_tests/test_2/python_files/flow.py new file mode 100755 index 000000000..191b7e443 --- /dev/null +++ b/tests/parser/TEST_CASES/test_py2graph/complex_tests/test_2/python_files/flow.py @@ -0,0 +1,36 @@ +from dff.core.engine.core.keywords import TRANSITIONS, RESPONSE, PROCESSING, LOCAL +import dff.core.engine.conditions as cnd +import dff.core.engine.labels as lbl +import re + +from functions import add_prefix + + +global_flow = { + LOCAL: {PROCESSING: {2: add_prefix("l2_local"), 3: add_prefix("l3_local")}}, + "start_node": { # This is an initial node, it doesn't need an `RESPONSE` + RESPONSE: "", + TRANSITIONS: { + ("music_flow", "node1"): cnd.regexp(r"talk about music"), # first check + ("greeting_flow", "node1"): cnd.regexp( + r"hi|hello", re.IGNORECASE + ), # second check + # ("global_flow", "fallback_node"): cnd.true(), # third check + "fallback_node": cnd.true(), # third check + # "fallback_node" is equivalent to ("global_flow", "fallback_node") + }, + }, + "fallback_node": { # We get to this node if an error occurred while the agent was running + RESPONSE: "Ooops", + TRANSITIONS: { + ("music_flow", "node1"): cnd.regexp(r"talk about music"), # first check + ("greeting_flow", "node1"): cnd.regexp( + r"hi|hello", re.IGNORECASE + ), # second check + lbl.previous(): cnd.regexp(r"previous", re.IGNORECASE), # third check + # lbl.previous() is equivalent to ("PREVIOUS_flow", "PREVIOUS_node") + lbl.repeat(): cnd.true(), # fourth check + # lbl.repeat() is equivalent to ("global_flow", "fallback_node") + }, + }, +} diff --git a/tests/parser/TEST_CASES/test_py2graph/complex_tests/test_2/python_files/functions.py b/tests/parser/TEST_CASES/test_py2graph/complex_tests/test_2/python_files/functions.py new file mode 100755 index 000000000..754c6c889 --- /dev/null +++ 
b/tests/parser/TEST_CASES/test_py2graph/complex_tests/test_2/python_files/functions.py @@ -0,0 +1,11 @@ +from dff.core.engine.core import Actor, Context + + +def add_prefix(prefix): + def add_prefix_processing(ctx: Context, actor: Actor, *args, **kwargs) -> Context: + processed_node = ctx.a_s.get("processed_node", ctx.a_s["next_node"]) + processed_node.response = f"{prefix}: {processed_node.response}" + ctx.a_s["processed_node"] = processed_node + return ctx + + return add_prefix_processing diff --git a/tests/parser/TEST_CASES/test_py2graph/complex_tests/test_2/python_files/main.py b/tests/parser/TEST_CASES/test_py2graph/complex_tests/test_2/python_files/main.py new file mode 100755 index 000000000..ec76c0dd5 --- /dev/null +++ b/tests/parser/TEST_CASES/test_py2graph/complex_tests/test_2/python_files/main.py @@ -0,0 +1,152 @@ +from dff.core.engine.core.keywords import ( + TRANSITIONS, + RESPONSE, + PROCESSING, + GLOBAL, + MISC, + LOCAL, +) +import dff.core.engine.conditions as cnd +import dff.core.engine.labels as lbl +from dff.core.engine.core import Actor as Act, Context +import dff.core.engine.responses as rsp +from functions import add_prefix +import typing as tp, re, transitions +from flow import global_flow + + +script = { + GLOBAL: { + TRANSITIONS: { + ("greeting_flow", "node1", 1.1): cnd.regexp(r"\b(hi|hello)\b", re.I), + ("music_flow", "node1", 1.1): cnd.regexp(r"talk about music"), + lbl.to_fallback(0.1): cnd.true(), + lbl.forward(): cnd.all( + [ + cnd.regexp(r"next\b"), + cnd.has_last_labels( + labels=[("music_flow", i) for i in ["node2", "node3"]] + ), + ] + ), + lbl.repeat(0.2): cnd.all( + [ + cnd.regexp(r"repeat", re.I), + cnd.negation(cnd.has_last_labels(flow_labels=["global_flow"])), + ] + ), + }, + PROCESSING: {1: add_prefix("l1_global"), 2: add_prefix("l2_global")}, + MISC: { + "var1": "global_data", + "var2": "global_data", + "var3": "global_data", + }, + RESPONSE: "", + }, + "global_flow": { + LOCAL: {PROCESSING: {2: add_prefix("l2_local"), 3: 
add_prefix("l3_local")}}, + "start_node": { # This is an initial node, it doesn't need an `RESPONSE` + RESPONSE: "", + TRANSITIONS: { + ("music_flow", "node1"): cnd.regexp(r"talk about music"), # first check + ("greeting_flow", "node1"): cnd.regexp( + r"hi|hello", re.IGNORECASE + ), # second check + # ("global_flow", "fallback_node"): cnd.true(), # third check + "fallback_node": cnd.true(), # third check + # "fallback_node" is equivalent to ("global_flow", "fallback_node") + }, + }, + "fallback_node": { # We get to this node if an error occurred while the agent was running + RESPONSE: "Ooops", + TRANSITIONS: { + ("music_flow", "node1"): cnd.regexp(r"talk about music"), # first check + ("greeting_flow", "node1"): cnd.regexp( + r"hi|hello", re.IGNORECASE + ), # second check + lbl.previous(): cnd.regexp(r"previous", re.IGNORECASE), # third check + # lbl.previous() is equivalent to ("PREVIOUS_flow", "PREVIOUS_node") + lbl.repeat(): cnd.true(), # fourth check + # lbl.repeat() is equivalent to ("global_flow", "fallback_node") + }, + }, + }, + "greeting_flow": { + "node1": { + RESPONSE: rsp.choice( + ["Hi, what is up?", "Hello, how are you?"] + ), # When the agent goes to node1, we return "Hi, how are you?" + TRANSITIONS: { + ("global_flow", "fallback_node", 0.1): cnd.true(), # second check + "node2": cnd.regexp(r"how are you"), # first check + # "node2" is equivalent to ("greeting_flow", "node2", 1.0) + }, + MISC: {"var3": "info_of_step_1"}, + }, + "node2": { + RESPONSE: "Good. 
What do you want to talk about?", + TRANSITIONS: { + lbl.to_fallback(0.1): cnd.true(), # third check + # lbl.to_fallback(0.1) is equivalent to ("global_flow", "fallback_node", 0.1) + lbl.forward(0.5): cnd.regexp(r"talk about"), # second check + # lbl.forward(0.5) is equivalent to ("greeting_flow", "node3", 0.5) + ("music_flow", "node1"): cnd.regexp(r"talk about music"), # first check + lbl.previous(): cnd.regexp(r"previous", re.IGNORECASE), # third check + # ("music_flow", "node1") is equivalent to ("music_flow", "node1", 1.0) + }, + }, + "node3": {RESPONSE: foo, TRANSITIONS: {lbl.forward(): cnd.regexp(r"bye")}}, + "node4": { + RESPONSE: bar("bye"), + TRANSITIONS: { + "node1": cnd.regexp(r"hi|hello", re.IGNORECASE), # first check + lbl.to_fallback(): cnd.true(), # second check + }, + }, + }, + "music_flow": { + "node1": { + RESPONSE: "I love `System of a Down` group, would you like to tell about it? ", + TRANSITIONS: { + lbl.forward(): cnd.regexp(r"yes|yep|ok", re.IGNORECASE), + lbl.to_fallback(): cnd.true(), + }, + }, + "node2": { + RESPONSE: "System of a Down is an Armenian-American heavy metal band formed in in 1994.", + TRANSITIONS: { + lbl.forward(): cnd.regexp(r"next", re.IGNORECASE), + lbl.repeat(): cnd.regexp(r"repeat", re.IGNORECASE), + lbl.to_fallback(): cnd.true(), + }, + }, + "node3": { + RESPONSE: "The band achieved commercial success with the release of five studio albums.", + TRANSITIONS: { + lbl.forward(): cnd.regexp(r"next", re.IGNORECASE), + lbl.backward(): cnd.regexp(r"back", re.IGNORECASE), + lbl.repeat(): cnd.regexp(r"repeat", re.IGNORECASE), + lbl.to_fallback(): cnd.true(), + }, + }, + "node4": { + RESPONSE: "That's all what I know", + TRANSITIONS: { + transitions.greeting_flow_n2_transition: cnd.regexp( + r"next", re.IGNORECASE + ), + transitions.high_priority_node_transition( + "greeting_flow", "node4" + ): cnd.regexp(r"next time", re.IGNORECASE), + lbl.to_fallback(): cnd.true(), + }, + }, + }, +} + +actor = Act( + 
fallback_label=("global_flow", "fallback_node"), + script=script, + start_label=("global_flow", "start_node"), +) diff --git a/tests/parser/TEST_CASES/test_py2graph/complex_tests/test_2/python_files/transitions.py b/tests/parser/TEST_CASES/test_py2graph/complex_tests/test_2/python_files/transitions.py new file mode 100755 index 000000000..c01823d49 --- /dev/null +++ b/tests/parser/TEST_CASES/test_py2graph/complex_tests/test_2/python_files/transitions.py @@ -0,0 +1,18 @@ +from dff.core.engine.core.types import NodeLabel3Type +from dff.core.engine.core import Actor, Context +import typing as tp + + +def greeting_flow_n2_transition( + ctx: Context, actor: Actor, *args, **kwargs +) -> NodeLabel3Type: + return "greeting_flow", "node2", 1.0 + + +def high_priority_node_transition( + flow_label: str, label: str +) -> tp.Callable[..., NodeLabel3Type]: + def transition(ctx: Context, actor: Actor, *args, **kwargs) -> NodeLabel3Type: + return flow_label, label, 2.0 + + return transition diff --git a/tests/parser/TEST_CASES/test_py2yaml/complex_tests/test_1/python_files/__init__.py b/tests/parser/TEST_CASES/test_py2yaml/complex_tests/test_1/python_files/__init__.py new file mode 100755 index 000000000..e69de29bb diff --git a/tests/parser/TEST_CASES/test_py2yaml/complex_tests/test_1/python_files/flows/__init__.py b/tests/parser/TEST_CASES/test_py2yaml/complex_tests/test_1/python_files/flows/__init__.py new file mode 100755 index 000000000..23ad7351b --- /dev/null +++ b/tests/parser/TEST_CASES/test_py2yaml/complex_tests/test_1/python_files/flows/__init__.py @@ -0,0 +1,2 @@ +from fallback_flow import fallback_flow as ff +from start_flow import start_flow as sf diff --git a/tests/parser/TEST_CASES/test_py2yaml/complex_tests/test_1/python_files/flows/fallback_flow.py b/tests/parser/TEST_CASES/test_py2yaml/complex_tests/test_1/python_files/flows/fallback_flow.py new file mode 100755 index 000000000..8c9137f13 --- /dev/null +++ 
b/tests/parser/TEST_CASES/test_py2yaml/complex_tests/test_1/python_files/flows/fallback_flow.py @@ -0,0 +1,4 @@ +from ..nodes.fallback_node import fallback_node +from ..nodes import start_node as sn + +fallback_flow = {"fallback_node": fallback_node, "other_node": sn.start_node} diff --git a/tests/parser/TEST_CASES/test_py2yaml/complex_tests/test_1/python_files/flows/start_flow.py b/tests/parser/TEST_CASES/test_py2yaml/complex_tests/test_1/python_files/flows/start_flow.py new file mode 100755 index 000000000..500410866 --- /dev/null +++ b/tests/parser/TEST_CASES/test_py2yaml/complex_tests/test_1/python_files/flows/start_flow.py @@ -0,0 +1,7 @@ +from ..nodes import sn +from ..nodes.fallback_node import fallback_node + +start_flow = { + "start_node": sn, + "other_node": fallback_node, +} diff --git a/tests/parser/TEST_CASES/test_py2yaml/complex_tests/test_1/python_files/main.py b/tests/parser/TEST_CASES/test_py2yaml/complex_tests/test_1/python_files/main.py new file mode 100755 index 000000000..898e81130 --- /dev/null +++ b/tests/parser/TEST_CASES/test_py2yaml/complex_tests/test_1/python_files/main.py @@ -0,0 +1,8 @@ +from dff.core.engine.core.actor import Actor as act +from dff.core.engine.core.keywords import GLOBAL as glb, RESPONSE as rsp + +import flows + +script = {glb: {rsp: "glb"}, "start_flow": flows.sf, "fallback_flow": flows.ff} + +actor = act(fallback_label=("fallback_flow", "fallback_node"), start_label=("start_flow", "start_node"), script=script) diff --git a/tests/parser/TEST_CASES/test_py2yaml/complex_tests/test_1/python_files/nodes/__init__.py b/tests/parser/TEST_CASES/test_py2yaml/complex_tests/test_1/python_files/nodes/__init__.py new file mode 100755 index 000000000..779608b38 --- /dev/null +++ b/tests/parser/TEST_CASES/test_py2yaml/complex_tests/test_1/python_files/nodes/__init__.py @@ -0,0 +1,2 @@ +from start_node import start_node as sn +from fallback_node import fallback_node as fn diff --git 
a/tests/parser/TEST_CASES/test_py2yaml/complex_tests/test_1/python_files/nodes/fallback_node.py b/tests/parser/TEST_CASES/test_py2yaml/complex_tests/test_1/python_files/nodes/fallback_node.py new file mode 100755 index 000000000..565febf18 --- /dev/null +++ b/tests/parser/TEST_CASES/test_py2yaml/complex_tests/test_1/python_files/nodes/fallback_node.py @@ -0,0 +1,3 @@ +from dff.core.engine.core.keywords import RESPONSE as rp + +fallback_node = {rp: "bye"} diff --git a/tests/parser/TEST_CASES/test_py2yaml/complex_tests/test_1/python_files/nodes/start_node.py b/tests/parser/TEST_CASES/test_py2yaml/complex_tests/test_1/python_files/nodes/start_node.py new file mode 100755 index 000000000..382f21230 --- /dev/null +++ b/tests/parser/TEST_CASES/test_py2yaml/complex_tests/test_1/python_files/nodes/start_node.py @@ -0,0 +1,3 @@ +from dff.core.engine.core.keywords import RESPONSE as rsp + +start_node = {rsp: "hi"} diff --git a/tests/parser/TEST_CASES/test_py2yaml/complex_tests/test_1/yaml_files/script.yaml b/tests/parser/TEST_CASES/test_py2yaml/complex_tests/test_1/yaml_files/script.yaml new file mode 100755 index 000000000..4734326b1 --- /dev/null +++ b/tests/parser/TEST_CASES/test_py2yaml/complex_tests/test_1/yaml_files/script.yaml @@ -0,0 +1,48 @@ +namespaces: + python_files.main: + act: !from dff.core.engine.core.actor Actor + glb: !from dff.core.engine.core.keywords GLOBAL + rsp: !from dff.core.engine.core.keywords RESPONSE + flows: !import python_files.flows + script: + glb: + rsp: !str glb + start_flow: flows.sf + fallback_flow: flows.ff + actor: !call + name: act + args: + fallback_label: + - fallback_flow + - fallback_node + start_label: + - start_flow + - start_node + script: script + python_files.__init__: {} + python_files.flows.__init__: + ff: !from python_files.flows.fallback_flow fallback_flow + sf: !from python_files.flows.start_flow start_flow + python_files.flows.fallback_flow: + fallback_node: !from python_files.nodes.fallback_node fallback_node + sn: 
!from python_files.nodes start_node + fallback_flow: + !str fallback_node: fallback_node + other_node: sn.start_node + python_files.nodes.fallback_node: + rp: !from dff.core.engine.core.keywords RESPONSE + fallback_node: + rp: bye + python_files.nodes.__init__: + sn: !from python_files.nodes.start_node start_node + fn: !from python_files.nodes.fallback_node fallback_node + python_files.nodes.start_node: + rsp: !from dff.core.engine.core.keywords RESPONSE + start_node: + rsp: hi + python_files.flows.start_flow: + sn: !from python_files.nodes sn + fallback_node: !from python_files.nodes.fallback_node fallback_node + start_flow: + start_node: sn + other_node: fallback_node diff --git a/tests/parser/TEST_CASES/test_py2yaml/complex_tests/test_2/python_files/flows/__init__.py b/tests/parser/TEST_CASES/test_py2yaml/complex_tests/test_2/python_files/flows/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/parser/TEST_CASES/test_py2yaml/complex_tests/test_2/python_files/flows/start.py b/tests/parser/TEST_CASES/test_py2yaml/complex_tests/test_2/python_files/flows/start.py new file mode 100644 index 000000000..415e2115c --- /dev/null +++ b/tests/parser/TEST_CASES/test_py2yaml/complex_tests/test_2/python_files/flows/start.py @@ -0,0 +1,3 @@ +from ..nodes.node_1 import node + +flow = {"start_node": node} diff --git a/tests/parser/TEST_CASES/test_py2yaml/complex_tests/test_2/python_files/main.py b/tests/parser/TEST_CASES/test_py2yaml/complex_tests/test_2/python_files/main.py new file mode 100644 index 000000000..543cb999a --- /dev/null +++ b/tests/parser/TEST_CASES/test_py2yaml/complex_tests/test_2/python_files/main.py @@ -0,0 +1,4 @@ +from script import act +from dff.core.engine import function_does_not_exist + +result = function_does_not_exist(act, script={1: 1}, value=2) diff --git a/tests/parser/TEST_CASES/test_py2yaml/complex_tests/test_2/python_files/nodes/__init__.py 
b/tests/parser/TEST_CASES/test_py2yaml/complex_tests/test_2/python_files/nodes/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/parser/TEST_CASES/test_py2yaml/complex_tests/test_2/python_files/nodes/node_1.py b/tests/parser/TEST_CASES/test_py2yaml/complex_tests/test_2/python_files/nodes/node_1.py new file mode 100644 index 000000000..6d8fa94d1 --- /dev/null +++ b/tests/parser/TEST_CASES/test_py2yaml/complex_tests/test_2/python_files/nodes/node_1.py @@ -0,0 +1,3 @@ +import dff.core.engine.core.keywords as kw + +node = {kw.RESPONSE: "hey"} diff --git a/tests/parser/TEST_CASES/test_py2yaml/complex_tests/test_2/python_files/script.py b/tests/parser/TEST_CASES/test_py2yaml/complex_tests/test_2/python_files/script.py new file mode 100644 index 000000000..2a67e5085 --- /dev/null +++ b/tests/parser/TEST_CASES/test_py2yaml/complex_tests/test_2/python_files/script.py @@ -0,0 +1,5 @@ +from flows.start import flow +from dff.core.engine.core.actor import Actor + + +act = Actor(script={"start_flow": flow}, start_label=("start_flow", "start_node")) diff --git a/tests/parser/TEST_CASES/test_py2yaml/complex_tests/test_2/yaml_files/script.yaml b/tests/parser/TEST_CASES/test_py2yaml/complex_tests/test_2/yaml_files/script.yaml new file mode 100644 index 000000000..8fbb49859 --- /dev/null +++ b/tests/parser/TEST_CASES/test_py2yaml/complex_tests/test_2/yaml_files/script.yaml @@ -0,0 +1,32 @@ +namespaces: + main: + act: !from script act + function_does_not_exist: !from dff.core.engine function_does_not_exist + result: !call + name: function_does_not_exist + args: + 0: act + script: + '1': '1' + value: '2' + script: + flow: !from flows.start flow + Actor: !from dff.core.engine.core.actor Actor + act: !call + name: Actor + args: + script: + start_flow: flow + start_label: + - start_flow + - start_node + flows.start: + node: !from nodes.node_1 node + flow: + start_node: node + flows.__init__: {} + nodes.node_1: + kw: !import dff.core.engine.core.keywords + 
node: + kw.RESPONSE: hey + nodes.__init__: {} diff --git a/tests/parser/TEST_CASES/test_py2yaml/complex_tests/test_3/python_files/flow.py b/tests/parser/TEST_CASES/test_py2yaml/complex_tests/test_3/python_files/flow.py new file mode 100755 index 000000000..191b7e443 --- /dev/null +++ b/tests/parser/TEST_CASES/test_py2yaml/complex_tests/test_3/python_files/flow.py @@ -0,0 +1,36 @@ +from dff.core.engine.core.keywords import TRANSITIONS, RESPONSE, PROCESSING, LOCAL +import dff.core.engine.conditions as cnd +import dff.core.engine.labels as lbl +import re + +from functions import add_prefix + + +global_flow = { + LOCAL: {PROCESSING: {2: add_prefix("l2_local"), 3: add_prefix("l3_local")}}, + "start_node": { # This is an initial node, it doesn't need an `RESPONSE` + RESPONSE: "", + TRANSITIONS: { + ("music_flow", "node1"): cnd.regexp(r"talk about music"), # first check + ("greeting_flow", "node1"): cnd.regexp( + r"hi|hello", re.IGNORECASE + ), # second check + # ("global_flow", "fallback_node"): cnd.true(), # third check + "fallback_node": cnd.true(), # third check + # "fallback_node" is equivalent to ("global_flow", "fallback_node") + }, + }, + "fallback_node": { # We get to this node if an error occurred while the agent was running + RESPONSE: "Ooops", + TRANSITIONS: { + ("music_flow", "node1"): cnd.regexp(r"talk about music"), # first check + ("greeting_flow", "node1"): cnd.regexp( + r"hi|hello", re.IGNORECASE + ), # second check + lbl.previous(): cnd.regexp(r"previous", re.IGNORECASE), # third check + # lbl.previous() is equivalent to ("PREVIOUS_flow", "PREVIOUS_node") + lbl.repeat(): cnd.true(), # fourth check + # lbl.repeat() is equivalent to ("global_flow", "fallback_node") + }, + }, +} diff --git a/tests/parser/TEST_CASES/test_py2yaml/complex_tests/test_3/python_files/functions.py b/tests/parser/TEST_CASES/test_py2yaml/complex_tests/test_3/python_files/functions.py new file mode 100755 index 000000000..754c6c889 --- /dev/null +++ 
b/tests/parser/TEST_CASES/test_py2yaml/complex_tests/test_3/python_files/functions.py @@ -0,0 +1,11 @@ +from dff.core.engine.core import Actor, Context + + +def add_prefix(prefix): + def add_prefix_processing(ctx: Context, actor: Actor, *args, **kwargs) -> Context: + processed_node = ctx.a_s.get("processed_node", ctx.a_s["next_node"]) + processed_node.response = f"{prefix}: {processed_node.response}" + ctx.a_s["processed_node"] = processed_node + return ctx + + return add_prefix_processing diff --git a/tests/parser/TEST_CASES/test_py2yaml/complex_tests/test_3/python_files/main.py b/tests/parser/TEST_CASES/test_py2yaml/complex_tests/test_3/python_files/main.py new file mode 100755 index 000000000..ec76c0dd5 --- /dev/null +++ b/tests/parser/TEST_CASES/test_py2yaml/complex_tests/test_3/python_files/main.py @@ -0,0 +1,152 @@ +from dff.core.engine.core.keywords import ( + TRANSITIONS, + RESPONSE, + PROCESSING, + GLOBAL, + MISC, + LOCAL, +) +import dff.core.engine.conditions as cnd +import dff.core.engine.labels as lbl +from dff.core.engine.core import Actor as Act, Context +import dff.core.engine.responses as rsp +from functions import add_prefix +import typing as tp, re, transitions +from flow import global_flow + + +script = { + GLOBAL: { + TRANSITIONS: { + ("greeting_flow", "node1", 1.1): cnd.regexp(r"\b(hi|hello)\b", re.I), + ("music_flow", "node1", 1.1): cnd.regexp(r"talk about music"), + lbl.to_fallback(0.1): cnd.true(), + lbl.forward(): cnd.all( + [ + cnd.regexp(r"next\b"), + cnd.has_last_labels( + labels=[("music_flow", i) for i in ["node2", "node3"]] + ), + ] + ), + lbl.repeat(0.2): cnd.all( + [ + cnd.regexp(r"repeat", re.I), + cnd.negation(cnd.has_last_labels(flow_labels=["global_flow"])), + ] + ), + }, + PROCESSING: {1: add_prefix("l1_global"), 2: add_prefix("l2_global")}, + MISC: { + "var1": "global_data", + "var2": "global_data", + "var3": "global_data", + }, + RESPONSE: "", + }, + "global_flow": { + LOCAL: {PROCESSING: {2: add_prefix("l2_local"), 3: 
add_prefix("l3_local")}}, + "start_node": { # This is an initial node, it doesn't need an `RESPONSE` + RESPONSE: "", + TRANSITIONS: { + ("music_flow", "node1"): cnd.regexp(r"talk about music"), # first check + ("greeting_flow", "node1"): cnd.regexp( + r"hi|hello", re.IGNORECASE + ), # second check + # ("global_flow", "fallback_node"): cnd.true(), # third check + "fallback_node": cnd.true(), # third check + # "fallback_node" is equivalent to ("global_flow", "fallback_node") + }, + }, + "fallback_node": { # We get to this node if an error occurred while the agent was running + RESPONSE: "Ooops", + TRANSITIONS: { + ("music_flow", "node1"): cnd.regexp(r"talk about music"), # first check + ("greeting_flow", "node1"): cnd.regexp( + r"hi|hello", re.IGNORECASE + ), # second check + lbl.previous(): cnd.regexp(r"previous", re.IGNORECASE), # third check + # lbl.previous() is equivalent to ("PREVIOUS_flow", "PREVIOUS_node") + lbl.repeat(): cnd.true(), # fourth check + # lbl.repeat() is equivalent to ("global_flow", "fallback_node") + }, + }, + }, + "greeting_flow": { + "node1": { + RESPONSE: rsp.choice( + ["Hi, what is up?", "Hello, how are you?"] + ), # When the agent goes to node1, we return "Hi, how are you?" + TRANSITIONS: { + ("global_flow", "fallback_node", 0.1): cnd.true(), # second check + "node2": cnd.regexp(r"how are you"), # first check + # "node2" is equivalent to ("greeting_flow", "node2", 1.0) + }, + MISC: {"var3": "info_of_step_1"}, + }, + "node2": { + RESPONSE: "Good. 
What do you want to talk about?", + TRANSITIONS: { + lbl.to_fallback(0.1): cnd.true(), # third check + # lbl.to_fallback(0.1) is equivalent to ("global_flow", "fallback_node", 0.1) + lbl.forward(0.5): cnd.regexp(r"talk about"), # second check + # lbl.forward(0.5) is equivalent to ("greeting_flow", "node3", 0.5) + ("music_flow", "node1"): cnd.regexp(r"talk about music"), # first check + lbl.previous(): cnd.regexp(r"previous", re.IGNORECASE), # third check + # ("music_flow", "node1") is equivalent to ("music_flow", "node1", 1.0) + }, + }, + "node3": {RESPONSE: foo, TRANSITIONS: {lbl.forward(): cnd.regexp(r"bye")}}, + "node4": { + RESPONSE: bar("bye"), + TRANSITIONS: { + "node1": cnd.regexp(r"hi|hello", re.IGNORECASE), # first check + lbl.to_fallback(): cnd.true(), # second check + }, + }, + }, + "music_flow": { + "node1": { + RESPONSE: "I love `System of a Down` group, would you like to tell about it? ", + TRANSITIONS: { + lbl.forward(): cnd.regexp(r"yes|yep|ok", re.IGNORECASE), + lbl.to_fallback(): cnd.true(), + }, + }, + "node2": { + RESPONSE: "System of a Down is an Armenian-American heavy metal band formed in in 1994.", + TRANSITIONS: { + lbl.forward(): cnd.regexp(r"next", re.IGNORECASE), + lbl.repeat(): cnd.regexp(r"repeat", re.IGNORECASE), + lbl.to_fallback(): cnd.true(), + }, + }, + "node3": { + RESPONSE: "The band achieved commercial success with the release of five studio albums.", + TRANSITIONS: { + lbl.forward(): cnd.regexp(r"next", re.IGNORECASE), + lbl.backward(): cnd.regexp(r"back", re.IGNORECASE), + lbl.repeat(): cnd.regexp(r"repeat", re.IGNORECASE), + lbl.to_fallback(): cnd.true(), + }, + }, + "node4": { + RESPONSE: "That's all what I know", + TRANSITIONS: { + transitions.greeting_flow_n2_transition: cnd.regexp( + r"next", re.IGNORECASE + ), + transitions.high_priority_node_transition( + "greeting_flow", "node4" + ): cnd.regexp(r"next time", re.IGNORECASE), + lbl.to_fallback(): cnd.true(), + }, + }, + }, +} + +actor = Act( + 
fallback_label=("global_flow", "fallback_node"), + script=script, + start_label=("global_flow", "start_node"), +) diff --git a/tests/parser/TEST_CASES/test_py2yaml/complex_tests/test_3/python_files/transitions.py b/tests/parser/TEST_CASES/test_py2yaml/complex_tests/test_3/python_files/transitions.py new file mode 100755 index 000000000..c01823d49 --- /dev/null +++ b/tests/parser/TEST_CASES/test_py2yaml/complex_tests/test_3/python_files/transitions.py @@ -0,0 +1,18 @@ +from dff.core.engine.core.types import NodeLabel3Type +from dff.core.engine.core import Actor, Context +import typing as tp + + +def greeting_flow_n2_transition( + ctx: Context, actor: Actor, *args, **kwargs +) -> NodeLabel3Type: + return "greeting_flow", "node2", 1.0 + + +def high_priority_node_transition( + flow_label: str, label: str +) -> tp.Callable[..., NodeLabel3Type]: + def transition(ctx: Context, actor: Actor, *args, **kwargs) -> NodeLabel3Type: + return flow_label, label, 2.0 + + return transition diff --git a/tests/parser/TEST_CASES/test_py2yaml/complex_tests/test_3/yaml_files/script.yaml b/tests/parser/TEST_CASES/test_py2yaml/complex_tests/test_3/yaml_files/script.yaml new file mode 100755 index 000000000..00b0c04b7 --- /dev/null +++ b/tests/parser/TEST_CASES/test_py2yaml/complex_tests/test_3/yaml_files/script.yaml @@ -0,0 +1,138 @@ +namespaces: + main: + TRANSITIONS: !from dff.core.engine.core.keywords TRANSITIONS + RESPONSE: !from dff.core.engine.core.keywords RESPONSE + PROCESSING: !from dff.core.engine.core.keywords PROCESSING + GLOBAL: !from dff.core.engine.core.keywords GLOBAL + MISC: !from dff.core.engine.core.keywords MISC + LOCAL: !from dff.core.engine.core.keywords LOCAL + cnd: !import dff.core.engine.conditions + lbl: !import dff.core.engine.labels + Act: !from dff.core.engine.core Actor + Context: !from dff.core.engine.core Context + rsp: !import dff.core.engine.responses + add_prefix: !from functions add_prefix + tp: !import typing + re: !import re + transitions: !import 
transitions + global_flow: !from flow global_flow + script: + GLOBAL: + TRANSITIONS: + ("greeting_flow", "node1", 1.1): cnd.regexp(r"\b(hi|hello)\b", re.I) + ("music_flow", "node1", 1.1): cnd.regexp(r"talk about music") + lbl.to_fallback(0.1): cnd.true() + lbl.forward(): cnd.all([cnd.regexp(r"next\b"),cnd.has_last_labels(labels=[("music_flow", i) for i in ["node2", "node3"]]),]) + lbl.repeat(0.2): cnd.all([cnd.regexp(r"repeat", re.I),cnd.negation(cnd.has_last_labels(flow_labels=["global_flow"])),]) + PROCESSING: + '1': add_prefix("l1_global") + '2': add_prefix("l2_global") + MISC: + var1: global_data + var2: global_data + var3: global_data + RESPONSE: !str + !str global_flow: + LOCAL: + PROCESSING: + '2': add_prefix("l2_local") + '3': add_prefix("l3_local") + start_node: + RESPONSE: !str + TRANSITIONS: + ("music_flow", "node1"): cnd.regexp(r"talk about music") + ("greeting_flow", "node1"): cnd.regexp(r"hi|hello", re.IGNORECASE) + fallback_node: cnd.true() + fallback_node: + RESPONSE: Ooops + TRANSITIONS: + ("music_flow", "node1"): cnd.regexp(r"talk about music") + ("greeting_flow", "node1"): cnd.regexp(r"hi|hello", re.IGNORECASE) + lbl.previous(): cnd.regexp(r"previous", re.IGNORECASE) + lbl.repeat(): cnd.true() + greeting_flow: + node1: + RESPONSE: rsp.choice(["Hi, what is up?", "Hello, how are you?"]) + TRANSITIONS: + ("global_flow", "fallback_node", 0.1): cnd.true() + node2: cnd.regexp(r"how are you") + MISC: + var3: info_of_step_1 + node2: + RESPONSE: Good. What do you want to talk about? 
+ TRANSITIONS: + lbl.to_fallback(0.1): cnd.true() + lbl.forward(0.5): cnd.regexp(r"talk about") + ("music_flow", "node1"): cnd.regexp(r"talk about music") + lbl.previous(): cnd.regexp(r"previous", re.IGNORECASE) + node3: + RESPONSE: !py foo + TRANSITIONS: + lbl.forward(): cnd.regexp(r"bye") + node4: + RESPONSE: !py bar("bye") + TRANSITIONS: + node1: cnd.regexp(r"hi|hello", re.IGNORECASE) + lbl.to_fallback(): cnd.true() + music_flow: + node1: + RESPONSE: 'I love `System of a Down` group, would you like to tell about it? ' + TRANSITIONS: + lbl.forward(): cnd.regexp(r"yes|yep|ok", re.IGNORECASE) + lbl.to_fallback(): cnd.true() + node2: + RESPONSE: System of a Down is an Armenian-American heavy metal band formed in in 1994. + TRANSITIONS: + lbl.forward(): cnd.regexp(r"next", re.IGNORECASE) + lbl.repeat(): cnd.regexp(r"repeat", re.IGNORECASE) + lbl.to_fallback(): cnd.true() + node3: + RESPONSE: The band achieved commercial success with the release of five studio albums. + TRANSITIONS: + lbl.forward(): cnd.regexp(r"next", re.IGNORECASE) + lbl.backward(): cnd.regexp(r"back", re.IGNORECASE) + lbl.repeat(): cnd.regexp(r"repeat", re.IGNORECASE) + lbl.to_fallback(): cnd.true() + node4: + RESPONSE: That's all what I know + TRANSITIONS: + transitions.greeting_flow_n2_transition: cnd.regexp(r"next", re.IGNORECASE) + transitions.high_priority_node_transition("greeting_flow", "node4"): cnd.regexp(r"next time", re.IGNORECASE) + lbl.to_fallback(): cnd.true() + actor: !call + name: Act + args: + fallback_label: + - !str global_flow + - fallback_node + script: script + start_label: + - !str global_flow + - start_node + flow: + TRANSITIONS: !from dff.core.engine.core.keywords TRANSITIONS + RESPONSE: !from dff.core.engine.core.keywords RESPONSE + PROCESSING: !from dff.core.engine.core.keywords PROCESSING + LOCAL: !from dff.core.engine.core.keywords LOCAL + cnd: !import dff.core.engine.conditions + lbl: !import dff.core.engine.labels + re: !import re + add_prefix: !from functions 
add_prefix + global_flow: + LOCAL: + PROCESSING: + '2': add_prefix("l2_local") + '3': add_prefix("l3_local") + start_node: + RESPONSE: !str + TRANSITIONS: + ("music_flow", "node1"): cnd.regexp(r"talk about music") + ("greeting_flow", "node1"): cnd.regexp(r"hi|hello", re.IGNORECASE) + fallback_node: cnd.true() + fallback_node: + RESPONSE: Ooops + TRANSITIONS: + ("music_flow", "node1"): cnd.regexp(r"talk about music") + ("greeting_flow", "node1"): cnd.regexp(r"hi|hello", re.IGNORECASE) + lbl.previous(): cnd.regexp(r"previous", re.IGNORECASE) + lbl.repeat(): cnd.true() diff --git a/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_1/python_files/main.py b/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_1/python_files/main.py new file mode 100755 index 000000000..25accbcec --- /dev/null +++ b/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_1/python_files/main.py @@ -0,0 +1,5 @@ +import abc + +d = {} + +c = {1: d} diff --git a/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_1/yaml_files/script.yaml b/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_1/yaml_files/script.yaml new file mode 100755 index 000000000..70e34cdf5 --- /dev/null +++ b/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_1/yaml_files/script.yaml @@ -0,0 +1,6 @@ +namespaces: + main: + abc: !import abc + d: {} + c: + '1': d diff --git a/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_10/python_files/main.py b/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_10/python_files/main.py new file mode 100755 index 000000000..d1835b353 --- /dev/null +++ b/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_10/python_files/main.py @@ -0,0 +1,8 @@ +from dff.core.engine.core.actor import Actor + +from scripts import script + +actor = Actor( + script, + ("flow", "node"), +) diff --git a/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_10/python_files/scripts.py b/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_10/python_files/scripts.py new file mode 
100755 index 000000000..51e04eacf --- /dev/null +++ b/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_10/python_files/scripts.py @@ -0,0 +1,3 @@ +from dff.core.engine.core import keywords as kw + +another_script = script = {"flow": {"node": {kw.RESPONSE: "hi"}}} diff --git a/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_10/yaml_files/script.yaml b/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_10/yaml_files/script.yaml new file mode 100755 index 000000000..1d9fca44c --- /dev/null +++ b/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_10/yaml_files/script.yaml @@ -0,0 +1,18 @@ +namespaces: + main: + Actor: !from dff.core.engine.core.actor Actor + script: !from scripts script + actor: !call + name: Actor + args: + script: script + start_label: + - flow + - node + scripts: + kw: !from dff.core.engine.core keywords + another_script: + flow: + node: + kw.RESPONSE: hi + script: another_script diff --git a/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_11/python_files/main.py b/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_11/python_files/main.py new file mode 100755 index 000000000..b9fa0e579 --- /dev/null +++ b/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_11/python_files/main.py @@ -0,0 +1,11 @@ +from dff.core.engine.core.actor import Actor +from dff.core.engine.core import keywords as kw + +dictionary = {"node": {kw.RESPONSE: "hi"}} + +script = {"flow": dictionary} + +actor = Actor( + script, + ("flow", "node"), +) diff --git a/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_11/yaml_files/script.yaml b/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_11/yaml_files/script.yaml new file mode 100755 index 000000000..f6bab01a1 --- /dev/null +++ b/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_11/yaml_files/script.yaml @@ -0,0 +1,16 @@ +namespaces: + main: + Actor: !from dff.core.engine.core.actor Actor + kw: !from dff.core.engine.core keywords + dictionary: + node: + kw.RESPONSE: hi + script: + 
flow: dictionary + actor: !call + name: Actor + args: + script: script + start_label: + - flow + - node diff --git a/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_12/python_files/main.py b/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_12/python_files/main.py new file mode 100755 index 000000000..92a87f19d --- /dev/null +++ b/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_12/python_files/main.py @@ -0,0 +1,13 @@ +from dff.core.engine.core.actor import Actor +from dff.core.engine.core import keywords as kw + +strs = strings = {"node": "node"} + +dicts = {1: {strs["node"]: 1}} + +script = {"flow": dicts[1]} + +actor = Actor( + script, + ("flow", strings["node"]), +) diff --git a/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_12/yaml_files/script.yaml b/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_12/yaml_files/script.yaml new file mode 100755 index 000000000..e69de29bb diff --git a/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_13/python_files/main.py b/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_13/python_files/main.py new file mode 100755 index 000000000..991827574 --- /dev/null +++ b/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_13/python_files/main.py @@ -0,0 +1,13 @@ +from dff.core.engine.core.actor import Actor +from dff.core.engine.core import keywords as kw + +strs = strings = {"node": "node"} + +dicts = {1: {strs["node"]: {kw.RESPONSE: "hi"}}} + +script = {"flow": dicts[1]} + +actor = Actor( + script, + ("flow", strings["node"]), +) diff --git a/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_13/yaml_files/script.yaml b/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_13/yaml_files/script.yaml new file mode 100755 index 000000000..93defa690 --- /dev/null +++ b/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_13/yaml_files/script.yaml @@ -0,0 +1,20 @@ +namespaces: + main: + Actor: !from dff.core.engine.core.actor Actor + kw: !from dff.core.engine.core keywords + strs: + 
node: node + strings: strs + dicts: + '1': + strs["node"]: + kw.RESPONSE: hi + script: + flow: dicts[1] + actor: !call + name: Actor + args: + script: script + start_label: + - flow + - strings["node"] diff --git a/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_14/python_files/main.py b/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_14/python_files/main.py new file mode 100755 index 000000000..bb649e039 --- /dev/null +++ b/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_14/python_files/main.py @@ -0,0 +1,15 @@ +from dff.core.engine.core.actor import Actor +from dff.core.engine.core import keywords as kw + +node = {3: "node"} + +strs = strings = {1: {"node": {1: "node"}}, 2: {"node": {2: "node"}}} + +dicts = {1: {strs[1][node[3]][1]: {kw.RESPONSE: "hi"}}} + +script = {"flow": dicts[1]} + +actor = Actor( + script, + ("flow", strings[2][node[3]][2]), +) diff --git a/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_14/yaml_files/script.yaml b/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_14/yaml_files/script.yaml new file mode 100755 index 000000000..d00ab5bf4 --- /dev/null +++ b/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_14/yaml_files/script.yaml @@ -0,0 +1,27 @@ +namespaces: + main: + Actor: !from dff.core.engine.core.actor Actor + kw: !from dff.core.engine.core keywords + node: + '3': node + strs: + '1': + !str node: + '1': !str node + '2': + !str node: + '2': !str node + strings: strs + dicts: + '1': + strs[1][node[3]][1]: + kw.RESPONSE: hi + script: + flow: dicts[1] + actor: !call + name: Actor + args: + script: script + start_label: + - flow + - strings[2][node[3]][2] diff --git a/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_15/python_files/main.py b/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_15/python_files/main.py new file mode 100755 index 000000000..144ac0d67 --- /dev/null +++ b/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_15/python_files/main.py @@ -0,0 +1,15 @@ +from 
dff.core.engine.core.actor import Actor +from dff.core.engine.core import keywords as kw + +node = {3: "node"} + +strs = strings = {1: {"node": {1: "node_incorrect"}}, 2: {"node": {2: "node"}}} + +dicts = {1: {strs[1][node[3]][1]: {kw.RESPONSE: "hi"}}} + +script = {"flow": dicts[1]} + +actor = Actor( + script, + ("flow", strings[2][node[3]][2]), +) diff --git a/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_15/yaml_files/script.yaml b/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_15/yaml_files/script.yaml new file mode 100755 index 000000000..e69de29bb diff --git a/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_16/python_files/main.py b/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_16/python_files/main.py new file mode 100755 index 000000000..a127134d5 --- /dev/null +++ b/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_16/python_files/main.py @@ -0,0 +1,19 @@ +from dff.core.engine.core.actor import Actor +from dff.core.engine.core import keywords as kw + +node = {3: "node"} + +keywords_2 = {2: kw.RESPONSE} + +keywords = {"rsp": keywords_2[2]} + +strs = strings = {1: {"node": {1: "node"}}, 2: {"node": {2: "node"}}} + +dicts = {1: {strs[1][node[3]][1]: {keywords["rsp"]: "hi"}}} + +script = {"flow": dicts[1]} + +actor = Actor( + script, + ("flow", strings[2][node[3]][2]), +) diff --git a/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_16/yaml_files/script.yaml b/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_16/yaml_files/script.yaml new file mode 100755 index 000000000..d6fec71d2 --- /dev/null +++ b/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_16/yaml_files/script.yaml @@ -0,0 +1,31 @@ +namespaces: + main: + Actor: !from dff.core.engine.core.actor Actor + kw: !from dff.core.engine.core keywords + node: + '3': node + keywords_2: + '2': kw.RESPONSE + keywords: + rsp: keywords_2[2] + strs: + '1': + !str node: + '1': !str node + '2': + !str node: + '2': !str node + strings: strs + dicts: + '1': + 
strs[1][node[3]][1]: + keywords["rsp"]: hi + script: + flow: dicts[1] + actor: !call + name: Actor + args: + script: script + start_label: + - flow + - strings[2][node[3]][2] diff --git a/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_17/python_files/main.py b/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_17/python_files/main.py new file mode 100755 index 000000000..7c240322b --- /dev/null +++ b/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_17/python_files/main.py @@ -0,0 +1,19 @@ +from dff.core.engine.core.actor import Actor +from dff.core.engine.core import keywords as kw +import proxy + + +keywords_2 = {2: kw.RESPONSE} + +keywords = {"rsp": keywords_2[2]} + +strs = strings = {1: {"node": {1: "node"}}, 2: {"node": {2: "node"}}} + +dicts = {1: {strs[1][proxy.node.node[3]][1]: {keywords["rsp"]: "hi"}}} + +script = {"flow": dicts[1]} + +actor = Actor( + script, + ("flow", strings[2][proxy.node.node[3]][2]), +) diff --git a/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_17/python_files/node.py b/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_17/python_files/node.py new file mode 100644 index 000000000..bb5ab051a --- /dev/null +++ b/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_17/python_files/node.py @@ -0,0 +1 @@ +node = {3: "node"} diff --git a/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_17/python_files/proxy.py b/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_17/python_files/proxy.py new file mode 100644 index 000000000..5aefdb97c --- /dev/null +++ b/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_17/python_files/proxy.py @@ -0,0 +1 @@ +import node diff --git a/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_17/yaml_files/script.yaml b/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_17/yaml_files/script.yaml new file mode 100755 index 000000000..523f854d8 --- /dev/null +++ b/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_17/yaml_files/script.yaml @@ -0,0 +1,35 @@ 
+namespaces: + main: + Actor: !from dff.core.engine.core.actor Actor + kw: !from dff.core.engine.core keywords + proxy: !import proxy + keywords_2: + '2': kw.RESPONSE + keywords: + rsp: keywords_2[2] + strs: + '1': + node: + '1': node + '2': + node: + '2': node + strings: strs + dicts: + '1': + strs[1][proxy.node.node[3]][1]: + keywords["rsp"]: hi + script: + flow: dicts[1] + actor: !call + name: Actor + args: + script: script + start_label: + - flow + - strings[2][proxy.node.node[3]][2] + proxy: + node: !import node + node: + node: + '3': node diff --git a/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_18/python_files/main.py b/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_18/python_files/main.py new file mode 100644 index 000000000..e6ddc685c --- /dev/null +++ b/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_18/python_files/main.py @@ -0,0 +1,25 @@ +from dff.core.engine.core import Actor as act +import dff.core.engine.core.keywords as kw +import dff.core.engine.conditions as cnd + +actor = act( + { + "flow1": { + "node": { + kw.RESPONSE: "hey", + kw.TRANSITIONS: { + ("flow2", "node"): cnd.true() + } + } + }, + "flow2": { + "node": { + kw.RESPONSE: "hi", + kw.TRANSITIONS: { + ("flow1", "node"): cnd.true() + } + } + } + }, + ("flow1", "node") +) \ No newline at end of file diff --git a/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_18/yaml_files/script.yaml b/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_18/yaml_files/script.yaml new file mode 100644 index 000000000..779d9527d --- /dev/null +++ b/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_18/yaml_files/script.yaml @@ -0,0 +1,22 @@ +namespaces: + main: + act: !from dff.core.engine.core Actor + kw: !import dff.core.engine.core.keywords + cnd: !import dff.core.engine.conditions + actor: !call + name: act + args: + script: + flow1: + node: + kw.RESPONSE: hey + kw.TRANSITIONS: + ("flow2", "node"): cnd.true() + flow2: + node: + kw.RESPONSE: hi + kw.TRANSITIONS: + 
("flow1", "node"): cnd.true() + start_label: + - flow1 + - node diff --git a/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_2/python_files/main.py b/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_2/python_files/main.py new file mode 100755 index 000000000..670d3af12 --- /dev/null +++ b/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_2/python_files/main.py @@ -0,0 +1,9 @@ +from dff.core.engine.core.actor import Actor as Act1 +from dff.core.engine.core import Actor as Act2 + +script = { + 1: "hey", +} + +actor = Act1(script, (1,), (1,)) +actor = Act2(script, (1,), (1,)) diff --git a/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_2/yaml_files/script.yaml b/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_2/yaml_files/script.yaml new file mode 100755 index 000000000..e69de29bb diff --git a/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_3/python_files/main.py b/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_3/python_files/main.py new file mode 100755 index 000000000..202184adf --- /dev/null +++ b/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_3/python_files/main.py @@ -0,0 +1,5 @@ +from dff.core.engine.core.actor import Actor + +script = {1: 1} + +actor = Actor(script, (1,), (1,)) diff --git a/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_3/yaml_files/script.yaml b/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_3/yaml_files/script.yaml new file mode 100755 index 000000000..e69de29bb diff --git a/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_4/python_files/main.py b/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_4/python_files/main.py new file mode 100755 index 000000000..671b12568 --- /dev/null +++ b/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_4/python_files/main.py @@ -0,0 +1,9 @@ +from dff.core.engine.core.actor import Actor +from dff.core.engine.core import keywords + +script = {keywords.GLOBAL: {keywords.RESPONSE: ""}} + +actor = Actor( + script, + 
(keywords.GLOBAL, keywords.RESPONSE), +) diff --git a/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_4/yaml_files/script.yaml b/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_4/yaml_files/script.yaml new file mode 100755 index 000000000..bce3ef860 --- /dev/null +++ b/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_4/yaml_files/script.yaml @@ -0,0 +1,14 @@ +namespaces: + main: + Actor: !from dff.core.engine.core.actor Actor + keywords: !from dff.core.engine.core keywords + script: + keywords.GLOBAL: + keywords.RESPONSE: !str + actor: !call + name: Actor + args: + script: script + start_label: + - keywords.GLOBAL + - keywords.RESPONSE diff --git a/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_5/python_files/main.py b/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_5/python_files/main.py new file mode 100755 index 000000000..c0c16d866 --- /dev/null +++ b/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_5/python_files/main.py @@ -0,0 +1,9 @@ +from dff.core.engine.core.actor import Actor +from dff.core.engine.core import keywords + +script = {keywords.GLOBAL: {"keywords.RESPONSE": ""}} + +actor = Actor( + script, + (keywords.GLOBAL, "keywords.RESPONSE"), +) diff --git a/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_5/yaml_files/script.yaml b/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_5/yaml_files/script.yaml new file mode 100755 index 000000000..e69de29bb diff --git a/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_6/python_files/main.py b/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_6/python_files/main.py new file mode 100755 index 000000000..205641c8a --- /dev/null +++ b/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_6/python_files/main.py @@ -0,0 +1,9 @@ +from dff.core.engine.core.actor import Actor +from dff.core.engine.core import keywords + +script = {keywords.GLOBAL: {keywords.RESPONSE: ""}} + +actor = Actor( + script, + (keywords.GLOBAL, "keywords.RESPONSE"), +) diff --git 
a/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_6/yaml_files/script.yaml b/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_6/yaml_files/script.yaml new file mode 100755 index 000000000..e69de29bb diff --git a/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_7/python_files/main.py b/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_7/python_files/main.py new file mode 100755 index 000000000..ce7ff6219 --- /dev/null +++ b/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_7/python_files/main.py @@ -0,0 +1,9 @@ +from dff.core.engine.core.actor import Actor +from dff.core.engine.core import keywords + +script = {"flow": {"node": {keywords.RESPONS: "hi"}}} + +actor = Actor( + script, + ("flow", "node"), +) diff --git a/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_7/yaml_files/script.yaml b/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_7/yaml_files/script.yaml new file mode 100755 index 000000000..e69de29bb diff --git a/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_8/python_files/main.py b/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_8/python_files/main.py new file mode 100755 index 000000000..54bc0a92b --- /dev/null +++ b/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_8/python_files/main.py @@ -0,0 +1,13 @@ +from dff.core.engine.core.actor import Actor +from dff.core.engine.core import keywords + +ints = {1: 2} + +strings = {1: {2: "flow"}} + +script = {strings[1][ints[1]]: {"node": {keywords.RESPONSE: "hi"}}} + +actor = Actor( + script, + (strings[1][ints[1]], "node"), +) diff --git a/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_8/yaml_files/script.yaml b/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_8/yaml_files/script.yaml new file mode 100755 index 000000000..d6c42e4cd --- /dev/null +++ b/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_8/yaml_files/script.yaml @@ -0,0 +1,20 @@ +namespaces: + main: + Actor: !from dff.core.engine.core.actor Actor + keywords: !from 
dff.core.engine.core keywords + ints: + '1': '2' + strings: + '1': + '2': flow + script: + strings[1][ints[1]]: + node: + keywords.RESPONSE: hi + actor: !call + name: Actor + args: + script: script + start_label: + - strings[1][ints[1]] + - node diff --git a/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_9/python_files/main.py b/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_9/python_files/main.py new file mode 100755 index 000000000..7c3f26303 --- /dev/null +++ b/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_9/python_files/main.py @@ -0,0 +1,9 @@ +from dff.core.engine.core.actor import Actor +from dff.core.engine.core import keywords as kw + +another_script = script = {"flow": {"node": {kw.RESPONSE: "hi"}}} + +actor = Actor( + script, + ("flow", "node"), +) diff --git a/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_9/yaml_files/script.yaml b/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_9/yaml_files/script.yaml new file mode 100755 index 000000000..f1a98d1e8 --- /dev/null +++ b/tests/parser/TEST_CASES/test_py2yaml/simple_tests/test_9/yaml_files/script.yaml @@ -0,0 +1,16 @@ +namespaces: + main: + Actor: !from dff.core.engine.core.actor Actor + kw: !from dff.core.engine.core keywords + another_script: + flow: + node: + kw.RESPONSE: hi + script: another_script + actor: !call + name: Actor + args: + script: script + start_label: + - flow + - node diff --git a/tests/parser/TEST_CASES/test_yaml2py/complex_tests/test_1/python_files/python_files/__init__.py b/tests/parser/TEST_CASES/test_yaml2py/complex_tests/test_1/python_files/python_files/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/parser/TEST_CASES/test_yaml2py/complex_tests/test_1/python_files/python_files/flows/__init__.py b/tests/parser/TEST_CASES/test_yaml2py/complex_tests/test_1/python_files/python_files/flows/__init__.py new file mode 100644 index 000000000..c706afbfc --- /dev/null +++ 
b/tests/parser/TEST_CASES/test_yaml2py/complex_tests/test_1/python_files/python_files/flows/__init__.py @@ -0,0 +1,2 @@ +from python_files.flows.fallback_flow import fallback_flow as ff +from python_files.flows.start_flow import start_flow as sf diff --git a/tests/parser/TEST_CASES/test_yaml2py/complex_tests/test_1/python_files/python_files/flows/fallback_flow.py b/tests/parser/TEST_CASES/test_yaml2py/complex_tests/test_1/python_files/python_files/flows/fallback_flow.py new file mode 100644 index 000000000..d0d5aa42f --- /dev/null +++ b/tests/parser/TEST_CASES/test_yaml2py/complex_tests/test_1/python_files/python_files/flows/fallback_flow.py @@ -0,0 +1,4 @@ +from python_files.nodes.fallback_node import fallback_node as fallback_node +from python_files.nodes import start_node as sn + +fallback_flow = {"fallback_node": fallback_node, "other_node": sn.start_node} diff --git a/tests/parser/TEST_CASES/test_yaml2py/complex_tests/test_1/python_files/python_files/flows/start_flow.py b/tests/parser/TEST_CASES/test_yaml2py/complex_tests/test_1/python_files/python_files/flows/start_flow.py new file mode 100644 index 000000000..2fd4af221 --- /dev/null +++ b/tests/parser/TEST_CASES/test_yaml2py/complex_tests/test_1/python_files/python_files/flows/start_flow.py @@ -0,0 +1,4 @@ +from python_files.nodes import sn as sn +from python_files.nodes.fallback_node import fallback_node as fallback_node + +start_flow = {"start_node": sn, "other_node": fallback_node} diff --git a/tests/parser/TEST_CASES/test_yaml2py/complex_tests/test_1/python_files/python_files/main.py b/tests/parser/TEST_CASES/test_yaml2py/complex_tests/test_1/python_files/python_files/main.py new file mode 100644 index 000000000..4da63948a --- /dev/null +++ b/tests/parser/TEST_CASES/test_yaml2py/complex_tests/test_1/python_files/python_files/main.py @@ -0,0 +1,11 @@ +from dff.core.engine.core.actor import Actor as act +from dff.core.engine.core.keywords import GLOBAL as glb +from dff.core.engine.core.keywords import 
RESPONSE as rsp +import python_files.flows as flows + +script = {glb: {rsp: "glb"}, "start_flow": flows.sf, "fallback_flow": flows.ff} +actor = act( + fallback_label=("fallback_flow", "fallback_node"), + start_label=("start_flow", "start_node"), + script=script, +) diff --git a/tests/parser/TEST_CASES/test_yaml2py/complex_tests/test_1/python_files/python_files/nodes/__init__.py b/tests/parser/TEST_CASES/test_yaml2py/complex_tests/test_1/python_files/python_files/nodes/__init__.py new file mode 100644 index 000000000..06dbf7561 --- /dev/null +++ b/tests/parser/TEST_CASES/test_yaml2py/complex_tests/test_1/python_files/python_files/nodes/__init__.py @@ -0,0 +1,2 @@ +from python_files.nodes.start_node import start_node as sn +from python_files.nodes.fallback_node import fallback_node as fn diff --git a/tests/parser/TEST_CASES/test_yaml2py/complex_tests/test_1/python_files/python_files/nodes/fallback_node.py b/tests/parser/TEST_CASES/test_yaml2py/complex_tests/test_1/python_files/python_files/nodes/fallback_node.py new file mode 100644 index 000000000..565febf18 --- /dev/null +++ b/tests/parser/TEST_CASES/test_yaml2py/complex_tests/test_1/python_files/python_files/nodes/fallback_node.py @@ -0,0 +1,3 @@ +from dff.core.engine.core.keywords import RESPONSE as rp + +fallback_node = {rp: "bye"} diff --git a/tests/parser/TEST_CASES/test_yaml2py/complex_tests/test_1/python_files/python_files/nodes/start_node.py b/tests/parser/TEST_CASES/test_yaml2py/complex_tests/test_1/python_files/python_files/nodes/start_node.py new file mode 100644 index 000000000..382f21230 --- /dev/null +++ b/tests/parser/TEST_CASES/test_yaml2py/complex_tests/test_1/python_files/python_files/nodes/start_node.py @@ -0,0 +1,3 @@ +from dff.core.engine.core.keywords import RESPONSE as rsp + +start_node = {rsp: "hi"} diff --git a/tests/parser/TEST_CASES/test_yaml2py/complex_tests/test_1/yaml_files/script.yaml b/tests/parser/TEST_CASES/test_yaml2py/complex_tests/test_1/yaml_files/script.yaml new file mode 
100755 index 000000000..4734326b1 --- /dev/null +++ b/tests/parser/TEST_CASES/test_yaml2py/complex_tests/test_1/yaml_files/script.yaml @@ -0,0 +1,48 @@ +namespaces: + python_files.main: + act: !from dff.core.engine.core.actor Actor + glb: !from dff.core.engine.core.keywords GLOBAL + rsp: !from dff.core.engine.core.keywords RESPONSE + flows: !import python_files.flows + script: + glb: + rsp: !str glb + start_flow: flows.sf + fallback_flow: flows.ff + actor: !call + name: act + args: + fallback_label: + - fallback_flow + - fallback_node + start_label: + - start_flow + - start_node + script: script + python_files.__init__: {} + python_files.flows.__init__: + ff: !from python_files.flows.fallback_flow fallback_flow + sf: !from python_files.flows.start_flow start_flow + python_files.flows.fallback_flow: + fallback_node: !from python_files.nodes.fallback_node fallback_node + sn: !from python_files.nodes start_node + fallback_flow: + !str fallback_node: fallback_node + other_node: sn.start_node + python_files.nodes.fallback_node: + rp: !from dff.core.engine.core.keywords RESPONSE + fallback_node: + rp: bye + python_files.nodes.__init__: + sn: !from python_files.nodes.start_node start_node + fn: !from python_files.nodes.fallback_node fallback_node + python_files.nodes.start_node: + rsp: !from dff.core.engine.core.keywords RESPONSE + start_node: + rsp: hi + python_files.flows.start_flow: + sn: !from python_files.nodes sn + fallback_node: !from python_files.nodes.fallback_node fallback_node + start_flow: + start_node: sn + other_node: fallback_node diff --git a/tests/parser/TEST_CASES/test_yaml2py/complex_tests/test_2/python_files/flows/__init__.py b/tests/parser/TEST_CASES/test_yaml2py/complex_tests/test_2/python_files/flows/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/parser/TEST_CASES/test_yaml2py/complex_tests/test_2/python_files/flows/start.py b/tests/parser/TEST_CASES/test_yaml2py/complex_tests/test_2/python_files/flows/start.py new 
file mode 100644 index 000000000..1691d6662 --- /dev/null +++ b/tests/parser/TEST_CASES/test_yaml2py/complex_tests/test_2/python_files/flows/start.py @@ -0,0 +1,3 @@ +from nodes.node_1 import node as node + +flow = {"start_node": node} diff --git a/tests/parser/TEST_CASES/test_yaml2py/complex_tests/test_2/python_files/main.py b/tests/parser/TEST_CASES/test_yaml2py/complex_tests/test_2/python_files/main.py new file mode 100644 index 000000000..02fed5a16 --- /dev/null +++ b/tests/parser/TEST_CASES/test_yaml2py/complex_tests/test_2/python_files/main.py @@ -0,0 +1,4 @@ +from script import act as act +from dff.core.engine import function_does_not_exist as function_does_not_exist + +result = function_does_not_exist(act, script={1: 1}, value=2) diff --git a/tests/parser/TEST_CASES/test_yaml2py/complex_tests/test_2/python_files/nodes/__init__.py b/tests/parser/TEST_CASES/test_yaml2py/complex_tests/test_2/python_files/nodes/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/parser/TEST_CASES/test_yaml2py/complex_tests/test_2/python_files/nodes/node_1.py b/tests/parser/TEST_CASES/test_yaml2py/complex_tests/test_2/python_files/nodes/node_1.py new file mode 100644 index 000000000..6d8fa94d1 --- /dev/null +++ b/tests/parser/TEST_CASES/test_yaml2py/complex_tests/test_2/python_files/nodes/node_1.py @@ -0,0 +1,3 @@ +import dff.core.engine.core.keywords as kw + +node = {kw.RESPONSE: "hey"} diff --git a/tests/parser/TEST_CASES/test_yaml2py/complex_tests/test_2/python_files/script.py b/tests/parser/TEST_CASES/test_yaml2py/complex_tests/test_2/python_files/script.py new file mode 100644 index 000000000..1e1b73508 --- /dev/null +++ b/tests/parser/TEST_CASES/test_yaml2py/complex_tests/test_2/python_files/script.py @@ -0,0 +1,4 @@ +from flows.start import flow as flow +from dff.core.engine.core.actor import Actor as Actor + +act = Actor(script={"start_flow": flow}, start_label=("start_flow", "start_node")) diff --git 
a/tests/parser/TEST_CASES/test_yaml2py/complex_tests/test_2/yaml_files/script.yaml b/tests/parser/TEST_CASES/test_yaml2py/complex_tests/test_2/yaml_files/script.yaml new file mode 100644 index 000000000..8fbb49859 --- /dev/null +++ b/tests/parser/TEST_CASES/test_yaml2py/complex_tests/test_2/yaml_files/script.yaml @@ -0,0 +1,32 @@ +namespaces: + main: + act: !from script act + function_does_not_exist: !from dff.core.engine function_does_not_exist + result: !call + name: function_does_not_exist + args: + 0: act + script: + '1': '1' + value: '2' + script: + flow: !from flows.start flow + Actor: !from dff.core.engine.core.actor Actor + act: !call + name: Actor + args: + script: + start_flow: flow + start_label: + - start_flow + - start_node + flows.start: + node: !from nodes.node_1 node + flow: + start_node: node + flows.__init__: {} + nodes.node_1: + kw: !import dff.core.engine.core.keywords + node: + kw.RESPONSE: hey + nodes.__init__: {} diff --git a/tests/parser/TEST_CASES/test_yaml2py/complex_tests/test_3/python_files/flow.py b/tests/parser/TEST_CASES/test_yaml2py/complex_tests/test_3/python_files/flow.py new file mode 100644 index 000000000..32f691479 --- /dev/null +++ b/tests/parser/TEST_CASES/test_yaml2py/complex_tests/test_3/python_files/flow.py @@ -0,0 +1,29 @@ +from dff.core.engine.core.keywords import TRANSITIONS as TRANSITIONS +from dff.core.engine.core.keywords import RESPONSE as RESPONSE +from dff.core.engine.core.keywords import PROCESSING as PROCESSING +from dff.core.engine.core.keywords import LOCAL as LOCAL +import dff.core.engine.conditions as cnd +import dff.core.engine.labels as lbl +import re as re +from functions import add_prefix as add_prefix + +global_flow = { + LOCAL: {PROCESSING: {2: add_prefix("l2_local"), 3: add_prefix("l3_local")}}, + "start_node": { + RESPONSE: "", + TRANSITIONS: { + ("music_flow", "node1"): cnd.regexp(r"talk about music"), + ("greeting_flow", "node1"): cnd.regexp(r"hi|hello", re.IGNORECASE), + "fallback_node": 
cnd.true(), + }, + }, + "fallback_node": { + RESPONSE: "Ooops", + TRANSITIONS: { + ("music_flow", "node1"): cnd.regexp(r"talk about music"), + ("greeting_flow", "node1"): cnd.regexp(r"hi|hello", re.IGNORECASE), + lbl.previous(): cnd.regexp(r"previous", re.IGNORECASE), + lbl.repeat(): cnd.true(), + }, + }, +} diff --git a/tests/parser/TEST_CASES/test_yaml2py/complex_tests/test_3/python_files/main.py b/tests/parser/TEST_CASES/test_yaml2py/complex_tests/test_3/python_files/main.py new file mode 100644 index 000000000..268995796 --- /dev/null +++ b/tests/parser/TEST_CASES/test_yaml2py/complex_tests/test_3/python_files/main.py @@ -0,0 +1,133 @@ +from dff.core.engine.core.keywords import TRANSITIONS as TRANSITIONS +from dff.core.engine.core.keywords import RESPONSE as RESPONSE +from dff.core.engine.core.keywords import PROCESSING as PROCESSING +from dff.core.engine.core.keywords import GLOBAL as GLOBAL +from dff.core.engine.core.keywords import MISC as MISC +from dff.core.engine.core.keywords import LOCAL as LOCAL +import dff.core.engine.conditions as cnd +import dff.core.engine.labels as lbl +from dff.core.engine.core import Actor as Act +from dff.core.engine.core import Context as Context +import dff.core.engine.responses as rsp +from functions import add_prefix as add_prefix +import typing as tp +import re as re +import transitions as transitions +from flow import global_flow as global_flow + +script = { + GLOBAL: { + TRANSITIONS: { + ("greeting_flow", "node1", 1.1): cnd.regexp(r"\b(hi|hello)\b", re.I), + ("music_flow", "node1", 1.1): cnd.regexp(r"talk about music"), + lbl.to_fallback(0.1): cnd.true(), + lbl.forward(): cnd.all( + [ + cnd.regexp(r"next\b"), + cnd.has_last_labels( + labels=[("music_flow", i) for i in ["node2", "node3"]] + ), + ] + ), + lbl.repeat(0.2): cnd.all( + [ + cnd.regexp(r"repeat", re.I), + cnd.negation(cnd.has_last_labels(flow_labels=["global_flow"])), + ] + ), + }, + PROCESSING: {1: add_prefix("l1_global"), 2: add_prefix("l2_global")}, + MISC: 
{"var1": "global_data", "var2": "global_data", "var3": "global_data"}, + RESPONSE: "", + }, + "global_flow": { + LOCAL: {PROCESSING: {2: add_prefix("l2_local"), 3: add_prefix("l3_local")}}, + "start_node": { + RESPONSE: "", + TRANSITIONS: { + ("music_flow", "node1"): cnd.regexp(r"talk about music"), + ("greeting_flow", "node1"): cnd.regexp(r"hi|hello", re.IGNORECASE), + "fallback_node": cnd.true(), + }, + }, + "fallback_node": { + RESPONSE: "Ooops", + TRANSITIONS: { + ("music_flow", "node1"): cnd.regexp(r"talk about music"), + ("greeting_flow", "node1"): cnd.regexp(r"hi|hello", re.IGNORECASE), + lbl.previous(): cnd.regexp(r"previous", re.IGNORECASE), + lbl.repeat(): cnd.true(), + }, + }, + }, + "greeting_flow": { + "node1": { + RESPONSE: rsp.choice(["Hi, what is up?", "Hello, how are you?"]), + TRANSITIONS: { + ("global_flow", "fallback_node", 0.1): cnd.true(), + "node2": cnd.regexp(r"how are you"), + }, + MISC: {"var3": "info_of_step_1"}, + }, + "node2": { + RESPONSE: "Good. What do you want to talk about?", + TRANSITIONS: { + lbl.to_fallback(0.1): cnd.true(), + lbl.forward(0.5): cnd.regexp(r"talk about"), + ("music_flow", "node1"): cnd.regexp(r"talk about music"), + lbl.previous(): cnd.regexp(r"previous", re.IGNORECASE), + }, + }, + "node3": {RESPONSE: foo, TRANSITIONS: {lbl.forward(): cnd.regexp(r"bye")}}, + "node4": { + RESPONSE: bar("bye"), + TRANSITIONS: { + "node1": cnd.regexp(r"hi|hello", re.IGNORECASE), + lbl.to_fallback(): cnd.true(), + }, + }, + }, + "music_flow": { + "node1": { + RESPONSE: "I love `System of a Down` group, would you like to tell about it? 
", + TRANSITIONS: { + lbl.forward(): cnd.regexp(r"yes|yep|ok", re.IGNORECASE), + lbl.to_fallback(): cnd.true(), + }, + }, + "node2": { + RESPONSE: "System of a Down is an Armenian-American heavy metal band formed in in 1994.", + TRANSITIONS: { + lbl.forward(): cnd.regexp(r"next", re.IGNORECASE), + lbl.repeat(): cnd.regexp(r"repeat", re.IGNORECASE), + lbl.to_fallback(): cnd.true(), + }, + }, + "node3": { + RESPONSE: "The band achieved commercial success with the release of five studio albums.", + TRANSITIONS: { + lbl.forward(): cnd.regexp(r"next", re.IGNORECASE), + lbl.backward(): cnd.regexp(r"back", re.IGNORECASE), + lbl.repeat(): cnd.regexp(r"repeat", re.IGNORECASE), + lbl.to_fallback(): cnd.true(), + }, + }, + "node4": { + RESPONSE: "That's all what I know", + TRANSITIONS: { + transitions.greeting_flow_n2_transition: cnd.regexp( + r"next", re.IGNORECASE + ), + transitions.high_priority_node_transition( + "greeting_flow", "node4" + ): cnd.regexp(r"next time", re.IGNORECASE), + lbl.to_fallback(): cnd.true(), + }, + }, + }, +} +actor = Act( + fallback_label=("global_flow", "fallback_node"), + script=script, + start_label=("global_flow", "start_node"), +) diff --git a/tests/parser/TEST_CASES/test_yaml2py/complex_tests/test_3/yaml_files/script.yaml b/tests/parser/TEST_CASES/test_yaml2py/complex_tests/test_3/yaml_files/script.yaml new file mode 100755 index 000000000..184ba3e4e --- /dev/null +++ b/tests/parser/TEST_CASES/test_yaml2py/complex_tests/test_3/yaml_files/script.yaml @@ -0,0 +1,138 @@ +namespaces: + main: + TRANSITIONS: !from dff.core.engine.core.keywords TRANSITIONS + RESPONSE: !from dff.core.engine.core.keywords RESPONSE + PROCESSING: !from dff.core.engine.core.keywords PROCESSING + GLOBAL: !from dff.core.engine.core.keywords GLOBAL + MISC: !from dff.core.engine.core.keywords MISC + LOCAL: !from dff.core.engine.core.keywords LOCAL + cnd: !import dff.core.engine.conditions + lbl: !import dff.core.engine.labels + Act: !from dff.core.engine.core Actor + 
Context: !from dff.core.engine.core Context + rsp: !import dff.core.engine.responses + add_prefix: !from functions add_prefix + tp: !import typing + re: !import re + transitions: !import transitions + global_flow: !from flow global_flow + script: + GLOBAL: + TRANSITIONS: + ("greeting_flow", "node1", 1.1): cnd.regexp(r"\b(hi|hello)\b", re.I) + ("music_flow", "node1", 1.1): cnd.regexp(r"talk about music") + lbl.to_fallback(0.1): cnd.true() + lbl.forward(): cnd.all([cnd.regexp(r"next\b"),cnd.has_last_labels(labels=[("music_flow", i) for i in ["node2", "node3"]]),]) + lbl.repeat(0.2): cnd.all([cnd.regexp(r"repeat", re.I),cnd.negation(cnd.has_last_labels(flow_labels=["global_flow"])),]) + PROCESSING: + '1': add_prefix("l1_global") + '2': add_prefix("l2_global") + MISC: + var1: global_data + var2: global_data + var3: global_data + RESPONSE: !str + !str global_flow: + LOCAL: + PROCESSING: + '2': add_prefix("l2_local") + '3': add_prefix("l3_local") + start_node: + RESPONSE: !str + TRANSITIONS: + ("music_flow", "node1"): cnd.regexp(r"talk about music") + ("greeting_flow", "node1"): cnd.regexp(r"hi|hello", re.IGNORECASE) + fallback_node: cnd.true() + fallback_node: + RESPONSE: Ooops + TRANSITIONS: + ("music_flow", "node1"): cnd.regexp(r"talk about music") + ("greeting_flow", "node1"): cnd.regexp(r"hi|hello", re.IGNORECASE) + lbl.previous(): cnd.regexp(r"previous", re.IGNORECASE) + lbl.repeat(): cnd.true() + greeting_flow: + node1: + RESPONSE: rsp.choice(["Hi, what is up?", "Hello, how are you?"]) + TRANSITIONS: + ("global_flow", "fallback_node", 0.1): cnd.true() + node2: cnd.regexp(r"how are you") + MISC: + var3: info_of_step_1 + node2: + RESPONSE: Good. What do you want to talk about? 
+ TRANSITIONS: + lbl.to_fallback(0.1): cnd.true() + lbl.forward(0.5): cnd.regexp(r"talk about") + ("music_flow", "node1"): cnd.regexp(r"talk about music") + lbl.previous(): cnd.regexp(r"previous", re.IGNORECASE) + node3: + RESPONSE: !py foo + TRANSITIONS: + lbl.forward(): cnd.regexp(r"bye") + node4: + RESPONSE: !py bar("bye") + TRANSITIONS: + node1: cnd.regexp(r"hi|hello", re.IGNORECASE) + lbl.to_fallback(): cnd.true() + music_flow: + node1: + RESPONSE: 'I love `System of a Down` group, would you like to tell about it? ' + TRANSITIONS: + lbl.forward(): cnd.regexp(r"yes|yep|ok", re.IGNORECASE) + lbl.to_fallback(): cnd.true() + node2: + RESPONSE: System of a Down is an Armenian-American heavy metal band formed in in 1994. + TRANSITIONS: + lbl.forward(): cnd.regexp(r"next", re.IGNORECASE) + lbl.repeat(): cnd.regexp(r"repeat", re.IGNORECASE) + lbl.to_fallback(): cnd.true() + node3: + RESPONSE: The band achieved commercial success with the release of five studio albums. + TRANSITIONS: + lbl.forward(): cnd.regexp(r"next", re.IGNORECASE) + lbl.backward(): cnd.regexp(r"back", re.IGNORECASE) + lbl.repeat(): cnd.regexp(r"repeat", re.IGNORECASE) + lbl.to_fallback(): cnd.true() + node4: + RESPONSE: That's all what I know + TRANSITIONS: + transitions.greeting_flow_n2_transition: cnd.regexp(r"next", re.IGNORECASE) + transitions.high_priority_node_transition("greeting_flow", "node4"): cnd.regexp(r"next time", re.IGNORECASE) + lbl.to_fallback(): cnd.true() + actor: !call + name: Act + args: + fallback_label: + - !str global_flow + - fallback_node + script: script + start_label: + - !str global_flow + - start_node + flow: + TRANSITIONS: !from dff.core.engine.core.keywords TRANSITIONS + RESPONSE: !from dff.core.engine.core.keywords RESPONSE + PROCESSING: !from dff.core.engine.core.keywords PROCESSING + LOCAL: !from dff.core.engine.core.keywords LOCAL + cnd: !import dff.core.engine.conditions + lbl: !import dff.core.engine.labels + re: !import re + add_prefix: !from functions 
add_prefix + global_flow: + LOCAL: + PROCESSING: + '2': add_prefix("l2_local") + '3': add_prefix("l3_local") + start_node: + RESPONSE: !str + TRANSITIONS: + ("music_flow", "node1"): cnd.regexp(r"talk about music") + ("greeting_flow", "node1"): cnd.regexp(r"hi|hello", re.IGNORECASE) + fallback_node: cnd.true() + fallback_node: + RESPONSE: Ooops + TRANSITIONS: + ("music_flow", "node1"): cnd.regexp(r"talk about music") + ("greeting_flow", "node1"): cnd.regexp(r"hi|hello", re.IGNORECASE) + lbl.previous(): cnd.regexp(r"previous", re.IGNORECASE) + lbl.repeat(): cnd.true() diff --git a/tests/parser/TEST_CASES/test_yaml2py/simple_tests/test_1/python_files/main.py b/tests/parser/TEST_CASES/test_yaml2py/simple_tests/test_1/python_files/main.py new file mode 100644 index 000000000..a47a0f77c --- /dev/null +++ b/tests/parser/TEST_CASES/test_yaml2py/simple_tests/test_1/python_files/main.py @@ -0,0 +1,4 @@ +import abc as abc + +d = {} +c = {1: d} diff --git a/tests/parser/TEST_CASES/test_yaml2py/simple_tests/test_1/yaml_files/script.yaml b/tests/parser/TEST_CASES/test_yaml2py/simple_tests/test_1/yaml_files/script.yaml new file mode 100755 index 000000000..baa3ab014 --- /dev/null +++ b/tests/parser/TEST_CASES/test_yaml2py/simple_tests/test_1/yaml_files/script.yaml @@ -0,0 +1,6 @@ +namespaces: + main: + abc: !import abc + d: { } + c: + '1': d diff --git a/tests/parser/TEST_CASES/test_yaml2py/simple_tests/test_2/python_files/main.py b/tests/parser/TEST_CASES/test_yaml2py/simple_tests/test_2/python_files/main.py new file mode 100644 index 000000000..b0a68856d --- /dev/null +++ b/tests/parser/TEST_CASES/test_yaml2py/simple_tests/test_2/python_files/main.py @@ -0,0 +1,5 @@ +from dff.core.engine.core.actor import Actor as Actor +from dff.core.engine.core import keywords as keywords + +script = {keywords.GLOBAL: {keywords.RESPONSE: ""}} +actor = Actor(script=script, start_label=(keywords.GLOBAL, keywords.RESPONSE)) diff --git 
a/tests/parser/TEST_CASES/test_yaml2py/simple_tests/test_2/yaml_files/script.yaml b/tests/parser/TEST_CASES/test_yaml2py/simple_tests/test_2/yaml_files/script.yaml new file mode 100755 index 000000000..69c71d505 --- /dev/null +++ b/tests/parser/TEST_CASES/test_yaml2py/simple_tests/test_2/yaml_files/script.yaml @@ -0,0 +1,14 @@ +namespaces: + main: + Actor: !from dff.core.engine.core.actor Actor + keywords: !from dff.core.engine.core keywords + script: + keywords.GLOBAL: + keywords.RESPONSE: !str + actor: !call + name: Actor + args: + script: script + start_label: + - keywords.GLOBAL + - keywords.RESPONSE diff --git a/tests/parser/TEST_CASES/test_yaml2py/simple_tests/test_3/python_files/main.py b/tests/parser/TEST_CASES/test_yaml2py/simple_tests/test_3/python_files/main.py new file mode 100644 index 000000000..99ce5d1e8 --- /dev/null +++ b/tests/parser/TEST_CASES/test_yaml2py/simple_tests/test_3/python_files/main.py @@ -0,0 +1,7 @@ +from dff.core.engine.core.actor import Actor as Actor +from dff.core.engine.core import keywords as keywords + +ints = {1: 2} +strings = {1: {2: "flow"}} +script = {strings[1][ints[1]]: {"node": {keywords.RESPONSE: "hi"}}} +actor = Actor(script=script, start_label=(strings[1][ints[1]], "node")) diff --git a/tests/parser/TEST_CASES/test_yaml2py/simple_tests/test_3/yaml_files/script.yaml b/tests/parser/TEST_CASES/test_yaml2py/simple_tests/test_3/yaml_files/script.yaml new file mode 100755 index 000000000..da2a99a78 --- /dev/null +++ b/tests/parser/TEST_CASES/test_yaml2py/simple_tests/test_3/yaml_files/script.yaml @@ -0,0 +1,20 @@ +namespaces: + main: + Actor: !from dff.core.engine.core.actor Actor + keywords: !from dff.core.engine.core keywords + ints: + '1': '2' + strings: + '1': + '2': flow + script: + strings[1][ints[1]]: + node: + keywords.RESPONSE: hi + actor: !call + name: Actor + args: + script: script + start_label: + - strings[1][ints[1]] + - node diff --git 
a/tests/parser/TEST_CASES/test_yaml2py/simple_tests/test_4/python_files/main.py b/tests/parser/TEST_CASES/test_yaml2py/simple_tests/test_4/python_files/main.py new file mode 100644 index 000000000..239c22fa8 --- /dev/null +++ b/tests/parser/TEST_CASES/test_yaml2py/simple_tests/test_4/python_files/main.py @@ -0,0 +1,6 @@ +from dff.core.engine.core.actor import Actor as Actor +from dff.core.engine.core import keywords as kw + +another_script = {"flow": {"node": {kw.RESPONSE: "hi"}}} +script = another_script +actor = Actor(script=script, start_label=("flow", "node")) diff --git a/tests/parser/TEST_CASES/test_yaml2py/simple_tests/test_4/yaml_files/script.yaml b/tests/parser/TEST_CASES/test_yaml2py/simple_tests/test_4/yaml_files/script.yaml new file mode 100755 index 000000000..ed4c773a3 --- /dev/null +++ b/tests/parser/TEST_CASES/test_yaml2py/simple_tests/test_4/yaml_files/script.yaml @@ -0,0 +1,16 @@ +namespaces: + main: + Actor: !from dff.core.engine.core.actor Actor + kw: !from dff.core.engine.core keywords + another_script: + flow: + node: + kw.RESPONSE: hi + script: another_script + actor: !call + name: Actor + args: + script: script + start_label: + - flow + - node diff --git a/tests/parser/TEST_CASES/test_yaml2py/simple_tests/test_5/python_files/main.py b/tests/parser/TEST_CASES/test_yaml2py/simple_tests/test_5/python_files/main.py new file mode 100644 index 000000000..894a1e2ad --- /dev/null +++ b/tests/parser/TEST_CASES/test_yaml2py/simple_tests/test_5/python_files/main.py @@ -0,0 +1,4 @@ +from dff.core.engine.core.actor import Actor as Actor +from scripts import script as script + +actor = Actor(script=script, start_label=("flow", "node")) diff --git a/tests/parser/TEST_CASES/test_yaml2py/simple_tests/test_5/python_files/scripts.py b/tests/parser/TEST_CASES/test_yaml2py/simple_tests/test_5/python_files/scripts.py new file mode 100644 index 000000000..f7e203ecc --- /dev/null +++ b/tests/parser/TEST_CASES/test_yaml2py/simple_tests/test_5/python_files/scripts.py 
@@ -0,0 +1,4 @@ +from dff.core.engine.core import keywords as kw + +another_script = {"flow": {"node": {kw.RESPONSE: "hi"}}} +script = another_script diff --git a/tests/parser/TEST_CASES/test_yaml2py/simple_tests/test_5/yaml_files/script.yaml b/tests/parser/TEST_CASES/test_yaml2py/simple_tests/test_5/yaml_files/script.yaml new file mode 100755 index 000000000..a9413e93e --- /dev/null +++ b/tests/parser/TEST_CASES/test_yaml2py/simple_tests/test_5/yaml_files/script.yaml @@ -0,0 +1,18 @@ +namespaces: + main: + Actor: !from dff.core.engine.core.actor Actor + script: !from scripts script + actor: !call + name: Actor + args: + script: script + start_label: + - flow + - node + scripts: + kw: !from dff.core.engine.core keywords + another_script: + flow: + node: + kw.RESPONSE: hi + script: another_script diff --git a/tests/parser/__init__.py b/tests/parser/__init__.py new file mode 100644 index 000000000..449c45490 --- /dev/null +++ b/tests/parser/__init__.py @@ -0,0 +1,6 @@ +try: + import pytest + + pytest.register_assert_rewrite("tests.parser.utils") +except ImportError: + ... diff --git a/tests/parser/parser_test_generators.py b/tests/parser/parser_test_generators.py new file mode 100644 index 000000000..70077c5e5 --- /dev/null +++ b/tests/parser/parser_test_generators.py @@ -0,0 +1,109 @@ +""" +Generate results for test cases. +""" +from pathlib import Path +from shutil import rmtree, copytree +import difflib + +from dff.utils.parser.dff_project import DFFProject + + +TEST_DIR = Path(__file__).parent / "TEST_CASES" + + +def rebuild_complex_cases(): + """ + Expected structure of directories inside `TEST_DIR / complex_cases`: + - `python_files` directory containing dff project. + - `new_script.yaml` file -- an edited dff project. + + The function generates new files inside such directories: + - `script.yaml` -- a yaml representation of files in `python_files` (`to_yaml(python_files)`). 
+ - `graph.json` -- a graph representation of files in `python_files` (`to_graph(python_files)`). + - `script.yaml.diff` -- a diff file for `script.yaml` and `new_script.yaml`. + - `result_creating` directory -- a python representation of `new_script.yaml` (`to_python(new_script.yaml)`). + - `result_editing` directory -- a result of editing `new_script.yaml` over `python_files` + (`to_python(new_script.yaml) -> python_files`). + + :raises RuntimeError: + If the directory is missing required filed. + """ + for working_dir in (TEST_DIR / "complex_cases").iterdir(): + if not working_dir.is_dir(): + continue + + # Generate script.yaml and graph.json + + python_dir = working_dir / "python_files" + main_file = python_dir / "main.py" + + if not python_dir.exists(): + raise RuntimeError(f"Python dir {python_dir} not found.") + + if not main_file.exists(): + raise RuntimeError(f"Main file {main_file} not found.") + + dff_project = DFFProject.from_python(python_dir, main_file) + + dff_project.to_yaml(working_dir / "script.yaml") + dff_project.to_graph(working_dir / "graph.json") + + # Generate diff file + + with open(working_dir / "script.yaml", "r") as fd: + original = fd.readlines() + + new_script = working_dir / "new_script.yaml" + + if not new_script.exists(): + raise RuntimeError(f"Edited script {new_script} not found.") + + with open(new_script, "r") as fd: + new = fd.readlines() + + diff = difflib.ndiff(original, new) + + with open(working_dir / "script.yaml.diff", "w") as fd: + fd.write("".join(diff)) + + # Generate results of to_python + + dff_project = DFFProject.from_yaml(working_dir / "new_script.yaml") + + creation_dir = working_dir / "result_creating" + + if creation_dir.exists(): + rmtree(creation_dir) + creation_dir.mkdir(exist_ok=True) + + editing_dir = working_dir / "result_editing" + + if editing_dir.exists(): + rmtree(editing_dir) + copytree(working_dir / "python_files", editing_dir) + + dff_project.to_python(working_dir / "result_creating") + 
dff_project.to_python(working_dir / "result_editing") + + +def rebuild_core_tutorials(): + engine_tutorial_dir = Path("tutorials/script/core") + + test_dir = TEST_DIR / "core_tutorials" + + if test_dir.exists(): + rmtree(test_dir) + test_dir.mkdir(parents=True) + + for file in engine_tutorial_dir.iterdir(): + if file.is_file(): + dff_project = DFFProject.from_python(engine_tutorial_dir, file, script_initializer="pipeline") + + dff_project.to_python(test_dir) + dff_project.to_yaml(test_dir / (file.stem + ".yaml")) + dff_project.to_graph(test_dir / (file.stem + ".json")) + + +if __name__ == "__main__": + rebuild_complex_cases() + rebuild_core_tutorials() diff --git a/tests/parser/test_basics.py b/tests/parser/test_basics.py new file mode 100644 index 000000000..edf8b89be --- /dev/null +++ b/tests/parser/test_basics.py @@ -0,0 +1,325 @@ +import ast +from sys import version_info +from pathlib import Path +from tempfile import TemporaryDirectory + +import pytest + +try: + from dff.utils.parser.base_parser_object import ( + Dict, + Expression, + Python, + Import, + Attribute, + Subscript, + Call, + Iterable, + ) + from dff.utils.parser.namespace import Namespace + from dff.utils.parser.dff_project import DFFProject + from .utils import assert_dirs_equal +except ImportError: + pytest.skip(reason="`parser` is not available", allow_module_level=True) + + +def test_just_works(): + obj = Expression.from_str("{1: {2: '3'}}") + assert isinstance(obj, Dict) + assert str(obj.children["value_1"]) == "{\n 2: '3',\n}" + + +def test_path(): + obj = Expression.from_str("{1: {2: '3'}}") + assert obj.children["value_1"].children["key_2"] == obj.resolve_path(("value_1", "key_2")) + + +def test_multiple_keys(): + obj = Expression.from_str("{1: 1, '1': '1'}") + assert obj.resolve_path(("value_1",)) == "1" + assert obj.resolve_path(("key_1",)) == "1" + assert obj.resolve_path(("value_'1'",)) == "'1'" + assert obj.resolve_path(("key_'1'",)) == "'1'" + + +def test_get_item(): + obj = 
Expression.from_str("{1: 1, '1': '1'}") + assert isinstance(obj, Dict) + assert obj["1"] == "1" + assert obj["'1'"] == "'1'" + + +def test_import_resolution(): + namespace1 = Namespace.from_ast(ast.parse("import namespace2"), location=["namespace1"]) + namespace2 = Namespace.from_ast(ast.parse("import namespace1"), location=["namespace2"]) + dff_project = DFFProject([namespace1, namespace2], validate=False) + import_stmt = dff_project.resolve_path(("namespace1", "namespace2")) + + assert isinstance(import_stmt, Import) + assert import_stmt._resolve_once == namespace2 + assert import_stmt == namespace1.resolve_path(("namespace2",)) + assert import_stmt.path == ("namespace1", "namespace2") + + +def test_multiple_imports(): + namespace1 = Namespace.from_ast(ast.parse("import namespace2, namespace3"), location=["namespace1"]) + namespace2 = Namespace.from_ast(ast.parse(""), location=["namespace2"]) + namespace3 = Namespace.from_ast(ast.parse(""), location=["namespace3"]) + dff_project = DFFProject([namespace1, namespace2, namespace3], validate=False) + + import_2 = dff_project.resolve_path(("namespace1", "namespace2")) + import_3 = dff_project.resolve_path(("namespace1", "namespace3")) + + assert isinstance(import_2, Import) + assert isinstance(import_3, Import) + + assert import_2._resolve_once == namespace2 + assert import_3._resolve_once == namespace3 + + assert str(import_2) == "import namespace2" + assert str(import_3) == "import namespace3" + + +def test_multilevel_import_resolution(): + namespace1 = Namespace.from_ast(ast.parse("import module.namespace2 as n2"), location=["namespace1"]) + namespace2 = Namespace.from_ast(ast.parse("import namespace1"), location=["module", "namespace2"]) + dff_project = DFFProject([namespace1, namespace2], validate=False) + import_stmt1 = dff_project.resolve_path(("namespace1", "n2")) + + assert isinstance(import_stmt1, Import) + assert import_stmt1._resolve_once == namespace2 + assert import_stmt1 == 
namespace1.resolve_path(("n2",)) + assert import_stmt1.path == ("namespace1", "n2") + + +def test_assignment(): + namespace = Namespace.from_ast(ast.parse("a = 1"), location=["namespace"]) + + assert namespace["a"] == Python.from_str("1") + assert namespace.resolve_path(("a", "value")) == Python.from_str("1") + + +def test_import_from(): + namespace1 = Namespace.from_ast(ast.parse("import module.namespace2 as n2"), location=["namespace1"]) + namespace2 = Namespace.from_ast(ast.parse("from . import a"), location=["module", "namespace2"]) + namespace3 = Namespace.from_ast(ast.parse("a = 1"), location=["module", "__init__"]) + dff_project = DFFProject([namespace1, namespace2, namespace3], validate=False) + + assert namespace2["a"] == Python.from_str("1") + assert Python.from_str("1") == namespace2["a"] + assert dff_project["namespace1"]["n2"].absolute["a"] == Python.from_str("1") + + +def test_name(): + namespace1 = Namespace.from_ast(ast.parse("a=b=1\nc=a\nd=b"), location=["namespace1"]) + + assert namespace1["c"] == Python.from_str("1") + assert namespace1["d"] == Python.from_str("1") + + +def test_attribute(): + namespace1 = Namespace.from_ast(ast.parse("import namespace2 as n2\na=n2.a"), location=["namespace1"]) + namespace2 = Namespace.from_ast(ast.parse("a=1"), location=["namespace2"]) + _ = DFFProject([namespace1, namespace2], validate=False) + + assert isinstance(namespace1["a"], Attribute) + assert namespace1["a"] == namespace2["a"] == Python.from_str("1") + + +def test_subscript(): + namespace = Namespace.from_ast(ast.parse("a = {1: {2: 3}}\nb = a[1][2]"), location=["namespace"]) + + assert isinstance(namespace["b"], Subscript) + assert namespace["b"] == Python.from_str("3") + + +def test_iterable(): + namespace = Namespace.from_ast(ast.parse("a = [1, 2, 3]\nb = a[2]"), location=["namespace"]) + + assert namespace["b"] == "3" + assert namespace["a"]["0"] == "1" + + for index, element in enumerate(namespace["a"]): + assert element == str(index + 1) + + assert 
len(namespace["a"]) == 3 + + # test obj dump + for obj in ((), [], (1,), [1], {1}, (1, 2), [1, 2], {1, 2}): + expr = Expression.from_obj(obj) + assert isinstance(expr, Iterable) + assert expr.dump() == repr(obj) + + +def test_call(): + namespace = Namespace.from_ast(ast.parse("import Actor\na = Actor(1, 2, c=3)"), location=["namespace"]) + _ = DFFProject([namespace], validate=False) + + call = namespace["a"] + + assert isinstance(call, Call) + assert repr(call.resolve_path(("func",))) == "Name(Actor)" + assert call.resolve_path(("arg_1",)) == Python.from_str("2") + assert call.resolve_path(("keyword_c",)) == Python.from_str("3") + assert call.func_name == "Actor" + + namespace = Namespace.from_ast(ast.parse("a = (lambda x, y, z: 1)(1, 2, c=3)"), location=["namespace"]) + _ = DFFProject([namespace], validate=False) + + call = namespace["a"] + assert isinstance(call, Call) + if version_info >= (3, 9): + assert call.func_name == "lambda x, y, z: 1" + else: + assert call.func_name == "(lambda x, y, z: 1)" + + +def test_comprehensions(): + list_comp_str = "[x for x in a]" + set_comp_str = "{x for q in b for x in q}" + dict_comp_str = "{x: x ** 2 for q in c if q for x in q if x > 0}" + gen_comp_str = "((x, q, z) for x in a if x > 0 if x < 10 for q, z in b if q.startswith('i') for y in c if true(y))" + namespace = Namespace.from_ast( + ast.parse( + f"import a, b, c\n" + f"list_comp={list_comp_str}\n" + f"set_comp={set_comp_str}\n" + f"dict_comp={dict_comp_str}\n" + f"gen_comp={gen_comp_str}" + ), + location=["namespace"], + ) + _ = DFFProject([namespace], validate=False) + + assert str(namespace["list_comp"]) == list_comp_str + assert str(namespace["set_comp"]) == set_comp_str + if version_info >= (3, 9): + assert str(namespace["dict_comp"]) == dict_comp_str + else: + assert str(namespace["dict_comp"]) == "{x: (x ** 2) for q in c if q for x in q if (x > 0)}" + if version_info >= (3, 9): + assert ( + str(namespace["gen_comp"]) + == "((x, q, z) for x in a if x > 0 if x < 10 
for (q, z) in b if q.startswith('i') for y in c if true(y))" + ) + else: + assert ( + str(namespace["gen_comp"]) + == "((x, q, z) for x in a if (x > 0) if (x < 10) for (q, z) in b if q.startswith('i') " + "for y in c if true(y))" + ) + + +def test_dependency_extraction(): + namespace1 = Namespace.from_ast(ast.parse("import namespace2\na = namespace2.a"), location=["namespace1"]) + namespace2 = Namespace.from_ast( + ast.parse( + "from namespace3 import c\nimport namespace3\nfrom namespace4 import d\na = print(c[d] + namespace3.j[1])" + ), + location=["namespace2"], + ) + namespace3 = Namespace.from_ast(ast.parse("c=e\ne={1: 2}\nf=1\nj={1: 2}"), location=["namespace3"]) + namespace4 = Namespace.from_ast(ast.parse("d=1\nq=4\nz=1"), location=["namespace4"]) + + _ = DFFProject([namespace1, namespace2, namespace3, namespace4], validate=False) + + assert namespace1["a"].dependencies() == { + "namespace1": {"a", "namespace2"}, + "namespace2": {"c", "d", "a", "namespace3"}, + "namespace3": {"c", "e", "j"}, + "namespace4": {"d"}, + } + + +def test_eq_operator(): + namespace = Namespace.from_ast(ast.parse("import dff.keywords as kw\na = kw.RESPONSE\nb=a"), location=["namespace"]) + _ = DFFProject([namespace], validate=False) + + assert "dff.keywords.RESPONSE" == namespace["a"] + assert namespace["b"] in ["dff.keywords.RESPONSE"] + + +def test_long_import_chain(): + dff_project = DFFProject.from_dict( + { + "main": {"imp": "from imp_1 import imp"}, + "imp_1": {"imp": "from imp_2 import imp"}, + "imp_2": {"imp": "from module.imp import obj"}, + "module.imp": {"obj": "from variables import number"}, + "module.variables": {"number": "1"}, + }, + validate=False, + ) + + assert dff_project["main"]["imp"].absolute == "1" + assert dff_project["main"]["imp"] == "1" + + +def test_deep_import(): + dff_project = DFFProject.from_dict( + { + "main": {"obj": "1"}, + "module.__init__": {}, + "module.main": {"obj": "from ..main import obj", "missing": "from ..main import missing"}, + 
"module.module.__init__": {}, + "module.module.main": { + "obj": "from ...main import obj", + "missing": "from ...main import missing", + "other_missing": "from ..main import missing", + }, + }, + validate=False, + ) + assert dff_project.resolve_path(("module.main", "obj")) == "1" + assert dff_project.resolve_path(("module.main", "missing")) == "main.missing" + assert dff_project.resolve_path(("module.module.main", "obj")) == "1" + assert dff_project.resolve_path(("module.module.main", "missing")) == "main.missing" + assert dff_project.resolve_path(("module.module.main", "other_missing")) == "main.missing" + + +def test_get_methods(): + ... # todo: add tests for get methods of Dict and Iterable + + +def test_namespace_filter(): + init_file = "from dff.pipeline import Pipeline\nunused_var=1\npipeline=Pipeline.from_script({'':{'':{}}},('',''))" + clean_file = "from dff.pipeline import Pipeline\npipeline=Pipeline.from_script({'':{'':{}}},('',''))" + + with TemporaryDirectory() as tmpdir, TemporaryDirectory() as tmpdir_clean: + file = Path(tmpdir) / "main.py" + file.touch() + with open(file, "w", encoding="utf-8") as fd: + fd.write(init_file) + file_clean = Path(tmpdir_clean) / "main.py" + file_clean.touch() + with open(file_clean, "w", encoding="utf-8") as fd: + fd.write(clean_file) + with TemporaryDirectory() as result, TemporaryDirectory() as correct_result: + DFFProject.from_python(Path(tmpdir), file).to_python(Path(result)) + DFFProject.from_python(Path(tmpdir_clean), file_clean).to_python(Path(correct_result)) + assert_dirs_equal(Path(result), Path(correct_result)) + + +def test_statement_extraction(): + file = ( + "import obj_1 as obj, obj_2, obj_3 as obj_3\n" + "from module import m_obj_1 as m_obj, m_obj_2, m_obj_3 as m_obj_3\n" + "a = b = c = 1" + ) + + namespace = Namespace.from_ast(ast.parse(file), location=["main"]) + _ = DFFProject(namespaces=[namespace], validate=False) + + assert ( + namespace.dump() == "import obj_1 as obj\n" + "import obj_2\n" + 
"import obj_3 as obj_3\n" + "from module import m_obj_1 as m_obj\n" + "from module import m_obj_2\n" + "from module import m_obj_3 as m_obj_3\n" + "\n" + "c = 1\n\n" + "a = c\n\n" + "b = c\n" + "" + ) diff --git a/tests/parser/test_complex.py b/tests/parser/test_complex.py new file mode 100644 index 000000000..b8a4183b1 --- /dev/null +++ b/tests/parser/test_complex.py @@ -0,0 +1,210 @@ +from inspect import signature +import ast + +import pytest + +try: + from dff.utils.parser.dff_project import DFFProject + from dff.utils.parser.base_parser_object import Expression, Call, Python + from dff.utils.parser.namespace import Namespace +except ImportError: + pytest.skip(reason="`parser` is not available", allow_module_level=True) + + +def test_referenced_object(): + dff_project = DFFProject.from_dict( + { + "main": { + "proxy_1": "import proxy_1", + "proxy_2": "import proxy_2", + "nonexistent": "import mod", + "other_nonexistent": "from module import object", + "number": "proxy_1.prox.numbers[proxy_1.numbers.numbers[proxy_2.vars.lower_number]]" + "[proxy_2.vars.number]", + "object": "nonexistent._1._2", + "other_object": "other_nonexistent._3._4", + "value_nonexistent": "other_nonexistent[proxy_1.prox.numbers[1][2]]", + "index_nonexistent": "proxy_2.numbers[object]", + "second_index_nonexistent": "proxy_2.numbers[1][object]", + }, + "proxy_1": {"prox": "import proxy_2", "numbers": "import other_variables"}, + "proxy_2": { + "numbers": "from variables import dictionary", + "vars": "import variables", + }, + "variables": { + "dictionary": "{1: {2: 3}}", + "number": "2", + "lower_number": "1", + }, + "other_variables": {"numbers": "{1: 1, 2: 2}"}, + }, + validate=False, + ) + + assert dff_project["main"]["number"] == "3" + assert dff_project["main"]["proxy_2"] == dff_project["proxy_2"] + assert dff_project["main"]["nonexistent"] == "mod" + assert dff_project["main"]["other_nonexistent"] == "module.object" + assert dff_project["main"]["object"] == "mod._1._2" + assert 
dff_project["main"]["other_object"] == "module.object._3._4" + assert dff_project["main"]["value_nonexistent"] == "module.object[3]" + assert dff_project["main"]["index_nonexistent"] == "{1: {2: 3,},}[mod._1._2]" + assert dff_project["main"]["second_index_nonexistent"] == "{2: 3,}[mod._1._2]" + + +def test_get_args(): + def func(param, another: int = 1, *args, **kwargs): + ... + + func_call = Expression.from_str("func(1, 2, 3, 4, stuff={'key': 'value'})") + assert isinstance(func_call, Call) + args = func_call.get_args(signature(func)) + assert args == { + "param": Expression.from_obj(1), + "another": Expression.from_obj(2), + "args": Expression.from_obj((3, 4)), + "kwargs": Expression.from_obj({"stuff": {"key": "value"}}), + } + + func_call = Expression.from_str("func()") + assert isinstance(func_call, Call) + with pytest.raises(TypeError) as exc_info: + _ = func_call.get_args(signature(func)) + assert exc_info.value.args[0] == "missing a required argument: 'param'" + + func_call = Expression.from_str("func(param=2)") + assert isinstance(func_call, Call) + args = func_call.get_args(signature(func)) + assert args == { + "param": Expression.from_obj(2), + "another": Expression.from_obj(1), + "args": Expression.from_obj(()), + "kwargs": Expression.from_obj({}), + } + + # test alternative naming + + def func(*func_args, **func_kwargs): + ... + + func_call = Expression.from_str("func(1, 2, 3, 4, stuff={'key': 'value'})") + assert isinstance(func_call, Call) + args = func_call.get_args(signature(func)) + assert args == { + "func_args": Expression.from_obj((1, 2, 3, 4)), + "func_kwargs": Expression.from_obj({"stuff": {"key": "value"}}), + } + + # test self / cls omitting + + def func(self): + ... + + func_call = Expression.from_str("func()") + assert isinstance(func_call, Call) + args = func_call.get_args(signature(func)) + assert args == {} + + def func(cls): + ... 
+ + func_call = Expression.from_str("func()") + assert isinstance(func_call, Call) + args = func_call.get_args(signature(func)) + assert args == {} + + +def test_python_dependencies(): + dff_project = DFFProject.from_dict( + { + "main": { + "transitions": "from .transitions import transitions", + "variables": "import variables", + "unrelated_variable": "1", + "python_object": "len(transitions[1]) > 0 > -2 > (lambda: -5 + variables.number)()", + }, + "transitions": { + "number": "from .variables import number", + "transitions": { + "1": "'cnd'", + "'two'": "'label'", + }, + }, + "variables": { + "number": "7", + "unrelated_number": "0", + }, + }, + validate=False, + ) + python_object = dff_project["main"]["python_object"] + assert isinstance(python_object, Python) + + assert python_object.dependencies() == { + "main": { + "variables", + "transitions", + "python_object", + }, + "transitions": { + "transitions", + }, + "variables": { + "number", + }, + } + + +def test_python_dumping(): + file = "\n".join( + [ + "import obj", + "import obj2", + "", + "", + "def func():", + " ...", + "", + "", + "other_obj = obj", + "", + "another_obj = obj", + "", + "", + "async def func():", + " ...", + "", + "", + # `Base` is needed because python3.8- and python3.9+ dump parentless class definitions differently + "class Class(Base):", + " ...", + "", + ] + ) + file_dict = { + "main": { + "obj": "import obj", + "obj2": "import obj2", + "0": "def func():\n ...", + "other_obj": "obj", + "another_obj": "obj", + "1": "async def func():\n ...", + "2": "class Class(Base):\n ...", + } + } + + object_filter = {"main": set(file_dict["main"].keys())} + + # init from ast + namespace = Namespace.from_ast(ast.parse(file), location=["main"]) + dff_project = DFFProject(namespaces=[namespace], validate=False) + + assert dff_project["main"].dump() == file + assert dff_project.to_dict(object_filter=object_filter) == file_dict + + # init from dict + dff_project = DFFProject.from_dict(file_dict, 
validate=False) + + assert dff_project["main"].dump() == file + assert dff_project.to_dict(object_filter=object_filter) == file_dict diff --git a/tests/parser/test_dff_project.py b/tests/parser/test_dff_project.py new file mode 100644 index 000000000..9ffcc8a68 --- /dev/null +++ b/tests/parser/test_dff_project.py @@ -0,0 +1,99 @@ +from pathlib import Path +from shutil import copytree +from tempfile import TemporaryDirectory + +import pytest + +try: + from dff.utils.parser.dff_project import DFFProject + from tests.parser.utils import assert_dirs_equal, assert_files_equal +except ImportError: + pytest.skip(reason="`parser` is not available", allow_module_level=True) + +TEST_DIR = Path(__file__).parent / "TEST_CASES" + +ENGINE_TUTORIAL_DIR = Path(__file__).parent.parent.parent / "tutorials" / "script" / "core" + + +# todo: add more parameters? +@pytest.mark.parametrize("test_case", [str(working_dir) for working_dir in (TEST_DIR / "complex_cases").iterdir()]) +def test_conversions(test_case: str, tmp_path): + working_dir = Path(test_case) + python_dir = working_dir / "python_files" + main_file = python_dir / "main.py" + yaml_script = working_dir / "script.yaml" + graph_script = working_dir / "graph.json" + + # from_python -> to_yaml & to_graph + dff_project = DFFProject.from_python(python_dir, main_file) + dff_project.to_yaml(tmp_path / "script.yaml") + dff_project.to_graph(tmp_path / "graph.json") + assert_files_equal(tmp_path / "script.yaml", yaml_script) + assert_files_equal(tmp_path / "graph.json", graph_script) + + # from_yaml -> to_graph + dff_project = DFFProject.from_yaml(yaml_script) + dff_project.to_graph(tmp_path / "graph.json") + assert_files_equal(tmp_path / "graph.json", graph_script) + + # from_graph -> to_yaml + dff_project = DFFProject.from_graph(graph_script) + dff_project.to_yaml(tmp_path / "script.yaml") + assert_files_equal(tmp_path / "script.yaml", yaml_script) + + # from_yaml(new_script) -> to_python + + dff_project = 
DFFProject.from_yaml(working_dir / "new_script.yaml") + + # test creating + with TemporaryDirectory() as tmpdir: + created = Path(tmpdir) + dff_project.to_python(created) + + assert_dirs_equal(working_dir / "result_creating", created) + + # test editing + with TemporaryDirectory() as tmpdir: + edited = Path(copytree(working_dir / "python_files", tmpdir + "/edited")) + + dff_project.to_python(edited) + + assert_dirs_equal(working_dir / "result_editing", edited) + + +@pytest.mark.parametrize( + "tutorial_name", + [ + "1_basics", + "2_conditions", + "3_responses", + "4_transitions", + "5_global_transitions", + "6_context_serialization", + "7_pre_response_processing", + "8_misc", + "9_pre_transitions_processing", + ], +) +def test_core_tutorials(tutorial_name: str, tmp_path): + python_name = tutorial_name + ".py" + + dff_project = DFFProject.from_python( + ENGINE_TUTORIAL_DIR, (ENGINE_TUTORIAL_DIR / python_name), script_initializer="pipeline" + ) + + dff_project.to_yaml(tmp_path / (tutorial_name + ".yaml")) + + assert_files_equal(tmp_path / (tutorial_name + ".yaml"), TEST_DIR / "core_tutorials" / (tutorial_name + ".yaml")) + + dff_project = DFFProject.from_yaml(tmp_path / (tutorial_name + ".yaml")) + + dff_project.to_graph(tmp_path / (tutorial_name + ".json")) + + assert_files_equal(tmp_path / (tutorial_name + ".json"), TEST_DIR / "core_tutorials" / (tutorial_name + ".json")) + + dff_project = DFFProject.from_graph(tmp_path / (tutorial_name + ".json")) + + dff_project.to_python(tmp_path) + + assert_files_equal((tmp_path / python_name), TEST_DIR / "core_tutorials" / python_name) diff --git a/tests/parser/utils.py b/tests/parser/utils.py new file mode 100644 index 000000000..d9bb8f7b5 --- /dev/null +++ b/tests/parser/utils.py @@ -0,0 +1,27 @@ +from pathlib import Path +from filecmp import dircmp +from typing import List + + +def assert_files_equal(file1: Path, file2: Path): + with open(file1, "r") as first, open(file2, "r") as second: + assert list(first.readlines()) == 
list(second.readlines()) + + +def assert_dirs_equal(dir1: Path, dir2: Path): + subdir_stack: List[str] = [] + + def _assert_dir_eq(dir_cmp: dircmp): + assert len(dir_cmp.left_only) == 0 + assert len(dir_cmp.right_only) == 0 + for diff_file in dir_cmp.diff_files: + with open(dir1.joinpath(*subdir_stack, diff_file), "r") as first, open( + dir2.joinpath(*subdir_stack, diff_file), "r" + ) as second: + assert list(first.readlines()) == list(second.readlines()) + for name, subdir in dir_cmp.subdirs.items(): + subdir_stack.append(name) + _assert_dir_eq(subdir) + subdir_stack.pop() + + _assert_dir_eq(dircmp(dir1, dir2)) diff --git a/tests/viewer/TEST_CASES/flow.py b/tests/viewer/TEST_CASES/flow.py new file mode 100644 index 000000000..473cdd23d --- /dev/null +++ b/tests/viewer/TEST_CASES/flow.py @@ -0,0 +1,32 @@ +from dff.script.core.keywords import TRANSITIONS, RESPONSE, PROCESSING, LOCAL +import dff.script.conditions as cnd +import dff.script.labels as lbl +import re + +from functions import add_prefix + + +global_flow = { + LOCAL: {PROCESSING: {2: add_prefix("l2_local"), 3: add_prefix("l3_local")}}, + "start_node": { # This is an initial node, it doesn't need an `RESPONSE` + RESPONSE: "", + TRANSITIONS: { + ("music_flow", "node1"): cnd.regexp(r"talk about music"), # first check + ("greeting_flow", "node1"): cnd.regexp(r"hi|hello", re.IGNORECASE), # second check + # ("global_flow", "fallback_node"): cnd.true(), # third check + "fallback_node": cnd.true(), # third check + # "fallback_node" is equivalent to ("global_flow", "fallback_node") + }, + }, + "fallback_node": { # We get to this node if an error occurred while the agent was running + RESPONSE: "Ooops", + TRANSITIONS: { + ("music_flow", "node1"): cnd.regexp(r"talk about music"), # first check + ("greeting_flow", "node1"): cnd.regexp(r"hi|hello", re.IGNORECASE), # second check + lbl.previous(): cnd.regexp(r"previous", re.IGNORECASE), # third check + # lbl.previous() is equivalent to ("PREVIOUS_flow", "PREVIOUS_node") 
+ lbl.repeat(): cnd.true(), # fourth check + # lbl.repeat() is equivalent to ("global_flow", "fallback_node") + }, + }, +} diff --git a/tests/viewer/TEST_CASES/functions.py b/tests/viewer/TEST_CASES/functions.py new file mode 100644 index 000000000..736e27df9 --- /dev/null +++ b/tests/viewer/TEST_CASES/functions.py @@ -0,0 +1,11 @@ +from dff.script import Actor, Context + + +def add_prefix(prefix): + def add_prefix_processing(ctx: Context, actor: Actor, *args, **kwargs) -> Context: + processed_node = ctx.a_s.get("processed_node", ctx.a_s["next_node"]) + processed_node.response = f"{prefix}: {processed_node.response}" + ctx.a_s["processed_node"] = processed_node + return ctx + + return add_prefix_processing diff --git a/tests/viewer/TEST_CASES/main.py b/tests/viewer/TEST_CASES/main.py new file mode 100644 index 000000000..2db2f6a7a --- /dev/null +++ b/tests/viewer/TEST_CASES/main.py @@ -0,0 +1,150 @@ +from dff.script import ( + TRANSITIONS, + RESPONSE, + PRE_RESPONSE_PROCESSING, + GLOBAL, + MISC, + LOCAL, +) +import dff.script.conditions as cnd +import dff.script.labels as lbl +from dff.script import Context # noqa: F401 +from dff.pipeline import Pipeline +import dff.script.responses as rsp +from functions import add_prefix +import typing as tp, re, transitions # noqa: E401, F401 +from flow import global_flow # noqa: F401 + + +script = { + GLOBAL: { + TRANSITIONS: { + ("greeting_flow", "node1", 1.1): cnd.regexp(r"\b(hi|hello)\b", re.I), + ("music_flow", "node1", 1.1): cnd.regexp(r"talk about music"), + lbl.to_fallback(0.1): cnd.true(), + lbl.forward(): cnd.all( + [ + cnd.regexp(r"next\b"), + cnd.has_last_labels(labels=[("music_flow", i) for i in ["node2", "node3"]]), + ] + ), + lbl.repeat(0.2): cnd.all( + [ + cnd.regexp(r"repeat", re.I), + cnd.negation(cnd.has_last_labels(flow_labels=["global_flow"])), + ] + ), + }, + PRE_RESPONSE_PROCESSING: {1: add_prefix("l1_global"), 2: add_prefix("l2_global")}, + MISC: { + "var1": "global_data", + "var2": "global_data", + 
"var3": "global_data", + }, + RESPONSE: "", + }, + "global_flow": { + LOCAL: { + PRE_RESPONSE_PROCESSING: {2: add_prefix("l2_local"), 3: add_prefix("l3_local")}, + TRANSITIONS: { + ("greeting_flow", "node1"): cnd.regexp(r"greetings"), + }, + }, + "start_node": { # This is an initial node, it doesn't need an `RESPONSE` + RESPONSE: "", + TRANSITIONS: { + ("music_flow", "node1"): cnd.regexp(r"talk about music"), # first check + ("greeting_flow", "node1"): cnd.regexp(r"hi|hello", re.IGNORECASE), # second check + # ("global_flow", "fallback_node"): cnd.true(), # third check + "fallback_node": cnd.true(), # third check + # "fallback_node" is equivalent to ("global_flow", "fallback_node") + }, + }, + "fallback_node": { # We get to this node if an error occurred while the agent was running + RESPONSE: "Ooops", + TRANSITIONS: { + ("music_flow", "node1"): cnd.regexp(r"talk about music"), # first check + ("greeting_flow", "node1"): cnd.regexp(r"hi|hello", re.IGNORECASE), # second check + lbl.previous(): cnd.regexp(r"previous", re.IGNORECASE), # third check + # lbl.previous() is equivalent to ("PREVIOUS_flow", "PREVIOUS_node") + lbl.repeat(): cnd.true(), # fourth check + # lbl.repeat() is equivalent to ("global_flow", "fallback_node") + }, + }, + }, + "greeting_flow": { + "node1": { + RESPONSE: rsp.choice( + ["Hi, what is up?", "Hello, how are you?"] + ), # When the agent goes to node1, we return "Hi, how are you?" + TRANSITIONS: { + ("global_flow", "fallback_node", 0.1): cnd.true(), # second check + "node2": cnd.regexp(r"how are you"), # first check + # "node2" is equivalent to ("greeting_flow", "node2", 1.0) + }, + MISC: {"var3": "info_of_step_1"}, + }, + "node2": { + RESPONSE: "Good. 
What do you want to talk about?", + TRANSITIONS: { + lbl.to_fallback(0.1): cnd.true(), # third check + # lbl.to_fallback(0.1) is equivalent to ("global_flow", "fallback_node", 0.1) + lbl.forward(0.5): cnd.regexp(r"talk about"), # second check + # lbl.forward(0.5) is equivalent to ("greeting_flow", "node3", 0.5) + ("music_flow", "node1"): cnd.regexp(r"talk about music"), # first check + lbl.previous(): cnd.regexp(r"previous", re.IGNORECASE), # third check + # ("music_flow", "node1") is equivalent to ("music_flow", "node1", 1.0) + }, + }, + "node3": {RESPONSE: foo, TRANSITIONS: {lbl.forward(): cnd.regexp(r"bye")}}, # noqa: F821 + "node4": { + RESPONSE: bar("bye"), # noqa: F821 + TRANSITIONS: { + "node1": cnd.regexp(r"hi|hello", re.IGNORECASE), # first check + lbl.to_fallback(): cnd.true(), # second check + }, + }, + }, + "music_flow": { + "node1": { + RESPONSE: "I love `System of a Down` group, would you like to tell about it? ", + TRANSITIONS: { + lbl.forward(): cnd.regexp(r"yes|yep|ok", re.IGNORECASE), + lbl.to_fallback(): cnd.true(), + }, + }, + "node2": { + RESPONSE: "System of a Down is an Armenian-American heavy metal band formed in in 1994.", + TRANSITIONS: { + lbl.forward(): cnd.regexp(r"next", re.IGNORECASE), + lbl.repeat(): cnd.regexp(r"repeat", re.IGNORECASE), + lbl.to_fallback(): cnd.true(), + }, + }, + "node3": { + RESPONSE: "The band achieved commercial success with the release of five studio albums.", + TRANSITIONS: { + lbl.forward(): cnd.regexp(r"next", re.IGNORECASE), + lbl.backward(): cnd.regexp(r"back", re.IGNORECASE), + lbl.repeat(): cnd.regexp(r"repeat", re.IGNORECASE), + lbl.to_fallback(): cnd.true(), + }, + }, + "node4": { + RESPONSE: "That's all what I know", + TRANSITIONS: { + transitions.greeting_flow_n2_transition: cnd.regexp(r"next", re.IGNORECASE), + transitions.high_priority_node_transition("greeting_flow", "node4"): cnd.regexp( + r"next time", re.IGNORECASE + ), + lbl.to_fallback(): cnd.true(), + }, + }, + }, +} + +actor = 
Pipeline.from_script( + fallback_label=("global_flow", "fallback_node"), + script=script, + start_label=("global_flow", "start_node"), +) diff --git a/tests/viewer/TEST_CASES/transitions.py b/tests/viewer/TEST_CASES/transitions.py new file mode 100644 index 000000000..2f90b6088 --- /dev/null +++ b/tests/viewer/TEST_CASES/transitions.py @@ -0,0 +1,14 @@ +from dff.script.core.types import NodeLabel3Type +from dff.script import Actor, Context +import typing as tp + + +def greeting_flow_n2_transition(ctx: Context, actor: Actor, *args, **kwargs) -> NodeLabel3Type: + return "greeting_flow", "node2", 1.0 + + +def high_priority_node_transition(flow_label: str, label: str) -> tp.Callable[..., NodeLabel3Type]: + def transition(ctx: Context, actor: Actor, *args, **kwargs) -> NodeLabel3Type: + return flow_label, label, 2.0 + + return transition diff --git a/tests/viewer/__init__.py b/tests/viewer/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/viewer/opts_off.dot b/tests/viewer/opts_off.dot new file mode 100644 index 000000000..50d4de73d --- /dev/null +++ b/tests/viewer/opts_off.dot @@ -0,0 +1,80 @@ +digraph { + graph [compound=true, + fontname="Helvetica,Arial,sans-serif", + overlap=ipsep, + splines=spline + ]; + node [fillcolor="#ffffffbf", + fontname="Helvetica,Arial,sans-serif", + shape=box, + style="rounded, filled" + ]; + subgraph "cluster_'greeting_flow'" { + graph [color="#96b0af", + label=GREETING_FLOW, + style="rounded, filled" + ]; + "('NODE', \"'greeting_flow'\", \"'node1'\")" [label=<

NODE1


Transitions
(
cnd.true())
(
cnd.true())
(
cnd.regexp('how are you'))
(
cnd.all([cnd.regexp('next\\b'), cnd.has_last_labels(labels=[('music_flow', i) for i in ['node2', 'node3']])]))
(
cnd.all([cnd.regexp('repeat', re.I), cnd.negation(cnd.has_last_labels(flow_labels=['global_flow']))]))
>]; + "('NODE', \"'greeting_flow'\", \"'node2'\")" [label=<

NODE2


Transitions
(
cnd.regexp('talk about music'))
(
cnd.true())
(
cnd.true())
(
cnd.all([cnd.regexp('next\\b'), cnd.has_last_labels(labels=[('music_flow', i) for i in ['node2', 'node3']])]))
(
cnd.regexp('talk about'))
(
cnd.all([cnd.regexp('repeat', re.I), cnd.negation(cnd.has_last_labels(flow_labels=['global_flow']))]))
>]; + "('NODE', \"'greeting_flow'\", \"'node3'\")" [label=<

NODE3


Transitions
(
cnd.true())
(
cnd.all([cnd.regexp('next\\b'), cnd.has_last_labels(labels=[('music_flow', i) for i in ['node2', 'node3']])]))
(
cnd.regexp('bye'))
(
cnd.all([cnd.regexp('repeat', re.I), cnd.negation(cnd.has_last_labels(flow_labels=['global_flow']))]))
>]; + "('NODE', \"'greeting_flow'\", \"'node4'\")" [label=<

NODE4


Transitions
(
cnd.regexp('hi|hello', re.IGNORECASE))
(
cnd.all([cnd.regexp('next\\b'), cnd.has_last_labels(labels=[('music_flow', i) for i in ['node2', 'node3']])]))
(
cnd.true())
(
cnd.true())
(
cnd.all([cnd.regexp('repeat', re.I), cnd.negation(cnd.has_last_labels(flow_labels=['global_flow']))]))
>]; + } + subgraph "cluster_'music_flow'" { + graph [color="#c6ae82", + label=MUSIC_FLOW, + style="rounded, filled" + ]; + "('NODE', \"'music_flow'\", \"'node1'\")" [label=<

NODE1


Transitions
(
cnd.true())
(
cnd.true())
(
cnd.all([cnd.regexp('next\\b'), cnd.has_last_labels(labels=[('music_flow', i) for i in ['node2', 'node3']])]))
(
cnd.regexp('yes|yep|ok', re.IGNORECASE))
(
cnd.all([cnd.regexp('repeat', re.I), cnd.negation(cnd.has_last_labels(flow_labels=['global_flow']))]))
>]; + "('NODE', \"'music_flow'\", \"'node2'\")" [label=<

NODE2


Transitions
(
cnd.true())
(
cnd.true())
(
cnd.all([cnd.regexp('next\\b'), cnd.has_last_labels(labels=[('music_flow', i) for i in ['node2', 'node3']])]))
(
cnd.regexp('next', re.IGNORECASE))
(
cnd.all([cnd.regexp('repeat', re.I), cnd.negation(cnd.has_last_labels(flow_labels=['global_flow']))]))
(
cnd.regexp('repeat', re.IGNORECASE))
>]; + "('NODE', \"'music_flow'\", \"'node3'\")" [label=<

NODE3


Transitions
(
cnd.true())
(
cnd.true())
(
cnd.all([cnd.regexp('next\\b'), cnd.has_last_labels(labels=[('music_flow', i) for i in ['node2', 'node3']])]))
(
cnd.regexp('next', re.IGNORECASE))
(
cnd.all([cnd.regexp('repeat', re.I), cnd.negation(cnd.has_last_labels(flow_labels=['global_flow']))]))
(
cnd.regexp('repeat', re.IGNORECASE))
(
cnd.regexp('back', re.IGNORECASE))
>]; + "('NODE', \"'music_flow'\", \"'node4'\")" [label=<

NODE4


Transitions
(
cnd.true())
(
cnd.true())
(
cnd.all([cnd.regexp('next\\b'), cnd.has_last_labels(labels=[('music_flow', i) for i in ['node2', 'node3']])]))
(
cnd.all([cnd.regexp('repeat', re.I), cnd.negation(cnd.has_last_labels(flow_labels=['global_flow']))]))
>]; + } + subgraph "cluster_'global_flow'" { + graph [color="#f78378", + label=GLOBAL_FLOW, + style="rounded, filled" + ]; + "('NODE', \"'global_flow'\", \"'fallback_node'\")" [label=<

FALLBACK_NODE


Transitions
(
cnd.regexp('talk about music'))
(
cnd.regexp('hi|hello', re.IGNORECASE))
(
cnd.true())
(
cnd.all([cnd.regexp('repeat', re.I), cnd.negation(cnd.has_last_labels(flow_labels=['global_flow']))]))
(
cnd.true())
(
cnd.all([cnd.regexp('next\\b'), cnd.has_last_labels(labels=[('music_flow', i) for i in ['node2', 'node3']])]))
>]; + "('NODE', \"'global_flow'\", \"'start_node'\")" [label=<

START_NODE


Transitions
(
cnd.regexp('talk about music'))
(
cnd.regexp('hi|hello', re.IGNORECASE))
(
cnd.true())
(
cnd.true())
(
cnd.all([cnd.regexp('next\\b'), cnd.has_last_labels(labels=[('music_flow', i) for i in ['node2', 'node3']])]))
(
cnd.all([cnd.regexp('repeat', re.I), cnd.negation(cnd.has_last_labels(flow_labels=['global_flow']))]))
>]; + } + "('NODE', \"'greeting_flow'\", \"'node1'\")":repeat -> "('NODE', \"'greeting_flow'\", \"'node1'\")"; + "('NODE', \"'greeting_flow'\", \"'node1'\")":"('global_flow', 'fallback_node', 0.1)" -> "('NODE', \"'global_flow'\", \"'fallback_node'\")"; + "('NODE', \"'greeting_flow'\", \"'node1'\")":to_fallback -> "('NODE', \"'global_flow'\", \"'fallback_node'\")"; + "('NODE', \"'greeting_flow'\", \"'node1'\")":"'node2'" -> "('NODE', \"'greeting_flow'\", \"'node2'\")"; + "('NODE', \"'greeting_flow'\", \"'node1'\")":forward -> "('NODE', \"'greeting_flow'\", \"'node2'\")"; + "('NODE', \"'global_flow'\", \"'fallback_node'\")":"('greeting_flow', 'node1')" -> "('NODE', \"'greeting_flow'\", \"'node1'\")"; + "('NODE', \"'global_flow'\", \"'fallback_node'\")":to_fallback -> "('NODE', \"'global_flow'\", \"'fallback_node'\")"; + "('NODE', \"'global_flow'\", \"'fallback_node'\")":repeat -> "('NODE', \"'global_flow'\", \"'fallback_node'\")"; + "('NODE', \"'global_flow'\", \"'fallback_node'\")":"('music_flow', 'node1')" -> "('NODE', \"'music_flow'\", \"'node1'\")"; + "('NODE', \"'global_flow'\", \"'fallback_node'\")":forward -> "('NODE', \"'global_flow'\", \"'start_node'\")"; + "('NODE', \"'greeting_flow'\", \"'node2'\")":to_fallback -> "('NODE', \"'global_flow'\", \"'fallback_node'\")"; + "('NODE', \"'greeting_flow'\", \"'node2'\")":repeat -> "('NODE', \"'greeting_flow'\", \"'node2'\")"; + "('NODE', \"'greeting_flow'\", \"'node2'\")":"('music_flow', 'node1')" -> "('NODE', \"'music_flow'\", \"'node1'\")"; + "('NODE', \"'greeting_flow'\", \"'node2'\")":forward -> "('NODE', \"'greeting_flow'\", \"'node3'\")"; + "('NODE', \"'music_flow'\", \"'node1'\")":to_fallback -> "('NODE', \"'global_flow'\", \"'fallback_node'\")"; + "('NODE', \"'music_flow'\", \"'node1'\")":repeat -> "('NODE', \"'music_flow'\", \"'node1'\")"; + "('NODE', \"'music_flow'\", \"'node1'\")":forward -> "('NODE', \"'music_flow'\", \"'node2'\")"; + "('NODE', \"'music_flow'\", \"'node2'\")":to_fallback -> "('NODE', 
\"'global_flow'\", \"'fallback_node'\")"; + "('NODE', \"'music_flow'\", \"'node2'\")":repeat -> "('NODE', \"'music_flow'\", \"'node2'\")"; + "('NODE', \"'music_flow'\", \"'node2'\")":forward -> "('NODE', \"'music_flow'\", \"'node3'\")"; + "('NODE', \"'global_flow'\", \"'start_node'\")":"('greeting_flow', 'node1')" -> "('NODE', \"'greeting_flow'\", \"'node1'\")"; + "('NODE', \"'global_flow'\", \"'start_node'\")":"'fallback_node'" -> "('NODE', \"'global_flow'\", \"'fallback_node'\")"; + "('NODE', \"'global_flow'\", \"'start_node'\")":to_fallback -> "('NODE', \"'global_flow'\", \"'fallback_node'\")"; + "('NODE', \"'global_flow'\", \"'start_node'\")":forward -> "('NODE', \"'global_flow'\", \"'fallback_node'\")"; + "('NODE', \"'global_flow'\", \"'start_node'\")":"('music_flow', 'node1')" -> "('NODE', \"'music_flow'\", \"'node1'\")"; + "('NODE', \"'global_flow'\", \"'start_node'\")":repeat -> "('NODE', \"'global_flow'\", \"'start_node'\")"; + "('NODE', \"'greeting_flow'\", \"'node3'\")":to_fallback -> "('NODE', \"'global_flow'\", \"'fallback_node'\")"; + "('NODE', \"'greeting_flow'\", \"'node3'\")":repeat -> "('NODE', \"'greeting_flow'\", \"'node3'\")"; + "('NODE', \"'greeting_flow'\", \"'node3'\")":forward -> "('NODE', \"'greeting_flow'\", \"'node4'\")"; + "('NODE', \"'greeting_flow'\", \"'node4'\")":"'node1'" -> "('NODE', \"'greeting_flow'\", \"'node1'\")"; + "('NODE', \"'greeting_flow'\", \"'node4'\")":forward -> "('NODE', \"'greeting_flow'\", \"'node1'\")"; + "('NODE', \"'greeting_flow'\", \"'node4'\")":to_fallback -> "('NODE', \"'global_flow'\", \"'fallback_node'\")"; + "('NODE', \"'greeting_flow'\", \"'node4'\")":repeat -> "('NODE', \"'greeting_flow'\", \"'node4'\")"; + "('NODE', \"'music_flow'\", \"'node3'\")":to_fallback -> "('NODE', \"'global_flow'\", \"'fallback_node'\")"; + "('NODE', \"'music_flow'\", \"'node3'\")":backward -> "('NODE', \"'music_flow'\", \"'node2'\")"; + "('NODE', \"'music_flow'\", \"'node3'\")":repeat -> "('NODE', \"'music_flow'\", 
\"'node3'\")"; + "('NODE', \"'music_flow'\", \"'node3'\")":forward -> "('NODE', \"'music_flow'\", \"'node4'\")"; + "('NODE', \"'music_flow'\", \"'node4'\")":to_fallback -> "('NODE', \"'global_flow'\", \"'fallback_node'\")"; + "('NODE', \"'music_flow'\", \"'node4'\")":forward -> "('NODE', \"'music_flow'\", \"'node1'\")"; + "('NODE', \"'music_flow'\", \"'node4'\")":repeat -> "('NODE', \"'music_flow'\", \"'node4'\")"; +} diff --git a/tests/viewer/opts_on.dot b/tests/viewer/opts_on.dot new file mode 100644 index 000000000..66337b24a --- /dev/null +++ b/tests/viewer/opts_on.dot @@ -0,0 +1,94 @@ +digraph { + graph [compound=true, + fontname="Helvetica,Arial,sans-serif", + overlap=ipsep, + splines=spline + ]; + node [fillcolor="#ffffffbf", + fontname="Helvetica,Arial,sans-serif", + shape=box, + style="rounded, filled" + ]; + subgraph cluster_virtual { + graph [color="#96b0af", + label=VIRTUAL, + style="rounded, filled" + ]; + "('NODE', 'virtual', 'GLOBAL')" [label=<

GLOBAL


Transitions
(
cnd.regexp('\\b(hi|hello)\\b', re.I))
(
cnd.regexp('talk about music'))
>]; + "('NODE', 'virtual', 'UNRESOLVED')" [label=<

UNRESOLVED
>]; + } + subgraph "cluster_'greeting_flow'" { + graph [color="#c6ae82", + label=GREETING_FLOW, + style="rounded, filled" + ]; + "('NODE', \"'greeting_flow'\", \"'node1'\")" [label=<



NODE1


Response

rsp.choice(['Hi, what is up?', 'Hello, how are you?'])


Misc

{"'var3'": "'info_of_step_1'"}


Transitions
(
cnd.true())
(
cnd.true())
(
cnd.regexp('how are you'))
(
cnd.all([cnd.regexp('next\\b'), cnd.has_last_labels(labels=[('music_flow', i) for i in ['node2', 'node3']])]))
(
cnd.all([cnd.regexp('repeat', re.I), cnd.negation(cnd.has_last_labels(flow_labels=['global_flow']))]))
>]; + "('NODE', \"'greeting_flow'\", \"'node2'\")" [label=<


NODE2


Response

'Good. What do you want to talk about?'


Transitions
(
cnd.regexp('talk about music'))
(
cnd.true())
(
cnd.true())
(
cnd.all([cnd.regexp('next\\b'), cnd.has_last_labels(labels=[('music_flow', i) for i in ['node2', 'node3']])]))
(
cnd.regexp('talk about'))
(
cnd.all([cnd.regexp('repeat', re.I), cnd.negation(cnd.has_last_labels(flow_labels=['global_flow']))]))
>]; + "('NODE', \"'greeting_flow'\", \"'node3'\")" [label=<


NODE3


Response

foo


Transitions
(
cnd.true())
(
cnd.all([cnd.regexp('next\\b'), cnd.has_last_labels(labels=[('music_flow', i) for i in ['node2', 'node3']])]))
(
cnd.regexp('bye'))
(
cnd.all([cnd.regexp('repeat', re.I), cnd.negation(cnd.has_last_labels(flow_labels=['global_flow']))]))
>]; + "('NODE', \"'greeting_flow'\", \"'node4'\")" [label=<


NODE4


Response

bar('bye')


Transitions
(
cnd.regexp('hi|hello', re.IGNORECASE))
(
cnd.all([cnd.regexp('next\\b'), cnd.has_last_labels(labels=[('music_flow', i) for i in ['node2', 'node3']])]))
(
cnd.true())
(
cnd.true())
(
cnd.all([cnd.regexp('repeat', re.I), cnd.negation(cnd.has_last_labels(flow_labels=['global_flow']))]))
>]; + } + subgraph "cluster_'music_flow'" { + graph [color="#f78378", + label=MUSIC_FLOW, + style="rounded, filled" + ]; + "('NODE', \"'music_flow'\", \"'node1'\")" [label=<


NODE1


Response

'I love `System of a Down` group, would you like to tell about it? '


Transitions
(
cnd.true())
(
cnd.true())
(
cnd.all([cnd.regexp('next\\b'), cnd.has_last_labels(labels=[('music_flow', i) for i in ['node2', 'node3']])]))
(
cnd.regexp('yes|yep|ok', re.IGNORECASE))
(
cnd.all([cnd.regexp('repeat', re.I), cnd.negation(cnd.has_last_labels(flow_labels=['global_flow']))]))
>]; + "('NODE', \"'music_flow'\", \"'node2'\")" [label=<


NODE2


Response

'System of a Down is an Armenian-American heavy metal band formed in in 1994.'


Transitions
(
cnd.true())
(
cnd.true())
(
cnd.all([cnd.regexp('next\\b'), cnd.has_last_labels(labels=[('music_flow', i) for i in ['node2', 'node3']])]))
(
cnd.regexp('next', re.IGNORECASE))
(
cnd.all([cnd.regexp('repeat', re.I), cnd.negation(cnd.has_last_labels(flow_labels=['global_flow']))]))
(
cnd.regexp('repeat', re.IGNORECASE))
>]; + "('NODE', \"'music_flow'\", \"'node3'\")" [label=<


NODE3


Response

'The band achieved commercial success with the release of five studio albums.'


Transitions
(
cnd.true())
(
cnd.true())
(
cnd.all([cnd.regexp('next\\b'), cnd.has_last_labels(labels=[('music_flow', i) for i in ['node2', 'node3']])]))
(
cnd.regexp('next', re.IGNORECASE))
(
cnd.all([cnd.regexp('repeat', re.I), cnd.negation(cnd.has_last_labels(flow_labels=['global_flow']))]))
(
cnd.regexp('repeat', re.IGNORECASE))
(
cnd.regexp('back', re.IGNORECASE))
>]; + "('NODE', \"'music_flow'\", \"'node4'\")" [label=<


NODE4


Response

"That's all what I know"


Transitions
(
cnd.regexp('next', re.IGNORECASE))
(
cnd.regexp('next time', re.IGNORECASE))
(
cnd.true())
(
cnd.true())
(
cnd.all([cnd.regexp('next\\b'), cnd.has_last_labels(labels=[('music_flow', i) for i in ['node2', 'node3']])]))
(
cnd.all([cnd.regexp('repeat', re.I), cnd.negation(cnd.has_last_labels(flow_labels=['global_flow']))]))
>]; + } + subgraph "cluster_'global_flow'" { + graph [color="#ff7b9c", + label=GLOBAL_FLOW, + style="rounded, filled" + ]; + "('NODE', \"'global_flow'\", \"'fallback_node'\")" [label=<


FALLBACK_NODE


Response

'Ooops'


Transitions
(
cnd.regexp('talk about music'))
(
cnd.regexp('hi|hello', re.IGNORECASE))
(
cnd.true())
(
cnd.all([cnd.regexp('repeat', re.I), cnd.negation(cnd.has_last_labels(flow_labels=['global_flow']))]))
(
cnd.true())
(
cnd.all([cnd.regexp('next\\b'), cnd.has_last_labels(labels=[('music_flow', i) for i in ['node2', 'node3']])]))
>]; + "('LOCAL_NODE', \"'global_flow'\", 'LOCAL')" [label=<


LOCAL


Pre_Response_Processing

{'2': "add_prefix('l2_local')", '3': "add_prefix('l3_local')"}


Transitions
(
cnd.regexp('greetings'))
>]; + "('NODE', \"'global_flow'\", \"'start_node'\")" [label=<


START_NODE


Response

''


Transitions
(
cnd.regexp('talk about music'))
(
cnd.regexp('hi|hello', re.IGNORECASE))
(
cnd.true())
(
cnd.true())
(
cnd.all([cnd.regexp('next\\b'), cnd.has_last_labels(labels=[('music_flow', i) for i in ['node2', 'node3']])]))
(
cnd.all([cnd.regexp('repeat', re.I), cnd.negation(cnd.has_last_labels(flow_labels=['global_flow']))]))
>]; + } + "('NODE', 'virtual', 'GLOBAL')":"('greeting_flow', 'node1', 1.1)" -> "('NODE', \"'greeting_flow'\", \"'node1'\")"; + "('NODE', 'virtual', 'GLOBAL')":"('music_flow', 'node1', 1.1)" -> "('NODE', \"'music_flow'\", \"'node1'\")"; + "('NODE', \"'greeting_flow'\", \"'node1'\")":repeat -> "('NODE', \"'greeting_flow'\", \"'node1'\")"; + "('NODE', \"'greeting_flow'\", \"'node1'\")":"('global_flow', 'fallback_node', 0.1)" -> "('NODE', \"'global_flow'\", \"'fallback_node'\")"; + "('NODE', \"'greeting_flow'\", \"'node1'\")":to_fallback -> "('NODE', \"'global_flow'\", \"'fallback_node'\")"; + "('NODE', \"'greeting_flow'\", \"'node1'\")":"'node2'" -> "('NODE', \"'greeting_flow'\", \"'node2'\")"; + "('NODE', \"'greeting_flow'\", \"'node1'\")":forward -> "('NODE', \"'greeting_flow'\", \"'node2'\")"; + "('NODE', \"'music_flow'\", \"'node1'\")":repeat -> "('NODE', \"'music_flow'\", \"'node1'\")"; + "('NODE', \"'music_flow'\", \"'node1'\")":to_fallback -> "('NODE', \"'global_flow'\", \"'fallback_node'\")"; + "('NODE', \"'music_flow'\", \"'node1'\")":forward -> "('NODE', \"'music_flow'\", \"'node2'\")"; + "('NODE', \"'global_flow'\", \"'fallback_node'\")":"('greeting_flow', 'node1')" -> "('NODE', \"'greeting_flow'\", \"'node1'\")"; + "('NODE', \"'global_flow'\", \"'fallback_node'\")":"('music_flow', 'node1')" -> "('NODE', \"'music_flow'\", \"'node1'\")"; + "('NODE', \"'global_flow'\", \"'fallback_node'\")":to_fallback -> "('NODE', \"'global_flow'\", \"'fallback_node'\")"; + "('NODE', \"'global_flow'\", \"'fallback_node'\")":repeat -> "('NODE', \"'global_flow'\", \"'fallback_node'\")"; + "('NODE', \"'global_flow'\", \"'fallback_node'\")":forward -> "('NODE', \"'global_flow'\", \"'start_node'\")"; + "('NODE', \"'greeting_flow'\", \"'node2'\")":"('music_flow', 'node1')" -> "('NODE', \"'music_flow'\", \"'node1'\")"; + "('NODE', \"'greeting_flow'\", \"'node2'\")":to_fallback -> "('NODE', \"'global_flow'\", \"'fallback_node'\")"; + "('NODE', \"'greeting_flow'\", 
\"'node2'\")":repeat -> "('NODE', \"'greeting_flow'\", \"'node2'\")"; + "('NODE', \"'greeting_flow'\", \"'node2'\")":forward -> "('NODE', \"'greeting_flow'\", \"'node3'\")"; + "('NODE', \"'music_flow'\", \"'node2'\")":to_fallback -> "('NODE', \"'global_flow'\", \"'fallback_node'\")"; + "('NODE', \"'music_flow'\", \"'node2'\")":repeat -> "('NODE', \"'music_flow'\", \"'node2'\")"; + "('NODE', \"'music_flow'\", \"'node2'\")":forward -> "('NODE', \"'music_flow'\", \"'node3'\")"; + "('LOCAL_NODE', \"'global_flow'\", 'LOCAL')":"('greeting_flow', 'node1')" -> "('NODE', \"'greeting_flow'\", \"'node1'\")" [minlen=1]; + "('NODE', \"'global_flow'\", \"'start_node'\")":"('greeting_flow', 'node1')" -> "('NODE', \"'greeting_flow'\", \"'node1'\")"; + "('NODE', \"'global_flow'\", \"'start_node'\")":"('music_flow', 'node1')" -> "('NODE', \"'music_flow'\", \"'node1'\")"; + "('NODE', \"'global_flow'\", \"'start_node'\")":"'fallback_node'" -> "('NODE', \"'global_flow'\", \"'fallback_node'\")"; + "('NODE', \"'global_flow'\", \"'start_node'\")":to_fallback -> "('NODE', \"'global_flow'\", \"'fallback_node'\")"; + "('NODE', \"'global_flow'\", \"'start_node'\")":forward -> "('NODE', \"'global_flow'\", \"'fallback_node'\")"; + "('NODE', \"'global_flow'\", \"'start_node'\")":repeat -> "('NODE', \"'global_flow'\", \"'start_node'\")"; + "('NODE', \"'greeting_flow'\", \"'node3'\")":to_fallback -> "('NODE', \"'global_flow'\", \"'fallback_node'\")"; + "('NODE', \"'greeting_flow'\", \"'node3'\")":repeat -> "('NODE', \"'greeting_flow'\", \"'node3'\")"; + "('NODE', \"'greeting_flow'\", \"'node3'\")":forward -> "('NODE', \"'greeting_flow'\", \"'node4'\")"; + "('NODE', \"'greeting_flow'\", \"'node4'\")":"'node1'" -> "('NODE', \"'greeting_flow'\", \"'node1'\")"; + "('NODE', \"'greeting_flow'\", \"'node4'\")":forward -> "('NODE', \"'greeting_flow'\", \"'node1'\")"; + "('NODE', \"'greeting_flow'\", \"'node4'\")":to_fallback -> "('NODE', \"'global_flow'\", \"'fallback_node'\")"; + "('NODE', 
\"'greeting_flow'\", \"'node4'\")":repeat -> "('NODE', \"'greeting_flow'\", \"'node4'\")"; + "('NODE', \"'music_flow'\", \"'node3'\")":to_fallback -> "('NODE', \"'global_flow'\", \"'fallback_node'\")"; + "('NODE', \"'music_flow'\", \"'node3'\")":backward -> "('NODE', \"'music_flow'\", \"'node2'\")"; + "('NODE', \"'music_flow'\", \"'node3'\")":repeat -> "('NODE', \"'music_flow'\", \"'node3'\")"; + "('NODE', \"'music_flow'\", \"'node3'\")":forward -> "('NODE', \"'music_flow'\", \"'node4'\")"; + "('NODE', \"'music_flow'\", \"'node4'\")":forward -> "('NODE', \"'music_flow'\", \"'node1'\")"; + "('NODE', \"'music_flow'\", \"'node4'\")":to_fallback -> "('NODE', \"'global_flow'\", \"'fallback_node'\")"; + "('NODE', \"'music_flow'\", \"'node4'\")":repeat -> "('NODE', \"'music_flow'\", \"'node4'\")"; + "('NODE', \"'music_flow'\", \"'node4'\")":"transitions.greeting_flow_n2_transition" -> "('NODE', 'virtual', 'UNRESOLVED')"; + "('NODE', \"'music_flow'\", \"'node4'\")":"transitions.high_priority_node_transition('greeting_flow', 'node4')" -> "('NODE', 'virtual', 'UNRESOLVED')"; +} diff --git a/tests/viewer/test_df_script_viewer.py b/tests/viewer/test_df_script_viewer.py new file mode 100644 index 000000000..6291ed330 --- /dev/null +++ b/tests/viewer/test_df_script_viewer.py @@ -0,0 +1,210 @@ +import os +import difflib +import time +import multiprocessing +from pathlib import Path + +import pytest + +try: + from dff.utils.viewer import app + from dff.utils.viewer import graph + from dff.utils.viewer import graph_plot + from dff.utils.viewer import chord_plot + from dff.utils.viewer import cli + from dff.utils.viewer import preprocessing + from dff.utils.viewer import utils +except ImportError: + pytest.skip(allow_module_level=True, reason="Missing dependencies for dff parser.") + +dot_exec_result = os.system("which dot") +if dot_exec_result != 0: + pytest.skip(allow_module_level=True, reason="Graphviz missing from the system.") + + +@pytest.fixture(scope="session") +def 
example_dir(): + example_d = Path(__file__).parent / "TEST_CASES" + yield example_d + + +@pytest.fixture(scope="function") +def nx_graph(example_dir): + G = graph.get_graph(example_dir / "main.py", example_dir.absolute()) + yield G + + +@pytest.mark.parametrize("show_global", [True, False]) +@pytest.mark.parametrize("show_local", [True, False]) +@pytest.mark.parametrize("show_isolates", [True, False]) +@pytest.mark.parametrize("show_unresolved", [True, False]) +def test_preprocessing(nx_graph, show_global, show_local, show_isolates, show_unresolved): + G = preprocessing.preprocess(**locals()) + glob = ("NODE", preprocessing.VIRTUAL_FLOW_KEY, "GLOBAL") in G.nodes + assert glob == show_global + unresolved = ("NODE", preprocessing.VIRTUAL_FLOW_KEY, preprocessing.UNRESOLVED_KEY) in G.nodes + assert unresolved == show_unresolved + + +@pytest.mark.parametrize("show_misc", [True, False]) +@pytest.mark.parametrize("show_response", [True, False]) +@pytest.mark.parametrize("show_processing", [True, False]) +def test_plotting(nx_graph, show_misc, show_response, show_processing): + nx_graph = preprocessing.preprocess(**locals()) + testing_plot = graph_plot.get_plot(**locals()) + plotly_fig = utils.graphviz_to_plotly(testing_plot) + assert app.create_app(plotly_fig) + assert plotly_fig + plotly_fig_2 = chord_plot.get_plot(**locals()) + assert plotly_fig_2 + + +@pytest.mark.parametrize( + ["params", "reference_file"], + [ + ( + dict( + show_misc=False, + show_processing=False, + show_response=False, + show_global=False, + show_local=False, + show_isolates=False, + show_unresolved=False, + random_seed=1, + ), + Path(__file__).parent / "opts_off.dot", + ), + ( + dict( + show_misc=True, + show_processing=True, + show_response=True, + show_global=True, + show_local=True, + show_isolates=True, + show_unresolved=True, + random_seed=1, + ), + Path(__file__).parent / "opts_on.dot", + ), + ], +) +def test_plotting_2(nx_graph, params, reference_file, tmp_path): + nx_graph = 
preprocessing.preprocess(nx_graph, **params) + testing_plot = graph_plot.get_plot(nx_graph, **params) + plot_file = tmp_path / "plot" + testing_plot.render(filename=plot_file) + test_lines = plot_file.open().readlines() + reference_lines = reference_file.open().readlines() + diff = difflib.unified_diff(test_lines, reference_lines) + assert len(list(diff)) == 0 + + +@pytest.mark.parametrize( + ["params", "reference_file"], + [ + ( + [ + "--random_seed=1", + ], + Path(__file__).parent / "opts_off.dot", + ), + ( + [ + "--show_misc", + "--show_processing", + "--show_response", + "--show_global", + "--show_local", + "--show_isolates", + "--show_unresolved", + "--random_seed=1", + ], + Path(__file__).parent / "opts_on.dot", + ), + ], +) +def test_image_cli(params, example_dir, reference_file, tmp_path): + plot_file = str((tmp_path / "plot").absolute()) + entrypoint, entrydir = str((example_dir / "main.py").absolute()), str(example_dir.absolute()) + cli.make_image( + args=[ + *params, + "--type=chord", + f"--entry_point={entrypoint}", + f"--project_root_dir={entrydir}", + "-f", + "jpeg", + "-o", + f"{plot_file}.jpg", + ] + ) + cli.make_image( + args=[ + *params, + f"--entry_point={entrypoint}", + f"--project_root_dir={entrydir}", + "-f", + "jpeg", + "-o", + f"{plot_file}.jpg", + ] + ) + cli.make_image( + args=[ + *params, + f"--entry_point={entrypoint}", + f"--project_root_dir={entrydir}", + "-f", + "dot", + "-o", + f"{plot_file}", + ] + ) + test_lines = Path(plot_file).open().readlines() + reference_lines = reference_file.open().readlines() + diff = difflib.unified_diff(test_lines, reference_lines) + assert len(list(diff)) == 0 + + +@pytest.mark.parametrize(["_type"], [(["-t", "chord"],), (["-t", "graph"],)]) +@pytest.mark.parametrize( + ["params"], + [ + ( + [ + "--random_seed=1", + ], + ), + ( + [ + "--show_misc", + "--show_processing", + "--show_response", + "--show_global", + "--show_local", + "--show_isolates", + "--random_seed=1", + ], + ), + ], +) +def 
test_server_cli(params, _type, example_dir): + entrypoint, entrydir = str((example_dir / "main.py").absolute()), str(example_dir.absolute()) + args = _type + [*params, "-e", entrypoint, "-d", entrydir, "-H", "localhost", "-P", "5000"] + try: + from pytest_cov.embed import cleanup_on_sigterm + except ImportError: + pass + else: + cleanup_on_sigterm() + + process = multiprocessing.Process(target=cli.make_server, name="Image", args=(args,)) + try: + process.start() + time.sleep(3) + assert process.is_alive() + process.terminate() + finally: + process.join()