diff --git a/.ci/assert_version.py b/.ci/assert_version.py
index 98daa9c9..154e117e 100755
--- a/.ci/assert_version.py
+++ b/.ci/assert_version.py
@@ -1,24 +1,26 @@
+# pylint: disable=line-too-long
import os
import json
+import sys
from pathlib import Path
SETUP_JSON = Path(__file__).resolve().parent.parent.joinpath("setup.json")
with open(SETUP_JSON, "r") as fp:
- setup = json.load(fp)
+ SETUP = json.load(fp)
-package_version = "v" + setup["version"]
+PACKAGE_VERSION = "v" + SETUP["version"]
-tag_version = os.getenv("TAG_VERSION")
-tag_version = tag_version[len("refs/tags/") :]
+TAG_VERSION = os.getenv("TAG_VERSION")
+TAG_VERSION = TAG_VERSION[len("refs/tags/") :]
-if tag_version == package_version:
- print(f"The versions match: tag:'{tag_version}' == package:'{package_version}'")
- exit(0)
+if TAG_VERSION == PACKAGE_VERSION:
+ print(f"The versions match: tag:'{TAG_VERSION}' == package:'{PACKAGE_VERSION}'")
+ sys.exit(0)
print(
- f"""The current package version '{package_version}' does not equal the tag version '{tag_version}'.
+ f"""The current package version '{PACKAGE_VERSION}' does not equal the tag version '{TAG_VERSION}'.
Update setup.json with new version.
Please remove the tag from both GitHub and your local repository!"""
)
-exit(1)
+sys.exit(1)
diff --git a/.ci/optimade-version.json b/.ci/optimade-version.json
deleted file mode 100644
index 425c2353..00000000
--- a/.ci/optimade-version.json
+++ /dev/null
@@ -1,6 +0,0 @@
-{
- "schemaVersion": 1,
- "label": "OPTiMaDe",
- "message": "v0.10.0",
- "color": "yellowgreen"
-}
\ No newline at end of file
diff --git a/.ci/optimade_version_update.py b/.ci/optimade_version_update.py
deleted file mode 100755
index 55f8741b..00000000
--- a/.ci/optimade_version_update.py
+++ /dev/null
@@ -1,51 +0,0 @@
-import json
-from pathlib import Path
-
-shields_json = Path(__file__).resolve().parent.joinpath("optimade-version.json")
-config_json = (
- Path(__file__).resolve().parent.parent.joinpath("aiida_optimade/config.json")
-)
-
-with open(shields_json, "r") as fp:
- shield = json.load(fp)
-
-with open(config_json, "r") as fp:
- config = json.load(fp)
-
-shield_version = shield["message"]
-current_version = config["version"]
-
-if shield_version == current_version:
- # The shield has the newest implemented version
- print(
- f"""They are the same: {current_version}
-Shield file:
-{json.dumps(shield, indent=2)}"""
- )
- exit(0)
-
-print(
- f"""The shield version is outdated.
-Shield version: {shield_version}
-Current version: {current_version}
-"""
-)
-
-shield["message"] = current_version
-with open(shields_json, "w") as fp:
- json.dump(shield, fp, indent=2)
-
-# Check file was saved correctly
-with open(shields_json, "r") as fp:
- update_shield = json.load(fp)
-
-if update_shield["message"] == current_version:
- print(f"Successfully updated the shield version to {update_shield['message']}")
- exit(0)
-else:
- print(
- f"""Something went wrong !
-Shield file:
-{json.dumps(update_shield, indent=2)}"""
- )
- exit(1)
diff --git a/server_template.cfg b/.ci/server_template.cfg
similarity index 100%
rename from server_template.cfg
rename to .ci/server_template.cfg
diff --git a/.docker/run.sh b/.docker/run.sh
index b7f86e5a..c33aafc3 100755
--- a/.docker/run.sh
+++ b/.docker/run.sh
@@ -2,4 +2,10 @@
set -ex
mkdir -p $AIIDA_PATH/.aiida
cp /profiles/$AIIDA_PROFILE.json $AIIDA_PATH/.aiida/config.json
+
+# make docker.host.internal available
+# see https://github.com/docker/for-linux/issues/264#issuecomment-387525409
+# ltalirz: Only works for Mac, not Linux
+# echo -e "`/sbin/ip route|awk '/default/ { print $3 }'`\tdocker.host.internal" | tee -a /etc/hosts > /dev/null
+
uvicorn aiida_optimade.main:app --host 0.0.0.0 --port 80
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index f3d2c567..4f67c078 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -18,7 +18,6 @@ jobs:
- name: Install dependencies
run: |
python -m pip install --upgrade pip
- pip install -U setuptools
pip install flake8
- name: Lint with flake8
@@ -34,16 +33,14 @@ jobs:
strategy:
fail-fast: false
- matrix:
- python-version: [3.7, 3.8]
steps:
- uses: actions/checkout@v1
- - name: Set up Python ${{ matrix.python-version}}
+ - name: Set up Python 3.7
uses: actions/setup-python@v1
with:
- python-version: ${{ matrix.python-version}}
+ python-version: 3.7
- name: Install dependencies
run: |
@@ -52,8 +49,7 @@ jobs:
pip install pre-commit
- name: Test with pre-commit
- run: |
- pre-commit run --all-files || ( git status --short ; git diff ; exit 1 )
+ run: pre-commit run --all-files || ( git status --short ; git diff ; exit 1 )
pytest:
@@ -62,7 +58,7 @@ jobs:
strategy:
fail-fast: false
matrix:
- python-version: [3.7, 3.8]
+ python-version: [3.6, 3.7, 3.8]
backend: ['django', 'sqlalchemy']
steps:
@@ -100,21 +96,21 @@ jobs:
pip install -e .[dev]
reentry scan
+ - name: Setup server.cfg for AiiDA OPTiMaDe server
+ run: cp .ci/server_template.cfg ./server.cfg
+
- name: Setup up environment for AiiDA
env:
AIIDA_TEST_BACKEND: ${{ matrix.backend }}
- run: |
- .github/workflows/setup_aiida.sh
+ run: .github/workflows/setup_aiida.sh
- name: Load test data
- run: |
- verdi import --migration --non-interactive .github/aiida/optimade.aiida
+ run: verdi import --migration --non-interactive .github/aiida/optimade.aiida
- name: Test with pytest
env:
AIIDA_PROFILE: test_${{ matrix.backend }}
- run: |
- pytest --cov=./aiida_optimade/ --cov-report=xml
+ run: pytest --cov=./aiida_optimade/ --cov-report=xml
- name: Upload coverage to Codecov
if: matrix.python-version == 3.7
@@ -124,3 +120,55 @@ jobs:
xml: ./coverage.xml
flags: unittests
yml: ./.codecov.yml
+
+ docker-image:
+ runs-on: ubuntu-latest
+
+ steps:
+ - uses: actions/checkout@v1
+
+ # - uses: harmon758/postgresql-action@v1
+ # with:
+ # postgresql version: '11'
+ # postgresql db: test_django
+ # postgresql user: 'postgres'
+ # postgresql password: ''
+
+ # - name: Set up Python 3.7
+ # uses: actions/setup-python@v1
+ # with:
+ # python-version: 3.7
+
+ # - name: Install system dependencies
+ # run: |
+ # wget -O - "https://github.com/rabbitmq/signing-keys/releases/download/2.0/rabbitmq-release-signing-key.asc" | sudo apt-key add -
+ # echo 'deb https://dl.bintray.com/rabbitmq-erlang/debian bionic erlang' | sudo tee -a /etc/apt/sources.list.d/bintray.rabbitmq.list
+ # echo 'deb https://dl.bintray.com/rabbitmq/debian bionic main' | sudo tee -a /etc/apt/sources.list.d/bintray.rabbitmq.list
+ # sudo apt update
+ # sudo apt install postgresql postgresql-server-dev-all postgresql-client rabbitmq-server graphviz
+ # sudo systemctl status rabbitmq-server.service
+
+ # # Install optimade-python-tools from github
+ # - name: Install python dependencies
+ # run: |
+ # cd ${GITHUB_WORKSPACE}/..
+ # git clone https://github.com/Materials-Consortia/optimade-python-tools
+ # pip install -e optimade-python-tools
+ # cd ${GITHUB_WORKSPACE}
+ # pip install -e .
+ # reentry scan
+
+ # - name: Setup up environment for AiiDA
+ # env:
+ # AIIDA_TEST_BACKEND: django
+ # run: .github/workflows/setup_aiida.sh
+
+ # - name: Load test data
+ # run: verdi import --migration --non-interactive .github/aiida/optimade.aiida
+
+ - name: Build the Docker image
+ run: docker-compose -f profiles/docker-compose.yml build
+ # .github/workflows/wait_for_it.sh localhost:3253 -t 120
+ # sleep 15
+ # curl http://localhost:3253/optimade/info > info.json
+ # grep -F "www.aiida.net" info.json || exit 1
diff --git a/.github/workflows/wait_for_it.sh b/.github/workflows/wait_for_it.sh
new file mode 100755
index 00000000..eca6c3b9
--- /dev/null
+++ b/.github/workflows/wait_for_it.sh
@@ -0,0 +1,161 @@
+#!/usr/bin/env bash
+# Use this script to test if a given TCP host/port are available
+
+cmdname=$(basename $0)
+
+echoerr() { if [[ $QUIET -ne 1 ]]; then echo "$@" 1>&2; fi }
+
+usage()
+{
+ cat << USAGE >&2
+Usage:
+ $cmdname host:port [-s] [-t timeout] [-- command args]
+ -h HOST | --host=HOST Host or IP under test
+ -p PORT | --port=PORT TCP port under test
+ Alternatively, you specify the host and port as host:port
+ -s | --strict Only execute subcommand if the test succeeds
+ -q | --quiet Don't output any status messages
+ -t TIMEOUT | --timeout=TIMEOUT
+ Timeout in seconds, zero for no timeout
+ -- COMMAND ARGS Execute command with args after the test finishes
+USAGE
+ exit 1
+}
+
+wait_for()
+{
+ if [[ $TIMEOUT -gt 0 ]]; then
+ echoerr "$cmdname: waiting $TIMEOUT seconds for $HOST:$PORT"
+ else
+ echoerr "$cmdname: waiting for $HOST:$PORT without a timeout"
+ fi
+ start_ts=$(date +%s)
+ while :
+ do
+ (echo > /dev/tcp/$HOST/$PORT) >/dev/null 2>&1
+ result=$?
+ if [[ $result -eq 0 ]]; then
+ end_ts=$(date +%s)
+ echoerr "$cmdname: $HOST:$PORT is available after $((end_ts - start_ts)) seconds"
+ break
+ fi
+ sleep 1
+ done
+ return $result
+}
+
+wait_for_wrapper()
+{
+ # In order to support SIGINT during timeout: http://unix.stackexchange.com/a/57692
+ if [[ $QUIET -eq 1 ]]; then
+ timeout $TIMEOUT $0 --quiet --child --host=$HOST --port=$PORT --timeout=$TIMEOUT &
+ else
+ timeout $TIMEOUT $0 --child --host=$HOST --port=$PORT --timeout=$TIMEOUT &
+ fi
+ PID=$!
+ trap "kill -INT -$PID" INT
+ wait $PID
+ RESULT=$?
+ if [[ $RESULT -ne 0 ]]; then
+ echoerr "$cmdname: timeout occurred after waiting $TIMEOUT seconds for $HOST:$PORT"
+ fi
+ return $RESULT
+}
+
+# process arguments
+while [[ $# -gt 0 ]]
+do
+ case "$1" in
+ *:* )
+ hostport=(${1//:/ })
+ HOST=${hostport[0]}
+ PORT=${hostport[1]}
+ shift 1
+ ;;
+ --child)
+ CHILD=1
+ shift 1
+ ;;
+ -q | --quiet)
+ QUIET=1
+ shift 1
+ ;;
+ -s | --strict)
+ STRICT=1
+ shift 1
+ ;;
+ -h)
+ HOST="$2"
+ if [[ $HOST == "" ]]; then break; fi
+ shift 2
+ ;;
+ --host=*)
+ HOST="${1#*=}"
+ shift 1
+ ;;
+ -p)
+ PORT="$2"
+ if [[ $PORT == "" ]]; then break; fi
+ shift 2
+ ;;
+ --port=*)
+ PORT="${1#*=}"
+ shift 1
+ ;;
+ -t)
+ TIMEOUT="$2"
+ if [[ $TIMEOUT == "" ]]; then break; fi
+ shift 2
+ ;;
+ --timeout=*)
+ TIMEOUT="${1#*=}"
+ shift 1
+ ;;
+ --)
+ shift
+ CLI="$@"
+ break
+ ;;
+ --help)
+ usage
+ ;;
+ *)
+ echoerr "Unknown argument: $1"
+ usage
+ ;;
+ esac
+done
+
+if [[ "$HOST" == "" || "$PORT" == "" ]]; then
+ echoerr "Error: you need to provide a host and port to test."
+ usage
+fi
+
+TIMEOUT=${TIMEOUT:-15}
+STRICT=${STRICT:-0}
+CHILD=${CHILD:-0}
+QUIET=${QUIET:-0}
+
+if [[ $CHILD -gt 0 ]]; then
+ wait_for
+ RESULT=$?
+ exit $RESULT
+else
+ if [[ $TIMEOUT -gt 0 ]]; then
+ wait_for_wrapper
+ RESULT=$?
+ else
+ wait_for
+ RESULT=$?
+ fi
+fi
+
+if [[ $CLI != "" ]]; then
+ if [[ $RESULT -ne 0 && $STRICT -eq 1 ]]; then
+ echoerr "$cmdname: strict mode, refusing to execute subprocess"
+ exit $RESULT
+ fi
+ exec $CLI
+else
+ exit $RESULT
+fi
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 07132b61..d655b488 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -24,14 +24,3 @@ repos:
.codecov.yml
)$
language: system
-
- - id: optimade-version
- name: Update OPTiMaDe shield
- description: If needed, the json for the OPTiMaDe shield will be updated with new config version
- entry: python ./.ci/optimade_version_update.py
- files: >
- (?x)^(
- .ci/optimade-version.json|
- aiida_optimade/config.json
- )$
- language: system
diff --git a/.pylintrc b/.pylintrc
new file mode 100644
index 00000000..68a49ef4
--- /dev/null
+++ b/.pylintrc
@@ -0,0 +1,508 @@
+[MASTER]
+
+# A comma-separated list of package or module names from where C extensions may
+# be loaded. Extensions are loading into the active Python interpreter and may
+# run arbitrary code.
+extension-pkg-whitelist=
+
+# Add files or directories to the blacklist. They should be base names, not
+# paths.
+ignore=CVS
+
+# Add files or directories matching the regex patterns to the blacklist. The
+# regex matches against base names, not paths.
+ignore-patterns=
+
+# Python code to execute, usually for sys.path manipulation such as
+# pygtk.require().
+#init-hook=
+
+# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the
+# number of processors available to use.
+jobs=1
+
+# Control the amount of potential inferred values when inferring a single
+# object. This can help the performance when dealing with large functions or
+# complex, nested conditions.
+limit-inference-results=100
+
+# List of plugins (as comma separated values of python module names) to load,
+# usually to register additional checkers.
+load-plugins=
+
+# Pickle collected data for later comparisons.
+persistent=yes
+
+# Specify a configuration file.
+#rcfile=
+
+# When enabled, pylint would attempt to guess common misconfiguration and emit
+# user-friendly hints instead of false-positive error messages.
+suggestion-mode=yes
+
+# Allow loading of arbitrary C extensions. Extensions are imported into the
+# active Python interpreter and may run arbitrary code.
+unsafe-load-any-extension=no
+
+
+[MESSAGES CONTROL]
+
+# Only show warnings with the listed confidence levels. Leave empty to show
+# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED.
+confidence=
+
+# Disable the message, report, category or checker with the given id(s). You
+# can either give multiple identifiers separated by comma (,) or put this
+# option multiple times (only on the command line, not in the configuration
+# file where it should appear only once). You can also use "--disable=all" to
+# disable everything first and then reenable specific checks. For example, if
+# you want to run only the similarities checker, you can use "--disable=all
+# --enable=similarities". If you want to run only the classes checker, but have
+# no Warning level messages displayed, use "--disable=all --enable=classes
+# --disable=W".
+disable=import-outside-toplevel,
+ missing-module-docstring,
+ locally-disabled,
+ bad-continuation,
+ fixme,
+ too-many-instance-attributes
+
+# Enable the message, report, category or checker with the given id(s). You can
+# either give multiple identifier separated by comma (,) or put this option
+# multiple time (only on the command line, not in the configuration file where
+# it should appear only once). See also the "--disable" option for examples.
+enable=c-extension-no-member
+
+
+[REPORTS]
+
+# Python expression which should return a score less than or equal to 10. You
+# have access to the variables 'error', 'warning', 'refactor', and 'convention'
+# which contain the number of messages in each category, as well as 'statement'
+# which is the total number of statements analyzed. This score is used by the
+# global evaluation report (RP0004).
+evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
+
+# Template used to display messages. This is a python new-style format string
+# used to format the message information. See doc for all details.
+#msg-template=
+
+# Set the output format. Available formats are text, parseable, colorized, json
+# and msvs (visual studio). You can also give a reporter class, e.g.
+# mypackage.mymodule.MyReporterClass.
+output-format=text
+
+# Tells whether to display a full report or only the messages.
+reports=no
+
+# Activate the evaluation score.
+score=yes
+
+
+[REFACTORING]
+
+# Maximum number of nested blocks for function / method body
+max-nested-blocks=5
+
+# Complete name of functions that never returns. When checking for
+# inconsistent-return-statements if a never returning function is called then
+# it will be considered as an explicit return statement and no message will be
+# printed.
+never-returning-functions=sys.exit
+
+
+[LOGGING]
+
+# Format style used to check logging format string. `old` means using %
+# formatting, `new` is for `{}` formatting,and `fstr` is for f-strings.
+logging-format-style=old
+
+# Logging modules to check that the string format arguments are in logging
+# function parameter format.
+logging-modules=logging
+
+
+[SPELLING]
+
+# Limits count of emitted suggestions for spelling mistakes.
+max-spelling-suggestions=4
+
+# Spelling dictionary name. Available dictionaries: none. To make it work,
+# install the python-enchant package.
+spelling-dict=
+
+# List of comma separated words that should not be checked.
+spelling-ignore-words=
+
+# A path to a file that contains the private dictionary; one word per line.
+spelling-private-dict-file=
+
+# Tells whether to store unknown words to the private dictionary (see the
+# --spelling-private-dict-file option) instead of raising a message.
+spelling-store-unknown-words=no
+
+
+[BASIC]
+
+# Naming style matching correct argument names.
+argument-naming-style=snake_case
+
+# Regular expression matching correct argument names. Overrides argument-
+# naming-style.
+#argument-rgx=
+
+# Naming style matching correct attribute names.
+attr-naming-style=snake_case
+
+# Regular expression matching correct attribute names. Overrides attr-naming-
+# style.
+#attr-rgx=
+
+# Bad variable names which should always be refused, separated by a comma.
+bad-names=foo,
+ bar,
+ baz,
+ toto,
+ tutu,
+ tata
+
+# Naming style matching correct class attribute names.
+class-attribute-naming-style=any
+
+# Regular expression matching correct class attribute names. Overrides class-
+# attribute-naming-style.
+#class-attribute-rgx=
+
+# Naming style matching correct class names.
+class-naming-style=PascalCase
+
+# Regular expression matching correct class names. Overrides class-naming-
+# style.
+#class-rgx=
+
+# Naming style matching correct constant names.
+const-naming-style=UPPER_CASE
+
+# Regular expression matching correct constant names. Overrides const-naming-
+# style.
+#const-rgx=
+
+# Minimum line length for functions/classes that require docstrings, shorter
+# ones are exempt.
+docstring-min-length=-1
+
+# Naming style matching correct function names.
+function-naming-style=snake_case
+
+# Regular expression matching correct function names. Overrides function-
+# naming-style.
+#function-rgx=
+
+# Good variable names which should always be accepted, separated by a comma.
+good-names=i,
+ j,
+ k,
+ ex,
+ Run,
+ _
+
+# Include a hint for the correct naming format with invalid-name.
+include-naming-hint=no
+
+# Naming style matching correct inline iteration names.
+inlinevar-naming-style=any
+
+# Regular expression matching correct inline iteration names. Overrides
+# inlinevar-naming-style.
+#inlinevar-rgx=
+
+# Naming style matching correct method names.
+method-naming-style=snake_case
+
+# Regular expression matching correct method names. Overrides method-naming-
+# style.
+#method-rgx=
+
+# Naming style matching correct module names.
+module-naming-style=snake_case
+
+# Regular expression matching correct module names. Overrides module-naming-
+# style.
+#module-rgx=
+
+# Colon-delimited sets of names that determine each other's naming style when
+# the name regexes allow several styles.
+name-group=
+
+# Regular expression which should only match function or class names that do
+# not require a docstring.
+no-docstring-rgx=^_
+
+# List of decorators that produce properties, such as abc.abstractproperty. Add
+# to this list to register other decorators that produce valid properties.
+# These decorators are taken in consideration only for invalid-name.
+property-classes=abc.abstractproperty
+
+# Naming style matching correct variable names.
+variable-naming-style=snake_case
+
+# Regular expression matching correct variable names. Overrides variable-
+# naming-style.
+#variable-rgx=
+
+
+[SIMILARITIES]
+
+# Ignore comments when computing similarities.
+ignore-comments=yes
+
+# Ignore docstrings when computing similarities.
+ignore-docstrings=yes
+
+# Ignore imports when computing similarities.
+ignore-imports=no
+
+# Minimum lines number of a similarity.
+min-similarity-lines=4
+
+
+[FORMAT]
+
+# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
+expected-line-ending-format=
+
+# Regexp for a line that is allowed to be longer than the limit.
+ignore-long-lines=^\s*(# )??$
+
+# Number of spaces of indent required inside a hanging or continued line.
+indent-after-paren=4
+
+# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
+# tab).
+indent-string=' '
+
+# Maximum number of characters on a single line.
+max-line-length=88
+
+# Maximum number of lines in a module.
+max-module-lines=1000
+
+# List of optional constructs for which whitespace checking is disabled. `dict-
+# separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}.
+# `trailing-comma` allows a space between comma and closing bracket: (a, ).
+# `empty-line` allows space-only lines.
+no-space-check=trailing-comma,
+ dict-separator
+
+# Allow the body of a class to be on the same line as the declaration if body
+# contains single statement.
+single-line-class-stmt=no
+
+# Allow the body of an if to be on the same line as the test if there is no
+# else.
+single-line-if-stmt=no
+
+
+[MISCELLANEOUS]
+
+# List of note tags to take in consideration, separated by a comma.
+notes=FIXME,
+ XXX,
+ TODO
+
+
+[TYPECHECK]
+
+# List of decorators that produce context managers, such as
+# contextlib.contextmanager. Add to this list to register other decorators that
+# produce valid context managers.
+contextmanager-decorators=contextlib.contextmanager
+
+# List of members which are set dynamically and missed by pylint inference
+# system, and so shouldn't trigger E1101 when accessed. Python regular
+# expressions are accepted.
+generated-members=
+
+# Tells whether missing members accessed in mixin class should be ignored. A
+# mixin class is detected if its name ends with "mixin" (case insensitive).
+ignore-mixin-members=yes
+
+# Tells whether to warn about missing members when the owner of the attribute
+# is inferred to be None.
+ignore-none=yes
+
+# This flag controls whether pylint should warn about no-member and similar
+# checks whenever an opaque object is returned when inferring. The inference
+# can return multiple potential results while evaluating a Python object, but
+# some branches might not be evaluated, which results in partial inference. In
+# that case, it might be useful to still emit no-member and other checks for
+# the rest of the inferred objects.
+ignore-on-opaque-inference=yes
+
+# List of class names for which member attributes should not be checked (useful
+# for classes with dynamically set attributes). This supports the use of
+# qualified names.
+ignored-classes=optparse.Values,thread._local,_thread._local
+
+# List of module names for which member attributes should not be checked
+# (useful for modules/projects where namespaces are manipulated during runtime
+# and thus existing member attributes cannot be deduced by static analysis). It
+# supports qualified module names, as well as Unix pattern matching.
+ignored-modules=
+
+# Show a hint with possible names when a member name was not found. The aspect
+# of finding the hint is based on edit distance.
+missing-member-hint=yes
+
+# The minimum edit distance a name should have in order to be considered a
+# similar match for a missing member name.
+missing-member-hint-distance=1
+
+# The total number of similar names that should be taken in consideration when
+# showing a hint for a missing member.
+missing-member-max-choices=1
+
+# List of decorators that change the signature of a decorated function.
+signature-mutators=
+
+
+[STRING]
+
+# This flag controls whether the implicit-str-concat-in-sequence should
+# generate a warning on implicit string concatenation in sequences defined over
+# several lines.
+check-str-concat-over-line-jumps=no
+
+
+[VARIABLES]
+
+# List of additional names supposed to be defined in builtins. Remember that
+# you should avoid defining new builtins when possible.
+additional-builtins=
+
+# Tells whether unused global variables should be treated as a violation.
+allow-global-unused-variables=yes
+
+# List of strings which can identify a callback function by name. A callback
+# name must start or end with one of those strings.
+callbacks=cb_,
+ _cb
+
+# A regular expression matching the name of dummy variables (i.e. expected to
+# not be used).
+dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_
+
+# Argument names that match this expression will be ignored. Default to name
+# with leading underscore.
+ignored-argument-names=_.*|^ignored_|^unused_
+
+# Tells whether we should check for unused import in __init__ files.
+init-import=no
+
+# List of qualified module names which can have objects that can redefine
+# builtins.
+redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io
+
+
+[IMPORTS]
+
+# List of modules that can be imported at any level, not just the top level
+# one.
+allow-any-import-level=
+
+# Allow wildcard imports from modules that define __all__.
+allow-wildcard-with-all=no
+
+# Analyse import fallback blocks. This can be used to support both Python 2 and
+# 3 compatible code, which means that the block might have code that exists
+# only in one or another interpreter, leading to false positives when analysed.
+analyse-fallback-blocks=no
+
+# Deprecated modules which should not be used, separated by a comma.
+deprecated-modules=optparse,tkinter.tix
+
+# Create a graph of external dependencies in the given file (report RP0402 must
+# not be disabled).
+ext-import-graph=
+
+# Create a graph of every (i.e. internal and external) dependencies in the
+# given file (report RP0402 must not be disabled).
+import-graph=
+
+# Create a graph of internal dependencies in the given file (report RP0402 must
+# not be disabled).
+int-import-graph=
+
+# Force import order to recognize a module as part of the standard
+# compatibility libraries.
+known-standard-library=
+
+# Force import order to recognize a module as part of a third party library.
+known-third-party=enchant
+
+# Couples of modules and preferred modules, separated by a comma.
+preferred-modules=
+
+
+[DESIGN]
+
+# Maximum number of arguments for function / method.
+max-args=5
+
+# Maximum number of attributes for a class (see R0902).
+max-attributes=7
+
+# Maximum number of boolean expressions in an if statement (see R0916).
+max-bool-expr=5
+
+# Maximum number of branch for function / method body.
+max-branches=12
+
+# Maximum number of locals for function / method body.
+max-locals=15
+
+# Maximum number of parents for a class (see R0901).
+max-parents=7
+
+# Maximum number of public methods for a class (see R0904).
+max-public-methods=20
+
+# Maximum number of return / yield for function / method body.
+max-returns=6
+
+# Maximum number of statements in function / method body.
+max-statements=50
+
+# Minimum number of public methods for a class (see R0903).
+min-public-methods=2
+
+
+[CLASSES]
+
+# List of method names used to declare (i.e. assign) instance attributes.
+defining-attr-methods=__init__,
+ __new__,
+ setUp,
+ __post_init__
+
+# List of member names, which should be excluded from the protected access
+# warning.
+exclude-protected=_asdict,
+ _fields,
+ _replace,
+ _source,
+ _make
+
+# List of valid names for the first argument in a class method.
+valid-classmethod-first-arg=cls
+
+# List of valid names for the first argument in a metaclass class method.
+valid-metaclass-classmethod-first-arg=cls
+
+
+[EXCEPTIONS]
+
+# Exceptions that will emit a warning when being caught. Defaults to
+# "BaseException, Exception".
+overgeneral-exceptions=BaseException,
+ Exception
diff --git a/Dockerfile b/Dockerfile
index a69232e3..3d47d9e8 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -9,7 +9,8 @@ RUN pip install -e optimade-python-tools
RUN pip install uvicorn
# copy repo contents
-COPY setup.py ./
+COPY setup.py setup.json README.md ./
+COPY .ci/server_template.cfg ./server.cfg
COPY aiida_optimade ./aiida_optimade
RUN pip install -e .
diff --git a/README.md b/README.md
index fffaaa9d..49984707 100644
--- a/README.md
+++ b/README.md
@@ -2,7 +2,7 @@
| Latest release | Build status | Activity |
|:--------------:|:------------:|:--------:|
-| [](https://pypi.org/project/aiida-optimade/) [](https://pypi.org/project/aiida-optimade/) [](https://github.com/Materials-Consortia/OPTiMaDe/) | [](https://github.com/aiidateam/aiida-optimade/actions/) [](https://codecov.io/gh/aiidateam/aiida-optimade) | [](https://github.com/aiidateam/aiida-optimade) |
+| [](https://pypi.org/project/aiida-optimade/) [](https://pypi.org/project/aiida-optimade/) [](https://github.com/Materials-Consortia/OPTiMaDe/) | [](https://github.com/aiidateam/aiida-optimade/actions/) [](https://codecov.io/gh/aiidateam/aiida-optimade) | [](https://github.com/aiidateam/aiida-optimade) |
This is a RESTful API server created with [FastAPI](https://fastapi.tiangolo.com/) that exposes an AiiDA database according to the [OPTiMaDe specification](https://github.com/Materials-Consortia/OPTiMaDe/blob/develop/optimade.rst).
@@ -19,8 +19,6 @@ Lastly, the server utilizes the FastAPI concept of [routers](https://fastapi.tia
Environment where AiiDA is installed.
AiiDA database containing `StructureData` nodes, since these are the _only_ AiiDA nodes that are currently exposed with this API (under the `/structures` endpoint).
-> **Note**: At the moment, `aiida-optimade` works most optimally with an AiiDA database using the SQLAlchemy backend.
-
## Installation
```shell
@@ -35,10 +33,10 @@ pip install -e aiida-optimade/
```shell
# specify AiiDA profile (will use default otherwise)
export AIIDA_PROFILE=optimade
-sh run.sh
+./aiida-optimade/run.sh
```
-Navigate to `http://127.0.0.1:5000/optimade/info`
+Navigate to `http://localhost:5000/optimade/info`
## Running via docker
@@ -48,7 +46,7 @@ Adapt `profiles/quicksetup.json` and `profiles/docker-compose.yml` appropriately
docker-compose -f profiles/docker-compose.yml up --build
```
-Navigate to `http://127.0.0.1:3253/optimade/info`
+Navigate to `http://localhost:3253/optimade/info`
Stop by using
diff --git a/aiida_optimade/__init__.py b/aiida_optimade/__init__.py
index e69de29b..6a9beea8 100644
--- a/aiida_optimade/__init__.py
+++ b/aiida_optimade/__init__.py
@@ -0,0 +1 @@
+__version__ = "0.4.0"
diff --git a/aiida_optimade/aiida_session.py b/aiida_optimade/aiida_session.py
deleted file mode 100644
index 9544667f..00000000
--- a/aiida_optimade/aiida_session.py
+++ /dev/null
@@ -1,94 +0,0 @@
-import json
-
-from sqlalchemy import create_engine
-from sqlalchemy.orm import sessionmaker
-
-from aiida_optimade.main import profile
-from aiida_optimade.common import AiidaError
-
-if profile.database_backend == "django":
- from aiida.orm.implementation.django.querybuilder import (
- DjangoQueryBuilder as QueryBuilder,
- )
- from aiida.orm.implementation.django.backend import DjangoBackend as Backend
-elif profile.database_backend == "sqlalchemy":
- from aiida.orm.implementation.sqlalchemy.querybuilder import (
- SqlaQueryBuilder as QueryBuilder,
- )
- from aiida.orm.implementation.sqlalchemy.backend import SqlaBackend as Backend
-else:
- raise AiidaError(
- f'Unknown AiiDA backend "{profile.database_backend}" for profile {profile}'
- )
-
-
-separator = ":" if profile.database_port else ""
-engine_url = "postgresql://{user}:{password}@{hostname}{separator}{port}/{name}".format(
- separator=separator,
- user=profile.database_username,
- password=profile.database_password,
- hostname=profile.database_hostname,
- port=profile.database_port,
- name=profile.database_name,
-)
-
-engine = create_engine(
- engine_url,
- json_serializer=json.dumps,
- json_deserializer=json.loads,
- encoding="utf-8",
-)
-
-SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
-
-
-class OptimadeDjangoQueryBuilder(QueryBuilder):
- """New DjangoQueryBuilder"""
-
- def __init__(self, backend, session):
- QueryBuilder.__init__(self, backend)
- self.__optimade_session = session
-
- def get_session(self):
- return self.__optimade_session
-
-
-class OptimadeDjangoBackend(Backend):
- """New DjangoBackend"""
-
- def __init__(self):
- super().__init__()
- self.__optimade_session = SessionLocal()
-
- def query(self):
- """Special OPTiMaDe query()"""
- return OptimadeDjangoQueryBuilder(self, self.__optimade_session)
-
- def close(self):
- self.__optimade_session.close()
-
-
-class OptimadeSqlaQueryBuilder(QueryBuilder):
- """New SqlaQueryBuilder"""
-
- def __init__(self, backend, session):
- QueryBuilder.__init__(self, backend)
- self.__optimade_session = session
-
- def get_session(self):
- return self.__optimade_session
-
-
-class OptimadeSqlaBackend(Backend):
- """New SqlaBackend"""
-
- def __init__(self):
- super().__init__()
- self.__optimade_session = SessionLocal()
-
- def query(self):
- """Special OPTiMaDe query()"""
- return OptimadeSqlaQueryBuilder(self, self.__optimade_session)
-
- def close(self):
- self.__optimade_session.close()
diff --git a/aiida_optimade/common/exceptions.py b/aiida_optimade/common/exceptions.py
index 6886e4ef..f0c41e87 100644
--- a/aiida_optimade/common/exceptions.py
+++ b/aiida_optimade/common/exceptions.py
@@ -18,7 +18,10 @@ class OptimadeIntegrityError(Exception):
class CausationError(Exception):
- """Cause-and-effect error, something MUST be done before something else is possible."""
+ """Cause-and-effect error
+
+ Something MUST be done before something else is possible.
+ """
class AiidaError(Exception):
diff --git a/aiida_optimade/config.json b/aiida_optimade/config.json
index 5bbae704..8eaacf4a 100644
--- a/aiida_optimade/config.json
+++ b/aiida_optimade/config.json
@@ -1,6 +1,5 @@
{
- "version": "v0.10.0",
- "page_limit": 100,
+ "page_limit": 15,
"db_page_limit": 500,
"index_base_url": null,
"provider": {
@@ -9,6 +8,12 @@
"description": "AiiDA: Automated Interactive Infrastructure and Database for Computational Science (http://www.aiida.net)",
"homepage": "http://www.aiida.net"
},
+ "implementation": {
+ "name": "aiida-optimade",
+ "version": "0.4.0",
+ "source_url": "https://github.com/aiidateam/aiida-optimade",
+ "maintainer": {"email": "casper.andersen@epfl.ch"}
+ },
"provider_fields": {
"structures": [
"ctime"
diff --git a/aiida_optimade/config.py b/aiida_optimade/config.py
index efa05c41..53010ecb 100644
--- a/aiida_optimade/config.py
+++ b/aiida_optimade/config.py
@@ -1,27 +1,37 @@
+# pylint: disable=attribute-defined-outside-init
import json
from typing import Any
from pathlib import Path
from optimade.server.config import Config, NoFallback
+from aiida_optimade import __version__
+
class ServerConfig(Config):
"""Load config file"""
@staticmethod
- def _DEFAULTS(field: str) -> Any:
+ def _DEFAULTS(field: str) -> Any: # pylint: disable=invalid-name
res = {
- "version": "v1.0.0",
- "page_limit": 100,
+ "page_limit": 15,
"db_page_limit": 500,
"provider": {
"prefix": "_aiida_",
"name": "AiiDA",
- "description": "AiiDA: Automated Interactive Infrastructure and Database for Computational Science (http://www.aiida.net)",
+ "description": "AiiDA: Automated Interactive Infrastructure and "
+ "Database for Computational Science (http://www."
+ "aiida.net)",
"homepage": "http://www.aiida.net",
"index_base_url": None,
},
"provider_fields": {},
+ "implementation": {
+ "name": "aiida-optimade",
+ "version": __version__,
+ "source_url": "https://github.com/aiidateam/aiida-optimade",
+ "maintainer": {"email": "casper.andersen@epfl.ch"},
+ },
}
if field not in res:
raise NoFallback(f"No fallback value found for '{field}'")
@@ -29,7 +39,7 @@ def _DEFAULTS(field: str) -> Any:
def __init__(self, server_cfg: Path = None):
server = (
- Path(__file__).resolve().parent.parent.joinpath("server.cfg")
+ Path().resolve().joinpath("server.cfg")
if server_cfg is None
else server_cfg
)
@@ -41,7 +51,6 @@ def load_from_json(self):
with open(self._path) as config_file:
config = json.load(config_file)
- self.version = config.get("version", self._DEFAULTS("version"))
self.page_limit = int(config.get("page_limit", self._DEFAULTS("page_limit")))
self.db_page_limit = int(
config.get("db_page_limit", self._DEFAULTS("db_page_limit"))
@@ -50,6 +59,9 @@ def load_from_json(self):
self.provider_fields = config.get(
"provider_fields", self._DEFAULTS("provider_fields")
)
+ self.implementation = config.get(
+ "implementation", self._DEFAULTS("implementation")
+ )
CONFIG = ServerConfig()
diff --git a/aiida_optimade/entry_collections.py b/aiida_optimade/entry_collections.py
index ab22aa9f..cab4c282 100644
--- a/aiida_optimade/entry_collections.py
+++ b/aiida_optimade/entry_collections.py
@@ -2,7 +2,7 @@
from fastapi import HTTPException
-from aiida import orm
+from aiida.orm import Entity, QueryBuilder
from optimade.filterparser import LarkParser
from optimade.models import NonnegativeInt, EntryResource
@@ -11,7 +11,7 @@
from aiida_optimade.config import CONFIG
from aiida_optimade.query_params import EntryListingQueryParams, SingleEntryQueryParams
from aiida_optimade.mappers import ResourceMapper
-from aiida_optimade.transformers import AiidaTransformerV0_10_1
+from aiida_optimade.transformers import AiidaTransformer
from aiida_optimade.utils import retrieve_queryable_properties
@@ -28,21 +28,21 @@ class AiidaCollection:
def __init__(
self,
- collection: orm.entities.Collection,
+ entity: Entity,
resource_cls: EntryResource,
resource_mapper: ResourceMapper,
):
- self.collection = collection
+ self.entity = entity
self.parser = LarkParser()
self.resource_cls = resource_cls
self.resource_mapper = resource_mapper
- self.transformer = AiidaTransformerV0_10_1()
+ self.transformer = AiidaTransformer()
self.provider = CONFIG.provider["prefix"]
self.provider_fields = CONFIG.provider_fields[resource_mapper.ENDPOINT]
self.page_limit = CONFIG.page_limit
self.db_page_limit = CONFIG.db_page_limit
- self.parser = LarkParser(version=(0, 10, 0))
+ self.parser = LarkParser(version=(0, 10, 1))
# "Cache"
self._data_available: int = None
@@ -51,10 +51,11 @@ def __init__(
self._latest_filter: dict = None
def get_attribute_fields(self) -> set:
+ """Get all attribute properties/fields for OPTiMaDe entity"""
schema = self.resource_cls.schema()
attributes = schema["properties"]["attributes"]
if "allOf" in attributes:
- allOf = attributes.pop("allOf")
+ allOf = attributes.pop("allOf") # pylint: disable=invalid-name
for dict_ in allOf:
attributes.update(dict_)
if "$ref" in attributes:
@@ -66,9 +67,8 @@ def get_attribute_fields(self) -> set:
return set(attributes["properties"].keys())
@staticmethod
- def _find(
- backend: orm.implementation.Backend, entity_type: orm.Entity, **kwargs
- ) -> orm.QueryBuilder:
+ def _find(entity_type: Entity, **kwargs) -> QueryBuilder:
+ """Workhorse function to perform AiiDA QueryBuilder query"""
for key in kwargs:
if key not in {"filters", "order_by", "limit", "project", "offset"}:
raise ValueError(
@@ -83,51 +83,52 @@ def _find(
offset = kwargs.get("offset", None)
project = kwargs.get("project", [])
- query = orm.QueryBuilder(backend=backend, limit=limit, offset=offset)
+ query = QueryBuilder(limit=limit, offset=offset)
query.append(entity_type, project=project, filters=filters)
query.order_by(order_by)
return query
- def _find_all(
- self, backend: orm.implementation.Backend, **kwargs
- ) -> orm.QueryBuilder:
- query = self._find(backend, self.collection.entity_type, **kwargs)
+ def _find_all(self, **kwargs) -> list:
+ """Helper function to instantiate an AiiDA QueryBuilder"""
+ query = self._find(self.entity, **kwargs)
res = query.all()
del query
return res
- def count(
- self, backend: orm.implementation.Backend, **kwargs
- ): # pylint: disable=arguments-differ
- query = self._find(backend, self.collection.entity_type, **kwargs)
+ def count(self, **kwargs) -> int:
+ """Count amount of data returned for query"""
+ query = self._find(self.entity, **kwargs)
res = query.count()
del query
return res
@property
def data_available(self) -> int:
+ """Get amount of data available under endpoint"""
if self._data_available is None:
raise CausationError(
"data_available MUST be set before it can be retrieved."
)
return self._data_available
- def set_data_available(self, backend: orm.implementation.Backend):
+ def set_data_available(self):
"""Set _data_available if it has not yet been set"""
if not self._data_available:
- self._data_available = self.count(backend)
+ self._data_available = self.count()
@property
def data_returned(self) -> int:
+ """Get amount of data returned for query"""
if self._data_returned is None:
raise CausationError(
"data_returned MUST be set before it can be retrieved."
)
return self._data_returned
- def set_data_returned(self, backend: orm.implementation.Backend, **criteria):
- """Set _data_returned if it has not yet been set or new filter does not equal latest filter.
+ def set_data_returned(self, **criteria):
+ """Set _data_returned if it has not yet been set or new filter does not equal
+ latest filter.
NB! Nested lists in filters are not accounted for.
"""
@@ -139,14 +140,13 @@ def set_data_returned(self, backend: orm.implementation.Backend, **criteria):
if key in list(criteria.keys()):
del criteria[key]
self._latest_filter = criteria.get("filters", {})
- self._data_returned = self.count(backend, **criteria)
+ self._data_returned = self.count(**criteria)
- def find( # pylint: disable=arguments-differ
- self,
- backend: orm.implementation.Backend,
- params: Union[EntryListingQueryParams, SingleEntryQueryParams],
+ def find(
+ self, params: Union[EntryListingQueryParams, SingleEntryQueryParams]
) -> Tuple[List[EntryResource], NonnegativeInt, bool, NonnegativeInt, set]:
- self.set_data_available(backend)
+ """Find all requested AiiDA entities as OPTiMaDe JSON objects"""
+ self.set_data_available()
criteria = self._parse_params(params)
all_fields = criteria.pop("fields")
@@ -156,11 +156,11 @@ def find( # pylint: disable=arguments-differ
fields = all_fields.copy()
if criteria.get("filters", {}) and self._get_extras_filter_fields():
- self._check_and_calculate_entities(backend)
+ self._check_and_calculate_entities()
- self.set_data_returned(backend, **criteria)
+ self.set_data_returned(**criteria)
- entities = self._find_all(backend, **criteria)
+ entities = self._find_all(**criteria)
results = []
for entity in entities:
results.append(
@@ -174,15 +174,14 @@ def find( # pylint: disable=arguments-differ
if isinstance(params, EntryListingQueryParams):
criteria_no_limit = criteria.copy()
criteria_no_limit.pop("limit", None)
- more_data_available = len(results) < self.count(
- backend, **criteria_no_limit
- )
+ more_data_available = len(results) < self.count(**criteria_no_limit)
else:
more_data_available = False
if len(results) > 1:
raise HTTPException(
status_code=404,
- detail=f"Instead of a single entry, {len(results)} entries were found",
+ detail=f"Instead of a single entry, {len(results)} entries were "
+ "found",
)
if isinstance(params, SingleEntryQueryParams):
@@ -250,7 +249,8 @@ def _parse_params(self, params: EntryListingQueryParams) -> dict:
if limit > self.db_page_limit:
raise HTTPException(
status_code=403,
- detail=f"Max allowed page_limit is {self.db_page_limit}, you requested {limit}",
+ detail=f"Max allowed page_limit is {self.db_page_limit}, "
+ f"you requested {limit}",
)
if limit == 0:
limit = self.page_limit
@@ -258,7 +258,7 @@ def _parse_params(self, params: EntryListingQueryParams) -> dict:
# response_fields
# All OPTiMaDe fields
- fields = {"id", "type"}
+ fields = self.resource_mapper.TOP_LEVEL_NON_ATTRIBUTES_FIELDS.copy()
fields |= self.get_attribute_fields()
# All provider-specific fields
fields |= {self.provider + _ for _ in self.provider_fields}
@@ -268,8 +268,9 @@ def _parse_params(self, params: EntryListingQueryParams) -> dict:
)
# sort
- # NOTE: sorting only works for extras fields for the nodes already with calculated extras.
- # To calculate all extras, make a single filter query using any extra field.
+ # NOTE: sorting only works for extras fields for the nodes already with
+ # calculated extras. To calculate all extras, make a single filter query
+ # using any extra field.
if getattr(params, "sort", False):
sort_spec = []
for entity_property in params.sort.split(","):
@@ -312,10 +313,11 @@ def _get_extras_filter_fields(self) -> set:
if field.startswith(self.resource_mapper.PROJECT_PREFIX)
}
- def _check_and_calculate_entities(self, backend: orm.implementation.Backend):
+ def _check_and_calculate_entities(self):
"""Check all entities have OPTiMaDe extras, else calculate them
- For a bit of optimization, we only care about a field if it has specifically been queried for using "filter".
+ For a bit of optimization, we only care about a field if it has specifically
+ been queried for using "filter".
"""
extras_keys = [
key for key in self.resource_mapper.PROJECT_PREFIX.split(".") if key
@@ -323,8 +325,7 @@ def _check_and_calculate_entities(self, backend: orm.implementation.Backend):
filter_fields = [
{"!has_key": field for field in self._get_extras_filter_fields()}
]
- necessary_entities_qb = orm.QueryBuilder().append(
- self.collection.entity_type,
+ necessary_entities_qb = self._find_all(
filters={
"or": [
{extras_keys[0]: {"!has_key": extras_keys[1]}},
@@ -334,9 +335,10 @@ def _check_and_calculate_entities(self, backend: orm.implementation.Backend):
project="id",
)
- if necessary_entities_qb.count() > 0:
- # Necessary entities for the OPTiMaDe query exist with unknown OPTiMaDe fields.
- necessary_entity_ids = [pk[0] for pk in necessary_entities_qb.iterall()]
+ if necessary_entities_qb:
+ # Necessary entities for the OPTiMaDe query exist with unknown OPTiMaDe
+ # fields.
+ necessary_entity_ids = [pk[0] for pk in necessary_entities_qb]
# Create the missing OPTiMaDe fields:
# All OPTiMaDe fields
@@ -347,7 +349,7 @@ def _check_and_calculate_entities(self, backend: orm.implementation.Backend):
fields = list({self.resource_mapper.alias_for(f) for f in fields})
entities = self._find_all(
- backend, filters={"id": {"in": necessary_entity_ids}}, project=fields
+ filters={"id": {"in": necessary_entity_ids}}, project=fields
)
for entity in entities:
self.resource_cls(
diff --git a/aiida_optimade/exceptions.py b/aiida_optimade/exceptions.py
index 5177e28c..61f2882e 100644
--- a/aiida_optimade/exceptions.py
+++ b/aiida_optimade/exceptions.py
@@ -7,14 +7,17 @@
def http_exception_handler(request: Request, exc: StarletteHTTPException):
+ """Handle HTTPException"""
return general_exception(request, exc)
def request_validation_exception_handler(request: Request, exc: RequestValidationError):
+ """Handle RequestValidationError"""
return general_exception(request, exc)
def validation_exception_handler(request: Request, exc: ValidationError):
+ """Handle ValidationError, usually multiple"""
from optimade.models import Error, ErrorSource
status = 500
@@ -32,4 +35,5 @@ def validation_exception_handler(request: Request, exc: ValidationError):
def general_exception_handler(request: Request, exc: Exception):
+ """A catch 'em all to handle any other form of Python Exception"""
return general_exception(request, exc)
diff --git a/aiida_optimade/main.py b/aiida_optimade/main.py
index 283ae266..eaae9b2d 100644
--- a/aiida_optimade/main.py
+++ b/aiida_optimade/main.py
@@ -1,3 +1,4 @@
+# pylint: disable=line-too-long
import os
from pydantic import ValidationError
@@ -8,12 +9,13 @@
from aiida import load_profile
+from optimade import __api_version__
+
from aiida_optimade.common.exceptions import AiidaError
-from aiida_optimade.config import CONFIG
import aiida_optimade.exceptions as exc_handlers
-app = FastAPI(
+APP = FastAPI(
title="OPTiMaDe API for AiiDA",
description=(
"The [Open Databases Integration for Materials Design (OPTiMaDe) consortium](http://www.optimade.org/) "
@@ -21,72 +23,61 @@
"[Automated Interactive Infrastructure and Database for Computational Science (AiiDA)](http://www.aiida.net) "
"aims to help researchers with managing complex workflows and making them fully reproducible."
),
- version=CONFIG.version,
+ version=__api_version__,
docs_url="/optimade/extensions/docs",
redoc_url="/optimade/extensions/redoc",
openapi_url="/optimade/extensions/openapi.json",
)
-profile_name = os.getenv("AIIDA_PROFILE")
-profile = load_profile(profile_name)
-
-valid_prefixes = ["/optimade"]
-version = [int(_) for _ in CONFIG.version[1:].split(".")]
-while version:
- if version[0] or len(version) >= 2:
- valid_prefixes.append(
- "/optimade/v{}".format(".".join([str(_) for _ in version]))
- )
- version.pop(-1)
+PROFILE_NAME = os.getenv("AIIDA_PROFILE")
+load_profile(PROFILE_NAME)
-@app.middleware("http")
+@APP.middleware("http")
async def backend_middleware(request: Request, call_next):
+ """Use custom AiiDA backend for all requests"""
+ from aiida.manage.manager import get_manager
+ from aiida.backends.sqlalchemy import reset_session
+
response = None
- try:
- if profile.database_backend == "django":
- from aiida_optimade.aiida_session import (
- OptimadeDjangoBackend as OptimadeBackend,
- )
-
- from warnings import warn
-
- warn(
- "The django backend does not support the special 1 AiiDA DB session per 1 HTTP request implemented in this package!"
- )
-
- elif profile.database_backend == "sqlalchemy":
- from aiida_optimade.aiida_session import (
- OptimadeSqlaBackend as OptimadeBackend,
- )
- else:
- raise AiidaError(
- f'Unknown AiiDA backend "{profile.database_backend}" for profile {profile}'
- )
-
- request.state.backend = OptimadeBackend()
- response = await call_next(request)
- finally:
- request.state.backend.close()
+ # Reset global AiiDA session and engine
+ if get_manager().backend_loaded:
+ reset_session(get_manager().get_profile())
+
+ response = await call_next(request)
if response:
return response
raise AiidaError("Failed to properly handle AiiDA backend middleware")
-app.add_exception_handler(StarletteHTTPException, exc_handlers.http_exception_handler)
-app.add_exception_handler(
+APP.add_exception_handler(StarletteHTTPException, exc_handlers.http_exception_handler)
+APP.add_exception_handler(
RequestValidationError, exc_handlers.request_validation_exception_handler
)
-app.add_exception_handler(ValidationError, exc_handlers.validation_exception_handler)
-app.add_exception_handler(Exception, exc_handlers.general_exception_handler)
-
+APP.add_exception_handler(ValidationError, exc_handlers.validation_exception_handler)
+APP.add_exception_handler(Exception, exc_handlers.general_exception_handler)
+
+
+# Create the following prefixes:
+# /optimade
+# /optimade/vMajor (but only if Major >= 1)
+# /optimade/vMajor.Minor
+# /optimade/vMajor.Minor.Patch
+VALID_PREFIXES = ["/optimade"]
+VERSION = [int(_) for _ in __api_version__.split(".")]
+while VERSION:
+ if VERSION[0] or len(VERSION) >= 2:
+ VALID_PREFIXES.append(
+ "/optimade/v{}".format(".".join([str(_) for _ in VERSION]))
+ )
+ VERSION.pop(-1)
from aiida_optimade.routers import ( # pylint: disable=wrong-import-position
structures,
info,
)
-for prefix in valid_prefixes:
- app.include_router(structures.router, prefix=prefix)
- app.include_router(info.router, prefix=prefix)
+for prefix in VALID_PREFIXES:
+ APP.include_router(structures.ROUTER, prefix=prefix)
+ APP.include_router(info.ROUTER, prefix=prefix)
diff --git a/aiida_optimade/mappers/entries.py b/aiida_optimade/mappers/entries.py
index 15dbdb77..54adccb4 100644
--- a/aiida_optimade/mappers/entries.py
+++ b/aiida_optimade/mappers/entries.py
@@ -15,16 +15,18 @@ class ResourceMapper(metaclass=abc.ABCMeta):
ENDPOINT: str = ""
ALIASES: Tuple[Tuple[str, str]] = ()
+ TOP_LEVEL_NON_ATTRIBUTES_FIELDS: set = {"id", "type", "relationships", "links"}
TRANSLATOR: AiidaEntityTranslator = AiidaEntityTranslator
ALL_ATTRIBUTES: list = []
REQUIRED_ATTRIBUTES: list = []
@classmethod
def all_aliases(cls) -> Tuple[Tuple[str, str]]:
+ """Get all ALIASES as a tuple"""
res = (
tuple(
(CONFIG.provider["prefix"] + field, field)
- for field in CONFIG.provider_fields[cls.ENDPOINT]
+ for field in CONFIG.provider_fields.get(cls.ENDPOINT, {})
)
+ cls.ALIASES
)
@@ -44,9 +46,12 @@ def alias_for(cls, field):
return dict(cls.all_aliases()).get(field, field)
@abc.abstractclassmethod
- def map_back(self, entity_properties: dict) -> dict:
+ def map_back(cls, entity_properties: dict) -> dict:
"""Map properties from AiiDA to OPTiMaDe
+ :param entity_properties: Found AiiDA properties through QueryBuilder query
+ :type entity_properties: dict
+
:return: A resource object in OPTiMaDe format
:rtype: dict
"""
diff --git a/aiida_optimade/mappers/structures.py b/aiida_optimade/mappers/structures.py
index 2d4b74bf..153b48ce 100644
--- a/aiida_optimade/mappers/structures.py
+++ b/aiida_optimade/mappers/structures.py
@@ -13,10 +13,11 @@ class StructureMapper(ResourceMapper):
ENDPOINT = "structures"
ALIASES = (
- ("id", "id"),
("immutable_id", "uuid"),
("last_modified", "mtime"),
- ("type", "extras.something.non.existing.type"),
+ ("type", "attributes.something.non.existing"),
+ ("relationships", "attributes.something.non.existing"),
+ ("links", "attributes.something.non.existing"),
)
TRANSLATOR = StructureDataTranslator
ALL_ATTRIBUTES = list(StructureResourceAttributes.schema().get("properties").keys())
@@ -26,9 +27,12 @@ class StructureMapper(ResourceMapper):
def map_back(cls, entity_properties: dict) -> dict:
"""Map properties from AiiDA to OPTiMaDe
+ :param entity_properties: Found AiiDA properties through QueryBuilder query
+ :type entity_properties: dict
+
:return: A resource object in OPTiMaDe format
+ :rtype: dict
"""
-
mapping = ((real, alias) for alias, real in cls.all_aliases())
new_object_attributes = {}
@@ -38,34 +42,26 @@ def map_back(cls, entity_properties: dict) -> dict:
if (
real in entity_properties
and entity_properties[real] is not None
- and alias not in ["id", "type"]
+ and alias not in cls.TOP_LEVEL_NON_ATTRIBUTES_FIELDS
):
new_object_attributes[alias] = entity_properties[real]
- # Particular attributes
- # Remove "extras.optimade." prefix from reals to create aliases
- reals = []
- for field, value in entity_properties.items():
- if field.startswith(cls.PROJECT_PREFIX):
- if value is None:
- continue
- reals.append(field)
- for real in reals:
- alias = real[len(cls.PROJECT_PREFIX) :]
- new_object_attributes[alias] = entity_properties[real]
-
- if "id" in entity_properties:
- new_object["id"] = entity_properties["id"]
- else:
+ # We always need "id"
+ if "id" not in entity_properties:
raise KeyError(
f'"id" should be present in entity_properties: {entity_properties}'
)
+ for field in cls.TOP_LEVEL_NON_ATTRIBUTES_FIELDS:
+ value = entity_properties.get(field, None)
+ if value is not None:
+ new_object[field] = value
+
new_object["attributes"] = cls.build_attributes(
new_object_attributes, new_object["id"]
)
-
new_object["type"] = cls.ENDPOINT
+
return new_object
@classmethod
@@ -105,7 +101,8 @@ def build_attributes(cls, retrieved_attributes: dict, entry_pk: int) -> dict:
f"Parsing required {attribute} from "
f"{cls.TRANSLATOR} has not yet been implemented."
)
- # Print warning that parsing non-required attribute has not yet been implemented
+ # Print warning that parsing non-required attribute has not yet
+ # been implemented
else:
res[attribute] = create_attribute()
# Store new attributes in `extras`
diff --git a/aiida_optimade/query_params.py b/aiida_optimade/query_params.py
index 9e9b9a79..5c8b96de 100644
--- a/aiida_optimade/query_params.py
+++ b/aiida_optimade/query_params.py
@@ -1,5 +1,6 @@
+# pylint: disable=line-too-long,too-few-public-methods
from fastapi import Query
-from pydantic import EmailStr # pylint: disable=no-name-in-module
+from pydantic import EmailStr # pylint: disable=no-name-in-module,useless-suppression
from optimade.models import NonnegativeInt
diff --git a/aiida_optimade/routers/info.py b/aiida_optimade/routers/info.py
index 50274c70..6736b3ad 100644
--- a/aiida_optimade/routers/info.py
+++ b/aiida_optimade/routers/info.py
@@ -1,3 +1,4 @@
+# pylint: disable=missing-function-docstring
import urllib
from typing import Union
@@ -5,6 +6,8 @@
from starlette.exceptions import HTTPException as StarletteHTTPException
from starlette.requests import Request
+from optimade import __api_version__
+
from optimade.models import (
ErrorResponse,
InfoResponse,
@@ -12,19 +15,18 @@
StructureResource,
)
-from aiida_optimade.config import CONFIG
import aiida_optimade.utils as u
-router = APIRouter()
+ROUTER = APIRouter()
ENTRY_INFO_SCHEMAS = {"structures": StructureResource.schema}
-@router.get(
+@ROUTER.get(
"/info",
response_model=Union[InfoResponse, ErrorResponse],
- response_model_skip_defaults=False,
+ response_model_exclude_unset=False,
tags=["Info"],
)
def get_info(request: Request):
@@ -35,40 +37,41 @@ def get_info(request: Request):
meta=u.meta_values(str(request.url), 1, 1, more_data_available=False),
data=BaseInfoResource(
attributes=BaseInfoAttributes(
- api_version=CONFIG.version,
+ api_version=f"v{__api_version__}",
available_api_versions=[
{
"url": f"{parse_result.scheme}://{parse_result.netloc}",
- "version": f"{CONFIG.version[1:]}",
+ "version": __api_version__,
}
],
- entry_types_by_format={"json": ["structures"]},
+ entry_types_by_format={"json": list(ENTRY_INFO_SCHEMAS.keys())},
available_endpoints=[
"info",
- "structures",
"extensions/docs",
"extensions/redoc",
"extensions/openapi.json",
- ],
+ ]
+ + list(ENTRY_INFO_SCHEMAS.keys()),
)
),
)
-@router.get(
+@ROUTER.get(
"/info/{entry}",
response_model=Union[EntryInfoResponse, ErrorResponse],
- response_model_skip_defaults=True,
- tags=["Info", "Structure"],
+ response_model_exclude_unset=True,
+ tags=["Info"],
)
def get_info_entry(request: Request, entry: str):
from optimade.models import EntryInfoResource
- valid_entry_info_endpoints = {"structures"}
+ valid_entry_info_endpoints = ENTRY_INFO_SCHEMAS.keys()
if entry not in valid_entry_info_endpoints:
raise StarletteHTTPException(
status_code=404,
- detail=f"Entry info not found for {entry}, valid entry info endpoints are: {valid_entry_info_endpoints}",
+ detail=f"Entry info not found for {entry}, valid entry info endpoints are:"
+ f" {valid_entry_info_endpoints}",
)
schema = ENTRY_INFO_SCHEMAS[entry]()
diff --git a/aiida_optimade/routers/structures.py b/aiida_optimade/routers/structures.py
index cb39abd9..248482d5 100644
--- a/aiida_optimade/routers/structures.py
+++ b/aiida_optimade/routers/structures.py
@@ -1,9 +1,10 @@
+# pylint: disable=missing-function-docstring
from typing import Union
from fastapi import APIRouter, Depends
from starlette.requests import Request
-from aiida import orm
+from aiida.orm import StructureData
from optimade.models import (
ErrorResponse,
@@ -15,53 +16,41 @@
from aiida_optimade.query_params import EntryListingQueryParams, SingleEntryQueryParams
from aiida_optimade.entry_collections import AiidaCollection
from aiida_optimade.mappers import StructureMapper
-from aiida_optimade.utils import get_backend
from .utils import get_entries, get_single_entry
-router = APIRouter()
+ROUTER = APIRouter()
-structures = AiidaCollection(
- orm.StructureData.objects, StructureResource, StructureMapper
-)
+STRUCTURES = AiidaCollection(StructureData, StructureResource, StructureMapper)
-@router.get(
+@ROUTER.get(
"/structures",
response_model=Union[StructureResponseMany, ErrorResponse],
- response_model_skip_defaults=True,
- tags=["Structure"],
+ response_model_exclude_unset=True,
+ tags=["Structures"],
)
-def get_structures(
- request: Request,
- params: EntryListingQueryParams = Depends(),
- backend: orm.implementation.Backend = Depends(get_backend),
-):
+def get_structures(request: Request, params: EntryListingQueryParams = Depends()):
return get_entries(
- backend=backend,
- collection=structures,
+ collection=STRUCTURES,
response=StructureResponseMany,
request=request,
params=params,
)
-@router.get(
+@ROUTER.get(
"/structures/{entry_id}",
response_model=Union[StructureResponseOne, ErrorResponse],
- response_model_skip_defaults=True,
- tags=["Structure"],
+ response_model_exclude_unset=True,
+ tags=["Structures"],
)
def get_single_structure(
- request: Request,
- entry_id: int,
- params: SingleEntryQueryParams = Depends(),
- backend: orm.implementation.Backend = Depends(get_backend),
+ request: Request, entry_id: int, params: SingleEntryQueryParams = Depends()
):
return get_single_entry(
- backend=backend,
- collection=structures,
+ collection=STRUCTURES,
entry_id=entry_id,
response=StructureResponseOne,
request=request,
diff --git a/aiida_optimade/routers/utils.py b/aiida_optimade/routers/utils.py
index 7945eca2..f6718ad6 100644
--- a/aiida_optimade/routers/utils.py
+++ b/aiida_optimade/routers/utils.py
@@ -4,8 +4,6 @@
from starlette.exceptions import HTTPException as StarletteHTTPException
from starlette.requests import Request
-from aiida import orm
-
from optimade.models import (
ToplevelLinks,
EntryResource,
@@ -31,20 +29,20 @@ def handle_pagination(
query["page_offset"] = int(query.get("page_offset", ["0"])[0]) - int(
query.get("page_limit", [CONFIG.page_limit])[0]
)
+ urlencoded_prev = None
if query["page_offset"] > 0:
urlencoded_prev = urllib.parse.urlencode(query, doseq=True)
- pagination[
- "prev"
- ] = f"{parse_result.scheme}://{parse_result.netloc}{parse_result.path}?{urlencoded_prev}"
elif query["page_offset"] == 0 or abs(query["page_offset"]) < int(
query.get("page_limit", [CONFIG.page_limit])[0]
):
prev_query = query.copy()
prev_query.pop("page_offset")
urlencoded_prev = urllib.parse.urlencode(prev_query, doseq=True)
+ if urlencoded_prev:
pagination[
"prev"
- ] = f"{parse_result.scheme}://{parse_result.netloc}{parse_result.path}?{urlencoded_prev}"
+ ] = f"{parse_result.scheme}://{parse_result.netloc}{parse_result.path}"
+ pagination["prev"] += f"?{urlencoded_prev}"
# "next"
if more_data_available:
@@ -56,7 +54,9 @@ def handle_pagination(
urlencoded_next = urllib.parse.urlencode(query, doseq=True)
pagination[
"next"
- ] = f"{parse_result.scheme}://{parse_result.netloc}{parse_result.path}?{urlencoded_next}"
+ ] = f"{parse_result.scheme}://{parse_result.netloc}{parse_result.path}"
+ if urlencoded_next:
+ pagination["next"] += f"?{urlencoded_next}"
else:
pagination["next"] = None
@@ -64,17 +64,20 @@ def handle_pagination(
def handle_response_fields(
- results: Union[List[EntryResource], EntryResource], fields: set
+ results: Union[List[EntryResource], EntryResource],
+ fields: set,
+ collection: AiidaCollection,
) -> dict:
+ """Prune results to only include queried fields (from `response_fields`)"""
if not isinstance(results, list):
results = [results]
- non_attribute_fields = {"id", "type"}
+ non_attribute_fields = collection.resource_mapper.TOP_LEVEL_NON_ATTRIBUTES_FIELDS
top_level = {_ for _ in non_attribute_fields if _ in fields}
attribute_level = fields - non_attribute_fields
new_results = []
while results:
entry = results.pop(0)
- new_entry = entry.dict(exclude=top_level, skip_defaults=True)
+ new_entry = entry.dict(exclude=top_level, exclude_unset=True)
for field in attribute_level:
if field in new_entry["attributes"]:
del new_entry["attributes"][field]
@@ -85,7 +88,6 @@ def handle_response_fields(
def get_entries(
- backend: orm.implementation.Backend,
collection: AiidaCollection,
response: EntryResponseMany,
request: Request,
@@ -98,14 +100,14 @@ def get_entries(
more_data_available,
data_available,
fields,
- ) = collection.find(backend, params)
+ ) = collection.find(params)
pagination = handle_pagination(
request=request, more_data_available=more_data_available, nresults=len(results)
)
if fields:
- results = handle_response_fields(results, fields)
+ results = handle_response_fields(results, fields, collection)
return response(
links=ToplevelLinks(**pagination),
@@ -116,8 +118,7 @@ def get_entries(
)
-def get_single_entry( # pylint: disable=too-many-arguments
- backend: orm.implementation.Backend,
+def get_single_entry(
collection: AiidaCollection,
entry_id: str,
response: EntryResponseOne,
@@ -132,18 +133,19 @@ def get_single_entry( # pylint: disable=too-many-arguments
more_data_available,
data_available,
fields,
- ) = collection.find(backend, params)
+ ) = collection.find(params)
if more_data_available:
raise StarletteHTTPException(
status_code=500,
- detail=f"more_data_available MUST be False for single entry response, however it is {more_data_available}",
+ detail="more_data_available MUST be False for single entry response, "
+ f"however it is {more_data_available}",
)
links = ToplevelLinks(next=None)
if fields and results is not None:
- results = handle_response_fields(results, fields)[0]
+ results = handle_response_fields(results, fields, collection)[0]
return response(
links=links,
diff --git a/aiida_optimade/tests/__init__.py b/aiida_optimade/tests/__init__.py
deleted file mode 100644
index e69de29b..00000000
diff --git a/aiida_optimade/transformers/aiida.py b/aiida_optimade/transformers/aiida.py
index 91c8bc35..8562aa3e 100644
--- a/aiida_optimade/transformers/aiida.py
+++ b/aiida_optimade/transformers/aiida.py
@@ -1,110 +1,21 @@
+# pylint: disable=no-self-use,too-many-public-methods
from lark import Transformer, v_args, Token
-class TransformerError(Exception):
- """Error in transforming filter expression"""
-
-
-__all__ = ("AiidaTransformerV0_9_7", "AiidaTransformerV0_10_1")
+__all__ = ("AiidaTransformer",)
# Conversion map from the OPTiMaDe operators to the QueryBuilder operators
-operator_conversion = {"=": "==", "!=": "!==", "in": "contains"}
+OPERATOR_CONVERSION = {"=": "==", "!=": "!==", "in": "contains"}
def op_conv_map(operator):
- return operator_conversion.get(operator, operator)
-
-
-def conjoin_args(args):
- """Conjoin from left to right.
-
- CONJUNCTION: AND | OR
-
- :param args: [ CONJUNCTION]
- :type args: list
-
- :return: AiiDA QueryBuilder filter
- :rtype: dict
- """
- if len(args) == 1: # Only
- return args[0]
-
- conjunction = args[1].value.lower()
- return {conjunction: [args[0], args[2]]}
-
-
-class AiidaTransformerV0_9_7(Transformer):
- """Transformer for AiiDA using QueryBuilder"""
-
- def start(self, args):
- return args[0]
-
- def expression(self, args):
- return conjoin_args(args)
-
- def term(self, args):
- if args[0] == "(":
- return conjoin_args(args[1:-1])
- return conjoin_args(args)
-
- def atom(self, args):
- """Optionally negate a comparison."""
- # Two cases:
- # 1. args is parsed comparison, or
- # 2. args is NOT token and parsed comparison
- # - [ Token(NOT, 'not'), {field: {op: val}} ]
- # -> {field: {!op: val}}
- if len(args) == 2:
- field, predicate = next(((k, v) for k, v in args[1].items()))
- for op in list(predicate.keys()):
- if op.startswith("!"):
- not_op = op[1:]
- else:
- not_op = "!" + op
- predicate[not_op] = predicate.pop(op)
- return {field: {"!in": predicate}}
-
- return args[0]
-
- def comparison(self, args):
- field = args[0].value
- if isinstance(args[2], list):
- if args[1].value != "=":
- raise NotImplementedError(
- "x,y,z values only supported for '=' operator"
- )
- return {field: {"in": args[2]}}
-
- op = op_conv_map(args[1].value)
- value_token = args[2]
- try:
- value = float(value_token.value)
- except ValueError:
- value = value_token.value
- if value.startswith('"') and value.endswith('"'):
- value = value[1:-1]
- else:
- if value.is_integer():
- value = int(value)
- return {field: {op: value}}
+ """Convert operator or return same operator"""
+ return OPERATOR_CONVERSION.get(operator, operator)
- def combined(self, args):
- elements = []
- for value_token in args:
- try:
- value = float(value_token.value)
- except ValueError:
- value = value_token.value
- if value.startswith('"') and value.endswith('"'):
- value = value[1:-1]
- else:
- if value.is_integer():
- value = int(value)
- elements.append(value)
- return elements
+class AiidaTransformer(Transformer):
+ """Transform OPTiMaDe query to AiiDA QueryBuilder queryhelp query"""
-class AiidaTransformerV0_10_1(Transformer):
reversed_operator_map = {
"<": ">",
"<=": ">=",
@@ -119,23 +30,39 @@ def __init__(self):
super().__init__()
def filter(self, arg):
- # filter: expression*
+ """filter: expression*"""
return arg[0] if arg else None
@v_args(inline=True)
def constant(self, value):
- # constant: string | number
- # Note: Do nothing!
+ """constant: string | number"""
+ # NOTE: Do nothing!
return value
@v_args(inline=True)
def value(self, value):
- # value: string | number | property
+ """value: string | number | property"""
+ # NOTE: Do nothing!
+ return value
+
+ @v_args(inline=True)
+ def non_string_value(self, value):
+ """ non_string_value: number | property """
# Note: Do nothing!
return value
+ @v_args(inline=True)
+ def not_implemented_string(self, value):
+ """ not_implemented_string: value
+
+ Raise NotImplementedError.
+ For further information, see Materials-Consortia/OPTiMaDe issue 157:
+ https://github.com/Materials-Consortia/OPTiMaDe/issues/157
+ """
+ raise NotImplementedError("Comparing strings is not yet implemented.")
+
def value_list(self, args):
- # value_list: [ OPERATOR ] value ( "," [ OPERATOR ] value )*
+ """value_list: [ OPERATOR ] value ( "," [ OPERATOR ] value )*"""
values = []
for value in args:
try:
@@ -150,25 +77,35 @@ def value_list(self, args):
return values
def value_zip(self, arg):
- # value_zip: [ OPERATOR ] value ":" [ OPERATOR ] value (":" [ OPERATOR ] value)*
+ """
+ value_zip: [ OPERATOR ] value ":" [ OPERATOR ] value (":" [ OPERATOR ] value)*
+ """
raise NotImplementedError
def value_zip_list(self, arg):
- # value_zip_list: value_zip ( "," value_zip )*
+ """
+ value_zip_list: value_zip ( "," value_zip )*
+ """
raise NotImplementedError
def expression(self, arg):
- # expression: expression_clause ( OR expression_clause )
- # expression with and without 'OR'
+ """
+ expression: expression_clause ( OR expression_clause )
+ expression with and without 'OR'
+ """
return {"or": arg} if len(arg) > 1 else arg[0]
def expression_clause(self, arg):
- # expression_clause: expression_phrase ( AND expression_phrase )*
- # expression_clause with and without 'AND'
+ """
+ expression_clause: expression_phrase ( AND expression_phrase )*
+ expression_clause with and without 'AND'
+ """
return {"and": arg} if len(arg) > 1 else arg[0]
def expression_phrase(self, arg):
- # expression_phrase: [ NOT ] ( comparison | predicate_comparison | "(" expression ")" )
+ """
+ expression_phrase: [ NOT ] ( comparison | "(" expression ")" )
+ """
if len(arg) == 1:
# without NOT
return arg[0]
@@ -178,30 +115,41 @@ def expression_phrase(self, arg):
@v_args(inline=True)
def comparison(self, value):
- # comparison: constant_first_comparison | property_first_comparison
- # Note: Do nothing!
+ """
+ comparison: constant_first_comparison | property_first_comparison
+ """
+ # NOTE: Do nothing!
return value
def property_first_comparison(self, arg):
- # property_first_comparison: property ( value_op_rhs | known_op_rhs | fuzzy_string_op_rhs | set_op_rhs |
- # set_zip_op_rhs )
+ """
+ property_first_comparison: property ( value_op_rhs |
+ known_op_rhs |
+ fuzzy_string_op_rhs |
+ set_op_rhs |
+ set_zip_op_rhs |
+ length_op_rhs )
+ """
return {arg[0]: arg[1]}
def constant_first_comparison(self, arg):
- # constant_first_comparison: constant value_op_rhs
- # TODO: Probably the value_op_rhs rule is not the best for implementing this.
- return {
- prop: {self.reversed_operator_map[oper]: arg[0]}
- for oper, prop in arg[1].items()
- }
+ """
+ constant_first_comparison: constant OPERATOR ( non_string_value |
+ not_implemented_string )
+ """
+ return {arg[2]: {self.reversed_operator_map[arg[1]]: arg[0]}}
@v_args(inline=True)
def value_op_rhs(self, operator, value):
- # value_op_rhs: OPERATOR value
+ """
+ value_op_rhs: OPERATOR value
+ """
return {op_conv_map(operator.value): value}
def known_op_rhs(self, arg):
- # known_op_rhs: IS ( KNOWN | UNKNOWN )
+ """
+ known_op_rhs: IS ( KNOWN | UNKNOWN )
+ """
if arg[1] == "KNOWN":
key = "!=="
if arg[1] == "UNKNOWN":
@@ -209,8 +157,11 @@ def known_op_rhs(self, arg):
return {key: None}
def fuzzy_string_op_rhs(self, arg):
- # fuzzy_string_op_rhs: CONTAINS string | STARTS [ WITH ] string | ENDS [ WITH ] string
-
+ """
+ fuzzy_string_op_rhs: CONTAINS string |
+ STARTS [ WITH ] string |
+ ENDS [ WITH ] string
+ """
# The WITH keyword may be omitted.
if isinstance(arg[1], Token) and arg[1].type == "WITH":
pattern = arg[2]
@@ -226,8 +177,12 @@ def fuzzy_string_op_rhs(self, arg):
return {"like": like}
def set_op_rhs(self, arg):
- # set_op_rhs: HAS ( [ OPERATOR ] value | ALL value_list | ANY value_list | ONLY value_list )
-
+ """
+ set_op_rhs: HAS ( [ OPERATOR ] value |
+ ALL value_list |
+ ANY value_list |
+ ONLY value_list )
+ """
if len(arg) == 2:
# only value without OPERATOR
return {"contains": [arg[1]]}
@@ -247,44 +202,62 @@ def set_op_rhs(self, arg):
)
def set_zip_op_rhs(self, arg):
- # set_zip_op_rhs: property_zip_addon HAS ( value_zip | ONLY value_zip_list | ALL value_zip_list |
- # ANY value_zip_list )
+ """
+ set_zip_op_rhs: property_zip_addon HAS ( value_zip |
+ ONLY value_zip_list |
+ ALL value_zip_list |
+ ANY value_zip_list )
+ """
raise NotImplementedError
- def predicate_comparison(self, arg):
- # predicate_comparison: LENGTH property OPERATOR value
- operator = arg[2].value
+ def length_op_rhs(self, arg):
+ """
+ length_op_rhs: LENGTH [ OPERATOR ] value
+ """
+ if len(arg) == 3:
+ operator = arg[1].value
+ else:
+ operator = "="
+
if operator in self.list_operator_map:
- return {arg[1]: {self.list_operator_map[operator]: arg[3]}}
+ return {self.list_operator_map[operator]: arg[-1]}
+
if operator in {">=", "<="}:
return {
- arg[1]: {
- "or": [
- {self.list_operator_map[operator[0]]: arg[3]},
- {self.list_operator_map[operator[1]]: arg[3]},
- ]
- }
+ "or": [
+ {self.list_operator_map[operator[0]]: arg[-1]},
+ {self.list_operator_map[operator[1]]: arg[-1]},
+ ]
}
- raise TransformerError(
- f"length_comparison has failed with {arg}. Unknown operator."
+ raise NotImplementedError(
+ f"length_comparison has failed with {arg}. "
+ "Unknown not-implemented operator."
)
def property_zip_addon(self, arg):
- # property_zip_addon: ":" property (":" property)*
+ """
+ property_zip_addon: ":" property (":" property)*
+ """
raise NotImplementedError
def property(self, arg):
- # property: IDENTIFIER ( "." IDENTIFIER )*
+ """
+ property: IDENTIFIER ( "." IDENTIFIER )*
+ """
return ".".join(arg)
@v_args(inline=True)
def string(self, string):
- # string: ESCAPED_STRING
+ """
+ string: ESCAPED_STRING
+ """
return string.strip('"')
def number(self, arg):
- # number: SIGNED_INT | SIGNED_FLOAT
+ """
+ number: SIGNED_INT | SIGNED_FLOAT
+ """
token = arg[0]
if token.type == "SIGNED_INT":
type_ = int
@@ -294,5 +267,6 @@ def number(self, arg):
def __default__(self, data, children, meta):
raise NotImplementedError(
- f"Calling __default__, i.e., unknown grammar concept. data: {data}, children: {children}, meta: {meta}"
+ "Calling __default__, i.e., unknown grammar concept. "
+ f"data: {data}, children: {children}, meta: {meta}"
)
diff --git a/aiida_optimade/translators/entities.py b/aiida_optimade/translators/entities.py
index ddae857f..5fff6754 100644
--- a/aiida_optimade/translators/entities.py
+++ b/aiida_optimade/translators/entities.py
@@ -8,7 +8,7 @@
__all__ = ("AiidaEntityTranslator",)
-class AiidaEntityTranslator:
+class AiidaEntityTranslator: # pylint: disable=too-few-public-methods
"""Create OPTiMaDe entry attributes from an AiiDA Entity Node - Base class
For speed and reusability, save attributes in the Node's extras.
@@ -30,7 +30,9 @@ def _get_unique_node_property(self, project: str) -> Union[Node, Any]:
raise AiidaEntityNotFound(
f"Could not find {self.AIIDA_ENTITY} with PK {self._pk}."
)
- return query.first()[0]
+ res = query.first()[0]
+ del query
+ return res
@property
def _node(self) -> Node:
@@ -48,7 +50,7 @@ def _node_loaded(self):
return bool(self.__node)
def _get_optimade_extras(self) -> Union[None, dict]:
- if self._node_loaded:
+ if self._node_loaded: # pylint: disable=using-constant-test
return self._node.extras.get(self.EXTRAS_KEY, None)
return self._get_unique_node_property(f"extras.{self.EXTRAS_KEY}")
@@ -82,10 +84,11 @@ def store_attributes(self):
)
else:
raise AiidaError(
- f'Unknown AiiDA backend "{profile.database_backend}" for profile {profile}'
+ f'Unknown AiiDA backend "{profile.database_backend}" for profile '
+ f"{profile}"
)
- # For posterity, this is how to the same, going through AiiDA's API:
+ # For posterity, this is how to do the same, going through AiiDA's API:
# self._node.set_extra(self.EXTRAS_KEY, optimade)
# Lastly, reset NODE in an attempt to remove it from memory
diff --git a/aiida_optimade/translators/structures.py b/aiida_optimade/translators/structures.py
index b8516182..3f7dfc72 100644
--- a/aiida_optimade/translators/structures.py
+++ b/aiida_optimade/translators/structures.py
@@ -1,3 +1,4 @@
+# pylint: disable=line-too-long
import itertools
from typing import List, Union
@@ -54,7 +55,7 @@ def has_vacancies(self):
def kind_has_vacancies(weights):
"""Copy of aiida.orm.Kinds:has_vacancies"""
w_sum = sum(weights)
- return not (1.0 - w_sum < _sum_threshold)
+ return not 1.0 - w_sum < _sum_threshold
return any(kind_has_vacancies(kind["weights"]) for kind in self._kinds)
@@ -77,6 +78,7 @@ def get_formula(self, mode="hill", separator=""):
return get_formula(symbol_list, mode=mode, separator=separator)
def get_symbol_weights(self) -> dict:
+ """Get weights of all symbols / chemical elements"""
occupation = {}.fromkeys(sorted(self.get_symbols_set()), 0.0)
for kind in self._kinds:
number_of_sites = len(
@@ -282,10 +284,7 @@ def chemical_formula_anonymous(self) -> str:
]
# NOTE: This does not expect more than Zz elements (26+26*26 = 702) - should be enough ...
anonymous_elements.append(symbol)
- map_anonymous = {
- symbol: new_symbol
- for symbol, new_symbol in zip(elements, anonymous_elements)
- }
+ map_anonymous = dict(zip(elements, anonymous_elements))
occupation = self.get_symbol_weights()
for symbol, weight in occupation.items():
diff --git a/aiida_optimade/utils.py b/aiida_optimade/utils.py
index e7f53210..d9b656b4 100644
--- a/aiida_optimade/utils.py
+++ b/aiida_optimade/utils.py
@@ -7,10 +7,13 @@
from starlette.requests import Request
from starlette.responses import JSONResponse
+from optimade import __api_version__
+
from optimade.models import (
ResponseMeta,
ResponseMetaQuery,
Provider,
+ Implementation,
Error,
ErrorResponse,
)
@@ -29,12 +32,13 @@ def meta_values(
query=ResponseMetaQuery(
representation=f"{parse_result.path}?{parse_result.query}"
),
- api_version=CONFIG.version,
+ api_version=f"v{__api_version__}",
time_stamp=datetime.utcnow(),
data_returned=data_returned,
more_data_available=more_data_available,
provider=Provider(**provider),
data_available=data_available,
+ implementation=Implementation(**CONFIG.implementation),
**kwargs,
)
@@ -42,10 +46,11 @@ def meta_values(
def general_exception(
request: Request, exc: Exception, **kwargs: Dict[str, Any]
) -> JSONResponse:
- tb = "".join(
+ """Helper to return Python exceptions as OPTiMaDe errors in JSON format"""
+ trace = "".join(
traceback.format_exception(etype=type(exc), value=exc, tb=exc.__traceback__)
)
- print(tb)
+ print(trace)
try:
status_code = exc.status_code
@@ -65,27 +70,24 @@ def general_exception(
content=jsonable_encoder(
ErrorResponse(
meta=meta_values(
- # TODO: Add debug and print only tb if debug = True
+ # TODO: Add debug and print only trace if debug = True
str(request.url),
0,
0,
False,
- **{CONFIG.provider["prefix"] + "traceback": tb},
+ **{CONFIG.provider["prefix"] + "traceback": trace},
),
errors=errors,
),
- skip_defaults=True,
+ exclude_unset=True,
),
)
-def get_backend(request: Request):
- return request.state.backend
-
-
def retrieve_queryable_properties(
schema: dict, queryable_properties: list
) -> Tuple[dict, dict]:
+ """Get all queryable properties from an OPTiMaDe schema"""
properties = {}
all_properties = {}
diff --git a/profiles/docker-compose.yml b/profiles/docker-compose.yml
index adf9907c..10db09f8 100644
--- a/profiles/docker-compose.yml
+++ b/profiles/docker-compose.yml
@@ -9,9 +9,8 @@ services:
environment:
AIIDA_PATH: /app
AIIDA_PROFILE: quicksetup
- # BOKEH_PREFIX: /curated-cofs
volumes:
- - '/Users/leopold/Personal/Postdoc-MARVEL/aiida_folders/aiida_rmq/.aiida/repository-quicksetup:/app/.aiida/repository-quicksetup'
- - '/Users/leopold/Personal/Postdoc-MARVEL/repos/aiida/aiida-optimade/profiles:/profiles'
+ - '/tmp:/app/.aiida/repository-quicksetup'
+ - '.:/profiles'
ports:
- '3253:80'
diff --git a/pytest.ini b/pytest.ini
index f912cf47..f5ac5222 100644
--- a/pytest.ini
+++ b/pytest.ini
@@ -2,4 +2,5 @@
filterwarnings =
ignore:.*PY_SSIZE_T_CLEAN will be required for '#' formats.*:DeprecationWarning
ignore:.*"@coroutine" decorator is deprecated since Python 3.8, use "async def" instead.*:DeprecationWarning
- ignore:Using or importing the ABCs from 'collections' instead of from 'collections.abc' is deprecated, and in 3.8 it will stop working:DeprecationWarning
+ ignore:.*Using or importing the ABCs from 'collections' instead of from 'collections.abc' is deprecated.*:DeprecationWarning
+ ignore:.*Check the index_links.json file exists.*:UserWarning
diff --git a/run.sh b/run.sh
index 04ba8a08..8fb17039 100755
--- a/run.sh
+++ b/run.sh
@@ -6,4 +6,4 @@ else
export AIIDA_PROFILE="optimade_sqla"
fi
-uvicorn aiida_optimade.main:app --reload --port 5000
+uvicorn aiida_optimade.main:APP --reload --port 5000
diff --git a/setup.json b/setup.json
index ca65e113..7178dd92 100644
--- a/setup.json
+++ b/setup.json
@@ -1,6 +1,6 @@
{
"name": "aiida-optimade",
- "version": "0.3.0",
+ "version": "0.4.0",
"url": "https://github.com/aiidateam/aiida-optimade",
"license": "MIT License",
"author": "The AiiDA team",
@@ -16,6 +16,7 @@
"Operating System :: MacOS :: MacOS X",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python :: 3 :: Only",
+ "Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Intended Audience :: Developers",
"Topic :: Database",
diff --git a/setup.py b/setup.py
index 5340345b..42b68cae 100644
--- a/setup.py
+++ b/setup.py
@@ -2,27 +2,27 @@
from pathlib import Path
from setuptools import setup, find_packages
-module_dir = Path(__file__).resolve().parent
+MODULE_DIR = Path(__file__).resolve().parent
-with open(module_dir.joinpath("setup.json")) as fp:
- SETUP_JSON = json.load(fp)
+with open(MODULE_DIR.joinpath("setup.json")) as handle:
+ SETUP_JSON = json.load(handle)
-testing_deps = ["pytest~=3.6", "pytest-cov", "codecov"]
-dev_deps = ["pylint", "black", "pre-commit"] + testing_deps
+TESTING = ["pytest~=3.6", "pytest-cov", "codecov"]
+DEV = ["pylint", "black", "pre-commit", "invoke"] + TESTING
setup(
- long_description=open(module_dir.joinpath("README.md")).read(),
+ long_description=open(MODULE_DIR.joinpath("README.md")).read(),
long_description_content_type="text/markdown",
packages=find_packages(),
- python_requires=">=3.7",
+ python_requires=">=3.6",
install_requires=[
- "aiida-core~=1.0.1",
- "fastapi~=0.44",
+ "aiida-core~=1.0",
+ "fastapi~=0.47",
"lark-parser~=0.7.8",
- "optimade~=0.2",
- "pydantic<1.0.0",
+ "optimade~=0.3.1",
+ "pydantic~=1.3",
"uvicorn",
],
- extras_require={"dev": dev_deps, "testing": testing_deps},
+ extras_require={"dev": DEV, "testing": TESTING},
**SETUP_JSON
)
diff --git a/tasks.py b/tasks.py
new file mode 100644
index 00000000..dbe98ae8
--- /dev/null
+++ b/tasks.py
@@ -0,0 +1,85 @@
+import re
+
+from invoke import task
+
+from aiida_optimade import __version__
+
+
+@task
+def setver(_, patch=False, new_ver=""):
+ """Update the package version throughout the package"""
+
+ if (not patch and not new_ver) or (patch and new_ver):
+ raise Exception(
+ "Either use --patch or specify e.g. "
+ "--new-ver='Major.Minor.Patch(a|b|rc)?[0-9]+'"
+ )
+ if patch:
+ ver = [int(x) for x in __version__.split(".")]
+ ver[2] += 1
+ new_ver = ".".join(map(str, ver))
+ with open("aiida_optimade/__init__.py", "r") as handle:
+ lines = [
+ re.sub("__version__ = .+", '__version__ = "{}"'.format(new_ver), l.rstrip())
+ for l in handle
+ ]
+ with open("aiida_optimade/__init__.py", "w") as handle:
+ handle.write("\n".join(lines))
+ handle.write("\n")
+
+ with open("setup.json", "r") as handle:
+ lines = [
+ re.sub(
+ '"version": ([^,]+),', '"version": "{}",'.format(new_ver), l.rstrip()
+ )
+ for l in handle
+ ]
+ with open("setup.json", "w") as handle:
+ handle.write("\n".join(lines))
+ handle.write("\n")
+
+ with open("aiida_optimade/config.json", "r") as handle:
+ lines = [
+ re.sub(
+ '"version": ([^,]+),', '"version": "{}",'.format(new_ver), l.rstrip()
+ )
+ for l in handle
+ ]
+ with open("aiida_optimade/config.json", "w") as handle:
+ handle.write("\n".join(lines))
+ handle.write("\n")
+
+ print("Bumped version to {}".format(new_ver))
+
+
+@task
+def optimade_req(_, ver=""):
+ """Update the optimade-python-tools minimum version requirement"""
+
+ if not ver:
+ raise Exception("Please specify --ver='Major.Minor.Patch'")
+
+ with open("setup.py", "r") as handle:
+ lines = [
+ re.sub("optimade~=([^,]+)", f'optimade~={ver}"', l.rstrip()) for l in handle
+ ]
+ with open("setup.py", "w") as handle:
+ handle.write("\n".join(lines))
+ handle.write("\n")
+
+ with open("README.md", "r") as handle:
+ lines = [
+ re.sub(
+ "https://raw.githubusercontent.com/Materials-Consortia/"
+ "optimade-python-tools/v([^,]+)/.ci/",
+ "https://raw.githubusercontent.com/Materials-Consortia/"
+ f"optimade-python-tools/v{ver}/.ci/",
+ l.rstrip("\n"),
+ )
+ for l in handle
+ ]
+ with open("README.md", "w") as handle:
+ handle.write("\n".join(lines))
+ handle.write("\n")
+
+ print("Bumped OPTiMaDe Python Tools version requirement to {}".format(ver))
diff --git a/aiida_optimade/tests/test_server.py b/tests/test_server.py
similarity index 85%
rename from aiida_optimade/tests/test_server.py
rename to tests/test_server.py
index bb0fd85a..e93cb1a2 100644
--- a/aiida_optimade/tests/test_server.py
+++ b/tests/test_server.py
@@ -1,4 +1,7 @@
-# pylint: disable=no-member,wrong-import-position
+# pylint: disable=wrong-import-position,ungrouped-imports,useless-suppression
+# pylint: disable=missing-class-docstring,no-self-use,missing-function-docstring
+# pylint: disable=too-few-public-methods,too-many-public-methods
+
import os
import unittest
@@ -9,10 +12,6 @@
from aiida_optimade.config import CONFIG
from optimade.validator import ImplementationValidator
-# this must be changed before app is imported
-# some tests currently depend on this value remaining at 5
-CONFIG.page_limit = 5 # noqa: E402
-
# Use specific AiiDA profile
if os.getenv("AIIDA_PROFILE", None) is None:
os.environ["AIIDA_PROFILE"] = "optimade_v1_aiida_sqla"
@@ -29,14 +28,14 @@
EntryInfoResource,
)
-from aiida_optimade.main import app
+from aiida_optimade.main import APP
from aiida_optimade.routers import structures, info
# need to explicitly set base_url, as the default "http://testserver"
-# does not validate as pydantic UrlStr model
-app.include_router(structures.router)
-app.include_router(info.router)
-CLIENT = TestClient(app, base_url="http://localhost:5000/optimade")
+# does not validate as pydantic AnyUrl model
+APP.include_router(structures.ROUTER)
+APP.include_router(info.ROUTER)
+CLIENT = TestClient(APP, base_url="http://localhost:5000/optimade")
@pytest.mark.skip("References has not yet been implemented.")
@@ -148,20 +147,28 @@ def test_structures_endpoint_data(self):
assert self.json_response["meta"]["more_data_available"]
def test_get_next_responses(self):
- cursor = self.json_response["data"].copy()
+ total_data = self.json_response["meta"]["data_available"]
+ page_limit = 5
+
+ response = self.client.get(self.request_str + f"?page_limit={page_limit}")
+ json_response = response.json()
+ assert response.status_code == 200, f"Request failed: {response.json()}"
+
+ cursor = json_response["data"].copy()
+ assert json_response["meta"]["more_data_available"]
more_data_available = True
- next_request = self.json_response["links"]["next"]
+ next_request = json_response["links"]["next"]
id_ = len(cursor)
- while more_data_available and id_ < CONFIG.page_limit * 5:
+ while more_data_available and id_ < page_limit * 3:
next_response = self.client.get(next_request).json()
next_request = next_response["links"]["next"]
cursor.extend(next_response["data"])
more_data_available = next_response["meta"]["more_data_available"]
if more_data_available:
- assert len(next_response["data"]) == CONFIG.page_limit
+ assert len(next_response["data"]) == page_limit
else:
- assert len(next_response["data"]) == 1089 % CONFIG.page_limit
+ assert len(next_response["data"]) == total_data % page_limit
id_ += len(next_response["data"])
assert len(cursor) == id_
@@ -203,7 +210,11 @@ class TestFilterTests(unittest.TestCase):
"Un-skip when a fix for optimade-python-tools issue #102 is in place."
)
def test_custom_field(self):
- request = f'/structures?filter={CONFIG.provider["prefix"]}{CONFIG.provider_fields["structures"][0]}="2019-11-19T18:42:25.844780+01:00"'
+ request = (
+ f'/structures?filter={CONFIG.provider["prefix"]}'
+ f'{CONFIG.provider_fields["structures"][0]}'
+ '="2019-11-19T18:42:25.844780+01:00"'
+ )
expected_ids = ["1"]
self._check_response(request, expected_ids)
@@ -227,6 +238,19 @@ def test_gt_none(self):
expected_ids = []
self._check_response(request, expected_ids)
+ def test_rhs_statements(self):
+ request = "/structures?filter=18=5280"
+ request = (
+ f"/structures?filter={CONFIG.provider['prefix']}"
+ f"{CONFIG.provider_fields['structures'][0]} IS KNOWN AND nsites>=5280"
+ )
expected_ids = ["302", "683"]
self._check_response(request, expected_ids)
@@ -382,7 +409,10 @@ def test_brackets(self):
expected_ids = ["382", "574", "658", "1055"]
self._check_response(request, expected_ids)
- request = '/structures?filter=(elements HAS "Ga" AND nelements=7) OR (elements HAS "Ga" AND nsites=464)'
+ request = (
+ '/structures?filter=(elements HAS "Ga" AND nelements=7) OR '
+ '(elements HAS "Ga" AND nsites=464)'
+ )
expected_ids = ["574", "658"]
self._check_response(request, expected_ids)