From 9ef98abfd4fd5bdda0a7fa60881b0ef3b5c68382 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 16 Feb 2022 23:43:32 +0000 Subject: [PATCH 1/7] Bump google-github-actions/setup-gcloud from 0.4.0 to 0.5.1 Bumps [google-github-actions/setup-gcloud](https://github.com/google-github-actions/setup-gcloud) from 0.4.0 to 0.5.1. - [Release notes](https://github.com/google-github-actions/setup-gcloud/releases) - [Changelog](https://github.com/google-github-actions/setup-gcloud/blob/master/CHANGELOG.md) - [Commits](https://github.com/google-github-actions/setup-gcloud/compare/e0f83f24f541c30f1e9fe3c966963a10b8e647f5...04141d8a7edfc8c679682f23e7bbbe05cbe32bb3) --- updated-dependencies: - dependency-name: google-github-actions/setup-gcloud dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] --- .github/workflows/ci-model-regression-on-schedule.yml | 4 ++-- .github/workflows/ci-model-regression.yml | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/ci-model-regression-on-schedule.yml b/.github/workflows/ci-model-regression-on-schedule.yml index 59cd55af13aa..410f5a53ffe6 100644 --- a/.github/workflows/ci-model-regression-on-schedule.yml +++ b/.github/workflows/ci-model-regression-on-schedule.yml @@ -94,7 +94,7 @@ jobs: ./gomplate -f .github/runner/github-runner-deployment.yaml.tmpl -o runner_deployment.yaml # Setup gcloud CLI - - uses: google-github-actions/setup-gcloud@e0f83f24f541c30f1e9fe3c966963a10b8e647f5 + - uses: google-github-actions/setup-gcloud@04141d8a7edfc8c679682f23e7bbbe05cbe32bb3 with: version: "${{ env.GCLOUD_VERSION }}" service_account_key: ${{ secrets.GKE_SA_RASA_CI_CD_GPU_RASA_CI_CD }} @@ -668,7 +668,7 @@ jobs: steps: # Setup gcloud CLI - - uses: google-github-actions/setup-gcloud@e0f83f24f541c30f1e9fe3c966963a10b8e647f5 + - uses: google-github-actions/setup-gcloud@04141d8a7edfc8c679682f23e7bbbe05cbe32bb3 with: version: "${{ env.GCLOUD_VERSION }}" service_account_key: ${{ secrets.GKE_SA_RASA_CI_CD_GPU_RASA_CI_CD }} diff --git a/.github/workflows/ci-model-regression.yml b/.github/workflows/ci-model-regression.yml index b2241c74e48b..ab66aaaec5b0 100644 --- a/.github/workflows/ci-model-regression.yml +++ b/.github/workflows/ci-model-regression.yml @@ -202,7 +202,7 @@ jobs: gomplate -f .github/runner/github-runner-deployment.yaml.tmpl -o runner_deployment.yaml # Setup gcloud CLI - - uses: google-github-actions/setup-gcloud@e0f83f24f541c30f1e9fe3c966963a10b8e647f5 + - uses: google-github-actions/setup-gcloud@04141d8a7edfc8c679682f23e7bbbe05cbe32bb3 with: version: "${{ env.GCLOUD_VERSION }}" service_account_key: ${{ secrets.GKE_SA_RASA_CI_CD_GPU_RASA_CI_CD }} @@ -872,7 +872,7 @@ jobs: steps: # Setup gcloud CLI - - uses: google-github-actions/setup-gcloud@e0f83f24f541c30f1e9fe3c966963a10b8e647f5 + - uses: google-github-actions/setup-gcloud@04141d8a7edfc8c679682f23e7bbbe05cbe32bb3 with: version: "${{ env.GCLOUD_VERSION }}" service_account_key: ${{ secrets.GKE_SA_RASA_CI_CD_GPU_RASA_CI_CD }} From d5f04e24e59ce9c71b78eb1e42edfacf6d20f117 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 16 Feb 2022 23:43:39 +0000 Subject: [PATCH 2/7] Bump actions/github-script from 4.0.2 to 6 Bumps [actions/github-script](https://github.com/actions/github-script) from 4.0.2 to 6. 
- [Release notes](https://github.com/actions/github-script/releases) - [Commits](https://github.com/actions/github-script/compare/a3e7071a34d7e1f219a8a4de9a5e0a34d1ee1293...9ac08808f993958e9de277fe43a64532a609130e) --- updated-dependencies: - dependency-name: actions/github-script dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] --- .github/workflows/ci-model-regression-on-schedule.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/ci-model-regression-on-schedule.yml b/.github/workflows/ci-model-regression-on-schedule.yml index 59cd55af13aa..e23bb283d7c7 100644 --- a/.github/workflows/ci-model-regression-on-schedule.yml +++ b/.github/workflows/ci-model-regression-on-schedule.yml @@ -383,7 +383,7 @@ jobs: - name: Check duplicate issue if: failure() && github.event_name == 'schedule' - uses: actions/github-script@a3e7071a34d7e1f219a8a4de9a5e0a34d1ee1293 # v4 + uses: actions/github-script@9ac08808f993958e9de277fe43a64532a609130e # v4 id: issue-exists with: result-encoding: string @@ -412,7 +412,7 @@ jobs: - name: Create GitHub Issue 📬 id: create-issue if: failure() && steps.issue-exists.outputs.result == 'false' && github.event_name == 'schedule' - uses: actions/github-script@a3e7071a34d7e1f219a8a4de9a5e0a34d1ee1293 # v4 + uses: actions/github-script@9ac08808f993958e9de277fe43a64532a609130e # v4 with: # do not use GITHUB_TOKEN here because it wouldn't trigger subsequent workflows github-token: ${{ secrets.RASABOT_GITHUB_TOKEN }} @@ -581,7 +581,7 @@ jobs: - name: Check duplicate issue if: steps.performance.outputs.is_dropped == 'true' - uses: actions/github-script@a3e7071a34d7e1f219a8a4de9a5e0a34d1ee1293 # v4 + uses: actions/github-script@9ac08808f993958e9de277fe43a64532a609130e # v4 id: issue-exists with: result-encoding: string @@ -608,7 +608,7 @@ jobs: - name: Create GitHub Issue 📬 id: create-issue if: steps.performance.outputs.is_dropped == 'true' && steps.issue-exists.outputs.result == 'false' - uses: actions/github-script@a3e7071a34d7e1f219a8a4de9a5e0a34d1ee1293 # v4 + uses: actions/github-script@9ac08808f993958e9de277fe43a64532a609130e # v4 with: # do not use GITHUB_TOKEN here because it wouldn't trigger subsequent workflows github-token: ${{ secrets.RASABOT_GITHUB_TOKEN }} From 9b570d3693d6470ba4ce0e20bd8e2a9783a49a67 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 17 Feb 2022 19:21:54 +0000 Subject: [PATCH 3/7] Bump pytest-timeout from 1.4.2 to 2.1.0 Bumps [pytest-timeout](https://github.com/pytest-dev/pytest-timeout) from 1.4.2 to 2.1.0. - [Release notes](https://github.com/pytest-dev/pytest-timeout/releases) - [Commits](https://github.com/pytest-dev/pytest-timeout/compare/1.4.2...2.1.0) --- updated-dependencies: - dependency-name: pytest-timeout dependency-type: direct:development update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] --- poetry.lock | 43 ++++++++++++++++++++++++++++++------------- pyproject.toml | 2 +- 2 files changed, 31 insertions(+), 14 deletions(-) diff --git a/poetry.lock b/poetry.lock index be0509a2673b..3e1e35bd6cd5 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2159,14 +2159,14 @@ resolved_reference = "6c96e77db3b9530060ba1796f46e1481d7c4dd98" [[package]] name = "pytest-timeout" -version = "1.4.2" -description = "py.test plugin to abort hanging tests" +version = "2.1.0" +description = "pytest plugin to abort hanging tests" category = "dev" optional = false -python-versions = "*" +python-versions = ">=3.6" [package.dependencies] -pytest = ">=3.6.0" +pytest = ">=5.0.0" [[package]] name = "pytest-xdist" @@ -3446,7 +3446,7 @@ transformers = ["transformers"] [metadata] lock-version = "1.1" python-versions = ">=3.7,<3.9" -content-hash = "d58b68d407a8771ba07902dec0c0dc466b4ea2a25ecb5332a445ca37b9d84250" +content-hash = "c483c3745aa5791421ff95373e2508cbbd5f61dcc75f8e95f9e0d92cd2c985f9" [metadata.files] absl-py = [ @@ -3569,7 +3569,7 @@ black = [ {file = "black-21.7b0.tar.gz", hash = "sha256:c8373c6491de9362e39271630b65b964607bc5c79c83783547d76c839b3aa219"}, ] blis = [ - {file = "blis-0.7.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:98eba77b1e1fde7813bc0453ab78b6ae2067f5bc0fe9e3abc671b2895cfecf33"}, + {file = "blis-0.7.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5812a7c04561ae7332cf730f57d9f82cbd12c5f86a5bfad66ee244e51d06266d"}, {file = "blis-0.7.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9eecfce3d8fce61dede7b0ae0dffa461c22072437b6cde85587db0c1aa75b450"}, {file = "blis-0.7.5-cp310-cp310-win_amd64.whl", hash = "sha256:0e476931f0d5703a21c77e7f69b8ebdeeea493fc7858a86f627ac2b376a12c8d"}, {file = "blis-0.7.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:5966ddf3bce84aa7bb09ce4ca059309602fa63280a5d5e5365bb2a294bd5a138"}, @@ -3774,7 +3774,7 @@ cycler = [ {file = "cycler-0.11.0.tar.gz", hash = "sha256:9c87405839a19696e837b3b818fed3f5f69f16f1eec1a1ad77e043dcea9c772f"}, ] cymem = [ - {file = "cymem-2.0.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:2b4e27e739f09f16c7c0190f962ffe60dab39cb6a229d5c13e274d16f46a17e8"}, + {file = "cymem-2.0.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:700540b68e96a7056d0691d467df2bbaaf0934a3e6fe2383669998cbee19580a"}, {file = "cymem-2.0.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:971cf0a8437dfb4185c3049c086e463612fe849efadc0f5cc153fc81c501da7d"}, {file = "cymem-2.0.6-cp310-cp310-win_amd64.whl", hash = "sha256:6b0d1a6b0a1296f31fa9e4b7ae5ea49394084ecc883b1ae6fec4844403c43468"}, {file = "cymem-2.0.6-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b8e1c18bb00800425576710468299153caad20c64ddb6819d40a6a34e21ee21c"}, @@ -3973,6 +3973,7 @@ greenlet = [ {file = "greenlet-1.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:97e5306482182170ade15c4b0d8386ded995a07d7cc2ca8f27958d34d6736497"}, {file = "greenlet-1.1.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e6a36bb9474218c7a5b27ae476035497a6990e21d04c279884eb10d9b290f1b1"}, {file = "greenlet-1.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:abb7a75ed8b968f3061327c433a0fbd17b729947b400747c334a9c29a9af6c58"}, + {file = "greenlet-1.1.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b336501a05e13b616ef81ce329c0e09ac5ed8c732d9ba7e3e983fcc1a9e86965"}, {file = 
"greenlet-1.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:14d4f3cd4e8b524ae9b8aa567858beed70c392fdec26dbdb0a8a418392e71708"}, {file = "greenlet-1.1.2-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:17ff94e7a83aa8671a25bf5b59326ec26da379ace2ebc4411d690d80a7fbcf23"}, {file = "greenlet-1.1.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9f3cba480d3deb69f6ee2c1825060177a22c7826431458c697df88e6aeb3caee"}, @@ -3985,6 +3986,7 @@ greenlet = [ {file = "greenlet-1.1.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9d29ca8a77117315101425ec7ec2a47a22ccf59f5593378fc4077ac5b754fce"}, {file = "greenlet-1.1.2-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:21915eb821a6b3d9d8eefdaf57d6c345b970ad722f856cd71739493ce003ad08"}, {file = "greenlet-1.1.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eff9d20417ff9dcb0d25e2defc2574d10b491bf2e693b4e491914738b7908168"}, + {file = "greenlet-1.1.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:b8c008de9d0daba7b6666aa5bbfdc23dcd78cafc33997c9b7741ff6353bafb7f"}, {file = "greenlet-1.1.2-cp36-cp36m-win32.whl", hash = "sha256:32ca72bbc673adbcfecb935bb3fb1b74e663d10a4b241aaa2f5a75fe1d1f90aa"}, {file = "greenlet-1.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:f0214eb2a23b85528310dad848ad2ac58e735612929c8072f6093f3585fd342d"}, {file = "greenlet-1.1.2-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:b92e29e58bef6d9cfd340c72b04d74c4b4e9f70c9fa7c78b674d1fec18896dc4"}, @@ -3993,6 +3995,7 @@ greenlet = [ {file = "greenlet-1.1.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e12bdc622676ce47ae9abbf455c189e442afdde8818d9da983085df6312e7a1"}, {file = "greenlet-1.1.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8c790abda465726cfb8bb08bd4ca9a5d0a7bd77c7ac1ca1b839ad823b948ea28"}, {file = "greenlet-1.1.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f276df9830dba7a333544bd41070e8175762a7ac20350786b322b714b0e654f5"}, + {file = "greenlet-1.1.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c5d5b35f789a030ebb95bff352f1d27a93d81069f2adb3182d99882e095cefe"}, {file = "greenlet-1.1.2-cp37-cp37m-win32.whl", hash = "sha256:64e6175c2e53195278d7388c454e0b30997573f3f4bd63697f88d855f7a6a1fc"}, {file = "greenlet-1.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:b11548073a2213d950c3f671aa88e6f83cda6e2fb97a8b6317b1b5b33d850e06"}, {file = "greenlet-1.1.2-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:9633b3034d3d901f0a46b7939f8c4d64427dfba6bbc5a36b1a67364cf148a1b0"}, @@ -4001,6 +4004,7 @@ greenlet = [ {file = "greenlet-1.1.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e859fcb4cbe93504ea18008d1df98dee4f7766db66c435e4882ab35cf70cac43"}, {file = "greenlet-1.1.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:00e44c8afdbe5467e4f7b5851be223be68adb4272f44696ee71fe46b7036a711"}, {file = "greenlet-1.1.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec8c433b3ab0419100bd45b47c9c8551248a5aee30ca5e9d399a0b57ac04651b"}, + {file = "greenlet-1.1.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2bde6792f313f4e918caabc46532aa64aa27a0db05d75b20edfc5c6f46479de2"}, {file = "greenlet-1.1.2-cp38-cp38-win32.whl", hash = "sha256:288c6a76705dc54fba69fbcb59904ae4ad768b4c768839b8ca5fdadec6dd8cfd"}, {file = "greenlet-1.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:8d2f1fb53a421b410751887eb4ff21386d119ef9cde3797bf5e7ed49fb51a3b3"}, {file = 
"greenlet-1.1.2-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:166eac03e48784a6a6e0e5f041cfebb1ab400b394db188c48b3a84737f505b67"}, @@ -4009,6 +4013,7 @@ greenlet = [ {file = "greenlet-1.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1692f7d6bc45e3200844be0dba153612103db241691088626a33ff1f24a0d88"}, {file = "greenlet-1.1.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7227b47e73dedaa513cdebb98469705ef0d66eb5a1250144468e9c3097d6b59b"}, {file = "greenlet-1.1.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ff61ff178250f9bb3cd89752df0f1dd0e27316a8bd1465351652b1b4a4cdfd3"}, + {file = "greenlet-1.1.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0051c6f1f27cb756ffc0ffbac7d2cd48cb0362ac1736871399a739b2885134d3"}, {file = "greenlet-1.1.2-cp39-cp39-win32.whl", hash = "sha256:f70a9e237bb792c7cc7e44c531fd48f5897961701cdaa06cf22fc14965c496cf"}, {file = "greenlet-1.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:013d61294b6cd8fe3242932c1c5e36e5d1db2c8afb58606c5a67efce62c1f5fd"}, {file = "greenlet-1.1.2.tar.gz", hash = "sha256:e30f5ea4ae2346e62cedde8794a56858a67b878dd79f7df76a0767e356b1744a"}, @@ -4486,7 +4491,7 @@ multidict = [ {file = "multidict-5.2.0.tar.gz", hash = "sha256:0dd1c93edb444b33ba2274b66f63def8a327d607c6c790772f448a53b6ea59ce"}, ] murmurhash = [ - {file = "murmurhash-1.0.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a814d559afe2a97ad40accf21ce96e8b04a3ff5a08f80c02b7acd427dbb7d567"}, + {file = "murmurhash-1.0.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1431d817e1fff1ed35f8dc54dd5b4d70165ec98076de8aca351805f8037293f3"}, {file = "murmurhash-1.0.6-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c7b8cc4a8db1c821b80f8ca70a25c3166b14d68ecef8693a117c6a0b1d74ace"}, {file = "murmurhash-1.0.6-cp310-cp310-win_amd64.whl", hash = "sha256:e40790fdaf65213d70da4ed9229f16f6d6376310dc8fc23eacc98e6151c6ae7e"}, {file = "murmurhash-1.0.6-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:a78d53f047c3410ce4c589d9b47090f628f844ed5694418144e63cfe7f3da7e9"}, @@ -4689,7 +4694,7 @@ pluggy = [ {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, ] preshed = [ - {file = "preshed-3.0.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a9683730127658b531120b4ed5cff1f2a567318ab75e9ab0f22cc84ae1486c23"}, + {file = "preshed-3.0.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:66a71ced487516cf81fd0431a3a843514262ae2f33e9a7688b87562258fa75d5"}, {file = "preshed-3.0.6-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c98f725d8478f3ade4ab1ea00f50a92d2d9406d37276bc46fd8bab1d47452c4"}, {file = "preshed-3.0.6-cp310-cp310-win_amd64.whl", hash = "sha256:ea8aa9610837e907e8442e79300df0a861bfdb4dcaf026a5d9642a688ad04815"}, {file = "preshed-3.0.6-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:e03ae3eee961106a517fcd827b5a7c51f7317236b3e665c989054ab8dc381d28"}, @@ -4751,6 +4756,11 @@ psutil = [ {file = "psutil-5.9.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:742c34fff804f34f62659279ed5c5b723bb0195e9d7bd9907591de9f8f6558e2"}, {file = "psutil-5.9.0-cp310-cp310-win32.whl", hash = "sha256:8293942e4ce0c5689821f65ce6522ce4786d02af57f13c0195b40e1edb1db61d"}, {file = "psutil-5.9.0-cp310-cp310-win_amd64.whl", hash = 
"sha256:9b51917c1af3fa35a3f2dabd7ba96a2a4f19df3dec911da73875e1edaf22a40b"}, + {file = "psutil-5.9.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:e9805fed4f2a81de98ae5fe38b75a74c6e6ad2df8a5c479594c7629a1fe35f56"}, + {file = "psutil-5.9.0-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c51f1af02334e4b516ec221ee26b8fdf105032418ca5a5ab9737e8c87dafe203"}, + {file = "psutil-5.9.0-cp36-cp36m-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:32acf55cb9a8cbfb29167cd005951df81b567099295291bcfd1027365b36591d"}, + {file = "psutil-5.9.0-cp36-cp36m-win32.whl", hash = "sha256:e5c783d0b1ad6ca8a5d3e7b680468c9c926b804be83a3a8e95141b05c39c9f64"}, + {file = "psutil-5.9.0-cp36-cp36m-win_amd64.whl", hash = "sha256:d62a2796e08dd024b8179bd441cb714e0f81226c352c802fca0fd3f89eeacd94"}, {file = "psutil-5.9.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3d00a664e31921009a84367266b35ba0aac04a2a6cad09c550a89041034d19a0"}, {file = "psutil-5.9.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7779be4025c540d1d65a2de3f30caeacc49ae7a2152108adeaf42c7534a115ce"}, {file = "psutil-5.9.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:072664401ae6e7c1bfb878c65d7282d4b4391f1bc9a56d5e03b5a490403271b5"}, @@ -5031,8 +5041,8 @@ pytest-forked = [ ] pytest-sanic = [] pytest-timeout = [ - {file = "pytest-timeout-1.4.2.tar.gz", hash = "sha256:20b3113cf6e4e80ce2d403b6fb56e9e1b871b510259206d40ff8d609f48bda76"}, - {file = "pytest_timeout-1.4.2-py2.py3-none-any.whl", hash = "sha256:541d7aa19b9a6b4e475c759fd6073ef43d7cdc9a92d95644c260076eb257a063"}, + {file = "pytest-timeout-2.1.0.tar.gz", hash = "sha256:c07ca07404c612f8abbe22294b23c368e2e5104b521c1790195561f37e1ac3d9"}, + {file = "pytest_timeout-2.1.0-py3-none-any.whl", hash = "sha256:f6f50101443ce70ad325ceb4473c4255e9d74e3c7cd0ef827309dfa4c0d975c6"}, ] pytest-xdist = [ {file = "pytest-xdist-2.5.0.tar.gz", hash = "sha256:4580deca3ff04ddb2ac53eba39d76cb5dd5edeac050cb6fbc768b0dd712b4edf"}, @@ -5209,6 +5219,10 @@ rsa = [ {file = "ruamel.yaml-0.16.13.tar.gz", hash = "sha256:bb48c514222702878759a05af96f4b7ecdba9b33cd4efcf25c86b882cef3a942"}, ] "ruamel.yaml.clib" = [ + {file = "ruamel.yaml.clib-0.2.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:6e7be2c5bcb297f5b82fee9c665eb2eb7001d1050deaba8471842979293a80b0"}, + {file = "ruamel.yaml.clib-0.2.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:221eca6f35076c6ae472a531afa1c223b9c29377e62936f61bc8e6e8bdc5f9e7"}, + {file = "ruamel.yaml.clib-0.2.6-cp310-cp310-win32.whl", hash = "sha256:1070ba9dd7f9370d0513d649420c3b362ac2d687fe78c6e888f5b12bf8bc7bee"}, + {file = "ruamel.yaml.clib-0.2.6-cp310-cp310-win_amd64.whl", hash = "sha256:77df077d32921ad46f34816a9a16e6356d8100374579bc35e15bab5d4e9377de"}, {file = "ruamel.yaml.clib-0.2.6-cp35-cp35m-macosx_10_6_intel.whl", hash = "sha256:cfdb9389d888c5b74af297e51ce357b800dd844898af9d4a547ffc143fa56751"}, {file = "ruamel.yaml.clib-0.2.6-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:7b2927e92feb51d830f531de4ccb11b320255ee95e791022555971c466af4527"}, {file = "ruamel.yaml.clib-0.2.6-cp35-cp35m-win32.whl", hash = "sha256:ada3f400d9923a190ea8b59c8f60680c4ef8a4b0dfae134d2f2ff68429adfab5"}, @@ -5472,7 +5486,7 @@ sqlalchemy = [ {file = "SQLAlchemy-1.4.31.tar.gz", hash = 
"sha256:582b59d1e5780a447aada22b461e50b404a9dc05768da1d87368ad8190468418"}, ] srsly = [ - {file = "srsly-2.4.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:834229df7377386e9990fd245e1ae12a72152997fd159a782a798b638721a7b2"}, + {file = "srsly-2.4.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5e22bbc1a20abf749fa53adf101c36bc369ec63f496c7a44bf4f5f287d724900"}, {file = "srsly-2.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:004d29a5abc0fe632434359c0be170490a69c4dce2c3de8a769944c37da7bb4b"}, {file = "srsly-2.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:7ced7ec4993b4d4ad73cc442f8f7a518368348054d510864b1aa149e8d71654d"}, {file = "srsly-2.4.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:801c7e6e32c6a4721ab78ab7dafd01074fdb144f4876c09b25305c98f95c470f"}, @@ -5546,12 +5560,15 @@ tensorflow-io-gcs-filesystem = [ {file = "tensorflow_io_gcs_filesystem-0.24.0-cp310-cp310-win_amd64.whl", hash = "sha256:2f67d19a2f2579dc55f1590faf48c2e882cabb860992b5a9c7edb0ed8b3eb187"}, {file = "tensorflow_io_gcs_filesystem-0.24.0-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:cde835e68b2b43ddade07c999e7c3251bcd62b1ff165c34fbe9fc6e0f12c3ac9"}, {file = "tensorflow_io_gcs_filesystem-0.24.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:71c00638c9b6048480095f2738dfefd8f4b2e7b534190c91d699aee769bfa86e"}, + {file = "tensorflow_io_gcs_filesystem-0.24.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f63d70d7fce10c63f21bdd8e72244958afc0c495966831a547f038543c9633f7"}, {file = "tensorflow_io_gcs_filesystem-0.24.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d1eb5e9be62040c5a249ae8adaae7e61f65b59541139e4d6767157f25a224bf5"}, {file = "tensorflow_io_gcs_filesystem-0.24.0-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:cc093f160f79526d31f6070a3ddc000868d737a36ccf40984128661563383601"}, {file = "tensorflow_io_gcs_filesystem-0.24.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6e65009770a05a3b55c5f782348f785e5034d277a727832811ad737bd857c8c9"}, + {file = "tensorflow_io_gcs_filesystem-0.24.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:658764aaaf9419ddefb3daa95bdc84e5210c691ff73b8ac2606d5c839040206b"}, {file = "tensorflow_io_gcs_filesystem-0.24.0-cp38-cp38-win_amd64.whl", hash = "sha256:aa90b9a34ea8da4dbd534f77746d67375714db869524da889193c3042352679a"}, {file = "tensorflow_io_gcs_filesystem-0.24.0-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:825f396388748038ad38c35b091311982081f93a5db8ca9763fc874c3f555e6c"}, {file = "tensorflow_io_gcs_filesystem-0.24.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cbc71b3925508bf796644a0083a6f9284f71404654f53092bece701383a69520"}, + {file = "tensorflow_io_gcs_filesystem-0.24.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ae96b20b973b1c3bbf2c068409035ead45177447ef51701f4e726f67cadc4695"}, {file = "tensorflow_io_gcs_filesystem-0.24.0-cp39-cp39-win_amd64.whl", hash = "sha256:2862e0869453ce1f872a28d1362768ee078ec227ea587dd69164081dea6d7177"}, ] tensorflow-text = [ @@ -5573,7 +5590,7 @@ terminaltables = [ {file = "terminaltables-3.1.10.tar.gz", hash = "sha256:ba6eca5cb5ba02bba4c9f4f985af80c54ec3dccf94cfcd190154386255e47543"}, ] thinc = [ - {file = "thinc-8.0.13-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ad8794a76725b85847528fd1a56471d5ac00f4104da8efb065ba572238e381a2"}, + {file = "thinc-8.0.13-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:f818b9f012169a11beb3561c43dc52080588e50cf495733e492efab8b9b4135e"}, {file = "thinc-8.0.13-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f520daf45b7f42a04363852df43be1b423ae42d9327709d74f6c3279b3f73778"}, {file = "thinc-8.0.13-cp310-cp310-win_amd64.whl", hash = "sha256:2b217059c9e126220b77e7d6c9da56912c4e1eb4e8a11af14f17752e198e88cc"}, {file = "thinc-8.0.13-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:0f956c693d180209075703072fd226a24408cbe80eb67bd3b6eea407f61cb283"}, diff --git a/pyproject.toml b/pyproject.toml index 6a10f3c9f359..cadd331e735c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -149,7 +149,7 @@ towncrier = "^21.3.0" toml = "^0.10.0" pep440-version-utils = "^0.3.0" pydoc-markdown = "^3.10.3" -pytest-timeout = "^1.4.2" +pytest-timeout = "^2.1.0" mypy = "^0.910" bandit = "^1.6.3" types-pkg-resources = "^0.1.3" From c502f1ac2e4a5b5528aca05440c5715a617fa987 Mon Sep 17 00:00:00 2001 From: Samuel Nogueira Bacelar Date: Mon, 21 Feb 2022 18:39:48 -0300 Subject: [PATCH 4/7] Revert "Bump google-github-actions/setup-gcloud from 0.4.0 to 0.5.1" --- .github/workflows/ci-model-regression-on-schedule.yml | 4 ++-- .github/workflows/ci-model-regression.yml | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/ci-model-regression-on-schedule.yml b/.github/workflows/ci-model-regression-on-schedule.yml index bf7ca8f8c7b0..e23bb283d7c7 100644 --- a/.github/workflows/ci-model-regression-on-schedule.yml +++ b/.github/workflows/ci-model-regression-on-schedule.yml @@ -94,7 +94,7 @@ jobs: ./gomplate -f .github/runner/github-runner-deployment.yaml.tmpl -o runner_deployment.yaml # Setup gcloud CLI - - uses: google-github-actions/setup-gcloud@04141d8a7edfc8c679682f23e7bbbe05cbe32bb3 + - uses: google-github-actions/setup-gcloud@e0f83f24f541c30f1e9fe3c966963a10b8e647f5 with: version: "${{ env.GCLOUD_VERSION }}" service_account_key: ${{ secrets.GKE_SA_RASA_CI_CD_GPU_RASA_CI_CD }} @@ -668,7 +668,7 @@ jobs: steps: # Setup gcloud CLI - - uses: google-github-actions/setup-gcloud@04141d8a7edfc8c679682f23e7bbbe05cbe32bb3 + - uses: google-github-actions/setup-gcloud@e0f83f24f541c30f1e9fe3c966963a10b8e647f5 with: version: "${{ env.GCLOUD_VERSION }}" service_account_key: ${{ secrets.GKE_SA_RASA_CI_CD_GPU_RASA_CI_CD }} diff --git a/.github/workflows/ci-model-regression.yml b/.github/workflows/ci-model-regression.yml index ab66aaaec5b0..b2241c74e48b 100644 --- a/.github/workflows/ci-model-regression.yml +++ b/.github/workflows/ci-model-regression.yml @@ -202,7 +202,7 @@ jobs: gomplate -f .github/runner/github-runner-deployment.yaml.tmpl -o runner_deployment.yaml # Setup gcloud CLI - - uses: google-github-actions/setup-gcloud@04141d8a7edfc8c679682f23e7bbbe05cbe32bb3 + - uses: google-github-actions/setup-gcloud@e0f83f24f541c30f1e9fe3c966963a10b8e647f5 with: version: "${{ env.GCLOUD_VERSION }}" service_account_key: ${{ secrets.GKE_SA_RASA_CI_CD_GPU_RASA_CI_CD }} @@ -872,7 +872,7 @@ jobs: steps: # Setup gcloud CLI - - uses: google-github-actions/setup-gcloud@04141d8a7edfc8c679682f23e7bbbe05cbe32bb3 + - uses: google-github-actions/setup-gcloud@e0f83f24f541c30f1e9fe3c966963a10b8e647f5 with: version: "${{ env.GCLOUD_VERSION }}" service_account_key: ${{ secrets.GKE_SA_RASA_CI_CD_GPU_RASA_CI_CD }} From 13c59ab47db1b6985e038df55352a50862c50594 Mon Sep 17 00:00:00 2001 From: Samuel Nogueira Bacelar Date: Mon, 21 Feb 2022 18:40:02 -0300 Subject: [PATCH 5/7] Revert "Bump actions/github-script from 4.0.2 to 6" --- 
.github/workflows/ci-model-regression-on-schedule.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/ci-model-regression-on-schedule.yml b/.github/workflows/ci-model-regression-on-schedule.yml index bf7ca8f8c7b0..410f5a53ffe6 100644 --- a/.github/workflows/ci-model-regression-on-schedule.yml +++ b/.github/workflows/ci-model-regression-on-schedule.yml @@ -383,7 +383,7 @@ jobs: - name: Check duplicate issue if: failure() && github.event_name == 'schedule' - uses: actions/github-script@9ac08808f993958e9de277fe43a64532a609130e # v4 + uses: actions/github-script@a3e7071a34d7e1f219a8a4de9a5e0a34d1ee1293 # v4 id: issue-exists with: result-encoding: string @@ -412,7 +412,7 @@ jobs: - name: Create GitHub Issue 📬 id: create-issue if: failure() && steps.issue-exists.outputs.result == 'false' && github.event_name == 'schedule' - uses: actions/github-script@9ac08808f993958e9de277fe43a64532a609130e # v4 + uses: actions/github-script@a3e7071a34d7e1f219a8a4de9a5e0a34d1ee1293 # v4 with: # do not use GITHUB_TOKEN here because it wouldn't trigger subsequent workflows github-token: ${{ secrets.RASABOT_GITHUB_TOKEN }} @@ -581,7 +581,7 @@ jobs: - name: Check duplicate issue if: steps.performance.outputs.is_dropped == 'true' - uses: actions/github-script@9ac08808f993958e9de277fe43a64532a609130e # v4 + uses: actions/github-script@a3e7071a34d7e1f219a8a4de9a5e0a34d1ee1293 # v4 id: issue-exists with: result-encoding: string @@ -608,7 +608,7 @@ jobs: - name: Create GitHub Issue 📬 id: create-issue if: steps.performance.outputs.is_dropped == 'true' && steps.issue-exists.outputs.result == 'false' - uses: actions/github-script@9ac08808f993958e9de277fe43a64532a609130e # v4 + uses: actions/github-script@a3e7071a34d7e1f219a8a4de9a5e0a34d1ee1293 # v4 with: # do not use GITHUB_TOKEN here because it wouldn't trigger subsequent workflows github-token: ${{ secrets.RASABOT_GITHUB_TOKEN }} From e48697273f374f7eb67aec90363a51c127623a67 Mon Sep 17 00:00:00 2001 From: Samuel Nogueira Date: Mon, 11 Apr 2022 17:21:27 -0300 Subject: [PATCH 6/7] merging --- .../ci-model-regression-on-schedule.yml | 4 +- .github/workflows/ci-model-regression.yml | 4 +- .github/workflows/vulnerability-scan.yml | 58 ------------------- CHANGELOG.mdx | 31 ++++++++++ LICENSE.txt | 2 +- NOTICE | 4 +- README.md | 2 +- changelog/10412.bugfix.md | 14 ----- changelog/10798.doc.md | 2 - changelog/10897.improvement.md | 1 + changelog/10940.doc.md | 2 - changelog/10957.doc.md | 1 - data/test_domains/restaurant_form.yml | 28 ++++++--- docs/docs/policies.mdx | 8 +++ poetry.lock | 41 ++++++++++--- pyproject.toml | 4 +- rasa/cli/data.py | 2 +- rasa/core/policies/ted_policy.py | 27 +++++---- .../core/policies/unexpected_intent_policy.py | 3 +- rasa/nlu/convert.py | 14 +---- rasa/utils/tensorflow/constants.py | 2 + rasa/version.py | 2 +- tests/cli/test_rasa_data.py | 56 ++++++++++++++++-- tests/core/test_training.py | 2 +- 24 files changed, 178 insertions(+), 136 deletions(-) delete mode 100644 .github/workflows/vulnerability-scan.yml delete mode 100644 changelog/10412.bugfix.md delete mode 100644 changelog/10798.doc.md create mode 100644 changelog/10897.improvement.md delete mode 100644 changelog/10940.doc.md delete mode 100644 changelog/10957.doc.md diff --git a/.github/workflows/ci-model-regression-on-schedule.yml b/.github/workflows/ci-model-regression-on-schedule.yml index b7b95906c480..dfa07589ad1c 100644 --- a/.github/workflows/ci-model-regression-on-schedule.yml +++ b/.github/workflows/ci-model-regression-on-schedule.yml 
@@ -112,7 +112,7 @@ jobs: ./gomplate -f .github/runner/github-runner-deployment.yaml.tmpl -o runner_deployment.yaml # Setup gcloud CLI - - uses: google-github-actions/setup-gcloud@e0f83f24f541c30f1e9fe3c966963a10b8e647f5 + - uses: google-github-actions/setup-gcloud@37a9333538a8350a13fe9d8fa03e0d4742a1ad2e with: version: "${{ env.GCLOUD_VERSION }}" service_account_key: ${{ secrets.GKE_SA_RASA_CI_CD_GPU_RASA_CI_CD }} @@ -683,7 +683,7 @@ jobs: steps: # Setup gcloud CLI - - uses: google-github-actions/setup-gcloud@e0f83f24f541c30f1e9fe3c966963a10b8e647f5 + - uses: google-github-actions/setup-gcloud@37a9333538a8350a13fe9d8fa03e0d4742a1ad2e with: version: "${{ env.GCLOUD_VERSION }}" service_account_key: ${{ secrets.GKE_SA_RASA_CI_CD_GPU_RASA_CI_CD }} diff --git a/.github/workflows/ci-model-regression.yml b/.github/workflows/ci-model-regression.yml index 05c4670accc5..3c3f3a1df4f6 100644 --- a/.github/workflows/ci-model-regression.yml +++ b/.github/workflows/ci-model-regression.yml @@ -220,7 +220,7 @@ jobs: gomplate -f .github/runner/github-runner-deployment.yaml.tmpl -o runner_deployment.yaml # Setup gcloud CLI - - uses: google-github-actions/setup-gcloud@e0f83f24f541c30f1e9fe3c966963a10b8e647f5 + - uses: google-github-actions/setup-gcloud@37a9333538a8350a13fe9d8fa03e0d4742a1ad2e with: version: "${{ env.GCLOUD_VERSION }}" service_account_key: ${{ secrets.GKE_SA_RASA_CI_CD_GPU_RASA_CI_CD }} @@ -903,7 +903,7 @@ jobs: steps: # Setup gcloud CLI - - uses: google-github-actions/setup-gcloud@e0f83f24f541c30f1e9fe3c966963a10b8e647f5 + - uses: google-github-actions/setup-gcloud@37a9333538a8350a13fe9d8fa03e0d4742a1ad2e with: version: "${{ env.GCLOUD_VERSION }}" service_account_key: ${{ secrets.GKE_SA_RASA_CI_CD_GPU_RASA_CI_CD }} diff --git a/.github/workflows/vulnerability-scan.yml b/.github/workflows/vulnerability-scan.yml deleted file mode 100644 index 5a5300356727..000000000000 --- a/.github/workflows/vulnerability-scan.yml +++ /dev/null @@ -1,58 +0,0 @@ -name: Vulnerability Scan - -on: - schedule: - # Run every third day - - cron: 0 0 * * */3 - workflow_dispatch: - -jobs: - scan: - name: Vulnerability scan - runs-on: ubuntu-latest - - env: - DOCKERFILE: Dockerfile_with_poetry_lock - - steps: - - name: Checkout git repository 🕝 - uses: actions/checkout@v3 - - - name: Add poetry.lock 🔒 - # Trivy depends on the presence of `poetry.lock` to scan Python dependencies - run: | - BASE_IMAGE=rasa/rasa:latest-full - docker pull $BASE_IMAGE - - # Create Dockerfile which includes poetry.lock - tee -a $DOCKERFILE << END - FROM $BASE_IMAGE - COPY poetry.lock . - END - - IMAGE_NAME=rasa/rasa:latest-scanned - docker build -f $DOCKERFILE -t $IMAGE_NAME . 
- - echo "IMAGE_WITH_POETRY_LOCK=$IMAGE_NAME" >> $GITHUB_ENV - - - name: Scan image 🕵️‍♀️🕵️‍♂️ - uses: lazy-actions/gitrivy@6edf95fdc8b1fb841a974536316b209cd16f9000 # v3 - with: - # Needs the token so it can create an issue once a vulnerability was found - # do not use GITHUB_TOKEN here because it wouldn't trigger subsequent workflows - token: ${{ secrets.RASABOT_GITHUB_TOKEN }} - image: ${{ env.IMAGE_WITH_POETRY_LOCK }} - ignore_unfixed: true - issue_label: "tool:trivy,type:vulnerability" - severity: "LOW,MEDIUM,HIGH,CRITICAL" - fail_on_vulnerabilities: true - - - name: Notify slack on failure - if: failure() - env: - SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }} - uses: voxmedia/github-action-slack-notify-build@212e9f7a9ca33368c8dd879d6053972128258985 - with: - channel_id: ${{ secrets.SLACK_ALERTS_CHANNEL_ID }} - status: FAILED - color: danger diff --git a/CHANGELOG.mdx b/CHANGELOG.mdx index c1f281f770c5..dc49a9018124 100644 --- a/CHANGELOG.mdx +++ b/CHANGELOG.mdx @@ -16,6 +16,37 @@ https://github.com/RasaHQ/rasa/tree/main/changelog/ . --> +## [3.0.10] - 2022-03-15## [3.0.10] - 2022-03-15 +### Bugfixes +- [#10675](https://github.com/rasahq/rasa/issues/10675): Fix broken conversion from Rasa JSON NLU data to Rasa YAML NLU data. + + +## [3.0.9] - 2022-03-11 +### Bugfixes +- [#10412](https://github.com/rasahq/rasa/issues/10412): Fix Socket IO connection issues by upgrading sanic to v21.12. + + The bug is caused by [an invalid function signature](https://github.com/sanic-org/sanic/issues/2272) and is fixed in [v21.12](https://sanic.readthedocs.io/en/v21.12.1/sanic/changelog.html#version-21-12-0). + + This update brings some deprecations in `sanic`: + + - Sanic and Blueprint may no longer have arbitrary properties attached to them + - Fixed this by moving user defined properties to the `instance.ctx` object + - Sanic and Blueprint forced to have compliant names + - Fixed this by using string literal names instead of the module's name via _\_name\_\_ + - `sanic.exceptions.abort` is Deprecated + - Fixed by replacing it with `sanic.exceptions.SanicException` + - `sanic.response.StreamingHTTPResponse` is deprecated + - Fixed by replacing it with sanic.response.ResponseStream +- [#10447](https://github.com/rasahq/rasa/issues/10447): Update `rasa data validate` to not fail when `active_loop` is `null` + +### Improved Documentation +- [#10798](https://github.com/rasahq/rasa/issues/10798): Updated the `model_confidence` parameter in `TEDPolicy` and `DIETClassifier`. The `linear_norm` is removed + as it is no longer supported. +- [#10940](https://github.com/rasahq/rasa/issues/10940): Added an additional step to `Receiving Messages` section in slack.mdx documentation. After a slack update this + additional step is needed to allow direct messages to the bot. +- [#10957](https://github.com/rasahq/rasa/issues/10957): Backport the updated deployment docs to 3.0.x. + + ## [3.0.8] - 2022-02-11 ### Improvements - [#10394](https://github.com/rasahq/rasa/issues/10394): Allow single tokens in rasa end-to-end test files to be annotated with multiple entities. diff --git a/LICENSE.txt b/LICENSE.txt index 7b68ecb1547e..0975a64b87b9 100644 --- a/LICENSE.txt +++ b/LICENSE.txt @@ -186,7 +186,7 @@ same "printed page" as the copyright notice for easier identification within third-party archives. - Copyright 2021 Rasa Technologies GmbH + Copyright 2022 Rasa Technologies GmbH Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
diff --git a/NOTICE b/NOTICE index 91c4241fefc6..b03765f59ee2 100644 --- a/NOTICE +++ b/NOTICE @@ -1,5 +1,5 @@ Rasa Technologies GmbH -Copyright 2016-2019 Rasa Technologies GmbH +Copyright 2016-2022 Rasa Technologies GmbH This product includes software from spaCy (https://github.com/explosion/spaCy), -under the MIT License (see: rasa.nlu.extractors.crf_entity_extractor). \ No newline at end of file +under the MIT License (see: rasa.nlu.extractors.crf_entity_extractor). diff --git a/README.md b/README.md index a86774dfe424..36c824d81e0c 100644 --- a/README.md +++ b/README.md @@ -403,7 +403,7 @@ Currently, this means the following minor versions will receive bugfixes updates ## License Licensed under the Apache License, Version 2.0. -Copyright 2021 Rasa Technologies GmbH. [Copy of the license](LICENSE.txt). +Copyright 2022 Rasa Technologies GmbH. [Copy of the license](LICENSE.txt). A list of the Licenses of the dependencies of the project can be found at the bottom of the diff --git a/changelog/10412.bugfix.md b/changelog/10412.bugfix.md deleted file mode 100644 index 2dac79c043d8..000000000000 --- a/changelog/10412.bugfix.md +++ /dev/null @@ -1,14 +0,0 @@ -Fix Socket IO connection issues by upgrading sanic to v21.12. - -The bug is caused by [an invalid function signature](https://github.com/sanic-org/sanic/issues/2272) and is fixed in [v21.12](https://sanic.readthedocs.io/en/v21.12.1/sanic/changelog.html#version-21-12-0). - -This update brings some deprecations in `sanic`: - -- Sanic and Blueprint may no longer have arbitrary properties attached to them - - Fixed this by moving user defined properties to the `instance.ctx` object -- Sanic and Blueprint forced to have compliant names - - Fixed this by using string literal names instead of the module's name via _\_name\_\_ -- `sanic.exceptions.abort` is Deprecated - - Fixed by replacing it with `sanic.exceptions.SanicException` -- `sanic.response.StreamingHTTPResponse` is deprecated - - Fixed by replacing it with sanic.response.ResponseStream diff --git a/changelog/10798.doc.md b/changelog/10798.doc.md deleted file mode 100644 index 8fe93086e740..000000000000 --- a/changelog/10798.doc.md +++ /dev/null @@ -1,2 +0,0 @@ -Updated the `model_confidence` parameter in `TEDPolicy` and `DIETClassifier`. The `linear_norm` is removed -as it is no longer supported. \ No newline at end of file diff --git a/changelog/10897.improvement.md b/changelog/10897.improvement.md new file mode 100644 index 000000000000..7c5aca8b4f5f --- /dev/null +++ b/changelog/10897.improvement.md @@ -0,0 +1 @@ +Added flag `use_gpu` to `TEDPolicy` and `UnexpecTEDIntentPolicy` that can be used to enable training on CPU even when a GPU is available. \ No newline at end of file diff --git a/changelog/10940.doc.md b/changelog/10940.doc.md deleted file mode 100644 index d8c4b5f788d8..000000000000 --- a/changelog/10940.doc.md +++ /dev/null @@ -1,2 +0,0 @@ -Added an additional step to `Receiving Messages` section in slack.mdx documentation. After a slack update this -additional step is needed to allow direct messages to the bot. diff --git a/changelog/10957.doc.md b/changelog/10957.doc.md deleted file mode 100644 index d6e92c3a8f87..000000000000 --- a/changelog/10957.doc.md +++ /dev/null @@ -1 +0,0 @@ -Backport the updated deployment docs to 3.0.x. 
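The 3.0.10 changelog entry above ([#10675]) and the `rasa/nlu/convert.py` hunk later in this patch re-enable YAML output for `rasa data convert nlu`. A minimal sketch of the same conversion done programmatically, assuming Rasa 3.0.10 is installed; the file paths are placeholders:

```python
# Placeholder paths; assumes Rasa 3.0.10 with the convert fix applied.
import rasa.shared.nlu.training_data.loading
from rasa.shared.nlu.training_data.formats.rasa_yaml import RasaYAMLWriter

# Load JSON-format NLU training data (the loader detects the input format).
training_data = rasa.shared.nlu.training_data.loading.load_data(
    "data/demo-rasa.json", "en"
)

# Serialize the same training data as Rasa YAML and write it out.
with open("data/nlu_converted.yml", "w", encoding="utf-8") as f:
    f.write(RasaYAMLWriter().dumps(training_data))
```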
diff --git a/data/test_domains/restaurant_form.yml b/data/test_domains/restaurant_form.yml index 5196f2b201a6..dfce8651cc19 100644 --- a/data/test_domains/restaurant_form.yml +++ b/data/test_domains/restaurant_form.yml @@ -1,6 +1,7 @@ # all hashtags are comments :) intents: - inform + - request_restaurant entities: - number @@ -9,22 +10,31 @@ entities: slots: cuisine: type: text + influence_conversation: false + mappings: + - type: from_entity + entity: cuisine people: type: text - vegetarian: - type: bool - requested_slot: - type: text - influence_conversation: false - search_results: - type: any influence_conversation: false - + mappings: + - type: from_entity + entity: number + conditions: + - active_loop: restaurant_form + requested_slot: people responses: utter_ask_people: - - text: For how many people?" + - text: "For how many people?" utter_ask_cuisine: - text: "What cuisine would you like?" actions: - action_search_restaurants + + +forms: + restaurant_form: + required_slots: + - cuisine + - people diff --git a/docs/docs/policies.mdx b/docs/docs/policies.mdx index 0babc892d491..b7e3a40802fd 100644 --- a/docs/docs/policies.mdx +++ b/docs/docs/policies.mdx @@ -212,6 +212,10 @@ If you want to fine-tune your model, start by modifying the following parameters Please try using `linear_norm` as the value for `model_confidence`. This should make it easier to [handle actions predicted with low confidence](./fallback-handoff.mdx#handling-low-action-confidence). +* `use_gpu`: + This parameter defines whether a GPU (if available) will be used during training. By default, `TEDPolicy` will be trained on GPU + if a GPU is available (i.e. `use_gpu` is `True`). To enforce that `TEDPolicy` uses only the CPU for training, set `use_gpu` to `False`. + The above configuration parameters are the ones you should configure to fit your model to your data. However, additional parameters exist that can be adapted. @@ -593,6 +597,10 @@ If you want to fine-tune model's performance, start by modifying the following p trigger `action_unlikely_intent` for any of the negative examples that it has encountered during training. +* `use_gpu`: + This parameter defines whether a GPU (if available) will be used during training. By default, `UnexpecTEDIntentPolicy` will be trained on GPU + if a GPU is available (i.e. `use_gpu` is `True`). To enforce that `UnexpecTEDIntentPolicy` uses only the CPU for training, set `use_gpu` to `False`. + The above configuration parameters are the ones you should try tweaking according to your use case and training data. However, additional parameters exist that you could adapt. 
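The `use_gpu` bullets added to policies.mdx above describe the behaviour only in prose. A minimal sketch of the underlying device-selection pattern, assuming TensorFlow 2.x; it mirrors the `contextlib.nullcontext()` / `tf.device("/cpu:0")` switch that this patch adds to `ted_policy.py` further below, and the `training_device` helper is invented for the sketch:

```python
# Assumes TensorFlow 2.x; `training_device` is a helper invented for this sketch.
import contextlib
import tensorflow as tf


def training_device(use_gpu: bool):
    """Return a no-op context when GPU use is allowed, otherwise pin ops to the CPU."""
    return contextlib.nullcontext() if use_gpu else tf.device("/cpu:0")


with training_device(use_gpu=False):
    # Everything created here runs on the CPU even if a GPU is visible.
    x = tf.random.uniform((4, 4))
    result = tf.linalg.matmul(x, x)
```

In a project's `config.yml`, the flag would sit alongside the other policy parameters, e.g. `use_gpu: False` under `- name: TEDPolicy`.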
diff --git a/poetry.lock b/poetry.lock index e876daf58fad..1b73b316c809 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1140,6 +1140,21 @@ docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)"] perf = ["ipython"] testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)", "importlib-resources (>=1.3)"] +[[package]] +name = "importlib-resources" +version = "5.6.0" +description = "Read resources from Python packages" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} + +[package.extras] +docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)"] + [[package]] name = "incremental" version = "21.3.0" @@ -1234,21 +1249,22 @@ testing = ["coverage (<5)", "pytest (>=3.5,!=3.7.3)", "pytest-checkdocs (>=1.2.3 [[package]] name = "jsonschema" -version = "3.2.0" +version = "4.4.0" description = "An implementation of JSON Schema validation for Python" category = "main" optional = false -python-versions = "*" +python-versions = ">=3.7" [package.dependencies] attrs = ">=17.4.0" importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} -pyrsistent = ">=0.14.0" -six = ">=1.11.0" +importlib-resources = {version = ">=1.4.0", markers = "python_version < \"3.9\""} +pyrsistent = ">=0.14.0,<0.17.0 || >0.17.0,<0.17.1 || >0.17.1,<0.17.2 || >0.17.2" +typing-extensions = {version = "*", markers = "python_version < \"3.8\""} [package.extras] -format = ["idna", "jsonpointer (>1.13)", "rfc3987", "strict-rfc3339", "webcolors"] -format_nongpl = ["idna", "jsonpointer (>1.13)", "webcolors", "rfc3986-validator (>0.1.0)", "rfc3339-validator"] +format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] +format_nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"] [[package]] name = "jwcrypto" @@ -3456,7 +3472,7 @@ transformers = ["transformers", "sentencepiece"] [metadata] lock-version = "1.1" python-versions = ">=3.7,<3.10" -content-hash = "5881b7bd0d328e2627fd1fad9558f9f3697044da09893f0b962581ca8765cfe6" +content-hash = "c8386ee02a95280911309cfd70e1c4a444774918bb012eefc16b13ac025aac19" [metadata.files] absl-py = [ @@ -4157,6 +4173,10 @@ importlib-metadata = [ {file = "importlib_metadata-4.11.2-py3-none-any.whl", hash = "sha256:d16e8c1deb60de41b8e8ed21c1a7b947b0bc62fab7e1d470bcdf331cea2e6735"}, {file = "importlib_metadata-4.11.2.tar.gz", hash = "sha256:b36ffa925fe3139b2f6ff11d6925ffd4fa7bc47870165e3ac260ac7b4f91e6ac"}, ] +importlib-resources = [ + {file = "importlib_resources-5.6.0-py3-none-any.whl", hash = "sha256:a9dd72f6cc106aeb50f6e66b86b69b454766dd6e39b69ac68450253058706bcc"}, + {file = "importlib_resources-5.6.0.tar.gz", hash = "sha256:1b93238cbf23b4cde34240dd8321d99e9bf2eb4bc91c0c99b2886283e7baad85"}, +] incremental = [ {file = "incremental-21.3.0-py2.py3-none-any.whl", hash = "sha256:92014aebc6a20b78a8084cdd5645eeaa7f74b8933f70fa3ada2cfbd1e3b54321"}, {file = "incremental-21.3.0.tar.gz", hash = "sha256:02f5de5aff48f6b9f665d99d48bfc7ec03b6e3943210de7cfc88856d755d6f57"}, @@ 
-4193,8 +4213,8 @@ jsonpickle = [ {file = "jsonpickle-2.0.0.tar.gz", hash = "sha256:0be49cba80ea6f87a168aa8168d717d00c6ca07ba83df3cec32d3b30bfe6fb9a"}, ] jsonschema = [ - {file = "jsonschema-3.2.0-py2.py3-none-any.whl", hash = "sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163"}, - {file = "jsonschema-3.2.0.tar.gz", hash = "sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a"}, + {file = "jsonschema-4.4.0-py3-none-any.whl", hash = "sha256:77281a1f71684953ee8b3d488371b162419767973789272434bbc3f29d9c8823"}, + {file = "jsonschema-4.4.0.tar.gz", hash = "sha256:636694eb41b3535ed608fe04129f26542b59ed99808b4f688aa32dcf55317a83"}, ] jwcrypto = [ {file = "jwcrypto-1.0-py2.py3-none-any.whl", hash = "sha256:db93a656d9a7a35dda5a68deb5c9f301f4e60507d8aef1559e0637b9ac497137"}, @@ -4266,8 +4286,10 @@ libclang = [ {file = "libclang-13.0.0-py2.py3-none-macosx_11_0_arm64.whl", hash = "sha256:069407eac2e20ea8f18212d28c6598db31014e7b8a77febc92e762ec133c3226"}, {file = "libclang-13.0.0-py2.py3-none-manylinux1_x86_64.whl", hash = "sha256:9c1e623340ccafe3a10a2abbc90f59593ff29f0c854f4ddb65b6220d9d998fb4"}, {file = "libclang-13.0.0-py2.py3-none-manylinux2014_aarch64.whl", hash = "sha256:b7de34393ed46c6cf7b22178d0d43cec2f2dab2f5f95450520a47fc1cf2df5ac"}, + {file = "libclang-13.0.0-py2.py3-none-manylinux2014_armv7l.whl", hash = "sha256:dfcdfc90eeb5d14f291e31c19b195fbcc29a8c0b2bd29fa331011f9536937a89"}, {file = "libclang-13.0.0-py2.py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:dcc7ecd83d91e23e95315d7aa6355ee8d45b43742ca1fb642583e0b2f935d50e"}, {file = "libclang-13.0.0-py2.py3-none-win_amd64.whl", hash = "sha256:b61dedc1b941f43acca1fa15df0a6669c6c3983197c6f3226ae03a766281dd37"}, + {file = "libclang-13.0.0-py2.py3-none-win_arm64.whl", hash = "sha256:b0acfcfbd1f6d411f654cf6ec4f09cecf0f80b3480e4c9f834d1dcb1f8bd6907"}, ] locket = [ {file = "locket-0.2.1-py2.py3-none-any.whl", hash = "sha256:12b6ada59d1f50710bca9704dbadd3f447dbf8dac6664575c1281cadab8e6449"}, @@ -5545,6 +5567,7 @@ tensorflow-hub = [ tensorflow-io-gcs-filesystem = [ {file = "tensorflow_io_gcs_filesystem-0.24.0-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:9c00f9a9880477b1dff0c71ee6734421ce99ac484ca2151793ebf2681fc0cb4c"}, {file = "tensorflow_io_gcs_filesystem-0.24.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b6ca3a9f751aa9c2f9851520e666d905ad14667281bbafeabe611b7b8f3e1bc5"}, + {file = "tensorflow_io_gcs_filesystem-0.24.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a9c7f26ef9248bdfccc91fdddd66623754a6b08bd4440a780f23feaed8c5be7"}, {file = "tensorflow_io_gcs_filesystem-0.24.0-cp310-cp310-win_amd64.whl", hash = "sha256:2f67d19a2f2579dc55f1590faf48c2e882cabb860992b5a9c7edb0ed8b3eb187"}, {file = "tensorflow_io_gcs_filesystem-0.24.0-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:cde835e68b2b43ddade07c999e7c3251bcd62b1ff165c34fbe9fc6e0f12c3ac9"}, {file = "tensorflow_io_gcs_filesystem-0.24.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:71c00638c9b6048480095f2738dfefd8f4b2e7b534190c91d699aee769bfa86e"}, diff --git a/pyproject.toml b/pyproject.toml index ffd594f5674a..94e1eeb357cb 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -9,7 +9,7 @@ exclude = "((.eggs | .git | .pytest_cache | build | dist))" [tool.poetry] name = "rasa" -version = "3.0.8" +version = "3.0.10" description = "Open source machine learning framework to automate text- and voice-based conversations: NLU, dialogue management, connect to Slack, 
Facebook, and more - Create chatbots and voice assistants" authors = [ "Rasa Technologies GmbH ",] maintainers = [ "Tom Bocklisch ",] @@ -87,7 +87,7 @@ webexteamssdk = ">=1.1.1,<1.7.0" mattermostwrapper = "~2.2" rocketchat_API = ">=0.6.31,<1.17.0" colorhash = "~1.0.2" -jsonschema = "~3.2" +jsonschema = ">=3.2,<4.5" packaging = ">=20.0,<21.0" pytz = ">=2019.1,<2022.0" rasa-sdk = "~3.0.5" diff --git a/rasa/cli/data.py b/rasa/cli/data.py index 3b56cc620815..b0d5dcef091d 100644 --- a/rasa/cli/data.py +++ b/rasa/cli/data.py @@ -224,7 +224,7 @@ def _validate_story_structure(validator: "Validator", args: argparse.Namespace) def _convert_nlu_data(args: argparse.Namespace) -> None: import rasa.nlu.convert - if args.format == "json": + if args.format in ["json", "yaml"]: rasa.nlu.convert.convert_training_data( args.data, args.out, args.format, args.language ) diff --git a/rasa/core/policies/ted_policy.py b/rasa/core/policies/ted_policy.py index a82189fb5a43..08df89130c63 100644 --- a/rasa/core/policies/ted_policy.py +++ b/rasa/core/policies/ted_policy.py @@ -4,6 +4,7 @@ from rasa.engine.recipes.default_recipe import DefaultV1Recipe from pathlib import Path from collections import defaultdict +import contextlib import numpy as np import tensorflow as tf @@ -119,6 +120,7 @@ SOFTMAX, BILOU_FLAG, EPOCH_OVERRIDE, + USE_GPU, ) @@ -341,6 +343,7 @@ def get_default_config() -> Dict[Text, Any]: POLICY_MAX_HISTORY: DEFAULT_MAX_HISTORY, # Determines the importance of policies, higher values take precedence POLICY_PRIORITY: DEFAULT_POLICY_PRIORITY, + USE_GPU: True, } def __init__( @@ -358,11 +361,9 @@ def __init__( super().__init__( config, model_storage, resource, execution_context, featurizer=featurizer ) - self.split_entities_config = rasa.utils.train_utils.init_split_entities( config[SPLIT_ENTITIES_BY_COMMA], SPLIT_ENTITIES_BY_COMMA_DEFAULT_VALUE ) - self._load_params(config) self.model = model @@ -724,7 +725,10 @@ def train( ) return self._resource - self.run_training(model_data, label_ids) + with ( + contextlib.nullcontext() if self.config["use_gpu"] else tf.device("/cpu:0") + ): + self.run_training(model_data, label_ids) self.persist() @@ -1074,13 +1078,16 @@ def _load( predict_data_example, ) = cls._construct_model_initialization_data(model_utilities["loaded_data"]) - model = cls._load_tf_model( - model_utilities, - model_data_example, - predict_data_example, - featurizer, - execution_context.is_finetuning, - ) + model = None + + with (contextlib.nullcontext() if config["use_gpu"] else tf.device("/cpu:0")): + model = cls._load_tf_model( + model_utilities, + model_data_example, + predict_data_example, + featurizer, + execution_context.is_finetuning, + ) return cls._load_policy_with_model( config, diff --git a/rasa/core/policies/unexpected_intent_policy.py b/rasa/core/policies/unexpected_intent_policy.py index 1f3aff795e97..f1910662bfcf 100644 --- a/rasa/core/policies/unexpected_intent_policy.py +++ b/rasa/core/policies/unexpected_intent_policy.py @@ -5,7 +5,6 @@ import numpy as np import tensorflow as tf - import rasa.utils.common from rasa.engine.graph import ExecutionContext from rasa.engine.recipes.default_recipe import DefaultV1Recipe @@ -100,6 +99,7 @@ LABEL_PAD_ID, POSITIVE_SCORES_KEY, NEGATIVE_SCORES_KEY, + USE_GPU, ) from rasa.utils.tensorflow import layers from rasa.utils.tensorflow.model_data import RasaModelData, FeatureArray, Data @@ -282,6 +282,7 @@ def get_default_config() -> Dict[Text, Any]: LOSS_TYPE: CROSS_ENTROPY, # Determines the importance of policies, higher values take precedence 
POLICY_PRIORITY: UNLIKELY_INTENT_POLICY_PRIORITY, + USE_GPU: True, } def __init__( diff --git a/rasa/nlu/convert.py b/rasa/nlu/convert.py index 7683eae51ece..6b8cbf7fe8b8 100644 --- a/rasa/nlu/convert.py +++ b/rasa/nlu/convert.py @@ -1,7 +1,7 @@ -import argparse import os from typing import Text, Union +from rasa.shared.nlu.training_data.formats.rasa_yaml import RasaYAMLWriter from rasa.shared.utils.cli import print_error import rasa.shared.nlu.training_data.loading from rasa.nlu.utils import write_to_file @@ -31,18 +31,10 @@ def convert_training_data( ) return + td = rasa.shared.nlu.training_data.loading.load_data(data_file, language) if output_format == "json": - td = rasa.shared.nlu.training_data.loading.load_data(data_file, language) output = td.nlu_as_json(indent=2) else: - print_error( - "Did not recognize output format. Supported output formats: 'json' and " - "'md'. Specify the desired output format with '--format'." - ) - return + output = RasaYAMLWriter().dumps(td) write_to_file(out_file, output) - - -def main(args: argparse.Namespace) -> None: - convert_training_data(args.data, args.out, args.format, args.language) diff --git a/rasa/utils/tensorflow/constants.py b/rasa/utils/tensorflow/constants.py index 3262b4aa2e2f..5006dee2488d 100644 --- a/rasa/utils/tensorflow/constants.py +++ b/rasa/utils/tensorflow/constants.py @@ -110,3 +110,5 @@ TF_DETERMINISTIC_OPS = "TF_DETERMINISTIC_OPS" EPOCH_OVERRIDE = "epoch_override" + +USE_GPU = "use_gpu" diff --git a/rasa/version.py b/rasa/version.py index 06d68d64d165..cce5cca97caa 100644 --- a/rasa/version.py +++ b/rasa/version.py @@ -1,3 +1,3 @@ # this file will automatically be changed, # do not add anything but the version number here! -__version__ = "3.0.8" +__version__ = "3.0.10" diff --git a/tests/cli/test_rasa_data.py b/tests/cli/test_rasa_data.py index 45aa035cd937..31cae53714e0 100644 --- a/tests/cli/test_rasa_data.py +++ b/tests/cli/test_rasa_data.py @@ -6,11 +6,13 @@ from collections import namedtuple from typing import Callable, Text +from _pytest.fixtures import FixtureRequest from _pytest.monkeypatch import MonkeyPatch from _pytest.pytester import RunResult from rasa.cli import data from rasa.shared.constants import LATEST_TRAINING_DATA_FORMAT_VERSION from rasa.shared.importers.importer import TrainingDataImporter +from rasa.shared.nlu.training_data.formats import RasaYAMLReader from rasa.validator import Validator import rasa.shared.utils.io @@ -54,7 +56,7 @@ def test_data_split_nlu(run_in_simple_project: Callable[..., RunResult]): assert yml_file.exists(), f"{yml_file} file does not exist" -def test_data_convert_nlu(run_in_simple_project: Callable[..., RunResult]): +def test_data_convert_nlu_json(run_in_simple_project: Callable[..., RunResult]): result = run_in_simple_project( "data", "convert", @@ -71,6 +73,41 @@ def test_data_convert_nlu(run_in_simple_project: Callable[..., RunResult]): assert os.path.exists("out_nlu_data.json") +def test_data_convert_nlu_yml( + run: Callable[..., RunResult], tmp_path: Path, request: FixtureRequest +): + + target_file = tmp_path / "out.yml" + + # The request rootdir is required as the `testdir` fixture in `run` changes the + # working directory + test_data_dir = Path(request.config.rootdir, "data", "examples", "rasa") + source_file = (test_data_dir / "demo-rasa.json").absolute() + result = run( + "data", + "convert", + "nlu", + "--data", + str(source_file), + "--out", + str(target_file), + "-f", + "yaml", + ) + + assert result.ret == 0 + assert target_file.exists() + + actual_data = 
RasaYAMLReader().read(target_file) + expected = RasaYAMLReader().read(test_data_dir / "demo-rasa.yml") + + assert len(actual_data.training_examples) == len(expected.training_examples) + assert len(actual_data.entity_synonyms) == len(expected.entity_synonyms) + assert len(actual_data.regex_features) == len(expected.regex_features) + assert len(actual_data.lookup_tables) == len(expected.lookup_tables) + assert actual_data.entities == expected.entities + + def test_data_split_help(run: Callable[..., RunResult]): output = run("data", "split", "nlu", "--help") @@ -204,21 +241,28 @@ def test_validate_files_form_not_found_invalid_domain( data.validate_files(namedtuple("Args", args.keys())(*args.values())) -def test_validate_files_with_active_loop_null(tmp_path: Path): - file_name = tmp_path / "rules.yml" +@pytest.mark.parametrize( + ("file_type", "data_type"), [("stories", "story"), ("rules", "rule")] +) +def test_validate_files_with_active_loop_null( + file_type: Text, data_type: Text, tmp_path: Path +): + file_name = tmp_path / f"{file_type}.yml" file_name.write_text( f""" version: "{LATEST_TRAINING_DATA_FORMAT_VERSION}" - rules: - - rule: test path + {file_type}: + - {data_type}: test path steps: - intent: request_restaurant - action: restaurant_form + - active_loop: restaurant_form - active_loop: null + - action: action_search_restaurants """ ) args = { - "domain": "data/test_restaurantbot/domain.yml", + "domain": "data/test_domains/restaurant_form.yml", "data": [file_name], "max_history": None, "config": None, diff --git a/tests/core/test_training.py b/tests/core/test_training.py index 8a78e3d62508..1ab8b2bebc2d 100644 --- a/tests/core/test_training.py +++ b/tests/core/test_training.py @@ -31,7 +31,7 @@ def test_training_script_with_restart_stories(tmp_path: Path, domain_path: Text) assert Path(model_file).is_file() -@pytest.mark.timeout(120, func_only=True) +@pytest.mark.timeout(160, func_only=True) async def test_random_seed( tmp_path: Path, monkeypatch: MonkeyPatch, domain_path: Text, stories_path: Text ): From 049b2fbc88044bc60ba714de228c60785b545962 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 5 Dec 2022 12:02:02 +0000 Subject: [PATCH 7/7] Bump peterjgrainger/action-create-branch from 2.2.0 to 2.4.0 Bumps [peterjgrainger/action-create-branch](https://github.com/peterjgrainger/action-create-branch) from 2.2.0 to 2.4.0. - [Release notes](https://github.com/peterjgrainger/action-create-branch/releases) - [Commits](https://github.com/peterjgrainger/action-create-branch/compare/c2800a3a9edbba2218da6861fa46496cf8f3195a...08259812c8ebdbf1973747f9297e332fa078d3c1) --- updated-dependencies: - dependency-name: peterjgrainger/action-create-branch dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] --- .github/workflows/automatic-release-to-main-merger.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/automatic-release-to-main-merger.yml b/.github/workflows/automatic-release-to-main-merger.yml index 1fa81e4970f9..0d0d85903f3b 100644 --- a/.github/workflows/automatic-release-to-main-merger.yml +++ b/.github/workflows/automatic-release-to-main-merger.yml @@ -85,7 +85,7 @@ jobs: - name: Create new branch 🐣 if: always() - uses: peterjgrainger/action-create-branch@c2800a3a9edbba2218da6861fa46496cf8f3195a + uses: peterjgrainger/action-create-branch@08259812c8ebdbf1973747f9297e332fa078d3c1 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: