diff --git a/.github/ISSUE_TEMPLATE.md b/.github/ISSUE_TEMPLATE.md
index c0d18916bc9..8983abad838 100644
--- a/.github/ISSUE_TEMPLATE.md
+++ b/.github/ISSUE_TEMPLATE.md
@@ -48,5 +48,5 @@ output comes here
 ```
 
 ## Additional information you deem important:
-* issue happens only occasionally or under certain circumstances 
+* issue happens only occasionally or under certain circumstances
 * changes you did or observed in the environment
diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md
index cb7a84a2405..e0b58f1723a 100644
--- a/.github/PULL_REQUEST_TEMPLATE.md
+++ b/.github/PULL_REQUEST_TEMPLATE.md
@@ -40,4 +40,3 @@
 - [ ] I added tests to cover my changes.
 - [ ] My changes require further changes to the documentation.
 - [ ] I updated the documentation where necessary.
-
diff --git a/.markdownlint.yaml b/.markdownlint.yaml
new file mode 100644
index 00000000000..29a95153ebc
--- /dev/null
+++ b/.markdownlint.yaml
@@ -0,0 +1,45 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+MD001: false
+MD002: false
+MD003: false
+MD004: false
+MD005: false
+MD006: false
+MD007: false
+MD009: false
+MD010: false
+MD012: false
+MD013: false
+MD014: false
+MD019: false
+MD022: false
+MD023: false
+MD024: false
+MD026: false
+MD029: false
+MD030: false
+MD031: false
+MD032: false
+MD033: false
+MD034: false
+MD037: false
+MD040: false
+MD041: false
+MD042: false
+MD045: false
+MD046: false
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
new file mode 100644
index 00000000000..1574b138cb7
--- /dev/null
+++ b/.pre-commit-config.yaml
@@ -0,0 +1,198 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+default_stages: [commit, push]
+default_language_version:
+  # force all unspecified Python hooks to run python3
+  python: python3
+minimum_pre_commit_version: "1.20.0"
+repos:
+  - repo: meta
+    hooks:
+      - id: identity
+      - id: check-hooks-apply
+  # - repo: https://github.com/thlorenz/doctoc.git
+  #   rev: v2.0.0
+  #   hooks:
+  #     - id: doctoc
+  #       name: Add TOC for Markdown files
+  #       files: ^CONTRIBUTING\.md$
+  #       args:
+  #         - "--maxlevel"
+  #         - "3"
+  # - repo: https://github.com/asottile/dead
+  #   rev: v1.4.0
+  #   hooks:
+  #     - id: dead
+  #       name: Dead simple python dead code detection
+  #       exclude: ^tests/dat/actions/malformed\.py$
+  - repo: https://github.com/asottile/reorder_python_imports
+    rev: v2.4.0
+    hooks:
+      - id: reorder-python-imports
+        exclude: ^tests/dat/actions/malformed\.py$
+  - repo: https://github.com/psf/black
+    rev: 20.8b1
+    hooks:
+      - id: black
+        name: Run black on Python files
+        exclude: ^tools/admin/wskadmin$|^tests/dat/actions/malformed\.py$
+  # - repo: https://github.com/jumanjihouse/pre-commit-hooks
+  #   rev: 2.1.5
+  #   hooks:
+  #     - id: shellcheck
+  - repo: git://github.com/Lucas-C/pre-commit-hooks
+    rev: v1.1.9
+    hooks:
+      # - id: insert-license
+      #   name: Add license for all other files
+      #   exclude: ^\.github/.*$
+      #   args:
+      #     - --comment-style
+      #     - "|#|"
+      #     - --license-filepath
+      #     - license-templates/LICENSE.txt
+      #     - --fuzzy-match-generates-todo
+      #   files: >
+      #     \.cfg$|\.conf$|\.ini$|\.properties$|Dockerfile.*$
+      # - id: insert-license
+      #   name: Add license for all JS/CSS files
+      #   files: \.(js|css)$
+      #   exclude: ^\.github/.*$
+      #   args:
+      #     - --comment-style
+      #     - "/*!| *| */"
+      #     - --license-filepath
+      #     - license-templates/LICENSE.txt
+      #     - --fuzzy-match-generates-todo
+      # - id: insert-license
+      #   name: Add license for all Markdown files
+      #   files: ^README\.md$
+      #   exclude: ^\.github/.*$
+      #   args:
+      #     - --comment-style
+      #     - ""
+      #     - --license-filepath
+      #     - license-templates/LICENSE.txt
+      #     - --fuzzy-match-generates-todo
+      # - id: insert-license
+      #   name: Add license for all Shell files
+      #   exclude: ^\.github/.*$
+      #   files: \.(sh|bash)$
+      #   args:
+      #     - --comment-style
+      #     - "|#|"
+      #     - --license-filepath
+      #     - license-templates/LICENSE.txt
+      #     - --fuzzy-match-generates-todo
+      # - id: insert-license
+      #   name: Add license for all Python files
+      #   exclude: ^\.github/.*$
+      #   types: [python]
+      #   args:
+      #     - --comment-style
+      #     - "|#|"
+      #     - --license-filepath
+      #     - license-templates/LICENSE.txt
+      #     - --fuzzy-match-generates-todo
+      # - id: insert-license
+      #   name: Add license for all XML files
+      #   exclude: ^\.github/.*$
+      #   files: \.xml$
+      #   args:
+      #     - --comment-style
+      #     - ""
+      #     - --license-filepath
+      #     - license-templates/LICENSE.txt
+      #     - --fuzzy-match-generates-todo
+      - id: insert-license
+        name: Add license for all YAML files
+        exclude: ^\.github/.*$
+        types: [yaml]
+        files: \.(yml|yaml)$
+        args:
+          - --comment-style
+          - "|#|"
+          - --license-filepath
+          - license-templates/LICENSE.txt
+          - --fuzzy-match-generates-todo
+# - id: forbid-tabs
+# - id: remove-tabs
+  - repo: https://github.com/pre-commit/pre-commit-hooks
+    rev: v3.4.0
+    hooks:
+      - id: check-added-large-files
+      - id: check-ast
+        exclude: ^tests/dat/actions/malformed\.py$
+      - id: check-builtin-literals
+        exclude: ^tests/dat/actions/malformed\.py$
+      - id: check-case-conflict
+      - id: check-docstring-first
+      - id: check-executables-have-shebangs
+      - id: check-json
+        exclude: ^tests/dat/actions/invalid.*\.json$
+      - id: check-merge-conflict
+      - id: check-vcs-permalinks
+      - id: check-symlinks
+      - id: check-xml
+      - id: check-yaml
+        exclude: ^tests/dat/apigw/local\.api\.bad\.yaml$
+      - id: debug-statements
+        exclude: ^tests/dat/actions/malformed\.py$
+      - id: destroyed-symlinks
+      # - id: detect-private-key
+      - id: end-of-file-fixer
+        files: \.(bal|bat|cfg|conf|cs|gradle|ini|java|md|properties|py|scala|sh|swift|txt|yml|yaml)$|^Dockerfile$
+      - id: fix-byte-order-marker
+        exclude: ^tests/dat/actions/unicode\.tests/src/dotnet2\.2/openwhisk-unicodetests-dotnet\.sln$
+      - id: fix-encoding-pragma
+        args:
+          - --remove
+      - id: mixed-line-ending
+      # - id: name-tests-test
+      # - id: pretty-format-json
+      - id: trailing-whitespace
+        exclude: ^\.github/ISSUE_TEMPLATE\.md$
+  - repo: https://github.com/pre-commit/pygrep-hooks
+    rev: v1.8.0
+    hooks:
+      - id: python-check-mock-methods
+      - id: python-no-eval
+      - id: python-no-log-warn
+  - repo: https://github.com/igorshubovych/markdownlint-cli
+    rev: v0.27.1
+    hooks:
+      - id: markdownlint
+        name: Run markdownlint
+        entry: markdownlint
+        files: \.(md|mdown|markdown)$
+        types: [markdown]
+  - repo: https://github.com/adrienverge/yamllint
+    rev: v1.26.0
+    hooks:
+      - id: yamllint
+        name: Check YAML files with yamllint
+        entry: yamllint --strict
+        files: \.(yml|yaml)$
+        types: [yaml]
+  # - repo: https://github.com/PyCQA/bandit
+  #   rev: 1.7.0
+  #   hooks:
+  #     - id: bandit
+  # - repo: https://gitlab.com/pycqa/flake8
+  #   rev: 3.8.4
+  #   hooks:
+  #     - id: flake8
diff --git a/.travis.yml b/.travis.yml
index 25d3d687671..c3a7c9d72c4 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -73,12 +73,12 @@ jobs:
       - ./tools/travis/checkAndUploadLogs.sh unit db
       name: "Unit Tests"
     - script:
-      - ./tools/travis/runSystemTests.sh
-      - ./tools/travis/checkAndUploadLogs.sh system
+        - ./tools/travis/runSystemTests.sh
+        - ./tools/travis/checkAndUploadLogs.sh system
       name: "System Tests"
     - script:
-      - ./tools/travis/runMultiRuntimeTests.sh
-      - ./tools/travis/checkAndUploadLogs.sh multi-runtime
+        - ./tools/travis/runMultiRuntimeTests.sh
+        - ./tools/travis/checkAndUploadLogs.sh multi-runtime
       name: "Multi-Runtime Tests"
     - script:
       - ./tools/travis/runStandaloneTests.sh
diff --git a/.yamllint.yaml b/.yamllint.yaml
new file mode 100644
index 00000000000..23912d4c3ec
--- /dev/null
+++ b/.yamllint.yaml
@@ -0,0 +1,29 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+extends: default
+
+ignore: |
+  /tests/dat/apigw/local.api.yaml
+  /tests/dat/apigw/local.api.bad.yaml
+
+rules:
+  colons: disable
+  comments: disable
+  comments-indentation: disable
+  document-start: disable
+  line-length: disable
+  truthy: disable
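The two lint configurations above are consumed by the pre-commit hooks defined earlier, but the underlying tools can also be run standalone. A minimal sketch of the equivalent direct invocations — the file paths here are arbitrary examples, not prescribed by the patch:

```
# Check a YAML file against .yamllint.yaml; --strict turns warnings into
# failures, matching the "yamllint --strict" entry in .pre-commit-config.yaml.
yamllint --strict -c .yamllint.yaml ansible/logs.yml

# Lint a Markdown file with the rule overrides from .markdownlint.yaml.
markdownlint --config .markdownlint.yaml CONTRIBUTING.md
```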
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index aa9c195a9ae..b22dd76ba76 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -20,7 +20,7 @@
 
 # Contributing to Apache OpenWhisk
 
-Anyone can contribute to the OpenWhisk project and we welcome your contributions.
+Anyone can contribute to the OpenWhisk project, and we welcome your contributions.
 
 There are multiple ways to contribute: report bugs, improve the docs, and
 contribute code, but you must follow these prerequisites and guidelines:
@@ -37,7 +37,7 @@
 Instructions on how to do this can be found here:
 [http://www.apache.org/licenses/#clas](http://www.apache.org/licenses/#clas)
 Sign the appropriate CLA and submit it to the Apache Software Foundation (ASF) secretary.
 You will receive a confirmation email from the ASF and be added to
-the following list: http://people.apache.org/unlistedclas.html. Once your name is on this list, you are done and your PR can be merged.
+the following list: http://people.apache.org/unlistedclas.html. Once your name is on this list, you are done, and your PR can be merged.
 Project committers will use this list to verify pull requests (PRs) come from
 contributors that have signed a CLA.
@@ -49,7 +49,7 @@ Please raise any bug reports or enhancement requests on the respective project r
 list to see if your issue has already been raised.
 
 A good bug report is one that make it easy for us to understand what you were
 trying to do and what went wrong.
-Provide as much context as possible so we can try to recreate the issue.
+Provide as much context as possible, so we can try to recreate the issue.
 A good enhancement request comes with an explanation of what you are trying to do
 and how that enhancement would help you.
@@ -69,3 +69,34 @@ code base. Some basic rules include:
 - all files must have the Apache license in the header.
 - all PRs must have passing builds for all operating systems.
 - the code is correctly formatted as defined in the [Scalariform plugin properties](tools/eclipse/scala.properties). If you use IntelliJ for development this [page](https://plugins.jetbrains.com/plugin/7480-scalariform) describes the setup and configuration of the plugin.
+
+#### Pre-commit
+
+A framework for managing and maintaining multi-language pre-commit hooks.
+Pre-commit can be [installed](https://pre-commit.com/#installation) with `pip`, `curl`, `brew` or `conda`.
+
+You need to first install pre-commit and then install the pre-commit hooks with `pre-commit install`.
+Now pre-commit will run automatically on git commit!
+
+It's usually a good idea to run the hooks against all the files when adding new hooks (usually pre-commit will only run on the changed files during git hooks).
+Use `pre-commit run --all-files` to check all files.
+
+You can run the hook with `id`: `check-hooks-apply` against all files with:
+
+```
+pre-commit run check-hooks-apply --all-files
+```
+
+You can update your hooks to the latest version automatically by running `pre-commit autoupdate`.
+
+##### Hooks
+
+The hooks run:
+
+- [Black](https://github.com/psf/black) - The uncompromising [Python](https://www.python.org/) code formatter.
+- [markdownlint-cli](https://github.com/igorshubovych/markdownlint-cli) - [markdownlint](https://github.com/DavidAnson/markdownlint) Command Line Interface.
+- [yamllint](https://github.com/adrienverge/yamllint) - A linter for [YAML](https://yaml.org/) files.
+
+##### References
+
+- [markdownlint Rules](https://github.com/DavidAnson/markdownlint#rules--aliases)
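The workflow described in the new CONTRIBUTING.md section condenses to a few commands. A minimal sketch, assuming installation via pip (curl, brew, or conda also work, per the pre-commit docs); the file path in the single-hook example is illustrative:

```
# One-time setup: install pre-commit and register its git hook in this clone.
pip install pre-commit
pre-commit install

# Run every configured hook against the entire tree, not just staged files.
pre-commit run --all-files

# Run a single hook against specific files.
pre-commit run black --files tools/admin/wskprop.py

# Bump all hook revisions in .pre-commit-config.yaml to their latest tags.
pre-commit autoupdate
```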
diff --git a/CREDITS.txt b/CREDITS.txt
index 231383545c7..15306271644 100644
--- a/CREDITS.txt
+++ b/CREDITS.txt
@@ -22,7 +22,7 @@ the initial donation:
 * JavaScript (JS) client library for the OpenWhisk platform.
 * openwhisk-client-swift:
 * Swift-based client SDK for OpenWhisk compatible with Swift 2.x and runs on iOS 9, WatchOS 2, and Darwin.
-* openwhisk-sdk-docker: 
+* openwhisk-sdk-docker:
 * SDK that shows how to create “Black box” Docker containers that can run Action (code).
 * openwhisk-client-go:
 * API Framework written in GoLang (in-progress)
@@ -56,7 +56,7 @@ the initial donation:
 * Collection of OpenWhisk tools for OS X implemented in Swift 3.
 * openwhisk-debugger
 * The OpenWhisk debugger project
-* openwhisk-podspecs: 
+* openwhisk-podspecs:
 * CocoaPods Podspecs repository for ‘openwhisk-client-swift’ subproject.
 * openwhisk-devtools:
 * Development tools for building and deploying Apache OpenWhisk
@@ -68,7 +68,7 @@ the initial donation:
 * Sample application with Message Hub and Object Store.
 * openwhisk-sample-slackbot:
 * A proof-of-concept Slackbot to invoke OpenWhisk actions.
- 
+
 The API Gateway code was a collaboration project jointly developed by both
 IBM Corporation and Adobe Systems Incorporated (http://www.adobe.com/) and a donation to the
 Apache OpenWhisk project by both companies.
diff --git a/ansible/callbacks/logformatter.py b/ansible/callbacks/logformatter.py
index c2aabb26a27..fad44a80d4f 100644
--- a/ansible/callbacks/logformatter.py
+++ b/ansible/callbacks/logformatter.py
@@ -17,11 +17,16 @@
 * limitations under the License.
 */
 """
-from __future__ import (absolute_import, division, print_function)
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
 import os
 import sys
 import textwrap
+
 from ansible.plugins.callback import CallbackBase
+
 __metaclass__ = type
 
 
@@ -35,15 +40,15 @@ def __init__(self):
 
     def emit(self, host, category, data):
         """Emit colorized output based upon data contents."""
         if type(data) == dict:
-            cmd = data['cmd'] if 'cmd' in data else None
-            msg = data['msg'] if 'msg' in data else None
-            stdout = data['stdout'] if 'stdout' in data else None
-            stderr = data['stderr'] if 'stderr' in data else None
-            reason = data['reason'] if 'reason' in data else None
+            cmd = data["cmd"] if "cmd" in data else None
+            msg = data["msg"] if "msg" in data else None
+            stdout = data["stdout"] if "stdout" in data else None
+            stderr = data["stderr"] if "stderr" in data else None
+            reason = data["reason"] if "reason" in data else None
 
             print()
             if cmd:
-                print(hilite('[%s]\n> %s' % (category, cmd), category, wrap = False))
+                print(hilite("[%s]\n> %s" % (category, cmd), category, wrap=False))
             if reason:
                 print(hilite(reason, category))
             if msg:
@@ -54,39 +59,39 @@ def emit(self, host, category, data):
             print(hilite(stderr, category))
 
     def runner_on_failed(self, host, res, ignore_errors=False):
-        self.emit(host, 'FAILED', res)
+        self.emit(host, "FAILED", res)
 
     def runner_on_ok(self, host, res):
         pass
 
     def runner_on_skipped(self, host, item=None):
-        self.emit(host, 'SKIPPED', '...')
+        self.emit(host, "SKIPPED", "...")
 
     def runner_on_unreachable(self, host, res):
-        self.emit(host, 'UNREACHABLE', res)
+        self.emit(host, "UNREACHABLE", res)
 
     def runner_on_async_failed(self, host, res, jid):
-        self.emit(host, 'FAILED', res)
+        self.emit(host, "FAILED", res)
 
 
-def hilite(msg, status, wrap = True):
+def hilite(msg, status, wrap=True):
     """Highlight message."""
+
     def supports_color():
-        if ((sys.platform != 'win32' or 'ANSICON' in os.environ) and
-                sys.stdout.isatty()):
+        if (sys.platform != "win32" or "ANSICON" in os.environ) and sys.stdout.isatty():
             return True
         else:
             return False
 
     if supports_color():
         attr = []
-        if status == 'FAILED':
+        if status
== "FAILED": # red - attr.append('31') + attr.append("31") else: # bold - attr.append('1') - text = '\x1b[%sm%s\x1b[0m' % (';'.join(attr), msg) + attr.append("1") + text = "\x1b[%sm%s\x1b[0m" % (";".join(attr), msg) else: text = msg return textwrap.fill(text, 80) if wrap else text diff --git a/ansible/elasticsearch.yml b/ansible/elasticsearch.yml index ed05e3de6d6..90fe4a7cee3 100644 --- a/ansible/elasticsearch.yml +++ b/ansible/elasticsearch.yml @@ -31,4 +31,4 @@ # 'host_group' above) to name host/elasticsearcher. name_prefix: "elasticsearch" roles: - - elasticsearch + - elasticsearch diff --git a/ansible/logs.yml b/ansible/logs.yml index 9109d078a60..9948398f8e8 100644 --- a/ansible/logs.yml +++ b/ansible/logs.yml @@ -57,25 +57,25 @@ - hosts: all:!ansible serial: 1 tasks: - - name: init var docker_host_flag - set_fact: - docker_host_flag: "" - - name: set host flag when using docker remote API - set_fact: - docker_host_flag: "--host tcp://{{ ansible_host }}:{{ docker.port }}" - when: environmentInformation.type != "local" - - name: get all docker containers - local_action: shell docker {{ docker_host_flag }} ps -a --format="{% raw %}{{.Names}}{% endraw %}" - register: container_names - - name: get logs from all containers - local_action: shell docker {{ docker_host_flag }} logs {{ item }} > "{{ openwhisk_home }}/logs/{{ item }}.log" 2>&1; exit 0 - with_items: "{{ container_names.stdout_lines | difference('whisk_docker_registry') }}" - when: "'docker' not in exclude_logs_from" - - name: workaround to make synchronize work - set_fact: - ansible_ssh_private_key_file: "{{ ansible_ssh_private_key_file }}" - when: ansible_ssh_private_key_file is defined - - name: fetch logs from all machines - synchronize: src="{{ whisk_logs_dir }}/" dest="{{ openwhisk_home }}/logs" mode=pull - when: "'machine' not in exclude_logs_from" - ignore_errors: true + - name: init var docker_host_flag + set_fact: + docker_host_flag: "" + - name: set host flag when using docker remote API + set_fact: + docker_host_flag: "--host tcp://{{ ansible_host }}:{{ docker.port }}" + when: environmentInformation.type != "local" + - name: get all docker containers + local_action: shell docker {{ docker_host_flag }} ps -a --format="{% raw %}{{.Names}}{% endraw %}" + register: container_names + - name: get logs from all containers + local_action: shell docker {{ docker_host_flag }} logs {{ item }} > "{{ openwhisk_home }}/logs/{{ item }}.log" 2>&1; exit 0 + with_items: "{{ container_names.stdout_lines | difference('whisk_docker_registry') }}" + when: "'docker' not in exclude_logs_from" + - name: workaround to make synchronize work + set_fact: + ansible_ssh_private_key_file: "{{ ansible_ssh_private_key_file }}" + when: ansible_ssh_private_key_file is defined + - name: fetch logs from all machines + synchronize: src="{{ whisk_logs_dir }}/" dest="{{ openwhisk_home }}/logs" mode=pull + when: "'machine' not in exclude_logs_from" + ignore_errors: true diff --git a/ansible/roles/cli/tasks/deploy.yml b/ansible/roles/cli/tasks/deploy.yml index 423375eea01..24220008151 100644 --- a/ansible/roles/cli/tasks/deploy.yml +++ b/ansible/roles/cli/tasks/deploy.yml @@ -60,9 +60,9 @@ - name: "Generate a list of individual tarballs to expand" find: - paths: "{{ openwhisk_cli.nginxdir.name }}" - patterns: '*.tgz' - recurse: true + paths: "{{ openwhisk_cli.nginxdir.name }}" + patterns: '*.tgz' + recurse: true register: individual_tarballs - name: "Unarchive the individual tarballs" @@ -74,9 +74,9 @@ - name: "Generate a list of individual zipfiles to 
expand" find: - paths: "{{ openwhisk_cli.nginxdir.name }}" - patterns: '*.zip' - recurse: true + paths: "{{ openwhisk_cli.nginxdir.name }}" + patterns: '*.zip' + recurse: true register: individual_zipfiles - name: "Unarchive the individual zipfiles into binaries" diff --git a/ansible/roles/controller/tasks/deploy.yml b/ansible/roles/controller/tasks/deploy.yml index 64724c41d54..2cecd764c4d 100644 --- a/ansible/roles/controller/tasks/deploy.yml +++ b/ansible/roles/controller/tasks/deploy.yml @@ -134,7 +134,7 @@ - name: Load config from template set_fact: - openwhisk_config: "{{ lookup('template', 'config.j2') | b64encode }}" + openwhisk_config: "{{ lookup('template', 'config.j2') | b64encode }}" - name: populate environment variables for controller set_fact: @@ -295,8 +295,8 @@ - name: populate volumes for controller set_fact: controller_volumes: - - "{{ whisk_logs_dir }}/{{ controller_name }}:/logs" - - "{{ controller.confdir }}/{{ controller_name }}:/conf" + - "{{ whisk_logs_dir }}/{{ controller_name }}:/logs" + - "{{ controller.confdir }}/{{ controller_name }}:/conf" - name: check if coverage collection is enabled set_fact: diff --git a/ansible/roles/controller/tasks/lean.yml b/ansible/roles/controller/tasks/lean.yml index 79cec6c7e48..981ca7028f6 100644 --- a/ansible/roles/controller/tasks/lean.yml +++ b/ansible/roles/controller/tasks/lean.yml @@ -51,4 +51,3 @@ userns_mode: "host" pid_mode: "host" privileged: "yes" - diff --git a/ansible/roles/couchdb/tasks/deploy.yml b/ansible/roles/couchdb/tasks/deploy.yml index f0250b1a517..41323c63536 100644 --- a/ansible/roles/couchdb/tasks/deploy.yml +++ b/ansible/roles/couchdb/tasks/deploy.yml @@ -183,4 +183,3 @@ password: "{{ db.credentials.admin.pass }}" force_basic_auth: yes when: (inventory_hostname == coordinator) and (db.instances|int >= 2) and (cluster_state.json.state != "cluster_finished") - diff --git a/ansible/roles/invoker/tasks/deploy.yml b/ansible/roles/invoker/tasks/deploy.yml index ea4ce48114b..71ab3111039 100644 --- a/ansible/roles/invoker/tasks/deploy.yml +++ b/ansible/roles/invoker/tasks/deploy.yml @@ -112,9 +112,9 @@ dest: "{{ invoker.confdir }}/{{ invoker_name }}" become: "{{ invoker.dir.become }}" with_items: - - "{{ openwhisk_home }}/ansible/roles/invoker/files/{{ invoker.ssl.keystore.name }}" - - "{{ openwhisk_home }}/ansible/roles/invoker/files/{{ invoker.ssl.key }}" - - "{{ openwhisk_home }}/ansible/roles/invoker/files/{{ invoker.ssl.cert }}" + - "{{ openwhisk_home }}/ansible/roles/invoker/files/{{ invoker.ssl.keystore.name }}" + - "{{ openwhisk_home }}/ansible/roles/invoker/files/{{ invoker.ssl.key }}" + - "{{ openwhisk_home }}/ansible/roles/invoker/files/{{ invoker.ssl.cert }}" loop_control: loop_var: inv_item @@ -125,8 +125,8 @@ dbUser: "{{ db.credentials.invoker.user }}" dbPass: "{{ db.credentials.invoker.pass }}" with_items: - - "{{ db.whisk.actions }}" - - "{{ db.whisk.activations }}" + - "{{ db.whisk.actions }}" + - "{{ db.whisk.activations }}" loop_control: loop_var: inv_item diff --git a/ansible/roles/nginx/tasks/deploy.yml b/ansible/roles/nginx/tasks/deploy.yml index a41720d91fd..5828d1f37fa 100644 --- a/ansible/roles/nginx/tasks/deploy.yml +++ b/ansible/roles/nginx/tasks/deploy.yml @@ -33,9 +33,9 @@ src: "{{ nginx.ssl.path }}/{{ item }}" dest: "{{ nginx.confdir }}" with_items: - - "{{ nginx.ssl.cert }}" - - "{{ nginx.ssl.key }}" - - "{{ nginx.ssl.client_ca_cert }}" + - "{{ nginx.ssl.cert }}" + - "{{ nginx.ssl.key }}" + - "{{ nginx.ssl.client_ca_cert }}" - name: copy password files for cert from local to 
remote in nginx config directory copy: @@ -48,8 +48,8 @@ src: "{{ openwhisk_home }}/ansible/roles/controller/files/{{ item }}" dest: "{{ nginx.confdir }}" with_items: - - "{{ controller.ssl.cert }}" - - "{{ controller.ssl.key }}" + - "{{ controller.ssl.cert }}" + - "{{ controller.ssl.key }}" when: controller.protocol == 'https' - name: ensure nginx log directory is created with permissions diff --git a/ansible/roles/zookeeper/tasks/deploy.yml b/ansible/roles/zookeeper/tasks/deploy.yml index 260cd86ff38..dfe9c7cf648 100644 --- a/ansible/roles/zookeeper/tasks/deploy.yml +++ b/ansible/roles/zookeeper/tasks/deploy.yml @@ -25,17 +25,17 @@ recreate: true restart_policy: "{{ docker.restart.policy }}" env: - TZ: "{{ docker.timezone }}" - ZOO_MY_ID: "{{ groups['zookeepers'].index(inventory_hostname) + 1 }}" - ZOO_SERVERS: "{% set zhosts = [] %} - {% for host in groups['zookeepers'] %} - {% if host == inventory_hostname %} - {{ zhosts.append('server.' + (loop.index|string) + '=' + '0.0.0.0:2888:3888') }} - {% else %} - {{ zhosts.append('server.' + (loop.index|string) + '=' + hostvars[host].ansible_host + ':' + ((2888+loop.index-1)|string) + ':' + ((3888+loop.index-1)|string) ) }} - {% endif %} - {% endfor %} - {{ zhosts | join(' ') }}" + TZ: "{{ docker.timezone }}" + ZOO_MY_ID: "{{ groups['zookeepers'].index(inventory_hostname) + 1 }}" + ZOO_SERVERS: "{% set zhosts = [] %} + {% for host in groups['zookeepers'] %} + {% if host == inventory_hostname %} + {{ zhosts.append('server.' + (loop.index|string) + '=' + '0.0.0.0:2888:3888') }} + {% else %} + {{ zhosts.append('server.' + (loop.index|string) + '=' + hostvars[host].ansible_host + ':' + ((2888+loop.index-1)|string) + ':' + ((3888+loop.index-1)|string) ) }} + {% endif %} + {% endfor %} + {{ zhosts | join(' ') }}" ports: - "{{ zookeeper.port + groups['zookeepers'].index(inventory_hostname) }}:2181" - "{{ 2888 + groups['zookeepers'].index(inventory_hostname) }}:2888" diff --git a/ansible/tasks/initdb.yml b/ansible/tasks/initdb.yml index 6f57019fcf5..1df21427b17 100644 --- a/ansible/tasks/initdb.yml +++ b/ansible/tasks/initdb.yml @@ -27,8 +27,8 @@ vars: dbName: "{{ db.whisk.auth }}" readers: - - "{{ db.credentials.controller.user }}" - - "{{ db.credentials.invoker.user }}" + - "{{ db.credentials.controller.user }}" + - "{{ db.credentials.invoker.user }}" - include_tasks: db/recreateDoc.yml vars: diff --git a/ansible/tasks/wipeDatabase.yml b/ansible/tasks/wipeDatabase.yml index da4c9e92431..a1eba729a32 100644 --- a/ansible/tasks/wipeDatabase.yml +++ b/ansible/tasks/wipeDatabase.yml @@ -28,10 +28,10 @@ vars: dbName: "{{ db.whisk.actions }}" readers: - - "{{ db.credentials.controller.user }}" - - "{{ db.credentials.invoker.user }}" + - "{{ db.credentials.controller.user }}" + - "{{ db.credentials.invoker.user }}" writers: - - "{{ db.credentials.controller.user }}" + - "{{ db.credentials.controller.user }}" - include_tasks: db/recreateDb.yml vars: @@ -41,11 +41,11 @@ vars: dbName: "{{ db.whisk.activations }}" readers: - - "{{ db.credentials.controller.user }}" - - "{{ db.credentials.invoker.user }}" + - "{{ db.credentials.controller.user }}" + - "{{ db.credentials.invoker.user }}" writers: - - "{{ db.credentials.controller.user }}" - - "{{ db.credentials.invoker.user }}" + - "{{ db.credentials.controller.user }}" + - "{{ db.credentials.invoker.user }}" - include_tasks: recreateViews.yml when: withViews == True diff --git a/ansible/teardown.yml b/ansible/teardown.yml index dadd9a46d94..0fff860dea7 100644 --- a/ansible/teardown.yml +++ b/ansible/teardown.yml 
@@ -22,5 +22,5 @@ - name: kill all docker containers shell: "{{ item }}" with_items: - - "RUNNING=$(docker ps -aq); if [ -n \"$RUNNING\" ]; then docker unpause $RUNNING > /dev/null; docker kill $RUNNING > /dev/null; docker rm -f -v $(docker ps -aq); fi" - - "DANGLING=$(docker images -q -f dangling=true); if [ -n \"$DANGLING\" ]; then docker rmi -f $DANGLING; fi" + - "RUNNING=$(docker ps -aq); if [ -n \"$RUNNING\" ]; then docker unpause $RUNNING > /dev/null; docker kill $RUNNING > /dev/null; docker rm -f -v $(docker ps -aq); fi" + - "DANGLING=$(docker images -q -f dangling=true); if [ -n \"$DANGLING\" ]; then docker rmi -f $DANGLING; fi" diff --git a/core/monitoring/user-events/compose/grafana/provisioning/dashboards/dashboard.yml b/core/monitoring/user-events/compose/grafana/provisioning/dashboards/dashboard.yml index a6ea486dfc8..e89fd657e91 100644 --- a/core/monitoring/user-events/compose/grafana/provisioning/dashboards/dashboard.yml +++ b/core/monitoring/user-events/compose/grafana/provisioning/dashboards/dashboard.yml @@ -18,11 +18,11 @@ apiVersion: 1 providers: -- name: 'Prometheus' - orgId: 1 - folder: '' - type: file - disableDeletion: false - editable: true - options: - path: /var/lib/grafana/dashboards + - name: 'Prometheus' + orgId: 1 + folder: '' + type: file + disableDeletion: false + editable: true + options: + path: /var/lib/grafana/dashboards diff --git a/core/monitoring/user-events/compose/grafana/provisioning/datasources/datasource.yml b/core/monitoring/user-events/compose/grafana/provisioning/datasources/datasource.yml index b67b13db4ba..55e14df0c6d 100644 --- a/core/monitoring/user-events/compose/grafana/provisioning/datasources/datasource.yml +++ b/core/monitoring/user-events/compose/grafana/provisioning/datasources/datasource.yml @@ -27,41 +27,41 @@ deleteDatasources: # whats available in the database datasources: # name of the datasource. Required -- name: Prometheus - # datasource type. Required - type: prometheus - # access mode. direct or proxy. Required - access: proxy - # org id. will default to orgId 1 if not specified - orgId: 1 - # url - url: http://prometheus:9090 - # database password, if used - password: - # database user, if used - user: - # database name, if used - database: - # enable/disable basic auth - basicAuth: true - # basic auth username - basicAuthUser: admin - # basic auth password - basicAuthPassword: foobar - # enable/disable with credentials headers - withCredentials: - # mark as default datasource. Max one per org - isDefault: true - # fields that will be converted to json and stored in json_data - jsonData: - graphiteVersion: "1.1" - tlsAuth: false - tlsAuthWithCACert: false - # json object of data that will be encrypted. - secureJsonData: - tlsCACert: "..." - tlsClientCert: "..." - tlsClientKey: "..." - version: 1 - # allow users to edit datasources from the UI. - editable: true + - name: Prometheus + # datasource type. Required + type: prometheus + # access mode. direct or proxy. Required + access: proxy + # org id. will default to orgId 1 if not specified + orgId: 1 + # url + url: http://prometheus:9090 + # database password, if used + password: + # database user, if used + user: + # database name, if used + database: + # enable/disable basic auth + basicAuth: true + # basic auth username + basicAuthUser: admin + # basic auth password + basicAuthPassword: foobar + # enable/disable with credentials headers + withCredentials: + # mark as default datasource. 
Max one per org + isDefault: true + # fields that will be converted to json and stored in json_data + jsonData: + graphiteVersion: "1.1" + tlsAuth: false + tlsAuthWithCACert: false + # json object of data that will be encrypted. + secureJsonData: + tlsCACert: "..." + tlsClientCert: "..." + tlsClientKey: "..." + version: 1 + # allow users to edit datasources from the UI. + editable: true diff --git a/core/monitoring/user-events/compose/prometheus/prometheus.yml b/core/monitoring/user-events/compose/prometheus/prometheus.yml index 453ab57bc34..58d0a77a7a7 100644 --- a/core/monitoring/user-events/compose/prometheus/prometheus.yml +++ b/core/monitoring/user-events/compose/prometheus/prometheus.yml @@ -27,4 +27,3 @@ scrape_configs: - job_name: 'openwhisk-metrics' static_configs: - targets: ['user-events:9095'] - diff --git a/core/standalone/bin/waitready b/core/standalone/bin/waitready index 643d5374ce4..54363a3bfa3 100755 --- a/core/standalone/bin/waitready +++ b/core/standalone/bin/waitready @@ -22,6 +22,6 @@ do sleep 1 ; echo server still not ready - retrying done wsk action update testme <(echo 'function main(){return {"ready":true}}') --kind nodejs:10 until wsk action invoke testme -r 2>/dev/null | grep 'ready' -do sleep 1 ; echo server initializing... +do sleep 1 ; echo server initializing... done wsk action delete testme diff --git a/core/standalone/src/main/resources/playground/ui/playground.css b/core/standalone/src/main/resources/playground/ui/playground.css index 8df6370e4b9..458c6c9c425 100644 --- a/core/standalone/src/main/resources/playground/ui/playground.css +++ b/core/standalone/src/main/resources/playground/ui/playground.css @@ -24,7 +24,7 @@ html, body { box-sizing: border-box; /* include the border and padding in width / height calcuations */ } -#editor { +#editor { flex: 1 1 auto; } @@ -83,10 +83,10 @@ html, body { border-radius: 8px; background-color: #26282C; color: white; - text-align: center; + text-align: center; text-decoration: none; - display: inline-block; - font-size: 12pt; + display: inline-block; + font-size: 12pt; cursor: pointer; } @@ -110,10 +110,10 @@ html, body { padding-bottom: 8px; padding-left: 20px; padding-right: 20px; - text-align: center; + text-align: center; text-decoration: none; - display: inline-block; - font-size: 10pt; + display: inline-block; + font-size: 10pt; cursor: pointer; } @@ -190,7 +190,7 @@ html, body { .panel-header { padding-top: 4px; padding-bottom: 4px; - font-size: 10pt; + font-size: 10pt; font-weight: bold; background-color: #202020; color: #9098A0; diff --git a/docs/dev/modules.md b/docs/dev/modules.md index 6367dc180e8..61757c4b26e 100644 --- a/docs/dev/modules.md +++ b/docs/dev/modules.md @@ -134,5 +134,3 @@ This page is generated via script `./gradlew :tools:dev:renderModuleDetails`. Se | [openwhisk-vscode](https://github.com/apache/openwhisk-vscode) | [DEPRECATED] - Visual Studio Code extension (prototype) for authoring OpenWhisk actions inside the editor. | | [openwhisk-workshop](https://github.com/apache/openwhisk-workshop) | [DEPRECATED] - OpenWhisk workshop to help developers learn how to build serverless applications using the platform. | | [openwhisk-xcode](https://github.com/apache/openwhisk-xcode) | [DEPRECATED] - Collection of OpenWhisk tools for OS X implemented in Swift 3. 
| - - diff --git a/docs/feeds.md b/docs/feeds.md index cc19d90cc04..02664e57b9d 100644 --- a/docs/feeds.md +++ b/docs/feeds.md @@ -122,4 +122,3 @@ The Cloudant *changes* feed is the canonical example -- it stands up a `cloudant The *alarm* feed is implemented with a similar pattern. The connection-based architecture is the highest performance option, but imposes more overhead on operations compared to the polling and hook architectures. - diff --git a/license-templates/LICENSE.txt b/license-templates/LICENSE.txt new file mode 100644 index 00000000000..caf46272b61 --- /dev/null +++ b/license-templates/LICENSE.txt @@ -0,0 +1,15 @@ + +Licensed to the Apache Software Foundation (ASF) under one or more +contributor license agreements. See the NOTICE file distributed with +this work for additional information regarding copyright ownership. +The ASF licenses this file to You under the Apache License, Version 2.0 +(the "License"); you may not use this file except in compliance with +the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/tests/dat/actions/README.md b/tests/dat/actions/README.md index 1ac83768156..8388dbb6fb2 100644 --- a/tests/dat/actions/README.md +++ b/tests/dat/actions/README.md @@ -33,4 +33,3 @@ These unicode tests are checked however in their corresponding runtime build. For Java artifacts, Java 8 is required. If not found in your path, the build script will skip those artifacts and corresponding tests will also be skipped. - diff --git a/tests/dat/actions/hello.py b/tests/dat/actions/hello.py index d2639d5e3de..16e12915599 100644 --- a/tests/dat/actions/hello.py +++ b/tests/dat/actions/hello.py @@ -21,7 +21,7 @@ def main(args): """Main.""" - name = args.get('name', 'stranger') - greeting = 'Hello ' + name + '!' + name = args.get("name", "stranger") + greeting = "Hello " + name + "!" print(greeting) - return {'greeting': greeting} + return {"greeting": greeting} diff --git a/tests/dat/actions/niam.py b/tests/dat/actions/niam.py index 2e8bbee2e64..1c29f5b9ab6 100644 --- a/tests/dat/actions/niam.py +++ b/tests/dat/actions/niam.py @@ -14,7 +14,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - """Python Non-standard entry point test.""" diff --git a/tests/dat/actions/python-zip/__main__.py b/tests/dat/actions/python-zip/__main__.py index bc177329246..27d60127a74 100644 --- a/tests/dat/actions/python-zip/__main__.py +++ b/tests/dat/actions/python-zip/__main__.py @@ -14,7 +14,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from greet import greet diff --git a/tests/dat/actions/python-zip/greet.py b/tests/dat/actions/python-zip/greet.py index 037dfd201df..f4eabb5a992 100644 --- a/tests/dat/actions/python-zip/greet.py +++ b/tests/dat/actions/python-zip/greet.py @@ -17,9 +17,9 @@ def greet(dict): - if 'name' in dict: - name = dict['name'] + if "name" in dict: + name = dict["name"] else: - name = 'stranger' - greeting = 'Hello ' + name + '!' - return {'greeting': greeting} + name = "stranger" + greeting = "Hello " + name + "!" 
+ return {"greeting": greeting} diff --git a/tests/dat/actions/pythonVersion.py b/tests/dat/actions/pythonVersion.py index 3fc6955a6bc..327320bbcde 100644 --- a/tests/dat/actions/pythonVersion.py +++ b/tests/dat/actions/pythonVersion.py @@ -17,9 +17,9 @@ * limitations under the License. */ """ - import sys + def main(args): """Main.""" return {"version": sys.version_info.major} diff --git a/tests/dat/actions/sleep.py b/tests/dat/actions/sleep.py index a680a870e6d..a12a168ecd8 100644 --- a/tests/dat/actions/sleep.py +++ b/tests/dat/actions/sleep.py @@ -14,7 +14,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - # # Python based OpenWhisk action that sleeps for the specified number # of milliseconds before returning. @@ -30,9 +29,11 @@ def main(parm): sleepTimeInMs = parm.get("sleepTimeInMs", 1) print("Specified sleep time is {} ms.".format(sleepTimeInMs)) - result = {"msg": "Terminated successfully after around {} ms.".format(sleepTimeInMs)} + result = { + "msg": "Terminated successfully after around {} ms.".format(sleepTimeInMs) + } time.sleep(sleepTimeInMs / 1000.0) - print(result['msg']) + print(result["msg"]) return result diff --git a/tests/dat/actions/stdenv.py b/tests/dat/actions/stdenv.py index 87f662b0a9d..afe00f4b1ea 100644 --- a/tests/dat/actions/stdenv.py +++ b/tests/dat/actions/stdenv.py @@ -14,11 +14,9 @@ # See the License for the specific language governing permissions and # limitations under the License. # - """Unify action container environments.""" import os def main(dict): - return {"auth": os.environ['__OW_API_KEY'], - "edge": os.environ['__OW_API_HOST']} + return {"auth": os.environ["__OW_API_KEY"], "edge": os.environ["__OW_API_HOST"]} diff --git a/tests/dat/actions/unicode.tests/src/dotnet2.2/Apache.OpenWhisk.UnicodeTests.Dotnet/Unicode.cs b/tests/dat/actions/unicode.tests/src/dotnet2.2/Apache.OpenWhisk.UnicodeTests.Dotnet/Unicode.cs index 6963b869d85..fadd0909da2 100644 --- a/tests/dat/actions/unicode.tests/src/dotnet2.2/Apache.OpenWhisk.UnicodeTests.Dotnet/Unicode.cs +++ b/tests/dat/actions/unicode.tests/src/dotnet2.2/Apache.OpenWhisk.UnicodeTests.Dotnet/Unicode.cs @@ -32,4 +32,4 @@ public JObject Main(JObject args) return (message); } } -} \ No newline at end of file +} diff --git a/tests/performance/gatling_tests/src/gatling/resources/data/pythonAction.py b/tests/performance/gatling_tests/src/gatling/resources/data/pythonAction.py index b5f1f193433..2f5ac2b4776 100644 --- a/tests/performance/gatling_tests/src/gatling/resources/data/pythonAction.py +++ b/tests/performance/gatling_tests/src/gatling/resources/data/pythonAction.py @@ -15,9 +15,10 @@ # limitations under the License. # + def main(dict): - if 'text' in dict: - text = dict['text'] + if "text" in dict: + text = dict["text"] else: text = "stranger" greeting = "Hello " + text + "!" 
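The next diff reformats `tools/actionProxy/invoke.py` with black and reorder_python_imports; its command-line interface is unchanged. For context, a typical session against a locally running action container looks roughly like this — the runtime image and port mapping are illustrative assumptions, not part of the patch:

```
# Start an action runtime container listening on port 8080 (image name is an example).
docker run -d -p 8080:8080 openwhisk/python3action

# Initialize the container with an action source file, then invoke it with a JSON payload.
./invoke.py init tests/dat/actions/hello.py
./invoke.py run '{"name": "OpenWhisk"}'
```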
diff --git a/tools/actionProxy/invoke.py b/tools/actionProxy/invoke.py index 937f80ffce8..5797fdc9176 100755 --- a/tools/actionProxy/invoke.py +++ b/tools/actionProxy/invoke.py @@ -27,82 +27,118 @@ For additional help, try ./invoke.py -h """ - +import argparse +import base64 +import codecs +import json import os import re import sys -import json -import base64 + import requests -import codecs -import argparse + try: import argcomplete except ImportError: argcomplete = False + def main(): try: args = parseArgs() - exitCode = { - 'init' : init, - 'run' : run - }[args.cmd](args) + exitCode = {"init": init, "run": run}[args.cmd](args) except Exception as e: print(e) exitCode = 1 sys.exit(exitCode) + def dockerHost(): - dockerHost = 'localhost' - if 'DOCKER_HOST' in os.environ: + dockerHost = "localhost" + if "DOCKER_HOST" in os.environ: try: - dockerHost = re.compile('tcp://(.*):[\d]+').findall(os.environ['DOCKER_HOST'])[0] + dockerHost = re.compile("tcp://(.*):[\d]+").findall( + os.environ["DOCKER_HOST"] + )[0] except Exception: - print('cannot determine docker host from %s' % os.environ['DOCKER_HOST']) + print("cannot determine docker host from %s" % os.environ["DOCKER_HOST"]) sys.exit(-1) return dockerHost + def containerRoute(args, path): - return 'http://%s:%s/%s' % (args.host, args.port, path) + return "http://%s:%s/%s" % (args.host, args.port, path) + class objectify(object): def __init__(self, d): self.__dict__ = d -def parseArgs(): - parser = argparse.ArgumentParser(description='initialize and run an OpenWhisk action container') - parser.add_argument('-v', '--verbose', help='verbose output', action='store_true') - parser.add_argument('--host', help='action container host', default=dockerHost()) - parser.add_argument('-p', '--port', help='action container port number', default=8080, type=int) - - subparsers = parser.add_subparsers(title='available commands', dest='cmd') - - initmenu = subparsers.add_parser('init', help='initialize container with src or zip/tgz file') - initmenu.add_argument('-b', '--binary', help='treat artifact as binary', action='store_true') - initmenu.add_argument('-r', '--run', nargs='?', default=None, help='run after init') - initmenu.add_argument('main', nargs='?', default='main', help='name of the "main" entry method for the action') - initmenu.add_argument('artifact', help='a source file or zip/tgz archive') - initmenu.add_argument('env', nargs='?', help='the environment variables to export to the action, either a reference to a file or an inline JSON object', default=None) - runmenu = subparsers.add_parser('run', help='send arguments to container to run action') - runmenu.add_argument('payload', nargs='?', help='the arguments to send to the action, either a reference to a file or an inline JSON object', default=None) +def parseArgs(): + parser = argparse.ArgumentParser( + description="initialize and run an OpenWhisk action container" + ) + parser.add_argument("-v", "--verbose", help="verbose output", action="store_true") + parser.add_argument("--host", help="action container host", default=dockerHost()) + parser.add_argument( + "-p", "--port", help="action container port number", default=8080, type=int + ) + + subparsers = parser.add_subparsers(title="available commands", dest="cmd") + + initmenu = subparsers.add_parser( + "init", help="initialize container with src or zip/tgz file" + ) + initmenu.add_argument( + "-b", "--binary", help="treat artifact as binary", action="store_true" + ) + initmenu.add_argument("-r", "--run", nargs="?", default=None, help="run 
after init") + initmenu.add_argument( + "main", + nargs="?", + default="main", + help='name of the "main" entry method for the action', + ) + initmenu.add_argument("artifact", help="a source file or zip/tgz archive") + initmenu.add_argument( + "env", + nargs="?", + help="the environment variables to export to the action, either a reference to a file or an inline JSON object", + default=None, + ) + + runmenu = subparsers.add_parser( + "run", help="send arguments to container to run action" + ) + runmenu.add_argument( + "payload", + nargs="?", + help="the arguments to send to the action, either a reference to a file or an inline JSON object", + default=None, + ) if argcomplete: argcomplete.autocomplete(parser) return parser.parse_args() + def init(args): main = args.main artifact = args.artifact - if artifact and (args.binary or artifact.endswith('.zip') or artifact.endswith('tgz') or artifact.endswith('jar')): - with open(artifact, 'rb') as fp: + if artifact and ( + args.binary + or artifact.endswith(".zip") + or artifact.endswith("tgz") + or artifact.endswith("jar") + ): + with open(artifact, "rb") as fp: contents = fp.read() - contents = str(base64.b64encode(contents), 'utf-8') + contents = str(base64.b64encode(contents), "utf-8") binary = True - elif artifact != '': - with(codecs.open(artifact, 'r', 'utf-8')) as fp: + elif artifact != "": + with (codecs.open(artifact, "r", "utf-8")) as fp: contents = fp.read() binary = False else: @@ -110,15 +146,16 @@ def init(args): binary = False r = requests.post( - containerRoute(args, 'init'), - json = { + containerRoute(args, "init"), + json={ "value": { "code": contents, "binary": binary, "main": main, - "env": processPayload(args.env) + "env": processPayload(args.env), } - }) + }, + ) print(r.text) @@ -128,26 +165,29 @@ def init(args): runArgs.payload = args.run run(runArgs) + def run(args): value = processPayload(args.payload) if args.verbose: - print('Sending value: %s...' % json.dumps(value)[0:40]) - r = requests.post(containerRoute(args, 'run'), json = {"value": value}) - print(str(r.content, 'utf-8')) + print("Sending value: %s..." % json.dumps(value)[0:40]) + r = requests.post(containerRoute(args, "run"), json={"value": value}) + print(str(r.content, "utf-8")) + def processPayload(payload): if payload and os.path.exists(payload): with open(payload) as fp: return json.load(fp) try: - d = json.loads(payload if payload else '{}') + d = json.loads(payload if payload else "{}") if isinstance(d, dict): return d else: raise except: - print('payload must be a JSON object.') + print("payload must be a JSON object.") sys.exit(-1) -if __name__ == '__main__': + +if __name__ == "__main__": main() diff --git a/tools/admin/wskadmin b/tools/admin/wskadmin index f65d7c29b32..bc28ce0eba1 100755 --- a/tools/admin/wskadmin +++ b/tools/admin/wskadmin @@ -1,5 +1,4 @@ #!/usr/bin/env python - # # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with @@ -16,21 +15,22 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - ## # Whisk Admin command line interface ## - import argparse import json import os import random import re -from subprocess import Popen, PIPE, STDOUT import string import sys import traceback import uuid +from subprocess import PIPE +from subprocess import Popen +from subprocess import STDOUT + import wskprop if sys.version_info.major >= 3: from urllib.parse import quote_plus diff --git a/tools/admin/wskprop.py b/tools/admin/wskprop.py index 3b1bd94c833..84fc66700ee 100644 --- a/tools/admin/wskprop.py +++ b/tools/admin/wskprop.py @@ -22,35 +22,38 @@ def propfile(base): - if base != '': - filename = '%s/whisk.properties' % base + if base != "": + filename = "%s/whisk.properties" % base if os.path.isfile(filename) and os.path.exists(filename): return filename else: parent = os.path.dirname(base) - return propfile(parent) if parent != base else '' + return propfile(parent) if parent != base else "" else: - return '' + return "" def importPropsIfAvailable(filename): - thefile = (open(filename, 'r') if os.path.isfile(filename) and - os.path.exists(filename) else []) + thefile = ( + open(filename, "r") + if os.path.isfile(filename) and os.path.exists(filename) + else [] + ) return importProps(thefile) def importProps(stream): props = {} for line in stream: - parts = line.split('=') + parts = line.split("=") if len(parts) >= 1: key = parts[0].strip() if len(parts) >= 2: val = parts[1].strip() - if key != '' and val != '': - props[key.upper().replace('.', '_')] = val - elif key != '': - props[key.upper().replace('.', '_')] = '' + if key != "" and val != "": + props[key.upper().replace(".", "_")] = val + elif key != "": + props[key.upper().replace(".", "_")] = "" return props @@ -58,25 +61,29 @@ def importProps(stream): # deferredInfo) prints a message if a required property is not found def checkRequiredProperties(requiredPropertiesByName, properties): """Return a tuple describing the requested required properties.""" - requiredPropertiesByValue = [getPropertyValue(key, properties) for key - in requiredPropertiesByName] - requiredProperties = dict(zip(requiredPropertiesByName, - requiredPropertiesByValue)) - invalidProperties = [key for key in requiredPropertiesByName if - requiredProperties[key] is None] - deferredInfo = '' + requiredPropertiesByValue = [ + getPropertyValue(key, properties) for key in requiredPropertiesByName + ] + requiredProperties = dict(zip(requiredPropertiesByName, requiredPropertiesByValue)) + invalidProperties = [ + key for key in requiredPropertiesByName if requiredProperties[key] is None + ] + deferredInfo = "" for key, value in requiredProperties.items(): - if value in (None, ''): - print('property "%s" not found in environment or ' - 'property file' % key) + if value in (None, ""): + print('property "%s" not found in environment or ' "property file" % key) else: - deferredInfo += 'using %(key)s = %(value)s\n' % {'key': key, - 'value': value} + deferredInfo += "using %(key)s = %(value)s\n" % {"key": key, "value": value} return (len(invalidProperties) == 0, requiredProperties, deferredInfo) def getPropertyValue(key, properties): evalue = os.environ.get(key) - value = (evalue if evalue != None and evalue != '' - else properties[key] if key in properties else None) + value = ( + evalue + if evalue != None and evalue != "" + else properties[key] + if key in properties + else None + ) return value diff --git a/tools/admin/wskutil.py b/tools/admin/wskutil.py index 333e6c8192d..7829febf8c7 100644 --- a/tools/admin/wskutil.py +++ b/tools/admin/wskutil.py @@ -17,11 +17,10 
@@ * limitations under the License. */ """ - - -import os import json +import os import sys + if sys.version_info.major >= 3: from http.client import HTTPConnection, HTTPSConnection, IncompleteRead from urllib.parse import urlparse @@ -36,39 +35,58 @@ # global configurations, can control whether to allow untrusted certificates # on HTTPS connections -verify_cert = os.getenv('DB_VERIFY_CERT') is None or os.getenv('DB_VERIFY_CERT').lower() != 'false' -httpRequestProps = {'secure': verify_cert} - -def request(method, urlString, body = '', headers = {}, auth = None, verbose = False, https_proxy = os.getenv('https_proxy', None), timeout = 60): +verify_cert = ( + os.getenv("DB_VERIFY_CERT") is None + or os.getenv("DB_VERIFY_CERT").lower() != "false" +) +httpRequestProps = {"secure": verify_cert} + + +def request( + method, + urlString, + body="", + headers={}, + auth=None, + verbose=False, + https_proxy=os.getenv("https_proxy", None), + timeout=60, +): url = urlparse(urlString) - if url.scheme == 'http': - conn = HTTPConnection(url.netloc, timeout = timeout) + if url.scheme == "http": + conn = HTTPConnection(url.netloc, timeout=timeout) else: - if httpRequestProps['secure'] or not hasattr(ssl, '_create_unverified_context'): - conn = HTTPSConnection(url.netloc if https_proxy is None else https_proxy, timeout = timeout) + if httpRequestProps["secure"] or not hasattr(ssl, "_create_unverified_context"): + conn = HTTPSConnection( + url.netloc if https_proxy is None else https_proxy, timeout=timeout + ) else: - conn = HTTPSConnection(url.netloc if https_proxy is None else https_proxy, context=ssl._create_unverified_context(), timeout = timeout) + conn = HTTPSConnection( + url.netloc if https_proxy is None else https_proxy, + context=ssl._create_unverified_context(), + timeout=timeout, + ) if https_proxy: conn.set_tunnel(url.netloc) if auth is not None: auth = base64.b64encode(auth.encode()).decode() - headers['Authorization'] = 'Basic %s' % auth + headers["Authorization"] = "Basic %s" % auth if verbose: - print('========') - print('REQUEST:') - print('%s %s' % (method, urlString)) - print('Headers sent:') + print("========") + print("REQUEST:") + print("%s %s" % (method, urlString)) + print("Headers sent:") print(getPrettyJson(headers)) - if body != '': - print('Body sent:') + if body != "": + print("Body sent:") print(body) try: conn.request(method, urlString, body, headers) res = conn.getresponse() - body = '' + body = "" try: body = res.read() except IncompleteRead as e: @@ -79,21 +97,23 @@ def request(method, urlString, body = '', headers = {}, auth = None, verbose = F res.read = lambda: body if verbose: - print('--------') - print('RESPONSE:') - print('Got response with code %s' % res.status) - print('Body received:') + print("--------") + print("RESPONSE:") + print("Got response with code %s" % res.status) + print("Body received:") print(res.read()) - print('========') + print("========") return res except socket.timeout: - return ErrorResponse(status = 500, error = 'request timed out at %d seconds' % timeout) + return ErrorResponse( + status=500, error="request timed out at %d seconds" % timeout + ) except Exception as e: - return ErrorResponse(status = 500, error = str(e)) + return ErrorResponse(status=500, error=str(e)) def getPrettyJson(obj): - return json.dumps(obj, sort_keys=True, indent=4, separators=(',', ': ')) + return json.dumps(obj, sort_keys=True, indent=4, separators=(",", ": ")) # class to normalize responses for exceptions with no HTTP response for canonical error handling diff 
--git a/tools/build/checkLogs.py b/tools/build/checkLogs.py index b436042a3cf..286734e7394 100755 --- a/tools/build/checkLogs.py +++ b/tools/build/checkLogs.py @@ -20,35 +20,38 @@ * limitations under the License. */ """ - ## # CI/CD tool to assert that logs and databases are in certain bounds ## - import collections import itertools +import json import os import platform import sys -import json from functools import partial + def file_has_at_most_x_bytes(x, file): size = os.path.getsize(file) - if(size > x): - return [ (0, "file has %d bytes, expected %d bytes" % (size, x)) ] + if size > x: + return [(0, "file has %d bytes, expected %d bytes" % (size, x))] else: - return [ ] + return [] + # Checks that the database dump contains at most x entries def database_has_at_most_x_entries(x, file): with open(file) as db_file: data = json.load(db_file) - entries = len(data['rows']) - if(entries > x): - return [ (0, "found %d database entries, expected %d entries" % (entries, x)) ] + entries = len(data["rows"]) + if entries > x: + return [ + (0, "found %d database entries, expected %d entries" % (entries, x)) + ] else: - return [ ] + return [] + # Runs a series of file-by-file checks. def run_file_checks(file_path, checks): @@ -61,19 +64,21 @@ def run_file_checks(file_path, checks): return errors + # Helpers, rather than non-standard modules. def colors(): ansi = hasattr(sys.stderr, "isatty") and platform.system() != "Windows" def colorize(code, string): - return "%s%s%s" % (code, string, '\033[0m') if ansi else string + return "%s%s%s" % (code, string, "\033[0m") if ansi else string - blue = lambda s: colorize('\033[94m', s) - green = lambda s: colorize('\033[92m', s) - red = lambda s: colorize('\033[91m', s) + blue = lambda s: colorize("\033[94m", s) + green = lambda s: colorize("\033[92m", s) + red = lambda s: colorize("\033[91m", s) return collections.namedtuple("Colorizer", "blue green red")(blue, green, red) + # Script entrypoint. 
if __name__ == "__main__": if len(sys.argv) > 3: @@ -84,7 +89,7 @@ def colorize(code, string): tags_to_check = [] if len(sys.argv) == 3: - tags_to_check = {x.strip() for x in sys.argv[2].split(',')} + tags_to_check = {x.strip() for x in sys.argv[2].split(",")} col = colors() @@ -92,11 +97,11 @@ def colorize(code, string): sys.stderr.write("%s: %s is not a directory.\n" % (sys.argv[0], root_dir)) file_checks = [ - ("db-rules.log", {"db"}, [ partial(database_has_at_most_x_entries, 0) ]), - ("db-triggers.log", {"db"}, [ partial(database_has_at_most_x_entries, 0) ]), + ("db-rules.log", {"db"}, [partial(database_has_at_most_x_entries, 0)]), + ("db-triggers.log", {"db"}, [partial(database_has_at_most_x_entries, 0)]), # Assert that stdout of the container is correctly piped and empty - ("controller0.log", {"system"}, [ partial(file_has_at_most_x_bytes, 0) ]), - ("invoker0.log", {"system"}, [ partial(file_has_at_most_x_bytes, 0) ]) + ("controller0.log", {"system"}, [partial(file_has_at_most_x_bytes, 0)]), + ("invoker0.log", {"system"}, [partial(file_has_at_most_x_bytes, 0)]), ] all_errors = [] @@ -112,11 +117,13 @@ def colorize(code, string): if all_errors: files_with_errors = 0 - for path, triples in itertools.groupby(sorted(all_errors, key=sort_key), key=sort_key): + for path, triples in itertools.groupby( + sorted(all_errors, key=sort_key), key=sort_key + ): files_with_errors += 1 sys.stderr.write("%s:\n" % col.blue(path)) - pairs = sorted(map(lambda t: (t[1],t[2]), triples), key=lambda p: p[0]) + pairs = sorted(map(lambda t: (t[1], t[2]), triples), key=lambda p: p[0]) for line, msg in pairs: sys.stderr.write(" %4d: %s\n" % (line, msg)) @@ -127,7 +134,10 @@ def colorize(code, string): if files_with_errors == 1: message = "There were %d errors in a file." % len(all_errors) else: - message = "There were %d errors in %d files." % (len(all_errors), files_with_errors) + message = "There were %d errors in %d files." % ( + len(all_errors), + files_with_errors, + ) sys.stderr.write(col.red(message) + "\n") sys.exit(1) diff --git a/tools/build/citool b/tools/build/citool index 8a5403d3fae..01d39ef6e2a 100755 --- a/tools/build/citool +++ b/tools/build/citool @@ -1,5 +1,4 @@ #!/usr/bin/env python - # # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with @@ -16,7 +15,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - ## # Jenkins/Travis tools script, for monitoring and analyzing jobs from CI. 
# @@ -32,11 +30,11 @@ # Example use to monitor a jenkins build B with job number N: # > citool -u https://jenkins.host:port -b B monitor N ## - +import argparse import sys import time import traceback -import argparse + if sys.version_info.major >= 3: from http.client import HTTPConnection, HTTPSConnection, TEMPORARY_REDIRECT, OK from urllib.parse import urlparse @@ -49,50 +47,85 @@ import json from xml.dom import minidom from subprocess import Popen, PIPE, STDOUT + def main(): exitCode = 0 try: args = parseArgs() - exitCode = { - 'monitor' : monitor, - 'cat': cat - }[args.cmd](args) + exitCode = {"monitor": monitor, "cat": cat}[args.cmd](args) except Exception as e: - print('Exception: ', e) + print("Exception: ", e) if args.verbose: traceback.print_exc() exitCode = 1 sys.exit(exitCode) -def parseArgs(): - parser = argparse.ArgumentParser(description='tool for analyzing logs from CI') - subparsers = parser.add_subparsers(title='available commands', dest='cmd') - - parser.add_argument('job', help='job number; for matrix jobs add the matrix index after a period (e.g., 401881768.2)') - - parser.add_argument('-b', '--build', help='build name', default='travis') - parser.add_argument('-v', '--verbose', help='verbose output', action='store_true') - parser.add_argument('-i', '--input-file', help='read logs from file rather than CI', action='store_true', dest='ifile') - parser.add_argument('-o', '--output-file', help='store intermediate buffer to a file (e.g., jenkins console or component logs)', action='store_true', dest='ofile') - parser.add_argument('-u', '--url', help='URL for CI build job (default is Travis CI)', default='https://api.travis-ci.com') - - subparser = subparsers.add_parser('monitor', help='report passing or failing tests (only failing tests by default)') - subparser.add_argument('-a', '--all', help='show all tests suites, passing and failing', action='store_true') - subparser.add_argument('-r', '--relax', help='relax regex match to include failed ansible tasks', action='store_true') - subparser.add_argument('-p', '--poll', help='repeat monitor every 10 seconds', action='store_true') - - subparser = subparsers.add_parser('cat', help='concatenate logs from build (limited to Jenkins)') - subparser.add_argument('artifactPath', help='path to artifacts store') - subparser.add_argument('-g', '--grep', help='run grep against logs using provided value') - subparser.add_argument('-s', '--sort', help='sort logs by timestamp', action='store_true') - subparser.add_argument('-n', '--invokers', help='number of invokers', type=int, default=3) - subparser.add_argument('-c', '--controllers', help='number of controllers', type=int, default=1) +def parseArgs(): + parser = argparse.ArgumentParser(description="tool for analyzing logs from CI") + subparsers = parser.add_subparsers(title="available commands", dest="cmd") + + parser.add_argument( + "job", + help="job number; for matrix jobs add the matrix index after a period (e.g., 401881768.2)", + ) + + parser.add_argument("-b", "--build", help="build name", default="travis") + parser.add_argument("-v", "--verbose", help="verbose output", action="store_true") + parser.add_argument( + "-i", + "--input-file", + help="read logs from file rather than CI", + action="store_true", + dest="ifile", + ) + parser.add_argument( + "-o", + "--output-file", + help="store intermediate buffer to a file (e.g., jenkins console or component logs)", + action="store_true", + dest="ofile", + ) + parser.add_argument( + "-u", + "--url", + help="URL for CI build job (default is Travis CI)", + default="https://api.travis-ci.com", + ) + + subparser = subparsers.add_parser( + "monitor", + help="report passing or failing tests (only failing tests by default)", + ) + subparser.add_argument( + "-a", + "--all", + help="show all test suites, passing and failing", + action="store_true", +
) + subparser.add_argument( + "-r", + "--relax", + help="relax regex match to include failed ansible tasks", + action="store_true", + ) + subparser.add_argument( + "-p", "--poll", help="repeat monitor every 10 seconds", action="store_true" + ) + + subparser = subparsers.add_parser( + "cat", help="concatenate logs from build (limited to Jenkins)" + ) + subparser.add_argument("artifactPath", help="path to artifacts store") + subparser.add_argument( + "-g", "--grep", help="run grep against logs using provided value" + ) + subparser.add_argument( + "-s", "--sort", help="sort logs by timestamp", action="store_true" + ) + subparser.add_argument( + "-n", "--invokers", help="number of invokers", type=int, default=3 + ) + subparser.add_argument( + "-c", "--controllers", help="number of controllers", type=int, default=1 + ) return parser.parse_args() -def request(method, urlString, body = "", headers = {}, auth = None, verbose = False): + +def request(method, urlString, body="", headers={}, auth=None, verbose=False): url = urlparse(urlString) - if url.scheme == 'http': + if url.scheme == "http": conn = HTTPConnection(url.netloc) else: conn = HTTPSConnection(url.netloc) @@ -100,82 +133,94 @@ def request(method, urlString, body = "", headers = {}, auth = None, verbose = F if verbose: print("%s %s" % (method, urlString)) - conn.request(method.upper(), urlString, body, headers = headers) + conn.request(method.upper(), urlString, body, headers=headers) res = conn.getresponse() if verbose: - print('Got response with code %s' % res.status) + print("Got response with code %s" % res.status) return res -def shell(cmd, data = None, verbose = False): + +def shell(cmd, data=None, verbose=False): start = time.time() if verbose: - print('%s%s' % (cmd, ' ' if (data) else '')) + print("%s%s" % (cmd, " " if (data) else "")) - p = Popen(cmd, shell = True, stdout = PIPE, stderr = STDOUT, stdin = PIPE) - out, err = p.communicate(input = data) + p = Popen(cmd, shell=True, stdout=PIPE, stderr=STDOUT, stdin=PIPE) + out, err = p.communicate(input=data) p.wait() # stdout/stderr may be either text or bytes, depending on Python # version. In the latter case, decode to text. if isinstance(out, bytes): - out = out.decode('utf-8') + out = out.decode("utf-8") if isinstance(err, bytes): - err = err.decode('utf-8') + err = err.decode("utf-8") end = time.time() delta = end - start return (delta, out, err) + def getTravisHeaders(): - return {'User-Agent': 'wsk citool/0.0.1', - 'Travis-API-Version': 3, - 'Accept': 'application/vnd.travis-ci.2+json' + return { + "User-Agent": "wsk citool/0.0.1", + "Travis-API-Version": 3, + "Accept": "application/vnd.travis-ci.2+json", } + def getTravisMatrixId(parts, values): N = len(values) if len(parts) == 1: return -1 else: try: - matrix = int(parts[1]) -1 + matrix = int(parts[1]) - 1 if matrix < 0: - print('Matrix id must be positive. Valid values are [1..%s].' % N) + print("Matrix id must be positive. Valid values are [1..%s]." % N) exit(-1) if matrix >= N: - print('Matrix id is out of bounds. Valid values are [1..%s].' % N) + print("Matrix id is out of bounds. Valid values are [1..%s]." % N) exit(-1) return matrix except Exception: - print('Matrix id is not an integer as expected. Valid values are [1..%s].' % N) + print( + "Matrix id is not an integer as expected. Valid values are [1..%s]." 
% N + ) exit(-1) + def getJobUrl(args): - if args.build.lower() == 'travis': + if args.build.lower() == "travis": # Get build information - parts = args.job.split('.') + parts = args.job.split(".") jobid = parts[0] if len(parts) > 2: - print('Job is malformed') + print("Job is malformed") exit(-1) - buildUrl = '%s/build/%s' % (args.url, jobid) - buildRes = request('get', buildUrl, headers = getTravisHeaders(), verbose = args.verbose) + buildUrl = "%s/build/%s" % (args.url, jobid) + buildRes = request( + "get", buildUrl, headers=getTravisHeaders(), verbose=args.verbose + ) body = validateResponse(buildRes) try: body = json.loads(body) - index = getTravisMatrixId(parts, body['jobs']) - job = body['jobs'][index]['id'] + index = getTravisMatrixId(parts, body["jobs"]) + job = body["jobs"][index]["id"] except Exception: - print('Expected response to contain build and job-ids properties in %s' % body) + print( + "Expected response to contain build and job-ids properties in %s" % body + ) exit(-1) - url = '%s/job/%s' % (args.url, job) - else: # assume jenkins - url = '%s/job/%s/%s' % (args.url, args.build, args.job) + url = "%s/job/%s" % (args.url, job) + else: # assume jenkins + url = "%s/job/%s/%s" % (args.url, args.build, args.job) return url + def monitor(args): def poll(): (ex, finished) = monitorOnce(args) @@ -188,26 +233,29 @@ def monitor(args): (ex, finished) = monitorOnce(args) return ex + def monitorOnce(args): if args.ifile: - file = open('%s' % args.job, 'r') + file = open("%s" % args.job, "r") body = file.read() file.close() else: - if args.build.lower() == 'travis': - url = '%s/log.txt' % getJobUrl(args) - res = request('get', url, headers = getTravisHeaders(), verbose = args.verbose) + if args.build.lower() == "travis": + url = "%s/log.txt" % getJobUrl(args) + res = request("get", url, headers=getTravisHeaders(), verbose=args.verbose) if res.status == TEMPORARY_REDIRECT: - url = res.getheader('location') - res = request('get', url, headers = getTravisHeaders(), verbose = args.verbose) + url = res.getheader("location") + res = request( + "get", url, headers=getTravisHeaders(), verbose=args.verbose + ) - else: # assume jenkins - url = '%s/logText/progressiveHtml' % getJobUrl(args) - res = request('get', url, verbose = args.verbose) + else: # assume jenkins + url = "%s/logText/progressiveHtml" % getJobUrl(args) + res = request("get", url, verbose=args.verbose) body = validateResponse(res) if args.ofile: - file = open('%s-console.log' % args.job, 'wb') + file = open("%s-console.log" % args.job, "wb") file.write(body) file.close() if args.ifile or res.status == OK: @@ -217,85 +265,105 @@ def monitorOnce(args): print(body) return res.status + def validateResponse(res): body = res.read() if res.status != OK: - body = body.decode('utf-8') - if body.startswith('<'): + body = body.decode("utf-8") + if body.startswith("<"): dom = minidom.parseString(body) print(dom.toprettyxml()), else: print(body) exit(res.status) elif not body: - print('Build log is empty.') + print("Build log is empty.") exit(-1) else: return body + def grepForFailingTests(args, body): - cmd = 'grep :tests:test' + cmd = "grep :tests:test" # check that tests ran (time, output, error) = shell(cmd, body, args.verbose) - if output == '': - print('No tests detected.') + if output == "": + print("No tests detected.") # no tests: either build failure or task not yet reached, skip further check else: if args.relax: # this will match failed ansible tasks as well - cmd = 'grep -E "^&gt; Task *.* FAILED|^\w+\.*.*[>|&gt;] \w*.* FAILED%s"' %
("|PASSED" if args.all else "") + cmd = 'grep -E "^> Task *.* FAILED|^\w+\.*.*[>|>] \w*.* FAILED%s"' % ( + "|PASSED" if args.all else "" + ) else: - cmd = 'grep -E "^> Task *.* FAILED|^[\w.]+\s*[>|>] \w*.* FAILED%s"' % ("|PASSED" if args.all else "") + cmd = 'grep -E "^> Task *.* FAILED|^[\w.]+\s*[>|>] \w*.* FAILED%s"' % ( + "|PASSED" if args.all else "" + ) (time, output, error) = shell(cmd, body, args.verbose) - if output == '': - print('All tests passing.') + if output == "": + print("All tests passing.") else: - print(output.replace('>', '>')), + print(output.replace(">", ">")), + def reportBuildStatus(args, body): - lines = body.decode('utf8').rstrip('\n').rsplit('\n', 1) + lines = body.decode("utf8").rstrip("\n").rsplit("\n", 1) if len(lines) == 2: output = lines[1] - output = re.sub('<[^<]+?>', '', output).strip() + output = re.sub("<[^<]+?>", "", output).strip() else: output = None - if output and ('Finished: ' in output or output.startswith('Done.') or ('exceeded' in output and 'terminated' in output)): + if output and ( + "Finished: " in output + or output.startswith("Done.") + or ("exceeded" in output and "terminated" in output) + ): print(output) return (0, True) else: - print('Build: ONGOING') + print("Build: ONGOING") if output: print(output) return (0, False) + def cat(args): def getComponentList(components): list = [] - for k,v in components.items(): + for k, v in components.items(): if v > 1: for i in range(v): - list.append('%s%d' % (k, i)) + list.append("%s%d" % (k, i)) else: list.append(k) return list def getComponentLogs(component): - url = '%s/artifact/%s/%s/%s_logs.log' % (getJobUrl(args), args.artifactPath, component, component) - res = request('get', url, verbose = args.verbose) + url = "%s/artifact/%s/%s/%s_logs.log" % ( + getJobUrl(args), + args.artifactPath, + component, + component, + ) + res = request("get", url, verbose=args.verbose) body = res.read() if res.status == OK: return body else: - return '' + return "" def unzip(iterable): return zip(*iterable) def extractDate(line): - matches = re.search(r'\d{4}-[01]{1}\d{1}-[0-3]{1}\d{1}T[0-2]{1}\d{1}:[0-6]{1}\d{1}:[0-6]{1}\d{1}.\d{3}Z', line) + matches = re.search( + r"\d{4}-[01]{1}\d{1}-[0-3]{1}\d{1}T[0-2]{1}\d{1}:[0-6]{1}\d{1}:[0-6]{1}\d{1}.\d{3}Z", + line, + ) if matches is not None: date = matches.group(0) return date @@ -303,21 +371,18 @@ def cat(args): return None if args.ifile: - file = open('%s-build.log' % args.job, 'r') + file = open("%s-build.log" % args.job, "r") joined = file.read() file.close() - elif args.build.lower == 'travis': - print('Feature not yet supported for Travis builds.') + elif args.build.lower == "travis": + print("Feature not yet supported for Travis builds.") return 2 else: - components = { - 'controller': args.controllers, - 'invoker': args.invokers - } + components = {"controller": args.controllers, "invoker": args.invokers} logs = map(getComponentLogs, getComponentList(components)) - joined = ''.join(logs) + joined = "".join(logs) if args.ofile: - file = open('%s-build.log' % args.job, 'w') + file = open("%s-build.log" % args.job, "w") file.write(joined) file.close() @@ -326,13 +391,13 @@ def cat(args): (time, output, error) = shell(cmd, joined, args.verbose) output = output.strip() if args.sort: - parts = output.split('\n') - filter = [p for p in parts if p != ''] + parts = output.split("\n") + filter = [p for p in parts if p != ""] date = map(extractDate, filter) keyed = zip(date, parts) sort = sorted(keyed, key=lambda t: t[1]) msgs = unzip(sort)[1] - print('\n'.join(msgs)) + 
print("\n".join(msgs)) return 0 else: print(output) @@ -341,5 +406,6 @@ def cat(args): print(joined) return 0 -if __name__ == '__main__': + +if __name__ == "__main__": main() diff --git a/tools/build/redo b/tools/build/redo index 9ffa04ead7f..4f350c4a532 100755 --- a/tools/build/redo +++ b/tools/build/redo @@ -1,5 +1,4 @@ #!/usr/bin/env python - # # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with @@ -16,26 +15,24 @@ # See the License for the specific language governing permissions and # limitations under the License. # - +import argparse import os -import sys import platform -import argparse +import re import shlex import subprocess -import re +import sys # the default openwhisk location in openwhisk checkouts -defaultOpenwhisk = os.path.dirname(os.path.realpath(__file__)) + '/../../' +defaultOpenwhisk = os.path.dirname(os.path.realpath(__file__)) + "/../../" # the openwhisk env var overrides if it exists -whiskHome = os.getenv('WHISK_HOME', defaultOpenwhisk) +whiskHome = os.getenv("WHISK_HOME", defaultOpenwhisk) + def main(): args = getArgs() - if (not args.build and - not args.teardown and - not args.deploy): + if not args.build and not args.teardown and not args.deploy: args.build = True args.teardown = True args.deploy = True @@ -46,65 +43,154 @@ def main(): os.chdir(wskhome) props = {} - props['ENV'] = args.target - props['WSK_HOME'] = wskhome - props['MAIN_DOCKER_ENDPOINT'] = getDockerHost() + props["ENV"] = args.target + props["WSK_HOME"] = wskhome + props["MAIN_DOCKER_ENDPOINT"] = getDockerHost() doComponentSequence(props, args, args.components) + def doComponentSequence(props, args, components): for c in components: component = getComponent(c) if not component: if args.yaml: - file = c if c.endswith('.yml') else '%s.yml' % c - component = makeComponent('custom deployment', 'deploying using %s' % file, yaml = file, modes = 'clean') + file = c if c.endswith(".yml") else "%s.yml" % c + component = makeComponent( + "custom deployment", + "deploying using %s" % file, + yaml=file, + modes="clean", + ) elif args.gradle: - file = c if c.endswith('.gradle') else '%s.gradle' % c - component = makeComponent('custom build target', 'building using %s' % file, yaml = False, gradle = True, tasks = c) + file = c if c.endswith(".gradle") else "%s.gradle" % c + component = makeComponent( + "custom build target", + "building using %s" % file, + yaml=False, + gradle=True, + tasks=c, + ) else: - print('unknown component %s' % c) + print("unknown component %s" % c) exit(1) - if component['steps']: - doComponentSequence(props, args, component['steps']) + if component["steps"]: + doComponentSequence(props, args, component["steps"]) else: doOne(component, args, props) + def getArgs(): def detectDeployTarget(): osname = platform.system() - if osname == 'Linux': - return 'local' - elif osname == 'Darwin': - if os.getenv('DOCKER_HOST', None) is not None: + if osname == "Linux": + return "local" + elif osname == "Darwin": + if os.getenv("DOCKER_HOST", None) is not None: # docker-machine typically has docker host set in the environment - return 'docker-machine' + return "docker-machine" else: # otherwise assume docker-for-mac - return 'local' + return "local" else: return None - parser = argparse.ArgumentParser(description='[re]build and [re]deploy a whisk component if no args are given, otherwise do what is instructed') - parser.add_argument('-b', '--build', help='build component', action='store_const', const=True, 
default=False) - parser.add_argument('-x', '--teardown', help='teardown component', action='store_const', const=True, default=False) - parser.add_argument('-d', '--deploy', help='deploy component', action='store_const', const=True, default=False) - parser.add_argument('-t', '--target', help='deploy target (one of [docker-machine, local])', default=detectDeployTarget()) - parser.add_argument('-y', '--yaml', help='deploy target using inferred YAML file if component is not one of known targets', action='store_const', const=True, default=False) - parser.add_argument('-g', '--gradle', help='use target using inferred gradle file if component is not one of known targets', action='store_const', const=True, default=False) - parser.add_argument('-n', '--just-print', help='prints the component configuration but does not run any targets', action='store_const', const=True, default=False, dest='skiprun') - parser.add_argument('-c', '--list-components', help='list known component names and exit', action='store_const', const=True, default=False, dest='list') - parser.add_argument('-a', '--additional-task-arguments', dest='extraArgs', action='append', help='pass additional arguments to gradle build') - parser.add_argument('-e', '--extra-ansible-vars', dest='extraAnsibleVars', action='append', help='pass extra vars to ansible-playbook') - parser.add_argument('components', nargs = '*', help='component name(s) to run (in order specified if more than one)') - parser.add_argument('--dir', help='whisk home directory') + parser = argparse.ArgumentParser( + description="[re]build and [re]deploy a whisk component if no args are given, otherwise do what is instructed" + ) + parser.add_argument( + "-b", + "--build", + help="build component", + action="store_const", + const=True, + default=False, + ) + parser.add_argument( + "-x", + "--teardown", + help="teardown component", + action="store_const", + const=True, + default=False, + ) + parser.add_argument( + "-d", + "--deploy", + help="deploy component", + action="store_const", + const=True, + default=False, + ) + parser.add_argument( + "-t", + "--target", + help="deploy target (one of [docker-machine, local])", + default=detectDeployTarget(), + ) + parser.add_argument( + "-y", + "--yaml", + help="deploy target using inferred YAML file if component is not one of known targets", + action="store_const", + const=True, + default=False, + ) + parser.add_argument( + "-g", + "--gradle", + help="build target using inferred gradle file if component is not one of known targets", + action="store_const", + const=True, + default=False, + ) + parser.add_argument( + "-n", + "--just-print", + help="prints the component configuration but does not run any targets", + action="store_const", + const=True, + default=False, + dest="skiprun", + ) + parser.add_argument( + "-c", + "--list-components", + help="list known component names and exit", + action="store_const", + const=True, + default=False, + dest="list", + ) + parser.add_argument( + "-a", + "--additional-task-arguments", + dest="extraArgs", + action="append", + help="pass additional arguments to gradle build", + ) + parser.add_argument( + "-e", + "--extra-ansible-vars", + dest="extraAnsibleVars", + action="append", + help="pass extra vars to ansible-playbook", + ) + parser.add_argument( + "components", + nargs="*", + help="component name(s) to run (in order specified if more than one)", + ) + parser.add_argument("--dir", help="whisk home directory") args = parser.parse_args() if args.target is None: - print('Use "--target" to specify a
deployment target because one ' - 'could not be determined automatically for your platform ' - '(supported platforms are GNU/Linux (Ubuntu) and Mac OS X.') + print( + 'Use "--target" to specify a deployment target because one ' + "could not be determined automatically for your platform " + "(supported platforms are GNU/Linux (Ubuntu) and Mac OS X)." + ) exit(-1) if args.dir is None: @@ -115,9 +201,9 @@ def getArgs(): args.dir = whiskHome if args.list: - print("{:<27}{:<40}".format(bold('component'), bold('description'))) + print("{:<27}{:<40}".format(bold("component"), bold("description"))) for c in Components: - print("{:<30}{:<40}".format(hilite(c['name']), c['description'])) + print("{:<30}{:<40}".format(hilite(c["name"]), c["description"])) exit(0) elif not args.components: parser.print_usage() @@ -125,8 +211,9 @@ def getArgs(): else: return args + class Playbook: - cmd = 'ansible-playbook' + cmd = "ansible-playbook" dir = False file = False @@ -140,231 +227,216 @@ class Playbook: def __init__(self, dir, file, modes, env): self.dir = dir self.file = file - self.modes = modes.split(',') + self.modes = modes.split(",") self.env = env def path(self, basedir): - return basedir + '/' + self.dir + return basedir + "/" + self.dir - def execcmd(self, props, mode = False, extraAnsibleVars = []): + def execcmd(self, props, mode=False, extraAnsibleVars=[]): if self.dir and self.file and (mode is False or mode in self.modes): - cmd = [ self.cmd ] + cmd = [self.cmd] if self.env: - cmd.append('-i %s/%s' % (self.env, props['ENV'])) + cmd.append("-i %s/%s" % (self.env, props["ENV"])) cmd.append(self.file) if mode: - cmd.append('-e mode=%s' % mode) + cmd.append("-e mode=%s" % mode) if extraAnsibleVars: - cmd.append(' '.join(map(lambda x: "-e '" + str(x) + "'", extraAnsibleVars))) - return ' '.join(cmd) + cmd.append( + " ".join(map(lambda x: "-e '" + str(x) + "'", extraAnsibleVars)) + ) + return " ".join(cmd) + class Gradle: - cmd = 'gradlew' + cmd = "gradlew" tasks = False components = False - def __init__(self, tasks, components = False): - self.tasks = tasks.split(',') + def __init__(self, tasks, components=False): + self.tasks = tasks.split(",") self.components = components - def execcmd(self, props, task, extraArgs = ''): + def execcmd(self, props, task, extraArgs=""): if task: if self.components and self.components is not True: - parts = map(lambda c: '%s:%s' % (c, task), self.components.split(',')) - parts = ' '.join(parts) + parts = map(lambda c: "%s:%s" % (c, task), self.components.split(",")) + parts = " ".join(parts) else: parts = task - dh = props['MAIN_DOCKER_ENDPOINT'] - return '%s %s %s --parallel %s' % ( - props['WSK_HOME'] + '/' + self.cmd, - parts, - extraArgs, - ('-PdockerHost=%s' % dh) if dh else '') + dh = props["MAIN_DOCKER_ENDPOINT"] + return "%s %s %s --parallel %s" % ( + props["WSK_HOME"] + "/" + self.cmd, + parts, + extraArgs, + ("-PdockerHost=%s" % dh) if dh else "", + ) + def getDockerHost(): - dh = os.getenv('DOCKER_HOST') - if dh is not None and dh.startswith('tcp://'): + dh = os.getenv("DOCKER_HOST") + if dh is not None and dh.startswith("tcp://"): return dh[6:] -def makeComponent(name, # component name, implies playbook default and gradle tasks roots - description, - yaml = True, # true for default file name else the file name - modes = '', - env = 'environments', - dir = 'ansible', - gradle = False, # gradle buildable iff true - tasks = 'distDocker', - steps = None): # comma separated, runs these steps in sequence, each step is a reference to another component
(yaml/gradle not allowed) - yaml = ('%s.yml' % name) if yaml is True else yaml + +def makeComponent( + name, # component name, implies playbook default and gradle tasks roots + description, + yaml=True, # true for default file name else the file name + modes="", + env="environments", + dir="ansible", + gradle=False, # gradle buildable iff true + tasks="distDocker", + steps=None, +): # comma separated, runs these steps in sequence, each step is a reference to another component (yaml/gradle not allowed) + yaml = ("%s.yml" % name) if yaml is True else yaml playbook = Playbook(dir, yaml, modes, env) if yaml is not False else None gradle = Gradle(tasks, gradle) if gradle is not False else None if steps and (playbook is not None or gradle is not None): - print('Cannot create component "%s" with a sequence of steps and also ' - 'a playbook with gradle build target' % name) + print( + 'Cannot create component "%s" with a sequence of steps and also ' + "a playbook with gradle build target" % name + ) exit(-1) elif steps: - steps = map(lambda c: c.strip(), steps.split(',')) - return { 'name': name, 'description': description, 'playbook': playbook, 'gradle': gradle, 'steps': steps } + steps = map(lambda c: c.strip(), steps.split(",")) + return { + "name": name, + "description": description, + "playbook": playbook, + "gradle": gradle, + "steps": steps, + } -Components = [ - makeComponent('fresh', - 'setup, build, and deploy a fresh whisk system using couchdb', - yaml = False, - steps = 'setup, couchdb, initdb, wipedb, deploy, catalog'), - - makeComponent('fmt', - 'apply source code formats', - gradle = True, - yaml = False, - tasks = 'scalafmtAll'), - - makeComponent('setup', - 'system setup'), - - makeComponent('prereq', - 'install requisites'), - - makeComponent('couchdb', - 'deploy couchdb', - modes = 'clean'), - - makeComponent('initdb', - 'initialize db with guest/system keys'), - - makeComponent('wipedb', - 'recreate main db for entities', - yaml = 'wipe.yml'), - - makeComponent('elasticsearch', - 'deploy elasticsearch', - modes = 'clean'), - - makeComponent('build', - 'build system', - yaml = False, - gradle = True), - - makeComponent('deploy', - 'build/deploy system', - yaml = 'openwhisk.yml', - modes = 'clean', - gradle = True), - - makeComponent('teardown', - 'teardown all deployed containers', - yaml = 'teardown.yml'), - - makeComponent('kafka', - 'build/deploy kafka', - modes = 'clean'), - - makeComponent('controller', - 'build/deploy controller', - modes = 'clean', - gradle = 'core:controller'), - - makeComponent('invoker', - 'build/deploy invoker', - modes = 'clean', - gradle = ':core:invoker'), - - makeComponent('edge', - 'deploy edge'), - - makeComponent('cli', - 'download cli from api host', - modes = 'clean', - yaml = 'downloadcli.yml'), - - makeComponent('catalog', - 'install catalog', - yaml = 'postdeploy.yml'), - - makeComponent('apigw', - 'deploy api gateway', - gradle = False, - modes = 'clean', - yaml = 'routemgmt.yml apigateway.yml'), +Components = [ + makeComponent( + "fresh", + "setup, build, and deploy a fresh whisk system using couchdb", + yaml=False, + steps="setup, couchdb, initdb, wipedb, deploy, catalog", + ), + makeComponent( + "fmt", "apply source code formats", gradle=True, yaml=False, tasks="scalafmtAll" + ), + makeComponent("setup", "system setup"), + makeComponent("prereq", "install requisites"), + makeComponent("couchdb", "deploy couchdb", modes="clean"), + makeComponent("initdb", "initialize db with guest/system keys"), + makeComponent("wipedb", "recreate main 
db for entities", yaml="wipe.yml"), + makeComponent("elasticsearch", "deploy elasticsearch", modes="clean"), + makeComponent("build", "build system", yaml=False, gradle=True), + makeComponent( + "deploy", + "build/deploy system", + yaml="openwhisk.yml", + modes="clean", + gradle=True, + ), + makeComponent("teardown", "teardown all deployed containers", yaml="teardown.yml"), + makeComponent("kafka", "build/deploy kafka", modes="clean"), + makeComponent( + "controller", "build/deploy controller", modes="clean", gradle="core:controller" + ), + makeComponent( + "invoker", "build/deploy invoker", modes="clean", gradle=":core:invoker" + ), + makeComponent("edge", "deploy edge"), + makeComponent( + "cli", "download cli from api host", modes="clean", yaml="downloadcli.yml" + ), + makeComponent("catalog", "install catalog", yaml="postdeploy.yml"), + makeComponent( + "apigw", + "deploy api gateway", + gradle=False, + modes="clean", + yaml="routemgmt.yml apigateway.yml", + ), # the following (re)build images via gradle - makeComponent('runtime:([\w.-]+)', - 'build a runtime action container, matching name using the regex; NOTE: must use --dir for path to runtime directory', - yaml = False, - gradle = 'core:$1:distDocker'), - - makeComponent('actionproxy', - 'build action proxy container', - yaml = False, - gradle = 'tools:actionProxy'), - + makeComponent( + "runtime:([\w.-]+)", + "build a runtime action container, matching name using the regex; NOTE: must use --dir for path to runtime directory", + yaml=False, + gradle="core:$1:distDocker", + ), + makeComponent( + "actionproxy", + "build action proxy container", + yaml=False, + gradle="tools:actionProxy", + ), # required for tests - makeComponent('props', - 'build whisk.properties file (required for tests)', - yaml = 'properties.yml'), - + makeComponent( + "props", + "build whisk.properties file (required for tests)", + yaml="properties.yml", + ), # convenient to run all tests - makeComponent('tests', - 'run all tests', - yaml = False, - gradle = True, - tasks = 'test'), - - makeComponent('unit-tests', - 'run units tests', - yaml = False, - tasks = 'testUnit', - gradle = 'tests'), - - makeComponent('standalone', - 'run standalone server', - yaml = False, - tasks = 'bootRun', - gradle = 'core:standalone') + makeComponent("tests", "run all tests", yaml=False, gradle=True, tasks="test"), + makeComponent( + "unit-tests", "run units tests", yaml=False, tasks="testUnit", gradle="tests" + ), + makeComponent( + "standalone", + "run standalone server", + yaml=False, + tasks="bootRun", + gradle="core:standalone", + ), ] + def getComponent(component): for c in Components: - if c['name'] == component: + if c["name"] == component: return c else: - parts = re.match(c['name'], component) + parts = re.match(c["name"], component) if parts: name = parts.group(1) - return makeComponent('runtime:' + name, - 'build a ' + name + ' runtime action container', - yaml = False, - gradle = 'core:' + name) + return makeComponent( + "runtime:" + name, + "build a " + name + " runtime action container", + yaml=False, + gradle="core:" + name, + ) return False + def bold(string): if sys.stdin.isatty(): attr = [] - attr.append('1') - return '\x1b[%sm%s\x1b[0m' % (';'.join(attr), string) + attr.append("1") + return "\x1b[%sm%s\x1b[0m" % (";".join(attr), string) else: return string -def hilite(string, isError = False): + +def hilite(string, isError=False): if sys.stdin.isatty(): attr = [] - attr.append('34' if not isError else '31') # blue or red if isError - attr.append('1') - return 
'\x1b[%sm%s\x1b[0m' % (';'.join(attr), string) + attr.append("34" if not isError else "31") # blue or red if isError + attr.append("1") + return "\x1b[%sm%s\x1b[0m" % (";".join(attr), string) else: return string -def run(cmd, dir, skiprun, allowToFail = False): + +def run(cmd, dir, skiprun, allowToFail=False): if cmd is not None: print(hilite(cmd)) if not skiprun: args = shlex.split(cmd) - p = subprocess.Popen(args, cwd = dir) + p = subprocess.Popen(args, cwd=dir) p.wait() if p.returncode and not allowToFail: - abort('command failed', p.returncode) + abort("command failed", p.returncode) + def runAndGetStdout(cmd): print(hilite(cmd)) @@ -374,38 +446,43 @@ def runAndGetStdout(cmd): # stdout/stderr may be either text or bytes, depending on Python # version. In the latter case, decode to text. if isinstance(out, bytes): - out = out.decode('utf-8') + out = out.decode("utf-8") if isinstance(err, bytes): - err = err.decode('utf-8') + err = err.decode("utf-8") if p.returncode: print(hilite(out)) print(hilite(err, True)) - abort('command failed', p.returncode) + abort("command failed", p.returncode) return out -def abort(msg, code = -1): + +def abort(msg, code=-1): print(hilite(msg, True)) exit(code) + def doOne(component, args, props): - basedir = props['WSK_HOME'] - playbook = component['playbook'] - gradle = component['gradle'] - print(bold(component['description'])) + basedir = props["WSK_HOME"] + playbook = component["playbook"] + gradle = component["gradle"] + print(bold(component["description"])) - extraArgs = '' if args.extraArgs is None or [] else ' '.join(map(str, args.extraArgs)) + extraArgs = "" if not args.extraArgs else " ".join(map(str, args.extraArgs)) if args.build and gradle is not None: cmd = gradle.execcmd(props, gradle.tasks[0], extraArgs) run(cmd, basedir, args.skiprun) if args.teardown and playbook is not None: - cmd = playbook.execcmd(props, 'clean', extraAnsibleVars = args.extraAnsibleVars) + cmd = playbook.execcmd(props, "clean", extraAnsibleVars=args.extraAnsibleVars) run(cmd, playbook.path(basedir), args.skiprun) if args.deploy and playbook is not None: - cmd = playbook.execcmd(props, extraAnsibleVars = args.extraAnsibleVars) + cmd = playbook.execcmd(props, extraAnsibleVars=args.extraAnsibleVars) run(cmd, playbook.path(basedir), args.skiprun) -if __name__ == '__main__': + +if __name__ == "__main__": main() diff --git a/tools/db/cleanUpActivations.py b/tools/db/cleanUpActivations.py index a51f73e42e1..a035571aa70 100755 --- a/tools/db/cleanUpActivations.py +++ b/tools/db/cleanUpActivations.py @@ -18,13 +18,13 @@ * limitations under the License.
*/ """ - import argparse import time + import couchdb.client try: - long # Python 2 + long # Python 2 except NameError: long = int # Python 3 @@ -37,18 +37,46 @@ def deleteOldActivations(args): db = couchdb.client.Server(args.dbUrl)[args.dbName] endkey = long(time.time() * 1000) - args.days * DAY while True: - activationIds = db.view("activations/byDate", limit=args.docsPerRequest, start_key=0, end_key=endkey) + activationIds = db.view( + "activations/byDate", limit=args.docsPerRequest, start_key=0, end_key=endkey + ) if activationIds: - documentsToDelete = [couchdb.client.Document(_id=entry.value[0], _rev=entry.value[1], _deleted=True) for entry in activationIds] + documentsToDelete = [ + couchdb.client.Document( + _id=entry.value[0], _rev=entry.value[1], _deleted=True + ) + for entry in activationIds + ] db.update(documentsToDelete) else: return -parser = argparse.ArgumentParser(description="Utility to delete old activations older than x days in given database.") -parser.add_argument("--dbUrl", required=True, help="Server URL of the database, that has to be cleaned of old activations. E.g. 'https://xxx:yyy@domain.couch.com:443'") -parser.add_argument("--dbName", required=True, help="Name of the Database of the activations to be truncated.") -parser.add_argument("--days", required=True, type=int, help="How many days of the activations to be kept.") -parser.add_argument("--docsPerRequest", type=int, default=200, help="Number of documents handled on each CouchDb Request. Default is 200.") + +parser = argparse.ArgumentParser( + description="Utility to delete old activations older than x days in given database." +) +parser.add_argument( + "--dbUrl", + required=True, + help="Server URL of the database, that has to be cleaned of old activations. E.g. 'https://xxx:yyy@domain.couch.com:443'", +) +parser.add_argument( + "--dbName", + required=True, + help="Name of the Database of the activations to be truncated.", +) +parser.add_argument( + "--days", + required=True, + type=int, + help="How many days of the activations to be kept.", +) +parser.add_argument( + "--docsPerRequest", + type=int, + default=200, + help="Number of documents handled on each CouchDb Request. Default is 200.", +) args = parser.parse_args() deleteOldActivations(args) diff --git a/tools/db/cosmosDbUtil.py b/tools/db/cosmosDbUtil.py index 87c3b23e287..d833f0fcc46 100755 --- a/tools/db/cosmosDbUtil.py +++ b/tools/db/cosmosDbUtil.py @@ -1,5 +1,4 @@ #!/usr/bin/env python - # # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with @@ -16,16 +15,16 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - -from collections import namedtuple +import argparse import glob -import sys import os -import argparse +import sys import traceback +from collections import namedtuple + +import pydocumentdb.document_client as document_client import pydocumentdb.documents as documents import pydocumentdb.errors as document_errors -import pydocumentdb.document_client as document_client try: import argcomplete @@ -36,7 +35,9 @@ # ROOT_DIR is the OpenWhisk repository root ROOT_DIR = os.path.join(os.path.join(CLI_DIR, os.pardir), os.pardir) -DbContext = namedtuple('DbContext', ['client', 'db', 'whisks', 'subjects', 'activations']) +DbContext = namedtuple( + "DbContext", ["client", "db", "whisks", "subjects", "activations"] +) verbose = False @@ -47,13 +48,11 @@ def main(): args = parse_args() verbose = args.verbose client = init_client(args) - exit_code = { - 'init': init_cmd, - 'prune': prune_cmd, - 'drop': drop_cmd - }[args.cmd](args, client) + exit_code = {"init": init_cmd, "prune": prune_cmd, "drop": drop_cmd}[args.cmd]( + args, client + ) except Exception as e: - print('Exception: ', e) + print("Exception: ", e) traceback.print_exc() exit_code = 1 @@ -61,23 +60,34 @@ def main(): def parse_args(): - parser = argparse.ArgumentParser(description='OpenWhisk CosmosDB bootstrap tool') - parser.add_argument('--endpoint', help='DB Endpoint url like https://example.documents.azure.com:443/', - required=True) - parser.add_argument('--key', help='DB access key', required=True) - parser.add_argument('-v', '--verbose', help='Verbose mode', action="store_true") - - subparsers = parser.add_subparsers(title='available commands', dest='cmd') - - propmenu = subparsers.add_parser('init', help='initialize database') - propmenu.add_argument('db', help='Database name under which the collections would be created') - propmenu.add_argument('--dir', help='Directory under which auth files are stored') - - propmenu = subparsers.add_parser('prune', help='remove stale databases created by test') - propmenu.add_argument('--prefix', help='Database name prefix which are matched for removal', default="travis-") - - propmenu = subparsers.add_parser('drop', help='drop database') - propmenu.add_argument('db', help='Database name to be removed') + parser = argparse.ArgumentParser(description="OpenWhisk CosmosDB bootstrap tool") + parser.add_argument( + "--endpoint", + help="DB endpoint URL like https://example.documents.azure.com:443/", + required=True, + ) + parser.add_argument("--key", help="DB access key", required=True) + parser.add_argument("-v", "--verbose", help="Verbose mode", action="store_true") + + subparsers = parser.add_subparsers(title="available commands", dest="cmd") + + propmenu = subparsers.add_parser("init", help="initialize database") + propmenu.add_argument( + "db", help="Database name under which the collections would be created" + ) + propmenu.add_argument("--dir", help="Directory under which auth files are stored") + + propmenu = subparsers.add_parser( + "prune", help="remove stale databases created by tests" + ) + propmenu.add_argument( + "--prefix", + help="Database name prefix which is matched for removal", + default="travis-", + ) + + propmenu = subparsers.add_parser("drop", help="drop database") + propmenu.add_argument("db", help="Database name to be removed") if argcomplete: argcomplete.autocomplete(parser) @@ -104,7 +114,7 @@ def prune_cmd(args, client): def drop_cmd(args, client): db = get_db(client, args.db) if db is not None: - client.DeleteDatabase(db['_self']) + client.DeleteDatabase(db["_self"])
log("Removed database : %s" % args.db) else: log("Database %s not found" % args.db) @@ -112,75 +122,70 @@ def drop_cmd(args, client): def init_auth(ctx): for subject in find_default_subjects(): - link = create_link(ctx.db, ctx.subjects, subject['id']) - options = {'partitionKey': subject.get('id')} + link = create_link(ctx.db, ctx.subjects, subject["id"]) + options = {"partitionKey": subject.get("id")} try: ctx.client.ReadDocument(link, options) - log('Subject already exists : ' + subject['id']) + log("Subject already exists : " + subject["id"]) except document_errors.HTTPFailure as e: if e.status_code == 404: - ctx.client.CreateDocument(ctx.subjects['_self'], subject, options) - log('Created subject : ' + subject['id']) + ctx.client.CreateDocument(ctx.subjects["_self"], subject, options) + log("Created subject : " + subject["id"]) else: raise e def create_link(db, coll, doc_id): - return 'dbs/' + db['id'] + '/colls/' + coll['id'] + '/docs/' + doc_id + return "dbs/" + db["id"] + "/colls/" + coll["id"] + "/docs/" + doc_id def find_default_subjects(): files_dir = os.path.join(ROOT_DIR, "ansible/files") for name in glob.glob1(files_dir, "auth.*"): - auth_file = open(os.path.join(files_dir, name), 'r') + auth_file = open(os.path.join(files_dir, name), "r") uuid, key = auth_file.read().strip().split(":") - subject = name[name.index('.') + 1:] + subject = name[name.index(".") + 1 :] doc = { - 'id': subject, - 'subject': subject, - 'namespaces': [ - { - 'name': subject, - 'uuid': uuid, - 'key': key - } - ] + "id": subject, + "subject": subject, + "namespaces": [{"name": subject, "uuid": uuid, "key": key}], } auth_file.close() yield doc def init_client(args): - return document_client.DocumentClient(args.endpoint, {'masterKey': args.key}) + return document_client.DocumentClient(args.endpoint, {"masterKey": args.key}) def get_db(client, db_name): - query = client.QueryDatabases('SELECT * FROM root r WHERE r.id=\'' + db_name + '\'') + query = client.QueryDatabases("SELECT * FROM root r WHERE r.id='" + db_name + "'") return next(iter(query), None) def get_or_create_db(client, db_name): db = get_db(client, db_name) if db is None: - db = client.CreateDatabase({'id': db_name}) + db = client.CreateDatabase({"id": db_name}) log('Created database "%s"' % db_name) return db def init_coll(client, db, coll_name): - query = client.QueryCollections(db['_self'], 'SELECT * FROM root r WHERE r.id=\'' + coll_name + '\'') + query = client.QueryCollections( + db["_self"], "SELECT * FROM root r WHERE r.id='" + coll_name + "'" + ) it = iter(query) coll = next(it, None) if coll is None: - collection_definition = {'id': coll_name, - 'partitionKey': - { - 'paths': ['/id'], - 'kind': documents.PartitionKind.Hash - } - } + collection_definition = { + "id": coll_name, + "partitionKey": {"paths": ["/id"], "kind": documents.PartitionKind.Hash}, + } collection_options = {} # {'offerThroughput': 10100} - coll = client.CreateCollection(db['_self'], collection_definition, collection_options) + coll = client.CreateCollection( + db["_self"], collection_definition, collection_options + ) log('Created collection "%s"' % coll_name) return coll @@ -190,5 +195,5 @@ def log(msg): print(msg) -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/tools/db/deleteLogsFromActivations.py b/tools/db/deleteLogsFromActivations.py index 7fa09733710..db23bbc1cc6 100755 --- a/tools/db/deleteLogsFromActivations.py +++ b/tools/db/deleteLogsFromActivations.py @@ -18,23 +18,25 @@ * limitations under the License. 
*/ """ - import argparse import time + import couchdb.client try: - long # Python 2 + long # Python 2 except NameError: long = int # Python 3 DAY = 1000 * 60 * 60 * 24 + def removeLogFromActivation(viewResult): doc = viewResult.doc doc["logs"] = [] return doc + # # Delete activations # @@ -42,18 +44,47 @@ def deleteLogsFromOldActivations(args): db = couchdb.client.Server(args.dbUrl)[args.dbName] endkey = long(time.time() * 1000) - args.days * DAY while True: - activations = db.view("logCleanup/byDateWithLogs", limit=args.docsPerRequest, start_key=0, end_key=endkey, include_docs=True) + activations = db.view( + "logCleanup/byDateWithLogs", + limit=args.docsPerRequest, + start_key=0, + end_key=endkey, + include_docs=True, + ) if activations: - activationsWithoutLogs = [removeLogFromActivation(activation) for activation in activations] + activationsWithoutLogs = [ + removeLogFromActivation(activation) for activation in activations + ] db.update(activationsWithoutLogs) else: return -parser = argparse.ArgumentParser(description="Utility to delete logs from activations that are older than x days in given database.") -parser.add_argument("--dbUrl", required=True, help="Server URL of the database, that has to be cleaned of old activations. E.g. 'https://xxx:yyy@domain.couch.com:443'") -parser.add_argument("--dbName", required=True, help="Name of the Database of the activations to be truncated.") -parser.add_argument("--days", required=True, type=int, help="How many days of the logs in activations to be kept.") -parser.add_argument("--docsPerRequest", type=int, default=20, help="Number of documents handled on each CouchDb Request. Default is 20.") + +parser = argparse.ArgumentParser( + description="Utility to delete logs from activations that are older than x days in given database." +) +parser.add_argument( + "--dbUrl", + required=True, + help="Server URL of the database, that has to be cleaned of old activations. E.g. 'https://xxx:yyy@domain.couch.com:443'", +) +parser.add_argument( + "--dbName", + required=True, + help="Name of the Database of the activations to be truncated.", +) +parser.add_argument( + "--days", + required=True, + type=int, + help="How many days of the logs in activations to be kept.", +) +parser.add_argument( + "--docsPerRequest", + type=int, + default=20, + help="Number of documents handled on each CouchDb Request. Default is 20.", +) args = parser.parse_args() deleteLogsFromOldActivations(args) diff --git a/tools/db/moveCodeToAttachment.py b/tools/db/moveCodeToAttachment.py index 254362c9df7..fe41aa213ee 100755 --- a/tools/db/moveCodeToAttachment.py +++ b/tools/db/moveCodeToAttachment.py @@ -1,5 +1,5 @@ #!/usr/bin/env python -'''Python script update actions. +"""Python script update actions. /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with @@ -16,54 +16,60 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -''' - +""" import argparse -import couchdb.client import time + +import couchdb.client from couchdb import ResourceNotFound + def updateNonJavaAction(db, doc, id): updated = False - code = doc['exec']['code'] + code = doc["exec"]["code"] if not isinstance(code, dict): - db.put_attachment(doc, code, 'codefile', 'text/plain') + db.put_attachment(doc, code, "codefile", "text/plain") doc = db.get(id) - doc['exec']['code'] = { - 'attachmentName': 'codefile', - 'attachmentType': 'text/plain' + doc["exec"]["code"] = { + "attachmentName": "codefile", + "attachmentType": "text/plain", } db.save(doc) updated = True return updated + def createNonMigratedDoc(db): try: - db['_design/nonMigrated'] + db["_design/nonMigrated"] except ResourceNotFound: - db.save({ - '_id': '_design/nonMigrated', - 'language': 'javascript', - 'views': { - 'actions': { - 'map': 'function (doc) { var isAction = function (doc) { return (doc.exec !== undefined) }; var isMigrated = function (doc) { return (doc._attachments !== undefined && doc._attachments.codefile !== undefined && typeof doc.code != \'string\') }; if (isAction(doc) && !isMigrated(doc)) try { emit([doc.name]); } catch (e) {} }' - } + db.save( + { + "_id": "_design/nonMigrated", + "language": "javascript", + "views": { + "actions": { + "map": "function (doc) { var isAction = function (doc) { return (doc.exec !== undefined) }; var isMigrated = function (doc) { return (doc._attachments !== undefined && doc._attachments.codefile !== undefined && typeof doc.code != 'string') }; if (isAction(doc) && !isMigrated(doc)) try { emit([doc.name]); } catch (e) {} }" + } + }, } - }) + ) + def deleteNonMigratedDoc(db): - del db['_design/nonMigrated'] + del db["_design/nonMigrated"] + def main(args): db = couchdb.client.Server(args.dbUrl)[args.dbName] createNonMigratedDoc(db) - docs = db.view('_design/nonMigrated/_view/actions') + docs = db.view("_design/nonMigrated/_view/actions") docCount = len(docs) docIndex = 1 - print('Number of actions to update: {}'.format(docCount)) + print("Number of actions to update: {}".format(docCount)) for row in docs: id = row.id @@ -71,15 +77,15 @@ def main(args): print('Updating action {0}/{1}: "{2}"'.format(docIndex, docCount, id)) - if 'exec' in doc and 'code' in doc['exec']: - if doc['exec']['kind'] != 'java': + if "exec" in doc and "code" in doc["exec"]: + if doc["exec"]["kind"] != "java": updated = updateNonJavaAction(db, doc, id) else: updated = False if updated: print('Updated action: "{0}"'.format(id)) - time.sleep(.500) + time.sleep(0.500) else: print('Action already updated: "{0}"'.format(id)) @@ -87,9 +93,16 @@ def main(args): deleteNonMigratedDoc(db) -parser = argparse.ArgumentParser(description='Utility to update database action schema.') -parser.add_argument('--dbUrl', required=True, help='Server URL of the database. E.g. \"https://xxx:yyy@domain.couch.com:443\"') -parser.add_argument('--dbName', required=True, help='Name of the Database to update.') + +parser = argparse.ArgumentParser( + description="Utility to update database action schema." +) +parser.add_argument( + "--dbUrl", + required=True, + help='Server URL of the database. E.g. "https://xxx:yyy@domain.couch.com:443"', +) +parser.add_argument("--dbName", required=True, help="Name of the Database to update.") args = parser.parse_args() main(args) diff --git a/tools/db/replicateDbs.py b/tools/db/replicateDbs.py index 7de83a23576..8de38d60da5 100755 --- a/tools/db/replicateDbs.py +++ b/tools/db/replicateDbs.py @@ -18,19 +18,19 @@ * limitations under the License. 
*/ """ - - import argparse -import time +import functools import re +import time + import couchdb.client -import functools + def retry(fn, retries): try: return fn() except: - if (retries > 0): + if retries > 0: time.sleep(1) return retry(fn, retries - 1) else: @@ -59,13 +59,21 @@ def isExcluded(dbName): # is the databaseName is in the list of excluded database isNameExcluded = dbNameWithoutPrefix in excludedDatabases # if one of the basenames matches, the database is excluded - isBaseNameExcluded = functools.reduce(lambda x, y: x or y, [dbNameWithoutPrefix.startswith(en) for en in excludedBaseNames], False) + isBaseNameExcluded = functools.reduce( + lambda x, y: x or y, + [dbNameWithoutPrefix.startswith(en) for en in excludedBaseNames], + False, + ) return isNameExcluded or isBaseNameExcluded # Create backup of all databases with given prefix print("----- Create backups -----") - for db in [dbName for dbName in sourceDb if dbName.startswith(args.dbPrefix) and not isExcluded(dbName)]: - backupDb = backupPrefix + db if not args.continuous else 'continuous_' + db + for db in [ + dbName + for dbName in sourceDb + if dbName.startswith(args.dbPrefix) and not isExcluded(dbName) + ]: + backupDb = backupPrefix + db if not args.continuous else "continuous_" + db replicateDesignDocument = { "_id": backupDb, "source": args.sourceDbUrl + "/" + db, @@ -78,7 +86,9 @@ def isExcluded(dbName): filterName = "snapshotFilters" filterDesignDocument = sourceDb[db].get("_design/%s" % filterName) if not args.continuous and filterDesignDocument: - replicateDesignDocument["filter"] = "%s/withoutDeletedAndDesignDocuments" % filterName + replicateDesignDocument["filter"] = ( + "%s/withoutDeletedAndDesignDocuments" % filterName + ) replicator.save(replicateDesignDocument) def isBackupDb(dbName): @@ -92,14 +102,20 @@ def isExpired(timestamp): # Delete all documents in the _replicator-database of old backups to avoid that they continue after they are deprecated print("----- Delete backup-documents older than %d seconds -----" % args.expires) - for doc in [doc for doc in replicator.view('_all_docs', include_docs=True) if isBackupDb(doc.id) and isExpired(extractTimestamp(doc.id))]: + for doc in [ + doc + for doc in replicator.view("_all_docs", include_docs=True) + if isBackupDb(doc.id) and isExpired(extractTimestamp(doc.id)) + ]: print("deleting backup document: %s" % doc.id) # Get again the latest version of the document to delete the right revision and avoid Conflicts retry(lambda: replicator.delete(replicator[doc.id]), 5) # Delete all backup-databases, that are older than specified print("----- Delete backups older than %d seconds -----" % args.expires) - for db in [db for db in targetDb if isBackupDb(db) and isExpired(extractTimestamp(db))]: + for db in [ + db for db in targetDb if isBackupDb(db) and isExpired(extractTimestamp(db)) + ]: print("deleting backup: %s" % db) targetDb.delete(db) @@ -114,30 +130,72 @@ def replayDatabases(args): for db in [dbName for dbName in sourceDb if dbName.startswith(args.dbPrefix)]: plainDbName = db.replace(args.dbPrefix, "") - (identifier, _) = sourceDb["_replicator"].save({ - "source": args.sourceDbUrl + "/" + db, - "target": args.targetDbUrl + "/" + plainDbName, - "create_target": True - }) + (identifier, _) = sourceDb["_replicator"].save( + { + "source": args.sourceDbUrl + "/" + db, + "target": args.targetDbUrl + "/" + plainDbName, + "create_target": True, + } + ) print("replaying backup: %s -> %s (%s)" % (db, plainDbName, identifier)) -parser = 
argparse.ArgumentParser(description="Utility to create a backup of all databases with the defined prefix.") -parser.add_argument("--sourceDbUrl", required=True, help="Server URL of the source database, that has to be backed up. E.g. 'https://xxx:yyy@domain.couch.com:443'") -parser.add_argument("--targetDbUrl", required=True, help="Server URL of the target database, where the backup is stored. Like sourceDbUrl.") -subparsers = parser.add_subparsers(help='sub-command help') + +parser = argparse.ArgumentParser( + description="Utility to create a backup of all databases with the defined prefix." +) +parser.add_argument( + "--sourceDbUrl", + required=True, + help="Server URL of the source database that has to be backed up. E.g. 'https://xxx:yyy@domain.couch.com:443'", +) +parser.add_argument( + "--targetDbUrl", + required=True, + help="Server URL of the target database where the backup is stored. Like sourceDbUrl.", +) +subparsers = parser.add_subparsers(help="sub-command help") # Replicate -replicateParser = subparsers.add_parser("replicate", help="Replicates source databases to the target database.") -replicateParser.add_argument("--dbPrefix", required=True, help="Prefix of the databases, that should be backed up.") -replicateParser.add_argument("--expires", required=True, type=int, help="Deletes all backups, that are older than the given value in seconds.") -replicateParser.add_argument("--continuous", action="store_true", help="Wether or not the backup should be continuous") -replicateParser.add_argument("--exclude", default="", help="Comma separated list of database names, that should not be backed up. (Without prefix).") -replicateParser.add_argument("--excludeBaseName", default="", help="Comma separated list of database base names. All databases, that have this basename in their name will not be backed up. (Without prefix).") +replicateParser = subparsers.add_parser( + "replicate", help="Replicates source databases to the target database." +) +replicateParser.add_argument( + "--dbPrefix", + required=True, + help="Prefix of the databases that should be backed up.", +) +replicateParser.add_argument( + "--expires", + required=True, + type=int, + help="Deletes all backups that are older than the given value in seconds.", +) +replicateParser.add_argument( + "--continuous", + action="store_true", + help="Whether or not the backup should be continuous", +) +replicateParser.add_argument( + "--exclude", + default="", + help="Comma separated list of database names that should not be backed up. (Without prefix).", +) +replicateParser.add_argument( + "--excludeBaseName", + default="", + help="Comma separated list of database base names. All databases that have this basename in their name will not be backed up. (Without prefix).", +) replicateParser.set_defaults(func=replicateDatabases) # Replay -replicateParser = subparsers.add_parser("replay", help="Replays source databases to the target database.") -replicateParser.add_argument("--dbPrefix", required=True, help="Prefix of the databases, that should be replayed. Usually 'backup_{TIMESTAMP}_'") +replicateParser = subparsers.add_parser( + "replay", help="Replays source databases to the target database." +) +replicateParser.add_argument( + "--dbPrefix", + required=True, + help="Prefix of the databases that should be replayed.
Usually 'backup_{TIMESTAMP}_'", +) replicateParser.set_defaults(func=replayDatabases) arguments = parser.parse_args() diff --git a/tools/ow-utils/build.gradle b/tools/ow-utils/build.gradle index 45f72b936fb..ec0813404f6 100644 --- a/tools/ow-utils/build.gradle +++ b/tools/ow-utils/build.gradle @@ -38,4 +38,3 @@ task cleanup(type: Delete) { delete 'genssl.sh' delete 'openwhisk-server-key.pem' } - diff --git a/tools/owperf/README.md b/tools/owperf/README.md index ccf79b63614..abbf972996e 100644 --- a/tools/owperf/README.md +++ b/tools/owperf/README.md @@ -98,4 +98,3 @@ Aside from that, the tool also counts **errors**. Failed invocations - of action ## Acknowledgements The owperf tool has been developed by IBM Research as part of the [CLASS](https://class-project.eu/) EU project. CLASS aims to integrate OpenWhisk as a foundation for latency-sensitive polyglot event-driven big-data analytics platform running on a compute continuum from the cloud to the edge. CLASS is funded by the European Union's Horizon 2020 Programme grant agreement No. 780622. - diff --git a/tools/owperf/setup.sh b/tools/owperf/setup.sh index 91397a6f8fb..b713e7029b5 100755 --- a/tools/owperf/setup.sh +++ b/tools/owperf/setup.sh @@ -78,4 +78,3 @@ remove_assets $wskparams if [ "$op" = "s" ]; then deploy_assets $wskparams fi - diff --git a/tools/travis/box-upload.py b/tools/travis/box-upload.py index fad503974f4..8e922309dd4 100755 --- a/tools/travis/box-upload.py +++ b/tools/travis/box-upload.py @@ -25,30 +25,31 @@ * limitations under the License. */ """ - from __future__ import print_function +import hashlib import os import subprocess import sys import tempfile import urllib + import humanize import requests -import hashlib def upload_file(local_file, remote_file): """Upload file.""" - if remote_file[0] == '/': + if remote_file[0] == "/": remote_file = remote_file[1:] - url = "http://DamCYhF8.mybluemix.net/upload?%s" % \ - urllib.parse.urlencode({"name": remote_file}) + url = "http://DamCYhF8.mybluemix.net/upload?%s" % urllib.parse.urlencode( + {"name": remote_file} + ) - r = requests.post(url, - headers={"Content-Type": "application/gzip"}, - data=open(local_file, 'rb')) + r = requests.post( + url, headers={"Content-Type": "application/gzip"}, data=open(local_file, "rb") + ) print("Posting result", r) print(r.text) diff --git a/tools/travis/setup-docker.py b/tools/travis/setup-docker.py index 525e6f44479..cd1298eca78 100755 --- a/tools/travis/setup-docker.py +++ b/tools/travis/setup-docker.py @@ -23,7 +23,6 @@ * limitations under the License. */ """ - from __future__ import print_function import json @@ -34,12 +33,9 @@ # Read the file. DOCKER_OPTS = { - "hosts": [ - "tcp://0.0.0.0:4243", - "unix:///var/run/docker.sock" - ], + "hosts": ["tcp://0.0.0.0:4243", "unix:///var/run/docker.sock"], "storage-driver": "overlay", - "userns-remap": "default" + "userns-remap": "default", } @@ -61,7 +57,7 @@ def add_content(data): def write_to_daemon_conf(data): try: - with open(DOCKER_DAEMON_FILE, 'w') as fp: + with open(DOCKER_DAEMON_FILE, "w") as fp: json.dump(data, fp) except Exception as e: print("Failed to write to daemon file") diff --git a/tools/ubuntu-setup/README.md b/tools/ubuntu-setup/README.md index 4a481cd0035..561d291d6d6 100644 --- a/tools/ubuntu-setup/README.md +++ b/tools/ubuntu-setup/README.md @@ -80,4 +80,3 @@ bin/wsk action invoke /whisk.system/utils/echo -p message hello --result "message": "hello" } ``` -
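+# Example invocation of tools/db/replicateDbs.py (hypothetical URLs and prefix;
+# subcommands and flags as defined by the argparse setup in that script):
+#
+#   python tools/db/replicateDbs.py \
+#       --sourceDbUrl https://user:pass@source.example.com:443 \
+#       --targetDbUrl https://user:pass@backup.example.com:443 \
+#       replicate --dbPrefix whisk_ --expires 86400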