diff --git a/.flake8 b/.flake8 index 16d88aea3..0170d024a 100644 --- a/.flake8 +++ b/.flake8 @@ -6,7 +6,6 @@ exclude = # have to skip because the file should be fixed setup.py, # lots unused import because of fixtures - indy_client/test, indy_node/test, # config file docker-files/indy_config.py diff --git a/.travis.yml b/.travis.yml index c46e56af3..4a15e4dd1 100644 --- a/.travis.yml +++ b/.travis.yml @@ -18,7 +18,6 @@ before_install: install: - pip install -U -e . - - pip install -U indy-client script: - python -m indy_node.test diff --git a/CHANGELOG.md b/CHANGELOG.md index 0e03fedf5..0d4e7ec79 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,8 @@ # Hyperledger Indy Node Release Notes +* [1.6.78](#1678) + * [1.6.73](#1673) * [1.6.70](#1670) @@ -33,6 +35,66 @@ Although every attempt has been made to make this information as accurate as possible, please know there may be things that are omitted, not fully developed yet, or updates since this publication that were not included in the information below. Only the most pressing or significant items have been listed. For the entire list of tickets and or specific information about any given item, please visit the list at [Hyperleder Indy's Jira](https://jira.hyperledger.org/). Once logged in, simply navigate to Projects > Indy. +## 1.6.78 + +### Component Version Information + +| Components | Version Numbers | +| --- | --- | +| indy-plenum | 1.6.53 | +| indy-anoncreds | 1.0.11 | +| indy-node | 1.6.78 | +| | | | + +### Major Fixes + +| Description | Additional Information | Ticket Number | +| --- | --- | --- | +| Re-asking for ledger statuses and maximal consistency proofs is not canceled. | | [INDY-1740](https://jira.hyperledger.org/browse/INDY-1740) | +| Bug in calling notifier methods in Restarter. | | [INDY-1741](https://jira.hyperledger.org/browse/INDY-1741) | +| 35 view changes happened during 10 minutes after nodes failure because of invalid request. 
| | [INDY-1696](https://jira.hyperledger.org/browse/INDY-1696) | +| Requests queue is not cleared in case of reject-nym transactions. | | [INDY-1700](https://jira.hyperledger.org/browse/INDY-1700) | +| Throughput critically decreases without causing view_change. | | [INDY-1672](https://jira.hyperledger.org/browse/INDY-1672) | +| Node can't catch up large ledger. | | [INDY-1595](https://jira.hyperledger.org/browse/INDY-1595) | +| Unable to demote node in STN. | | [INDY-1621](https://jira.hyperledger.org/browse/INDY-1621) | +| View changes happen when all responses should be rejected during load testing scenario. | | [INDY-1653](https://jira.hyperledger.org/browse/INDY-1653) | +| Node doesn't write txns after disconnection from the rest nodes. | | [INDY-1580](https://jira.hyperledger.org/browse/INDY-1580) | +| Throughput is degrading if backup primary is stopped. | | [INDY-1618](https://jira.hyperledger.org/browse/INDY-1618) | +| | | | | + +### Changes - Additions - Known Issues + +| Description | Workaround | Ticket | +| --- | --- | --- | +| Switch off a replica that stopped because disconnected from a backup primary. | | [INDY-1681](https://jira.hyperledger.org/browse/INDY-1681) | +| Extend load scripts emulating non-smooth load according to the changes in the core script. | | [INDY-1667](https://jira.hyperledger.org/browse/INDY-1667) | +| Proof of stability under load. | | [INDY-1607](https://jira.hyperledger.org/browse/INDY-1607) | +| Investigate Out of memory issues with the current load testing. | | [INDY-1688](https://jira.hyperledger.org/browse/INDY-1688) | +| Do not re-verify signature for Propagates with already verified requests. | | [INDY-1649](https://jira.hyperledger.org/browse/INDY-1649) | +| POA: Require multiple signatures for important transactions. | | [INDY-1704](https://jira.hyperledger.org/browse/INDY-1704) | +| Support all FEEs txns in the load script. 
| | [INDY-1665](https://jira.hyperledger.org/browse/INDY-1665) | +| Test domain transactions with FEEs. | | [INDY-1661](https://jira.hyperledger.org/browse/INDY-1661) | +| 3PC Batch should preserve the order of requests when applying PrePrepare on non-primary. | | [INDY-1642](https://jira.hyperledger.org/browse/INDY-1642) | +| Ability to switch off (remove) replicas with no changes of F value. | | [INDY-1680](https://jira.hyperledger.org/browse/INDY-1680) | +| A node should be able to participate in BLS multi-signature only if it has a valid proof of possession. | | [INDY-1589](https://jira.hyperledger.org/browse/INDY-1589) | +| Make validator info historical data. | | [INDY-1637](https://jira.hyperledger.org/browse/INDY-1637) | +| | | | | +| **Known Issue:** Upgrade failed on pool from 1.3.62 to 1.4.66. Note that INDY-1447 was fixed in indy-node 1.5.68, but it is still present in indy-node 1.3.62 and 1.4.66 code. | **So, some of the nodes may not be upgraded during simultaneous pool-upgrade.** If this problem appears, stewards should perform manual upgrade of indy-node in accordance with this [instruction:](https://docs.google.com/document/d/1vUvbioL5OsmZMSkwRcu0p0jdttJO5VS8K3GhDLdNaoI)**(!)** To reduce the risk of reproducing INDY-1447, it is **recommended to use old CLI for pool upgrade.** | [INDY-1447](https://jira.hyperledger.org/browse/INDY-1447) | +| | | | | + +### Upgrade Scripts: + +**Pool upgrade from indy-node 1.3.62 to indy-node 1.6.78 should be performed simultaneously for all nodes due to txn format changes.** + +### Additional Information: + +**All indy-cli pools should be recreated with actual genesis files.** + +**For more details about txn format changes see** [**INDY-1421**](https://jira.hyperledger.org/browse/INDY-1421) **.** + +**There are possible OOM issues during 3+ hours of target load or large catch-ups at 8 GB RAM nodes pool so 32 GB is recommended.** + + ## 1.6.73 **Important:** Several iterations were done very rapidly between 
the last release and this one. All of the changes, upgrades, etc... are included in this new release. Simply upgrading will include them all from 1.6.70 until 1.6.73. To see further, specific numerous changes, please reference the appropriate tickets in the [Hyperledger Jira ticketing system.](https://jira.hyperledger.org/) @@ -761,4 +823,4 @@ The genesis files are now located in their own directory based off the network n | **Implemented a command line tool to provide validator status.** | | [INDY-715](https://jira.hyperledger.org/browse/INDY-715) | | **"Debug" mode for tests was moved to parameter.** | | [INDY-716](https://jira.hyperledger.org/browse/INDY-716) | | **Log levels were changed on some debug level messages to an info level.** | | [INDY-800](https://jira.hyperledger.org/browse/INDY-800) | -| **If the pool loses enough nodes and cannot reach consensus when enough nodes become available, the pool will still not reach consensus.** | If you restart all the nodes in the pool, it will start reaching consensus again. | [INDY-849](https://jira.hyperledger.org/browse/INDY-849) | \ No newline at end of file +| **If the pool loses enough nodes and cannot reach consensus when enough nodes become available, the pool will still not reach consensus.** | If you restart all the nodes in the pool, it will start reaching consensus again. 
| [INDY-849](https://jira.hyperledger.org/browse/INDY-849) | diff --git a/Jenkinsfile b/Jenkinsfile index bcc491aff..12a538405 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -27,29 +27,6 @@ def nodeTestUbuntu = { } } -def clientTestUbuntu = { - try { - echo 'Ubuntu Test: Checkout csm' - checkout scm - - echo 'Ubuntu Test: Build docker image' - def testEnv = dockerHelpers.build(name) - - testEnv.inside('--network host') { - echo 'Ubuntu Test: Install dependencies' - testHelpers.install() - - echo 'Ubuntu Test: Test' - testHelpers.testRunner([resFile: "test-result-client.${NODE_NAME}.txt", testDir: 'indy_client']) - //testHelpers.testJUnit(resFile: "test-result-client.${NODE_NAME}.xml") - } - } - finally { - echo 'Ubuntu Test: Cleanup' - step([$class: 'WsCleanup']) - } -} - def commonTestUbuntu = { try { echo 'Ubuntu Test: Checkout csm' @@ -90,4 +67,4 @@ def buildDebUbuntu = { repoName, releaseVersion, sourcePath -> options = new TestAndPublishOptions() options.enable([StagesEnum.PACK_RELEASE_COPY, StagesEnum.PACK_RELEASE_COPY_ST]) options.setCopyWithDeps(true) -testAndPublish(name, [ubuntu: [node: nodeTestUbuntu, client: clientTestUbuntu, common: commonTestUbuntu]], true, options, [ubuntu: buildDebUbuntu]) +testAndPublish(name, [ubuntu: [node: nodeTestUbuntu, common: commonTestUbuntu]], true, options, [ubuntu: buildDebUbuntu]) diff --git a/Jenkinsfile.cd b/Jenkinsfile.cd index 077fc97cb..b5d05baa1 100644 --- a/Jenkinsfile.cd +++ b/Jenkinsfile.cd @@ -27,29 +27,6 @@ def nodeTestUbuntu = { } } -def clientTestUbuntu = { - try { - echo 'Ubuntu Test: Checkout csm' - checkout scm - - echo 'Ubuntu Test: Build docker image' - def testEnv = dockerHelpers.build(name) - - testEnv.inside('--network host') { - echo 'Ubuntu Test: Install dependencies' - testHelpers.install() - - echo 'Ubuntu Test: Test' - testHelpers.testRunner([resFile: "test-result-client.${NODE_NAME}.txt", testDir: 'indy_client']) - //testHelpers.testJUnit(resFile: "test-result-client.${NODE_NAME}.xml") - } - } 
- finally { - echo 'Ubuntu Test: Cleanup' - step([$class: 'WsCleanup']) - } -} - def commonTestUbuntu = { try { echo 'Ubuntu Test: Checkout csm' @@ -193,4 +170,4 @@ options.enable([StagesEnum.PACK_RELEASE_COPY, StagesEnum.PACK_RELEASE_COPY_ST]) options.setCopyWithDeps(true) options.setSystemTestsCb(systemTests) options.setPrContexts([env.INDY_GITHUB_PR_REQUIRED_CONTEXT ?: "ci/hyperledger-jenkins/pr-merge"]) -testAndPublish(name, [ubuntu: [node: nodeTestUbuntu, client: clientTestUbuntu, common: commonTestUbuntu]], true, options, [ubuntu: buildDebUbuntu]) +testAndPublish(name, [ubuntu: [node: nodeTestUbuntu, common: commonTestUbuntu]], true, options, [ubuntu: buildDebUbuntu]) diff --git a/Jenkinsfile.ci b/Jenkinsfile.ci index b5edb3d4e..9041bf0ba 100644 --- a/Jenkinsfile.ci +++ b/Jenkinsfile.ci @@ -142,14 +142,6 @@ def tests = [ python: python ) }, - client: { python -> - test( - resFile: "test-result-client.${NODE_NAME}.txt", - testDir: 'indy_client', - python: python, - useRunner: true - ) - }, node: { python -> test( resFile: "test-result-node.${NODE_NAME}.txt", diff --git a/README.md b/README.md index 0266c80ab..4793161b5 100644 --- a/README.md +++ b/README.md @@ -8,7 +8,6 @@ * [How to Contribute](#how-to-contribute) * [How to Install a Test Network](#how-to-install-a-test-network) * [How to Start Working with the Code](#how-to-start-working-with-the-code) -* [How to Start Indy Client CLI](#how-to-start-indy-client-cli) * [Continuous integration and delivery](https://github.com/hyperledger/indy-node/blob/master/docs/ci-cd.md) * [How to send a PR](#how-to-send-a-pr) * [Docs and links](#docs-and-links) @@ -49,11 +48,8 @@ Indy Node repo consists of the following parts: - indy-node: - [indy-plenum](https://github.com/hyperledger/indy-plenum)-based implementation of distributed ledger - Extends plenum's base pool functionality with specific transactions support (CLAIM_DEF, SCHEMA, POOL_UPGRADE, etc.) 
-- indy-client - - Contains client and CLI code - - Will be deprecated soon in favor of [indy-sdk](https://github.com/hyperledger/indy-sdk), so please use indy-sdk for your own applications dealing with Indy ecosystem. - indy-common - - Common code for both indy-node and indy-client parts + - Common code for indy-node - scripts - Some scripts that can be run for installed Node (in particular, scripts to start Nodes, generate keys, prepare test Network, etc.) - doc @@ -69,15 +65,10 @@ Indy Node repo consists of the following parts: So, if you want to work with Indy Node, you will need to have the Plenum code as well in most of the cases and work with two projects at the same time (see [How to Start Working with the Code](#how-to-start-working-with-the-code) below). -- [indy-anoncreds](https://github.com/hyperledger/indy-anoncreds) - - A python implementation of the anonymous credentials ideas developed by IBM Research. - - This is quite independent from indy-node/plenum. So, in most cases you don't need this code to contribute to Indy-Node. - - It will be deprecated soon in favor of anoncreds implementation in indy-sdk (see below). - [indy-sdk](https://github.com/hyperledger/indy-sdk) - An official SDK for Indy. - it contains client and anoncreds implementation - You don't need it to contribute to Indy-Node. But please use indy-sdk for your own applications dealing with Indy ecosystem. - - It will replace indy-client and indy-anoncreds parsts soon. - [indy-crypto](https://github.com/hyperledger/indy-crypto) - A shared crypto library - It's based on [AMCL](https://github.com/milagro-crypto/amcl) @@ -119,14 +110,6 @@ The described process is automated in one of the ways below (it allow to install Please have a look at [Dev Setup](docs/setup-dev.md) -## How to Start Indy Client CLI -Once installed, you can play with the command-line interface by running Indy from a terminal. 
- -Note: For Windows, we recommended using either [cmder](http://cmder.net/) or [conemu](https://conemu.github.io/). -``` -indy -``` - ## Continuous Integration and Delivery Please have a look at [Continuous integration/delivery](docs/ci-cd.md) @@ -167,10 +150,12 @@ If you made changes in both indy-plenum and indy-node, you need to do the follow - Please have a look at documents and diagrams in [docs](docs) folder - Please have a look at documents and diagrams in Plenum's [docs](https://github.com/hyperledger/indy-plenum/tree/master/docs) folder: - [Technical Overview of Plenum](https://github.com/hyperledger/indy-plenum/blob/master/docs/main.md) + - [Plenum Consensus Algorithm Diagram](https://github.com/hyperledger/indy-plenum/blob/master/docs/diagrams/consensus-protocol.png) - [Glossary](https://github.com/hyperledger/indy-plenum/blob/master/docs/glossary.md) - [Storages](https://github.com/hyperledger/indy-plenum/blob/master/docs/storage.md) - [Request Handling](https://github.com/hyperledger/indy-plenum/blob/master/docs/request_handling.md) - [Catchup](https://github.com/hyperledger/indy-plenum/blob/master/docs/catchup.md) + - [Catchup Diagram](https://github.com/hyperledger/indy-plenum/blob/master/docs/diagrams/catchup-procedure.png) - [Plugins](https://github.com/hyperledger/indy-plenum/blob/master/docs/plugins.md) - Relationship between Entities and Transactions: [relationship diagram](docs/relationship-diagram.png) - Supported transactions and their format: [transactions](docs/transactions.md) diff --git a/acceptance/indy-cli-batches/AS-02-02-invalid-cases.batch b/acceptance/indy-cli-batches/AS-02-02-invalid-cases.batch index 464811b13..49d1767ee 100644 --- a/acceptance/indy-cli-batches/AS-02-02-invalid-cases.batch +++ b/acceptance/indy-cli-batches/AS-02-02-invalid-cases.batch @@ -7,6 +7,8 @@ wallet open AS-02-wallet key=testkey #demote node as STEWARD of ANOTHER node (FAIL) did use XhYtvJqezMUKfF6KVNaGmT - ledger node 
target=6G9QhQa3HWjRKeRmEvEkLbWWf2t7cw6KLtafzi494G4G alias=Node6 services= +#demote node with wrong pair of target and alias (FAIL) +- ledger node target=4Tn3wZMNCvhSTXPcLinQDnHyj56DTLQtL61ki4jo2Loc alias=Node6 services= #demote node as TRUST_ANCHOR (FAIL) did use Rhx2qwaeiGyhU9vn4ynHSS - ledger node target=4Tn3wZMNCvhSTXPcLinQDnHyj56DTLQtL61ki4jo2Loc alias=Node5 services= diff --git a/acceptance/indy-cli-batches/AS-03-01-identity-owner-anyone-can-write.batch b/acceptance/indy-cli-batches/AS-03-01-identity-owner-anyone-can-write.batch index e4510315d..723f1d9f6 100644 --- a/acceptance/indy-cli-batches/AS-03-01-identity-owner-anyone-can-write.batch +++ b/acceptance/indy-cli-batches/AS-03-01-identity-owner-anyone-can-write.batch @@ -101,5 +101,5 @@ did use UffJCJngTXc1o84dQ7aEUb - ledger attrib did=X3zYajU7gbz9Pu8k6E7Ppf raw={"endpoint":{"ha":"127.0.0.1:5555"}} # Identity Owner can create SCHEMA: (PASS with ANYONE_CAN_WRITE=True) ledger schema name=IdentityOwnerSchema version=1.0 attr_names=name,age -# Identity Owner can create CLAIM_DEF: (PASS with ANYONE_CAN_WRITE=True) -ledger cred-def schema_id=1 signature_type=CL primary={"n":"1","s":"2","rms":"3","r":{"age":"4","name":"5"},"rctxt":"6","z":"7"} tag=3 \ No newline at end of file +# Identity Owner can create CLAIM_DEF: (PASS with ANYONE_CAN_WRITE=True, but will fail because of wrong schema id) +- ledger cred-def schema_id=1 signature_type=CL primary={"n":"1","s":"2","rms":"3","r":{"age":"4","name":"5"},"rctxt":"6","z":"7"} tag=3 \ No newline at end of file diff --git a/acceptance/indy-cli-batches/AS-03-01-steward.batch b/acceptance/indy-cli-batches/AS-03-01-steward.batch index 2e5ad8f5e..6700e3cc0 100644 --- a/acceptance/indy-cli-batches/AS-03-01-steward.batch +++ b/acceptance/indy-cli-batches/AS-03-01-steward.batch @@ -67,7 +67,7 @@ did use CDcGtKx1boRYFwPBaGkMmk - ledger get-nym did=Jt7aMnw77aoaBMyhXUNjtt # #7.1 AS Steward (steward1) RESTORE TrustAnchor (PASS) 
????????????????????????????????????????????????????????????????? did use 7qFmEyYCXcmUFVied5Sp3b -ledger nym did=CDcGtKx1boRYFwPBaGkMmk role=TRUST_ANCHOR +- ledger nym did=CDcGtKx1boRYFwPBaGkMmk role=TRUST_ANCHOR # #7.2 CHECK TrustAnchor IS VALID did use CDcGtKx1boRYFwPBaGkMmk ledger nym did=XkZJxs6Uadv6MQeKGGZdZ6 verkey=~Aza4zyTRazcVsokmqNJfsg @@ -107,5 +107,5 @@ did use 6LKnRH6hWPSpoWu824s5JH - ledger attrib did=SvXt2QGwZF1kXTcpd2pJ37 raw={"endpoint":{"ha":"127.0.0.1:5555"}} # Steward can create SCHEMA: ledger schema name=StewardSchema version=1.0 attr_names=name,age -# Steward can create CLAIM_DEF: -ledger cred-def schema_id=1 signature_type=CL primary={"n":"1","s":"2","rms":"3","r":{"age":"4","name":"5"},"rctxt":"6","z":"7"} tag=1 \ No newline at end of file +# Steward can create CLAIM_DEF (will fail because of wrong schema id): +- ledger cred-def schema_id=1 signature_type=CL primary={"n":"1","s":"2","rms":"3","r":{"age":"4","name":"5"},"rctxt":"6","z":"7"} tag=1 \ No newline at end of file diff --git a/acceptance/indy-cli-batches/AS-03-01-trust-anchor.batch b/acceptance/indy-cli-batches/AS-03-01-trust-anchor.batch index 7a7763145..da2eb4d10 100644 --- a/acceptance/indy-cli-batches/AS-03-01-trust-anchor.batch +++ b/acceptance/indy-cli-batches/AS-03-01-trust-anchor.batch @@ -103,5 +103,5 @@ did use ETcbLj612u9oXr7adZSWZV - ledger attrib did=LoZB9o2PFfSjrpnkugqunZ raw={"endpoint":{"ha":"127.0.0.1:5555"}} # TrustAnchor can create SCHEMA: ledger schema name=TrustAnchorSchema version=1.0 attr_names=name,age -# TrustAnchor can create CLAIM_DEF: -ledger cred-def schema_id=1 signature_type=CL primary={"n":"1","s":"2","rms":"3","r":{"age":"4","name":"5"},"rctxt":"6","z":"7"} tag=2 \ No newline at end of file +# TrustAnchor can create CLAIM_DEF (will fail because of wrong schema id): +- ledger cred-def schema_id=1 signature_type=CL primary={"n":"1","s":"2","rms":"3","r":{"age":"4","name":"5"},"rctxt":"6","z":"7"} tag=2 \ No newline at end of file diff --git 
a/acceptance/indy-cli-batches/AS-03-01-trustee.batch b/acceptance/indy-cli-batches/AS-03-01-trustee.batch index 9434e425d..e6d9cab36 100644 --- a/acceptance/indy-cli-batches/AS-03-01-trustee.batch +++ b/acceptance/indy-cli-batches/AS-03-01-trustee.batch @@ -116,5 +116,5 @@ did use 484PvcK1gUqwWEWCkumDzA - ledger attrib did=QbvWTyhmZmDF8Ms82wC2JD raw={"endpoint":{"ha":"127.0.0.1:5555"}} # Trustee can create SCHEMA: ledger schema name=TrusteeSchema version=1.0 attr_names=name,age -# Trustee can create CLAIM_DEF: -ledger cred-def schema_id=1 signature_type=CL primary={"n":"1","s":"2","rms":"3","r":{"age":"4","name":"5"},"rctxt":"6","z":"7"} tag=10 \ No newline at end of file +# Trustee can create CLAIM_DEF (will fail because of wrong schema id): +- ledger cred-def schema_id=1 signature_type=CL primary={"n":"1","s":"2","rms":"3","r":{"age":"4","name":"5"},"rctxt":"6","z":"7"} tag=10 \ No newline at end of file diff --git a/acceptance/indy-cli-batches/expected/AS-01-01-general-1.expected b/acceptance/indy-cli-batches/expected/AS-01-01-general-1.expected index ad46ea940..01429f10e 100755 --- a/acceptance/indy-cli-batches/expected/AS-01-01-general-1.expected +++ b/acceptance/indy-cli-batches/expected/AS-01-01-general-1.expected @@ -1,35 +1,35 @@ - pool create AS-pool gen_txn_file=./pool_transactions_genesis -Pool config "AS-pool" has been created +Pool config "AS-pool" has been created pool connect AS-pool -Pool "AS-pool" has been connected +Pool "AS-pool" has been connected wallet create AS-01-wallet-1 key=testkey storage_config={"pool":"AS-pool"} -Wallet "AS-01-wallet-1" has been created +Wallet "AS-01-wallet-1" has been created wallet open AS-01-wallet-1 key=testkey -Wallet "AS-01-wallet-1" has been opened +Wallet "AS-01-wallet-1" has been opened did new seed=000000000000000000000000Trustee1 metadata="default trustee" -Did "V4SGRU86Z58d6TV7PBUe6f" has been created with "~CoRER63DVYnWZtK8uAzNbx" verkey -Metadata has been saved for DID "V4SGRU86Z58d6TV7PBUe6f" +Did 
"V4SGRU86Z58d6TV7PBUe6f" has been created with "~CoRER63DVYnWZtK8uAzNbx" verkey +Metadata has been saved for DID "V4SGRU86Z58d6TV7PBUe6f" did new seed=000000000000000000000NewSteward1 metadata="new steward" -Did "5Ur5boqV3EXobXhtfow83q" has been created with "~WWyzovtLNpZW8iAfu9qSN5" verkey -Metadata has been saved for DID "5Ur5boqV3EXobXhtfow83q" +Did "5Ur5boqV3EXobXhtfow83q" has been created with "~WWyzovtLNpZW8iAfu9qSN5" verkey +Metadata has been saved for DID "5Ur5boqV3EXobXhtfow83q" did use V4SGRU86Z58d6TV7PBUe6f -Did "V4SGRU86Z58d6TV7PBUe6f" has been set as active +Did "V4SGRU86Z58d6TV7PBUe6f" has been set as active ledger nym did=5Ur5boqV3EXobXhtfow83q verkey=~WWyzovtLNpZW8iAfu9qSN5 role=STEWARD -Nym request has been sent to Ledger. -Metadata: +Nym request has been sent to Ledger. +Metadata: +---------------------+---------------------+ | Request ID | Transaction time | +---------------------+---------------------+ | 1527781962070712451 | 2018-05-31 15:52:42 | +---------------------+---------------------+ -Data: +Data: +------------------------+-------------------------+---------+ | Did | Verkey | Role | +------------------------+-------------------------+---------+ @@ -37,20 +37,20 @@ ledger nym did=5Ur5boqV3EXobXhtfow83q verkey=~WWyzovtLNpZW8iAfu9qSN5 role=STEWAR +------------------------+-------------------------+---------+ did use 5Ur5boqV3EXobXhtfow83q -Did "5Ur5boqV3EXobXhtfow83q" has been set as active +Did "5Ur5boqV3EXobXhtfow83q" has been set as active did rotate-key -Verkey for did "5Ur5boqV3EXobXhtfow83q" has been updated. New verkey: "J1WyzFAvS4DUoGGf6eo9sX1iXJ7jayeX677Nh3VAwVRk" +Verkey for did "5Ur5boqV3EXobXhtfow83q" has been updated. New verkey: "J1WyzFAvS4DUoGGf6eo9sX1iXJ7jayeX677Nh3VAwVRk" ledger nym did=NyLkggDnCtZY6ngkfNqzg2 verkey=~LTar8Y3uzcQEt3HdhgqtSz -Nym request has been sent to Ledger. -Metadata: +Nym request has been sent to Ledger. 
+Metadata: +---------------------+---------------------+ | Request ID | Transaction time | +---------------------+---------------------+ | 1527781962599906305 | 2018-05-31 15:52:42 | +---------------------+---------------------+ -Data: +Data: +------------------------+-------------------------+------+ | Did | Verkey | Role | +------------------------+-------------------------+------+ @@ -58,14 +58,14 @@ ledger nym did=NyLkggDnCtZY6ngkfNqzg2 verkey=~LTar8Y3uzcQEt3HdhgqtSz +------------------------+-------------------------+------+ - ledger get-nym did=NyLkggDnCtZY6ngkfNqzg2 -Following NYM has been received. -Metadata: +Following NYM has been received. +Metadata: +-----------------+---------------------+---------------------+ | Sequence Number | Request ID | Transaction time | +-----------------+---------------------+---------------------+ | 1526 | 1527781962844903608 | 2018-05-31 15:52:42 | +-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+------------------------+-------------------------+------+ | Identifier | Dest | Verkey | Role | +------------------------+------------------------+-------------------------+------+ @@ -80,31 +80,31 @@ did list +------------------------+----------------------------------------------+-----------------+ | 5Ur5boqV3EXobXhtfow83q | J1WyzFAvS4DUoGGf6eo9sX1iXJ7jayeX677Nh3VAwVRk | new steward | +------------------------+----------------------------------------------+-----------------+ -Current did "5Ur5boqV3EXobXhtfow83q" +Current did "5Ur5boqV3EXobXhtfow83q" wallet create AS-01-identity-wallet key=testkey storage_config={"pool":"AS-pool"} -Wallet "AS-01-identity-wallet" has been created +Wallet "AS-01-identity-wallet" has been created wallet open AS-01-identity-wallet key=testkey -Wallet "AS-01-wallet-1" has been closed -Wallet "AS-01-identity-wallet" has been opened +Wallet "AS-01-wallet-1" has been closed +Wallet "AS-01-identity-wallet" has been opened did new 
seed=00000000000000000000000Identity1 metadata="new identity" -Did "NyLkggDnCtZY6ngkfNqzg2" has been created with "~LTar8Y3uzcQEt3HdhgqtSz" verkey -Metadata has been saved for DID "NyLkggDnCtZY6ngkfNqzg2" +Did "NyLkggDnCtZY6ngkfNqzg2" has been created with "~LTar8Y3uzcQEt3HdhgqtSz" verkey +Metadata has been saved for DID "NyLkggDnCtZY6ngkfNqzg2" did use NyLkggDnCtZY6ngkfNqzg2 -Did "NyLkggDnCtZY6ngkfNqzg2" has been set as active +Did "NyLkggDnCtZY6ngkfNqzg2" has been set as active - ledger get-nym did=NyLkggDnCtZY6ngkfNqzg2 -Following NYM has been received. -Metadata: +Following NYM has been received. +Metadata: +-----------------+---------------------+---------------------+ | Sequence Number | Request ID | Transaction time | +-----------------+---------------------+---------------------+ | 1526 | 1527781964929492594 | 2018-05-31 15:52:42 | +-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+------------------------+-------------------------+------+ | Identifier | Dest | Verkey | Role | +------------------------+------------------------+-------------------------+------+ @@ -112,17 +112,17 @@ did use NyLkggDnCtZY6ngkfNqzg2 +------------------------+------------------------+-------------------------+------+ did rotate-key -Verkey for did "NyLkggDnCtZY6ngkfNqzg2" has been updated. New verkey: "5TKaMf8EXoad6tLMptGNuh4563Vg9znFnBJ1iZH7zDJu" +Verkey for did "NyLkggDnCtZY6ngkfNqzg2" has been updated. New verkey: "5TKaMf8EXoad6tLMptGNuh4563Vg9znFnBJ1iZH7zDJu" - ledger get-nym did=NyLkggDnCtZY6ngkfNqzg2 -Following NYM has been received. -Metadata: +Following NYM has been received. 
+Metadata: +-----------------+---------------------+---------------------+ | Sequence Number | Request ID | Transaction time | +-----------------+---------------------+---------------------+ | 1527 | 1527781965229610522 | 2018-05-31 15:52:45 | +-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+------------------------+----------------------------------------------+------+ | Identifier | Dest | Verkey | Role | +------------------------+------------------------+----------------------------------------------+------+ @@ -130,10 +130,10 @@ did rotate-key +------------------------+------------------------+----------------------------------------------+------+ wallet close -Wallet "AS-01-identity-wallet" has been closed +Wallet "AS-01-identity-wallet" has been closed pool disconnect -Pool "AS-pool" has been disconnected +Pool "AS-pool" has been disconnected wallet list +-----------------------+----------------------+---------+ @@ -151,4 +151,4 @@ pool list | AS-pool | +---------+ -Goodbye... +Goodbye... 
diff --git a/acceptance/indy-cli-batches/expected/AS-01-01-general-2.expected b/acceptance/indy-cli-batches/expected/AS-01-01-general-2.expected index be97856f8..b4d7fe51b 100644 --- a/acceptance/indy-cli-batches/expected/AS-01-01-general-2.expected +++ b/acceptance/indy-cli-batches/expected/AS-01-01-general-2.expected @@ -1,35 +1,35 @@ - pool create AS-pool gen_txn_file=./pool_transactions_genesis -Pool config "AS-pool" has been created +Pool config "AS-pool" has been created pool connect AS-pool -Pool "AS-pool" has been connected +Pool "AS-pool" has been connected wallet create AS-01-wallet-2 key=testkey storage_config={"pool":"AS-pool"} -Wallet "AS-01-wallet-2" has been created +Wallet "AS-01-wallet-2" has been created wallet open AS-01-wallet-2 key=testkey -Wallet "AS-01-wallet-2" has been opened +Wallet "AS-01-wallet-2" has been opened did new seed=000000000000000000000000Trustee1 metadata="default trustee" -Did "V4SGRU86Z58d6TV7PBUe6f" has been created with "~CoRER63DVYnWZtK8uAzNbx" verkey -Metadata has been saved for DID "V4SGRU86Z58d6TV7PBUe6f" +Did "V4SGRU86Z58d6TV7PBUe6f" has been created with "~CoRER63DVYnWZtK8uAzNbx" verkey +Metadata has been saved for DID "V4SGRU86Z58d6TV7PBUe6f" did new seed=000000000000000000000NewSteward2 metadata="new steward" -Did "L8voaFcbR8oAJu9mKHxuYr" has been created with "~2oQNSSigdSVoPf6PVb7Fyf" verkey -Metadata has been saved for DID "L8voaFcbR8oAJu9mKHxuYr" +Did "L8voaFcbR8oAJu9mKHxuYr" has been created with "~2oQNSSigdSVoPf6PVb7Fyf" verkey +Metadata has been saved for DID "L8voaFcbR8oAJu9mKHxuYr" did use V4SGRU86Z58d6TV7PBUe6f -Did "V4SGRU86Z58d6TV7PBUe6f" has been set as active +Did "V4SGRU86Z58d6TV7PBUe6f" has been set as active ledger nym did=L8voaFcbR8oAJu9mKHxuYr verkey=~2oQNSSigdSVoPf6PVb7Fyf role=STEWARD -Nym request has been sent to Ledger. -Metadata: +Nym request has been sent to Ledger. 
+Metadata: +---------------------+---------------------+ | Request ID | Transaction time | +---------------------+---------------------+ | 1527782164819448977 | 2018-05-31 15:56:04 | +---------------------+---------------------+ -Data: +Data: +------------------------+-------------------------+---------+ | Did | Verkey | Role | +------------------------+-------------------------+---------+ @@ -37,20 +37,20 @@ ledger nym did=L8voaFcbR8oAJu9mKHxuYr verkey=~2oQNSSigdSVoPf6PVb7Fyf role=STEWAR +------------------------+-------------------------+---------+ did use L8voaFcbR8oAJu9mKHxuYr -Did "L8voaFcbR8oAJu9mKHxuYr" has been set as active +Did "L8voaFcbR8oAJu9mKHxuYr" has been set as active did rotate-key -Verkey for did "L8voaFcbR8oAJu9mKHxuYr" has been updated. New verkey: "CTywiRi2vJr11a9JbV4AAG1qZR2cKCtz5dHA4cjicfdN" +Verkey for did "L8voaFcbR8oAJu9mKHxuYr" has been updated. New verkey: "CTywiRi2vJr11a9JbV4AAG1qZR2cKCtz5dHA4cjicfdN" ledger nym did=TLvnGKZVC3qkiZgn56esFG verkey=~YXLYfwoWJ8utAU8TBSmgn4 -Nym request has been sent to Ledger. -Metadata: +Nym request has been sent to Ledger. +Metadata: +---------------------+---------------------+ | Request ID | Transaction time | +---------------------+---------------------+ | 1527782165354580193 | 2018-05-31 15:56:05 | +---------------------+---------------------+ -Data: +Data: +------------------------+-------------------------+------+ | Did | Verkey | Role | +------------------------+-------------------------+------+ @@ -58,14 +58,14 @@ ledger nym did=TLvnGKZVC3qkiZgn56esFG verkey=~YXLYfwoWJ8utAU8TBSmgn4 +------------------------+-------------------------+------+ - ledger get-nym did=TLvnGKZVC3qkiZgn56esFG -Following NYM has been received. -Metadata: +Following NYM has been received. 
+Metadata: +-----------------+---------------------+---------------------+ | Sequence Number | Request ID | Transaction time | +-----------------+---------------------+---------------------+ | 1530 | 1527782165610798459 | 2018-05-31 15:56:05 | +-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+------------------------+-------------------------+------+ | Identifier | Dest | Verkey | Role | +------------------------+------------------------+-------------------------+------+ @@ -80,31 +80,31 @@ did list +------------------------+----------------------------------------------+-----------------+ | V4SGRU86Z58d6TV7PBUe6f | ~CoRER63DVYnWZtK8uAzNbx | default trustee | +------------------------+----------------------------------------------+-----------------+ -Current did "L8voaFcbR8oAJu9mKHxuYr" +Current did "L8voaFcbR8oAJu9mKHxuYr" wallet create AS-01-identity-wallet-2 key=testkey storage_config={"pool":"AS-pool"} -Wallet "AS-01-identity-wallet-2" has been created +Wallet "AS-01-identity-wallet-2" has been created wallet open AS-01-identity-wallet-2 key=testkey -Wallet "AS-01-wallet-2" has been closed -Wallet "AS-01-identity-wallet-2" has been opened +Wallet "AS-01-wallet-2" has been closed +Wallet "AS-01-identity-wallet-2" has been opened did new seed=00000000000000000000000Identity2 metadata="new identity" -Did "TLvnGKZVC3qkiZgn56esFG" has been created with "~YXLYfwoWJ8utAU8TBSmgn4" verkey -Metadata has been saved for DID "TLvnGKZVC3qkiZgn56esFG" +Did "TLvnGKZVC3qkiZgn56esFG" has been created with "~YXLYfwoWJ8utAU8TBSmgn4" verkey +Metadata has been saved for DID "TLvnGKZVC3qkiZgn56esFG" did use TLvnGKZVC3qkiZgn56esFG -Did "TLvnGKZVC3qkiZgn56esFG" has been set as active +Did "TLvnGKZVC3qkiZgn56esFG" has been set as active - ledger get-nym did=TLvnGKZVC3qkiZgn56esFG -Following NYM has been received. -Metadata: +Following NYM has been received. 
+Metadata: +-----------------+---------------------+---------------------+ | Sequence Number | Request ID | Transaction time | +-----------------+---------------------+---------------------+ | 1530 | 1527782167736969406 | 2018-05-31 15:56:05 | +-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+------------------------+-------------------------+------+ | Identifier | Dest | Verkey | Role | +------------------------+------------------------+-------------------------+------+ @@ -112,17 +112,17 @@ did use TLvnGKZVC3qkiZgn56esFG +------------------------+------------------------+-------------------------+------+ did rotate-key -Verkey for did "TLvnGKZVC3qkiZgn56esFG" has been updated. New verkey: "DYvs2Q5WwsYYPgBxvRpVspgXeG3zdUQBYrpSzMNZfCWT" +Verkey for did "TLvnGKZVC3qkiZgn56esFG" has been updated. New verkey: "DYvs2Q5WwsYYPgBxvRpVspgXeG3zdUQBYrpSzMNZfCWT" - ledger get-nym did=TLvnGKZVC3qkiZgn56esFG -Following NYM has been received. -Metadata: +Following NYM has been received. 
+Metadata: +-----------------+---------------------+---------------------+ | Sequence Number | Request ID | Transaction time | +-----------------+---------------------+---------------------+ | 1531 | 1527782168031255256 | 2018-05-31 15:56:07 | +-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+------------------------+----------------------------------------------+------+ | Identifier | Dest | Verkey | Role | +------------------------+------------------------+----------------------------------------------+------+ @@ -130,10 +130,10 @@ did rotate-key +------------------------+------------------------+----------------------------------------------+------+ wallet close -Wallet "AS-01-identity-wallet-2" has been closed +Wallet "AS-01-identity-wallet-2" has been closed pool disconnect -Pool "AS-pool" has been disconnected +Pool "AS-pool" has been disconnected wallet list +-------------------------+----------------------+---------+ @@ -151,4 +151,4 @@ pool list | AS-pool | +---------+ -Goodbye... +Goodbye... 
diff --git a/acceptance/indy-cli-batches/expected/AS-01-01-general-3.expected b/acceptance/indy-cli-batches/expected/AS-01-01-general-3.expected index 0a25f321f..b96883cbc 100644 --- a/acceptance/indy-cli-batches/expected/AS-01-01-general-3.expected +++ b/acceptance/indy-cli-batches/expected/AS-01-01-general-3.expected @@ -1,35 +1,35 @@ - pool create AS-pool gen_txn_file=./pool_transactions_genesis -Pool config "AS-pool" has been created +Pool config "AS-pool" has been created pool connect AS-pool -Pool "AS-pool" has been connected +Pool "AS-pool" has been connected wallet create AS-01-wallet-3 key=testkey storage_config={"pool":"AS-pool"} -Wallet "AS-01-wallet-3" has been created +Wallet "AS-01-wallet-3" has been created wallet open AS-01-wallet-3 key=testkey -Wallet "AS-01-wallet-3" has been opened +Wallet "AS-01-wallet-3" has been opened did new seed=000000000000000000000000Trustee1 metadata="default trustee" -Did "V4SGRU86Z58d6TV7PBUe6f" has been created with "~CoRER63DVYnWZtK8uAzNbx" verkey -Metadata has been saved for DID "V4SGRU86Z58d6TV7PBUe6f" +Did "V4SGRU86Z58d6TV7PBUe6f" has been created with "~CoRER63DVYnWZtK8uAzNbx" verkey +Metadata has been saved for DID "V4SGRU86Z58d6TV7PBUe6f" did new seed=000000000000000000000NewSteward3 metadata="new steward" -Did "Dji7gLeU11xUrHxZQVmScq" has been created with "~AvtK2VwnpLfVPt34gGFfA3" verkey -Metadata has been saved for DID "Dji7gLeU11xUrHxZQVmScq" +Did "Dji7gLeU11xUrHxZQVmScq" has been created with "~AvtK2VwnpLfVPt34gGFfA3" verkey +Metadata has been saved for DID "Dji7gLeU11xUrHxZQVmScq" did use V4SGRU86Z58d6TV7PBUe6f -Did "V4SGRU86Z58d6TV7PBUe6f" has been set as active +Did "V4SGRU86Z58d6TV7PBUe6f" has been set as active ledger nym did=Dji7gLeU11xUrHxZQVmScq verkey=~AvtK2VwnpLfVPt34gGFfA3 role=STEWARD -Nym request has been sent to Ledger. -Metadata: +Nym request has been sent to Ledger. 
+Metadata: +---------------------+---------------------+ | Request ID | Transaction time | +---------------------+---------------------+ | 1527782319039165766 | 2018-05-31 15:58:39 | +---------------------+---------------------+ -Data: +Data: +------------------------+-------------------------+---------+ | Did | Verkey | Role | +------------------------+-------------------------+---------+ @@ -37,20 +37,20 @@ ledger nym did=Dji7gLeU11xUrHxZQVmScq verkey=~AvtK2VwnpLfVPt34gGFfA3 role=STEWAR +------------------------+-------------------------+---------+ did use Dji7gLeU11xUrHxZQVmScq -Did "Dji7gLeU11xUrHxZQVmScq" has been set as active +Did "Dji7gLeU11xUrHxZQVmScq" has been set as active did rotate-key -Verkey for did "Dji7gLeU11xUrHxZQVmScq" has been updated. New verkey: "75arSYUChsp4M5p8SbLcwgBWcZKgt5HnqzvmqTrQcZ2z" +Verkey for did "Dji7gLeU11xUrHxZQVmScq" has been updated. New verkey: "75arSYUChsp4M5p8SbLcwgBWcZKgt5HnqzvmqTrQcZ2z" ledger nym did=Qb7oAFvnr2kH6FeCA9UxUN verkey=~MBMEpUF1DPd6CM2irHVyXi -Nym request has been sent to Ledger. -Metadata: +Nym request has been sent to Ledger. +Metadata: +---------------------+---------------------+ | Request ID | Transaction time | +---------------------+---------------------+ | 1527782319568846122 | 2018-05-31 15:58:39 | +---------------------+---------------------+ -Data: +Data: +------------------------+-------------------------+------+ | Did | Verkey | Role | +------------------------+-------------------------+------+ @@ -58,14 +58,14 @@ ledger nym did=Qb7oAFvnr2kH6FeCA9UxUN verkey=~MBMEpUF1DPd6CM2irHVyXi +------------------------+-------------------------+------+ - ledger get-nym did=Qb7oAFvnr2kH6FeCA9UxUN -Following NYM has been received. -Metadata: +Following NYM has been received. 
+Metadata: +-----------------+---------------------+---------------------+ | Sequence Number | Request ID | Transaction time | +-----------------+---------------------+---------------------+ | 1534 | 1527782319809908255 | 2018-05-31 15:58:39 | +-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+------------------------+-------------------------+------+ | Identifier | Dest | Verkey | Role | +------------------------+------------------------+-------------------------+------+ @@ -80,31 +80,31 @@ did list +------------------------+----------------------------------------------+-----------------+ | Dji7gLeU11xUrHxZQVmScq | 75arSYUChsp4M5p8SbLcwgBWcZKgt5HnqzvmqTrQcZ2z | new steward | +------------------------+----------------------------------------------+-----------------+ -Current did "Dji7gLeU11xUrHxZQVmScq" +Current did "Dji7gLeU11xUrHxZQVmScq" wallet create AS-01-identity-wallet-3 key=testkey storage_config={"pool":"AS-pool"} -Wallet "AS-01-identity-wallet-3" has been created +Wallet "AS-01-identity-wallet-3" has been created wallet open AS-01-identity-wallet-3 key=testkey -Wallet "AS-01-wallet-3" has been closed -Wallet "AS-01-identity-wallet-3" has been opened +Wallet "AS-01-wallet-3" has been closed +Wallet "AS-01-identity-wallet-3" has been opened did new seed=00000000000000000000000Identity3 metadata="new identity" -Did "Qb7oAFvnr2kH6FeCA9UxUN" has been created with "~MBMEpUF1DPd6CM2irHVyXi" verkey -Metadata has been saved for DID "Qb7oAFvnr2kH6FeCA9UxUN" +Did "Qb7oAFvnr2kH6FeCA9UxUN" has been created with "~MBMEpUF1DPd6CM2irHVyXi" verkey +Metadata has been saved for DID "Qb7oAFvnr2kH6FeCA9UxUN" did use Qb7oAFvnr2kH6FeCA9UxUN -Did "Qb7oAFvnr2kH6FeCA9UxUN" has been set as active +Did "Qb7oAFvnr2kH6FeCA9UxUN" has been set as active - ledger get-nym did=Qb7oAFvnr2kH6FeCA9UxUN -Following NYM has been received. -Metadata: +Following NYM has been received. 
+Metadata: +-----------------+---------------------+---------------------+ | Sequence Number | Request ID | Transaction time | +-----------------+---------------------+---------------------+ | 1534 | 1527782321899541393 | 2018-05-31 15:58:39 | +-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+------------------------+-------------------------+------+ | Identifier | Dest | Verkey | Role | +------------------------+------------------------+-------------------------+------+ @@ -112,17 +112,17 @@ did use Qb7oAFvnr2kH6FeCA9UxUN +------------------------+------------------------+-------------------------+------+ did rotate-key -Verkey for did "Qb7oAFvnr2kH6FeCA9UxUN" has been updated. New verkey: "ANKU7SGwaZwmt8NBuuwMBpNGuv4nWzwfEnk5u4m6zoMD" +Verkey for did "Qb7oAFvnr2kH6FeCA9UxUN" has been updated. New verkey: "ANKU7SGwaZwmt8NBuuwMBpNGuv4nWzwfEnk5u4m6zoMD" - ledger get-nym did=Qb7oAFvnr2kH6FeCA9UxUN -Following NYM has been received. -Metadata: +Following NYM has been received. 
+Metadata: +-----------------+---------------------+---------------------+ | Sequence Number | Request ID | Transaction time | +-----------------+---------------------+---------------------+ | 1535 | 1527782322190605666 | 2018-05-31 15:58:41 | +-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+------------------------+----------------------------------------------+------+ | Identifier | Dest | Verkey | Role | +------------------------+------------------------+----------------------------------------------+------+ @@ -130,10 +130,10 @@ did rotate-key +------------------------+------------------------+----------------------------------------------+------+ wallet close -Wallet "AS-01-identity-wallet-3" has been closed +Wallet "AS-01-identity-wallet-3" has been closed pool disconnect -Pool "AS-pool" has been disconnected +Pool "AS-pool" has been disconnected wallet list +-------------------------+----------------------+---------+ @@ -151,4 +151,4 @@ pool list | AS-pool | +---------+ -Goodbye... +Goodbye... diff --git a/acceptance/indy-cli-batches/expected/AS-01-02-after-migration-1.expected b/acceptance/indy-cli-batches/expected/AS-01-02-after-migration-1.expected index e838d0bca..7f65eb721 100755 --- a/acceptance/indy-cli-batches/expected/AS-01-02-after-migration-1.expected +++ b/acceptance/indy-cli-batches/expected/AS-01-02-after-migration-1.expected @@ -1,21 +1,21 @@ pool connect AS-01-02-steward-pool -Pool "AS-01-02-steward-pool" has been connected +Pool "AS-01-02-steward-pool" has been connected wallet open AS-01-02-steward-wallet key=testkey -Wallet "AS-01-02-steward-wallet" has been opened +Wallet "AS-01-02-steward-wallet" has been opened did use Mzcs3aMUHRQQwnW9bSdFgW -Did "Mzcs3aMUHRQQwnW9bSdFgW" has been set as active +Did "Mzcs3aMUHRQQwnW9bSdFgW" has been set as active ledger nym did=Mzcs3aMUHRQQwnW9bSdFgW verkey=~VkaLiUxDhNHN4sS3LGxbCP -Nym request has been sent to Ledger. 
-Metadata: +Nym request has been sent to Ledger. +Metadata: +---------------------+---------------------+ | Request ID | Transaction time | +---------------------+---------------------+ | 1523197886696176059 | 2018-04-08 14:31:26 | +---------------------+---------------------+ -Data: +Data: +------------------------+-------------------------+------+ | Did | Verkey | Role | +------------------------+-------------------------+------+ @@ -23,14 +23,14 @@ ledger nym did=Mzcs3aMUHRQQwnW9bSdFgW verkey=~VkaLiUxDhNHN4sS3LGxbCP +------------------------+-------------------------+------+ did new seed=0000000000000000000NewSteward121 metadata="new steward1" -Did "Mzcs3aMUHRQQwnW9bSdFgW" has been created with "~VkaLiUxDhNHN4sS3LGxbCP" verkey -Metadata has been saved for DID "Mzcs3aMUHRQQwnW9bSdFgW" +Did "Mzcs3aMUHRQQwnW9bSdFgW" has been created with "~VkaLiUxDhNHN4sS3LGxbCP" verkey +Metadata has been saved for DID "Mzcs3aMUHRQQwnW9bSdFgW" wallet close -Wallet "AS-01-02-steward-wallet" has been closed +Wallet "AS-01-02-steward-wallet" has been closed pool disconnect -Pool "AS-01-02-steward-pool" has been disconnected +Pool "AS-01-02-steward-pool" has been disconnected wallet list +-------------------------+-----------------------+---------+ @@ -66,4 +66,4 @@ pool list exit -Goodbye... +Goodbye... 
diff --git a/acceptance/indy-cli-batches/expected/AS-01-02-after-migration-2.expected b/acceptance/indy-cli-batches/expected/AS-01-02-after-migration-2.expected index 9068d3a0c..c8a1cec4f 100755 --- a/acceptance/indy-cli-batches/expected/AS-01-02-after-migration-2.expected +++ b/acceptance/indy-cli-batches/expected/AS-01-02-after-migration-2.expected @@ -1,21 +1,21 @@ pool connect AS-01-02-steward-pool -Pool "AS-01-02-steward-pool" has been connected +Pool "AS-01-02-steward-pool" has been connected wallet open AS-01-02-steward-wallet key=testkey -Wallet "AS-01-02-steward-wallet" has been opened +Wallet "AS-01-02-steward-wallet" has been opened did use UAPL7pTFdHGCXS4MGU8Zqv -Did "UAPL7pTFdHGCXS4MGU8Zqv" has been set as active +Did "UAPL7pTFdHGCXS4MGU8Zqv" has been set as active ledger nym did=UAPL7pTFdHGCXS4MGU8Zqv verkey=~6m1fM2eoZ1ygsXgq8NLPGc -Nym request has been sent to Ledger. -Metadata: +Nym request has been sent to Ledger. +Metadata: +---------------------+---------------------+ | Request ID | Transaction time | +---------------------+---------------------+ | 1523199097219969241 | 2018-04-08 14:51:37 | +---------------------+---------------------+ -Data: +Data: +------------------------+-------------------------+------+ | Did | Verkey | Role | +------------------------+-------------------------+------+ @@ -23,14 +23,14 @@ ledger nym did=UAPL7pTFdHGCXS4MGU8Zqv verkey=~6m1fM2eoZ1ygsXgq8NLPGc +------------------------+-------------------------+------+ did new seed=0000000000000000000NewSteward122 metadata="new steward2" -Did "UAPL7pTFdHGCXS4MGU8Zqv" has been created with "~6m1fM2eoZ1ygsXgq8NLPGc" verkey -Metadata has been saved for DID "UAPL7pTFdHGCXS4MGU8Zqv" +Did "UAPL7pTFdHGCXS4MGU8Zqv" has been created with "~6m1fM2eoZ1ygsXgq8NLPGc" verkey +Metadata has been saved for DID "UAPL7pTFdHGCXS4MGU8Zqv" wallet close -Wallet "AS-01-02-steward-wallet" has been closed +Wallet "AS-01-02-steward-wallet" has been closed pool disconnect -Pool "AS-01-02-steward-pool" has 
been disconnected +Pool "AS-01-02-steward-pool" has been disconnected wallet list +-------------------------+-----------------------+---------+ @@ -52,4 +52,4 @@ pool list exit -Goodbye... +Goodbye... diff --git a/acceptance/indy-cli-batches/expected/AS-01-02-after-migration-3.expected b/acceptance/indy-cli-batches/expected/AS-01-02-after-migration-3.expected index 06abfc0e8..30e5e80fa 100755 --- a/acceptance/indy-cli-batches/expected/AS-01-02-after-migration-3.expected +++ b/acceptance/indy-cli-batches/expected/AS-01-02-after-migration-3.expected @@ -1,21 +1,21 @@ pool connect AS-01-02-steward-pool -Pool "AS-01-02-steward-pool" has been connected +Pool "AS-01-02-steward-pool" has been connected wallet open AS-01-02-steward-wallet key=testkey -Wallet "AS-01-02-steward-wallet" has been opened +Wallet "AS-01-02-steward-wallet" has been opened did use Q4sXZA2FF8ePpnCbaWiNGP -Did "Q4sXZA2FF8ePpnCbaWiNGP" has been set as active +Did "Q4sXZA2FF8ePpnCbaWiNGP" has been set as active ledger nym did=Q4sXZA2FF8ePpnCbaWiNGP verkey=~RiX2km3TqJwmVNXj8WwcM6 -Nym request has been sent to Ledger. -Metadata: +Nym request has been sent to Ledger. 
+Metadata: +---------------------+---------------------+ | Request ID | Transaction time | +---------------------+---------------------+ | 1523199301032507406 | 2018-04-08 14:55:01 | +---------------------+---------------------+ -Data: +Data: +------------------------+-------------------------+------+ | Did | Verkey | Role | +------------------------+-------------------------+------+ @@ -23,14 +23,14 @@ ledger nym did=Q4sXZA2FF8ePpnCbaWiNGP verkey=~RiX2km3TqJwmVNXj8WwcM6 +------------------------+-------------------------+------+ did new seed=0000000000000000000NewSteward123 metadata="new steward3" -Did "Q4sXZA2FF8ePpnCbaWiNGP" has been created with "~RiX2km3TqJwmVNXj8WwcM6" verkey -Metadata has been saved for DID "Q4sXZA2FF8ePpnCbaWiNGP" +Did "Q4sXZA2FF8ePpnCbaWiNGP" has been created with "~RiX2km3TqJwmVNXj8WwcM6" verkey +Metadata has been saved for DID "Q4sXZA2FF8ePpnCbaWiNGP" wallet close -Wallet "AS-01-02-steward-wallet" has been closed +Wallet "AS-01-02-steward-wallet" has been closed pool disconnect -Pool "AS-01-02-steward-pool" has been disconnected +Pool "AS-01-02-steward-pool" has been disconnected wallet list +-------------------------+-----------------------+---------+ @@ -52,4 +52,4 @@ pool list exit -Goodbye... +Goodbye... 
diff --git a/acceptance/indy-cli-batches/expected/AS-01-02-before-migration-1.expected b/acceptance/indy-cli-batches/expected/AS-01-02-before-migration-1.expected index 1cff5caf4..b49339fae 100644 --- a/acceptance/indy-cli-batches/expected/AS-01-02-before-migration-1.expected +++ b/acceptance/indy-cli-batches/expected/AS-01-02-before-migration-1.expected @@ -1,31 +1,31 @@ pool create AS-01-02-trustee-pool gen_txn_file=./pool_transactions_genesis -Pool config "AS-01-02-trustee-pool" has been created +Pool config "AS-01-02-trustee-pool" has been created pool connect AS-01-02-trustee-pool -Pool "AS-01-02-trustee-pool" has been connected +Pool "AS-01-02-trustee-pool" has been connected wallet create AS-01-02-trustee-wallet key=testkey storage_config={"pool":"AS-01-02-trustee-pool"} -Wallet "AS-01-02-trustee-wallet" has been created +Wallet "AS-01-02-trustee-wallet" has been created wallet open AS-01-02-trustee-wallet key=testkey -Wallet "AS-01-02-trustee-wallet" has been opened +Wallet "AS-01-02-trustee-wallet" has been opened did new seed=000000000000000000000000Trustee1 metadata="default trustee" -Did "V4SGRU86Z58d6TV7PBUe6f" has been created with "~CoRER63DVYnWZtK8uAzNbx" verkey -Metadata has been saved for DID "V4SGRU86Z58d6TV7PBUe6f" +Did "V4SGRU86Z58d6TV7PBUe6f" has been created with "~CoRER63DVYnWZtK8uAzNbx" verkey +Metadata has been saved for DID "V4SGRU86Z58d6TV7PBUe6f" did use V4SGRU86Z58d6TV7PBUe6f -Did "V4SGRU86Z58d6TV7PBUe6f" has been set as active +Did "V4SGRU86Z58d6TV7PBUe6f" has been set as active ledger nym did=Mzcs3aMUHRQQwnW9bSdFgW verkey=~VkaLiUxDhNHN4sS3LGxbCP role=STEWARD -Nym request has been sent to Ledger. -Metadata: +Nym request has been sent to Ledger. 
+Metadata: +---------------------+---------------------+ | Request ID | Transaction time | +---------------------+---------------------+ | 1523197819707710189 | 2018-04-08 14:30:19 | +---------------------+---------------------+ -Data: +Data: +------------------------+-------------------------+---------+ | Did | Verkey | Role | +------------------------+-------------------------+---------+ @@ -33,34 +33,34 @@ ledger nym did=Mzcs3aMUHRQQwnW9bSdFgW verkey=~VkaLiUxDhNHN4sS3LGxbCP role=STEWAR +------------------------+-------------------------+---------+ pool create AS-01-02-steward-pool gen_txn_file=./pool_transactions_genesis -Pool config "AS-01-02-steward-pool" has been created +Pool config "AS-01-02-steward-pool" has been created pool connect AS-01-02-steward-pool -Pool "AS-01-02-trustee-pool" has been disconnected -Pool "AS-01-02-steward-pool" has been connected +Pool "AS-01-02-trustee-pool" has been disconnected +Pool "AS-01-02-steward-pool" has been connected wallet create AS-01-02-steward-wallet key=testkey storage_config={"pool":"AS-01-02-steward-pool"} -Wallet "AS-01-02-steward-wallet" has been created +Wallet "AS-01-02-steward-wallet" has been created wallet open AS-01-02-steward-wallet key=testkey -Wallet "AS-01-02-trustee-wallet" has been closed -Wallet "AS-01-02-steward-wallet" has been opened +Wallet "AS-01-02-trustee-wallet" has been closed +Wallet "AS-01-02-steward-wallet" has been opened did new seed=0000000000000000000NewSteward121 metadata="new steward1" -Did "Mzcs3aMUHRQQwnW9bSdFgW" has been created with "~VkaLiUxDhNHN4sS3LGxbCP" verkey -Metadata has been saved for DID "Mzcs3aMUHRQQwnW9bSdFgW" +Did "Mzcs3aMUHRQQwnW9bSdFgW" has been created with "~VkaLiUxDhNHN4sS3LGxbCP" verkey +Metadata has been saved for DID "Mzcs3aMUHRQQwnW9bSdFgW" did use Mzcs3aMUHRQQwnW9bSdFgW -Did "Mzcs3aMUHRQQwnW9bSdFgW" has been set as active +Did "Mzcs3aMUHRQQwnW9bSdFgW" has been set as active did rotate-key -Verkey for did "Mzcs3aMUHRQQwnW9bSdFgW" has been updated. 
New verkey: "7YwzRqcjhVnHWEnLHE2BdQuoCeLjFuGT58RM2txivhSF" +Verkey for did "Mzcs3aMUHRQQwnW9bSdFgW" has been updated. New verkey: "7YwzRqcjhVnHWEnLHE2BdQuoCeLjFuGT58RM2txivhSF" wallet close -Wallet "AS-01-02-steward-wallet" has been closed +Wallet "AS-01-02-steward-wallet" has been closed pool disconnect -Pool "AS-01-02-steward-pool" has been disconnected +Pool "AS-01-02-steward-pool" has been disconnected wallet list +-------------------------+-----------------------+---------+ @@ -96,4 +96,4 @@ pool list exit -Goodbye... +Goodbye... diff --git a/acceptance/indy-cli-batches/expected/AS-01-02-before-migration-2.expected b/acceptance/indy-cli-batches/expected/AS-01-02-before-migration-2.expected index ace761022..77289f9ee 100755 --- a/acceptance/indy-cli-batches/expected/AS-01-02-before-migration-2.expected +++ b/acceptance/indy-cli-batches/expected/AS-01-02-before-migration-2.expected @@ -1,31 +1,31 @@ pool create AS-01-02-trustee-pool gen_txn_file=./pool_transactions_genesis -Pool config "AS-01-02-trustee-pool" has been created +Pool config "AS-01-02-trustee-pool" has been created pool connect AS-01-02-trustee-pool -Pool "AS-01-02-trustee-pool" has been connected +Pool "AS-01-02-trustee-pool" has been connected wallet create AS-01-02-trustee-wallet key=testkey storage_config={"pool":"AS-01-02-trustee-pool"} -Wallet "AS-01-02-trustee-wallet" has been created +Wallet "AS-01-02-trustee-wallet" has been created wallet open AS-01-02-trustee-wallet key=testkey -Wallet "AS-01-02-trustee-wallet" has been opened +Wallet "AS-01-02-trustee-wallet" has been opened did new seed=000000000000000000000000Trustee1 metadata="default trustee" -Did "V4SGRU86Z58d6TV7PBUe6f" has been created with "~CoRER63DVYnWZtK8uAzNbx" verkey -Metadata has been saved for DID "V4SGRU86Z58d6TV7PBUe6f" +Did "V4SGRU86Z58d6TV7PBUe6f" has been created with "~CoRER63DVYnWZtK8uAzNbx" verkey +Metadata has been saved for DID "V4SGRU86Z58d6TV7PBUe6f" did use V4SGRU86Z58d6TV7PBUe6f -Did "V4SGRU86Z58d6TV7PBUe6f" 
has been set as active +Did "V4SGRU86Z58d6TV7PBUe6f" has been set as active ledger nym did=UAPL7pTFdHGCXS4MGU8Zqv verkey=~6m1fM2eoZ1ygsXgq8NLPGc role=STEWARD -Nym request has been sent to Ledger. -Metadata: +Nym request has been sent to Ledger. +Metadata: +---------------------+---------------------+ | Request ID | Transaction time | +---------------------+---------------------+ | 1523199076276998677 | 2018-04-08 14:51:16 | +---------------------+---------------------+ -Data: +Data: +------------------------+-------------------------+---------+ | Did | Verkey | Role | +------------------------+-------------------------+---------+ @@ -33,34 +33,34 @@ ledger nym did=UAPL7pTFdHGCXS4MGU8Zqv verkey=~6m1fM2eoZ1ygsXgq8NLPGc role=STEWAR +------------------------+-------------------------+---------+ pool create AS-01-02-steward-pool gen_txn_file=./pool_transactions_genesis -Pool config "AS-01-02-steward-pool" has been created +Pool config "AS-01-02-steward-pool" has been created pool connect AS-01-02-steward-pool -Pool "AS-01-02-trustee-pool" has been disconnected -Pool "AS-01-02-steward-pool" has been connected +Pool "AS-01-02-trustee-pool" has been disconnected +Pool "AS-01-02-steward-pool" has been connected wallet create AS-01-02-steward-wallet key=testkey storage_config={"pool":"AS-01-02-steward-pool"} -Wallet "AS-01-02-steward-wallet" has been created +Wallet "AS-01-02-steward-wallet" has been created wallet open AS-01-02-steward-wallet key=testkey -Wallet "AS-01-02-trustee-wallet" has been closed -Wallet "AS-01-02-steward-wallet" has been opened +Wallet "AS-01-02-trustee-wallet" has been closed +Wallet "AS-01-02-steward-wallet" has been opened did new seed=0000000000000000000NewSteward122 metadata="new steward2" -Did "UAPL7pTFdHGCXS4MGU8Zqv" has been created with "~6m1fM2eoZ1ygsXgq8NLPGc" verkey -Metadata has been saved for DID "UAPL7pTFdHGCXS4MGU8Zqv" +Did "UAPL7pTFdHGCXS4MGU8Zqv" has been created with "~6m1fM2eoZ1ygsXgq8NLPGc" verkey +Metadata has been saved for 
DID "UAPL7pTFdHGCXS4MGU8Zqv" did use UAPL7pTFdHGCXS4MGU8Zqv -Did "UAPL7pTFdHGCXS4MGU8Zqv" has been set as active +Did "UAPL7pTFdHGCXS4MGU8Zqv" has been set as active did rotate-key -Verkey for did "UAPL7pTFdHGCXS4MGU8Zqv" has been updated. New verkey: "FZdXBoucpKUaYgRZpFXAWrn9pM4SWpFf9bxdmgaM9eaH" +Verkey for did "UAPL7pTFdHGCXS4MGU8Zqv" has been updated. New verkey: "FZdXBoucpKUaYgRZpFXAWrn9pM4SWpFf9bxdmgaM9eaH" wallet close -Wallet "AS-01-02-steward-wallet" has been closed +Wallet "AS-01-02-steward-wallet" has been closed pool disconnect -Pool "AS-01-02-steward-pool" has been disconnected +Pool "AS-01-02-steward-pool" has been disconnected wallet list +-------------------------+-----------------------+---------+ @@ -82,4 +82,4 @@ pool list exit -Goodbye... +Goodbye... diff --git a/acceptance/indy-cli-batches/expected/AS-01-02-before-migration-3.expected b/acceptance/indy-cli-batches/expected/AS-01-02-before-migration-3.expected index 047c76d2d..0bf4f1ee5 100755 --- a/acceptance/indy-cli-batches/expected/AS-01-02-before-migration-3.expected +++ b/acceptance/indy-cli-batches/expected/AS-01-02-before-migration-3.expected @@ -1,31 +1,31 @@ pool create AS-01-02-trustee-pool gen_txn_file=./pool_transactions_genesis -Pool config "AS-01-02-trustee-pool" has been created +Pool config "AS-01-02-trustee-pool" has been created pool connect AS-01-02-trustee-pool -Pool "AS-01-02-trustee-pool" has been connected +Pool "AS-01-02-trustee-pool" has been connected wallet create AS-01-02-trustee-wallet key=testkey storage_config={"pool":"AS-01-02-trustee-pool"} -Wallet "AS-01-02-trustee-wallet" has been created +Wallet "AS-01-02-trustee-wallet" has been created wallet open AS-01-02-trustee-wallet key=testkey -Wallet "AS-01-02-trustee-wallet" has been opened +Wallet "AS-01-02-trustee-wallet" has been opened did new seed=000000000000000000000000Trustee1 metadata="default trustee" -Did "V4SGRU86Z58d6TV7PBUe6f" has been created with "~CoRER63DVYnWZtK8uAzNbx" verkey -Metadata has been 
saved for DID "V4SGRU86Z58d6TV7PBUe6f" +Did "V4SGRU86Z58d6TV7PBUe6f" has been created with "~CoRER63DVYnWZtK8uAzNbx" verkey +Metadata has been saved for DID "V4SGRU86Z58d6TV7PBUe6f" did use V4SGRU86Z58d6TV7PBUe6f -Did "V4SGRU86Z58d6TV7PBUe6f" has been set as active +Did "V4SGRU86Z58d6TV7PBUe6f" has been set as active ledger nym did=Q4sXZA2FF8ePpnCbaWiNGP verkey=~RiX2km3TqJwmVNXj8WwcM6 role=STEWARD -Nym request has been sent to Ledger. -Metadata: +Nym request has been sent to Ledger. +Metadata: +---------------------+---------------------+ | Request ID | Transaction time | +---------------------+---------------------+ | 1523199283448925357 | 2018-04-08 14:54:43 | +---------------------+---------------------+ -Data: +Data: +------------------------+-------------------------+---------+ | Did | Verkey | Role | +------------------------+-------------------------+---------+ @@ -33,34 +33,34 @@ ledger nym did=Q4sXZA2FF8ePpnCbaWiNGP verkey=~RiX2km3TqJwmVNXj8WwcM6 role=STEWAR +------------------------+-------------------------+---------+ pool create AS-01-02-steward-pool gen_txn_file=./pool_transactions_genesis -Pool config "AS-01-02-steward-pool" has been created +Pool config "AS-01-02-steward-pool" has been created pool connect AS-01-02-steward-pool -Pool "AS-01-02-trustee-pool" has been disconnected -Pool "AS-01-02-steward-pool" has been connected +Pool "AS-01-02-trustee-pool" has been disconnected +Pool "AS-01-02-steward-pool" has been connected wallet create AS-01-02-steward-wallet key=testkey storage_config={"pool":"AS-01-02-steward-pool"} -Wallet "AS-01-02-steward-wallet" has been created +Wallet "AS-01-02-steward-wallet" has been created wallet open AS-01-02-steward-wallet key=testkey -Wallet "AS-01-02-trustee-wallet" has been closed -Wallet "AS-01-02-steward-wallet" has been opened +Wallet "AS-01-02-trustee-wallet" has been closed +Wallet "AS-01-02-steward-wallet" has been opened did new seed=0000000000000000000NewSteward123 metadata="new steward3" -Did 
"Q4sXZA2FF8ePpnCbaWiNGP" has been created with "~RiX2km3TqJwmVNXj8WwcM6" verkey -Metadata has been saved for DID "Q4sXZA2FF8ePpnCbaWiNGP" +Did "Q4sXZA2FF8ePpnCbaWiNGP" has been created with "~RiX2km3TqJwmVNXj8WwcM6" verkey +Metadata has been saved for DID "Q4sXZA2FF8ePpnCbaWiNGP" did use Q4sXZA2FF8ePpnCbaWiNGP -Did "Q4sXZA2FF8ePpnCbaWiNGP" has been set as active +Did "Q4sXZA2FF8ePpnCbaWiNGP" has been set as active did rotate-key -Verkey for did "Q4sXZA2FF8ePpnCbaWiNGP" has been updated. New verkey: "CmkHfbqvAerFb5CF3GsfupLC4spbqykLPANEkt3NrPPc" +Verkey for did "Q4sXZA2FF8ePpnCbaWiNGP" has been updated. New verkey: "CmkHfbqvAerFb5CF3GsfupLC4spbqykLPANEkt3NrPPc" wallet close -Wallet "AS-01-02-steward-wallet" has been closed +Wallet "AS-01-02-steward-wallet" has been closed pool disconnect -Pool "AS-01-02-steward-pool" has been disconnected +Pool "AS-01-02-steward-pool" has been disconnected wallet list +-------------------------+-----------------------+---------+ @@ -82,4 +82,4 @@ pool list exit -Goodbye... +Goodbye... 
diff --git a/acceptance/indy-cli-batches/expected/AS-02-01-invalid-cases.expected b/acceptance/indy-cli-batches/expected/AS-02-01-invalid-cases.expected index f72a7da24..07335e257 100644 --- a/acceptance/indy-cli-batches/expected/AS-02-01-invalid-cases.expected +++ b/acceptance/indy-cli-batches/expected/AS-02-01-invalid-cases.expected @@ -1,45 +1,45 @@ - pool create AS-pool gen_txn_file=./pool_transactions_genesis -Pool config "AS-pool" already exists +Pool config "AS-pool" already exists pool connect AS-pool -Pool "AS-pool" has been connected +Pool "AS-pool" has been connected - wallet create AS-02-wallet key=testkey storage_config={"pool":"AS-pool"} -Wallet "AS-02-wallet" already exists +Wallet "AS-02-wallet" already exists wallet open AS-02-wallet key=testkey -Wallet "AS-02-wallet" has been opened +Wallet "AS-02-wallet" has been opened did use V4SGRU86Z58d6TV7PBUe6f -Did "V4SGRU86Z58d6TV7PBUe6f" has been set as active +Did "V4SGRU86Z58d6TV7PBUe6f" has been set as active - ledger node target=4Tn3wZMNCvhSTXPcLinQDnHyj56DTLQtL61ki4jo2Loc client_port=9702 client_ip=10.0.0.105 alias=Node5 node_ip=10.0.0.105 node_port=9701 services=VALIDATOR blskey=2RdajPq6rCidK5gQbMzSJo1NfBMYiS3e44GxjTqZUk3RhBdtF28qEABHRo4MgHS2hwekoLWRTza9XiGEMRCompeujWpX85MPt87WdbTMysXZfb7J1ZXUEMrtE5aZahfx6p2YdhZdrArFvTmFWdojaD2V5SuvuaQL4G92anZ1yteay3R blskey_pop=RbGAR89T5bg6Bg66Xg1fy5NjVarmvr8Q7XAmEPCA6arPvcfcDwLiEftD2cCVoEnrRSPLhgf3Cn9n81gdgMCbSBEhKuWW3njXkPFE1jvP67bU1d2jb6gw5BxyxsF9qg1Hvz6pr181u7s1WZBmGXinLoKaFhb6jptHghHvtqEuTgoqXM -Transaction has been rejected: V4SGRU86Z58d6TV7PBUe6f is not a steward so cannot add a new node +Transaction has been rejected: V4SGRU86Z58d6TV7PBUe6f is not a steward so cannot add a new node did use Rhx2qwaeiGyhU9vn4ynHSS -Did "Rhx2qwaeiGyhU9vn4ynHSS" has been set as active +Did "Rhx2qwaeiGyhU9vn4ynHSS" has been set as active - ledger node target=4Tn3wZMNCvhSTXPcLinQDnHyj56DTLQtL61ki4jo2Loc client_port=9702 client_ip=10.0.0.105 alias=Node5 node_ip=10.0.0.105 
node_port=9701 services=VALIDATOR blskey=2RdajPq6rCidK5gQbMzSJo1NfBMYiS3e44GxjTqZUk3RhBdtF28qEABHRo4MgHS2hwekoLWRTza9XiGEMRCompeujWpX85MPt87WdbTMysXZfb7J1ZXUEMrtE5aZahfx6p2YdhZdrArFvTmFWdojaD2V5SuvuaQL4G92anZ1yteay3R blskey_pop=RbGAR89T5bg6Bg66Xg1fy5NjVarmvr8Q7XAmEPCA6arPvcfcDwLiEftD2cCVoEnrRSPLhgf3Cn9n81gdgMCbSBEhKuWW3njXkPFE1jvP67bU1d2jb6gw5BxyxsF9qg1Hvz6pr181u7s1WZBmGXinLoKaFhb6jptHghHvtqEuTgoqXM -Transaction has been rejected: Rhx2qwaeiGyhU9vn4ynHSS is not a steward so cannot add a new node +Transaction has been rejected: Rhx2qwaeiGyhU9vn4ynHSS is not a steward so cannot add a new node did use 5qhSKMkpqui6taGLTecwqf -Did "5qhSKMkpqui6taGLTecwqf" has been set as active +Did "5qhSKMkpqui6taGLTecwqf" has been set as active - ledger node target=4Tn3wZMNCvhSTXPcLinQDnHyj56DTLQtL61ki4jo2Loc client_port=9702 client_ip=10.0.0.105 alias=Node5 node_ip=10.0.0.105 node_port=9701 services=VALIDATOR blskey=2RdajPq6rCidK5gQbMzSJo1NfBMYiS3e44GxjTqZUk3RhBdtF28qEABHRo4MgHS2hwekoLWRTza9XiGEMRCompeujWpX85MPt87WdbTMysXZfb7J1ZXUEMrtE5aZahfx6p2YdhZdrArFvTmFWdojaD2V5SuvuaQL4G92anZ1yteay3R blskey_pop=RbGAR89T5bg6Bg66Xg1fy5NjVarmvr8Q7XAmEPCA6arPvcfcDwLiEftD2cCVoEnrRSPLhgf3Cn9n81gdgMCbSBEhKuWW3njXkPFE1jvP67bU1d2jb6gw5BxyxsF9qg1Hvz6pr181u7s1WZBmGXinLoKaFhb6jptHghHvtqEuTgoqXM -Transaction has been rejected: 5qhSKMkpqui6taGLTecwqf is not a steward so cannot add a new node +Transaction has been rejected: 5qhSKMkpqui6taGLTecwqf is not a steward so cannot add a new node did use XhYtvJqezMUKfF6KVNaGmT -Did "XhYtvJqezMUKfF6KVNaGmT" has been set as active +Did "XhYtvJqezMUKfF6KVNaGmT" has been set as active - ledger node target=4Tn3wZMNCvhSTXPcLinQDnHyj56DTLQtL61ki4jo2Loc client_port=9702 client_ip=10.0.0.105 alias=Node5 node_ip=10.0.0.105 node_port=9701 services=VALIDATOR blskey=2RdajPq6rCidK5gQbMzSJo1NfBMYiS3e44GxjTqZUk3RhBdtF28qEABHRo4MgHS2hwekoLWRTza9XiGEMRCompeujWpX85MPt87WdbTMysXZfb7J1ZXUEMrtE5aZahfx6p2YdhZdrArFvTmFWdojaD2V5SuvuaQL4G92anZ1yteay3R -Transaction has been rejected: A Proof of 
possession must be provided with BLS key +Transaction has been rejected: A Proof of possession must be provided with BLS key - ledger node target=4Tn3wZMNCvhSTXPcLinQDnHyj56DTLQtL61ki4jo2Loc client_port=9702 client_ip=10.0.0.105 alias=Node5 node_ip=10.0.0.105 node_port=9701 services=VALIDATOR blskey_pop=RbGAR89T5bg6Bg66Xg1fy5NjVarmvr8Q7XAmEPCA6arPvcfcDwLiEftD2cCVoEnrRSPLhgf3Cn9n81gdgMCbSBEhKuWW3njXkPFE1jvP67bU1d2jb6gw5BxyxsF9qg1Hvz6pr181u7s1WZBmGXinLoKaFhb6jptHghHvtqEuTgoqXM -Transaction has been rejected: A Proof of possession is not needed without BLS key +Transaction has been rejected: A Proof of possession is not needed without BLS key - ledger node target=4Tn3wZMNCvhSTXPcLinQDnHyj56DTLQtL61ki4jo2Loc client_port=9702 client_ip=10.0.0.105 alias=Node5 node_ip=10.0.0.105 node_port=9701 services=VALIDATOR blskey=2RdajPq6rCidK5gQbMzSJo1NfBMYiS3e44GxjTqZUk3RhBdtF28qEABHRo4MgHS2hwekoLWRTza9XiGEMRCompeujWpX85MPt87WdbTMysXZfb7J1ZXUEMrtE5aZahfx6p2YdhZdrArFvTmFWdojaD2V5SuvuaQL4G92anZ1yteay3R blskey_pop=RbGAR89T5bg6Bg66Xg1fy5NjVarmvr8Q7XAmEPCA6arPvcfcDwLiEftD2cCVoEnrRSPLhgf3Cn9n81gdgMCbSBEhKuWW3njXkPFE1jvP67bU1d2jb6gw5BxyxsF9qg1Hvz6pr181u7s1WZBmGXinLoKaFhb6jptHghHvtqEuT11111 -Transaction has been rejected: Proof of possession RbGAR89T5bg6Bg66Xg1fy5NjVarmvr8Q7XAmEPCA6arPvcfcDwLiEftD2cCVoEnrRSPLhgf3Cn9n81gdgMCbSBEhKuWW3njXkPFE1jvP67bU1d2jb6gw5BxyxsF9qg1Hvz6pr181u7s1WZBmGXinLoKaFhb6jptHghHvtqEuT11111 is incorrect for BLS key 2RdajPq6rCidK5gQbMzSJo1NfBMYiS3e44GxjTqZUk3RhBdtF28qEABHRo4MgHS2hwekoLWRTza9XiGEMRCompeujWpX85MPt87WdbTMysXZfb7J1ZXUEMrtE5aZahfx6p2YdhZdrArFvTmFWdojaD2V5SuvuaQL4G92anZ1yteay3R +Transaction has been rejected: Proof of possession RbGAR89T5bg6Bg66Xg1fy5NjVarmvr8Q7XAmEPCA6arPvcfcDwLiEftD2cCVoEnrRSPLhgf3Cn9n81gdgMCbSBEhKuWW3njXkPFE1jvP67bU1d2jb6gw5BxyxsF9qg1Hvz6pr181u7s1WZBmGXinLoKaFhb6jptHghHvtqEuT11111 is incorrect for BLS key 
2RdajPq6rCidK5gQbMzSJo1NfBMYiS3e44GxjTqZUk3RhBdtF28qEABHRo4MgHS2hwekoLWRTza9XiGEMRCompeujWpX85MPt87WdbTMysXZfb7J1ZXUEMrtE5aZahfx6p2YdhZdrArFvTmFWdojaD2V5SuvuaQL4G92anZ1yteay3R -Pool "AS-pool" has been disconnected -Wallet "AS-02-wallet" has been closed -Goodbye... +Pool "AS-pool" has been disconnected +Wallet "AS-02-wallet" has been closed +Goodbye... diff --git a/acceptance/indy-cli-batches/expected/AS-02-01-prepare-DIDs.expected b/acceptance/indy-cli-batches/expected/AS-02-01-prepare-DIDs.expected index 62f5a7335..bead9a78b 100644 --- a/acceptance/indy-cli-batches/expected/AS-02-01-prepare-DIDs.expected +++ b/acceptance/indy-cli-batches/expected/AS-02-01-prepare-DIDs.expected @@ -1,51 +1,51 @@ - pool create AS-pool gen_txn_file=./pool_transactions_genesis -Pool config "AS-pool" already exists +Pool config "AS-pool" already exists pool connect AS-pool -Pool "AS-pool" has been connected +Pool "AS-pool" has been connected - wallet create AS-02-wallet key=testkey storage_config={"pool":"AS-pool"} -Wallet "AS-02-wallet" already exists +Wallet "AS-02-wallet" already exists wallet open AS-02-wallet key=testkey -Wallet "AS-02-wallet" has been opened +Wallet "AS-02-wallet" has been opened did new seed=StewardNode500000000000000000000 metadata="node5 steward" -Did "XhYtvJqezMUKfF6KVNaGmT" has been created with "~RmCt3RtDvKDfQBSKxo4qvy" verkey -Metadata has been saved for DID "XhYtvJqezMUKfF6KVNaGmT" +Did "XhYtvJqezMUKfF6KVNaGmT" has been created with "~RmCt3RtDvKDfQBSKxo4qvy" verkey +Metadata has been saved for DID "XhYtvJqezMUKfF6KVNaGmT" did new seed=StewardNode600000000000000000000 metadata="node6 steward" -Did "FBQ5W7LxRp8bLeVNEafjpd" has been created with "~2a4KepmnVVAPGo5gNA3P3p" verkey -Metadata has been saved for DID "FBQ5W7LxRp8bLeVNEafjpd" +Did "FBQ5W7LxRp8bLeVNEafjpd" has been created with "~2a4KepmnVVAPGo5gNA3P3p" verkey +Metadata has been saved for DID "FBQ5W7LxRp8bLeVNEafjpd" did new seed=TestAddNodeWithTrustAnchor000000 metadata="trust anchor" -Did 
"Rhx2qwaeiGyhU9vn4ynHSS" has been created with "~RTJS62PRHurqHJnbnT1Hgi" verkey -Metadata has been saved for DID "Rhx2qwaeiGyhU9vn4ynHSS" +Did "Rhx2qwaeiGyhU9vn4ynHSS" has been created with "~RTJS62PRHurqHJnbnT1Hgi" verkey +Metadata has been saved for DID "Rhx2qwaeiGyhU9vn4ynHSS" did new seed=TestAddNodeWithIdentityOwner0000 metadata="identity owner" -Did "5qhSKMkpqui6taGLTecwqf" has been created with "~UWK9xwiKsqMQSBmymYpR5v" verkey -Metadata has been saved for DID "5qhSKMkpqui6taGLTecwqf" +Did "5qhSKMkpqui6taGLTecwqf" has been created with "~UWK9xwiKsqMQSBmymYpR5v" verkey +Metadata has been saved for DID "5qhSKMkpqui6taGLTecwqf" did new seed=000000000000000000000000Trustee1 metadata="default trustee" -Did "V4SGRU86Z58d6TV7PBUe6f" has been created with "~CoRER63DVYnWZtK8uAzNbx" verkey -Metadata has been saved for DID "V4SGRU86Z58d6TV7PBUe6f" +Did "V4SGRU86Z58d6TV7PBUe6f" has been created with "~CoRER63DVYnWZtK8uAzNbx" verkey +Metadata has been saved for DID "V4SGRU86Z58d6TV7PBUe6f" did new seed=TrusteeTest080000000000000000000 metadata="custom trustee" -Did "WnynyXyqjNFXeGELaR81Gh" has been created with "~XKXdGyymBGos1Hz2JPCEkM" verkey -Metadata has been saved for DID "WnynyXyqjNFXeGELaR81Gh" +Did "WnynyXyqjNFXeGELaR81Gh" has been created with "~XKXdGyymBGos1Hz2JPCEkM" verkey +Metadata has been saved for DID "WnynyXyqjNFXeGELaR81Gh" did use V4SGRU86Z58d6TV7PBUe6f -Did "V4SGRU86Z58d6TV7PBUe6f" has been set as active +Did "V4SGRU86Z58d6TV7PBUe6f" has been set as active ledger nym did=XhYtvJqezMUKfF6KVNaGmT role=STEWARD verkey=~RmCt3RtDvKDfQBSKxo4qvy -Nym request has been sent to Ledger. -Metadata: +Nym request has been sent to Ledger. 
+Metadata: +---------------------+---------------------+ | Request ID | Transaction time | +---------------------+---------------------+ | 1527783431507539137 | 2018-05-31 16:17:11 | +---------------------+---------------------+ -Data: +Data: +------------------------+-------------------------+---------+ | Did | Verkey | Role | +------------------------+-------------------------+---------+ @@ -53,14 +53,14 @@ ledger nym did=XhYtvJqezMUKfF6KVNaGmT role=STEWARD verkey=~RmCt3RtDvKDfQBSKxo4qv +------------------------+-------------------------+---------+ ledger nym did=FBQ5W7LxRp8bLeVNEafjpd role=STEWARD verkey=~2a4KepmnVVAPGo5gNA3P3p -Nym request has been sent to Ledger. -Metadata: +Nym request has been sent to Ledger. +Metadata: +---------------------+---------------------+ | Request ID | Transaction time | +---------------------+---------------------+ | 1527783431763862559 | 2018-05-31 16:17:11 | +---------------------+---------------------+ -Data: +Data: +------------------------+-------------------------+---------+ | Did | Verkey | Role | +------------------------+-------------------------+---------+ @@ -68,14 +68,14 @@ ledger nym did=FBQ5W7LxRp8bLeVNEafjpd role=STEWARD verkey=~2a4KepmnVVAPGo5gNA3P3 +------------------------+-------------------------+---------+ ledger nym did=Rhx2qwaeiGyhU9vn4ynHSS role=TRUST_ANCHOR verkey=~RTJS62PRHurqHJnbnT1Hgi -Nym request has been sent to Ledger. -Metadata: +Nym request has been sent to Ledger. 
+Metadata: +---------------------+---------------------+ | Request ID | Transaction time | +---------------------+---------------------+ | 1527783432009906066 | 2018-05-31 16:17:12 | +---------------------+---------------------+ -Data: +Data: +------------------------+-------------------------+--------------+ | Did | Verkey | Role | +------------------------+-------------------------+--------------+ @@ -83,14 +83,14 @@ ledger nym did=Rhx2qwaeiGyhU9vn4ynHSS role=TRUST_ANCHOR verkey=~RTJS62PRHurqHJnb +------------------------+-------------------------+--------------+ ledger nym did=5qhSKMkpqui6taGLTecwqf verkey=~UWK9xwiKsqMQSBmymYpR5v -Nym request has been sent to Ledger. -Metadata: +Nym request has been sent to Ledger. +Metadata: +---------------------+---------------------+ | Request ID | Transaction time | +---------------------+---------------------+ | 1527783432265453030 | 2018-05-31 16:17:12 | +---------------------+---------------------+ -Data: +Data: +------------------------+-------------------------+------+ | Did | Verkey | Role | +------------------------+-------------------------+------+ @@ -98,20 +98,20 @@ ledger nym did=5qhSKMkpqui6taGLTecwqf verkey=~UWK9xwiKsqMQSBmymYpR5v +------------------------+-------------------------+------+ ledger nym did=WnynyXyqjNFXeGELaR81Gh role=TRUSTEE verkey=~XKXdGyymBGos1Hz2JPCEkM -Nym request has been sent to Ledger. -Metadata: +Nym request has been sent to Ledger. 
+Metadata: +---------------------+---------------------+ | Request ID | Transaction time | +---------------------+---------------------+ | 1527783432528814452 | 2018-05-31 16:17:12 | +---------------------+---------------------+ -Data: +Data: +------------------------+-------------------------+---------+ | Did | Verkey | Role | +------------------------+-------------------------+---------+ | WnynyXyqjNFXeGELaR81Gh | ~XKXdGyymBGos1Hz2JPCEkM | TRUSTEE | +------------------------+-------------------------+---------+ -Pool "AS-pool" has been connected -Wallet "AS-02-wallet" has been closed -Goodbye... +Pool "AS-pool" has been connected +Wallet "AS-02-wallet" has been closed +Goodbye... diff --git a/acceptance/indy-cli-batches/expected/AS-02-02-invalid-cases.expected b/acceptance/indy-cli-batches/expected/AS-02-02-invalid-cases.expected index d95f41069..1c46ccd59 100644 --- a/acceptance/indy-cli-batches/expected/AS-02-02-invalid-cases.expected +++ b/acceptance/indy-cli-batches/expected/AS-02-02-invalid-cases.expected @@ -1,51 +1,54 @@ - pool create AS-pool gen_txn_file=./pool_transactions_genesis -Pool config "AS-pool" already exists +Pool config "AS-pool" already exists pool connect AS-pool -Pool "AS-pool" has been connected +Pool "AS-pool" has been connected - wallet create AS-02-wallet key=testkey storage_config={"pool":"AS-pool"} -Wallet "AS-02-wallet" already exists +Wallet "AS-02-wallet" already exists wallet open AS-02-wallet key=testkey -Wallet "AS-02-wallet" has been opened +Wallet "AS-02-wallet" has been opened did use XhYtvJqezMUKfF6KVNaGmT -Did "XhYtvJqezMUKfF6KVNaGmT" has been set as active +Did "XhYtvJqezMUKfF6KVNaGmT" has been set as active - ledger node target=6G9QhQa3HWjRKeRmEvEkLbWWf2t7cw6KLtafzi494G4G alias=Node6 services= -Transaction has been rejected: XhYtvJqezMUKfF6KVNaGmT is not a steward of node 6G9QhQa3HWjRKeRmEvEkLbWWf2t7cw6KLtafzi494G4G +Transaction has been rejected: XhYtvJqezMUKfF6KVNaGmT is not a steward of node 
6G9QhQa3HWjRKeRmEvEkLbWWf2t7cw6KLtafzi494G4G + +- ledger node target=4Tn3wZMNCvhSTXPcLinQDnHyj56DTLQtL61ki4jo2Loc alias=Node6 services= +Transaction has been rejected: existing data has conflicts with request data {'alias': 'Node6', 'services': []}. Error: Node's alias cannot be changed did use Rhx2qwaeiGyhU9vn4ynHSS -Did "Rhx2qwaeiGyhU9vn4ynHSS" has been set as active +Did "Rhx2qwaeiGyhU9vn4ynHSS" has been set as active - ledger node target=4Tn3wZMNCvhSTXPcLinQDnHyj56DTLQtL61ki4jo2Loc alias=Node5 services= -Transaction has been rejected: Missing some of {'node_port', 'client_port', 'client_ip', 'alias', 'node_ip'} +Transaction has been rejected: Rhx2qwaeiGyhU9vn4ynHSS is not a steward so cannot update a node did use 5qhSKMkpqui6taGLTecwqf -Did "5qhSKMkpqui6taGLTecwqf" has been set as active +Did "5qhSKMkpqui6taGLTecwqf" has been set as active - ledger node target=4Tn3wZMNCvhSTXPcLinQDnHyj56DTLQtL61ki4jo2Loc alias=Node5 services= -Transaction has been rejected: Missing some of {'client_ip', 'node_ip', 'client_port', 'node_port', 'alias'} +Transaction has been rejected: 5qhSKMkpqui6taGLTecwqf is not a steward so cannot update a node did use XhYtvJqezMUKfF6KVNaGmT -Did "XhYtvJqezMUKfF6KVNaGmT" has been set as active +Did "XhYtvJqezMUKfF6KVNaGmT" has been set as active - ledger node target=6G9QhQa3HWjRKeRmEvEkLbWWf2t7cw6KLtafzi494G4G alias=Node6 services=VALIDATOR -Transaction has been rejected: XhYtvJqezMUKfF6KVNaGmT is not a steward of node 6G9QhQa3HWjRKeRmEvEkLbWWf2t7cw6KLtafzi494G4G +Transaction has been rejected: XhYtvJqezMUKfF6KVNaGmT is not a steward of node 6G9QhQa3HWjRKeRmEvEkLbWWf2t7cw6KLtafzi494G4G did use Rhx2qwaeiGyhU9vn4ynHSS -Did "Rhx2qwaeiGyhU9vn4ynHSS" has been set as active +Did "Rhx2qwaeiGyhU9vn4ynHSS" has been set as active - ledger node target=6G9QhQa3HWjRKeRmEvEkLbWWf2t7cw6KLtafzi494G4G alias=Node6 services=VALIDATOR -Transaction has been rejected: Missing some of {'client_ip', 'node_ip', 'alias', 'node_port', 'client_port'} +Transaction has been 
rejected: Rhx2qwaeiGyhU9vn4ynHSS is not a steward so cannot update a node did use 5qhSKMkpqui6taGLTecwqf -Did "5qhSKMkpqui6taGLTecwqf" has been set as active +Did "5qhSKMkpqui6taGLTecwqf" has been set as active - ledger node target=6G9QhQa3HWjRKeRmEvEkLbWWf2t7cw6KLtafzi494G4G alias=Node6 services=VALIDATOR -Transaction has been rejected: Missing some of {'node_port', 'client_port', 'client_ip', 'alias', 'node_ip'} +Transaction has been rejected: 5qhSKMkpqui6taGLTecwqf is not a steward so cannot update a node -Wallet "AS-02-wallet" has been closed -Pool "AS-pool" has been connected -Goodbye... +Pool "AS-pool" has been disconnected +Wallet "AS-02-wallet" has been closed +Goodbye... \ No newline at end of file diff --git a/acceptance/indy-cli-batches/expected/AS-03-01-identity-owner-anyone-can-write.expected b/acceptance/indy-cli-batches/expected/AS-03-01-identity-owner-anyone-can-write.expected index 60c99a58c..4021fd078 100644 --- a/acceptance/indy-cli-batches/expected/AS-03-01-identity-owner-anyone-can-write.expected +++ b/acceptance/indy-cli-batches/expected/AS-03-01-identity-owner-anyone-can-write.expected @@ -1,71 +1,71 @@ - pool create AS-0301-owner gen_txn_file=./pool_transactions_genesis -Pool config "AS-0301-owner" has been created +Pool config "AS-0301-owner" has been created pool connect AS-0301-owner -Pool "AS-0301-owner" has been connected +Pool "AS-0301-owner" has been connected - wallet create AS-03-wallet-owner key=testkey storage_config={"pool":"AS-0301-owner"} -Wallet "AS-03-wallet-owner" has been created +Wallet "AS-03-wallet-owner" has been created wallet open AS-03-wallet-owner key=testkey -Wallet "AS-03-wallet-owner" has been opened +Wallet "AS-03-wallet-owner" has been opened did new seed=000000000000000000000000Trustee1 metadata="Default Trustee" -Did "V4SGRU86Z58d6TV7PBUe6f" has been created with "~CoRER63DVYnWZtK8uAzNbx" verkey -Metadata has been saved for DID "V4SGRU86Z58d6TV7PBUe6f" +Did "V4SGRU86Z58d6TV7PBUe6f" has been created with 
"~CoRER63DVYnWZtK8uAzNbx" verkey +Metadata has been saved for DID "V4SGRU86Z58d6TV7PBUe6f" did new seed=TestTrustee100000000000000000003 metadata="test trustee 1" -Did "FiAsNdcWnpB2L22ZUGyKHa" has been created with "~BZHjLX8NYwJXPTE746hn6Y" verkey -Metadata has been saved for DID "FiAsNdcWnpB2L22ZUGyKHa" +Did "FiAsNdcWnpB2L22ZUGyKHa" has been created with "~BZHjLX8NYwJXPTE746hn6Y" verkey +Metadata has been saved for DID "FiAsNdcWnpB2L22ZUGyKHa" did new seed=TestTrustee200000000000000000003 metadata="test trustee 2" -Did "CYdQe2tmSwhv2XdicegoAn" has been created with "~E7MjQHm14YnmZD9RErTBsi" verkey -Metadata has been saved for DID "CYdQe2tmSwhv2XdicegoAn" +Did "CYdQe2tmSwhv2XdicegoAn" has been created with "~E7MjQHm14YnmZD9RErTBsi" verkey +Metadata has been saved for DID "CYdQe2tmSwhv2XdicegoAn" did new seed=TestSteward100000000000000000003 metadata="test steward 1" -Did "AhqUV2zHYdNaWLFCCe7xCn" has been created with "~YUY2ChUGWJovtU6XTn61D8" verkey -Metadata has been saved for DID "AhqUV2zHYdNaWLFCCe7xCn" +Did "AhqUV2zHYdNaWLFCCe7xCn" has been created with "~YUY2ChUGWJovtU6XTn61D8" verkey +Metadata has been saved for DID "AhqUV2zHYdNaWLFCCe7xCn" did new seed=TestSteward200000000000000000003 metadata="test steward 2" -Did "CbPwHxKEibPhV4pgXWpu26" has been created with "~MviYa49QADQXAM68WSiLPD" verkey -Metadata has been saved for DID "CbPwHxKEibPhV4pgXWpu26" +Did "CbPwHxKEibPhV4pgXWpu26" has been created with "~MviYa49QADQXAM68WSiLPD" verkey +Metadata has been saved for DID "CbPwHxKEibPhV4pgXWpu26" did new seed=TestTrustAnchor10000000000000003 metadata="test trust anchor 1" -Did "DriVwCMbtEgkmoEHKin6Ah" has been created with "~YPZot1kM4DLwvsX6mtcKd9" verkey -Metadata has been saved for DID "DriVwCMbtEgkmoEHKin6Ah" +Did "DriVwCMbtEgkmoEHKin6Ah" has been created with "~YPZot1kM4DLwvsX6mtcKd9" verkey +Metadata has been saved for DID "DriVwCMbtEgkmoEHKin6Ah" did new seed=TestTrustAnchor20000000000000003 metadata="test trust anchor 2" -Did "2b6xTx2HniDU77nxHm6zWB" has 
been created with "~HF34ymwfTJngb8zFDmCyvX" verkey -Metadata has been saved for DID "2b6xTx2HniDU77nxHm6zWB" +Did "2b6xTx2HniDU77nxHm6zWB" has been created with "~HF34ymwfTJngb8zFDmCyvX" verkey +Metadata has been saved for DID "2b6xTx2HniDU77nxHm6zWB" did new seed=TestTrustAnchor30000000000000003 metadata="test trust anchor 3" -Did "4xuWDwsQSqzQmYSheSWFyg" has been created with "~BmnEpJMi6kJHUcxcVJa2R4" verkey -Metadata has been saved for DID "4xuWDwsQSqzQmYSheSWFyg" +Did "4xuWDwsQSqzQmYSheSWFyg" has been created with "~BmnEpJMi6kJHUcxcVJa2R4" verkey +Metadata has been saved for DID "4xuWDwsQSqzQmYSheSWFyg" did new seed=RandomUser1000000000000000000003 metadata="test identity 1" -Did "LBbKEeczA9iL21p4Kgxcuf" has been created with "~3ZvjdaYs4cdFYXAwNGR85p" verkey -Metadata has been saved for DID "LBbKEeczA9iL21p4Kgxcuf" +Did "LBbKEeczA9iL21p4Kgxcuf" has been created with "~3ZvjdaYs4cdFYXAwNGR85p" verkey +Metadata has been saved for DID "LBbKEeczA9iL21p4Kgxcuf" did new seed=RandomUser2000000000000000000003 metadata="test identity 2" -Did "Fk9ENxnz1ztDgdivQBJTCZ" has been created with "~NT9ANFeDhCLKDg5PNKZXKK" verkey -Metadata has been saved for DID "Fk9ENxnz1ztDgdivQBJTCZ" +Did "Fk9ENxnz1ztDgdivQBJTCZ" has been created with "~NT9ANFeDhCLKDg5PNKZXKK" verkey +Metadata has been saved for DID "Fk9ENxnz1ztDgdivQBJTCZ" did new seed=RandomUser3000000000000000000003 metadata="test identity 3" -Did "Q8uxmCGdXgLHHtaTwYtVJG" has been created with "~UpRqa9gQ1jsbUBvKYPtWSo" verkey -Metadata has been saved for DID "Q8uxmCGdXgLHHtaTwYtVJG" +Did "Q8uxmCGdXgLHHtaTwYtVJG" has been created with "~UpRqa9gQ1jsbUBvKYPtWSo" verkey +Metadata has been saved for DID "Q8uxmCGdXgLHHtaTwYtVJG" did use V4SGRU86Z58d6TV7PBUe6f -Did "V4SGRU86Z58d6TV7PBUe6f" has been set as active +Did "V4SGRU86Z58d6TV7PBUe6f" has been set as active ledger nym did=AhqUV2zHYdNaWLFCCe7xCn role=STEWARD verkey=~YUY2ChUGWJovtU6XTn61D8 -Nym request has been sent to Ledger. -Metadata: +Nym request has been sent to Ledger. 
+Metadata: +------------------------+-----------------+---------------------+---------------------+ | From | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | V4SGRU86Z58d6TV7PBUe6f | 22 | 1534004765772764798 | 2018-08-11 16:26:05 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+-------------------------+---------+ | Did | Verkey | Role | +------------------------+-------------------------+---------+ @@ -73,14 +73,14 @@ ledger nym did=AhqUV2zHYdNaWLFCCe7xCn role=STEWARD verkey=~YUY2ChUGWJovtU6XTn61D +------------------------+-------------------------+---------+ - ledger get-nym did=AhqUV2zHYdNaWLFCCe7xCn -Following NYM has been received. -Metadata: +Following NYM has been received. +Metadata: +------------------------+-----------------+---------------------+---------------------+ | Identifier | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | V4SGRU86Z58d6TV7PBUe6f | 22 | 1534004766586504866 | 2018-08-11 16:26:05 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+------------------------+-------------------------+---------+ | Identifier | Dest | Verkey | Role | +------------------------+------------------------+-------------------------+---------+ @@ -88,17 +88,17 @@ ledger nym did=AhqUV2zHYdNaWLFCCe7xCn role=STEWARD verkey=~YUY2ChUGWJovtU6XTn61D +------------------------+------------------------+-------------------------+---------+ did use V4SGRU86Z58d6TV7PBUe6f -Did "V4SGRU86Z58d6TV7PBUe6f" has been set as active +Did "V4SGRU86Z58d6TV7PBUe6f" has been set as active ledger nym did=DriVwCMbtEgkmoEHKin6Ah role=TRUST_ANCHOR verkey=~YPZot1kM4DLwvsX6mtcKd9 -Nym request has been sent to Ledger. 
-Metadata: +Nym request has been sent to Ledger. +Metadata: +------------------------+-----------------+---------------------+---------------------+ | From | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | V4SGRU86Z58d6TV7PBUe6f | 23 | 1534004766668159443 | 2018-08-11 16:26:06 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+-------------------------+--------------+ | Did | Verkey | Role | +------------------------+-------------------------+--------------+ @@ -106,14 +106,14 @@ ledger nym did=DriVwCMbtEgkmoEHKin6Ah role=TRUST_ANCHOR verkey=~YPZot1kM4DLwvsX6 +------------------------+-------------------------+--------------+ - ledger get-nym did=DriVwCMbtEgkmoEHKin6Ah -Following NYM has been received. -Metadata: +Following NYM has been received. +Metadata: +------------------------+-----------------+---------------------+---------------------+ | Identifier | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | V4SGRU86Z58d6TV7PBUe6f | 23 | 1534004767538168631 | 2018-08-11 16:26:06 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+------------------------+-------------------------+--------------+ | Identifier | Dest | Verkey | Role | +------------------------+------------------------+-------------------------+--------------+ @@ -121,17 +121,17 @@ ledger nym did=DriVwCMbtEgkmoEHKin6Ah role=TRUST_ANCHOR verkey=~YPZot1kM4DLwvsX6 +------------------------+------------------------+-------------------------+--------------+ did use V4SGRU86Z58d6TV7PBUe6f -Did "V4SGRU86Z58d6TV7PBUe6f" has been set as active +Did "V4SGRU86Z58d6TV7PBUe6f" has been set as active ledger nym did=LBbKEeczA9iL21p4Kgxcuf verkey=~3ZvjdaYs4cdFYXAwNGR85p -Nym 
request has been sent to Ledger. -Metadata: +Nym request has been sent to Ledger. +Metadata: +------------------------+-----------------+---------------------+---------------------+ | From | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | V4SGRU86Z58d6TV7PBUe6f | 24 | 1534004767610126516 | 2018-08-11 16:26:08 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+-------------------------+------+ | Did | Verkey | Role | +------------------------+-------------------------+------+ @@ -139,14 +139,14 @@ ledger nym did=LBbKEeczA9iL21p4Kgxcuf verkey=~3ZvjdaYs4cdFYXAwNGR85p +------------------------+-------------------------+------+ - ledger get-nym did=LBbKEeczA9iL21p4Kgxcuf -Following NYM has been received. -Metadata: +Following NYM has been received. +Metadata: +------------------------+-----------------+---------------------+---------------------+ | Identifier | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | V4SGRU86Z58d6TV7PBUe6f | 24 | 1534004768598085816 | 2018-08-11 16:26:08 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+------------------------+-------------------------+------+ | Identifier | Dest | Verkey | Role | +------------------------+------------------------+-------------------------+------+ @@ -154,23 +154,23 @@ ledger nym did=LBbKEeczA9iL21p4Kgxcuf verkey=~3ZvjdaYs4cdFYXAwNGR85p +------------------------+------------------------+-------------------------+------+ did use LBbKEeczA9iL21p4Kgxcuf -Did "LBbKEeczA9iL21p4Kgxcuf" has been set as active +Did "LBbKEeczA9iL21p4Kgxcuf" has been set as active - ledger nym did=V4SGRU86Z58d6TV7PBUe6f role= -Transaction has been rejected: LBbKEeczA9iL21p4Kgxcuf is neither 
Trustee nor owner of V4SGRU86Z58d6TV7PBUe6f +Transaction has been rejected: LBbKEeczA9iL21p4Kgxcuf is neither Trustee nor owner of V4SGRU86Z58d6TV7PBUe6f did use V4SGRU86Z58d6TV7PBUe6f -Did "V4SGRU86Z58d6TV7PBUe6f" has been set as active +Did "V4SGRU86Z58d6TV7PBUe6f" has been set as active ledger nym did=CYdQe2tmSwhv2XdicegoAn role=TRUSTEE verkey=~E7MjQHm14YnmZD9RErTBsi -Nym request has been sent to Ledger. -Metadata: +Nym request has been sent to Ledger. +Metadata: +------------------------+-----------------+---------------------+---------------------+ | From | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | V4SGRU86Z58d6TV7PBUe6f | 25 | 1534004769268089334 | 2018-08-11 16:26:10 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+-------------------------+---------+ | Did | Verkey | Role | +------------------------+-------------------------+---------+ @@ -178,14 +178,14 @@ ledger nym did=CYdQe2tmSwhv2XdicegoAn role=TRUSTEE verkey=~E7MjQHm14YnmZD9RErTBs +------------------------+-------------------------+---------+ - ledger get-nym did=CYdQe2tmSwhv2XdicegoAn -Following NYM has been received. -Metadata: +Following NYM has been received. 
+Metadata: +------------------------+-----------------+---------------------+---------------------+ | Identifier | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | V4SGRU86Z58d6TV7PBUe6f | 25 | 1534004770648712280 | 2018-08-11 16:26:10 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+------------------------+-------------------------+---------+ | Identifier | Dest | Verkey | Role | +------------------------+------------------------+-------------------------+---------+ @@ -193,32 +193,32 @@ ledger nym did=CYdQe2tmSwhv2XdicegoAn role=TRUSTEE verkey=~E7MjQHm14YnmZD9RErTBs +------------------------+------------------------+-------------------------+---------+ did use LBbKEeczA9iL21p4Kgxcuf -Did "LBbKEeczA9iL21p4Kgxcuf" has been set as active +Did "LBbKEeczA9iL21p4Kgxcuf" has been set as active - ledger nym did=FiAsNdcWnpB2L22ZUGyKHa role=TRUSTEE verkey=~BZHjLX8NYwJXPTE746hn6Y -Transaction has been rejected: None role cannot add TRUSTEE +Transaction has been rejected: None role cannot add TRUSTEE - ledger get-nym did=FiAsNdcWnpB2L22ZUGyKHa -NYM not found +NYM not found did use LBbKEeczA9iL21p4Kgxcuf -Did "LBbKEeczA9iL21p4Kgxcuf" has been set as active +Did "LBbKEeczA9iL21p4Kgxcuf" has been set as active - ledger nym did=AhqUV2zHYdNaWLFCCe7xCn role= -Transaction has been rejected: LBbKEeczA9iL21p4Kgxcuf is neither Trustee nor owner of AhqUV2zHYdNaWLFCCe7xCn +Transaction has been rejected: LBbKEeczA9iL21p4Kgxcuf is neither Trustee nor owner of AhqUV2zHYdNaWLFCCe7xCn did use AhqUV2zHYdNaWLFCCe7xCn -Did "AhqUV2zHYdNaWLFCCe7xCn" has been set as active +Did "AhqUV2zHYdNaWLFCCe7xCn" has been set as active ledger nym did=4xuWDwsQSqzQmYSheSWFyg role=TRUST_ANCHOR verkey=~BmnEpJMi6kJHUcxcVJa2R4 -Nym request has been sent to Ledger. -Metadata: +Nym request has been sent to Ledger. 
+Metadata: +------------------------+-----------------+---------------------+---------------------+ | From | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | AhqUV2zHYdNaWLFCCe7xCn | 26 | 1534004772311384746 | 2018-08-11 16:26:13 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+-------------------------+--------------+ | Did | Verkey | Role | +------------------------+-------------------------+--------------+ @@ -226,14 +226,14 @@ ledger nym did=4xuWDwsQSqzQmYSheSWFyg role=TRUST_ANCHOR verkey=~BmnEpJMi6kJHUcxc +------------------------+-------------------------+--------------+ - ledger get-nym did=4xuWDwsQSqzQmYSheSWFyg -Following NYM has been received. -Metadata: +Following NYM has been received. +Metadata: +------------------------+-----------------+---------------------+---------------------+ | Identifier | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | AhqUV2zHYdNaWLFCCe7xCn | 26 | 1534004773697374448 | 2018-08-11 16:26:13 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+------------------------+-------------------------+--------------+ | Identifier | Dest | Verkey | Role | +------------------------+------------------------+-------------------------+--------------+ @@ -241,32 +241,32 @@ ledger nym did=4xuWDwsQSqzQmYSheSWFyg role=TRUST_ANCHOR verkey=~BmnEpJMi6kJHUcxc +------------------------+------------------------+-------------------------+--------------+ did use LBbKEeczA9iL21p4Kgxcuf -Did "LBbKEeczA9iL21p4Kgxcuf" has been set as active +Did "LBbKEeczA9iL21p4Kgxcuf" has been set as active - ledger nym did=CbPwHxKEibPhV4pgXWpu26 role=STEWARD verkey=~MviYa49QADQXAM68WSiLPD -Transaction has been rejected: None role 
cannot add STEWARD +Transaction has been rejected: None role cannot add STEWARD - ledger get-nym did=CbPwHxKEibPhV4pgXWpu26 -NYM not found +NYM not found did use LBbKEeczA9iL21p4Kgxcuf -Did "LBbKEeczA9iL21p4Kgxcuf" has been set as active +Did "LBbKEeczA9iL21p4Kgxcuf" has been set as active - ledger nym did=DriVwCMbtEgkmoEHKin6Ah role= -Transaction has been rejected: LBbKEeczA9iL21p4Kgxcuf is neither Trustee nor owner of DriVwCMbtEgkmoEHKin6Ah +Transaction has been rejected: LBbKEeczA9iL21p4Kgxcuf is neither Trustee nor owner of DriVwCMbtEgkmoEHKin6Ah did use DriVwCMbtEgkmoEHKin6Ah -Did "DriVwCMbtEgkmoEHKin6Ah" has been set as active +Did "DriVwCMbtEgkmoEHKin6Ah" has been set as active ledger nym did=Q8uxmCGdXgLHHtaTwYtVJG verkey=~UpRqa9gQ1jsbUBvKYPtWSo -Nym request has been sent to Ledger. -Metadata: +Nym request has been sent to Ledger. +Metadata: +------------------------+-----------------+---------------------+---------------------+ | From | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | DriVwCMbtEgkmoEHKin6Ah | 27 | 1534004775388034251 | 2018-08-11 16:26:16 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+-------------------------+------+ | Did | Verkey | Role | +------------------------+-------------------------+------+ @@ -274,14 +274,14 @@ ledger nym did=Q8uxmCGdXgLHHtaTwYtVJG verkey=~UpRqa9gQ1jsbUBvKYPtWSo +------------------------+-------------------------+------+ - ledger get-nym did=Q8uxmCGdXgLHHtaTwYtVJG -Following NYM has been received. -Metadata: +Following NYM has been received. 
+Metadata: +------------------------+-----------------+---------------------+---------------------+ | Identifier | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | DriVwCMbtEgkmoEHKin6Ah | 27 | 1534004776760912140 | 2018-08-11 16:26:16 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+------------------------+-------------------------+------+ | Identifier | Dest | Verkey | Role | +------------------------+------------------------+-------------------------+------+ @@ -289,26 +289,26 @@ ledger nym did=Q8uxmCGdXgLHHtaTwYtVJG verkey=~UpRqa9gQ1jsbUBvKYPtWSo +------------------------+------------------------+-------------------------+------+ did use LBbKEeczA9iL21p4Kgxcuf -Did "LBbKEeczA9iL21p4Kgxcuf" has been set as active +Did "LBbKEeczA9iL21p4Kgxcuf" has been set as active - ledger nym did=2b6xTx2HniDU77nxHm6zWB role=TRUST_ANCHOR verkey=~HF34ymwfTJngb8zFDmCyvX -Transaction has been rejected: None role cannot add TRUST_ANCHOR +Transaction has been rejected: None role cannot add TRUST_ANCHOR - ledger get-nym did=2b6xTx2HniDU77nxHm6zWB -NYM not found +NYM not found did use LBbKEeczA9iL21p4Kgxcuf -Did "LBbKEeczA9iL21p4Kgxcuf" has been set as active +Did "LBbKEeczA9iL21p4Kgxcuf" has been set as active ledger nym did=Fk9ENxnz1ztDgdivQBJTCZ verkey=~NT9ANFeDhCLKDg5PNKZXKK -Nym request has been sent to Ledger. -Metadata: +Nym request has been sent to Ledger. 
+Metadata: +------------------------+-----------------+---------------------+---------------------+ | From | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | LBbKEeczA9iL21p4Kgxcuf | 28 | 1534004777497343573 | 2018-08-11 16:26:18 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+-------------------------+------+ | Did | Verkey | Role | +------------------------+-------------------------+------+ @@ -316,14 +316,14 @@ ledger nym did=Fk9ENxnz1ztDgdivQBJTCZ verkey=~NT9ANFeDhCLKDg5PNKZXKK +------------------------+-------------------------+------+ - ledger get-nym did=Fk9ENxnz1ztDgdivQBJTCZ -Following NYM has been received. -Metadata: +Following NYM has been received. +Metadata: +------------------------+-----------------+---------------------+---------------------+ | Identifier | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | LBbKEeczA9iL21p4Kgxcuf | 28 | 1534004778787990056 | 2018-08-11 16:26:18 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+------------------------+-------------------------+------+ | Identifier | Dest | Verkey | Role | +------------------------+------------------------+-------------------------+------+ @@ -331,32 +331,32 @@ ledger nym did=Fk9ENxnz1ztDgdivQBJTCZ verkey=~NT9ANFeDhCLKDg5PNKZXKK +------------------------+------------------------+-------------------------+------+ did use LBbKEeczA9iL21p4Kgxcuf -Did "LBbKEeczA9iL21p4Kgxcuf" has been set as active +Did "LBbKEeczA9iL21p4Kgxcuf" has been set as active - ledger nym did=Q8uxmCGdXgLHHtaTwYtVJG role=TRUST_ANCHOR -Transaction has been rejected: LBbKEeczA9iL21p4Kgxcuf is neither Trustee nor owner of Q8uxmCGdXgLHHtaTwYtVJG +Transaction has been rejected: 
LBbKEeczA9iL21p4Kgxcuf is neither Trustee nor owner of Q8uxmCGdXgLHHtaTwYtVJG - ledger nym did=Q8uxmCGdXgLHHtaTwYtVJG role=STEWARD -Transaction has been rejected: LBbKEeczA9iL21p4Kgxcuf is neither Trustee nor owner of Q8uxmCGdXgLHHtaTwYtVJG +Transaction has been rejected: LBbKEeczA9iL21p4Kgxcuf is neither Trustee nor owner of Q8uxmCGdXgLHHtaTwYtVJG - ledger nym did=Q8uxmCGdXgLHHtaTwYtVJG role=TRUSTEE -Transaction has been rejected: LBbKEeczA9iL21p4Kgxcuf is neither Trustee nor owner of Q8uxmCGdXgLHHtaTwYtVJG +Transaction has been rejected: LBbKEeczA9iL21p4Kgxcuf is neither Trustee nor owner of Q8uxmCGdXgLHHtaTwYtVJG did use V4SGRU86Z58d6TV7PBUe6f -Did "V4SGRU86Z58d6TV7PBUe6f" has been set as active +Did "V4SGRU86Z58d6TV7PBUe6f" has been set as active did new seed=RandomAttribOwner000000000000003 -Did "UffJCJngTXc1o84dQ7aEUb" has been created with "~J4HtnGz2wW2nE7VuMeg39M" verkey +Did "UffJCJngTXc1o84dQ7aEUb" has been created with "~J4HtnGz2wW2nE7VuMeg39M" verkey ledger nym did=UffJCJngTXc1o84dQ7aEUb verkey=~J4HtnGz2wW2nE7VuMeg39M -Nym request has been sent to Ledger. -Metadata: +Nym request has been sent to Ledger. 
+Metadata: +------------------------+-----------------+---------------------+---------------------+ | From | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | V4SGRU86Z58d6TV7PBUe6f | 29 | 1534004781500707517 | 2018-08-11 16:26:22 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+-------------------------+------+ | Did | Verkey | Role | +------------------------+-------------------------+------+ @@ -364,17 +364,17 @@ ledger nym did=UffJCJngTXc1o84dQ7aEUb verkey=~J4HtnGz2wW2nE7VuMeg39M +------------------------+-------------------------+------+ did new seed=RandomUserForRotateKey0000000003 -Did "X3zYajU7gbz9Pu8k6E7Ppf" has been created with "~3ZDo6g4ZDRKGauKrR452xU" verkey +Did "X3zYajU7gbz9Pu8k6E7Ppf" has been created with "~3ZDo6g4ZDRKGauKrR452xU" verkey ledger nym did=X3zYajU7gbz9Pu8k6E7Ppf verkey=~3ZDo6g4ZDRKGauKrR452xU -Nym request has been sent to Ledger. -Metadata: +Nym request has been sent to Ledger. 
+Metadata: +------------------------+-----------------+---------------------+---------------------+ | From | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | V4SGRU86Z58d6TV7PBUe6f | 30 | 1534004782875645242 | 2018-08-11 16:26:23 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+-------------------------+------+ | Did | Verkey | Role | +------------------------+-------------------------+------+ @@ -382,29 +382,29 @@ ledger nym did=X3zYajU7gbz9Pu8k6E7Ppf verkey=~3ZDo6g4ZDRKGauKrR452xU +------------------------+-------------------------+------+ did use X3zYajU7gbz9Pu8k6E7Ppf -Did "X3zYajU7gbz9Pu8k6E7Ppf" has been set as active +Did "X3zYajU7gbz9Pu8k6E7Ppf" has been set as active did rotate-key seed=RandomUserForRotateKey0NewKey003 -Verkey for did "X3zYajU7gbz9Pu8k6E7Ppf" has been updated. New verkey: "DHUqLyUnhjBgsEKHCm3cNekC29rvx6STQAgvepPRLjAH" +Verkey for did "X3zYajU7gbz9Pu8k6E7Ppf" has been updated. New verkey: "DHUqLyUnhjBgsEKHCm3cNekC29rvx6STQAgvepPRLjAH" did use UffJCJngTXc1o84dQ7aEUb -Did "UffJCJngTXc1o84dQ7aEUb" has been set as active +Did "UffJCJngTXc1o84dQ7aEUb" has been set as active - ledger nym did=X3zYajU7gbz9Pu8k6E7Ppf verkey=~3ZDo6g4ZDRKGauKrR452xU -Transaction has been rejected: UffJCJngTXc1o84dQ7aEUb is neither Trustee nor owner of X3zYajU7gbz9Pu8k6E7Ppf +Transaction has been rejected: UffJCJngTXc1o84dQ7aEUb is neither Trustee nor owner of X3zYajU7gbz9Pu8k6E7Ppf did use X3zYajU7gbz9Pu8k6E7Ppf -Did "X3zYajU7gbz9Pu8k6E7Ppf" has been set as active +Did "X3zYajU7gbz9Pu8k6E7Ppf" has been set as active ledger attrib did=X3zYajU7gbz9Pu8k6E7Ppf raw={"endpoint":{"ha":"127.0.0.1:5555"}} -Attrib request has been sent to Ledger. -Metadata: +Attrib request has been sent to Ledger. 
+Metadata: +------------------------+-----------------+---------------------+---------------------+ | From | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | X3zYajU7gbz9Pu8k6E7Ppf | 32 | 1534004785574568146 | 2018-08-11 16:26:26 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +--------------------------------------+ | Raw value | +--------------------------------------+ @@ -412,14 +412,14 @@ ledger attrib did=X3zYajU7gbz9Pu8k6E7Ppf raw={"endpoint":{"ha":"127.0.0.1:5555"} +--------------------------------------+ ledger attrib did=X3zYajU7gbz9Pu8k6E7Ppf raw={"endpoint":{"ha":"127.0.0.1:6666"}} -Attrib request has been sent to Ledger. -Metadata: +Attrib request has been sent to Ledger. +Metadata: +------------------------+-----------------+---------------------+---------------------+ | From | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | X3zYajU7gbz9Pu8k6E7Ppf | 33 | 1534004786960798227 | 2018-08-11 16:26:27 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +--------------------------------------+ | Raw value | +--------------------------------------+ @@ -427,26 +427,26 @@ ledger attrib did=X3zYajU7gbz9Pu8k6E7Ppf raw={"endpoint":{"ha":"127.0.0.1:6666"} +--------------------------------------+ did use X3zYajU7gbz9Pu8k6E7Ppf -Did "X3zYajU7gbz9Pu8k6E7Ppf" has been set as active +Did "X3zYajU7gbz9Pu8k6E7Ppf" has been set as active - ledger attrib did=UffJCJngTXc1o84dQ7aEUb raw={"endpoint":{"ha":"127.0.0.1:5555"}} -Transaction has been rejected: Only identity owner/guardian can add attribute for that identity +Transaction has been rejected: Only identity owner/guardian can add attribute for that identity did use UffJCJngTXc1o84dQ7aEUb -Did "UffJCJngTXc1o84dQ7aEUb" has been set 
as active +Did "UffJCJngTXc1o84dQ7aEUb" has been set as active - ledger attrib did=X3zYajU7gbz9Pu8k6E7Ppf raw={"endpoint":{"ha":"127.0.0.1:5555"}} -Transaction has been rejected: Only identity owner/guardian can add attribute for that identity +Transaction has been rejected: Only identity owner/guardian can add attribute for that identity ledger schema name=IdentityOwnerSchema version=1.0 attr_names=name,age -Schema request has been sent to Ledger. -Metadata: +Schema request has been sent to Ledger. +Metadata: +------------------------+-----------------+---------------------+---------------------+ | From | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | UffJCJngTXc1o84dQ7aEUb | 34 | 1534004789633198964 | 2018-08-11 16:26:30 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +---------------------+---------+--------------+ | Name | Version | Attributes | +---------------------+---------+--------------+ @@ -454,20 +454,8 @@ ledger schema name=IdentityOwnerSchema version=1.0 attr_names=name,age +---------------------+---------+--------------+ ledger cred-def schema_id=1 signature_type=CL primary={"n":"1","s":"2","rms":"3","r":{"age":"4","name":"5"},"rctxt":"6","z":"7"} tag=3 -NodeConfig request has been sent to Ledger. 
-Metadata: -+------------------------+-----------------+---------------------+---------------------+ -| From | Sequence Number | Request ID | Transaction time | -+------------------------+-----------------+---------------------+---------------------+ -| UffJCJngTXc1o84dQ7aEUb | 35 | 1534004792100180390 | 2018-08-11 16:26:32 | -+------------------------+-----------------+---------------------+---------------------+ -Data: -+----------------------------------------------------------------------------+ -| Primary Key | -+----------------------------------------------------------------------------+ -| {n:"1",r:{"age":"4","master_secret":"3","name":"5"},rctxt:"6",s:"2",z:"7"} | -+----------------------------------------------------------------------------+ +Transaction has been rejected: Mentioned seqNo (1) isn't seqNo of the schema. -Pool "AS-0301-owner" has been disconnected -Wallet "AS-03-wallet-owner" has been closed -Goodbye... +Pool "AS-0301-owner" has been disconnected +Wallet "AS-03-wallet-owner" has been closed +Goodbye... 
diff --git a/acceptance/indy-cli-batches/expected/AS-03-01-identity-owner.expected b/acceptance/indy-cli-batches/expected/AS-03-01-identity-owner.expected index 3fbc8af19..90d54f81b 100644 --- a/acceptance/indy-cli-batches/expected/AS-03-01-identity-owner.expected +++ b/acceptance/indy-cli-batches/expected/AS-03-01-identity-owner.expected @@ -1,71 +1,71 @@ - pool create AS-0301-owner gen_txn_file=./pool_transactions_genesis -Pool config "AS-0301-owner" has been created +Pool config "AS-0301-owner" has been created pool connect AS-0301-owner -Pool "AS-0301-owner" has been connected +Pool "AS-0301-owner" has been connected - wallet create AS-03-wallet-owner key=testkey storage_config={"pool":"AS-0301-owner"} -Wallet "AS-03-wallet-owner" has been created +Wallet "AS-03-wallet-owner" has been created wallet open AS-03-wallet-owner key=testkey -Wallet "AS-03-wallet-owner" has been opened +Wallet "AS-03-wallet-owner" has been opened did new seed=000000000000000000000000Trustee1 metadata="Default Trustee" -Did "V4SGRU86Z58d6TV7PBUe6f" has been created with "~CoRER63DVYnWZtK8uAzNbx" verkey -Metadata has been saved for DID "V4SGRU86Z58d6TV7PBUe6f" +Did "V4SGRU86Z58d6TV7PBUe6f" has been created with "~CoRER63DVYnWZtK8uAzNbx" verkey +Metadata has been saved for DID "V4SGRU86Z58d6TV7PBUe6f" did new seed=TestTrustee100000000000000000003 metadata="test trustee 1" -Did "FiAsNdcWnpB2L22ZUGyKHa" has been created with "~BZHjLX8NYwJXPTE746hn6Y" verkey -Metadata has been saved for DID "FiAsNdcWnpB2L22ZUGyKHa" +Did "FiAsNdcWnpB2L22ZUGyKHa" has been created with "~BZHjLX8NYwJXPTE746hn6Y" verkey +Metadata has been saved for DID "FiAsNdcWnpB2L22ZUGyKHa" did new seed=TestTrustee200000000000000000003 metadata="test trustee 2" -Did "CYdQe2tmSwhv2XdicegoAn" has been created with "~E7MjQHm14YnmZD9RErTBsi" verkey -Metadata has been saved for DID "CYdQe2tmSwhv2XdicegoAn" +Did "CYdQe2tmSwhv2XdicegoAn" has been created with "~E7MjQHm14YnmZD9RErTBsi" verkey +Metadata has been saved for DID 
"CYdQe2tmSwhv2XdicegoAn" did new seed=TestSteward100000000000000000003 metadata="test steward 1" -Did "AhqUV2zHYdNaWLFCCe7xCn" has been created with "~YUY2ChUGWJovtU6XTn61D8" verkey -Metadata has been saved for DID "AhqUV2zHYdNaWLFCCe7xCn" +Did "AhqUV2zHYdNaWLFCCe7xCn" has been created with "~YUY2ChUGWJovtU6XTn61D8" verkey +Metadata has been saved for DID "AhqUV2zHYdNaWLFCCe7xCn" did new seed=TestSteward200000000000000000003 metadata="test steward 2" -Did "CbPwHxKEibPhV4pgXWpu26" has been created with "~MviYa49QADQXAM68WSiLPD" verkey -Metadata has been saved for DID "CbPwHxKEibPhV4pgXWpu26" +Did "CbPwHxKEibPhV4pgXWpu26" has been created with "~MviYa49QADQXAM68WSiLPD" verkey +Metadata has been saved for DID "CbPwHxKEibPhV4pgXWpu26" did new seed=TestTrustAnchor10000000000000003 metadata="test trust anchor 1" -Did "DriVwCMbtEgkmoEHKin6Ah" has been created with "~YPZot1kM4DLwvsX6mtcKd9" verkey -Metadata has been saved for DID "DriVwCMbtEgkmoEHKin6Ah" +Did "DriVwCMbtEgkmoEHKin6Ah" has been created with "~YPZot1kM4DLwvsX6mtcKd9" verkey +Metadata has been saved for DID "DriVwCMbtEgkmoEHKin6Ah" did new seed=TestTrustAnchor20000000000000003 metadata="test trust anchor 2" -Did "2b6xTx2HniDU77nxHm6zWB" has been created with "~HF34ymwfTJngb8zFDmCyvX" verkey -Metadata has been saved for DID "2b6xTx2HniDU77nxHm6zWB" +Did "2b6xTx2HniDU77nxHm6zWB" has been created with "~HF34ymwfTJngb8zFDmCyvX" verkey +Metadata has been saved for DID "2b6xTx2HniDU77nxHm6zWB" did new seed=TestTrustAnchor30000000000000003 metadata="test trust anchor 3" -Did "4xuWDwsQSqzQmYSheSWFyg" has been created with "~BmnEpJMi6kJHUcxcVJa2R4" verkey -Metadata has been saved for DID "4xuWDwsQSqzQmYSheSWFyg" +Did "4xuWDwsQSqzQmYSheSWFyg" has been created with "~BmnEpJMi6kJHUcxcVJa2R4" verkey +Metadata has been saved for DID "4xuWDwsQSqzQmYSheSWFyg" did new seed=RandomUser1000000000000000000003 metadata="test identity 1" -Did "LBbKEeczA9iL21p4Kgxcuf" has been created with "~3ZvjdaYs4cdFYXAwNGR85p" verkey -Metadata 
has been saved for DID "LBbKEeczA9iL21p4Kgxcuf" +Did "LBbKEeczA9iL21p4Kgxcuf" has been created with "~3ZvjdaYs4cdFYXAwNGR85p" verkey +Metadata has been saved for DID "LBbKEeczA9iL21p4Kgxcuf" did new seed=RandomUser2000000000000000000003 metadata="test identity 2" -Did "Fk9ENxnz1ztDgdivQBJTCZ" has been created with "~NT9ANFeDhCLKDg5PNKZXKK" verkey -Metadata has been saved for DID "Fk9ENxnz1ztDgdivQBJTCZ" +Did "Fk9ENxnz1ztDgdivQBJTCZ" has been created with "~NT9ANFeDhCLKDg5PNKZXKK" verkey +Metadata has been saved for DID "Fk9ENxnz1ztDgdivQBJTCZ" did new seed=RandomUser3000000000000000000003 metadata="test identity 3" -Did "Q8uxmCGdXgLHHtaTwYtVJG" has been created with "~UpRqa9gQ1jsbUBvKYPtWSo" verkey -Metadata has been saved for DID "Q8uxmCGdXgLHHtaTwYtVJG" +Did "Q8uxmCGdXgLHHtaTwYtVJG" has been created with "~UpRqa9gQ1jsbUBvKYPtWSo" verkey +Metadata has been saved for DID "Q8uxmCGdXgLHHtaTwYtVJG" did use V4SGRU86Z58d6TV7PBUe6f -Did "V4SGRU86Z58d6TV7PBUe6f" has been set as active +Did "V4SGRU86Z58d6TV7PBUe6f" has been set as active ledger nym did=AhqUV2zHYdNaWLFCCe7xCn role=STEWARD verkey=~YUY2ChUGWJovtU6XTn61D8 -Nym request has been sent to Ledger. -Metadata: +Nym request has been sent to Ledger. 
+Metadata: +------------------------+-----------------+---------------------+---------------------+ | From | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | V4SGRU86Z58d6TV7PBUe6f | 22 | 1534005141541903469 | 2018-08-11 16:32:21 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+-------------------------+---------+ | Did | Verkey | Role | +------------------------+-------------------------+---------+ @@ -73,14 +73,14 @@ ledger nym did=AhqUV2zHYdNaWLFCCe7xCn role=STEWARD verkey=~YUY2ChUGWJovtU6XTn61D +------------------------+-------------------------+---------+ - ledger get-nym did=AhqUV2zHYdNaWLFCCe7xCn -Following NYM has been received. -Metadata: +Following NYM has been received. +Metadata: +------------------------+-----------------+---------------------+---------------------+ | Identifier | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | V4SGRU86Z58d6TV7PBUe6f | 22 | 1534005142314188942 | 2018-08-11 16:32:21 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+------------------------+-------------------------+---------+ | Identifier | Dest | Verkey | Role | +------------------------+------------------------+-------------------------+---------+ @@ -88,17 +88,17 @@ ledger nym did=AhqUV2zHYdNaWLFCCe7xCn role=STEWARD verkey=~YUY2ChUGWJovtU6XTn61D +------------------------+------------------------+-------------------------+---------+ did use V4SGRU86Z58d6TV7PBUe6f -Did "V4SGRU86Z58d6TV7PBUe6f" has been set as active +Did "V4SGRU86Z58d6TV7PBUe6f" has been set as active ledger nym did=DriVwCMbtEgkmoEHKin6Ah role=TRUST_ANCHOR verkey=~YPZot1kM4DLwvsX6mtcKd9 -Nym request has been sent to Ledger. 
-Metadata: +Nym request has been sent to Ledger. +Metadata: +------------------------+-----------------+---------------------+---------------------+ | From | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | V4SGRU86Z58d6TV7PBUe6f | 23 | 1534005142350312943 | 2018-08-11 16:32:22 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+-------------------------+--------------+ | Did | Verkey | Role | +------------------------+-------------------------+--------------+ @@ -106,14 +106,14 @@ ledger nym did=DriVwCMbtEgkmoEHKin6Ah role=TRUST_ANCHOR verkey=~YPZot1kM4DLwvsX6 +------------------------+-------------------------+--------------+ - ledger get-nym did=DriVwCMbtEgkmoEHKin6Ah -Following NYM has been received. -Metadata: +Following NYM has been received. +Metadata: +------------------------+-----------------+---------------------+---------------------+ | Identifier | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | V4SGRU86Z58d6TV7PBUe6f | 23 | 1534005143229132303 | 2018-08-11 16:32:22 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+------------------------+-------------------------+--------------+ | Identifier | Dest | Verkey | Role | +------------------------+------------------------+-------------------------+--------------+ @@ -121,17 +121,17 @@ ledger nym did=DriVwCMbtEgkmoEHKin6Ah role=TRUST_ANCHOR verkey=~YPZot1kM4DLwvsX6 +------------------------+------------------------+-------------------------+--------------+ did use V4SGRU86Z58d6TV7PBUe6f -Did "V4SGRU86Z58d6TV7PBUe6f" has been set as active +Did "V4SGRU86Z58d6TV7PBUe6f" has been set as active ledger nym did=LBbKEeczA9iL21p4Kgxcuf verkey=~3ZvjdaYs4cdFYXAwNGR85p -Nym 
request has been sent to Ledger. -Metadata: +Nym request has been sent to Ledger. +Metadata: +------------------------+-----------------+---------------------+---------------------+ | From | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | V4SGRU86Z58d6TV7PBUe6f | 24 | 1534005143359076898 | 2018-08-11 16:32:23 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+-------------------------+------+ | Did | Verkey | Role | +------------------------+-------------------------+------+ @@ -139,14 +139,14 @@ ledger nym did=LBbKEeczA9iL21p4Kgxcuf verkey=~3ZvjdaYs4cdFYXAwNGR85p +------------------------+-------------------------+------+ - ledger get-nym did=LBbKEeczA9iL21p4Kgxcuf -Following NYM has been received. -Metadata: +Following NYM has been received. +Metadata: +------------------------+-----------------+---------------------+---------------------+ | Identifier | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | V4SGRU86Z58d6TV7PBUe6f | 24 | 1534005144255440866 | 2018-08-11 16:32:23 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+------------------------+-------------------------+------+ | Identifier | Dest | Verkey | Role | +------------------------+------------------------+-------------------------+------+ @@ -154,23 +154,23 @@ ledger nym did=LBbKEeczA9iL21p4Kgxcuf verkey=~3ZvjdaYs4cdFYXAwNGR85p +------------------------+------------------------+-------------------------+------+ did use LBbKEeczA9iL21p4Kgxcuf -Did "LBbKEeczA9iL21p4Kgxcuf" has been set as active +Did "LBbKEeczA9iL21p4Kgxcuf" has been set as active - ledger nym did=V4SGRU86Z58d6TV7PBUe6f role= -Transaction has been rejected: LBbKEeczA9iL21p4Kgxcuf is neither 
Trustee nor owner of V4SGRU86Z58d6TV7PBUe6f +Transaction has been rejected: LBbKEeczA9iL21p4Kgxcuf is neither Trustee nor owner of V4SGRU86Z58d6TV7PBUe6f did use V4SGRU86Z58d6TV7PBUe6f -Did "V4SGRU86Z58d6TV7PBUe6f" has been set as active +Did "V4SGRU86Z58d6TV7PBUe6f" has been set as active ledger nym did=CYdQe2tmSwhv2XdicegoAn role=TRUSTEE verkey=~E7MjQHm14YnmZD9RErTBsi -Nym request has been sent to Ledger. -Metadata: +Nym request has been sent to Ledger. +Metadata: +------------------------+-----------------+---------------------+---------------------+ | From | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | V4SGRU86Z58d6TV7PBUe6f | 25 | 1534005144845760413 | 2018-08-11 16:32:25 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+-------------------------+---------+ | Did | Verkey | Role | +------------------------+-------------------------+---------+ @@ -178,14 +178,14 @@ ledger nym did=CYdQe2tmSwhv2XdicegoAn role=TRUSTEE verkey=~E7MjQHm14YnmZD9RErTBs +------------------------+-------------------------+---------+ - ledger get-nym did=CYdQe2tmSwhv2XdicegoAn -Following NYM has been received. -Metadata: +Following NYM has been received. 
+Metadata: +------------------------+-----------------+---------------------+---------------------+ | Identifier | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | V4SGRU86Z58d6TV7PBUe6f | 25 | 1534005146333582676 | 2018-08-11 16:32:25 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+------------------------+-------------------------+---------+ | Identifier | Dest | Verkey | Role | +------------------------+------------------------+-------------------------+---------+ @@ -193,32 +193,32 @@ ledger nym did=CYdQe2tmSwhv2XdicegoAn role=TRUSTEE verkey=~E7MjQHm14YnmZD9RErTBs +------------------------+------------------------+-------------------------+---------+ did use LBbKEeczA9iL21p4Kgxcuf -Did "LBbKEeczA9iL21p4Kgxcuf" has been set as active +Did "LBbKEeczA9iL21p4Kgxcuf" has been set as active - ledger nym did=FiAsNdcWnpB2L22ZUGyKHa role=TRUSTEE verkey=~BZHjLX8NYwJXPTE746hn6Y -Transaction has been rejected: None role cannot add TRUSTEE +Transaction has been rejected: None role cannot add TRUSTEE - ledger get-nym did=FiAsNdcWnpB2L22ZUGyKHa -NYM not found +NYM not found did use LBbKEeczA9iL21p4Kgxcuf -Did "LBbKEeczA9iL21p4Kgxcuf" has been set as active +Did "LBbKEeczA9iL21p4Kgxcuf" has been set as active - ledger nym did=AhqUV2zHYdNaWLFCCe7xCn role= -Transaction has been rejected: LBbKEeczA9iL21p4Kgxcuf is neither Trustee nor owner of AhqUV2zHYdNaWLFCCe7xCn +Transaction has been rejected: LBbKEeczA9iL21p4Kgxcuf is neither Trustee nor owner of AhqUV2zHYdNaWLFCCe7xCn did use AhqUV2zHYdNaWLFCCe7xCn -Did "AhqUV2zHYdNaWLFCCe7xCn" has been set as active +Did "AhqUV2zHYdNaWLFCCe7xCn" has been set as active ledger nym did=4xuWDwsQSqzQmYSheSWFyg role=TRUST_ANCHOR verkey=~BmnEpJMi6kJHUcxcVJa2R4 -Nym request has been sent to Ledger. -Metadata: +Nym request has been sent to Ledger. 
+Metadata: +------------------------+-----------------+---------------------+---------------------+ | From | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | AhqUV2zHYdNaWLFCCe7xCn | 26 | 1534005147935406313 | 2018-08-11 16:32:28 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+-------------------------+--------------+ | Did | Verkey | Role | +------------------------+-------------------------+--------------+ @@ -226,14 +226,14 @@ ledger nym did=4xuWDwsQSqzQmYSheSWFyg role=TRUST_ANCHOR verkey=~BmnEpJMi6kJHUcxc +------------------------+-------------------------+--------------+ - ledger get-nym did=4xuWDwsQSqzQmYSheSWFyg -Following NYM has been received. -Metadata: +Following NYM has been received. +Metadata: +------------------------+-----------------+---------------------+---------------------+ | Identifier | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | AhqUV2zHYdNaWLFCCe7xCn | 26 | 1534005149385641029 | 2018-08-11 16:32:28 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+------------------------+-------------------------+--------------+ | Identifier | Dest | Verkey | Role | +------------------------+------------------------+-------------------------+--------------+ @@ -241,32 +241,32 @@ ledger nym did=4xuWDwsQSqzQmYSheSWFyg role=TRUST_ANCHOR verkey=~BmnEpJMi6kJHUcxc +------------------------+------------------------+-------------------------+--------------+ did use LBbKEeczA9iL21p4Kgxcuf -Did "LBbKEeczA9iL21p4Kgxcuf" has been set as active +Did "LBbKEeczA9iL21p4Kgxcuf" has been set as active - ledger nym did=CbPwHxKEibPhV4pgXWpu26 role=STEWARD verkey=~MviYa49QADQXAM68WSiLPD -Transaction has been rejected: None role 
cannot add STEWARD +Transaction has been rejected: None role cannot add STEWARD - ledger get-nym did=CbPwHxKEibPhV4pgXWpu26 -NYM not found +NYM not found did use LBbKEeczA9iL21p4Kgxcuf -Did "LBbKEeczA9iL21p4Kgxcuf" has been set as active +Did "LBbKEeczA9iL21p4Kgxcuf" has been set as active - ledger nym did=DriVwCMbtEgkmoEHKin6Ah role= -Transaction has been rejected: LBbKEeczA9iL21p4Kgxcuf is neither Trustee nor owner of DriVwCMbtEgkmoEHKin6Ah +Transaction has been rejected: LBbKEeczA9iL21p4Kgxcuf is neither Trustee nor owner of DriVwCMbtEgkmoEHKin6Ah did use DriVwCMbtEgkmoEHKin6Ah -Did "DriVwCMbtEgkmoEHKin6Ah" has been set as active +Did "DriVwCMbtEgkmoEHKin6Ah" has been set as active ledger nym did=Q8uxmCGdXgLHHtaTwYtVJG verkey=~UpRqa9gQ1jsbUBvKYPtWSo -Nym request has been sent to Ledger. -Metadata: +Nym request has been sent to Ledger. +Metadata: +------------------------+-----------------+---------------------+---------------------+ | From | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | DriVwCMbtEgkmoEHKin6Ah | 27 | 1534005150982291009 | 2018-08-11 16:32:31 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+-------------------------+------+ | Did | Verkey | Role | +------------------------+-------------------------+------+ @@ -274,14 +274,14 @@ ledger nym did=Q8uxmCGdXgLHHtaTwYtVJG verkey=~UpRqa9gQ1jsbUBvKYPtWSo +------------------------+-------------------------+------+ - ledger get-nym did=Q8uxmCGdXgLHHtaTwYtVJG -Following NYM has been received. -Metadata: +Following NYM has been received. 
+Metadata: +------------------------+-----------------+---------------------+---------------------+ | Identifier | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | DriVwCMbtEgkmoEHKin6Ah | 27 | 1534005152410574550 | 2018-08-11 16:32:31 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+------------------------+-------------------------+------+ | Identifier | Dest | Verkey | Role | +------------------------+------------------------+-------------------------+------+ @@ -289,50 +289,50 @@ ledger nym did=Q8uxmCGdXgLHHtaTwYtVJG verkey=~UpRqa9gQ1jsbUBvKYPtWSo +------------------------+------------------------+-------------------------+------+ did use LBbKEeczA9iL21p4Kgxcuf -Did "LBbKEeczA9iL21p4Kgxcuf" has been set as active +Did "LBbKEeczA9iL21p4Kgxcuf" has been set as active - ledger nym did=2b6xTx2HniDU77nxHm6zWB role=TRUST_ANCHOR verkey=~HF34ymwfTJngb8zFDmCyvX -Transaction has been rejected: None role cannot add TRUST_ANCHOR +Transaction has been rejected: None role cannot add TRUST_ANCHOR - ledger get-nym did=2b6xTx2HniDU77nxHm6zWB -NYM not found +NYM not found did use LBbKEeczA9iL21p4Kgxcuf -Did "LBbKEeczA9iL21p4Kgxcuf" has been set as active +Did "LBbKEeczA9iL21p4Kgxcuf" has been set as active - ledger nym did=Fk9ENxnz1ztDgdivQBJTCZ verkey=~NT9ANFeDhCLKDg5PNKZXKK -Transaction has been rejected: None role cannot add None role +Transaction has been rejected: None role cannot add None role - ledger get-nym did=Fk9ENxnz1ztDgdivQBJTCZ -NYM not found +NYM not found did use LBbKEeczA9iL21p4Kgxcuf -Did "LBbKEeczA9iL21p4Kgxcuf" has been set as active +Did "LBbKEeczA9iL21p4Kgxcuf" has been set as active - ledger nym did=Q8uxmCGdXgLHHtaTwYtVJG role=TRUST_ANCHOR -Transaction has been rejected: LBbKEeczA9iL21p4Kgxcuf is neither Trustee nor owner of Q8uxmCGdXgLHHtaTwYtVJG +Transaction has been rejected: 
LBbKEeczA9iL21p4Kgxcuf is neither Trustee nor owner of Q8uxmCGdXgLHHtaTwYtVJG - ledger nym did=Q8uxmCGdXgLHHtaTwYtVJG role=STEWARD -Transaction has been rejected: LBbKEeczA9iL21p4Kgxcuf is neither Trustee nor owner of Q8uxmCGdXgLHHtaTwYtVJG +Transaction has been rejected: LBbKEeczA9iL21p4Kgxcuf is neither Trustee nor owner of Q8uxmCGdXgLHHtaTwYtVJG - ledger nym did=Q8uxmCGdXgLHHtaTwYtVJG role=TRUSTEE -Transaction has been rejected: LBbKEeczA9iL21p4Kgxcuf is neither Trustee nor owner of Q8uxmCGdXgLHHtaTwYtVJG +Transaction has been rejected: LBbKEeczA9iL21p4Kgxcuf is neither Trustee nor owner of Q8uxmCGdXgLHHtaTwYtVJG did use V4SGRU86Z58d6TV7PBUe6f -Did "V4SGRU86Z58d6TV7PBUe6f" has been set as active +Did "V4SGRU86Z58d6TV7PBUe6f" has been set as active did new seed=RandomAttribOwner000000000000003 -Did "UffJCJngTXc1o84dQ7aEUb" has been created with "~J4HtnGz2wW2nE7VuMeg39M" verkey +Did "UffJCJngTXc1o84dQ7aEUb" has been created with "~J4HtnGz2wW2nE7VuMeg39M" verkey ledger nym did=UffJCJngTXc1o84dQ7aEUb verkey=~J4HtnGz2wW2nE7VuMeg39M -Nym request has been sent to Ledger. -Metadata: +Nym request has been sent to Ledger. 
+Metadata: +------------------------+-----------------+---------------------+---------------------+ | From | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | V4SGRU86Z58d6TV7PBUe6f | 28 | 1534005157150470834 | 2018-08-11 16:32:43 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+-------------------------+------+ | Did | Verkey | Role | +------------------------+-------------------------+------+ @@ -340,17 +340,17 @@ ledger nym did=UffJCJngTXc1o84dQ7aEUb verkey=~J4HtnGz2wW2nE7VuMeg39M +------------------------+-------------------------+------+ did new seed=RandomUserForRotateKey0000000003 -Did "X3zYajU7gbz9Pu8k6E7Ppf" has been created with "~3ZDo6g4ZDRKGauKrR452xU" verkey +Did "X3zYajU7gbz9Pu8k6E7Ppf" has been created with "~3ZDo6g4ZDRKGauKrR452xU" verkey ledger nym did=X3zYajU7gbz9Pu8k6E7Ppf verkey=~3ZDo6g4ZDRKGauKrR452xU -Nym request has been sent to Ledger. -Metadata: +Nym request has been sent to Ledger. 
+Metadata: +------------------------+-----------------+---------------------+---------------------+ | From | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | V4SGRU86Z58d6TV7PBUe6f | 29 | 1534005163849343051 | 2018-08-11 16:32:44 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+-------------------------+------+ | Did | Verkey | Role | +------------------------+-------------------------+------+ @@ -358,29 +358,29 @@ ledger nym did=X3zYajU7gbz9Pu8k6E7Ppf verkey=~3ZDo6g4ZDRKGauKrR452xU +------------------------+-------------------------+------+ did use X3zYajU7gbz9Pu8k6E7Ppf -Did "X3zYajU7gbz9Pu8k6E7Ppf" has been set as active +Did "X3zYajU7gbz9Pu8k6E7Ppf" has been set as active did rotate-key seed=RandomUserForRotateKey0NewKey003 -Verkey for did "X3zYajU7gbz9Pu8k6E7Ppf" has been updated. New verkey: "DHUqLyUnhjBgsEKHCm3cNekC29rvx6STQAgvepPRLjAH" +Verkey for did "X3zYajU7gbz9Pu8k6E7Ppf" has been updated. New verkey: "DHUqLyUnhjBgsEKHCm3cNekC29rvx6STQAgvepPRLjAH" did use UffJCJngTXc1o84dQ7aEUb -Did "UffJCJngTXc1o84dQ7aEUb" has been set as active +Did "UffJCJngTXc1o84dQ7aEUb" has been set as active - ledger nym did=X3zYajU7gbz9Pu8k6E7Ppf verkey=~3ZDo6g4ZDRKGauKrR452xU -Transaction has been rejected: UffJCJngTXc1o84dQ7aEUb is neither Trustee nor owner of X3zYajU7gbz9Pu8k6E7Ppf +Transaction has been rejected: UffJCJngTXc1o84dQ7aEUb is neither Trustee nor owner of X3zYajU7gbz9Pu8k6E7Ppf did use X3zYajU7gbz9Pu8k6E7Ppf -Did "X3zYajU7gbz9Pu8k6E7Ppf" has been set as active +Did "X3zYajU7gbz9Pu8k6E7Ppf" has been set as active ledger attrib did=X3zYajU7gbz9Pu8k6E7Ppf raw={"endpoint":{"ha":"127.0.0.1:5555"}} -Attrib request has been sent to Ledger. -Metadata: +Attrib request has been sent to Ledger. 
+Metadata: +------------------------+-----------------+---------------------+---------------------+ | From | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | X3zYajU7gbz9Pu8k6E7Ppf | 31 | 1534005166455425333 | 2018-08-11 16:32:47 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +--------------------------------------+ | Raw value | +--------------------------------------+ @@ -388,14 +388,14 @@ ledger attrib did=X3zYajU7gbz9Pu8k6E7Ppf raw={"endpoint":{"ha":"127.0.0.1:5555"} +--------------------------------------+ ledger attrib did=X3zYajU7gbz9Pu8k6E7Ppf raw={"endpoint":{"ha":"127.0.0.1:6666"}} -Attrib request has been sent to Ledger. -Metadata: +Attrib request has been sent to Ledger. +Metadata: +------------------------+-----------------+---------------------+---------------------+ | From | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | X3zYajU7gbz9Pu8k6E7Ppf | 32 | 1534005167849981265 | 2018-08-11 16:32:48 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +--------------------------------------+ | Raw value | +--------------------------------------+ @@ -403,23 +403,23 @@ ledger attrib did=X3zYajU7gbz9Pu8k6E7Ppf raw={"endpoint":{"ha":"127.0.0.1:6666"} +--------------------------------------+ did use X3zYajU7gbz9Pu8k6E7Ppf -Did "X3zYajU7gbz9Pu8k6E7Ppf" has been set as active +Did "X3zYajU7gbz9Pu8k6E7Ppf" has been set as active - ledger attrib did=UffJCJngTXc1o84dQ7aEUb raw={"endpoint":{"ha":"127.0.0.1:5555"}} -Transaction has been rejected: Only identity owner/guardian can add attribute for that identity +Transaction has been rejected: Only identity owner/guardian can add attribute for that identity did use UffJCJngTXc1o84dQ7aEUb -Did "UffJCJngTXc1o84dQ7aEUb" has been set 
as active +Did "UffJCJngTXc1o84dQ7aEUb" has been set as active - ledger attrib did=X3zYajU7gbz9Pu8k6E7Ppf raw={"endpoint":{"ha":"127.0.0.1:5555"}} -Transaction has been rejected: Only identity owner/guardian can add attribute for that identity +Transaction has been rejected: Only identity owner/guardian can add attribute for that identity - ledger schema name=IdentityOwnerSchema version=1.0 attr_names=name,age -Transaction has been rejected: None role cannot add schema +Transaction has been rejected: None role cannot add schema - ledger cred-def schema_id=1 signature_type=CL primary={"n":"1","s":"2","rms":"3","r":{"age":"4","name":"5"},"rctxt":"6","z":"7"} tag=3 -Transaction has been rejected: None role cannot add claim def +Transaction has been rejected: None role cannot add claim def -Wallet "AS-03-wallet-owner" has been closed -Pool "AS-0301-owner" has been disconnected -Goodbye... +Wallet "AS-03-wallet-owner" has been closed +Pool "AS-0301-owner" has been disconnected +Goodbye... diff --git a/acceptance/indy-cli-batches/expected/AS-03-01-steward.expected b/acceptance/indy-cli-batches/expected/AS-03-01-steward.expected index 9bb600d8c..f3e051f1b 100644 --- a/acceptance/indy-cli-batches/expected/AS-03-01-steward.expected +++ b/acceptance/indy-cli-batches/expected/AS-03-01-steward.expected @@ -1,71 +1,71 @@ - pool create AS-0301-steward gen_txn_file=./pool_transactions_genesis -Pool config "AS-0301-steward" has been created +Pool config "AS-0301-steward" has been created pool connect AS-0301-steward -Pool "AS-0301-steward" has been connected +Pool "AS-0301-steward" has been connected - wallet create AS-03-wallet-steward key=testkey storage_config={"pool":"AS-0301-steward"} -Wallet "AS-03-wallet-steward" has been created +Wallet "AS-03-wallet-steward" has been created wallet open AS-03-wallet-steward key=testkey -Wallet "AS-03-wallet-steward" has been opened +Wallet "AS-03-wallet-steward" has been opened did new seed=TestTrustee100000000000000000001 metadata="test 
trustee 1" -Did "81CCbJwqSyGNLLEtVo1kMq" has been created with "~UimNgHv7X45jxnh65faK3h" verkey -Metadata has been saved for DID "81CCbJwqSyGNLLEtVo1kMq" +Did "81CCbJwqSyGNLLEtVo1kMq" has been created with "~UimNgHv7X45jxnh65faK3h" verkey +Metadata has been saved for DID "81CCbJwqSyGNLLEtVo1kMq" did new seed=TestSteward100000000000000000001 metadata="test steward 1" -Did "7qFmEyYCXcmUFVied5Sp3b" has been created with "~Mj3PFUSi6qmrTRonFXHx9n" verkey -Metadata has been saved for DID "7qFmEyYCXcmUFVied5Sp3b" +Did "7qFmEyYCXcmUFVied5Sp3b" has been created with "~Mj3PFUSi6qmrTRonFXHx9n" verkey +Metadata has been saved for DID "7qFmEyYCXcmUFVied5Sp3b" did new seed=TestSteward200000000000000000001 metadata="test steward 2" -Did "XVP5k4E62PMJJWt2nPacHy" has been created with "~Ka6GqJ3qBYRE4Ku9uyFuQW" verkey -Metadata has been saved for DID "XVP5k4E62PMJJWt2nPacHy" +Did "XVP5k4E62PMJJWt2nPacHy" has been created with "~Ka6GqJ3qBYRE4Ku9uyFuQW" verkey +Metadata has been saved for DID "XVP5k4E62PMJJWt2nPacHy" did new seed=TestSteward300000000000000000001 metadata="test steward 3" -Did "CEJJcfjNGt7YcNLyXaszaq" has been created with "~AAGGsLT3yQtTmNdKrHXtC2" verkey -Metadata has been saved for DID "CEJJcfjNGt7YcNLyXaszaq" +Did "CEJJcfjNGt7YcNLyXaszaq" has been created with "~AAGGsLT3yQtTmNdKrHXtC2" verkey +Metadata has been saved for DID "CEJJcfjNGt7YcNLyXaszaq" did new seed=TestTrustAnchor00000000000000001 metadata="test trust anchor 0" -Did "CDcGtKx1boRYFwPBaGkMmk" has been created with "~PuCGfSiTB3NZGi1SH8w7H" verkey -Metadata has been saved for DID "CDcGtKx1boRYFwPBaGkMmk" +Did "CDcGtKx1boRYFwPBaGkMmk" has been created with "~PuCGfSiTB3NZGi1SH8w7H" verkey +Metadata has been saved for DID "CDcGtKx1boRYFwPBaGkMmk" did new seed=TestTrustAnchor10000000000000001 metadata="test trust anchor 1" -Did "Bhe7Uh5E1LYLgpLcbuVjj2" has been created with "~NMpYrG7tAXYV4ujYZjddKu" verkey -Metadata has been saved for DID "Bhe7Uh5E1LYLgpLcbuVjj2" +Did "Bhe7Uh5E1LYLgpLcbuVjj2" has been created 
with "~NMpYrG7tAXYV4ujYZjddKu" verkey +Metadata has been saved for DID "Bhe7Uh5E1LYLgpLcbuVjj2" did new seed=RandomUser1000000000000000000001 metadata="test identity 1" -Did "XkZJxs6Uadv6MQeKGGZdZ6" has been created with "~Aza4zyTRazcVsokmqNJfsg" verkey -Metadata has been saved for DID "XkZJxs6Uadv6MQeKGGZdZ6" +Did "XkZJxs6Uadv6MQeKGGZdZ6" has been created with "~Aza4zyTRazcVsokmqNJfsg" verkey +Metadata has been saved for DID "XkZJxs6Uadv6MQeKGGZdZ6" did new seed=RandomUser2000000000000000000001 metadata="test identity 2" -Did "Xm3b3LtJ3UoL5KeYT7ti7j" has been created with "~GmTyyvE4eHWeSWuiAtmE19" verkey -Metadata has been saved for DID "Xm3b3LtJ3UoL5KeYT7ti7j" +Did "Xm3b3LtJ3UoL5KeYT7ti7j" has been created with "~GmTyyvE4eHWeSWuiAtmE19" verkey +Metadata has been saved for DID "Xm3b3LtJ3UoL5KeYT7ti7j" did new seed=RandomUser4000000000000000000001 metadata="test identity 4" -Did "YUGDShR1RSr5T2CwbM7Hhu" has been created with "~GfsAf6NpSQDJ1ZWW2X7BiD" verkey -Metadata has been saved for DID "YUGDShR1RSr5T2CwbM7Hhu" +Did "YUGDShR1RSr5T2CwbM7Hhu" has been created with "~GfsAf6NpSQDJ1ZWW2X7BiD" verkey +Metadata has been saved for DID "YUGDShR1RSr5T2CwbM7Hhu" did new seed=RandomUser5000000000000000000001 metadata="test identity 5" -Did "Jt7aMnw77aoaBMyhXUNjtt" has been created with "~DT5pLP1wcvsgAzM78sqiRJ" verkey -Metadata has been saved for DID "Jt7aMnw77aoaBMyhXUNjtt" +Did "Jt7aMnw77aoaBMyhXUNjtt" has been created with "~DT5pLP1wcvsgAzM78sqiRJ" verkey +Metadata has been saved for DID "Jt7aMnw77aoaBMyhXUNjtt" did new seed=000000000000000000000000Trustee1 metadata="default trustee" -Did "V4SGRU86Z58d6TV7PBUe6f" has been created with "~CoRER63DVYnWZtK8uAzNbx" verkey -Metadata has been saved for DID "V4SGRU86Z58d6TV7PBUe6f" +Did "V4SGRU86Z58d6TV7PBUe6f" has been created with "~CoRER63DVYnWZtK8uAzNbx" verkey +Metadata has been saved for DID "V4SGRU86Z58d6TV7PBUe6f" did use V4SGRU86Z58d6TV7PBUe6f -Did "V4SGRU86Z58d6TV7PBUe6f" has been set as active +Did 
"V4SGRU86Z58d6TV7PBUe6f" has been set as active ledger nym did=7qFmEyYCXcmUFVied5Sp3b role=STEWARD verkey=~Mj3PFUSi6qmrTRonFXHx9n -Nym request has been sent to Ledger. -Metadata: +Nym request has been sent to Ledger. +Metadata: +------------------------+-----------------+---------------------+---------------------+ | From | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | V4SGRU86Z58d6TV7PBUe6f | 33 | 1534005195103816786 | 2018-08-11 16:33:15 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+-------------------------+---------+ | Did | Verkey | Role | +------------------------+-------------------------+---------+ @@ -73,14 +73,14 @@ ledger nym did=7qFmEyYCXcmUFVied5Sp3b role=STEWARD verkey=~Mj3PFUSi6qmrTRonFXHx9 +------------------------+-------------------------+---------+ - ledger get-nym did=7qFmEyYCXcmUFVied5Sp3b -Following NYM has been received. -Metadata: +Following NYM has been received. 
+Metadata: +------------------------+-----------------+---------------------+---------------------+ | Identifier | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | V4SGRU86Z58d6TV7PBUe6f | 33 | 1534005195894891570 | 2018-08-11 16:33:15 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+------------------------+-------------------------+---------+ | Identifier | Dest | Verkey | Role | +------------------------+------------------------+-------------------------+---------+ @@ -88,14 +88,14 @@ ledger nym did=7qFmEyYCXcmUFVied5Sp3b role=STEWARD verkey=~Mj3PFUSi6qmrTRonFXHx9 +------------------------+------------------------+-------------------------+---------+ ledger nym did=CEJJcfjNGt7YcNLyXaszaq role=STEWARD verkey=~AAGGsLT3yQtTmNdKrHXtC2 -Nym request has been sent to Ledger. -Metadata: +Nym request has been sent to Ledger. +Metadata: +------------------------+-----------------+---------------------+---------------------+ | From | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | V4SGRU86Z58d6TV7PBUe6f | 34 | 1534005195933339924 | 2018-08-11 16:33:16 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+-------------------------+---------+ | Did | Verkey | Role | +------------------------+-------------------------+---------+ @@ -103,14 +103,14 @@ ledger nym did=CEJJcfjNGt7YcNLyXaszaq role=STEWARD verkey=~AAGGsLT3yQtTmNdKrHXtC +------------------------+-------------------------+---------+ - ledger get-nym did=CEJJcfjNGt7YcNLyXaszaq -Following NYM has been received. -Metadata: +Following NYM has been received. 
+Metadata: +------------------------+-----------------+---------------------+---------------------+ | Identifier | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | V4SGRU86Z58d6TV7PBUe6f | 34 | 1534005196874579207 | 2018-08-11 16:33:16 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+------------------------+-------------------------+---------+ | Identifier | Dest | Verkey | Role | +------------------------+------------------------+-------------------------+---------+ @@ -118,32 +118,32 @@ ledger nym did=CEJJcfjNGt7YcNLyXaszaq role=STEWARD verkey=~AAGGsLT3yQtTmNdKrHXtC +------------------------+------------------------+-------------------------+---------+ did use 7qFmEyYCXcmUFVied5Sp3b -Did "7qFmEyYCXcmUFVied5Sp3b" has been set as active +Did "7qFmEyYCXcmUFVied5Sp3b" has been set as active - ledger nym did=81CCbJwqSyGNLLEtVo1kMq role=TRUSTEE verkey=~UimNgHv7X45jxnh65faK3h -Transaction has been rejected: STEWARD cannot add TRUSTEE +Transaction has been rejected: STEWARD cannot add TRUSTEE - ledger get-nym did=81CCbJwqSyGNLLEtVo1kMq -NYM not found +NYM not found did use 7qFmEyYCXcmUFVied5Sp3b -Did "7qFmEyYCXcmUFVied5Sp3b" has been set as active +Did "7qFmEyYCXcmUFVied5Sp3b" has been set as active - ledger nym did=V4SGRU86Z58d6TV7PBUe6f role= -Transaction has been rejected: 7qFmEyYCXcmUFVied5Sp3b is neither Trustee nor owner of V4SGRU86Z58d6TV7PBUe6f +Transaction has been rejected: 7qFmEyYCXcmUFVied5Sp3b is neither Trustee nor owner of V4SGRU86Z58d6TV7PBUe6f did use V4SGRU86Z58d6TV7PBUe6f -Did "V4SGRU86Z58d6TV7PBUe6f" has been set as active +Did "V4SGRU86Z58d6TV7PBUe6f" has been set as active ledger nym did=81CCbJwqSyGNLLEtVo1kMq role=TRUSTEE verkey=~UimNgHv7X45jxnh65faK3h -Nym request has been sent to Ledger. -Metadata: +Nym request has been sent to Ledger. 
+Metadata: +------------------------+-----------------+---------------------+---------------------+ | From | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | V4SGRU86Z58d6TV7PBUe6f | 35 | 1534005198493653092 | 2018-08-11 16:33:19 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+-------------------------+---------+ | Did | Verkey | Role | +------------------------+-------------------------+---------+ @@ -151,14 +151,14 @@ ledger nym did=81CCbJwqSyGNLLEtVo1kMq role=TRUSTEE verkey=~UimNgHv7X45jxnh65faK3 +------------------------+-------------------------+---------+ - ledger get-nym did=81CCbJwqSyGNLLEtVo1kMq -Following NYM has been received. -Metadata: +Following NYM has been received. +Metadata: +------------------------+-----------------+---------------------+---------------------+ | Identifier | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | V4SGRU86Z58d6TV7PBUe6f | 35 | 1534005199941485532 | 2018-08-11 16:33:19 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+------------------------+-------------------------+---------+ | Identifier | Dest | Verkey | Role | +------------------------+------------------------+-------------------------+---------+ @@ -166,32 +166,32 @@ ledger nym did=81CCbJwqSyGNLLEtVo1kMq role=TRUSTEE verkey=~UimNgHv7X45jxnh65faK3 +------------------------+------------------------+-------------------------+---------+ did use 7qFmEyYCXcmUFVied5Sp3b -Did "7qFmEyYCXcmUFVied5Sp3b" has been set as active +Did "7qFmEyYCXcmUFVied5Sp3b" has been set as active - ledger nym did=XVP5k4E62PMJJWt2nPacHy role=STEWARD verkey=~Ka6GqJ3qBYRE4Ku9uyFuQW -Transaction has been rejected: STEWARD cannot add STEWARD +Transaction 
has been rejected: STEWARD cannot add STEWARD - ledger get-nym did=XVP5k4E62PMJJWt2nPacHy -NYM not found +NYM not found did use 7qFmEyYCXcmUFVied5Sp3b -Did "7qFmEyYCXcmUFVied5Sp3b" has been set as active +Did "7qFmEyYCXcmUFVied5Sp3b" has been set as active - ledger nym did=CEJJcfjNGt7YcNLyXaszaq role= -Transaction has been rejected: 7qFmEyYCXcmUFVied5Sp3b is neither Trustee nor owner of CEJJcfjNGt7YcNLyXaszaq +Transaction has been rejected: 7qFmEyYCXcmUFVied5Sp3b is neither Trustee nor owner of CEJJcfjNGt7YcNLyXaszaq did use CEJJcfjNGt7YcNLyXaszaq -Did "CEJJcfjNGt7YcNLyXaszaq" has been set as active +Did "CEJJcfjNGt7YcNLyXaszaq" has been set as active ledger nym did=Bhe7Uh5E1LYLgpLcbuVjj2 role=TRUST_ANCHOR verkey=~NMpYrG7tAXYV4ujYZjddKu -Nym request has been sent to Ledger. -Metadata: +Nym request has been sent to Ledger. +Metadata: +------------------------+-----------------+---------------------+---------------------+ | From | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | CEJJcfjNGt7YcNLyXaszaq | 36 | 1534005201589633068 | 2018-08-11 16:33:22 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+-------------------------+--------------+ | Did | Verkey | Role | +------------------------+-------------------------+--------------+ @@ -199,14 +199,14 @@ ledger nym did=Bhe7Uh5E1LYLgpLcbuVjj2 role=TRUST_ANCHOR verkey=~NMpYrG7tAXYV4ujY +------------------------+-------------------------+--------------+ - ledger get-nym did=Bhe7Uh5E1LYLgpLcbuVjj2 -Following NYM has been received. -Metadata: +Following NYM has been received. 
+Metadata: +------------------------+-----------------+---------------------+---------------------+ | Identifier | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | CEJJcfjNGt7YcNLyXaszaq | 36 | 1534005202961457481 | 2018-08-11 16:33:22 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+------------------------+-------------------------+--------------+ | Identifier | Dest | Verkey | Role | +------------------------+------------------------+-------------------------+--------------+ @@ -214,17 +214,17 @@ ledger nym did=Bhe7Uh5E1LYLgpLcbuVjj2 role=TRUST_ANCHOR verkey=~NMpYrG7tAXYV4ujY +------------------------+------------------------+-------------------------+--------------+ did use 7qFmEyYCXcmUFVied5Sp3b -Did "7qFmEyYCXcmUFVied5Sp3b" has been set as active +Did "7qFmEyYCXcmUFVied5Sp3b" has been set as active ledger nym did=CDcGtKx1boRYFwPBaGkMmk role=TRUST_ANCHOR verkey=~PuCGfSiTB3NZGi1SH8w7H -Nym request has been sent to Ledger. -Metadata: +Nym request has been sent to Ledger. +Metadata: +------------------------+-----------------+---------------------+---------------------+ | From | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | 7qFmEyYCXcmUFVied5Sp3b | 37 | 1534005203036368824 | 2018-08-11 16:33:23 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+------------------------+--------------+ | Did | Verkey | Role | +------------------------+------------------------+--------------+ @@ -232,14 +232,14 @@ ledger nym did=CDcGtKx1boRYFwPBaGkMmk role=TRUST_ANCHOR verkey=~PuCGfSiTB3NZGi1S +------------------------+------------------------+--------------+ - ledger get-nym did=CDcGtKx1boRYFwPBaGkMmk -Following NYM has been received. 
-Metadata: +Following NYM has been received. +Metadata: +------------------------+-----------------+---------------------+---------------------+ | Identifier | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | 7qFmEyYCXcmUFVied5Sp3b | 37 | 1534005204011390885 | 2018-08-11 16:33:23 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+------------------------+------------------------+--------------+ | Identifier | Dest | Verkey | Role | +------------------------+------------------------+------------------------+--------------+ @@ -247,17 +247,17 @@ ledger nym did=CDcGtKx1boRYFwPBaGkMmk role=TRUST_ANCHOR verkey=~PuCGfSiTB3NZGi1S +------------------------+------------------------+------------------------+--------------+ did use CDcGtKx1boRYFwPBaGkMmk -Did "CDcGtKx1boRYFwPBaGkMmk" has been set as active +Did "CDcGtKx1boRYFwPBaGkMmk" has been set as active ledger nym did=YUGDShR1RSr5T2CwbM7Hhu verkey=~GfsAf6NpSQDJ1ZWW2X7BiD -Nym request has been sent to Ledger. -Metadata: +Nym request has been sent to Ledger. +Metadata: +------------------------+-----------------+---------------------+---------------------+ | From | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | CDcGtKx1boRYFwPBaGkMmk | 38 | 1534005204050115133 | 2018-08-11 16:33:24 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+-------------------------+------+ | Did | Verkey | Role | +------------------------+-------------------------+------+ @@ -265,14 +265,14 @@ ledger nym did=YUGDShR1RSr5T2CwbM7Hhu verkey=~GfsAf6NpSQDJ1ZWW2X7BiD +------------------------+-------------------------+------+ - ledger get-nym did=YUGDShR1RSr5T2CwbM7Hhu -Following NYM has been received. 
-Metadata: +Following NYM has been received. +Metadata: +------------------------+-----------------+---------------------+---------------------+ | Identifier | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | CDcGtKx1boRYFwPBaGkMmk | 38 | 1534005205042158052 | 2018-08-11 16:33:24 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+------------------------+-------------------------+------+ | Identifier | Dest | Verkey | Role | +------------------------+------------------------+-------------------------+------+ @@ -280,23 +280,23 @@ ledger nym did=YUGDShR1RSr5T2CwbM7Hhu verkey=~GfsAf6NpSQDJ1ZWW2X7BiD +------------------------+------------------------+-------------------------+------+ did use 7qFmEyYCXcmUFVied5Sp3b -Did "7qFmEyYCXcmUFVied5Sp3b" has been set as active +Did "7qFmEyYCXcmUFVied5Sp3b" has been set as active - ledger nym did=CDcGtKx1boRYFwPBaGkMmk role= -Transaction has been rejected: 7qFmEyYCXcmUFVied5Sp3b is neither Trustee nor owner of CDcGtKx1boRYFwPBaGkMmk +Transaction has been rejected: 7qFmEyYCXcmUFVied5Sp3b is neither Trustee nor owner of CDcGtKx1boRYFwPBaGkMmk did use CDcGtKx1boRYFwPBaGkMmk -Did "CDcGtKx1boRYFwPBaGkMmk" has been set as active +Did "CDcGtKx1boRYFwPBaGkMmk" has been set as active - ledger nym did=Jt7aMnw77aoaBMyhXUNjtt verkey=~DT5pLP1wcvsgAzM78sqiRJ -Nym request has been sent to Ledger. -Metadata: +Nym request has been sent to Ledger. 
+Metadata: +------------------------+-----------------+---------------------+---------------------+ | From | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | CDcGtKx1boRYFwPBaGkMmk | 39 | 1534005205716058459 | 2018-08-11 16:33:26 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+-------------------------+------+ | Did | Verkey | Role | +------------------------+-------------------------+------+ @@ -304,14 +304,14 @@ did use CDcGtKx1boRYFwPBaGkMmk +------------------------+-------------------------+------+ - ledger get-nym did=Jt7aMnw77aoaBMyhXUNjtt -Following NYM has been received. -Metadata: +Following NYM has been received. +Metadata: +------------------------+-----------------+---------------------+---------------------+ | Identifier | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | CDcGtKx1boRYFwPBaGkMmk | 39 | 1534005207074672779 | 2018-08-11 16:33:26 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+------------------------+-------------------------+------+ | Identifier | Dest | Verkey | Role | +------------------------+------------------------+-------------------------+------+ @@ -319,11 +319,11 @@ did use CDcGtKx1boRYFwPBaGkMmk +------------------------+------------------------+-------------------------+------+ did use 7qFmEyYCXcmUFVied5Sp3b -Did "7qFmEyYCXcmUFVied5Sp3b" has been set as active +Did "7qFmEyYCXcmUFVied5Sp3b" has been set as active ledger nym did=CDcGtKx1boRYFwPBaGkMmk role=TRUST_ANCHOR -Transaction has been rejected: 7qFmEyYCXcmUFVied5Sp3b is neither Trustee nor owner of CDcGtKx1boRYFwPBaGkMmk -Batch execution failed at line #49 -Wallet "AS-03-wallet-steward" has been closed -Pool "AS-0301-steward" has 
been disconnected -Goodbye... +Transaction has been rejected: 7qFmEyYCXcmUFVied5Sp3b is neither Trustee nor owner of CDcGtKx1boRYFwPBaGkMmk +Batch execution failed at line #49 +Wallet "AS-03-wallet-steward" has been closed +Pool "AS-0301-steward" has been disconnected +Goodbye... diff --git a/acceptance/indy-cli-batches/expected/AS-03-01-trust-anchor.expected b/acceptance/indy-cli-batches/expected/AS-03-01-trust-anchor.expected index eb4a22739..9a47e7bb8 100644 --- a/acceptance/indy-cli-batches/expected/AS-03-01-trust-anchor.expected +++ b/acceptance/indy-cli-batches/expected/AS-03-01-trust-anchor.expected @@ -1,75 +1,75 @@ - pool create AS-0301-anchor gen_txn_file=./pool_transactions_genesis -Pool config "AS-0301-anchor" has been created +Pool config "AS-0301-anchor" has been created pool connect AS-0301-anchor -Pool "AS-0301-anchor" has been connected +Pool "AS-0301-anchor" has been connected - wallet create AS-03-wallet-anchor key=testkey storage_config={"pool":"AS-0301-anchor"} -Wallet "AS-03-wallet-anchor" has been created +Wallet "AS-03-wallet-anchor" has been created wallet open AS-03-wallet-anchor key=testkey -Wallet "AS-03-wallet-anchor" has been opened +Wallet "AS-03-wallet-anchor" has been opened did new seed=000000000000000000000000Trustee1 metadata="Default Trustee" -Did "V4SGRU86Z58d6TV7PBUe6f" has been created with "~CoRER63DVYnWZtK8uAzNbx" verkey -Metadata has been saved for DID "V4SGRU86Z58d6TV7PBUe6f" +Did "V4SGRU86Z58d6TV7PBUe6f" has been created with "~CoRER63DVYnWZtK8uAzNbx" verkey +Metadata has been saved for DID "V4SGRU86Z58d6TV7PBUe6f" did new seed=TestTrustAnchorTrustee1000000002 metadata="Trustee 1" -Did "HUBDajRyCvTGR18ETzEGEB" has been created with "~DriCN8uMxoA8tiBazmkBWk" verkey -Metadata has been saved for DID "HUBDajRyCvTGR18ETzEGEB" +Did "HUBDajRyCvTGR18ETzEGEB" has been created with "~DriCN8uMxoA8tiBazmkBWk" verkey +Metadata has been saved for DID "HUBDajRyCvTGR18ETzEGEB" did new seed=TestTrustAnchorTrustee2000000002 
metadata="Trustee 2" -Did "KY4VDKjHUPTnxtF2zfJnNP" has been created with "~WUktVVQHL7saxaNochggZR" verkey -Metadata has been saved for DID "KY4VDKjHUPTnxtF2zfJnNP" +Did "KY4VDKjHUPTnxtF2zfJnNP" has been created with "~WUktVVQHL7saxaNochggZR" verkey +Metadata has been saved for DID "KY4VDKjHUPTnxtF2zfJnNP" did new seed=TestTrustAnchorSteward1000000002 metadata="Steward 1" -Did "23LxaWGVajU7vMn7zcrHGK" has been created with "~UgkZ1ktZV2DQg1sJn3tjdP" verkey -Metadata has been saved for DID "23LxaWGVajU7vMn7zcrHGK" +Did "23LxaWGVajU7vMn7zcrHGK" has been created with "~UgkZ1ktZV2DQg1sJn3tjdP" verkey +Metadata has been saved for DID "23LxaWGVajU7vMn7zcrHGK" did new seed=TestTrustAnchorSteward2000000002 metadata="Steward 2" -Did "BhJXbKYGm9ZkHF3jjhDrPJ" has been created with "~R7RAboPbpw5ycURytkcASB" verkey -Metadata has been saved for DID "BhJXbKYGm9ZkHF3jjhDrPJ" +Did "BhJXbKYGm9ZkHF3jjhDrPJ" has been created with "~R7RAboPbpw5ycURytkcASB" verkey +Metadata has been saved for DID "BhJXbKYGm9ZkHF3jjhDrPJ" did new seed=TestTrustAnchorCreateTrustAncho1 metadata="Trust Anchor 1" -Did "T6XTs3nSU3J7ptAcxSnaVo" has been created with "~5izgZjWpw3gxeDB9KRS7BN" verkey -Metadata has been saved for DID "T6XTs3nSU3J7ptAcxSnaVo" +Did "T6XTs3nSU3J7ptAcxSnaVo" has been created with "~5izgZjWpw3gxeDB9KRS7BN" verkey +Metadata has been saved for DID "T6XTs3nSU3J7ptAcxSnaVo" did new seed=TestTrustAnchorCreateTrustAncho2 metadata="Trust Anchor 2" -Did "TPrMRHHSunP3cEtpkY8gi7" has been created with "~ANMdNhQ3uuyXDmn7cxi8df" verkey -Metadata has been saved for DID "TPrMRHHSunP3cEtpkY8gi7" +Did "TPrMRHHSunP3cEtpkY8gi7" has been created with "~ANMdNhQ3uuyXDmn7cxi8df" verkey +Metadata has been saved for DID "TPrMRHHSunP3cEtpkY8gi7" did new seed=TestTrustAnchorCreateTrustAncho3 metadata="Trust Anchor 3" -Did "Xw1QKjyG938oV5gUnLoBhm" has been created with "~3iC9GEndzoEYcruyhTLWDc" verkey -Metadata has been saved for DID "Xw1QKjyG938oV5gUnLoBhm" +Did "Xw1QKjyG938oV5gUnLoBhm" has been created with 
"~3iC9GEndzoEYcruyhTLWDc" verkey +Metadata has been saved for DID "Xw1QKjyG938oV5gUnLoBhm" did new seed=TestTrustAnchorCreateTrustAncho4 metadata="Trust Anchor 4" -Did "3KghQejG1HZU5MbpQL5Cuk" has been created with "~6DWoz55wzC77CDEYkZzytR" verkey -Metadata has been saved for DID "3KghQejG1HZU5MbpQL5Cuk" +Did "3KghQejG1HZU5MbpQL5Cuk" has been created with "~6DWoz55wzC77CDEYkZzytR" verkey +Metadata has been saved for DID "3KghQejG1HZU5MbpQL5Cuk" did new seed=TestTrustAnchorRandomUser1000002 metadata="Random User 1" -Did "MXYSvpB5e549z6nqa1nTEh" has been created with "~PR7BXc75UwtiPvKjtDn26N" verkey -Metadata has been saved for DID "MXYSvpB5e549z6nqa1nTEh" +Did "MXYSvpB5e549z6nqa1nTEh" has been created with "~PR7BXc75UwtiPvKjtDn26N" verkey +Metadata has been saved for DID "MXYSvpB5e549z6nqa1nTEh" did new seed=TestTrustAnchorRandomUser2000002 metadata="Random User 2" -Did "WDLETDtBugFiJvtkghHoH" has been created with "~QdAhvFaZ81ZZ1DxwqZ5i8i" verkey -Metadata has been saved for DID "WDLETDtBugFiJvtkghHoH" +Did "WDLETDtBugFiJvtkghHoH" has been created with "~QdAhvFaZ81ZZ1DxwqZ5i8i" verkey +Metadata has been saved for DID "WDLETDtBugFiJvtkghHoH" did new seed=TestTrustAnchorRandomUser3000002 metadata="Random User 3" -Did "3B1g9f3vC4pR8bTfi21z91" has been created with "~5RtP3dPmQdNmz75TvQPK9k" verkey -Metadata has been saved for DID "3B1g9f3vC4pR8bTfi21z91" +Did "3B1g9f3vC4pR8bTfi21z91" has been created with "~5RtP3dPmQdNmz75TvQPK9k" verkey +Metadata has been saved for DID "3B1g9f3vC4pR8bTfi21z91" did use V4SGRU86Z58d6TV7PBUe6f -Did "V4SGRU86Z58d6TV7PBUe6f" has been set as active +Did "V4SGRU86Z58d6TV7PBUe6f" has been set as active ledger nym did=23LxaWGVajU7vMn7zcrHGK role=STEWARD verkey=~UgkZ1ktZV2DQg1sJn3tjdP -Nym request has been sent to Ledger. -Metadata: +Nym request has been sent to Ledger. 
+Metadata: +------------------------+-----------------+---------------------+---------------------+ | From | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | V4SGRU86Z58d6TV7PBUe6f | 40 | 1534005232137571436 | 2018-08-11 16:33:52 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+-------------------------+---------+ | Did | Verkey | Role | +------------------------+-------------------------+---------+ @@ -77,14 +77,14 @@ ledger nym did=23LxaWGVajU7vMn7zcrHGK role=STEWARD verkey=~UgkZ1ktZV2DQg1sJn3tjd +------------------------+-------------------------+---------+ - ledger get-nym did=23LxaWGVajU7vMn7zcrHGK -Following NYM has been received. -Metadata: +Following NYM has been received. +Metadata: +------------------------+-----------------+---------------------+---------------------+ | Identifier | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | V4SGRU86Z58d6TV7PBUe6f | 40 | 1534005232925602488 | 2018-08-11 16:33:52 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+------------------------+-------------------------+---------+ | Identifier | Dest | Verkey | Role | +------------------------+------------------------+-------------------------+---------+ @@ -92,17 +92,17 @@ ledger nym did=23LxaWGVajU7vMn7zcrHGK role=STEWARD verkey=~UgkZ1ktZV2DQg1sJn3tjd +------------------------+------------------------+-------------------------+---------+ did use V4SGRU86Z58d6TV7PBUe6f -Did "V4SGRU86Z58d6TV7PBUe6f" has been set as active +Did "V4SGRU86Z58d6TV7PBUe6f" has been set as active ledger nym did=T6XTs3nSU3J7ptAcxSnaVo role=TRUST_ANCHOR verkey=~5izgZjWpw3gxeDB9KRS7BN -Nym request has been sent to Ledger. 
-Metadata: +Nym request has been sent to Ledger. +Metadata: +------------------------+-----------------+---------------------+---------------------+ | From | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | V4SGRU86Z58d6TV7PBUe6f | 41 | 1534005233011271550 | 2018-08-11 16:33:53 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+-------------------------+--------------+ | Did | Verkey | Role | +------------------------+-------------------------+--------------+ @@ -110,14 +110,14 @@ ledger nym did=T6XTs3nSU3J7ptAcxSnaVo role=TRUST_ANCHOR verkey=~5izgZjWpw3gxeDB9 +------------------------+-------------------------+--------------+ - ledger get-nym did=T6XTs3nSU3J7ptAcxSnaVo -Following NYM has been received. -Metadata: +Following NYM has been received. +Metadata: +------------------------+-----------------+---------------------+---------------------+ | Identifier | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | V4SGRU86Z58d6TV7PBUe6f | 41 | 1534005233840906565 | 2018-08-11 16:33:53 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+------------------------+-------------------------+--------------+ | Identifier | Dest | Verkey | Role | +------------------------+------------------------+-------------------------+--------------+ @@ -125,17 +125,17 @@ ledger nym did=T6XTs3nSU3J7ptAcxSnaVo role=TRUST_ANCHOR verkey=~5izgZjWpw3gxeDB9 +------------------------+------------------------+-------------------------+--------------+ did use V4SGRU86Z58d6TV7PBUe6f -Did "V4SGRU86Z58d6TV7PBUe6f" has been set as active +Did "V4SGRU86Z58d6TV7PBUe6f" has been set as active ledger nym did=TPrMRHHSunP3cEtpkY8gi7 role=TRUST_ANCHOR 
verkey=~ANMdNhQ3uuyXDmn7cxi8df -Nym request has been sent to Ledger. -Metadata: +Nym request has been sent to Ledger. +Metadata: +------------------------+-----------------+---------------------+---------------------+ | From | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | V4SGRU86Z58d6TV7PBUe6f | 42 | 1534005233933786246 | 2018-08-11 16:33:54 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+-------------------------+--------------+ | Did | Verkey | Role | +------------------------+-------------------------+--------------+ @@ -143,14 +143,14 @@ ledger nym did=TPrMRHHSunP3cEtpkY8gi7 role=TRUST_ANCHOR verkey=~ANMdNhQ3uuyXDmn7 +------------------------+-------------------------+--------------+ - ledger get-nym did=TPrMRHHSunP3cEtpkY8gi7 -Following NYM has been received. -Metadata: +Following NYM has been received. +Metadata: +------------------------+-----------------+---------------------+---------------------+ | Identifier | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | V4SGRU86Z58d6TV7PBUe6f | 42 | 1534005234813642241 | 2018-08-11 16:33:54 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+------------------------+-------------------------+--------------+ | Identifier | Dest | Verkey | Role | +------------------------+------------------------+-------------------------+--------------+ @@ -158,17 +158,17 @@ ledger nym did=TPrMRHHSunP3cEtpkY8gi7 role=TRUST_ANCHOR verkey=~ANMdNhQ3uuyXDmn7 +------------------------+------------------------+-------------------------+--------------+ did use T6XTs3nSU3J7ptAcxSnaVo -Did "T6XTs3nSU3J7ptAcxSnaVo" has been set as active +Did "T6XTs3nSU3J7ptAcxSnaVo" has been set as active ledger nym 
did=MXYSvpB5e549z6nqa1nTEh verkey=~PR7BXc75UwtiPvKjtDn26N -Nym request has been sent to Ledger. -Metadata: +Nym request has been sent to Ledger. +Metadata: +------------------------+-----------------+---------------------+---------------------+ | From | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | T6XTs3nSU3J7ptAcxSnaVo | 43 | 1534005235009499330 | 2018-08-11 16:33:55 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+-------------------------+------+ | Did | Verkey | Role | +------------------------+-------------------------+------+ @@ -176,14 +176,14 @@ ledger nym did=MXYSvpB5e549z6nqa1nTEh verkey=~PR7BXc75UwtiPvKjtDn26N +------------------------+-------------------------+------+ - ledger get-nym did=MXYSvpB5e549z6nqa1nTEh -Following NYM has been received. -Metadata: +Following NYM has been received. +Metadata: +------------------------+-----------------+---------------------+---------------------+ | Identifier | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | T6XTs3nSU3J7ptAcxSnaVo | 43 | 1534005235833544368 | 2018-08-11 16:33:55 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+------------------------+-------------------------+------+ | Identifier | Dest | Verkey | Role | +------------------------+------------------------+-------------------------+------+ @@ -191,23 +191,23 @@ ledger nym did=MXYSvpB5e549z6nqa1nTEh verkey=~PR7BXc75UwtiPvKjtDn26N +------------------------+------------------------+-------------------------+------+ did use T6XTs3nSU3J7ptAcxSnaVo -Did "T6XTs3nSU3J7ptAcxSnaVo" has been set as active +Did "T6XTs3nSU3J7ptAcxSnaVo" has been set as active - ledger nym did=V4SGRU86Z58d6TV7PBUe6f role= 
-Transaction has been rejected: T6XTs3nSU3J7ptAcxSnaVo is neither Trustee nor owner of V4SGRU86Z58d6TV7PBUe6f +Transaction has been rejected: T6XTs3nSU3J7ptAcxSnaVo is neither Trustee nor owner of V4SGRU86Z58d6TV7PBUe6f did use V4SGRU86Z58d6TV7PBUe6f -Did "V4SGRU86Z58d6TV7PBUe6f" has been set as active +Did "V4SGRU86Z58d6TV7PBUe6f" has been set as active ledger nym did=KY4VDKjHUPTnxtF2zfJnNP role=TRUSTEE verkey=~WUktVVQHL7saxaNochggZR -Nym request has been sent to Ledger. -Metadata: +Nym request has been sent to Ledger. +Metadata: +------------------------+-----------------+---------------------+---------------------+ | From | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | V4SGRU86Z58d6TV7PBUe6f | 44 | 1534005236401589738 | 2018-08-11 16:33:57 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+-------------------------+---------+ | Did | Verkey | Role | +------------------------+-------------------------+---------+ @@ -215,14 +215,14 @@ ledger nym did=KY4VDKjHUPTnxtF2zfJnNP role=TRUSTEE verkey=~WUktVVQHL7saxaNochggZ +------------------------+-------------------------+---------+ - ledger get-nym did=KY4VDKjHUPTnxtF2zfJnNP -Following NYM has been received. -Metadata: +Following NYM has been received. 
+Metadata: +------------------------+-----------------+---------------------+---------------------+ | Identifier | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | V4SGRU86Z58d6TV7PBUe6f | 44 | 1534005237793600735 | 2018-08-11 16:33:57 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+------------------------+-------------------------+---------+ | Identifier | Dest | Verkey | Role | +------------------------+------------------------+-------------------------+---------+ @@ -230,32 +230,32 @@ ledger nym did=KY4VDKjHUPTnxtF2zfJnNP role=TRUSTEE verkey=~WUktVVQHL7saxaNochggZ +------------------------+------------------------+-------------------------+---------+ did use T6XTs3nSU3J7ptAcxSnaVo -Did "T6XTs3nSU3J7ptAcxSnaVo" has been set as active +Did "T6XTs3nSU3J7ptAcxSnaVo" has been set as active - ledger nym did=HUBDajRyCvTGR18ETzEGEB role=TRUSTEE verkey=~DriCN8uMxoA8tiBazmkBWk -Transaction has been rejected: TRUST_ANCHOR cannot add TRUSTEE +Transaction has been rejected: TRUST_ANCHOR cannot add TRUSTEE - ledger get-nym did=HUBDajRyCvTGR18ETzEGEB -NYM not found +NYM not found did use T6XTs3nSU3J7ptAcxSnaVo -Did "T6XTs3nSU3J7ptAcxSnaVo" has been set as active +Did "T6XTs3nSU3J7ptAcxSnaVo" has been set as active - ledger nym did=23LxaWGVajU7vMn7zcrHGK role= -Transaction has been rejected: T6XTs3nSU3J7ptAcxSnaVo is neither Trustee nor owner of 23LxaWGVajU7vMn7zcrHGK +Transaction has been rejected: T6XTs3nSU3J7ptAcxSnaVo is neither Trustee nor owner of 23LxaWGVajU7vMn7zcrHGK did use 23LxaWGVajU7vMn7zcrHGK -Did "23LxaWGVajU7vMn7zcrHGK" has been set as active +Did "23LxaWGVajU7vMn7zcrHGK" has been set as active ledger nym did=3KghQejG1HZU5MbpQL5Cuk role=TRUST_ANCHOR verkey=~6DWoz55wzC77CDEYkZzytR -Nym request has been sent to Ledger. -Metadata: +Nym request has been sent to Ledger. 
+Metadata: +------------------------+-----------------+---------------------+---------------------+ | From | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | 23LxaWGVajU7vMn7zcrHGK | 45 | 1534005239424172193 | 2018-08-11 16:34:00 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+-------------------------+--------------+ | Did | Verkey | Role | +------------------------+-------------------------+--------------+ @@ -263,14 +263,14 @@ ledger nym did=3KghQejG1HZU5MbpQL5Cuk role=TRUST_ANCHOR verkey=~6DWoz55wzC77CDEY +------------------------+-------------------------+--------------+ - ledger get-nym did=3KghQejG1HZU5MbpQL5Cuk -Following NYM has been received. -Metadata: +Following NYM has been received. +Metadata: +------------------------+-----------------+---------------------+---------------------+ | Identifier | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | 23LxaWGVajU7vMn7zcrHGK | 45 | 1534005240802053758 | 2018-08-11 16:34:00 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+------------------------+-------------------------+--------------+ | Identifier | Dest | Verkey | Role | +------------------------+------------------------+-------------------------+--------------+ @@ -278,32 +278,32 @@ ledger nym did=3KghQejG1HZU5MbpQL5Cuk role=TRUST_ANCHOR verkey=~6DWoz55wzC77CDEY +------------------------+------------------------+-------------------------+--------------+ did use T6XTs3nSU3J7ptAcxSnaVo -Did "T6XTs3nSU3J7ptAcxSnaVo" has been set as active +Did "T6XTs3nSU3J7ptAcxSnaVo" has been set as active - ledger nym did=BhJXbKYGm9ZkHF3jjhDrPJ role=STEWARD verkey=~R7RAboPbpw5ycURytkcASB -Transaction has been rejected: TRUST_ANCHOR 
cannot add STEWARD +Transaction has been rejected: TRUST_ANCHOR cannot add STEWARD - ledger get-nym did=BhJXbKYGm9ZkHF3jjhDrPJ -NYM not found +NYM not found did use T6XTs3nSU3J7ptAcxSnaVo -Did "T6XTs3nSU3J7ptAcxSnaVo" has been set as active +Did "T6XTs3nSU3J7ptAcxSnaVo" has been set as active - ledger nym did=TPrMRHHSunP3cEtpkY8gi7 role= -Transaction has been rejected: T6XTs3nSU3J7ptAcxSnaVo is neither Trustee nor owner of TPrMRHHSunP3cEtpkY8gi7 +Transaction has been rejected: T6XTs3nSU3J7ptAcxSnaVo is neither Trustee nor owner of TPrMRHHSunP3cEtpkY8gi7 did use TPrMRHHSunP3cEtpkY8gi7 -Did "TPrMRHHSunP3cEtpkY8gi7" has been set as active +Did "TPrMRHHSunP3cEtpkY8gi7" has been set as active ledger nym did=3B1g9f3vC4pR8bTfi21z91 verkey=~5RtP3dPmQdNmz75TvQPK9k -Nym request has been sent to Ledger. -Metadata: +Nym request has been sent to Ledger. +Metadata: +------------------------+-----------------+---------------------+---------------------+ | From | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | TPrMRHHSunP3cEtpkY8gi7 | 46 | 1534005242457431605 | 2018-08-11 16:34:03 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+-------------------------+------+ | Did | Verkey | Role | +------------------------+-------------------------+------+ @@ -311,14 +311,14 @@ ledger nym did=3B1g9f3vC4pR8bTfi21z91 verkey=~5RtP3dPmQdNmz75TvQPK9k +------------------------+-------------------------+------+ - ledger get-nym did=3B1g9f3vC4pR8bTfi21z91 -Following NYM has been received. -Metadata: +Following NYM has been received. 
+Metadata: +------------------------+-----------------+---------------------+---------------------+ | Identifier | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | TPrMRHHSunP3cEtpkY8gi7 | 46 | 1534005243838067619 | 2018-08-11 16:34:03 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+------------------------+-------------------------+------+ | Identifier | Dest | Verkey | Role | +------------------------+------------------------+-------------------------+------+ @@ -326,29 +326,29 @@ ledger nym did=3B1g9f3vC4pR8bTfi21z91 verkey=~5RtP3dPmQdNmz75TvQPK9k +------------------------+------------------------+-------------------------+------+ did use T6XTs3nSU3J7ptAcxSnaVo -Did "T6XTs3nSU3J7ptAcxSnaVo" has been set as active +Did "T6XTs3nSU3J7ptAcxSnaVo" has been set as active - ledger nym did=Xw1QKjyG938oV5gUnLoBhm role=TRUST_ANCHOR verkey=~3iC9GEndzoEYcruyhTLWDc -Transaction has been rejected: TRUST_ANCHOR cannot add TRUST_ANCHOR +Transaction has been rejected: TRUST_ANCHOR cannot add TRUST_ANCHOR - ledger get-nym did=Xw1QKjyG938oV5gUnLoBhm -NYM not found +NYM not found did use T6XTs3nSU3J7ptAcxSnaVo -Did "T6XTs3nSU3J7ptAcxSnaVo" has been set as active +Did "T6XTs3nSU3J7ptAcxSnaVo" has been set as active - ledger nym did=T6XTs3nSU3J7ptAcxSnaVo role= -Transaction has been rejected: TRUST_ANCHOR cannot update role +Transaction has been rejected: TRUST_ANCHOR cannot update role ledger nym did=WDLETDtBugFiJvtkghHoH verkey=~QdAhvFaZ81ZZ1DxwqZ5i8i -Nym request has been sent to Ledger. -Metadata: +Nym request has been sent to Ledger. 
+Metadata: +------------------------+-----------------+---------------------+---------------------+ | From | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | T6XTs3nSU3J7ptAcxSnaVo | 47 | 1534005245509089648 | 2018-08-11 16:34:06 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +-----------------------+-------------------------+------+ | Did | Verkey | Role | +-----------------------+-------------------------+------+ @@ -356,14 +356,14 @@ ledger nym did=WDLETDtBugFiJvtkghHoH verkey=~QdAhvFaZ81ZZ1DxwqZ5i8i +-----------------------+-------------------------+------+ - ledger get-nym did=WDLETDtBugFiJvtkghHoH -Following NYM has been received. -Metadata: +Following NYM has been received. +Metadata: +------------------------+-----------------+---------------------+---------------------+ | Identifier | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | T6XTs3nSU3J7ptAcxSnaVo | 47 | 1534005246907362764 | 2018-08-11 16:34:06 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+-----------------------+-------------------------+------+ | Identifier | Dest | Verkey | Role | +------------------------+-----------------------+-------------------------+------+ @@ -371,32 +371,32 @@ ledger nym did=WDLETDtBugFiJvtkghHoH verkey=~QdAhvFaZ81ZZ1DxwqZ5i8i +------------------------+-----------------------+-------------------------+------+ did use T6XTs3nSU3J7ptAcxSnaVo -Did "T6XTs3nSU3J7ptAcxSnaVo" has been set as active +Did "T6XTs3nSU3J7ptAcxSnaVo" has been set as active - ledger nym did=WDLETDtBugFiJvtkghHoH role=TRUST_ANCHOR -Transaction has been rejected: T6XTs3nSU3J7ptAcxSnaVo is neither Trustee nor owner of WDLETDtBugFiJvtkghHoH +Transaction has been rejected: 
T6XTs3nSU3J7ptAcxSnaVo is neither Trustee nor owner of WDLETDtBugFiJvtkghHoH - ledger nym did=WDLETDtBugFiJvtkghHoH role=STEWARD -Transaction has been rejected: T6XTs3nSU3J7ptAcxSnaVo is neither Trustee nor owner of WDLETDtBugFiJvtkghHoH +Transaction has been rejected: T6XTs3nSU3J7ptAcxSnaVo is neither Trustee nor owner of WDLETDtBugFiJvtkghHoH - ledger nym did=WDLETDtBugFiJvtkghHoH role=TRUSTEE -Transaction has been rejected: T6XTs3nSU3J7ptAcxSnaVo is neither Trustee nor owner of WDLETDtBugFiJvtkghHoH +Transaction has been rejected: T6XTs3nSU3J7ptAcxSnaVo is neither Trustee nor owner of WDLETDtBugFiJvtkghHoH did use V4SGRU86Z58d6TV7PBUe6f -Did "V4SGRU86Z58d6TV7PBUe6f" has been set as active +Did "V4SGRU86Z58d6TV7PBUe6f" has been set as active did new seed=RandomAttribOwner000000000000002 -Did "LoZB9o2PFfSjrpnkugqunZ" has been created with "~X2yUYuMgCaaPSEyudS8jnT" verkey +Did "LoZB9o2PFfSjrpnkugqunZ" has been created with "~X2yUYuMgCaaPSEyudS8jnT" verkey ledger nym did=LoZB9o2PFfSjrpnkugqunZ verkey=~X2yUYuMgCaaPSEyudS8jnT role=TRUST_ANCHOR -Nym request has been sent to Ledger. -Metadata: +Nym request has been sent to Ledger. 
+Metadata: +------------------------+-----------------+---------------------+---------------------+ | From | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | V4SGRU86Z58d6TV7PBUe6f | 48 | 1534005249621295310 | 2018-08-11 16:34:10 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+-------------------------+--------------+ | Did | Verkey | Role | +------------------------+-------------------------+--------------+ @@ -404,17 +404,17 @@ ledger nym did=LoZB9o2PFfSjrpnkugqunZ verkey=~X2yUYuMgCaaPSEyudS8jnT role=TRUST_ +------------------------+-------------------------+--------------+ did new seed=RandomUserForRotateKey0000000002 -Did "ETcbLj612u9oXr7adZSWZV" has been created with "~UUMiscx15SsxekQtLfVVLh" verkey +Did "ETcbLj612u9oXr7adZSWZV" has been created with "~UUMiscx15SsxekQtLfVVLh" verkey ledger nym did=ETcbLj612u9oXr7adZSWZV verkey=~UUMiscx15SsxekQtLfVVLh role=TRUST_ANCHOR -Nym request has been sent to Ledger. -Metadata: +Nym request has been sent to Ledger. 
+Metadata: +------------------------+-----------------+---------------------+---------------------+ | From | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | V4SGRU86Z58d6TV7PBUe6f | 49 | 1534005250977304111 | 2018-08-11 16:34:11 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+-------------------------+--------------+ | Did | Verkey | Role | +------------------------+-------------------------+--------------+ @@ -422,29 +422,29 @@ ledger nym did=ETcbLj612u9oXr7adZSWZV verkey=~UUMiscx15SsxekQtLfVVLh role=TRUST_ +------------------------+-------------------------+--------------+ did use LoZB9o2PFfSjrpnkugqunZ -Did "LoZB9o2PFfSjrpnkugqunZ" has been set as active +Did "LoZB9o2PFfSjrpnkugqunZ" has been set as active did rotate-key seed=RandomUserForRotateKey0NewKey002 -Verkey for did "LoZB9o2PFfSjrpnkugqunZ" has been updated. New verkey: "4MDqPrrqjuUP8csRGTbTN81oXjypYQVsW6dQVsUTHgDk" +Verkey for did "LoZB9o2PFfSjrpnkugqunZ" has been updated. New verkey: "4MDqPrrqjuUP8csRGTbTN81oXjypYQVsW6dQVsUTHgDk" did use ETcbLj612u9oXr7adZSWZV -Did "ETcbLj612u9oXr7adZSWZV" has been set as active +Did "ETcbLj612u9oXr7adZSWZV" has been set as active - ledger nym did=LoZB9o2PFfSjrpnkugqunZ verkey=~X2yUYuMgCaaPSEyudS8jnT -Transaction has been rejected: ETcbLj612u9oXr7adZSWZV is neither Trustee nor owner of LoZB9o2PFfSjrpnkugqunZ +Transaction has been rejected: ETcbLj612u9oXr7adZSWZV is neither Trustee nor owner of LoZB9o2PFfSjrpnkugqunZ did use LoZB9o2PFfSjrpnkugqunZ -Did "LoZB9o2PFfSjrpnkugqunZ" has been set as active +Did "LoZB9o2PFfSjrpnkugqunZ" has been set as active ledger attrib did=LoZB9o2PFfSjrpnkugqunZ raw={"endpoint":{"ha":"127.0.0.1:5555"}} -Attrib request has been sent to Ledger. -Metadata: +Attrib request has been sent to Ledger. 
+Metadata: +------------------------+-----------------+---------------------+---------------------+ | From | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | LoZB9o2PFfSjrpnkugqunZ | 51 | 1534005253673940808 | 2018-08-11 16:34:14 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +--------------------------------------+ | Raw value | +--------------------------------------+ @@ -452,14 +452,14 @@ ledger attrib did=LoZB9o2PFfSjrpnkugqunZ raw={"endpoint":{"ha":"127.0.0.1:5555"} +--------------------------------------+ ledger attrib did=LoZB9o2PFfSjrpnkugqunZ raw={"endpoint":{"ha":"127.0.0.1:6666"}} -Attrib request has been sent to Ledger. -Metadata: +Attrib request has been sent to Ledger. +Metadata: +------------------------+-----------------+---------------------+---------------------+ | From | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | LoZB9o2PFfSjrpnkugqunZ | 52 | 1534005255027302638 | 2018-08-11 16:34:15 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +--------------------------------------+ | Raw value | +--------------------------------------+ @@ -467,26 +467,26 @@ ledger attrib did=LoZB9o2PFfSjrpnkugqunZ raw={"endpoint":{"ha":"127.0.0.1:6666"} +--------------------------------------+ did use LoZB9o2PFfSjrpnkugqunZ -Did "LoZB9o2PFfSjrpnkugqunZ" has been set as active +Did "LoZB9o2PFfSjrpnkugqunZ" has been set as active - ledger attrib did=ETcbLj612u9oXr7adZSWZV raw={"endpoint":{"ha":"127.0.0.1:5555"}} -Transaction has been rejected: Only identity owner/guardian can add attribute for that identity +Transaction has been rejected: Only identity owner/guardian can add attribute for that identity did use ETcbLj612u9oXr7adZSWZV -Did "ETcbLj612u9oXr7adZSWZV" has been set 
as active +Did "ETcbLj612u9oXr7adZSWZV" has been set as active - ledger attrib did=LoZB9o2PFfSjrpnkugqunZ raw={"endpoint":{"ha":"127.0.0.1:5555"}} -Transaction has been rejected: Only identity owner/guardian can add attribute for that identity +Transaction has been rejected: Only identity owner/guardian can add attribute for that identity ledger schema name=TrustAnchorSchema version=1.0 attr_names=name,age -Schema request has been sent to Ledger. -Metadata: +Schema request has been sent to Ledger. +Metadata: +------------------------+-----------------+---------------------+---------------------+ | From | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | ETcbLj612u9oXr7adZSWZV | 53 | 1534005257721831110 | 2018-08-11 16:34:18 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +-------------------+---------+--------------+ | Name | Version | Attributes | +-------------------+---------+--------------+ @@ -494,20 +494,8 @@ ledger schema name=TrustAnchorSchema version=1.0 attr_names=name,age +-------------------+---------+--------------+ ledger cred-def schema_id=1 signature_type=CL primary={"n":"1","s":"2","rms":"3","r":{"age":"4","name":"5"},"rctxt":"6","z":"7"} tag=2 -NodeConfig request has been sent to Ledger. 
-Metadata: -+------------------------+-----------------+---------------------+---------------------+ -| From | Sequence Number | Request ID | Transaction time | -+------------------------+-----------------+---------------------+---------------------+ -| ETcbLj612u9oXr7adZSWZV | 54 | 1534005259113311294 | 2018-08-11 16:34:19 | -+------------------------+-----------------+---------------------+---------------------+ -Data: -+----------------------------------------------------------------------------+ -| Primary Key | -+----------------------------------------------------------------------------+ -| {n:"1",r:{"age":"4","master_secret":"3","name":"5"},rctxt:"6",s:"2",z:"7"} | -+----------------------------------------------------------------------------+ +Transaction has been rejected: Mentioned seqNo (1) isn't seqNo of the schema. -Wallet "AS-03-wallet-anchor" has been closed -Pool "AS-0301-anchor" has been disconnected -Goodbye... +Wallet "AS-03-wallet-anchor" has been closed +Pool "AS-0301-anchor" has been disconnected +Goodbye... 
diff --git a/acceptance/indy-cli-batches/expected/AS-03-01-trustee.expected b/acceptance/indy-cli-batches/expected/AS-03-01-trustee.expected index c70bd687c..99715cde6 100644 --- a/acceptance/indy-cli-batches/expected/AS-03-01-trustee.expected +++ b/acceptance/indy-cli-batches/expected/AS-03-01-trustee.expected @@ -1,75 +1,75 @@ - pool create AS-0301-trustee gen_txn_file=./pool_transactions_genesis -Pool config "AS-0301-trustee" has been created +Pool config "AS-0301-trustee" has been created pool connect AS-0301-trustee -Pool "AS-0301-trustee" has been connected +Pool "AS-0301-trustee" has been connected - wallet create AS-03-wallet-trustee key=testkey storage_config={"pool":"AS-0301-trustee"} -Wallet "AS-03-wallet-trustee" has been created +Wallet "AS-03-wallet-trustee" has been created wallet open AS-03-wallet-trustee key=testkey -Wallet "AS-03-wallet-trustee" has been opened +Wallet "AS-03-wallet-trustee" has been opened did new seed=TestTrustee100000000000000000000 metadata="test trustee 1" -Did "CDBcM7hSAmQuenmEnE8dXG" has been created with "~JsqrSipV963hwbojxwR2fg" verkey -Metadata has been saved for DID "CDBcM7hSAmQuenmEnE8dXG" +Did "CDBcM7hSAmQuenmEnE8dXG" has been created with "~JsqrSipV963hwbojxwR2fg" verkey +Metadata has been saved for DID "CDBcM7hSAmQuenmEnE8dXG" did new seed=TestTrustee200000000000000000000 metadata="test trustee 2" -Did "QQMQmb1mrBRJR3vx6GP4kg" has been created with "~7Cj2MTAjyAbHdAiuafgCUW" verkey -Metadata has been saved for DID "QQMQmb1mrBRJR3vx6GP4kg" +Did "QQMQmb1mrBRJR3vx6GP4kg" has been created with "~7Cj2MTAjyAbHdAiuafgCUW" verkey +Metadata has been saved for DID "QQMQmb1mrBRJR3vx6GP4kg" did new seed=TestTrustee300000000000000000000 metadata="test trustee 3" -Did "VNHim6zQkiNEF1yPL6xVev" has been created with "~Xih8tdRvVQiudPEB3M2ypc" verkey -Metadata has been saved for DID "VNHim6zQkiNEF1yPL6xVev" +Did "VNHim6zQkiNEF1yPL6xVev" has been created with "~Xih8tdRvVQiudPEB3M2ypc" verkey +Metadata has been saved for DID 
"VNHim6zQkiNEF1yPL6xVev" did new seed=TestSteward100000000000000000000 metadata="test steward 1" -Did "YLs2LfEvHoSdS6euixMAe8" has been created with "~UEjom7C2wcysFHkHqXVtSo" verkey -Metadata has been saved for DID "YLs2LfEvHoSdS6euixMAe8" +Did "YLs2LfEvHoSdS6euixMAe8" has been created with "~UEjom7C2wcysFHkHqXVtSo" verkey +Metadata has been saved for DID "YLs2LfEvHoSdS6euixMAe8" did new seed=TestSteward200000000000000000000 metadata="test steward 2" -Did "3JauiZvnHv6oSBi4DTikg4" has been created with "~5EhgzbpoKEH6f7rQgyUbjJ" verkey -Metadata has been saved for DID "3JauiZvnHv6oSBi4DTikg4" +Did "3JauiZvnHv6oSBi4DTikg4" has been created with "~5EhgzbpoKEH6f7rQgyUbjJ" verkey +Metadata has been saved for DID "3JauiZvnHv6oSBi4DTikg4" did new seed=TestTrustAnchor00000000000000000 metadata="test trust anchor 1" -Did "AWwweaRGGCa4hfvW9WLmeb" has been created with "~D8gYyv77VvdkXguu7mHKfR" verkey -Metadata has been saved for DID "AWwweaRGGCa4hfvW9WLmeb" +Did "AWwweaRGGCa4hfvW9WLmeb" has been created with "~D8gYyv77VvdkXguu7mHKfR" verkey +Metadata has been saved for DID "AWwweaRGGCa4hfvW9WLmeb" did new seed=TestTrustAnchor10000000000000000 metadata="test trust anchor 2" -Did "EAvxCBmoYzkojBtgX1SVh8" has been created with "~HXn8bHcAYuaDAsCdjdLxLc" verkey -Metadata has been saved for DID "EAvxCBmoYzkojBtgX1SVh8" +Did "EAvxCBmoYzkojBtgX1SVh8" has been created with "~HXn8bHcAYuaDAsCdjdLxLc" verkey +Metadata has been saved for DID "EAvxCBmoYzkojBtgX1SVh8" did new seed=RandomUser1000000000000000000000 metadata="test identity 1" -Did "6nnZTxQ18EkVEjVV6hBMeT" has been created with "~2TBdyJLgAhkbogxjpUfPJD" verkey -Metadata has been saved for DID "6nnZTxQ18EkVEjVV6hBMeT" +Did "6nnZTxQ18EkVEjVV6hBMeT" has been created with "~2TBdyJLgAhkbogxjpUfPJD" verkey +Metadata has been saved for DID "6nnZTxQ18EkVEjVV6hBMeT" did new seed=RandomUser2000000000000000000000 metadata="test identity 2" -Did "X25X1vKgEkwRDNgj1zJ1Xg" has been created with "~5TR49msrun2AgCKQZsstRX" verkey -Metadata has 
been saved for DID "X25X1vKgEkwRDNgj1zJ1Xg" +Did "X25X1vKgEkwRDNgj1zJ1Xg" has been created with "~5TR49msrun2AgCKQZsstRX" verkey +Metadata has been saved for DID "X25X1vKgEkwRDNgj1zJ1Xg" did new seed=RandomUser4000000000000000000000 metadata="test identity 4" -Did "FZp8ZoqiKrp9uuAujE38xA" has been created with "~2unRfj35pEJ9qRpWJyBAvz" verkey -Metadata has been saved for DID "FZp8ZoqiKrp9uuAujE38xA" +Did "FZp8ZoqiKrp9uuAujE38xA" has been created with "~2unRfj35pEJ9qRpWJyBAvz" verkey +Metadata has been saved for DID "FZp8ZoqiKrp9uuAujE38xA" did new seed=RandomUser5000000000000000000000 metadata="test identity 5" -Did "NhF3hPZyV715sfmvsjhj1h" has been created with "~EjjPJGQcnFGhuyS35bVMGA" verkey -Metadata has been saved for DID "NhF3hPZyV715sfmvsjhj1h" +Did "NhF3hPZyV715sfmvsjhj1h" has been created with "~EjjPJGQcnFGhuyS35bVMGA" verkey +Metadata has been saved for DID "NhF3hPZyV715sfmvsjhj1h" did new seed=000000000000000000000000Trustee1 metadata="default trustee" -Did "V4SGRU86Z58d6TV7PBUe6f" has been created with "~CoRER63DVYnWZtK8uAzNbx" verkey -Metadata has been saved for DID "V4SGRU86Z58d6TV7PBUe6f" +Did "V4SGRU86Z58d6TV7PBUe6f" has been created with "~CoRER63DVYnWZtK8uAzNbx" verkey +Metadata has been saved for DID "V4SGRU86Z58d6TV7PBUe6f" did use V4SGRU86Z58d6TV7PBUe6f -Did "V4SGRU86Z58d6TV7PBUe6f" has been set as active +Did "V4SGRU86Z58d6TV7PBUe6f" has been set as active ledger nym did=QQMQmb1mrBRJR3vx6GP4kg role=TRUSTEE verkey=~7Cj2MTAjyAbHdAiuafgCUW -Nym request has been sent to Ledger. -Metadata: +Nym request has been sent to Ledger. 
+Metadata: +------------------------+-----------------+---------------------+---------------------+ | From | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | V4SGRU86Z58d6TV7PBUe6f | 55 | 1534005278646869693 | 2018-08-11 16:34:38 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+-------------------------+---------+ | Did | Verkey | Role | +------------------------+-------------------------+---------+ @@ -77,17 +77,17 @@ ledger nym did=QQMQmb1mrBRJR3vx6GP4kg role=TRUSTEE verkey=~7Cj2MTAjyAbHdAiuafgCU +------------------------+-------------------------+---------+ did use V4SGRU86Z58d6TV7PBUe6f -Did "V4SGRU86Z58d6TV7PBUe6f" has been set as active +Did "V4SGRU86Z58d6TV7PBUe6f" has been set as active ledger nym did=CDBcM7hSAmQuenmEnE8dXG role=TRUSTEE verkey=~JsqrSipV963hwbojxwR2fg -Nym request has been sent to Ledger. -Metadata: +Nym request has been sent to Ledger. +Metadata: +------------------------+-----------------+---------------------+---------------------+ | From | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | V4SGRU86Z58d6TV7PBUe6f | 56 | 1534005279391165404 | 2018-08-11 16:34:39 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+-------------------------+---------+ | Did | Verkey | Role | +------------------------+-------------------------+---------+ @@ -95,14 +95,14 @@ ledger nym did=CDBcM7hSAmQuenmEnE8dXG role=TRUSTEE verkey=~JsqrSipV963hwbojxwR2f +------------------------+-------------------------+---------+ - ledger get-nym did=CDBcM7hSAmQuenmEnE8dXG -Following NYM has been received. -Metadata: +Following NYM has been received. 
+Metadata: +------------------------+-----------------+---------------------+---------------------+ | Identifier | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | V4SGRU86Z58d6TV7PBUe6f | 56 | 1534005280410181611 | 2018-08-11 16:34:39 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+------------------------+-------------------------+---------+ | Identifier | Dest | Verkey | Role | +------------------------+------------------------+-------------------------+---------+ @@ -110,17 +110,17 @@ ledger nym did=CDBcM7hSAmQuenmEnE8dXG role=TRUSTEE verkey=~JsqrSipV963hwbojxwR2f +------------------------+------------------------+-------------------------+---------+ did use CDBcM7hSAmQuenmEnE8dXG -Did "CDBcM7hSAmQuenmEnE8dXG" has been set as active +Did "CDBcM7hSAmQuenmEnE8dXG" has been set as active ledger nym did=3JauiZvnHv6oSBi4DTikg4 role=STEWARD verkey=~5EhgzbpoKEH6f7rQgyUbjJ -Nym request has been sent to Ledger. -Metadata: +Nym request has been sent to Ledger. +Metadata: +------------------------+-----------------+---------------------+---------------------+ | From | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | CDBcM7hSAmQuenmEnE8dXG | 57 | 1534005280481293123 | 2018-08-11 16:34:40 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+-------------------------+---------+ | Did | Verkey | Role | +------------------------+-------------------------+---------+ @@ -128,14 +128,14 @@ ledger nym did=3JauiZvnHv6oSBi4DTikg4 role=STEWARD verkey=~5EhgzbpoKEH6f7rQgyUbj +------------------------+-------------------------+---------+ - ledger get-nym did=3JauiZvnHv6oSBi4DTikg4 -Following NYM has been received. 
-Metadata: +Following NYM has been received. +Metadata: +------------------------+-----------------+---------------------+---------------------+ | Identifier | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | CDBcM7hSAmQuenmEnE8dXG | 57 | 1534005281449313999 | 2018-08-11 16:34:40 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+------------------------+-------------------------+---------+ | Identifier | Dest | Verkey | Role | +------------------------+------------------------+-------------------------+---------+ @@ -143,17 +143,17 @@ ledger nym did=3JauiZvnHv6oSBi4DTikg4 role=STEWARD verkey=~5EhgzbpoKEH6f7rQgyUbj +------------------------+------------------------+-------------------------+---------+ did use V4SGRU86Z58d6TV7PBUe6f -Did "V4SGRU86Z58d6TV7PBUe6f" has been set as active +Did "V4SGRU86Z58d6TV7PBUe6f" has been set as active ledger nym did=CDBcM7hSAmQuenmEnE8dXG role= -Nym request has been sent to Ledger. -Metadata: +Nym request has been sent to Ledger. 
+Metadata: +------------------------+-----------------+---------------------+---------------------+ | From | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | V4SGRU86Z58d6TV7PBUe6f | 58 | 1534005281506844769 | 2018-08-11 16:34:41 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+------+ | Did | Role | +------------------------+------+ @@ -161,23 +161,23 @@ ledger nym did=CDBcM7hSAmQuenmEnE8dXG role= +------------------------+------+ did use CDBcM7hSAmQuenmEnE8dXG -Did "CDBcM7hSAmQuenmEnE8dXG" has been set as active +Did "CDBcM7hSAmQuenmEnE8dXG" has been set as active -- ledger nym did=6nnZTxQ18EkVEjVV6hBMeT verkey=~2TBdyJLgAhkbogxjpUfPJD -Transaction has been rejected: None role cannot add None role +- ledger nym did=6nnZTxQ18EkVEjVV6hBMeT verkey=~2TBdyJLgAhkbogxjpUfPJD role=TRUSTEE +Transaction has been rejected: None role cannot add TRUSTEE did use V4SGRU86Z58d6TV7PBUe6f -Did "V4SGRU86Z58d6TV7PBUe6f" has been set as active +Did "V4SGRU86Z58d6TV7PBUe6f" has been set as active ledger nym did=CDBcM7hSAmQuenmEnE8dXG role=TRUSTEE -Nym request has been sent to Ledger. -Metadata: +Nym request has been sent to Ledger. 
+Metadata: +------------------------+-----------------+---------------------+---------------------+ | From | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | V4SGRU86Z58d6TV7PBUe6f | 59 | 1534005283121801151 | 2018-08-11 16:34:43 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+---------+ | Did | Role | +------------------------+---------+ @@ -185,17 +185,17 @@ ledger nym did=CDBcM7hSAmQuenmEnE8dXG role=TRUSTEE +------------------------+---------+ did use CDBcM7hSAmQuenmEnE8dXG -Did "CDBcM7hSAmQuenmEnE8dXG" has been set as active +Did "CDBcM7hSAmQuenmEnE8dXG" has been set as active ledger nym did=VNHim6zQkiNEF1yPL6xVev verkey=~Xih8tdRvVQiudPEB3M2ypc -Nym request has been sent to Ledger. -Metadata: +Nym request has been sent to Ledger. +Metadata: +------------------------+-----------------+---------------------+---------------------+ | From | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | CDBcM7hSAmQuenmEnE8dXG | 60 | 1534005284522049385 | 2018-08-11 16:34:44 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+-------------------------+------+ | Did | Verkey | Role | +------------------------+-------------------------+------+ @@ -203,14 +203,14 @@ ledger nym did=VNHim6zQkiNEF1yPL6xVev verkey=~Xih8tdRvVQiudPEB3M2ypc +------------------------+-------------------------+------+ - ledger get-nym did=VNHim6zQkiNEF1yPL6xVev -Following NYM has been received. -Metadata: +Following NYM has been received. 
+Metadata: +------------------------+-----------------+---------------------+---------------------+ | Identifier | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | CDBcM7hSAmQuenmEnE8dXG | 60 | 1534005285514033760 | 2018-08-11 16:34:44 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+------------------------+-------------------------+------+ | Identifier | Dest | Verkey | Role | +------------------------+------------------------+-------------------------+------+ @@ -218,17 +218,17 @@ ledger nym did=VNHim6zQkiNEF1yPL6xVev verkey=~Xih8tdRvVQiudPEB3M2ypc +------------------------+------------------------+-------------------------+------+ did use V4SGRU86Z58d6TV7PBUe6f -Did "V4SGRU86Z58d6TV7PBUe6f" has been set as active +Did "V4SGRU86Z58d6TV7PBUe6f" has been set as active ledger nym did=YLs2LfEvHoSdS6euixMAe8 role=STEWARD verkey=~UEjom7C2wcysFHkHqXVtSo -Nym request has been sent to Ledger. -Metadata: +Nym request has been sent to Ledger. +Metadata: +------------------------+-----------------+---------------------+---------------------+ | From | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | V4SGRU86Z58d6TV7PBUe6f | 61 | 1534005285561517178 | 2018-08-11 16:34:46 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+-------------------------+---------+ | Did | Verkey | Role | +------------------------+-------------------------+---------+ @@ -236,14 +236,14 @@ ledger nym did=YLs2LfEvHoSdS6euixMAe8 role=STEWARD verkey=~UEjom7C2wcysFHkHqXVtS +------------------------+-------------------------+---------+ - ledger get-nym did=YLs2LfEvHoSdS6euixMAe8 -Following NYM has been received. -Metadata: +Following NYM has been received. 
+Metadata: +------------------------+-----------------+---------------------+---------------------+ | Identifier | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | V4SGRU86Z58d6TV7PBUe6f | 61 | 1534005286520083600 | 2018-08-11 16:34:46 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+------------------------+-------------------------+---------+ | Identifier | Dest | Verkey | Role | +------------------------+------------------------+-------------------------+---------+ @@ -251,17 +251,17 @@ ledger nym did=YLs2LfEvHoSdS6euixMAe8 role=STEWARD verkey=~UEjom7C2wcysFHkHqXVtS +------------------------+------------------------+-------------------------+---------+ did use V4SGRU86Z58d6TV7PBUe6f -Did "V4SGRU86Z58d6TV7PBUe6f" has been set as active +Did "V4SGRU86Z58d6TV7PBUe6f" has been set as active ledger nym did=YLs2LfEvHoSdS6euixMAe8 role= -Nym request has been sent to Ledger. -Metadata: +Nym request has been sent to Ledger. 
+Metadata: +------------------------+-----------------+---------------------+---------------------+ | From | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | V4SGRU86Z58d6TV7PBUe6f | 62 | 1534005286572843271 | 2018-08-11 16:34:47 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+------+ | Did | Role | +------------------------+------+ @@ -269,26 +269,26 @@ ledger nym did=YLs2LfEvHoSdS6euixMAe8 role= +------------------------+------+ did use YLs2LfEvHoSdS6euixMAe8 -Did "YLs2LfEvHoSdS6euixMAe8" has been set as active +Did "YLs2LfEvHoSdS6euixMAe8" has been set as active - ledger nym did=EAvxCBmoYzkojBtgX1SVh8 role=TRUST_ANCHOR verkey=~HXn8bHcAYuaDAsCdjdLxLc -Transaction has been rejected: None role cannot add TRUST_ANCHOR +Transaction has been rejected: None role cannot add TRUST_ANCHOR - ledger get-nym did=EAvxCBmoYzkojBtgX1SVh8 -NYM not found +NYM not found did use V4SGRU86Z58d6TV7PBUe6f -Did "V4SGRU86Z58d6TV7PBUe6f" has been set as active +Did "V4SGRU86Z58d6TV7PBUe6f" has been set as active ledger nym did=YLs2LfEvHoSdS6euixMAe8 role=STEWARD -Nym request has been sent to Ledger. -Metadata: +Nym request has been sent to Ledger. 
+Metadata: +------------------------+-----------------+---------------------+---------------------+ | From | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | V4SGRU86Z58d6TV7PBUe6f | 63 | 1534005288186710219 | 2018-08-11 16:34:54 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+---------+ | Did | Role | +------------------------+---------+ @@ -296,17 +296,17 @@ ledger nym did=YLs2LfEvHoSdS6euixMAe8 role=STEWARD +------------------------+---------+ did use YLs2LfEvHoSdS6euixMAe8 -Did "YLs2LfEvHoSdS6euixMAe8" has been set as active +Did "YLs2LfEvHoSdS6euixMAe8" has been set as active ledger nym did=EAvxCBmoYzkojBtgX1SVh8 role=TRUST_ANCHOR verkey=~HXn8bHcAYuaDAsCdjdLxLc -Nym request has been sent to Ledger. -Metadata: +Nym request has been sent to Ledger. +Metadata: +------------------------+-----------------+---------------------+---------------------+ | From | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | YLs2LfEvHoSdS6euixMAe8 | 64 | 1534005294913888379 | 2018-08-11 16:34:55 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+-------------------------+--------------+ | Did | Verkey | Role | +------------------------+-------------------------+--------------+ @@ -314,14 +314,14 @@ ledger nym did=EAvxCBmoYzkojBtgX1SVh8 role=TRUST_ANCHOR verkey=~HXn8bHcAYuaDAsCd +------------------------+-------------------------+--------------+ - ledger get-nym did=EAvxCBmoYzkojBtgX1SVh8 -Following NYM has been received. -Metadata: +Following NYM has been received. 
+Metadata: +------------------------+-----------------+---------------------+---------------------+ | Identifier | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | YLs2LfEvHoSdS6euixMAe8 | 64 | 1534005295813235748 | 2018-08-11 16:34:55 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+------------------------+-------------------------+--------------+ | Identifier | Dest | Verkey | Role | +------------------------+------------------------+-------------------------+--------------+ @@ -329,17 +329,17 @@ ledger nym did=EAvxCBmoYzkojBtgX1SVh8 role=TRUST_ANCHOR verkey=~HXn8bHcAYuaDAsCd +------------------------+------------------------+-------------------------+--------------+ did use V4SGRU86Z58d6TV7PBUe6f -Did "V4SGRU86Z58d6TV7PBUe6f" has been set as active +Did "V4SGRU86Z58d6TV7PBUe6f" has been set as active ledger nym did=AWwweaRGGCa4hfvW9WLmeb role=TRUST_ANCHOR verkey=~D8gYyv77VvdkXguu7mHKfR -Nym request has been sent to Ledger. -Metadata: +Nym request has been sent to Ledger. +Metadata: +------------------------+-----------------+---------------------+---------------------+ | From | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | V4SGRU86Z58d6TV7PBUe6f | 65 | 1534005295869893764 | 2018-08-11 16:34:56 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+-------------------------+--------------+ | Did | Verkey | Role | +------------------------+-------------------------+--------------+ @@ -347,14 +347,14 @@ ledger nym did=AWwweaRGGCa4hfvW9WLmeb role=TRUST_ANCHOR verkey=~D8gYyv77VvdkXguu +------------------------+-------------------------+--------------+ - ledger get-nym did=AWwweaRGGCa4hfvW9WLmeb -Following NYM has been received. 
-Metadata: +Following NYM has been received. +Metadata: +------------------------+-----------------+---------------------+---------------------+ | Identifier | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | V4SGRU86Z58d6TV7PBUe6f | 65 | 1534005296797922466 | 2018-08-11 16:34:56 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+------------------------+-------------------------+--------------+ | Identifier | Dest | Verkey | Role | +------------------------+------------------------+-------------------------+--------------+ @@ -362,17 +362,17 @@ ledger nym did=AWwweaRGGCa4hfvW9WLmeb role=TRUST_ANCHOR verkey=~D8gYyv77VvdkXguu +------------------------+------------------------+-------------------------+--------------+ did use AWwweaRGGCa4hfvW9WLmeb -Did "AWwweaRGGCa4hfvW9WLmeb" has been set as active +Did "AWwweaRGGCa4hfvW9WLmeb" has been set as active ledger nym did=FZp8ZoqiKrp9uuAujE38xA verkey=~2unRfj35pEJ9qRpWJyBAvz -Nym request has been sent to Ledger. -Metadata: +Nym request has been sent to Ledger. +Metadata: +------------------------+-----------------+---------------------+---------------------+ | From | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | AWwweaRGGCa4hfvW9WLmeb | 66 | 1534005296857985964 | 2018-08-11 16:34:57 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+-------------------------+------+ | Did | Verkey | Role | +------------------------+-------------------------+------+ @@ -380,14 +380,14 @@ ledger nym did=FZp8ZoqiKrp9uuAujE38xA verkey=~2unRfj35pEJ9qRpWJyBAvz +------------------------+-------------------------+------+ - ledger get-nym did=FZp8ZoqiKrp9uuAujE38xA -Following NYM has been received. 
-Metadata: +Following NYM has been received. +Metadata: +------------------------+-----------------+---------------------+---------------------+ | Identifier | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | AWwweaRGGCa4hfvW9WLmeb | 66 | 1534005297776583595 | 2018-08-11 16:34:57 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+------------------------+-------------------------+------+ | Identifier | Dest | Verkey | Role | +------------------------+------------------------+-------------------------+------+ @@ -395,17 +395,17 @@ ledger nym did=FZp8ZoqiKrp9uuAujE38xA verkey=~2unRfj35pEJ9qRpWJyBAvz +------------------------+------------------------+-------------------------+------+ did use V4SGRU86Z58d6TV7PBUe6f -Did "V4SGRU86Z58d6TV7PBUe6f" has been set as active +Did "V4SGRU86Z58d6TV7PBUe6f" has been set as active ledger nym did=AWwweaRGGCa4hfvW9WLmeb role= -Nym request has been sent to Ledger. -Metadata: +Nym request has been sent to Ledger. 
+Metadata: +------------------------+-----------------+---------------------+---------------------+ | From | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | V4SGRU86Z58d6TV7PBUe6f | 67 | 1534005297926545126 | 2018-08-11 16:34:58 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+------+ | Did | Role | +------------------------+------+ @@ -413,26 +413,26 @@ ledger nym did=AWwweaRGGCa4hfvW9WLmeb role= +------------------------+------+ did use AWwweaRGGCa4hfvW9WLmeb -Did "AWwweaRGGCa4hfvW9WLmeb" has been set as active +Did "AWwweaRGGCa4hfvW9WLmeb" has been set as active - ledger nym did=NhF3hPZyV715sfmvsjhj1h verkey=~EjjPJGQcnFGhuyS35bVMGA -Transaction has been rejected: None role cannot add None role +Transaction has been rejected: None role cannot add None role - ledger get-nym did=NhF3hPZyV715sfmvsjhj1h -NYM not found +NYM not found did use V4SGRU86Z58d6TV7PBUe6f -Did "V4SGRU86Z58d6TV7PBUe6f" has been set as active +Did "V4SGRU86Z58d6TV7PBUe6f" has been set as active ledger nym did=AWwweaRGGCa4hfvW9WLmeb role=TRUST_ANCHOR -Nym request has been sent to Ledger. -Metadata: +Nym request has been sent to Ledger. 
+Metadata: +------------------------+-----------------+---------------------+---------------------+ | From | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | V4SGRU86Z58d6TV7PBUe6f | 68 | 1534005299489783217 | 2018-08-11 16:35:00 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+--------------+ | Did | Role | +------------------------+--------------+ @@ -440,17 +440,17 @@ ledger nym did=AWwweaRGGCa4hfvW9WLmeb role=TRUST_ANCHOR +------------------------+--------------+ did use AWwweaRGGCa4hfvW9WLmeb -Did "AWwweaRGGCa4hfvW9WLmeb" has been set as active +Did "AWwweaRGGCa4hfvW9WLmeb" has been set as active ledger nym did=NhF3hPZyV715sfmvsjhj1h verkey=~EjjPJGQcnFGhuyS35bVMGA -Nym request has been sent to Ledger. -Metadata: +Nym request has been sent to Ledger. +Metadata: +------------------------+-----------------+---------------------+---------------------+ | From | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | AWwweaRGGCa4hfvW9WLmeb | 69 | 1534005300793162812 | 2018-08-11 16:35:01 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+-------------------------+------+ | Did | Verkey | Role | +------------------------+-------------------------+------+ @@ -458,14 +458,14 @@ ledger nym did=NhF3hPZyV715sfmvsjhj1h verkey=~EjjPJGQcnFGhuyS35bVMGA +------------------------+-------------------------+------+ - ledger get-nym did=NhF3hPZyV715sfmvsjhj1h -Following NYM has been received. -Metadata: +Following NYM has been received. 
+Metadata: +------------------------+-----------------+---------------------+---------------------+ | Identifier | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | AWwweaRGGCa4hfvW9WLmeb | 69 | 1534005301766010217 | 2018-08-11 16:35:01 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+------------------------+-------------------------+------+ | Identifier | Dest | Verkey | Role | +------------------------+------------------------+-------------------------+------+ @@ -473,17 +473,17 @@ ledger nym did=NhF3hPZyV715sfmvsjhj1h verkey=~EjjPJGQcnFGhuyS35bVMGA +------------------------+------------------------+-------------------------+------+ did use V4SGRU86Z58d6TV7PBUe6f -Did "V4SGRU86Z58d6TV7PBUe6f" has been set as active +Did "V4SGRU86Z58d6TV7PBUe6f" has been set as active ledger nym did=6nnZTxQ18EkVEjVV6hBMeT verkey=~2TBdyJLgAhkbogxjpUfPJD -Nym request has been sent to Ledger. -Metadata: +Nym request has been sent to Ledger. +Metadata: +------------------------+-----------------+---------------------+---------------------+ | From | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | V4SGRU86Z58d6TV7PBUe6f | 70 | 1534005301825509656 | 2018-08-11 16:35:02 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+-------------------------+------+ | Did | Verkey | Role | +------------------------+-------------------------+------+ @@ -491,14 +491,14 @@ ledger nym did=6nnZTxQ18EkVEjVV6hBMeT verkey=~2TBdyJLgAhkbogxjpUfPJD +------------------------+-------------------------+------+ - ledger get-nym did=6nnZTxQ18EkVEjVV6hBMeT -Following NYM has been received. -Metadata: +Following NYM has been received. 
+Metadata: +------------------------+-----------------+---------------------+---------------------+ | Identifier | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | V4SGRU86Z58d6TV7PBUe6f | 70 | 1534005302769665075 | 2018-08-11 16:35:02 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+------------------------+-------------------------+------+ | Identifier | Dest | Verkey | Role | +------------------------+------------------------+-------------------------+------+ @@ -506,17 +506,17 @@ ledger nym did=6nnZTxQ18EkVEjVV6hBMeT verkey=~2TBdyJLgAhkbogxjpUfPJD +------------------------+------------------------+-------------------------+------+ did use QQMQmb1mrBRJR3vx6GP4kg -Did "QQMQmb1mrBRJR3vx6GP4kg" has been set as active +Did "QQMQmb1mrBRJR3vx6GP4kg" has been set as active ledger nym did=QQMQmb1mrBRJR3vx6GP4kg role= -Nym request has been sent to Ledger. -Metadata: +Nym request has been sent to Ledger. 
+Metadata: +------------------------+-----------------+---------------------+---------------------+ | From | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | QQMQmb1mrBRJR3vx6GP4kg | 71 | 1534005302843119464 | 2018-08-11 16:35:03 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+------+ | Did | Role | +------------------------+------+ @@ -524,26 +524,26 @@ ledger nym did=QQMQmb1mrBRJR3vx6GP4kg role= +------------------------+------+ - ledger nym did=X25X1vKgEkwRDNgj1zJ1Xg verkey=~5TR49msrun2AgCKQZsstRX -Transaction has been rejected: None role cannot add None role +Transaction has been rejected: None role cannot add None role - ledger get-nym did=X25X1vKgEkwRDNgj1zJ1Xg -NYM not found +NYM not found did use V4SGRU86Z58d6TV7PBUe6f -Did "V4SGRU86Z58d6TV7PBUe6f" has been set as active +Did "V4SGRU86Z58d6TV7PBUe6f" has been set as active did new seed=RandomAttribOwner000000000000000 -Did "QbvWTyhmZmDF8Ms82wC2JD" has been created with "~55hsLZb8uixk46gRPPZHF8" verkey +Did "QbvWTyhmZmDF8Ms82wC2JD" has been created with "~55hsLZb8uixk46gRPPZHF8" verkey ledger nym did=QbvWTyhmZmDF8Ms82wC2JD verkey=~55hsLZb8uixk46gRPPZHF8 role=TRUSTEE -Nym request has been sent to Ledger. -Metadata: +Nym request has been sent to Ledger. 
+Metadata: +------------------------+-----------------+---------------------+---------------------+ | From | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | V4SGRU86Z58d6TV7PBUe6f | 72 | 1534005304546445737 | 2018-08-11 16:35:05 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+-------------------------+---------+ | Did | Verkey | Role | +------------------------+-------------------------+---------+ @@ -551,17 +551,17 @@ ledger nym did=QbvWTyhmZmDF8Ms82wC2JD verkey=~55hsLZb8uixk46gRPPZHF8 role=TRUSTE +------------------------+-------------------------+---------+ did new seed=RandomUserForRotateKey0000000000 -Did "484PvcK1gUqwWEWCkumDzA" has been created with "~VNugjiSF2G22QKfYvESPf3" verkey +;32mDid "484PvcK1gUqwWEWCkumDzA" has been created with "~VNugjiSF2G22QKfYvESPf3" verkey ledger nym did=484PvcK1gUqwWEWCkumDzA verkey=~VNugjiSF2G22QKfYvESPf3 role=TRUSTEE -Nym request has been sent to Ledger. -Metadata: +Nym request has been sent to Ledger. 
+Metadata: +------------------------+-----------------+---------------------+---------------------+ | From | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | V4SGRU86Z58d6TV7PBUe6f | 73 | 1534005305816417089 | 2018-08-11 16:35:06 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+-------------------------+---------+ | Did | Verkey | Role | +------------------------+-------------------------+---------+ @@ -569,29 +569,29 @@ ledger nym did=484PvcK1gUqwWEWCkumDzA verkey=~VNugjiSF2G22QKfYvESPf3 role=TRUSTE +------------------------+-------------------------+---------+ did use QbvWTyhmZmDF8Ms82wC2JD -Did "QbvWTyhmZmDF8Ms82wC2JD" has been set as active +Did "QbvWTyhmZmDF8Ms82wC2JD" has been set as active did rotate-key seed=RandomUserForRotateKey0NewKey000 -Verkey for did "QbvWTyhmZmDF8Ms82wC2JD" has been updated. New verkey: "B9hLAB2pBCvkVNxwyRSBfmCsyTJmR8vVNiYmh233niRT" +Verkey for did "QbvWTyhmZmDF8Ms82wC2JD" has been updated. New verkey: "B9hLAB2pBCvkVNxwyRSBfmCsyTJmR8vVNiYmh233niRT" did use 484PvcK1gUqwWEWCkumDzA -Did "484PvcK1gUqwWEWCkumDzA" has been set as active +Did "484PvcK1gUqwWEWCkumDzA" has been set as active - ledger nym did=QbvWTyhmZmDF8Ms82wC2JD verkey=~55hsLZb8uixk46gRPPZHF8 -Transaction has been rejected: TRUSTEE cannot update verkey +Transaction has been rejected: TRUSTEE cannot update verkey did use QbvWTyhmZmDF8Ms82wC2JD -Did "QbvWTyhmZmDF8Ms82wC2JD" has been set as active +Did "QbvWTyhmZmDF8Ms82wC2JD" has been set as active ledger attrib did=QbvWTyhmZmDF8Ms82wC2JD raw={"endpoint":{"ha":"127.0.0.1:5555"}} -Attrib request has been sent to Ledger. -Metadata: +Attrib request has been sent to Ledger. 
+Metadata: +------------------------+-----------------+---------------------+---------------------+ | From | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | QbvWTyhmZmDF8Ms82wC2JD | 75 | 1534005308496021038 | 2018-08-11 16:35:09 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +--------------------------------------+ | Raw value | +--------------------------------------+ @@ -599,14 +599,14 @@ ledger attrib did=QbvWTyhmZmDF8Ms82wC2JD raw={"endpoint":{"ha":"127.0.0.1:5555"} +--------------------------------------+ ledger attrib did=QbvWTyhmZmDF8Ms82wC2JD raw={"endpoint":{"ha":"127.0.0.1:6666"}} -Attrib request has been sent to Ledger. -Metadata: +Attrib request has been sent to Ledger. +Metadata: +------------------------+-----------------+---------------------+---------------------+ | From | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | QbvWTyhmZmDF8Ms82wC2JD | 76 | 1534005309846344843 | 2018-08-11 16:35:10 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +--------------------------------------+ | Raw value | +--------------------------------------+ @@ -614,26 +614,26 @@ ledger attrib did=QbvWTyhmZmDF8Ms82wC2JD raw={"endpoint":{"ha":"127.0.0.1:6666"} +--------------------------------------+ did use QbvWTyhmZmDF8Ms82wC2JD -Did "QbvWTyhmZmDF8Ms82wC2JD" has been set as active +Did "QbvWTyhmZmDF8Ms82wC2JD" has been set as active - ledger attrib did=484PvcK1gUqwWEWCkumDzA raw={"endpoint":{"ha":"127.0.0.1:5555"}} -Transaction has been rejected: Only identity owner/guardian can add attribute for that identity +Transaction has been rejected: Only identity owner/guardian can add attribute for that identity did use 484PvcK1gUqwWEWCkumDzA -Did "484PvcK1gUqwWEWCkumDzA" has been set 
as active +Did "484PvcK1gUqwWEWCkumDzA" has been set as active - ledger attrib did=QbvWTyhmZmDF8Ms82wC2JD raw={"endpoint":{"ha":"127.0.0.1:5555"}} -Transaction has been rejected: Only identity owner/guardian can add attribute for that identity +Transaction has been rejected: Only identity owner/guardian can add attribute for that identity ledger schema name=TrusteeSchema version=1.0 attr_names=name,age -Schema request has been sent to Ledger. -Metadata: +Schema request has been sent to Ledger. +Metadata: +------------------------+-----------------+---------------------+---------------------+ | From | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | 484PvcK1gUqwWEWCkumDzA | 77 | 1534005312536395489 | 2018-08-11 16:35:13 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +---------------+---------+--------------+ | Name | Version | Attributes | +---------------+---------+--------------+ @@ -641,20 +641,8 @@ ledger schema name=TrusteeSchema version=1.0 attr_names=name,age +---------------+---------+--------------+ ledger cred-def schema_id=1 signature_type=CL primary={"n":"1","s":"2","rms":"3","r":{"age":"4","name":"5"},"rctxt":"6","z":"7"} tag=10 -NodeConfig request has been sent to Ledger. 
-Metadata: -+------------------------+-----------------+---------------------+---------------------+ -| From | Sequence Number | Request ID | Transaction time | -+------------------------+-----------------+---------------------+---------------------+ -| 484PvcK1gUqwWEWCkumDzA | 78 | 1534005313901722812 | 2018-08-11 16:35:14 | -+------------------------+-----------------+---------------------+---------------------+ -Data: -+----------------------------------------------------------------------------+ -| Primary Key | -+----------------------------------------------------------------------------+ -| {n:"1",r:{"age":"4","master_secret":"3","name":"5"},rctxt:"6",s:"2",z:"7"} | -+----------------------------------------------------------------------------+ +Transaction has been rejected: Mentioned seqNo (1) isn't seqNo of the schema. -Pool "AS-0301-trustee" has been disconnected -Wallet "AS-03-wallet-trustee" has been closed -Goodbye... +Pool "AS-0301-trustee" has been disconnected +Wallet "AS-03-wallet-trustee" has been closed +Goodbye... diff --git a/acceptance/indy-cli-batches/expected/AS-04-02-after-upgrade.expected b/acceptance/indy-cli-batches/expected/AS-04-02-after-upgrade.expected index d8e7f26c7..f7f0f0441 100644 --- a/acceptance/indy-cli-batches/expected/AS-04-02-after-upgrade.expected +++ b/acceptance/indy-cli-batches/expected/AS-04-02-after-upgrade.expected @@ -1,21 +1,21 @@ pool connect AS-pool -Pool "AS-pool" has been connected +Pool "AS-pool" has been connected wallet open AS-04-wallet key=testkey -Wallet "AS-04-wallet" has been opened +Wallet "AS-04-wallet" has been opened did use V4SGRU86Z58d6TV7PBUe6f -Did "V4SGRU86Z58d6TV7PBUe6f" has been set as active +Did "V4SGRU86Z58d6TV7PBUe6f" has been set as active ledger nym did=542MVr22zcHbVyGzaXmbT2 -Nym request has been sent to Ledger. -Metadata: +Nym request has been sent to Ledger. 
+Metadata: +---------------------+---------------------+ | Request ID | Transaction time | +---------------------+---------------------+ | 1527789271458474578 | 2018-05-31 17:54:31 | +---------------------+---------------------+ -Data: +Data: +------------------------+------+ | Did | Role | +------------------------+------+ @@ -23,14 +23,14 @@ ledger nym did=542MVr22zcHbVyGzaXmbT2 +------------------------+------+ ledger get-schema did=V4SGRU86Z58d6TV7PBUe6f name=Degree version=1.0 -Following Schema has been received. -Metadata: +Following Schema has been received. +Metadata: +------------------------+-----------------+---------------------+---------------------+ | Did | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | V4SGRU86Z58d6TV7PBUe6f | 16 | 1527789271710847095 | 2018-05-31 17:52:57 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +--------+---------+----------------------------------------------------------------------------+ | Name | Version | Attributes | +--------+---------+----------------------------------------------------------------------------+ @@ -38,14 +38,14 @@ ledger get-schema did=V4SGRU86Z58d6TV7PBUe6f name=Degree version=1.0 +--------+---------+----------------------------------------------------------------------------+ ledger get-cred-def schema_id=1 signature_type=CL origin=V4SGRU86Z58d6TV7PBUe6f tag="1" -Following Credential Definition has been received. -Metadata: +Following Credential Definition has been received. 
+Metadata: +------------------------+-----------------+---------------------+---------------------+ | Identifier | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | V4SGRU86Z58d6TV7PBUe6f | 17 | 1527789271740916519 | 2018-05-31 17:52:57 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +----------------------------------------------------------------+ | Primary Key | +----------------------------------------------------------------+ @@ -53,14 +53,14 @@ ledger get-cred-def schema_id=1 signature_type=CL origin=V4SGRU86Z58d6TV7PBUe6f +----------------------------------------------------------------+ - ledger get-nym did=542MVr22zcHbVyGzaXmbT5 -Following NYM has been received. -Metadata: +Following NYM has been received. +Metadata: +-----------------+---------------------+---------------------+ | Sequence Number | Request ID | Transaction time | +-----------------+---------------------+---------------------+ | 20 | 1527789271777541476 | 2018-05-31 17:52:58 | +-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+------------------------+------+ | Identifier | Dest | Role | +------------------------+------------------------+------+ @@ -68,14 +68,14 @@ ledger get-cred-def schema_id=1 signature_type=CL origin=V4SGRU86Z58d6TV7PBUe6f +------------------------+------------------------+------+ - ledger get-nym did=542MVr22zcHbVyGzaXmbT2 -Following NYM has been received. -Metadata: +Following NYM has been received. 
+Metadata: +-----------------+---------------------+---------------------+ | Sequence Number | Request ID | Transaction time | +-----------------+---------------------+---------------------+ | 23 | 1527789271796959032 | 2018-05-31 17:54:31 | +-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+------------------------+------+ | Identifier | Dest | Role | +------------------------+------------------------+------+ @@ -83,9 +83,9 @@ ledger get-cred-def schema_id=1 signature_type=CL origin=V4SGRU86Z58d6TV7PBUe6f +------------------------+------------------------+------+ wallet close -Wallet "AS-04-wallet" has been closed +Wallet "AS-04-wallet" has been closed pool disconnect -Pool "AS-pool" has been disconnected +Pool "AS-pool" has been disconnected -Goodbye... +Goodbye... diff --git a/acceptance/indy-cli-batches/expected/AS-04-02-before-upgrade.expected b/acceptance/indy-cli-batches/expected/AS-04-02-before-upgrade.expected index 08dd8810a..58857289d 100644 --- a/acceptance/indy-cli-batches/expected/AS-04-02-before-upgrade.expected +++ b/acceptance/indy-cli-batches/expected/AS-04-02-before-upgrade.expected @@ -1,31 +1,31 @@ - pool create AS-pool gen_txn_file=./pool_transactions_genesis -Pool config "AS-pool" has been created +Pool config "AS-pool" has been created pool connect AS-pool -Pool "AS-pool" has been connected +Pool "AS-pool" has been connected wallet create AS-04-wallet key=testkey storage_config={"pool":"AS-pool"} -Wallet "AS-04-wallet" has been created +Wallet "AS-04-wallet" has been created wallet open AS-04-wallet key=testkey -Wallet "AS-04-wallet" has been opened +Wallet "AS-04-wallet" has been opened did new seed=000000000000000000000000Trustee1 metadata="default trustee" -Did "V4SGRU86Z58d6TV7PBUe6f" has been created with "~CoRER63DVYnWZtK8uAzNbx" verkey -Metadata has been saved for DID "V4SGRU86Z58d6TV7PBUe6f" +Did "V4SGRU86Z58d6TV7PBUe6f" has been created with "~CoRER63DVYnWZtK8uAzNbx" verkey 
+Metadata has been saved for DID "V4SGRU86Z58d6TV7PBUe6f" did use V4SGRU86Z58d6TV7PBUe6f -Did "V4SGRU86Z58d6TV7PBUe6f" has been set as active +Did "V4SGRU86Z58d6TV7PBUe6f" has been set as active ledger schema name=Degree version=1.0 attr_names=undergrad,last_name,first_name,birth_date,postgrad,expiry_date -Schema request has been sent to Ledger. -Metadata: +Schema request has been sent to Ledger. +Metadata: +------------------------+-----------------+---------------------+---------------------+ | Identifier | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | V4SGRU86Z58d6TV7PBUe6f | 16 | 1527789177227318572 | 2018-05-31 17:52:57 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +--------+---------+----------------------------------------------------------------------------+ | Name | Version | Attributes | +--------+---------+----------------------------------------------------------------------------+ @@ -33,14 +33,14 @@ ledger schema name=Degree version=1.0 attr_names=undergrad,last_name,first_name, +--------+---------+----------------------------------------------------------------------------+ ledger cred-def schema_id=1 signature_type=CL primary={"n":"1","s":"2","rms":"3","r":{"age":"4","name":"5"},"rctxt":"6","z":"7"} -NodeConfig request has been sent to Ledger. -Metadata: +NodeConfig request has been sent to Ledger. 
+Metadata: +------------------------+-----------------+---------------------+---------------------+ | Identifier | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | V4SGRU86Z58d6TV7PBUe6f | 17 | 1527789177477364284 | 2018-05-31 17:52:57 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +----------------------------------------------------------------+ | Primary Key | +----------------------------------------------------------------+ @@ -48,14 +48,14 @@ ledger cred-def schema_id=1 signature_type=CL primary={"n":"1","s":"2","rms":"3" +----------------------------------------------------------------+ ledger nym did=542MVr22zcHbVyGzaXmbT3 -Nym request has been sent to Ledger. -Metadata: +Nym request has been sent to Ledger. +Metadata: +---------------------+---------------------+ | Request ID | Transaction time | +---------------------+---------------------+ | 1527789177741209163 | 2018-05-31 17:52:57 | +---------------------+---------------------+ -Data: +Data: +------------------------+------+ | Did | Role | +------------------------+------+ @@ -63,14 +63,14 @@ ledger nym did=542MVr22zcHbVyGzaXmbT3 +------------------------+------+ ledger nym did=542MVr22zcHbVyGzaXmbT4 -Nym request has been sent to Ledger. -Metadata: +Nym request has been sent to Ledger. +Metadata: +---------------------+---------------------+ | Request ID | Transaction time | +---------------------+---------------------+ | 1527789178005493135 | 2018-05-31 17:52:58 | +---------------------+---------------------+ -Data: +Data: +------------------------+------+ | Did | Role | +------------------------+------+ @@ -78,14 +78,14 @@ ledger nym did=542MVr22zcHbVyGzaXmbT4 +------------------------+------+ ledger nym did=542MVr22zcHbVyGzaXmbT5 -Nym request has been sent to Ledger. -Metadata: +Nym request has been sent to Ledger. 
+Metadata: +---------------------+---------------------+ | Request ID | Transaction time | +---------------------+---------------------+ | 1527789178261981181 | 2018-05-31 17:52:58 | +---------------------+---------------------+ -Data: +Data: +------------------------+------+ | Did | Role | +------------------------+------+ @@ -93,14 +93,14 @@ ledger nym did=542MVr22zcHbVyGzaXmbT5 +------------------------+------+ ledger nym did=542MVr22zcHbVyGzaXmbT6 -Nym request has been sent to Ledger. -Metadata: +Nym request has been sent to Ledger. +Metadata: +---------------------+---------------------+ | Request ID | Transaction time | +---------------------+---------------------+ | 1527789178519129509 | 2018-05-31 17:52:58 | +---------------------+---------------------+ -Data: +Data: +------------------------+------+ | Did | Role | +------------------------+------+ @@ -108,14 +108,14 @@ ledger nym did=542MVr22zcHbVyGzaXmbT6 +------------------------+------+ ledger nym did=542MVr22zcHbVyGzaXmbT7 -Nym request has been sent to Ledger. -Metadata: +Nym request has been sent to Ledger. +Metadata: +---------------------+---------------------+ | Request ID | Transaction time | +---------------------+---------------------+ | 1527789178770187542 | 2018-05-31 17:52:58 | +---------------------+---------------------+ -Data: +Data: +------------------------+------+ | Did | Role | +------------------------+------+ @@ -123,14 +123,14 @@ ledger nym did=542MVr22zcHbVyGzaXmbT7 +------------------------+------+ ledger get-schema did=V4SGRU86Z58d6TV7PBUe6f name=Degree version=1.0 -Following Schema has been received. -Metadata: +Following Schema has been received. 
+Metadata: +------------------------+-----------------+---------------------+---------------------+ | Did | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | V4SGRU86Z58d6TV7PBUe6f | 16 | 1527789179035167928 | 2018-05-31 17:52:57 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +--------+---------+----------------------------------------------------------------------------+ | Name | Version | Attributes | +--------+---------+----------------------------------------------------------------------------+ @@ -138,14 +138,14 @@ ledger get-schema did=V4SGRU86Z58d6TV7PBUe6f name=Degree version=1.0 +--------+---------+----------------------------------------------------------------------------+ ledger get-cred-def schema_id=1 signature_type=CL origin=V4SGRU86Z58d6TV7PBUe6f tag="1" -Following Credential Definition has been received. -Metadata: +Following Credential Definition has been received. +Metadata: +------------------------+-----------------+---------------------+---------------------+ | Identifier | Sequence Number | Request ID | Transaction time | +------------------------+-----------------+---------------------+---------------------+ | V4SGRU86Z58d6TV7PBUe6f | 17 | 1527789179064660983 | 2018-05-31 17:52:57 | +------------------------+-----------------+---------------------+---------------------+ -Data: +Data: +----------------------------------------------------------------+ | Primary Key | +----------------------------------------------------------------+ @@ -153,14 +153,14 @@ ledger get-cred-def schema_id=1 signature_type=CL origin=V4SGRU86Z58d6TV7PBUe6f +----------------------------------------------------------------+ - ledger get-nym did=542MVr22zcHbVyGzaXmbT5 -Following NYM has been received. -Metadata: +Following NYM has been received. 
+Metadata: +-----------------+---------------------+---------------------+ | Sequence Number | Request ID | Transaction time | +-----------------+---------------------+---------------------+ | 20 | 1527789179091290867 | 2018-05-31 17:52:58 | +-----------------+---------------------+---------------------+ -Data: +Data: +------------------------+------------------------+------+ | Identifier | Dest | Role | +------------------------+------------------------+------+ @@ -168,9 +168,9 @@ ledger get-cred-def schema_id=1 signature_type=CL origin=V4SGRU86Z58d6TV7PBUe6f +------------------------+------------------------+------+ wallet close -Wallet "AS-04-wallet" has been closed +Wallet "AS-04-wallet" has been closed pool disconnect -Pool "AS-pool" has been disconnected +Pool "AS-pool" has been disconnected -Goodbye... +Goodbye... diff --git a/acceptance/tests/test_ledger.py b/acceptance/tests/test_ledger.py new file mode 100644 index 000000000..d85ad5b8b --- /dev/null +++ b/acceptance/tests/test_ledger.py @@ -0,0 +1,295 @@ +from utils import pool_helper, wallet_helper, nym_helper, get_nym_helper, attrib_helper, get_attrib_helper,\ + schema_helper, get_schema_helper, cred_def_helper, get_cred_def_helper, revoc_reg_def_helper,\ + get_revoc_reg_def_helper, revoc_reg_entry_helper, get_revoc_reg_helper, get_revoc_reg_delta_helper,\ + random_did_and_json, random_seed_and_json +import pytest +import json +from indy import pool, did, ledger, IndyError +import hashlib +import time + + +@pytest.mark.parametrize('writer_role', ['TRUSTEE', 'STEWARD', 'TRUST_ANCHOR']) +@pytest.mark.parametrize('reader_role', ['TRUSTEE', 'STEWARD', 'TRUST_ANCHOR', None]) +@pytest.mark.asyncio +async def test_send_and_get_nym_positive(writer_role, reader_role): + await pool.set_protocol_version(2) + pool_handle, _ = await pool_helper() + wallet_handle, _, _ = await wallet_helper() + target_did, target_vk = await did.create_and_store_my_did(wallet_handle, '{}') + writer_did, writer_vk = await 
did.create_and_store_my_did(wallet_handle, '{}') + reader_did, reader_vk = await did.create_and_store_my_did(wallet_handle, '{}') + trustee_did, trustee_vk = await did.create_and_store_my_did(wallet_handle, json.dumps( + {'seed': '000000000000000000000000Trustee1'})) + # Trustee adds NYM writer + await nym_helper(pool_handle, wallet_handle, trustee_did, writer_did, writer_vk, None, writer_role) + # Trustee adds NYM reader + await nym_helper(pool_handle, wallet_handle, trustee_did, reader_did, reader_vk, None, reader_role) + # Writer sends NYM + res1 = json.loads(await nym_helper(pool_handle, wallet_handle, writer_did, target_did)) + time.sleep(1) + # Reader gets NYM + res2 = json.loads(await get_nym_helper(pool_handle, wallet_handle, target_did, target_did)) + + assert res1['op'] == 'REPLY' + assert res2['result']['seqNo'] is not None + + print(res1) + print(res2) + + +@pytest.mark.parametrize('submitter_seed', ['{}', + random_did_and_json()[1], + random_seed_and_json()[1], + ]) +@pytest.mark.asyncio +async def test_send_and_get_nym_negative(submitter_seed): + await pool.set_protocol_version(2) + pool_handle, _ = await pool_helper() + wallet_handle, _, _ = await wallet_helper() + target_did, target_vk = await did.create_and_store_my_did(wallet_handle, '{}') + submitter_did, submitter_vk = await did.create_and_store_my_did(wallet_handle, submitter_seed) + trustee_did, trustee_vk = await did.create_and_store_my_did(wallet_handle, json.dumps( + {'seed': '000000000000000000000000Trustee1'})) + # Trustee adds submitter + await nym_helper(pool_handle, wallet_handle, trustee_did, submitter_did, submitter_vk) + # None role submitter tries to send NYM (rejected) and gets no data about this NYM from ledger + res1 = json.loads(await nym_helper(pool_handle, wallet_handle, submitter_did, target_did)) + res2 = json.loads(await get_nym_helper(pool_handle, wallet_handle, submitter_did, target_did)) + + assert res1['op'] == 'REJECT' + assert res2['result']['seqNo'] is None + + 
print(res1) + print(res2) + + +@pytest.mark.parametrize('xhash, raw, enc', [ + (hashlib.sha256().hexdigest(), None, None), + (None, json.dumps({'key': 'value'}), None), + (None, None, 'ENCRYPTED_STRING') +]) +@pytest.mark.asyncio +async def test_send_and_get_attrib_positive(xhash, raw, enc): + await pool.set_protocol_version(2) + pool_handle, _ = await pool_helper() + wallet_handle, _, _ = await wallet_helper() + target_did, target_vk = await did.create_and_store_my_did(wallet_handle, '{}') + submitter_did, submitter_vk = await did.create_and_store_my_did(wallet_handle, json.dumps( + {'seed': '000000000000000000000000Trustee1'})) + await nym_helper(pool_handle, wallet_handle, submitter_did, target_did, target_vk) + res1 = json.loads(await attrib_helper(pool_handle, wallet_handle, target_did, target_did, xhash, raw, enc)) + res2 = json.loads(await get_attrib_helper(pool_handle, wallet_handle, target_did, target_did, xhash, raw, enc)) + + assert res1['op'] == 'REPLY' + assert res2['op'] == 'REPLY' + + print(res1) + print(res2) + + +@pytest.mark.parametrize('xhash, raw, enc, error', [ + (None, None, None, IndyError), + (hashlib.sha256().hexdigest(), json.dumps({'key': 'value'}), None, None), + (None, json.dumps({'key': 'value'}), 'ENCRYPTED_STRING', None), + (hashlib.sha256().hexdigest(), None, 'ENCRYPTED_STRING', None), + (hashlib.sha256().hexdigest(), json.dumps({'key': 'value'}), 'ENCRYPTED_STRING', None) +]) +@pytest.mark.asyncio +async def test_send_and_get_attrib_negative(xhash, raw, enc, error): + await pool.set_protocol_version(2) + pool_handle, _ = await pool_helper() + wallet_handle, _, _ = await wallet_helper() + target_did, target_vk = await did.create_and_store_my_did(wallet_handle, '{}') + submitter_did, submitter_vk = await did.create_and_store_my_did(wallet_handle, json.dumps( + {'seed': '000000000000000000000000Trustee1'})) + await nym_helper(pool_handle, wallet_handle, submitter_did, target_did, target_vk) + if error: + with pytest.raises(error): + 
await attrib_helper(pool_handle, wallet_handle, target_did, target_did, xhash, raw, enc) + await get_attrib_helper(pool_handle, wallet_handle, target_did, target_did, xhash, raw, enc) + else: + res1 = json.loads(await attrib_helper(pool_handle, wallet_handle, target_did, target_did, xhash, raw, enc)) + res2 = json.loads(await get_attrib_helper(pool_handle, wallet_handle, target_did, target_did, xhash, raw, enc)) + + assert res1['op'] == 'REQNACK' + assert res2['op'] == 'REQNACK' + + print(res1) + print(res2) + + +@pytest.mark.parametrize('writer_role', ['TRUSTEE', 'STEWARD', 'TRUST_ANCHOR']) +@pytest.mark.parametrize('reader_role', ['TRUSTEE', 'STEWARD', 'TRUST_ANCHOR', None]) +@pytest.mark.asyncio +async def test_send_and_get_schema_positive(writer_role, reader_role): + await pool.set_protocol_version(2) + pool_handle, _ = await pool_helper() + wallet_handle, _, _ = await wallet_helper() + writer_did, writer_vk = await did.create_and_store_my_did(wallet_handle, '{}') + reader_did, reader_vk = await did.create_and_store_my_did(wallet_handle, '{}') + trustee_did, trustee_vk = await did.create_and_store_my_did(wallet_handle, json.dumps( + {'seed': '000000000000000000000000Trustee1'})) + # Trustee adds SCHEMA writer + await nym_helper(pool_handle, wallet_handle, trustee_did, writer_did, writer_vk, None, writer_role) + # Trustee adds SCHEMA reader + await nym_helper(pool_handle, wallet_handle, trustee_did, reader_did, reader_vk, None, reader_role) + # Writer sends SCHEMA + schema_id, res = await schema_helper(pool_handle, wallet_handle, writer_did, + 'schema1', '1.0', json.dumps(["age", "sex", "height", "name"])) + res1 = json.loads(res) + # Reader gets SCHEMA + res2 = json.loads(await get_schema_helper(pool_handle, wallet_handle, reader_did, schema_id)) + + assert res1['op'] == 'REPLY' + assert res2['op'] == 'REPLY' + + print(res1) + print(res2) + + +@pytest.mark.skip('IS-932') +@pytest.mark.asyncio +async def test_send_and_get_schema_negative(): + await 
pool.set_protocol_version(2) + pool_handle, _ = await pool_helper() + wallet_handle, _, _ = await wallet_helper() + trustee_did, trustee_vk = await did.create_and_store_my_did(wallet_handle, json.dumps( + {'seed': '000000000000000000000000Trustee1'})) + res = await get_schema_helper(pool_handle, wallet_handle, trustee_did, + '7kqbG8zcdAMc9Q6SMU4xZy:2:schema1:1.0') + res_json = json.loads(res) + schema_id, schema_json = await ledger.parse_get_schema_response(res) + + assert res_json + print(schema_id, schema_json) + + +@pytest.mark.parametrize('writer_role', ['TRUSTEE', 'STEWARD', 'TRUST_ANCHOR']) +@pytest.mark.parametrize('reader_role', ['TRUSTEE', 'STEWARD', 'TRUST_ANCHOR', None]) +@pytest.mark.asyncio +async def test_send_and_get_cred_def_positive(writer_role, reader_role): + await pool.set_protocol_version(2) + pool_handle, _ = await pool_helper() + wallet_handle, _, _ = await wallet_helper() + writer_did, writer_vk = await did.create_and_store_my_did(wallet_handle, '{}') + reader_did, reader_vk = await did.create_and_store_my_did(wallet_handle, '{}') + trustee_did, trustee_vk = await did.create_and_store_my_did(wallet_handle, json.dumps( + {'seed': '000000000000000000000000Trustee1'})) + # Trustee adds CRED_DEF writer + await nym_helper(pool_handle, wallet_handle, trustee_did, writer_did, writer_vk, None, writer_role) + # Trustee adds CRED_DEF reader + await nym_helper(pool_handle, wallet_handle, trustee_did, reader_did, reader_vk, None, reader_role) + schema_id, _ = await schema_helper(pool_handle, wallet_handle, writer_did, + 'schema1', '1.0', json.dumps(["age", "sex", "height", "name"])) + time.sleep(1) + res = await get_schema_helper(pool_handle, wallet_handle, reader_did, schema_id) + schema_id, schema_json = await ledger.parse_get_schema_response(res) + cred_def_id, _, res = await cred_def_helper(pool_handle, wallet_handle, writer_did, schema_json, 'TAG', + None, json.dumps({'support_revocation': False})) + res1 = json.loads(res) + res2 = json.loads(await 
get_cred_def_helper(pool_handle, wallet_handle, reader_did, cred_def_id)) + assert res1['op'] == 'REPLY' + assert res2['op'] == 'REPLY' + print(res1) + print(res2) + print(cred_def_id) + + +@pytest.mark.skip('IS-932') +@pytest.mark.asyncio +async def test_send_and_get_cred_def_negative(): + await pool.set_protocol_version(2) + pool_handle, _ = await pool_helper() + wallet_handle, _, _ = await wallet_helper() + trustee_did, trustee_vk = await did.create_and_store_my_did(wallet_handle, json.dumps( + {'seed': '000000000000000000000000Trustee1'})) + res = await get_cred_def_helper(pool_handle, wallet_handle, trustee_did, + 'AfdMw5jMX9pcNAuSwppbC7:3:CL:297:cred_def_tag') + res_json = json.loads(res) + cred_def_id, cred_def_json = await ledger.parse_get_cred_def_response(res) + + assert res_json + print(cred_def_id, cred_def_json) + + +@pytest.mark.parametrize('writer_role', ['TRUSTEE', 'STEWARD', 'TRUST_ANCHOR']) +@pytest.mark.parametrize('reader_role', ['TRUSTEE', 'STEWARD', 'TRUST_ANCHOR', None]) +@pytest.mark.asyncio +async def test_send_and_get_revoc_reg_def_positive(writer_role, reader_role): + await pool.set_protocol_version(2) + pool_handle, _ = await pool_helper() + wallet_handle, _, _ = await wallet_helper() + writer_did, writer_vk = await did.create_and_store_my_did(wallet_handle, '{}') + reader_did, reader_vk = await did.create_and_store_my_did(wallet_handle, '{}') + trustee_did, trustee_vk = await did.create_and_store_my_did(wallet_handle, json.dumps( + {'seed': '000000000000000000000000Trustee1'})) + # Trustee adds REVOC_REG_DEF writer + await nym_helper(pool_handle, wallet_handle, trustee_did, writer_did, writer_vk, None, writer_role) + # Trustee adds REVOC_REG_DEF reader + await nym_helper(pool_handle, wallet_handle, trustee_did, reader_did, reader_vk, None, reader_role) + schema_id, _ = await schema_helper(pool_handle, wallet_handle, writer_did, + 'schema1', '1.0', json.dumps(["age", "sex", "height", "name"])) + time.sleep(1) + res = await 
get_schema_helper(pool_handle, wallet_handle, reader_did, schema_id) + schema_id, schema_json = await ledger.parse_get_schema_response(res) + cred_def_id, _, res = await cred_def_helper(pool_handle, wallet_handle, writer_did, schema_json, 'cred_def_tag', + None, json.dumps({"support_revocation": True})) + revoc_reg_def_id, _, _, res1 = await revoc_reg_def_helper(pool_handle, wallet_handle, writer_did, None, + 'revoc_def_tag', cred_def_id, + json.dumps({'max_cred_num': 1, + 'issuance_type': 'ISSUANCE_BY_DEFAULT'})) + res2 = await get_revoc_reg_def_helper(pool_handle, wallet_handle, reader_did, revoc_reg_def_id) + assert res1['op'] == 'REPLY' + assert res2['op'] == 'REPLY' + print(res1) + print(res2) + + +@pytest.mark.asyncio +async def test_send_and_get_revoc_reg_def_negative(): + pass + + +@pytest.mark.parametrize('writer_role', ['TRUSTEE', 'STEWARD', 'TRUST_ANCHOR']) +@pytest.mark.parametrize('reader_role', ['TRUSTEE', 'STEWARD', 'TRUST_ANCHOR', None]) +@pytest.mark.asyncio +async def test_send_and_get_revoc_reg_entry_positive(writer_role, reader_role): + await pool.set_protocol_version(2) + timestamp0 = int(time.time()) + pool_handle, _ = await pool_helper() + wallet_handle, _, _ = await wallet_helper() + writer_did, writer_vk = await did.create_and_store_my_did(wallet_handle, '{}') + reader_did, reader_vk = await did.create_and_store_my_did(wallet_handle, '{}') + trustee_did, trustee_vk = await did.create_and_store_my_did(wallet_handle, json.dumps( + {'seed': '000000000000000000000000Trustee1'})) + # Trustee adds REVOC_REG_ENTRY writer + await nym_helper(pool_handle, wallet_handle, trustee_did, writer_did, writer_vk, None, writer_role) + # Trustee adds REVOC_REG_ENTRY reader + await nym_helper(pool_handle, wallet_handle, trustee_did, reader_did, reader_vk, None, reader_role) + schema_id, _ = await schema_helper(pool_handle, wallet_handle, writer_did, + 'schema1', '1.0', json.dumps(["age", "sex", "height", "name"])) + time.sleep(1) + res = await 
get_schema_helper(pool_handle, wallet_handle, reader_did, schema_id) + schema_id, schema_json = await ledger.parse_get_schema_response(res) + cred_def_id, _, res = await cred_def_helper(pool_handle, wallet_handle, writer_did, schema_json, 'cred_def_tag', + 'CL', json.dumps({'support_revocation': True})) + revoc_reg_def_id, _, _, res1 = await revoc_reg_entry_helper(pool_handle, wallet_handle, writer_did, 'CL_ACCUM', + 'revoc_def_tag', cred_def_id, + json.dumps({'max_cred_num': 1, + 'issuance_type': 'ISSUANCE_BY_DEFAULT'})) + timestamp1 = int(time.time()) + res2 = await get_revoc_reg_helper(pool_handle, wallet_handle, reader_did, revoc_reg_def_id, timestamp1) + res3 = await get_revoc_reg_delta_helper(pool_handle, wallet_handle, reader_did, revoc_reg_def_id, + timestamp0, timestamp1) + assert res1['op'] == 'REPLY' + assert res2['op'] == 'REPLY' + assert res3['op'] == 'REPLY' + print(res1) + print(res2) + print(res3) + + +@pytest.mark.asyncio +async def test_send_and_get_revoc_reg_entry_negative(): + pass diff --git a/acceptance/tests/utils.py b/acceptance/tests/utils.py new file mode 100644 index 000000000..c6a8f1641 --- /dev/null +++ b/acceptance/tests/utils.py @@ -0,0 +1,174 @@ +import json +import string +import random +import base58 +from indy import pool, wallet, ledger, anoncreds, blob_storage +from ctypes import CDLL + + +def run_async_method(method, *args, **kwargs): + + import asyncio + loop = asyncio.get_event_loop() + loop.run_until_complete(method(*args, **kwargs)) + + +def random_string(length): + return ''.join(random.choice(string.ascii_letters + string.digits) for _ in range(length)) + + +def random_did_and_json(): + return base58.b58encode(random_string(16)).decode(),\ + json.dumps({'did': base58.b58encode(random_string(16)).decode()}) + + +def random_seed_and_json(): + return base58.b58encode(random_string(23)).decode(),\ + json.dumps({'seed': base58.b58encode(random_string(23)).decode()}) + + +async def pool_helper(pool_name=None, 
path_to_genesis='./docker_genesis'): + if not pool_name: + pool_name = random_string(5) + pool_config = json.dumps({"genesis_txn": path_to_genesis}) + await pool.create_pool_ledger_config(pool_name, pool_config) + pool_handle = await pool.open_pool_ledger(pool_name, pool_config) + + return pool_handle, pool_name + + +async def wallet_helper(wallet_id=None, wallet_key='', wallet_key_derivation_method='ARGON2I_INT'): + if not wallet_id: + wallet_id = random_string(5) + wallet_config = json.dumps({"id": wallet_id}) + wallet_credentials = json.dumps({"key": wallet_key, "key_derivation_method": wallet_key_derivation_method}) + await wallet.create_wallet(wallet_config, wallet_credentials) + wallet_handle = await wallet.open_wallet(wallet_config, wallet_credentials) + + return wallet_handle, wallet_config, wallet_credentials + + +async def pool_destructor(pool_handle, pool_name): + await pool.close_pool_ledger(pool_handle) + await pool.delete_pool_ledger_config(pool_name) + + +async def wallet_destructor(wallet_handle, wallet_config, wallet_credentials): + await wallet.close_wallet(wallet_handle) + await wallet.delete_wallet(wallet_config, wallet_credentials) + + +async def payment_initializer(library_name, initializer_name): + library = CDLL(library_name) + init = getattr(library, initializer_name) + init() + + +async def nym_helper(pool_handle, wallet_handle, submitter_did, target_did, + target_vk=None, target_alias=None, target_role=None): + req = await ledger.build_nym_request(submitter_did, target_did, target_vk, target_alias, target_role) + res = await ledger.sign_and_submit_request(pool_handle, wallet_handle, submitter_did, req) + + return res + + +async def attrib_helper(pool_handle, wallet_handle, submitter_did, target_did, xhash=None, raw=None, enc=None): + req = await ledger.build_attrib_request(submitter_did, target_did, xhash, raw, enc) + res = await ledger.sign_and_submit_request(pool_handle, wallet_handle, submitter_did, req) + + return res + + +async def 
schema_helper(pool_handle, wallet_handle, submitter_did, schema_name, schema_version, schema_attrs): + schema_id, schema_json = await anoncreds.issuer_create_schema(submitter_did, schema_name, schema_version, + schema_attrs) + req = await ledger.build_schema_request(submitter_did, schema_json) + res = await ledger.sign_and_submit_request(pool_handle, wallet_handle, submitter_did, req) + + return schema_id, res + + +async def cred_def_helper(pool_handle, wallet_handle, submitter_did, schema_json, tag, signature_type, config_json): + cred_def_id, cred_def_json = \ + await anoncreds.issuer_create_and_store_credential_def(wallet_handle, submitter_did, schema_json, tag, + signature_type, config_json) + req = await ledger.build_cred_def_request(submitter_did, cred_def_json) + res = await ledger.sign_and_submit_request(pool_handle, wallet_handle, submitter_did, req) + + return cred_def_id, cred_def_json, res + + +async def revoc_reg_def_helper(pool_handle, wallet_handle, submitter_did, revoc_def_type, tag, cred_def_id, config_json): + tails_writer_config = json.dumps({'base_dir': 'tails', 'uri_pattern': ''}) + tails_writer_handle = await blob_storage.open_writer('default', tails_writer_config) + revoc_reg_def_id, revoc_reg_def_json, revoc_reg_entry_json = \ + await anoncreds.issuer_create_and_store_revoc_reg(wallet_handle, submitter_did, revoc_def_type, tag, + cred_def_id, config_json, tails_writer_handle) + req = await ledger.build_revoc_reg_def_request(submitter_did, revoc_reg_def_json) + res = json.loads(await ledger.sign_and_submit_request(pool_handle, wallet_handle, submitter_did, req)) + + return revoc_reg_def_id, revoc_reg_def_json, revoc_reg_entry_json, res + + +async def revoc_reg_entry_helper(pool_handle, wallet_handle, submitter_did, revoc_def_type, tag, cred_def_id, config_json): + tails_writer_config = json.dumps({'base_dir': 'tails', 'uri_pattern': ''}) + tails_writer_handle = await blob_storage.open_writer('default', tails_writer_config) + revoc_reg_def_id, 
revoc_reg_def_json, revoc_reg_entry_json = \ + await anoncreds.issuer_create_and_store_revoc_reg(wallet_handle, submitter_did, revoc_def_type, tag, + cred_def_id, config_json, tails_writer_handle) + req = await ledger.build_revoc_reg_def_request(submitter_did, revoc_reg_def_json) + await ledger.sign_and_submit_request(pool_handle, wallet_handle, submitter_did, req) + req = await ledger.build_revoc_reg_entry_request(submitter_did, revoc_reg_def_id, revoc_def_type, + revoc_reg_entry_json) + res = json.loads(await ledger.sign_and_submit_request(pool_handle, wallet_handle, submitter_did, req)) + + return revoc_reg_def_id, revoc_reg_def_json, revoc_reg_entry_json, res + + +async def get_nym_helper(pool_handle, wallet_handle, submitter_did, target_did): + req = await ledger.build_get_nym_request(submitter_did, target_did) + res = await ledger.sign_and_submit_request(pool_handle, wallet_handle, submitter_did, req) + + return res + + +async def get_attrib_helper(pool_handle, wallet_handle, submitter_did, target_did, xhash=None, raw=None, enc=None): + req = await ledger.build_get_attrib_request(submitter_did, target_did, raw, xhash, enc) + res = await ledger.sign_and_submit_request(pool_handle, wallet_handle, submitter_did, req) + + return res + + +async def get_schema_helper(pool_handle, wallet_handle, submitter_did, id_): + req = await ledger.build_get_schema_request(submitter_did, id_) + res = await ledger.sign_and_submit_request(pool_handle, wallet_handle, submitter_did, req) + + return res + + +async def get_cred_def_helper(pool_handle, wallet_handle, submitter_did, id_): + req = await ledger.build_get_cred_def_request(submitter_did, id_) + res = await ledger.sign_and_submit_request(pool_handle, wallet_handle, submitter_did, req) + + return res + + +async def get_revoc_reg_def_helper(pool_handle, wallet_handle, submitter_did, id_): + req = await ledger.build_get_revoc_reg_def_request(submitter_did, id_) + res = json.loads(await 
ledger.sign_and_submit_request(pool_handle, wallet_handle, submitter_did, req)) + + return res + + +async def get_revoc_reg_helper(pool_handle, wallet_handle, submitter_did, id_, timestamp): + req = await ledger.build_get_revoc_reg_request(submitter_did, id_, timestamp) + res = json.loads(await ledger.sign_and_submit_request(pool_handle, wallet_handle, submitter_did, req)) + + return res + + +async def get_revoc_reg_delta_helper(pool_handle, wallet_handle, submitter_did, id_, from_, to_): + req = await ledger.build_get_revoc_reg_delta_request(submitter_did, id_, from_, to_) + res = json.loads(await ledger.sign_and_submit_request(pool_handle, wallet_handle, submitter_did, req)) + + return res diff --git a/build-scripts/ubuntu-1604/postinst_node b/build-scripts/ubuntu-1604/postinst_node index f3d854716..a13ddcad6 100755 --- a/build-scripts/ubuntu-1604/postinst_node +++ b/build-scripts/ubuntu-1604/postinst_node @@ -6,22 +6,13 @@ GENERAL_CONFIG_DIR="/etc/indy" GENERAL_DATA_DIR="/var/lib/indy" GENERAL_LOG_DIR="/var/log/indy" -CLI_BASE_DIR="/home/indy/.indy-cli" -CLI_NETWORKS_DIR="$CLI_BASE_DIR/networks" -CLI_WALLETS_DIR="$CLI_BASE_DIR/wallets" - INSTALL_DIR='/usr/local/lib/python3.5/dist-packages' NOFILES_SOFT_LIMIT=65536 NOFILES_HARD_LIMIT=131072 -CLIENT_CONNECTIONS_LIMIT=10000 - +CLIENT_CONNECTIONS_LIMIT=500 -# workaround when .indy become regular file -if [ -f $CLI_BASE_DIR ]; then - rm $CLI_BASE_DIR -fi # create general indy config folder if does not exist mkdir -p $GENERAL_CONFIG_DIR @@ -43,13 +34,6 @@ chmod -R ug+rwx $GENERAL_CONFIG_DIR chmod -R ug+rwx $GENERAL_DATA_DIR chmod -R ug+rwx $GENERAL_LOG_DIR -# create indy cli folder if does not exist -mkdir -p $CLI_BASE_DIR -mkdir -p $CLI_NETWORKS_DIR -mkdir -p $CLI_WALLETS_DIR -chown -R indy:indy $CLI_BASE_DIR -chmod -R ug+rwx $CLI_BASE_DIR - # init_indy_node script cat < /usr/local/bin/init_indy_node diff --git a/build-scripts/ubuntu-1604/prepare-package.sh b/build-scripts/ubuntu-1604/prepare-package.sh index 
6cad5d5ce..1378eade4 100755 --- a/build-scripts/ubuntu-1604/prepare-package.sh +++ b/build-scripts/ubuntu-1604/prepare-package.sh @@ -35,7 +35,6 @@ fi echo -e "\n\nReplace postfixes" sed -i -r "s~indy-node-[a-z]+~indy-node~" "$repo/setup.py" sed -i -r "s~indy-plenum-[a-z]+~indy-plenum~" "$repo/setup.py" -sed -i -r "s~indy-anoncreds-[a-z]+~indy-anoncreds~" "$repo/setup.py" echo -e "Adapt the dependencies for the Canonical archive" sed -i "s~python-dateutil~python3-dateutil~" "$repo/setup.py" diff --git a/ci/ubuntu.dockerfile b/ci/ubuntu.dockerfile index c786e4c6d..eb3f47a31 100644 --- a/ci/ubuntu.dockerfile +++ b/ci/ubuntu.dockerfile @@ -1,4 +1,4 @@ -FROM hyperledger/indy-core-baseci:0.0.1 +FROM hyperledger/indy-core-baseci:0.0.2 LABEL maintainer="Hyperledger " ARG uid=1000 @@ -8,7 +8,7 @@ ARG venv=venv RUN apt-get update -y && apt-get install -y \ python3-nacl \ libindy-crypto=0.4.3 \ - libindy=1.6.1~683 \ + libindy=1.6.8 \ # rocksdb python wrapper libbz2-dev \ zlib1g-dev \ @@ -16,8 +16,7 @@ RUN apt-get update -y && apt-get install -y \ libsnappy-dev \ rocksdb=5.8.8 -RUN indy_ci_add_user $uid $user $venv && \ - indy_ci_charm_crypto $user $venv +RUN indy_ci_add_user $uid $user $venv RUN indy_image_clean diff --git a/data/migrations/deb/1_6_703_to_1_6_704.py b/data/migrations/deb/1_6_703_to_1_6_704.py new file mode 100644 index 000000000..f12b5bce1 --- /dev/null +++ b/data/migrations/deb/1_6_703_to_1_6_704.py @@ -0,0 +1,15 @@ +#!/usr/bin/python3.5 + +from indy_common.migration.helper import update_indy_env + +from stp_core.common.log import getlogger + +logger = getlogger() + +CLIENT_CONNECTIONS_LIMIT_KEY = "CLIENT_CONNECTIONS_LIMIT" +CLIENT_CONNECTIONS_LIMIT_VAL = 500 + + +logger.info("Going to update clients connections limit.") +update_indy_env(CLIENT_CONNECTIONS_LIMIT_KEY, CLIENT_CONNECTIONS_LIMIT_VAL) +logger.info("Done.") diff --git a/dev-setup/osx/setup-dev-dependencies.sh b/dev-setup/osx/setup-dev-dependencies.sh index 6cac89d7d..479aacb12 100755 --- 
a/dev-setup/osx/setup-dev-dependencies.sh +++ b/dev-setup/osx/setup-dev-dependencies.sh @@ -19,22 +19,6 @@ echo 'Installing RocksDB 5.8.8...' brew install https://gist.githubusercontent.com/faisal00813/4059a5b41c10aa87270351c4795af752/raw/551d4de01a83f884c798ec5c2cb28a1b15d04db8/rocksdb.rb echo 'Installing RocksDB...' -echo 'Installing Charm Crypto...' -xcode-select --install -brew install gmp -brew install pbc -pushd /tmp -git clone https://github.com/JHUISI/charm.git -pushd charm -./configure.sh --enable-darwin -make -make install -make test -popd -rm -rf charm -popd -echo 'Installed Charm Crypto' - echo 'Installing libindy...' brew install pkg-config brew install automake diff --git a/dev-setup/ubuntu/init-dev-project.sh b/dev-setup/ubuntu/init-dev-project.sh index 3b9d6c5b1..cc9da4a45 100755 --- a/dev-setup/ubuntu/init-dev-project.sh +++ b/dev-setup/ubuntu/init-dev-project.sh @@ -19,11 +19,6 @@ mkvirtualenv -p python3.5 ${virtualenv_name} workon ${virtualenv_name} echo "Created virtual environment" -echo "Installing Charm Crypto..." -cp -r /usr/local/lib/python3.5/dist-packages/Charm_Crypto-0.0.0.egg-info ~/.virtualenvs/${virtualenv_name}/lib/python3.5/site-packages/Charm_Crypto-0.0.0.egg-info -cp -r /usr/local/lib/python3.5/dist-packages/charm ~/.virtualenvs/${virtualenv_name}/lib/python3.5/site-packages/charm -echo "Installed Charm Crypto..." - echo "Installing indy-node..." pushd indy-node pip install -e .[tests] diff --git a/dev-setup/ubuntu/setup-dev-depend-ubuntu14.sh b/dev-setup/ubuntu/setup-dev-depend-ubuntu14.sh index 51feb69b7..ba818585a 100755 --- a/dev-setup/ubuntu/setup-dev-depend-ubuntu14.sh +++ b/dev-setup/ubuntu/setup-dev-depend-ubuntu14.sh @@ -23,12 +23,6 @@ echo 'Installing libsodium...' sudo apt-get install -y libsodium13 echo 'Installed libsodium' - -echo 'Installing Charm Crypto...' -sudo apt-get install -y python3-charm-crypto -echo 'Installed Charm Crypto' - - echo 'Installing Libindy and Libindy Crypto...' 
sudo apt-get install -y libindy libindy-crypto echo 'Installed Libindy and Libindy Crypto' diff --git a/dev-setup/ubuntu/setup-dev-depend-ubuntu16.sh b/dev-setup/ubuntu/setup-dev-depend-ubuntu16.sh index ae689f88c..e66afafff 100755 --- a/dev-setup/ubuntu/setup-dev-depend-ubuntu16.sh +++ b/dev-setup/ubuntu/setup-dev-depend-ubuntu16.sh @@ -30,11 +30,6 @@ sudo apt-get install -y libbz2-dev \ echo 'Installed RocksDB' -echo 'Installing Charm Crypto...' -sudo apt-get install -y python3-charm-crypto -echo 'Installed Charm Crypto' - - echo 'Installing Libindy and Libindy Crypto...' sudo apt-get install -y libindy libindy-crypto echo 'Installed Libindy and Libindy Crypto' diff --git a/docs/auth_rules.md b/docs/auth_rules.md new file mode 100644 index 000000000..6aedd48e0 --- /dev/null +++ b/docs/auth_rules.md @@ -0,0 +1,56 @@ +# Current implemented rules in auth_map +| Transaction type | Field | Previous value | New value | Who can| Description | +|------------------|-------|----------------|-----------|--------|-------------| +| NYM |`role` |`` | TRUSTEE | TRUSTEE|Adding new TRUSTEE| +| NYM |`role` |`` | STEWARD | TRUSTEE|Adding new STEWARD| +| NYM |`role` |`` | TRUST_ANCHOR| TRUSTEE, STEWARD|Adding new TRUST_ANCHOR| +| NYM |`role` |`` |`` | TRUSTEE, STEWARD, TRUST_ANCHOR| Adding new Identity Owner| +| NYM |`role` | TRUSTEE |`` | TRUSTEE | Blacklisting Trustee| +| NYM |`role` | STEWARD |`` | TRUSTEE | Blacklisting Steward| +| NYM |`role` | TRUST_ANCHOR |`` | TRUSTEE | Blacklisting Trust anchor| +| NYM |`verkey`|`*`|`*`| Owner of this nym | Key Rotation| +| SCHEMA |`*`|`*`|`*`| TRUSTEE, STEWARD, TRUST_ANCHOR | Adding new Schema| +| SCHEMA |`*`|`*`|`*`| No one can edit existing Schema | Editing Schema| +| CLAIM_DEF |`*`|`*`|`*`| TRUSTEE, STEWARD, TRUST_ANCHOR| Adding new CLAIM_DEF transaction| +| CLAIM_DEF |`*`|`*`|`*`| Owner of claim_def txn| Editing CLAIM_DEF transaction| +| NODE |`services`|``|`[VALIDATOR]`| STEWARD if it is owner of this transaction| Adding new node to 
pool| +| NODE |`services`|`[VALIDATOR]`|`[]`| TRUSTEE, STEWARD if it is owner of this transaction| Demotion of node| +| NODE |`services`|`[]`|`[VALIDATOR]`| TRUSTEE, STEWARD if it is owner of this transaction| Promotion of node| +| NODE |`node_ip`|`*`|`*`| STEWARD if it is owner of this transaction| Changing Node's ip address| +| NODE |`node_port`|`*`|`*`| STEWARD if it is owner of this transaction| Changing Node's port| +| NODE |`client_ip`|`*`|`*`| STEWARD if it is owner of this transaction| Changing Client's ip address| +| NODE |`client_port`|`*`|`*`| STEWARD if it is owner of this transaction| Changing Client's port| +| NODE |`blskey`|`*`|`*`| STEWARD if it is owner of this transaction| Changing Node's blskey| +| POOL_UPGRADE |`action`|``|`start`|TRUSTEE| Starting upgrade procedure| +| POOL_UPGRADE |`action`|`start`|`cancel`|TRUSTEE| Canceling upgrade procedure| +| POOL_RESTART |`action`|`*`|`*`|TRUSTEE| Restarting pool command| +| POOL_CONFIG |`action`|`*`|`*`|TRUSTEE| Pool config command (like a `read only` option)| +| VALIDATOR_INFO |`*`|`*`|`*`| TRUSTEE, STEWARD| Getting validator_info from pool| + + +### Also, there is a some optional rules for case if in config option ANYONE_CAN_WRITE is set to True: +| Transaction type | Field | Previous value | New value | Who can| Description | +|------------------|-------|----------------|-----------|--------|-------------| +|NYM |`role`|``|``| Anyone| Adding new nym| +|SCHEMA |`*`|`*`|`*`| Anyone| Any operations with SCHEMA transaction| +|CLAIM_DEF |`*`|`*`|`*`| Anyone| Any operations with CLAIM_DEF transaction| + + +### As of now it's not implemented yet, but the next rules for Revocation feature are needed: +#### If ANYONE_CAN_WRITE is set to False: +| Transaction type | Field | Previous value | New value | Who can| Description | +|------------------|-------|----------------|-----------|--------|-------------| +|REVOC_REG_DEF|`*`|`*`|`*`| TRUSTEE, STEWARD, TRUST_ANCHOR| Adding new REVOC_REG_DEF| 
+|REVOC_REG_DEF|`*`|`*`|`*`| Only owners can edit existing REVOC_REG_DEF| Editing REVOC_REG_DEF| +|REVOC_REG_ENTRY|`*`|`*`|`*`| Only the owner of the corresponding REVOC_REG_DEF can create new REVOC_REG_ENTRY| Adding new REVOC_REG_ENTRY| +|REVOC_REG_ENTRY|`*`|`*`|`*`| Only owners can edit existing REVOC_REG_ENTRY| Editing REVOC_REG_ENTRY| + +#### If ANYONE_CAN_WRITE is set to True: +| Transaction type | Field | Previous value | New value | Who can| Description | +|------------------|-------|----------------|-----------|--------|-------------| +|REVOC_REG_DEF|`*`|`*`|`*`| Anyone can create new REVOC_REG_DEF| Adding new REVOC_REG_DEF| +|REVOC_REG_DEF|`*`|`*`|`*`| Only owners can edit existing REVOC_REG_DEF| Editing REVOC_REG_DEF| +|REVOC_REG_ENTRY|`*`|`*`|`*`| Only the owner of the corresponding REVOC_REG_DEF can create new REVOC_REG_ENTRY| Adding new REVOC_REG_ENTRY| +|REVOC_REG_ENTRY|`*`|`*`|`*`| Only owners can edit existing REVOC_REG_ENTRY| Adding new REVOC_REG_ENTRY| + + diff --git a/docs/ci-cd.md b/docs/ci-cd.md index 787e01dfc..89149c8a6 100644 --- a/docs/ci-cd.md +++ b/docs/ci-cd.md @@ -78,12 +78,10 @@ Use cases for artifacts We use [fpm](https://github.com/jordansissel/fpm) for packaging python code into deb packages. Build scripts are placed in `build-scripts` folders: - https://github.com/hyperledger/indy-node/blob/master/build-scripts - https://github.com/hyperledger/indy-plenum/blob/master/build-scripts -- https://github.com/hyperledger/indy-anoncreds/blob/master/build-scripts We also pack some 3rd parties dependencies which are not presented in canonical ubuntu repositories: - https://github.com/hyperledger/indy-node/blob/master/build-scripts/ubuntu-1604/build-3rd-parties.sh - https://github.com/hyperledger/indy-plenum/blob/master/build-scripts/ubuntu-1604/build-3rd-parties.sh -- https://github.com/hyperledger/indy-anoncreds/blob/master/build-scripts/ubuntu-1604/build-3rd-parties.sh Each `build-scripts` folder includes `Readme.md`. 
Please check them for more details. @@ -92,7 +90,7 @@ Each `build-scripts` folder includes `Readme.md`. Please check them for more det - Please note, that we are using semver-like approach for versioning (major, minor, build) for each of the components. - Major and minor parts are set in the code (see [\_\_metadata\_\_.py](https://github.com/hyperledger/indy-node/blob/master/indy_node/__metadata__.py)). They must be incremented for new releases manually from code if needed. - Build part is incremented with each build on Jenkins (so it always increases, but may be not sequentially) -- Each dependency (including indy-plenum and indy-anoncreds) has a strict version (see [setup.py](https://github.com/hyperledger/indy-node/blob/master/setup.py)) +- Each dependency (including indy-plenum) has a strict version (see [setup.py](https://github.com/hyperledger/indy-node/blob/master/setup.py)) - If you install indy-node (either from pypi, or from deb package), the specified in setup.py version of indy-plenum is installed. - Master and Stable builds usually have different versions. - Differences in master and stable code: diff --git a/docs/setup-dev.md b/docs/setup-dev.md index df4d63262..a062db36c 100644 --- a/docs/setup-dev.md +++ b/docs/setup-dev.md @@ -18,7 +18,7 @@ You can also have a look at the scripts mentioned below to follow them and perfo 1. Get scripts from [dev-setup-ubuntu](https://github.com/hyperledger/indy-node/tree/master/dev-setup/ubuntu) 1. Run `setup-dev-python.sh` to setup Python3.5, pip and virtualenv 1. Run `source ~/.bashrc` to apply virtual environment wrapper installation -1. Run `setup-dev-depend-ubuntu16.sh` to setup dependencies (charm-crypto, libindy, libindy-crypto, libsodium) +1. Run `setup-dev-depend-ubuntu16.sh` to setup dependencies (libindy, libindy-crypto, libsodium) 1. Fork [indy-plenum](https://github.com/hyperledger/indy-plenum) and [indy-node](https://github.com/hyperledger/indy-node) 1. Go to the destination folder for the project 1. 
Run `init-dev-project.sh ` to clone indy-plenum and indy-node projects and @@ -81,13 +81,6 @@ Run ```sudo yum install python3.5``` Download the latest build (pywin32-220.win-amd64-py3.5.exe is the latest build as of this writing) from [here](https://sourceforge.net/projects/pywin32/files/pywin32/Build%20220/) and run the downloaded executable. -### Setup Charm-Crypto - -Indy-client requires anonymous credentials library which requires a cryptographic library. -The default configuration includes an example that uses Charm-Crypto framework. -You can install it as described in [Anonymous Credentials](https://github.com/evernym/anoncreds) repository -(in particular, running `setup-charm.sh`). - ### Setup Libsodium Indy also depends on libsodium, an awesome crypto library. These need to be installed separately. diff --git a/docs/setup-iptables.md b/docs/setup-iptables.md index e248d15b3..768cd3318 100644 --- a/docs/setup-iptables.md +++ b/docs/setup-iptables.md @@ -1,7 +1,15 @@ # Setup iptables rules (recommended) -In order to prevent the indy-node process from reaching of open file descriptors limit caused by clients connections it is strongly -recommended to add iptables rule that limits the number of simultaneous clients connections for client port. +It is strongly recommended to add iptables (or some other firewall) rule that limits the number of simultaneous clients +connections for client port. +There are at least two important reasons for this: + - preventing the indy-node process from reaching of open file descriptors limit caused by clients connections + - preventing the indy-node process from large memory usage as ZeroMQ creates the separate queue for each TCP connection. + +NOTE: limitation of the number of *simultaneous clients connections* does not mean that we limit the +number of *simultaneous clients* the indy-node works with in any time. 
The IndySDK client does not keep +connection infinitely, it uses the same connection for request-response session with some optimisations, +so it's just about **connections**, **not** about **clients**. Also iptables can be used to deal with various DoS attacks (e.g. syn flood) but rules' parameters are not estimated yet. @@ -17,7 +25,7 @@ This environment file contains client port (NODE_CLIENT_PORT) and recommended cl This parameters can be used to add the iptables rule for chain INPUT: ``` -# iptables -I INPUT -p tcp --syn --dport 9702 -m connlimit --connlimit-above 15360 --connlimit-mask 0 -j REJECT --reject-with tcp-reset +# iptables -I INPUT -p tcp --syn --dport 9702 -m connlimit --connlimit-above 500 --connlimit-mask 0 -j REJECT --reject-with tcp-reset ``` Some key options: - --dport - a port for which limit is set @@ -55,8 +63,8 @@ NOTE: this script should be called *after* `init_indy_node` script. ###### For pip installation The `setup_indy_node_iptables` script can not be used in case of pip installation as indy-node environment file does not exist, -use the `setup_iptables` script instead (9702 is a client port, 15360 is recommended limit for now) +use the `setup_iptables` script instead (9702 is a client port, 500 is recommended limit for now) ``` -# setup_iptables 9702 15360 +# setup_iptables 9702 500 ``` In fact, the `setup_indy_node_iptables` script is just a wrapper for the `setup_iptables` script. diff --git a/docs/start-nodes.md b/docs/start-nodes.md index 4aed19161..c3f06505b 100644 --- a/docs/start-nodes.md +++ b/docs/start-nodes.md @@ -75,6 +75,12 @@ We can run the script multiple times for different networks. #### Setup iptables (recommended) +It is strongly recommended to add iptables (or some other firewall) rule that limits the number of simultaneous clients +connections for client port. 
+There are at least two important reasons for this: + - preventing the indy-node process from reaching of open file descriptors limit caused by clients connections + - preventing the indy-node process from large memory usage as ZeroMQ creates the separate queue for each TCP connection. + Instructions related to iptables setup can be found [here](https://github.com/hyperledger/indy-node/blob/master/docs/setup-iptables.md). #### Running Node diff --git a/environment/cloudformation/training/GettingStartedGuideCluster.json b/environment/cloudformation/training/GettingStartedGuideCluster.json index da3e7885d..61492c478 100644 --- a/environment/cloudformation/training/GettingStartedGuideCluster.json +++ b/environment/cloudformation/training/GettingStartedGuideCluster.json @@ -3238,12 +3238,6 @@ "IPaddress": "10.0.0.201" } }, -// "ClientCodePyPi": { -// "Ubuntu1604": { -// "stable": "indy-client", -// "master": "indy-client-dev" -// } -// }, "RegionBasedData": { "ap-northeast-1": { "Ubuntu1604": "ami-aae114cc" diff --git a/environment/docker/baseimage/README.md b/environment/docker/baseimage/README.md index 622dc29a8..80baf3d9f 100644 --- a/environment/docker/baseimage/README.md +++ b/environment/docker/baseimage/README.md @@ -26,11 +26,9 @@ Based on [indy-baseimage](#indy-baseimage). Adds [Indy Core apt repository](https://repo.sovrin.org/deb) to apt sources.list. Also it adds two scripts into system $PATH available directory that could be run by child images to perform common setup routine: - `indy_ci_add_user` creates user with python virtualenv configured - - `indy_ci_charm_crypto` installs Charm Crypto ### indy-core-baseci Base image for images that provide CI testing environment for Indy core projects -([indy-anoncreds](https://github.com/hyperledger/indy-anoncreds), [indy-plenum](https://github.com/hyperledger/indy-plenum), [indy-node](https://github.com/hyperledger/indy-node)). Based on [indy-baseci](#indy-baseci). 
diff --git a/environment/docker/baseimage/indy-baseci.ubuntu.dockerfile b/environment/docker/baseimage/indy-baseci.ubuntu.dockerfile index f056babda..f45ffdd72 100644 --- a/environment/docker/baseimage/indy-baseci.ubuntu.dockerfile +++ b/environment/docker/baseimage/indy-baseci.ubuntu.dockerfile @@ -7,8 +7,7 @@ RUN apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 68DB5E88 && \ apt-get update COPY scripts/user.sh /usr/local/bin/indy_ci_add_user -COPY scripts/charm_crypto.sh /usr/local/bin/indy_ci_charm_crypto -RUN bash -c "chmod 755 /usr/local/bin/indy_ci_{add_user,charm_crypto}" +RUN bash -c "chmod 755 /usr/local/bin/indy_ci_add_user" COPY __VERSION_FILE__ / diff --git a/environment/docker/baseimage/scripts/charm_crypto.sh b/environment/docker/baseimage/scripts/charm_crypto.sh deleted file mode 100755 index a58477331..000000000 --- a/environment/docker/baseimage/scripts/charm_crypto.sh +++ /dev/null @@ -1,16 +0,0 @@ -#!/bin/bash -e -set -e -set -x - -USERNAME="$1" -USERHOME=$(eval echo "~$USERNAME") -VENVDIR="$USERHOME/$2" - -PY_GLOBAL="/usr/local/lib/python3.5/dist-packages" -PY_USER="$VENVDIR/lib/python3.5/site-packages" - -apt-get update -apt-get install -y python3-charm-crypto - -su -c "cp -r $PY_GLOBAL/Charm_Crypto-0.0.0.egg-info $PY_USER" - $USERNAME -su -c "cp -r $PY_GLOBAL/charm $PY_USER" - $USERNAME diff --git a/environment/docker/getting_started_turnkey/Makefile b/environment/docker/getting_started_turnkey/Makefile deleted file mode 100644 index 510104836..000000000 --- a/environment/docker/getting_started_turnkey/Makefile +++ /dev/null @@ -1,205 +0,0 @@ -SHELL := /bin/bash #bash syntax -# -# ALICE Indy Sovrin Demo -# -# Setup a four node Indy Cluster, and four Indy clients called Indy, Faber, Acme, and Thrift -# -# *** Make the indy-base docker image -# -# make indy-base -# -# *** Run the first part of the Alice demo and then interactively run the rest of the demo -# -# make run-demo -# -# *** Run the entire Alice demo -# -# make run-alice -# -# 
*** Start a cluster and then start indy and agents (Only run the first time) -# make cluster -# make indy -# -# *** Start a cluster and then start indy prompt -# make cluster -# make indy-cli -# -# *** Start Faber only -# make faber -# -# *** You can stop all docker containers -# make stop -# -# *** Remove all docker containers -# make clean -# - -# Detect the local IP address -LOCAL:=$(shell ifconfig|grep 'inet '|grep -vm1 127.0.0.1|awk '{print $$2}' | sed -e 's/addr://g') - -# Uncomment to manually set the local IP address if not set correctly above -# LOCAL=192.168.1.100 - -NO_COLOR="\x1b[0m" -OK_COLOR="\x1b[32;01m" -ERROR_COLOR="\x1b[31;01m" -WARN_COLOR="\x1b[33;01m" -BLUE_COLOR="\x1b[34;01m" - - -run-demo: clean info cluster faber acme thrift indy - -run-alice: clean info cluster faber acme thrift indy-alice - -indy-base: - @echo -e $(BLUE_COLOR)Indy-base Docker $(NO_COLOR) - -docker rmi -f indy-base - docker build -t indy-base -f ./indy-base-dockerfile . - @echo -e $(GREEN_COLOR)SUCCESS Indy-base Docker $(LOCAL) $(NO_COLOR) - -local: - @echo -e $(BLUE_COLOR) Local IP is $(LOCAL) $(NO_COLOR) - $(eval IPS=$(LOCAL),$(LOCAL),$(LOCAL),$(LOCAL)) - $(eval IPFABER=$(LOCAL)) - $(eval IPACME=$(LOCAL)) - $(eval IPTHRIFT=$(LOCAL)) - -info: local - @echo -e $(BLUE_COLOR) Settings.... 
$(NO_COLOR) - @echo -e $(BLUE_COLOR) IPS=$(IPS) $(NO_COLOR) - @echo -e $(BLUE_COLOR) IPFABER=$(IPFABER) $(NO_COLOR) - @echo -e $(BLUE_COLOR) IPACME=$(IPACME) $(NO_COLOR) - @echo -e $(BLUE_COLOR) IPTHRIFT=$(IPTHRIFT) $(NO_COLOR) - -cluster: - @echo -e $(BLUE_COLOR) CLUSTER: Create 4 Nodes at IPS $(IPS) $(NO_COLOR) - docker run --name Node1 -d -p 9701:9701 -p 9702:9702 indy-base /bin/bash -c "create_dirs.sh; init_indy_keys --name Node1; generate_indy_pool_transactions --nodes 4 --clients 5 --nodeNum 1 --ips $(IPS); start_indy_node Node1 0.0.0.0 9701 0.0.0.0 9702" - docker run --name Node2 -d -p 9703:9703 -p 9704:9704 indy-base /bin/bash -c "create_dirs.sh; init_indy_keys --name Node2; generate_indy_pool_transactions --nodes 4 --clients 5 --nodeNum 2 --ips $(IPS); start_indy_node Node2 0.0.0.0 9703 0.0.0.0 9704" - docker run --name Node3 -d -p 9705:9705 -p 9706:9706 indy-base /bin/bash -c "create_dirs.sh; init_indy_keys --name Node3; generate_indy_pool_transactions --nodes 4 --clients 5 --nodeNum 3 --ips $(IPS); start_indy_node Node3 0.0.0.0 9705 0.0.0.0 9706" - docker run --name Node4 -d -p 9707:9707 -p 9708:9708 indy-base /bin/bash -c "create_dirs.sh; init_indy_keys --name Node4; generate_indy_pool_transactions --nodes 4 --clients 5 --nodeNum 4 --ips $(IPS); start_indy_node Node4 0.0.0.0 9707 0.0.0.0 9708" - @echo -e $(OK_COLOR) SUCCESS: Cluster 4 nodes success at IPS $(IPS) $(NO_COLOR) - -indy-cli: info - @echo -e $(BLUE_COLOR) INDY DEBUG: Create Indy $(IPS) $(NO_COLOR) - docker run --rm --name IndyCli -it indy-base /bin/bash -c "create_dirs.sh; generate_indy_pool_transactions --nodes 4 --clients 5 --ips $(IPS); /bin/bash" - -indy: info - @echo -e $(BLUE_COLOR) INDY: Create Indy and initialize with commandline jobs $(IPS) $(NO_COLOR) - docker run --rm --name Indy -it indy-base /bin/bash -c "\ - create_dirs.sh; generate_indy_pool_transactions --nodes 4 --clients 5 --ips $(IPS); \ - ./indy-cli \ - 'connect sandbox' \ - 'new key with seed 
000000000000000000000000Steward1' \ - 'send NYM dest=ULtgFQJe6bjiFbs7ke3NJD role=TRUST_ANCHOR verkey=~5kh3FB4H3NKq7tUDqeqHc1' \ - 'send NYM dest=CzkavE58zgX7rUMrzSinLr role=TRUST_ANCHOR verkey=~WjXEvZ9xj4Tz9sLtzf7HVP' \ - 'send NYM dest=H2aKRiDeq8aLZSydQMDbtf role=TRUST_ANCHOR verkey=~3sphzTb2itL2mwSeJ1Ji28' \ - 'new key with seed Faber000000000000000000000000000' \ - 'send ATTRIB dest=ULtgFQJe6bjiFbs7ke3NJD raw={\"endpoint\": {\"ha\": \"$(IPFABER):5555\", \"pubkey\": \"5hmMA64DDQz5NzGJNVtRzNwpkZxktNQds21q3Wxxa62z\"}}' \ - 'new key with seed Acme0000000000000000000000000000' \ - 'send ATTRIB dest=CzkavE58zgX7rUMrzSinLr raw={\"endpoint\": {\"ha\": \"$(IPACME):6666\", \"pubkey\": \"C5eqjU7NMVMGGfGfx2ubvX5H9X346bQt5qeziVAo3naQ\"}}' \ - 'new key with seed Thrift00000000000000000000000000' \ - 'send ATTRIB dest=H2aKRiDeq8aLZSydQMDbtf raw={\"endpoint\": {\"ha\": \"$(IPTHRIFT):7777\", \"pubkey\": \"AGBjYvyM3SFnoiDGAEzkSLHvqyzVkXeMZfKDvdpEsC2x\"}}' \ - 'save wallet'; \ - /bin/bash \ - " - @echo -e $(OK_COLOR) SUCCESS: Indy $(NO_COLOR) - -indy-alice: info - @echo -e $(BLUE_COLOR) INDY ALICE: Create Indy and initialize with commandline jobs $(IPS) $(NO_COLOR) - docker run --rm --name IndyAlice -it indy-base /bin/bash -c "\ - create_dirs.sh; generate_indy_pool_transactions --nodes 4 --clients 5 --ips $(IPS); \ - ./indy-cli \ - 'connect sandbox' \ - 'new key with seed 000000000000000000000000Steward1' \ - 'send NYM dest=ULtgFQJe6bjiFbs7ke3NJD role=TRUST_ANCHOR verkey=~5kh3FB4H3NKq7tUDqeqHc1' \ - 'send NYM dest=CzkavE58zgX7rUMrzSinLr role=TRUST_ANCHOR verkey=~WjXEvZ9xj4Tz9sLtzf7HVP' \ - 'send NYM dest=H2aKRiDeq8aLZSydQMDbtf role=TRUST_ANCHOR verkey=~3sphzTb2itL2mwSeJ1Ji28' \ - 'new key with seed Faber000000000000000000000000000' \ - 'send ATTRIB dest=ULtgFQJe6bjiFbs7ke3NJD raw={\"endpoint\": {\"ha\": \"$(IPFABER):5555\", \"pubkey\": \"5hmMA64DDQz5NzGJNVtRzNwpkZxktNQds21q3Wxxa62z\"}}' \ - 'new key with seed Acme0000000000000000000000000000' \ - 'send ATTRIB 
dest=CzkavE58zgX7rUMrzSinLr raw={\"endpoint\": {\"ha\": \"$(IPACME):6666\", \"pubkey\": \"C5eqjU7NMVMGGfGfx2ubvX5H9X346bQt5qeziVAo3naQ\"}}' \ - 'new key with seed Thrift00000000000000000000000000' \ - 'send ATTRIB dest=H2aKRiDeq8aLZSydQMDbtf raw={\"endpoint\": {\"ha\": \"$(IPTHRIFT):7777\", \"pubkey\": \"AGBjYvyM3SFnoiDGAEzkSLHvqyzVkXeMZfKDvdpEsC2x\"}}' \ - 'save wallet' \ - 'prompt ALICE' \ - 'new wallet Alice' \ - 'load sample/faber-request.indy' \ - 'show connection Faber' \ - 'accept request from Faber' \ - 'show claim Transcript' \ - 'request claim Transcript' \ - 'show claim Transcript' \ - 'save wallet' \ - 'load sample/acme-job-application.indy' \ - 'accept request from Acme' \ - 'show proof request Job-Application' \ - 'set first_name to Alice' \ - 'set last_name to Garcia' \ - 'set phone_number to 123-456-7890' \ - 'show proof request Job-Application' \ - 'send proof Job-Application to Acme' \ - 'show connection Acme' \ - 'show claim Job-Certificate' \ - 'request claim Job-Certificate' \ - 'show claim Job-Certificate' \ - 'load sample/thrift-loan-application.indy' \ - 'accept request from Thrift' \ - 'show proof request Loan-Application-Basic' \ - 'send proof Loan-Application-Basic to Thrift' \ - 'show proof request Loan-Application-KYC' \ - 'send proof Loan-Application-KYC to Thrift' \ - 'save wallet' \ - ; \ - /bin/bash \ - " - @echo -e $(OK_COLOR) SUCCESS: Indy $(NO_COLOR) - -faber: - @echo -e $(BLUE_COLOR) FABER: Create Faber $(IPS) $(NO_COLOR) - docker run --rm --name Faber -d -p 5555:5555 indy-base /bin/bash -c "create_dirs.sh; generate_indy_pool_transactions --nodes 4 --clients 5 --ips $(IPS); sleep 40; python3 /usr/local/lib/python3.5/dist-packages/indy_client/test/agent/faber.py --port 5555" - @echo -e $(OK_COLOR) Faber success assumes IPS $(IPS) $(NO_COLOR) - -acme: - @echo -e $(BLUE_COLOR) ACME: Create Acme $(IPS) $(NO_COLOR) - docker run --rm --name Acme -d -p 6666:6666 indy-base /bin/bash -c "create_dirs.sh; generate_indy_pool_transactions 
--nodes 4 --clients 5 --ips $(IPS); sleep 40; python3 /usr/local/lib/python3.5/dist-packages/indy_client/test/agent/acme.py --port 6666" - @echo -e $(OK_COLOR) Acme success assumes IPS $(IPS) $(NO_COLOR) - -thrift: - @echo -e $(BLUE_COLOR) THRIFT: Create Thrift $(IPS) $(NO_COLOR) - docker run --rm --name Thrift -d -p 7777:7777 indy-base /bin/bash -c "create_dirs.sh; generate_indy_pool_transactions --nodes 4 --clients 5 --ips $(IPS); sleep 40; python3 /usr/local/lib/python3.5/dist-packages/indy_client/test/agent/thrift.py --port 7777" - @echo -e $(OK_COLOR) Thrift success assumes IPS $(IPS) $(NO_COLOR) - - -stop: - -docker stop Node1 - -docker stop Node2 - -docker stop Node3 - -docker stop Node4 - -docker stop Indy - -docker stop IndyAlice - -docker stop Faber - -docker stop Acme - -docker stop Thrift - -start: - -docker start Node1 - -docker start Node2 - -docker start Node3 - -docker start Node4 - -docker start Indy - -docker start IndyAlice - -docker start Faber - -docker start Acme - -docker start Thrift - -clean: - @echo -e $(BLUE_COLOR) CLEAN out docker images and prune $(NO_COLOR) - -docker rm -f Indy - -docker rm -f IndyAlice - -docker rm -f Faber - -docker rm -f Acme - -docker rm -f Thrift - -docker rm -f Node1 - -docker rm -f Node2 - -docker rm -f Node3 - -docker rm -f Node4 diff --git a/environment/docker/getting_started_turnkey/README.md b/environment/docker/getting_started_turnkey/README.md deleted file mode 100644 index 3e0795032..000000000 --- a/environment/docker/getting_started_turnkey/README.md +++ /dev/null @@ -1,211 +0,0 @@ -# Abstract - -A turnkey, Docker-based sandbox that enables quick and easy exploration of Hyperledger Indy concepts. This devops repo can be used to gather hands-on experience of Indy basics using the scenarios outlined in the [Sovrin's Getting Started Guide](https://github.com/hyperledger/indy-node/blob/stable/getting-started.md). 
- -## Quick Summary commands - -With just four command lines executed you have the Indy Demo ready to use. - -``` -$ git clone https://github.com/hyperledger/indy-node.git -$ cd indy-node/environment/docker/getting_started_turnkey -$ make indy-base -$ make run-demo -``` - -# Indy Docker - -A Docker file is provided that creates and configures Indy nodes and clients. The resulting Docker image can be used to instantiate the particants in the **Alice Demo** that are described in the [Sovrin's Getting Started Guide](https://github.com/hyperledger/indy-node/blob/stable/getting-started.md). - -## Dependencies - -While the Docker image that will be created below may run on many different versions of Docker, it was initially tested and verified on Docker v17.10.0-ce. To see what version of Docker is currently installed on your system, run: - -``` -$ docker --version -``` - -Information on downloading and installing Docker for various platforms can be found [here](https://www.docker.com/get-docker). - -## Step 1: Create the Indy Docker Image - -Clone the **indy-node** repository. - -``` -$ git clone https://github.com/hyperledger/indy-node.git -``` - -Change to the cloned directory and use the **Makefile** target **indy-base** to create the **indy-base** Docker image. - -``` -$ cd indy-node/environment/docker/getting_started_turnkey -$ make indy-base -``` - -Now, you should have a **indy-base** Docker image available to run. - -``` -$ docker images -REPOSITORY TAG IMAGE ID CREATED SIZE -indy-base latest 0e5fe43800da 43 hours ago 1.09GB -``` - -## Step 2: Run the Alice Demo - -You can set up and run the **Alice Demo** using the **indy-base** Docker image from Step 1. In the cloned directory there is a **Makefile** that can be used to start and stop all of the Docker containers used for the demo. - -The **run-demo** target starts a four-node pool (Node1-Node4), sets up and runs the Faber, Acme and Thrift agents, and starts an Indy CLI. 
- -``` -$ make run-demo -``` - -The **Makefile** has a number of targets that perform many tasks. An attempt is made to determine the local IP address. It can be checked using the **local** target. If you want to use a different IP address, you can edit the Makefile and set the LOCAL variable. - -To see what your local address is you can run the command with just the **local** target. - -``` -$ make local -``` - -After executing the **run-demo** target, you should have 8 Docker containers running. - -``` -$ docker ps -CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES -e26633e1d1f9 indy-base "/bin/bash -c ' ..." 10 seconds ago Up 11 seconds Indy -41e9fcc0733f indy-base "/bin/bash -c 'gen..." 11 seconds ago Up 12 seconds 0.0.0.0:7777->7777/tcp Thrift -287accdc16a2 indy-base "/bin/bash -c 'gen..." 12 seconds ago Up 12 seconds 0.0.0.0:6666->6666/tcp Acme -5d13e6af5836 indy-base "/bin/bash -c 'gen..." 13 seconds ago Up 13 seconds 0.0.0.0:5555->5555/tcp Faber -70126d9120f0 indy-base "/bin/bash -c 'ini..." 13 seconds ago Up 14 seconds 0.0.0.0:9707-9708->9707-9708/tcp Node4 -5305fcb69354 indy-base "/bin/bash -c 'ini..." 14 seconds ago Up 15 seconds 0.0.0.0:9705-9706->9705-9706/tcp Node3 -63932d40357e indy-base "/bin/bash -c 'ini..." 15 seconds ago Up 15 seconds 0.0.0.0:9703-9704->9703-9704/tcp Node2 -7e9f2f93f41e indy-base "/bin/bash -c 'ini..." 15 seconds ago Up 16 seconds 0.0.0.0:9701-9702->9701-9702/tcp Node1 -``` - -When the Indy container starts, it runs several Indy commands that set up the agents. Once the agents are operational, you are at the **indy>** prompt and the demo environment is ready for use. You can now follow the **Alice Demo** scenario. - -The following commands are from the demo script and can be used to test that the demo environment is working correctly. 
- -``` -indy@sandbox> prompt ALICE -ALICE@sandbox> new wallet Alice -ALICE@sandbox> show sample/faber-request.indy -ALICE@sandbox> load sample/faber-request.indy -ALICE@sandbox> show connection "Faber College" -ALICE@sandbox> accept request from "Faber College" -ALICE@sandbox> show claim Transcript -ALICE@sandbox> request claim Transcript -ALICE@sandbox> show claim Transcript -``` - -The entire **Alice Demo** can be run using the **run-alice** target. This does everything that the **run-demo** target does, plus executes the remaining Indy commands to run the entire demo. - -You will be left at the **indy>** prompt, allowing you to explore additional commands. To get a list of all Indy commands, enter **help**. - -The **exit** command will exit the Indy command prompt and leave you at the bash shell command line. You can explore the file system or run the Indy command prompt again by typing **indy**. - -There are several directories under **~/.indy-cli** that might be interesting to explore. The network configuration is in the **~/.indy-cli/networks/sandbox** directory, and the wallets are in the **~/.indy-cli/wallets/sandbox** directory. - - -## Makefile Targets - -The following **Makefile** targets can be used to start and stop the Docker containers and set up the demo environment used for the **Alice Demo**. - -**indy-base** - -* Create the Docker image that is used for both Indy nodes and clients. - -**local** - -* Find the local host IP address. - -**run-demo** - -* Start all Indy node, Indy agents and Indy CLI used for the **Alice Demo**. This also automatically executes several Indy commands that set up the agents before leaving you at the **indy>** prompt. - -**run-alice** - -* Start all Indy node, Indy agents and Indy CLI used for the **Alice Demo**. This also automatically executes all of the Indy commands that run the entire Alice demo before leaving you at the **indy>** prompt. 
- -**indy-cli** - -* Start a new Indy CLI client leaving you at the **indy>** prompt. - -**stop** - -* Stop all Docker containers used for the **Alice Demo**. - -**start** - -* Start all stopped Docker containers used for the **Alice Demo** that were stopped using the **stop** target. - -**clean** - -* Stop and remove all Docker containers used for the **Alice Demo**. - -## Troubleshooting - -Some failures running through the demo can be due to failure to contact the various service endpoints. Verify the IP addresses that the makefile is using and edit the Makefile LOCAL variable as necessary. - - - -## Using the Docker Image - -The **indy-base** Docker image is used for both Indy nodes and clients. - -You can run the Docker image and interact with it using a bash shell. - -``` -$ docker run -it --rm indy-base /bin/bash -``` - -To start the Docker image as an Indy client: - -``` -$ docker run -it --rm indy-base /bin/bash -# indy -Loading module /usr/local/lib/python3.5/dist-packages/config/config-crypto-example1.py -Module loaded. - -Indy-CLI (c) 2017 Evernym, Inc. -Type 'help' for more information. 
-Running Indy 1.2 - -indy> -``` - -To start the docker image as an Indy node: - -``` -$ docker run -it --rm indy-base /bin/bash -# init_indy_keys --name Alpha -# start_indy_node Alpha 0.0.0.0 9701 0.0.0.0 9702 -``` - -You can connect to an existing node: - -``` -$ docker exec -it Node1 /bin/bash -``` - -## Cleanup - -To stop and remove the created Docker containers from your system: - -``` -$ make clean -``` - -To remove the Docker image from your system: - -``` -$ docker rmi indy-base -``` - -## Links - -* [Getting Started with Indy](https://github.com/hyperledger/indy-node/blob/stable/getting-started.md) -* [Indy Node](https://github.com/hyperledger/indy-node) -* [Indy – Running the Getting Started tutorial locally](https://github.com/hyperledger/indy-node/blob/master/docs/indy-running-locally.md) -* [Create a Network and Start Nodes](https://github.com/hyperledger/indy-node/blob/master/docs/start-nodes.md) diff --git a/environment/docker/getting_started_turnkey/indy-base-dockerfile b/environment/docker/getting_started_turnkey/indy-base-dockerfile deleted file mode 100644 index e85ce3aa6..000000000 --- a/environment/docker/getting_started_turnkey/indy-base-dockerfile +++ /dev/null @@ -1,35 +0,0 @@ -FROM solita/ubuntu-systemd:16.04 - -# Install environment -RUN apt-get update -y && apt-get install -y \ - git \ - wget \ - python3.5 \ - python3-pip \ - python-setuptools \ - python3-nacl \ - apt-transport-https \ - ca-certificates \ - sudo \ - nano \ - emacs \ - vim -RUN pip3 install -U \ - 'pip<10.0.0' \ - setuptools -RUN apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 68DB5E88 -RUN apt-key adv --keyserver keyserver.ubuntu.com --recv-keys BD33704C -RUN echo "deb https://repo.sovrin.org/deb xenial master" >> /etc/apt/sources.list -RUN echo "deb https://repo.sovrin.org/sdk/deb xenial master" >> /etc/apt/sources.list -RUN apt-get update -y && apt-get install -y indy-node libindy -RUN pip3 install python3-indy -WORKDIR /home/indy -ADD . 
/home/indy - -# Set NETWORK_NAME in indy_config.py to 'sandbox' -RUN awk '{if (index($1, "NETWORK_NAME") != 0) {print("NETWORK_NAME = \"sandbox\"")} else print($0)}' /etc/indy/indy_config.py> /tmp/indy_config.py -RUN mv /tmp/indy_config.py /etc/indy/indy_config.py - -RUN echo " " >> /etc/indy/indy_config.py -RUN echo "logLevel=0" >> /etc/indy/indy_config.py -RUN echo " " >> /etc/indy/indy_config.py diff --git a/environment/docker/getting_started_turnkey/indy-cli b/environment/docker/getting_started_turnkey/indy-cli deleted file mode 100755 index cfb560322..000000000 --- a/environment/docker/getting_started_turnkey/indy-cli +++ /dev/null @@ -1,104 +0,0 @@ -#! /usr/bin/env python3 -""" -Convenience script for calling the indy command line interface (CLI). For now, -the CLI is designed for experimenting with the Indy Identity platform, and not -for creating a live consensus pool. For that, it's as simple as defining a node -registry, creating a looper, creating a node, and running it. - -$ indy-cli - -or supply a command to be executed first - -$ indy-cli "new nodes all" - -Spec: The indy-cli is a fix for the indy script to provide a command line - execution of commands which require somewhat means to be atomic. The indy - script as it is today fails for executing commands from the command line - if one command depends on the previous for completion. A good example - is to provide a list of commands to indy script and the last one as quit and - you can see that the indy script exits before finishing all the commands - due to asyncronous operation and no concept of atomic operations for commands. - For now this is a solution for the Alice demo to setup without a human linearly - stepping through commands and waiting for responses for the next. 
-""" - -import logging -import os -import sys -import asyncio -import time - -# NOTE: Loading of plugin should happen as early as possible -# So put all other required imports after loadPlugins function call below -from plenum.common.plugin_helper import loadPlugins -from indy_common.config_util import getConfig - -logging.root.handlers = [] -logger = logging.getLogger() -logger.propagate = False -logger.disabled = True - -config = getConfig() -baseDir = os.path.expanduser(config.CLI_BASE_DIR) -network_dir = os.path.expanduser(config.CLI_NETWORK_DIR) -if not os.path.exists(baseDir): - os.makedirs(baseDir) -if not os.path.exists(network_dir): - os.makedirs(network_dir) -loadPlugins(baseDir) - -# NOTE: Put all regular imports below (not related to loadplugin) -from indy_client.cli.cli import IndyCli -from stp_core.loop.looper import Looper - - -async def cmdline(cli,commands): - for command in commands: - if not command.startswith("--"): - print("\nRunning command: '{}'...\n".format(command)) - cli.parse(command) - await asyncio.sleep(3) - -def run_cli(): - - commands = sys.argv[1:] - - withNode = True if '--with-node' in commands else False - - - with Looper(debug=config.LOOPER_DEBUG) as looper: - curDir = os.getcwd() - logFilePath = os.path.join(curDir, config.logFilePath) - cli = IndyCli(looper=looper, - basedirpath=baseDir, - ledger_base_dir=network_dir, - logFileName=logFilePath, - withNode=withNode - ) - - looper.run(cmdline(cli,commands) ) - commands = [] - looper.run(cli.shell(*commands)) - - - -default_config = """ -[node_reg] -Alpha = 127.0.0.1 8001 -Beta = 127.0.0.1 8003 -Gamma = 127.0.0.1 8005 -Delta = 127.0.0.1 8007 - -[client_node_reg] -AlphaC = 127.0.0.1 8002 -BetaC = 127.0.0.1 8004 -GammaC = 127.0.0.1 8006 -DeltaC = 127.0.0.1 8008 - -[storage_locations] -basePath = ~ -""" - - -if __name__ == '__main__': - run_cli() diff --git a/environment/openshift/scripts/agent/start.sh b/environment/openshift/scripts/agent/start.sh deleted file mode 100644 index 
8c83977b1..000000000 --- a/environment/openshift/scripts/agent/start.sh +++ /dev/null @@ -1,16 +0,0 @@ -#!/bin/bash -SCRIPT_DIR=$(dirname $0) - -$SCRIPT_DIR/initialize.sh - -agentName="$(echo "$AGENT_NAME" | tr '[:upper:]' '[:lower:]')" - -# Examples: -# /usr/bin/env python3 /usr/local/lib/python3.5/dist-packages/indy_client/test/agent/faber.py --port 5555 > faber.log -# /usr/bin/env python3 /usr/local/lib/python3.5/dist-packages/indy_client/test/agent/acme.py --port 6666 > acme.log -# /usr/bin/env python3 /usr/local/lib/python3.5/dist-packages/indy_client/test/agent/thrift.py --port 7777 > thrift.log - -echo "Starting ${AGENT_NAME} agent node ..." -echo "/usr/bin/env python3 /usr/local/lib/python3.5/dist-packages/indy_client/test/agent/${agentName}.py --port ${AGENT_PORT} > ${agentName}.log" -echo -exec /usr/bin/env python3 /usr/local/lib/python3.5/dist-packages/indy_client/test/agent/${agentName}.py --port ${AGENT_PORT} > ${agentName}.log \ No newline at end of file diff --git a/environment/openshift/scripts/client/start.sh b/environment/openshift/scripts/client/start.sh deleted file mode 100644 index ef22940c4..000000000 --- a/environment/openshift/scripts/client/start.sh +++ /dev/null @@ -1,14 +0,0 @@ -#!/bin/bash -SCRIPT_DIR=$(dirname $0) - -$SCRIPT_DIR/initialize.sh - - - -echo "This client is deprecated! Please, use the new libindy-based CLI: https://github.com/hyperledger/indy-sdk/tree/master/cli" -echo "Starting indy client node ..." -echo "The indy cli will not keep the pod running, so instead we'll sleep for infinity." -echo "To use the indy cli, rsh into the pod and run the cli in the session." 
-echo -sleep infinity -# indy \ No newline at end of file diff --git a/environment/vagrant/sandbox/DevelopmentEnvironment/AWS/scriptlets/common/pre/linux/setup b/environment/vagrant/sandbox/DevelopmentEnvironment/AWS/scriptlets/common/pre/linux/setup index cb6244347..b5b1b94df 100644 --- a/environment/vagrant/sandbox/DevelopmentEnvironment/AWS/scriptlets/common/pre/linux/setup +++ b/environment/vagrant/sandbox/DevelopmentEnvironment/AWS/scriptlets/common/pre/linux/setup @@ -102,39 +102,8 @@ cd /src/libsodium-${LIBSODIUM_VERSION} make make install -#-------------------------------------------------------- -# Indy Client - Prerequsites -echo "Install Indy CLI prerequisites..." -mkdir -p /home/$USER/indy-cli && cd /home/$USER/indy-cli -if [ ! -d ./indy-anoncreds ]; then - git clone https://github.com/hyperledger/indy-anoncreds.git -fi -cd indy-anoncreds -sed -i -- 's#if \[ -f /etc/redhat-release \]#if \[ -f /etc/redhat-release \] \|\| grep -q "Amazon Linux AMI" /etc/os-release#' ./setup-charm.sh - -# Comment taken from ./setup-charm.sh -# -# Ensure that you are using pip. for installation. -# Use link to refer pip. using pip command -# -# Ugg! -# -# We installed Python from source. The prefix was set to /usr during -# installation. -# -# TODO: Figure out how to activate a Python virtualenv on 'vagrant up' (non -# interactive shell) -ln -sf /usr/bin/pip${PYTHON_PIP_VERSION} /usr/bin/pip -./setup-charm.sh - #-------------------------------------------------------- # Setup a Python virtual environment with Python 3.6 #cd /home/$USER #echo "Setup and activate a Python virtual environment..." #virtualenv -p python${PYTHON_MAJOR_VERSION}.${PYTHON_MINOR_VERSION} indy-client -#source ./indy-client/bin/activate - -#-------------------------------------------------------- -# Install indy client in the Python virtual environment -echo "Install indy (CLI) client..." 
-pip install indy diff --git a/indy_client/agent/agent.py b/indy_client/agent/agent.py deleted file mode 100644 index ba636b3b8..000000000 --- a/indy_client/agent/agent.py +++ /dev/null @@ -1,202 +0,0 @@ -import asyncio -import os -from typing import Tuple - -from plenum.common.motor import Motor -from plenum.common.signer_did import DidSigner -from plenum.common.signer_simple import SimpleSigner -from plenum.common.startable import Status -from plenum.common.types import HA -from plenum.common.util import randomString -from indy_client.agent.agent_net import AgentNet -from indy_client.client.client import Client -from indy_client.client.wallet.wallet import Wallet - -from indy_common.config import agentLoggingLevel -from indy_common.config_util import getConfig -from indy_common.identity import Identity -from indy_common.strict_types import strict_types, decClassMethods - -from stp_core.common.log import getlogger -from stp_core.network.port_dispenser import genHa -from stp_core.network.util import checkPortAvailable -from stp_core.types import Identifier - -logger = getlogger() -logger.setLevel(agentLoggingLevel) - - -@decClassMethods(strict_types()) -class Agent(Motor, AgentNet): - def __init__(self, - name: str=None, - basedirpath: str=None, - client: Client=None, - port: int=None, - loop=None, - config=None, - endpointArgs=None): - - self.endpoint = None - if port: - checkPortAvailable(HA("0.0.0.0", port)) - Motor.__init__(self) - self.loop = loop or asyncio.get_event_loop() - self._eventListeners = {} # Dict[str, set(Callable)] - self._name = name or 'Agent' - self._port = port - - self.config = config or getConfig() - self.basedirpath = basedirpath or os.path.expanduser( - self.config.CLI_BASE_DIR) - self.endpointArgs = endpointArgs - - # Client used to connect to Indy and forward on owner's txns - self._client = client # type: Client - - # known identifiers of this agent's owner - self.ownerIdentifiers = {} # type: Dict[Identifier, Identity] - - self.logger 
= logger - - @property - def client(self): - return self._client - - @client.setter - def client(self, client): - self._client = client - - @property - def name(self): - return self._name - - @property - def port(self): - return self._port - - async def prod(self, limit) -> int: - c = 0 - if self.get_status() == Status.starting: - self.status = Status.started - c += 1 - if self.client: - c += await self.client.prod(limit) - if self.endpoint: - c += await self.endpoint.service(limit) - return c - - def start(self, loop): - AgentNet.__init__(self, - name=self._name.replace(" ", ""), - port=self._port, - basedirpath=self.basedirpath, - msgHandler=self.handleEndpointMessage, - config=self.config, - endpoint_args=self.endpointArgs) - - super().start(loop) - if self.client: - self.client.start(loop) - if self.endpoint: - self.endpoint.start() - - def stop(self, *args, **kwargs): - super().stop(*args, **kwargs) - if self.client: - self.client.stop() - if self.endpoint: - self.endpoint.stop() - - def _statusChanged(self, old, new): - pass - - def onStopping(self, *args, **kwargs): - pass - - def connect(self, network: str): - """ - Uses the client to connect to Indy - :param network: (test|live) - :return: - """ - raise NotImplementedError - - def syncKeys(self): - """ - Iterates through ownerIdentifiers and ensures the keys are correct - according to Indy. Updates the updated - :return: - """ - raise NotImplementedError - - def handleOwnerRequest(self, request): - """ - Consumes an owner request, verifies it's authentic (by checking against - synced owner identifiers' keys), and handles it. 
- :param request: - :return: - """ - raise NotImplementedError - - def handleEndpointMessage(self, msg): - raise NotImplementedError - - def ensureConnectedToDest(self, name, ha, clbk, *args): - if self.endpoint.isConnectedTo(name=name, ha=ha): - if clbk: - clbk(*args) - else: - self.loop.call_later(.2, self.ensureConnectedToDest, - name, ha, clbk, *args) - - def sendMessage(self, msg, name: str = None, ha: Tuple = None): - self.endpoint.send(msg, name, ha) - logger.debug("Message sent (to -> {}): {}".format(ha, msg)) - - def registerEventListener(self, eventName, listener): - cur = self._eventListeners.get(eventName) - if cur: - self._eventListeners[eventName] = cur.add(listener) - else: - self._eventListeners[eventName] = {listener} - - def deregisterEventListener(self, eventName, listener): - cur = self._eventListeners.get(eventName) - if cur: - self._eventListeners[eventName] = cur - set(listener) - - -def createAgent(agentClass, name, wallet=None, basedirpath=None, port=None, - loop=None, clientClass=Client): - config = getConfig() - - if not wallet: - wallet = Wallet(name) - wallet.addIdentifier(signer=DidSigner( - seed=randomString(32).encode('utf-8'))) - if not basedirpath: - basedirpath = config.CLI_BASE_DIR - if not port: - _, port = genHa() - - client = create_client(base_dir_path=basedirpath, client_class=clientClass) - - return agentClass(basedirpath=basedirpath, - client=client, - wallet=wallet, - port=port, - loop=loop) - - -def create_client(base_dir_path=None, client_class=Client): - config = getConfig() - - if not base_dir_path: - base_dir_path = config.CLI_BASE_DIR - - _, clientPort = genHa() - client = client_class(randomString(6), - ha=("0.0.0.0", clientPort), - basedirpath=base_dir_path) - return client diff --git a/indy_client/agent/agent_cli.py b/indy_client/agent/agent_cli.py deleted file mode 100644 index 717079cd5..000000000 --- a/indy_client/agent/agent_cli.py +++ /dev/null @@ -1,75 +0,0 @@ -from indy_client.cli.cli import IndyCli - - 
-class AgentCli(IndyCli): - def __init__(self, name=None, agentCreator=None, *args, **kwargs): - if name is not None: - self.name = name - - init_agent = kwargs.get('agent', None) - if 'agent' in kwargs: - kwargs.pop('agent') - - super().__init__(*args, **kwargs) - - self._activeWallet = None - - if init_agent is not None: - self.agent = init_agent - if name is None: - self.name = init_agent.name - - @property - def actions(self): - if not self._actions: - self._actions = [self._simpleAction, self._helpAction, - self._listIdsAction, self._changePrompt, - self._listWalletsAction, self._showFile, - self._showConnection, self._pingTarget, - self._listConnections, self._sendProofRequest] - return self._actions - - def getKeyringsBaseDir(self): - return self.agent.getContextDir() - - def getContextBasedKeyringsBaseDir(self): - return self.agent.getContextDir() - - def getAllSubDirNamesForKeyrings(self): - return ["issuer"] - - def getTopComdMappingKeysForHelp(self): - return ['helpAction'] - - def getComdMappingKeysToNotShowInHelp(self): - allowedCmds = [func.__name__.replace("_", "") for func in self.actions] - return {k: v for (k, v) in - self.cmdHandlerToCmdMappings().items() if k not in allowedCmds} - - def getBottomComdMappingKeysForHelp(self): - return ['licenseAction', 'exitAction'] - - def restoreLastActiveWallet(self): - pass - - def _saveActiveWallet(self): - pass - - def printSuggestion(self, msgs): - self.print("\n") - # TODO: as of now we are not printing the suggestion (msg) - # because, those suggestion may not be intented or may not work - # correctly for agents, so when such requirement will come, - # we can look this again. 
- - @property - def activeWallet(self): - return self.agent._wallet - - @activeWallet.setter - def activeWallet(self, wallet): - pass - - @property - def walletSaver(self): - return self.agent.walletSaver diff --git a/indy_client/agent/agent_issuer.py b/indy_client/agent/agent_issuer.py deleted file mode 100644 index c841f45c6..000000000 --- a/indy_client/agent/agent_issuer.py +++ /dev/null @@ -1,83 +0,0 @@ -import json -from plenum.common.types import f - -from anoncreds.protocol.issuer import Issuer -from anoncreds.protocol.types import ID -from anoncreds.protocol.types import ClaimRequest -from indy_client.agent.constants import EVENT_NOTIFY_MSG, CLAIMS_LIST_FIELD -from indy_client.agent.msg_constants import CLAIM, CLAIM_REQ_FIELD, CLAIM_FIELD, \ - AVAIL_CLAIM_LIST, REVOC_REG_SEQ_NO, SCHEMA_SEQ_NO, ISSUER_DID -from indy_common.identity import Identity -from plenum.common.constants import DATA -from indy_client.client.wallet.attribute import Attribute - - -class AgentIssuer: - def __init__(self, issuer: Issuer): - self.issuer = issuer - - async def processReqAvailClaims(self, msg): - body, (frm, ha) = msg - link = self.verifyAndGetLink(msg) - data = { - CLAIMS_LIST_FIELD: self.get_available_claim_list(link) - } - resp = self.getCommonMsg(AVAIL_CLAIM_LIST, data) - self.signAndSend(resp, link.localIdentifier, frm) - - async def processReqClaim(self, msg): - body, (frm, _) = msg - link = self.verifyAndGetLink(msg) - if not link: - raise NotImplementedError - - claimReqDetails = body[DATA] - - schemaId = ID(schemaId=claimReqDetails[SCHEMA_SEQ_NO]) - schema = await self.issuer.wallet.getSchema(schemaId) - - if not self.is_claim_available(link, schema.name): - self.notifyToRemoteCaller( - EVENT_NOTIFY_MSG, "This claim is not yet available.", - self.wallet.defaultId, frm, - origReqId=body.get(f.REQ_ID.nm)) - return - - public_key = await self.issuer.wallet.getPublicKey(schemaId) - claimReq = ClaimRequest.from_str_dict( - claimReqDetails[CLAIM_REQ_FIELD], public_key.N) - 
- self._add_attribute( - schemaKey=schema.getKey(), - proverId=claimReq.userId, - link=link) - - claim_signature, claim_attributes = await self.issuer.issueClaim(schemaId, claimReq) - - claimDetails = { - f.SIG.nm: claim_signature.to_str_dict(), - ISSUER_DID: schema.issuerId, - CLAIM_FIELD: json.dumps({k: v.to_str_dict() for k, v in claim_attributes.items()}), - REVOC_REG_SEQ_NO: None, - SCHEMA_SEQ_NO: claimReqDetails[SCHEMA_SEQ_NO] - } - - resp = self.getCommonMsg(CLAIM, claimDetails) - self.signAndSend(resp, link.localIdentifier, frm, - origReqId=body.get(f.REQ_ID.nm)) - - def _add_attribute(self, schemaKey, proverId, link): - attr = self.issuer_backend.get_record_by_internal_id(link.internalId) - self.issuer._attrRepo.addAttributes(schemaKey=schemaKey, - userId=proverId, - attributes=attr) - - def publish_trust_anchor(self, idy: Identity): - self.wallet.addTrustAnchoredIdentity(idy) - reqs = self.wallet.preparePending() - self.client.submitReqs(*reqs) - - def publish_trust_anchor_attribute(self, attrib: Attribute): - self.wallet.addAttribute(attrib) - reqs = self.wallet.preparePending() - self.client.submitReqs(*reqs) diff --git a/indy_client/agent/agent_net.py b/indy_client/agent/agent_net.py deleted file mode 100644 index 7085034d4..000000000 --- a/indy_client/agent/agent_net.py +++ /dev/null @@ -1,23 +0,0 @@ -from indy_client.agent.endpoint import ZEndpoint - - -class AgentNet: - """ - Mixin for Agents to encapsulate the network interface to communicate with - other agents. 
- """ - - def __init__(self, name, port, msgHandler, config, basedirpath=None, - endpoint_args=None): - if port: - endpoint_args = endpoint_args or {} - seed = endpoint_args.get('seed') - onlyListener = endpoint_args.get('onlyListener', False) - self.endpoint = ZEndpoint(port=port, - msgHandler=msgHandler, - name=name, - basedirpath=basedirpath, - seed=seed, - onlyListener=onlyListener) - else: - self.endpoint = None diff --git a/indy_client/agent/agent_prover.py b/indy_client/agent/agent_prover.py deleted file mode 100644 index 166bff3b3..000000000 --- a/indy_client/agent/agent_prover.py +++ /dev/null @@ -1,206 +0,0 @@ -import asyncio -import json -from typing import Any -from collections import OrderedDict - -from plenum.common.constants import NONCE, TYPE, IDENTIFIER, DATA -from plenum.common.types import f -from plenum.common.util import getCryptonym - -from anoncreds.protocol.prover import Prover -from anoncreds.protocol.types import SchemaKey, ID, Claims, ClaimAttributeValues, ProofRequest -from indy_client.agent.msg_constants import CLAIM_REQUEST, PROOF, CLAIM_FIELD, \ - CLAIM_REQ_FIELD, PROOF_FIELD, \ - REQ_AVAIL_CLAIMS, ISSUER_DID, SCHEMA_SEQ_NO, PROOF_REQUEST_FIELD -from indy_client.client.wallet.connection import Connection -from indy_common.exceptions import LinkNotReady - - -class AgentProver: - def __init__(self, prover: Prover): - self.prover = prover - - def sendRequestForAvailClaims(self, link: Connection): - if self.loop.is_running(): - self.loop.call_soon(asyncio.ensure_future, - self.sendRequestForAvailClaimsAsync(link)) - else: - self.loop.run_until_complete( - self.sendRequestForAvailClaimsAsync(link)) - - async def sendRequestForAvailClaimsAsync(self, link: Connection): - op = { - TYPE: REQ_AVAIL_CLAIMS, - NONCE: link.request_nonce - } - try: - self.signAndSendToLink(msg=op, linkName=link.name) - except LinkNotReady as ex: - self.notifyMsgListener(str(ex)) - - def sendReqClaim(self, link: Connection, schemaKey): - if self.loop.is_running(): - 
self.loop.call_soon(asyncio.ensure_future, - self.send_claim(link, schemaKey)) - else: - self.loop.run_until_complete( - self.send_claim(link, schemaKey)) - - # async def send_claim(self, link, claim_to_request): - # return await self.sendReqClaimAsync(link, claim_to_request) - - async def send_claim(self, link: Connection, schema_key): - name, version, origin = schema_key - schema_key = SchemaKey(name, version, origin) - - claimReq = await self.prover.createClaimRequest( - schemaId=ID(schema_key), - proverId=link.request_nonce, - reqNonRevoc=False) - - # It has served its purpose by this point. Claim Requests do not need a - # nonce. - schema = await self.prover.wallet.getSchema(ID(schema_key)) - - claimRequestDetails = { - SCHEMA_SEQ_NO: schema.seqId, - ISSUER_DID: origin, - CLAIM_REQ_FIELD: claimReq.to_str_dict() - } - - op = { - TYPE: CLAIM_REQUEST, - NONCE: link.request_nonce, - DATA: claimRequestDetails - } - - self.signAndSendToLink(msg=op, linkName=link.name) - - def handleProofRequest(self, msg): - body, _ = msg - link = self._getLinkByTarget(getCryptonym(body.get(IDENTIFIER))) - proofRequest = body.get(PROOF_REQUEST_FIELD) - proofRequest = ProofRequest.from_str_dict(proofRequest) - proofReqExist = False - - for request in link.proofRequests: - if request.name == proofRequest.name: - proofReqExist = True - break - - self.notifyMsgListener(' Proof request {} received from {}.\n' - .format(proofRequest.name, link.name)) - - if not proofReqExist: - link.proofRequests.append(proofRequest) - else: - self.notifyMsgListener(' Proof request {} already exist.\n' - .format(proofRequest.name)) - - async def handleReqClaimResponse(self, msg): - body, _ = msg - issuerId = body.get(IDENTIFIER) - claim = body[DATA] - li = self._getLinkByTarget(getCryptonym(issuerId)) - if li: - schemaId = ID(schemaId=claim[SCHEMA_SEQ_NO]) - schema = await self.prover.wallet.getSchema(schemaId) - - self.notifyResponseFromMsg(li.name, body.get(f.REQ_ID.nm)) - self.notifyMsgListener( - ' 
Received claim "{}".\n'.format(schema.name)) - - pk = await self.prover.wallet.getPublicKey(schemaId) - - claim_attributes = {k: ClaimAttributeValues.from_str_dict( - v) for k, v in json.loads(claim[CLAIM_FIELD]).items()} - claim_signature = Claims.from_str_dict(claim[f.SIG.nm], pk.N) - - await self.prover.processClaim(schemaId, claim_attributes, claim_signature) - else: - self.notifyMsgListener("No matching connection found") - - def sendProof(self, link: Connection, proofReq: ProofRequest): - if self.loop.is_running(): - self.loop.call_soon(asyncio.ensure_future, - self.sendProofAsync(link, proofReq)) - else: - self.loop.run_until_complete(self.sendProofAsync(link, proofReq)) - - async def sendProofAsync(self, link: Connection, proofRequest: ProofRequest): - # TODO _F_ this nonce should be from the Proof Request, not from an - # invitation - # TODO rename presentProof to buildProof or generateProof - - proof = await self.prover.presentProof(proofRequest) - proof.requestedProof.self_attested_attrs.update( - proofRequest.selfAttestedAttrs) - - op = { - TYPE: PROOF, - NONCE: link.request_nonce, - PROOF_FIELD: proof.to_str_dict(), - PROOF_REQUEST_FIELD: proofRequest.to_str_dict() - } - - self.signAndSendToLink(msg=op, linkName=link.name) - - def handleProofStatusResponse(self, msg: Any): - body, _ = msg - data = body.get(DATA) - identifier = body.get(IDENTIFIER) - li = self._getLinkByTarget(getCryptonym(identifier)) - self.notifyResponseFromMsg(li.name, body.get(f.REQ_ID.nm)) - self.notifyMsgListener(data) - - async def getMatchingConnectionsWithReceivedClaimAsync(self, claimName=None): - matchingLinkAndAvailableClaim = self.wallet.getMatchingConnectionsWithAvailableClaim( - claimName) - matchingLinkAndReceivedClaim = [] - for li, cl in matchingLinkAndAvailableClaim: - name, version, origin = cl - schemaKeyId = ID( - SchemaKey(name=name, version=version, issuerId=origin)) - schema = await self.prover.wallet.getSchema(schemaKeyId) - claimAttrs = OrderedDict() - for 
attr in schema.attrNames: - claimAttrs[attr] = None - attrs = None - try: - attrs = await self.prover.wallet.getClaimAttributes(schemaKeyId) - except ValueError: - pass # it means no claim was issued - - if attrs: - if set(claimAttrs.keys()).intersection(attrs.keys()): - for k in claimAttrs.keys(): - claimAttrs[k] = attrs[k].raw - matchingLinkAndReceivedClaim.append((li, cl, claimAttrs)) - return matchingLinkAndReceivedClaim - - async def getMatchingRcvdClaimsAsync(self, attributes): - linksAndReceivedClaim = await self.getMatchingConnectionsWithReceivedClaimAsync() - attributes = set(attributes) - - matchingLinkAndRcvdClaim = [] - for li, cl, issuedAttrs in linksAndReceivedClaim: - if attributes.intersection(issuedAttrs.keys()): - matchingLinkAndRcvdClaim.append((li, cl, issuedAttrs)) - return matchingLinkAndRcvdClaim - - async def getClaimsUsedForAttrs(self, attributes): - allMatchingClaims = await self.getMatchingConnectionsWithReceivedClaimAsync() - alreadySatisfiedKeys = {} - claimsToUse = [] - alreadyAddedClaims = [] - - for li, cl, issuedAttrs in allMatchingClaims: - issuedClaimKeys = issuedAttrs.keys() - for key in attributes.keys(): - if key not in alreadySatisfiedKeys and key in issuedClaimKeys: - if li not in alreadyAddedClaims: - claimsToUse.append((li, cl, issuedAttrs)) - alreadySatisfiedKeys[key] = True - alreadyAddedClaims.append(li) - - return claimsToUse diff --git a/indy_client/agent/agent_verifier.py b/indy_client/agent/agent_verifier.py deleted file mode 100644 index 0d55998b1..000000000 --- a/indy_client/agent/agent_verifier.py +++ /dev/null @@ -1,96 +0,0 @@ -from typing import Any - -from plenum.common.constants import NAME, NONCE, TYPE, DATA, VERSION, \ - ATTRIBUTES, VERIFIABLE_ATTRIBUTES, PREDICATES -from plenum.common.types import f - -from anoncreds.protocol.types import FullProof, ProofInfo, ID, AggregatedProof, RequestedProof -from anoncreds.protocol.types import ProofRequest -from anoncreds.protocol.verifier import Verifier -from 
indy_client.agent.msg_constants import PROOF_STATUS, PROOF_FIELD, PROOF_REQUEST, \ - PROOF_REQUEST_FIELD, ERR_NO_PROOF_REQUEST_SCHEMA_FOUND -from indy_client.client.wallet.connection import Connection -from indy_common.util import getNonceForProof - - -class AgentVerifier(Verifier): - def __init__(self, verifier: Verifier): - self.verifier = verifier - - async def verifyProof(self, msg: Any): - body, (frm, _) = msg - link = self.verifyAndGetLink(msg) - if not link: - raise NotImplementedError - - proof = body[PROOF_FIELD] - proofRequest = ProofRequest.from_str_dict(body[PROOF_REQUEST_FIELD]) - nonce = getNonceForProof(body[NONCE]) - proofName = proofRequest.name - - proofs = {} - - for key, p in proof['proofs'].items(): - schema = await self.verifier.wallet.getSchema(ID(schemaId=int(p['schema_seq_no']))) - pk = await self.verifier.wallet.getPublicKey(ID(schemaKey=schema.getKey())) - proofs[key] = ProofInfo.from_str_dict(p, str(pk.N)) - - proof = FullProof( - proofs, AggregatedProof.from_str_dict( - proof['aggregated_proof']), RequestedProof.from_str_dict( - proof['requested_proof'])) - - result = await self.verifier.verify(proofRequest, proof) - - self.logger.info('Proof "{}" accepted with nonce {}' - .format(proofName, nonce)) - self.logger.info('Verifying proof "{}" from {}' - .format(proofName, link.name)) - status = 'verified' if result else 'failed verification' - resp = { - TYPE: PROOF_STATUS, - DATA: ' Your Proof {} {} was received and {}\n'. - format(proofRequest.name, proofRequest.version, status), - } - self.signAndSend(resp, link.localIdentifier, frm, - origReqId=body.get(f.REQ_ID.nm)) - - if result: - for uuid, attribute in proofRequest.verifiableAttributes.items(): - # Log attributes that were verified - self.logger.info( - 'verified {}: {}'. 
format( - attribute.name, - proof.requestedProof.revealed_attrs[uuid][1])) - self.logger.info('Verified that proof "{}" contains attributes ' - 'from claim(s) issued by: {}'.format( - proofName, ", ".join( - sorted([v.issuer_did for k, v in proof.proofs.items()])))) - await self._postProofVerif(proofName, link, frm) - else: - self.logger.info('Verification failed for proof {} from {} ' - .format(proofName, link.name)) - - def sendProofReq(self, link: Connection, proofReqSchemaKey): - if self._proofRequestsSchema and ( - proofReqSchemaKey in self._proofRequestsSchema): - proofRequest = self._proofRequestsSchema[proofReqSchemaKey] - - proofRequest = ProofRequest( - proofRequest[NAME], - proofRequest[VERSION], - getNonceForProof(link.request_nonce), - proofRequest[ATTRIBUTES], - proofRequest[VERIFIABLE_ATTRIBUTES] if VERIFIABLE_ATTRIBUTES in proofRequest else [ - ], - proofRequest[PREDICATES] if PREDICATES in proofRequest else [] - ) - - op = { - TYPE: PROOF_REQUEST, - PROOF_REQUEST_FIELD: proofRequest.to_str_dict() - } - - self.signAndSendToLink(msg=op, linkName=link.name) - else: - return ERR_NO_PROOF_REQUEST_SCHEMA_FOUND diff --git a/indy_client/agent/backend.py b/indy_client/agent/backend.py deleted file mode 100644 index 7823bfc2b..000000000 --- a/indy_client/agent/backend.py +++ /dev/null @@ -1,3 +0,0 @@ -class BackendSystem: - def get_record_by_internal_id(self, internal_id): - raise NotImplementedError diff --git a/indy_client/agent/caching.py b/indy_client/agent/caching.py deleted file mode 100644 index 35ceb04c0..000000000 --- a/indy_client/agent/caching.py +++ /dev/null @@ -1,24 +0,0 @@ -from plenum.common.exceptions import NotConnectedToAny -from indy_common.identity import Identity - - -class Caching: - """ - Mixin for agents to manage caching. - - Dev notes: Feels strange to inherit from WalletedAgent, but self-typing - doesn't appear to be implemented in Python yet. 
- """ - - def getClient(self): - if self.client: - return self.client - else: - raise NotConnectedToAny - - def getIdentity(self, identifier): - identity = Identity(identifier=identifier) - req = self.wallet.requestIdentity(identity, - sender=self.wallet.defaultId) - self.getClient().submitReqs(req) - return req diff --git a/indy_client/agent/constants.py b/indy_client/agent/constants.py deleted file mode 100644 index 4c7a646eb..000000000 --- a/indy_client/agent/constants.py +++ /dev/null @@ -1,13 +0,0 @@ -ALREADY_ACCEPTED_FIELD = 'alreadyAccepted' -CLAIMS_LIST_FIELD = 'availableClaimsList' -CLAIMS_FIELD = 'claims' -REQ_MSG = "REQ_MSG" -PING = "ping" -PONG = "pong" -ERROR = "error" -EVENT = "event" -EVENT_NAME = "eventName" -EVENT_NOTIFY_MSG = "NOTIFY" -EVENT_MSG_RECEIVED = "MSG_RECEIVED" -EVENT_POST_ACCEPT_INVITE = "POST_ACCEPT_INVITE_EVENT" -EVENT_NOT_CONNECTED_TO_ANY_ENV = "NOT_CONNECTED_TO_ANY_ENV" diff --git a/indy_client/agent/endpoint.py b/indy_client/agent/endpoint.py deleted file mode 100644 index 69d63afb1..000000000 --- a/indy_client/agent/endpoint.py +++ /dev/null @@ -1,45 +0,0 @@ -import os - -from typing import Callable - -from plenum.common.message_processor import MessageProcessor - -from stp_core.common.log import getlogger -from stp_core.network.auth_mode import AuthMode -from plenum.common.util import randomString -from stp_core.crypto.util import randomSeed -from stp_core.types import HA -from stp_zmq.simple_zstack import SimpleZStack - -logger = getlogger() - - -class EndpointCore(MessageProcessor): - - def tracedMsgHandler(self, msg): - logger.debug("Got {}".format(msg)) - self.msgHandler(msg) - - -class ZEndpoint(SimpleZStack, EndpointCore): - def __init__(self, port: int, msgHandler: Callable, - name: str=None, basedirpath: str=None, seed=None, - onlyListener=False, msgRejectHandler=None): - stackParams = { - "name": name or randomString(8), - "ha": HA("0.0.0.0", port), - "auth_mode": AuthMode.ALLOW_ANY.value - } - if basedirpath: - 
stackParams["basedirpath"] = os.path.join(basedirpath, "keys") - - seed = seed or randomSeed() - SimpleZStack.__init__( - self, - stackParams, - self.tracedMsgHandler, - seed=seed, - onlyListener=onlyListener, - msgRejectHandler=msgRejectHandler) - - self.msgHandler = msgHandler diff --git a/indy_client/agent/exception.py b/indy_client/agent/exception.py deleted file mode 100644 index 7377890b3..000000000 --- a/indy_client/agent/exception.py +++ /dev/null @@ -1,7 +0,0 @@ - -class NonceNotFound(RuntimeError): - pass - - -class SignatureRejected(RuntimeError): - pass diff --git a/indy_client/agent/helper.py b/indy_client/agent/helper.py deleted file mode 100644 index 665aa8b04..000000000 --- a/indy_client/agent/helper.py +++ /dev/null @@ -1,81 +0,0 @@ -import os - -from plenum.common.signer_did import DidSigner -from plenum.common.util import friendlyToRaw, rawToFriendly -from indy_client.client.wallet.wallet import Wallet -from indy_common.config_util import getConfig - -from stp_core.crypto.util import ed25519PkToCurve25519 - - -def processInvAccept(wallet, msg): - pass - - -def rawVerkeyToPubkey(raw_verkey): - return ed25519PkToCurve25519(raw_verkey) - - -def friendlyVerkeyToPubkey(verkey): - vkRaw = friendlyToRaw(verkey) - pkraw = ed25519PkToCurve25519(vkRaw) - return rawToFriendly(pkraw) - - -def getClaimVersionFileName(agentName): - return agentName.replace(" ", "-").lower() + "-schema-version.txt" - - -def updateAndGetNextClaimVersionNumber(basedirpath, fileName): - claimVersionFilePath = '{}/{}'.format(basedirpath, fileName) - # get version number from file - claimVersionNumber = 0.01 - if os.path.isfile(claimVersionFilePath): - with open(claimVersionFilePath, mode='r+') as file: - claimVersionNumber = float(file.read()) + 0.001 - file.seek(0) - # increment version and update file - file.write(str(claimVersionNumber)) - file.truncate() - else: - with open(claimVersionFilePath, mode='w') as file: - file.write(str(claimVersionNumber)) - return 
claimVersionNumber - - -def build_wallet_core(wallet_name, seed_file): - config = getConfig() - baseDir = os.path.expanduser(config.CLI_BASE_DIR) - - seedFilePath = '{}/{}'.format(baseDir, seed_file) - seed = wallet_name + '0' * (32 - len(wallet_name)) - - # if seed file is available, read seed from it - if os.path.isfile(seedFilePath): - with open(seedFilePath, mode='r+') as file: - seed = file.read().strip(' \t\n\r') - wallet = Wallet(wallet_name) - - seed = bytes(seed, encoding='utf-8') - wallet.addIdentifier(signer=DidSigner(seed=seed)) - - return wallet - - -async def bootstrap_schema(agent, attrib_def_name, schema_name, schema_version, p_prime, q_prime): - schema_id = await agent.publish_schema(attrib_def_name, - schema_name=schema_name, - schema_version=schema_version) - - _, _ = await agent.publish_issuer_keys(schema_id, p_prime=p_prime, q_prime=q_prime) - - # TODO not fully implemented yet! - # await agent.publish_revocation_registry(schema_id=schema_id) - - return schema_id - - -def buildAgentWallet(name, seed): - wallet = Wallet(name) - wallet.addIdentifier(signer=DidSigner(seed=seed)) - return wallet diff --git a/indy_client/agent/jsonpickle_util.py b/indy_client/agent/jsonpickle_util.py deleted file mode 100644 index 2a4618f17..000000000 --- a/indy_client/agent/jsonpickle_util.py +++ /dev/null @@ -1,57 +0,0 @@ -import jsonpickle - -from anoncreds.protocol.types import PublicKey, RevocationPublicKey, \ - SecretKey, RevocationSecretKey, AccumulatorSecretKey -from anoncreds.protocol.utils import toDictWithStrValues, fromDictWithStrValues - -DATA_KEY = 'py/integer-element' - - -class CommonIntegerElementHandler(jsonpickle.handlers.BaseHandler): - def flatten(self, obj, data): - data[DATA_KEY] = obj.toStrDict() - return data - - def restore(self, obj): - cls = self._getClass() - return cls.fromStrDict(obj[DATA_KEY]) - - def _getClass(self): - raise NotImplemented - - -class PublicKeyHandler(CommonIntegerElementHandler): - def _getClass(self): - return 
PublicKey - - -class RevocationPublicKeyHandler(CommonIntegerElementHandler): - def _getClass(self): - return RevocationPublicKey - - -class SecretKeyHandler(CommonIntegerElementHandler): - def _getClass(self): - return SecretKey - - -class RevocationSecretKeyHandler(CommonIntegerElementHandler): - def _getClass(self): - return RevocationSecretKey - - -class AccumulatorSecretKeyHandler(CommonIntegerElementHandler): - def _getClass(self): - return AccumulatorSecretKey - - -def setUpJsonpickle(): - customHandlers = [ - (PublicKey, PublicKeyHandler), - (RevocationPublicKey, RevocationPublicKeyHandler), - (SecretKey, SecretKeyHandler), - (RevocationSecretKey, RevocationSecretKeyHandler), - (AccumulatorSecretKey, AccumulatorSecretKeyHandler) - ] - for cls, handler in customHandlers: - jsonpickle.handlers.register(cls, handler, base=True) diff --git a/indy_client/agent/msg_constants.py b/indy_client/agent/msg_constants.py deleted file mode 100644 index c3e2f000c..000000000 --- a/indy_client/agent/msg_constants.py +++ /dev/null @@ -1,89 +0,0 @@ -ACCEPT_INVITE = 'ACCEPT_INVITE' -INVITE_ACCEPTED = "INVITE_ACCEPTED" - -# Claims message types -CLAIM_OFFER = 'CLAIM_OFFER' -CLAIM_REQUEST = 'CLAIM_REQUEST' -CLAIM = 'CLAIM' -AVAIL_CLAIM_LIST = 'AVAIL_CLAIM_LIST' -REQ_AVAIL_CLAIMS = 'REQ_AVAIL_CLAIMS' - -# TODO Why do we have this and AVAIL_CLAIM_LIST -NEW_AVAILABLE_CLAIMS = "NEW_AVAILABLE_CLAIMS" - -# Proofs message types -PROOF_REQUEST = 'PROOF_REQUEST' -PROOF = 'PROOF' -PROOF_STATUS = 'PROOF_STATUS' - - -ISSUER_DID = 'issuer_did' -CLAIM_REQ_FIELD = 'blinded_ms' -CLAIM_FIELD = 'claim' -PROOF_FIELD = 'proof' -SCHEMA_SEQ_NO = 'schema_seq_no' -PROOF_REQUEST_FIELD = 'proof_request' - -# Proof request schema keys -PROOF_REQ_SCHEMA_NAME = 'name' -PROOF_REQ_SCHEMA_VERSION = 'version' -PROOF_REQ_SCHEMA_ATTRIBUTES = 'attributes' -PROOF_REQ_SCHEMA_VERIFIABLE_ATTRIBUTES = 'verifiableAttributes' - -# Other -CLAIM_NAME_FIELD = "claimName" -REF_REQUEST_ID = "refRequestId" -REVOC_REG_SEQ_NO = 
"revoc_reg_seq_no" - -# Error constants -ERR_NO_PROOF_REQUEST_SCHEMA_FOUND = 'Error: No proof request schema found' - -""" -ACCEPT_INVITE -{ - "type": 'ACCEPT_INVITE', - "identifier": , - "nonce": , - "signature" : -} - - -AVAIL_CLAIM_LIST -{ - 'type': 'AVAIL_CLAIM_LIST', - 'claims_list': [ - "Name": "Transcript", - "Version": "1.2", - "Definition": { - "Attributes": { - "student_name": "string", - "ssn": "int", - "degree": "string", - "year": "string", - "status": "string" - } - } - ], - "signature" : -} - -AVAIL_CLAIM_LIST -{ - 'type': 'AVAIL_CLAIM_LIST', - 'claims_list': [ - "Name": "Transcript", - "Version": "1.2", - "Definition": { - "Attributes": { - "student_name": "string", - "ssn": "int", - "degree": "string", - "year": "string", - "status": "string" - } - } - ], - "signature" : -} - -""" diff --git a/indy_client/agent/run_agent.py b/indy_client/agent/run_agent.py deleted file mode 100644 index 7e9d6bef9..000000000 --- a/indy_client/agent/run_agent.py +++ /dev/null @@ -1,104 +0,0 @@ -import os -import sys - -from indy_common.exceptions import NotConnectedToNetwork -from plenum.common.exceptions import NoConsensusYet -from stp_core.common.log import getlogger -from indy_client.agent.agent_cli import AgentCli -from indy_common.config_util import getConfig - -from stp_core.loop.looper import Looper - -logger = getlogger() - - -async def runBootstrap(bootstrapFunc): - try: - await bootstrapFunc - except TimeoutError as exc: - raise NoConsensusYet("consensus is not yet achieved, " - "check if indy is running and " - "client is able to connect to it") from exc - - -def bootstrapAgentCli(name, agent, looper, bootstrap, config): - curDir = os.getcwd() - logFilePath = os.path.join(curDir, config.logFilePath) - cli = AgentCli(name='{}-Agent'.format(name).lower().replace(" ", "-"), - agentCreator=True, - agent=agent, - basedirpath=config.CLI_BASE_DIR, - logFileName=logFilePath, - looper=looper) - if bootstrap: - try: - looper.run(runBootstrap(bootstrap)) - except 
Exception as exc: - error = "Agent startup failed: [cause : {}]".format(str(exc)) - cli.print(error) - - return cli - - -def runAgentCli(agent, config, looper=None, bootstrap=None): - def run(looper): - agent.loop = looper.loop - logger.info("Running {} now (port: {})".format(agent.name, agent.port)) - agentCli = bootstrapAgentCli( - agent.name, agent, looper, bootstrap, config) - commands = sys.argv[1:] - looper.run(agentCli.shell(*commands)) - - if looper: - run(looper) - else: - with Looper(debug=config.LOOPER_DEBUG) as looper: - run(looper) - - -CONNECTION_TIMEOUT = 120 - - -def runAgent(agent, looper=None, bootstrap=None): - assert agent - - def is_connected(agent): - client = agent.client - if (client.mode is None) or (not client.can_send_write_requests()): - raise NotConnectedToNetwork("Client hasn't finished catch-up with Pool Ledger yet or " - "doesn't have sufficient number of connections") - - async def wait_until_connected(agent): - from stp_core.loop.eventually import eventually - await eventually(is_connected, agent, - timeout=CONNECTION_TIMEOUT, retryWait=2) - - def do_run(looper): - agent.loop = looper.loop - looper.add(agent) - logger.info("Running {} now (port: {})".format(agent.name, agent.port)) - if bootstrap: - looper.run(wait_until_connected(agent)) - looper.run(runBootstrap(bootstrap)) - - if looper: - do_run(looper) - else: - with Looper(debug=getConfig().LOOPER_DEBUG, loop=agent.loop) as looper: - do_run(looper) - looper.run() - -# Note: Commented it as didn't find any usage of this method -# def run_agent(looper, wallet, agent): -# -# def run(): -# _agent = agent -# wallet.pendSyncRequests() -# prepared = wallet.preparePending() -# _agent.client.submitReqs(*prepared) -# -# runAgent(_agent, looper) -# -# return _agent, wallet -# -# return run diff --git a/indy_client/agent/runnable_agent.py b/indy_client/agent/runnable_agent.py deleted file mode 100644 index ff6696b0b..000000000 --- a/indy_client/agent/runnable_agent.py +++ /dev/null @@ 
-1,53 +0,0 @@ -import sys -import argparse - -from indy_client.agent.agent import Agent -from indy_client.agent.run_agent import runAgentCli, runAgent -from stp_core.common.log import getlogger -from plenum.common.util import getFormattedErrorMsg -from indy_common.config_util import getConfig - -logger = getlogger() - - -class RunnableAgent: - @classmethod - def get_passed_args(cls): - return cls.parser_cmd_args() - - @classmethod - def parser_cmd_args(cls): - args = [] - if sys.stdin.isatty(): - args = sys.argv[1:] - - parser = argparse.ArgumentParser( - description="Starts agents with given port, cred def and issuer seq") - - parser.add_argument('--port', type=int, required=False, - help='port where agent will listen') - - parser.add_argument('--withcli', - help='if given, agent will start in cli mode', - action='store_true') - - parser.add_argument('--network', required=False, - help='network connect to (sandbox by default)') - - args = parser.parse_args(args=args) - # port = int(args.port) if args.port else None - return args - - @classmethod - def run_agent(cls, agent: Agent, looper=None, - bootstrap=None, with_cli=False): - try: - config = getConfig() - if with_cli: - runAgentCli(agent, config, looper=looper, bootstrap=bootstrap) - else: - runAgent(agent, looper, bootstrap) - return agent - except Exception as exc: - error = "Agent startup failed: [cause : {}]".format(str(exc)) - logger.error(getFormattedErrorMsg(error)) diff --git a/indy_client/agent/walleted.py b/indy_client/agent/walleted.py deleted file mode 100644 index 2bfe2e98b..000000000 --- a/indy_client/agent/walleted.py +++ /dev/null @@ -1,1047 +0,0 @@ -import asyncio -import collections -import inspect -import json -import time -from datetime import datetime -from typing import Dict, List, Union - -from base58 import b58decode -from common.serializers.serialization import serialize_msg_for_signing - -from stp_core.common.log import getlogger -from plenum.common.signer_did import DidSigner -from 
plenum.common.constants import TYPE, DATA, NONCE, IDENTIFIER, NAME, VERSION, \ - TARGET_NYM, ATTRIBUTES, VERKEY, VERIFIABLE_ATTRIBUTES, PREDICATES -from plenum.common.types import f -from plenum.common.util import getTimeBasedId, getCryptonym, \ - isMaxCheckTimeExpired, convertTimeBasedReqIdToMillis, friendlyToRaw -from plenum.common.verifier import DidVerifier - -from anoncreds.protocol.issuer import Issuer -from anoncreds.protocol.prover import Prover -from anoncreds.protocol.verifier import Verifier -from anoncreds.protocol.globals import TYPE_CL -from anoncreds.protocol.types import AttribDef, ID, ProofRequest, AvailableClaim -from plenum.common.exceptions import NotConnectedToAny -from indy_client.agent.agent_issuer import AgentIssuer -from indy_client.agent.backend import BackendSystem -from indy_client.agent.agent_prover import AgentProver -from indy_client.agent.agent_verifier import AgentVerifier -from indy_client.agent.constants import ALREADY_ACCEPTED_FIELD, CLAIMS_LIST_FIELD, \ - REQ_MSG, PING, ERROR, EVENT, EVENT_NAME, EVENT_NOTIFY_MSG, \ - EVENT_POST_ACCEPT_INVITE, PONG, EVENT_NOT_CONNECTED_TO_ANY_ENV -from indy_client.agent.exception import NonceNotFound, SignatureRejected -from indy_client.agent.helper import friendlyVerkeyToPubkey, rawVerkeyToPubkey -from indy_client.agent.msg_constants import ACCEPT_INVITE, CLAIM_REQUEST, \ - PROOF, AVAIL_CLAIM_LIST, CLAIM, PROOF_STATUS, NEW_AVAILABLE_CLAIMS, \ - REF_REQUEST_ID, REQ_AVAIL_CLAIMS, INVITE_ACCEPTED, PROOF_REQUEST -from indy_client.client.wallet.attribute import Attribute, LedgerStore -from indy_client.client.wallet.connection import Connection, constant -from indy_client.client.wallet.wallet import Wallet -from indy_common.exceptions import ConnectionNotFound, ConnectionAlreadyExists, \ - NotConnectedToNetwork, LinkNotReady, VerkeyNotFound, RemoteEndpointNotFound -from indy_common.identity import Identity -from indy_common.constants import ENDPOINT -from indy_common.util import ensureReqCompleted 
-from indy_common.config import agentLoggingLevel -from indy_common.exceptions import InvalidConnectionException -from plenum.common.constants import PUBKEY -from indy_common.util import getNonceForProof - -logger = getlogger() -logger.setLevel(agentLoggingLevel) - - -class Walleted(AgentIssuer, AgentProver, AgentVerifier): - """ - An agent with a self-contained wallet. - - Normally, other logic acts upon a remote agent. That other logic holds keys - and signs messages and transactions that the Agent then forwards. In this - case, the agent holds a wallet. - """ - - def __init__(self, - issuer: Issuer = None, - prover: Prover = None, - verifier: Verifier = None): - - AgentIssuer.__init__(self, issuer) - AgentProver.__init__(self, prover) - AgentVerifier.__init__(self, verifier) - - # TODO Why are we syncing the client here? - if self.client: - self.syncClient() - self.rcvdMsgStore = {} # type: Dict[reqId, [reqMsg]] - - self.msgHandlers = { - ERROR: self._handleError, - EVENT: self._eventHandler, - - PING: self._handlePing, - ACCEPT_INVITE: self._handleAcceptance, - REQ_AVAIL_CLAIMS: self.processReqAvailClaims, - - CLAIM_REQUEST: self.processReqClaim, - CLAIM: self.handleReqClaimResponse, - - PROOF: self.verifyProof, - PROOF_STATUS: self.handleProofStatusResponse, - PROOF_REQUEST: self.handleProofRequest, - - PONG: self._handlePong, - INVITE_ACCEPTED: self._handleAcceptInviteResponse, - AVAIL_CLAIM_LIST: self._handleAvailableClaimsResponse, - - NEW_AVAILABLE_CLAIMS: self._handleNewAvailableClaimsDataResponse - } - self.logger = logger - - self.issuer_backend = None - - self._invites = {} # type: Dict[Nonce, Tuple(InternalId, str)] - self._attribDefs = {} # type: Dict[str, AttribDef] - self.defined_claims = [] # type: List[Dict[str, Any] - - # dict for proof request schema Dict[str, Dict[str, any]] - self._proofRequestsSchema = {} - - def syncClient(self): - obs = self._wallet.handleIncomingReply - if not self.client.hasObserver(obs): - 
self.client.registerObserver(obs) - self._wallet.pendSyncRequests() - prepared = self._wallet.preparePending() - self.client.submitReqs(*prepared) - - @property - def wallet(self) -> Wallet: - return self._wallet - - @wallet.setter - def wallet(self, wallet): - self._wallet = wallet - - @property - def lockedMsgs(self): - # Msgs for which signature verification is required - return ACCEPT_INVITE, CLAIM_REQUEST, PROOF, \ - CLAIM, AVAIL_CLAIM_LIST, EVENT, PONG, REQ_AVAIL_CLAIMS - - async def postProofVerif(self, claimName, link, frm): - raise NotImplementedError - - def is_claim_available(self, link, claim_name): - return any( - ac[NAME] == claim_name for ac in self._get_available_claim_list_by_internal_id( - link.internalId)) - - async def _postProofVerif(self, claimName, link, frm): - link.verifiedClaimProofs.append(claimName) - await self.postProofVerif(claimName, link, frm) - - async def _set_available_claim_by_internal_id(self, internal_id, schema_id): - sd = await self.schema_dict_from_id(schema_id) - try: - if not any( - d == sd for d in self.issuer.wallet.availableClaimsByInternalId[internal_id]): - self.issuer.wallet.availableClaimsByInternalId[internal_id].append( - sd) - except KeyError: - self.issuer.wallet.availableClaimsByInternalId[internal_id] = [sd] - - def _get_available_claim_list_by_internal_id(self, internal_id): - return self.issuer.wallet.availableClaimsByInternalId.get( - internal_id, set()) - - def get_available_claim_list(self, link): - li = self.wallet.getConnectionBy(remote=link.remoteIdentifier) - # TODO: Need to return set instead of list, but if we return set, - # stack communication fails as set is not json serializable, - # need to work on that. 
- if li is None: - return list() - return list( - self._get_available_claim_list_by_internal_id(li.internalId)) - - def getErrorResponse(self, reqBody, errorMsg="Error"): - invalidSigResp = { - TYPE: ERROR, - DATA: errorMsg, - REQ_MSG: reqBody, - } - return invalidSigResp - - def logAndSendErrorResp(self, to, reqBody, respMsg, logMsg): - logger.warning(logMsg) - self.signAndSend(msg=self.getErrorResponse(reqBody, respMsg), - signingIdr=self.wallet.defaultId, name=to) - - # TODO: Verification needs to be moved out of it, - # use `verifySignature` instead - def verifyAndGetLink(self, msg): - body, (frm, ha) = msg - nonce = body.get(NONCE) - try: - kwargs = dict(nonce=nonce, remoteIdr=body.get( - f.IDENTIFIER.nm), remoteHa=ha) - if ha is None: - # Incase of ZStack, - kwargs.update(remotePubkey=frm) - return self.linkFromNonce(**kwargs) - except NonceNotFound: - self.logAndSendErrorResp(frm, body, - "Nonce not found", - "Nonce not found for msg: {}".format(msg)) - return None - - def linkFromNonce(self, nonce, remoteIdr, remoteHa=None, - remotePubkey=None): - internalId = self.get_internal_id_by_nonce(nonce) - linkName = self.get_link_name_by_internal_id(internalId) - link = self.wallet.getConnectionBy(internalId=internalId) - if not link: - # QUESTION: We use wallet.defaultId as the local identifier, - # this looks ok for test code, but not production code - link = Connection(linkName, - self.wallet.defaultId, - self.wallet.getVerkey(), - request_nonce=nonce, - remoteIdentifier=remoteIdr, - remoteEndPoint=remoteHa, - internalId=internalId, - remotePubkey=remotePubkey) - self.wallet.addConnection(link) - else: - link.remoteIdentifier = remoteIdr - link.remoteEndPoint = remoteHa - return link - - def get_internal_id_by_nonce(self, nonce): - if nonce in self._invites: - return self._invites[nonce][0] - else: - raise NonceNotFound - - def get_link_name_by_internal_id(self, internalId): - for invite in self._invites.values(): - if invite[0] == internalId: - return 
invite[1] - - def set_issuer_backend(self, backend: BackendSystem): - self.issuer_backend = backend - - async def publish_issuer_keys(self, schema_id, p_prime, q_prime): - keys = await self.issuer.genKeys(schema_id, - p_prime=p_prime, - q_prime=q_prime) - await self.add_to_available_claims(schema_id) - return keys - - async def schema_dict_from_id(self, schema_id): - schema = await self.issuer.wallet.getSchema(schema_id) - return self.schema_dict(schema) - - async def publish_revocation_registry(self, schema_id, rev_reg_id='110', size=5): - return await self.issuer.issueAccumulator(schemaId=schema_id, - iA=rev_reg_id, - L=size) - - def schema_dict(self, schema): - return { - NAME: schema.name, - VERSION: schema.version, - "schemaSeqNo": schema.seqId - } - - async def add_to_available_claims(self, schema_id): - schema = await self.issuer.wallet.getSchema(schema_id) - self.defined_claims.append(self.schema_dict(schema)) - - async def publish_schema(self, - attrib_def_name, - schema_name, - schema_version): - attribDef = self._attribDefs[attrib_def_name] - schema = await self.issuer.genSchema(schema_name, - schema_version, - attribDef.attribNames()) - schema_id = ID(schemaKey=schema.getKey(), schemaId=schema.seqId) - return schema_id - - def add_attribute_definition(self, attr_def: AttribDef): - self._attribDefs[attr_def.name] = attr_def - - async def get_claim(self, schema_id: ID): - return await self.prover.wallet.getClaimAttributes(schema_id) - - def new_identifier(self, seed=None): - idr, _ = self.wallet.addIdentifier(seed=seed) - verkey = self.wallet.getVerkey(idr) - return idr, verkey - - def get_link_by_name(self, name): - return self.wallet.getConnection(str(name)) - - def signAndSendToLink(self, msg, linkName, origReqId=None): - link = self.wallet.getConnection(linkName, required=True) - if not link.localIdentifier: - raise LinkNotReady('connection is not yet established, ' - 'send/accept request first') - - ha = link.getRemoteEndpoint(required=False) - name 
= link.name - if not ha: - # if not remote address is present, then it's upcominh link, so we may have no - # explicit connection (wrk in a listener mode). - # PulicKey is used as a name in this case - name = link.remotePubkey - - if ha: - self.connectTo(link=link) - - return self.signAndSend(msg=msg, signingIdr=link.localIdentifier, - name=name, ha=ha, origReqId=origReqId) - - def signAndSend(self, msg, signingIdr, name=None, ha=None, origReqId=None): - msg[f.REQ_ID.nm] = getTimeBasedId() - if origReqId: - msg[REF_REQUEST_ID] = origReqId - - msg[IDENTIFIER] = signingIdr - signature = self.wallet.signMsg(msg, signingIdr) - msg[f.SIG.nm] = signature - - self.sendMessage(msg, name=name, ha=ha) - - return msg[f.REQ_ID.nm] - - @staticmethod - def getCommonMsg(typ, data): - msg = { - TYPE: typ, - DATA: data - } - return msg - - @classmethod - def createInviteAcceptedMsg(cls, claimLists, alreadyAccepted=False): - data = { - CLAIMS_LIST_FIELD: claimLists - } - if alreadyAccepted: - data[ALREADY_ACCEPTED_FIELD] = alreadyAccepted - - return cls.getCommonMsg(INVITE_ACCEPTED, data) - - @classmethod - def createNewAvailableClaimsMsg(cls, claimLists): - data = { - CLAIMS_LIST_FIELD: claimLists - } - return cls.getCommonMsg(NEW_AVAILABLE_CLAIMS, data) - - @classmethod - def createClaimMsg(cls, claim): - return cls.getCommonMsg(CLAIM, claim) - - def _eventHandler(self, msg): - body, _ = msg - eventName = body[EVENT_NAME] - data = body[DATA] - self.notifyEventListeners(eventName, **data) - - def notifyEventListeners(self, eventName, **data): - for el in self._eventListeners.get(eventName, []): - el(notifier=self, **data) - - def notifyMsgListener(self, msg): - self.notifyEventListeners(EVENT_NOTIFY_MSG, msg=msg) - - def isSignatureVerifRespRequired(self, typ): - return typ in self.lockedMsgs and typ not in [EVENT, PING, PONG] - - def sendSigVerifResponseMsg(self, respMsg, to, reqMsgTyp, identifier): - if self.isSignatureVerifRespRequired(reqMsgTyp): - 
self.notifyToRemoteCaller(EVENT_NOTIFY_MSG, - respMsg, identifier, to) - - def handleEndpointMessage(self, msg): - body, frm = msg - logger.debug("Message received (from -> {}): {}".format(frm, body)) - if isinstance(frm, bytes): - frm = frm.decode() - for reqFieldName in (TYPE, f.REQ_ID.nm): - reqFieldValue = body.get(reqFieldName) - if not reqFieldValue: - errorMsg = "{} not specified in message: {}".format( - reqFieldName, body) - self.notifyToRemoteCaller(EVENT_NOTIFY_MSG, - errorMsg, self.wallet.defaultId, frm) - logger.warning("{}".format(errorMsg)) - return - - typ = body.get(TYPE) - link = self.wallet.getConnectionBy(remote=body.get(f.IDENTIFIER.nm)) - - # If accept invite is coming the first time, then use the default - # identifier of the wallet since link wont be created - if typ == ACCEPT_INVITE and link is None: - localIdr = self.wallet.defaultId - else: - # if accept invite is not the message type - # and we are still missing link, then return the error - if link is None: - linkNotCreated = ' Error processing {}. ' \ - 'Connection is not yet created.'.format(typ) - self.notifyToRemoteCaller(EVENT_NOTIFY_MSG, - linkNotCreated, - self.wallet.defaultId, - frm) - return - - localIdr = link.localIdentifier - - if typ in self.lockedMsgs: - try: - self.verifySignature(body) - except SignatureRejected: - self.sendSigVerifResponseMsg("\nSignature rejected.", - frm, typ, localIdr) - return - reqId = body.get(f.REQ_ID.nm) - - oldResps = self.rcvdMsgStore.get(reqId) - if oldResps: - oldResps.append(msg) - else: - self.rcvdMsgStore[reqId] = [msg] - - # TODO: Question: Should we sending an acknowledgement for every message? 
- # We are sending, ACKs for "signature accepted" messages too - self.sendSigVerifResponseMsg("\nSignature accepted.", - frm, typ, localIdr) - - handler = self.msgHandlers.get(typ) - if handler: - # TODO we should verify signature here - frmHa = self.endpoint.getHa(frm) - # `frmHa` can be None - res = handler((body, (frm, frmHa))) - if inspect.isawaitable(res): - self.loop.call_soon(asyncio.ensure_future, res) - else: - raise NotImplementedError("No type handle found for {} message". - format(typ)) - - def _handleError(self, msg): - body, _ = msg - self.notifyMsgListener("Error ({}) occurred while processing this " - "msg: {}".format(body[DATA], body[REQ_MSG])) - - def _handlePing(self, msg): - body, (frm, ha) = msg - link = self.wallet.getConnectionBy(nonce=body.get(NONCE)) - if link: - self.logger.info('Ping sent to %s', link.remoteIdentifier) - self.signAndSend({TYPE: 'pong'}, self.wallet.defaultId, frm, - origReqId=body.get(f.REQ_ID.nm)) - - def _handlePong(self, msg): - body, (frm, ha) = msg - identifier = body.get(IDENTIFIER) - if identifier: - li = self._getLinkByTarget(getCryptonym(identifier)) - if li: - self.logger.info('Pong received from %s', li.remoteIdentifier) - self.notifyMsgListener(" Pong received.") - else: - self.notifyMsgListener( - " Pong received from unknown endpoint.") - else: - self.notifyMsgListener(' Identifier is not yet set.') - - def _handleNewAvailableClaimsDataResponse(self, msg): - body, _ = msg - isVerified = self.verifySignature(body) - if isVerified: - identifier = body.get(IDENTIFIER) - li = self._getLinkByTarget(getCryptonym(identifier)) - if li: - self.notifyResponseFromMsg(li.name, body.get(f.REQ_ID.nm)) - - rcvdAvailableClaims = body[DATA][CLAIMS_LIST_FIELD] - newAvailableClaims = self._getNewAvailableClaims( - li, rcvdAvailableClaims) - if newAvailableClaims: - li.availableClaims.extend(newAvailableClaims) - claimNames = ", ".join( - [n for n, _, _ in newAvailableClaims]) - self.notifyMsgListener( - " Available Claim(s): 
{}\n".format(claimNames)) - - else: - self.notifyMsgListener("No matching connection found") - - @staticmethod - def _getNewAvailableClaims( - li, rcvdAvailableClaims) -> List[AvailableClaim]: - receivedClaims = [AvailableClaim(cl[NAME], - cl[VERSION], - li.remoteIdentifier) - for cl in rcvdAvailableClaims] - existingAvailableClaims = set(li.availableClaims) - newReceivedClaims = set(receivedClaims) - return list(newReceivedClaims - existingAvailableClaims) - - def _handleAvailableClaimsResponse(self, msg): - body, _ = msg - identifier = body.get(IDENTIFIER) - li = self._getLinkByTarget(getCryptonym(identifier)) - if li: - rcvdAvailableClaims = body[DATA][CLAIMS_LIST_FIELD] - if len(rcvdAvailableClaims) > 0: - self.notifyMsgListener(" Available Claim(s): {}". format( - ",".join([rc.get(NAME) for rc in rcvdAvailableClaims]))) - else: - self.notifyMsgListener(" Available Claim(s): " - "No available claims found") - - def _handleAcceptInviteResponse(self, msg): - body, _ = msg - identifier = body.get(IDENTIFIER) - li = self._getLinkByTarget(getCryptonym(identifier)) - if li: - # TODO: Show seconds took to respond - self.notifyResponseFromMsg(li.name, body.get(f.REQ_ID.nm)) - self.notifyMsgListener(" Trust established.") - alreadyAccepted = body[DATA].get(ALREADY_ACCEPTED_FIELD) - if alreadyAccepted: - self.notifyMsgListener(" Already accepted.") - else: - self.notifyMsgListener(" DID created in Indy.") - - li.connection_status = constant.CONNECTION_STATUS_ACCEPTED - rcvdAvailableClaims = body[DATA][CLAIMS_LIST_FIELD] - newAvailableClaims = self._getNewAvailableClaims( - li, rcvdAvailableClaims) - if newAvailableClaims: - li.availableClaims.extend(newAvailableClaims) - self.notifyMsgListener(" Available Claim(s): {}". 
format( - ",".join([rc.get(NAME) for rc in rcvdAvailableClaims]))) - try: - self._checkIfLinkIdentifierWrittenToIndy( - li, newAvailableClaims) - except NotConnectedToAny: - self.notifyEventListeners( - EVENT_NOT_CONNECTED_TO_ANY_ENV, - msg="Cannot check if identifier is written to Indy.") - else: - self.notifyMsgListener("No matching connection found") - - def getVerkeyForLink(self, link): - # TODO: Get latest verkey for this link's remote identifier from Indy - if link.remoteVerkey: - return link.remoteVerkey - else: - raise VerkeyNotFound("verkey not set in connection") - - def getLinkForMsg(self, msg): - nonce = msg.get(NONCE) - identifier = msg.get(f.IDENTIFIER.nm) - link = self.wallet.getConnectionBy(nonce=nonce, remote=identifier) - if link: - return link - else: - raise ConnectionNotFound - - def verifySignature(self, msg: Dict[str, str]): - signature = msg.get(f.SIG.nm) - identifier = msg.get(IDENTIFIER) - msgWithoutSig = {k: v for k, v in msg.items() if k != f.SIG.nm} - # TODO This assumes the current key is the cryptonym. This is a BAD - # ASSUMPTION!!! Indy needs to provide the current key. - ser = serialize_msg_for_signing(msgWithoutSig) - signature = b58decode(signature.encode()) - typ = msg.get(TYPE) - # TODO: Maybe keeping ACCEPT_INVITE open is a better option than keeping - # an if condition here? - if typ == ACCEPT_INVITE: - verkey = msg.get(VERKEY) - else: - try: - link = self.getLinkForMsg(msg) - verkey = self.getVerkeyForLink(link) - except (ConnectionNotFound, VerkeyNotFound): - # This is for verification of `NOTIFY` events - link = self.wallet.getConnectionBy(remote=identifier) - # TODO: If verkey is None, it should be fetched from Indy. - # Assuming CID for now. 
- verkey = link.remoteVerkey - - v = DidVerifier(verkey, identifier=identifier) - if not v.verify(signature, ser): - raise SignatureRejected - else: - if typ == ACCEPT_INVITE: - self.logger.info('Signature accepted.') - return True - - def _getLinkByTarget(self, target) -> Connection: - return self.wallet.getConnectionBy(remote=target) - - def _checkIfLinkIdentifierWrittenToIndy( - self, li: Connection, availableClaims): - req = self.getIdentity(li.localIdentifier) - self.notifyMsgListener("\nSynchronizing...") - - def getNymReply(reply, err, availableClaims, li: Connection): - if reply.get(DATA) and json.loads(reply[DATA])[TARGET_NYM] == \ - li.localIdentifier: - self.notifyMsgListener( - " Confirmed DID written to Indy.") - self.notifyEventListeners( - EVENT_POST_ACCEPT_INVITE, connection=li) - else: - self.notifyMsgListener( - " DID is not yet written to Indy") - - self.loop.call_later(.2, ensureReqCompleted, self.loop, - (req.identifier, req.reqId), - self.client, getNymReply, (availableClaims, li)) - - def notifyResponseFromMsg(self, linkName, reqId=None): - if reqId: - # TODO: This logic assumes that the req id is time based - curTimeBasedId = getTimeBasedId() - timeTakenInMillis = convertTimeBasedReqIdToMillis( - curTimeBasedId - reqId) - - if timeTakenInMillis >= 1000: - responseTime = ' ({} sec)'.format( - round(timeTakenInMillis / 1000, 2)) - else: - responseTime = ' ({} ms)'.format(round(timeTakenInMillis, 2)) - else: - responseTime = '' - - self.notifyMsgListener("\nResponse from {}{}:".format(linkName, - responseTime)) - - def notifyToRemoteCaller(self, event, msg, signingIdr, to, origReqId=None): - resp = { - TYPE: EVENT, - EVENT_NAME: event, - DATA: {'msg': msg} - } - self.signAndSend(resp, signingIdr, to, origReqId=origReqId) - - def _handleAcceptance(self, msg): - body, (frm, ha) = msg - link = self.verifyAndGetLink(msg) - # TODO this is really kludgy code... needs refactoring - # exception handling, separation of concerns, etc. 
- if not link: - return - logger.debug("proceeding with connection: {}".format(link.name)) - identifier = body.get(f.IDENTIFIER.nm) - verkey = body.get(VERKEY) - idy = Identity(identifier, verkey=verkey) - link.remoteVerkey = verkey - try: - pendingCount = self.wallet.addTrustAnchoredIdentity(idy) - logger.debug("pending request count {}".format(pendingCount)) - alreadyAdded = False - except Exception as e: - if e.args[0] in ['identifier already added']: - alreadyAdded = True - else: - logger.warning("Exception raised while adding nym, " - "error was: {}".format(e.args[0])) - raise e - - def send_claims(reply=None, error=None): - return self.sendClaimList(link=link, - alreadyAdded=alreadyAdded, - sender=frm, - reqId=body.get(f.REQ_ID.nm), - reply=reply, - error=error) - - if alreadyAdded: - send_claims() - logger.debug("already accepted, " - "so directly sending available claims") - self.logger.info('Already added identifier [{}] in indy' - .format(identifier)) - # self.notifyToRemoteCaller(EVENT_NOTIFY_MSG, - # " Already accepted", - # link.verkey, frm) - else: - logger.debug( - "not added to the ledger, so add nym to the ledger " - "and then will send available claims") - reqs = self.wallet.preparePending() - # Assuming there was only one pending request - logger.debug("sending to indy {}".format(reqs[0])) - # Need to think through - # how to provide separate logging for each agent - # anyhow this class should be implemented by each agent - # so we might not even need to add it as a separate logic - self.logger.info('Creating identifier [{}] in indy' - .format(identifier)) - self._sendToIndyAndDo(reqs[0], clbk=send_claims) - - # TODO: If I have the below exception thrown, somehow the - # error msg which is sent in verifyAndGetLink is not being received - # on the other end, so for now, commented, need to come back to this - # else: - # raise NotImplementedError - - def sendClaimList(self, link, alreadyAdded, sender, - reqId, reply=None, error=None): - 
logger.debug("sending available claims to {}".format( - link.remoteIdentifier)) - resp = self.createInviteAcceptedMsg( - self.get_available_claim_list(link), - alreadyAccepted=alreadyAdded) - self.signAndSend(resp, link.localIdentifier, sender, - origReqId=reqId) - - def _sendToIndyAndDo(self, req, clbk=None, *args, **kwargs): - self.client.submitReqs(req) - ensureReqCompleted(self.loop, (req.identifier, req.reqId), - self.client, clbk, *args, **kwargs) - - def newAvailableClaimsPostClaimVerif(self, claimName): - raise NotImplementedError - - def sendNewAvailableClaimsData(self, nac, frm, link): - if len(nac) > 0: - resp = self.createNewAvailableClaimsMsg(nac) - self.signAndSend(resp, link.localIdentifier, frm) - - def sendPing(self, linkName): - link = self.wallet.getConnection(linkName, required=True) - self.connectTo(link=link) - ha = link.getRemoteEndpoint(required=True) - params = dict(ha=ha) - msg = { - TYPE: 'ping', - NONCE: link.request_nonce, - f.REQ_ID.nm: getTimeBasedId(), - f.IDENTIFIER.nm: link.localIdentifier - } - reqId = self.sendMessage(msg, **params) - - self.notifyMsgListener(" Ping sent.") - return reqId - - def connectTo(self, linkName=None, link=None): - assert linkName or link - if link is None: - link = self.wallet.getConnection(linkName, required=True) - ha = link.getRemoteEndpoint(required=True) - verKeyRaw = friendlyToRaw( - link.full_remote_verkey) if link.full_remote_verkey else None - publicKeyRaw = friendlyToRaw( - link.remotePubkey) if link.remotePubkey else None - - if verKeyRaw is None and publicKeyRaw is None: - raise InvalidConnectionException( - "verkey or publicKey is required for connection.") - - if publicKeyRaw is None: - publicKeyRaw = rawVerkeyToPubkey(verKeyRaw) - self.endpoint.connectIfNotConnected( - name=link.name, - ha=ha, - verKeyRaw=verKeyRaw, - publicKeyRaw=publicKeyRaw) - - # duplicate function - # def loadInvitationFile(self, filePath): - # with open(filePath) as data_file: - # request = json.load( - # data_file, 
object_pairs_hook=collections.OrderedDict) - # return self.load_request_dict(request) - - def load_request_str(self, json_str): - request = json.loads( - json_str, object_pairs_hook=collections.OrderedDict) - return self.load_request_dict(request) - - def load_request_dict(self, request_dict): - link_request = request_dict.get("connection-request") - if not link_request: - raise ConnectionNotFound - linkName = link_request["name"] - existingLinkInvites = self.wallet. \ - getMatchingConnections(linkName) - if len(existingLinkInvites) >= 1: - return self._merge_request(request_dict) - Connection.validate(request_dict) - link = self.load_request(request_dict) - return link - - def load_request(self, request_data): - link_request = request_data["connection-request"] - remoteIdentifier = link_request[f.IDENTIFIER.nm] - # TODO signature should be validated! - # signature = request_data["sig"] - link_request_name = link_request[NAME] - remoteEndPoint = link_request.get("endpoint", None) - remote_verkey = link_request.get("verkey", None) - linkNonce = link_request[NONCE] - proofRequestsJson = request_data.get("proof-requests", None) - - proofRequests = [] - if proofRequestsJson: - for cr in proofRequestsJson: - proofRequests.append( - ProofRequest( - cr[NAME], - cr[VERSION], - getNonceForProof(linkNonce), - cr[ATTRIBUTES], - cr[VERIFIABLE_ATTRIBUTES] if VERIFIABLE_ATTRIBUTES in cr else [], - cr[PREDICATES] if PREDICATES in cr else [])) - - self.notifyMsgListener("1 connection request found for {}.". - format(link_request_name)) - - self.notifyMsgListener("Creating connection for {}.". - format(link_request_name)) - # TODO: Would we always have a trust anchor corresponding to a link? 
- - li = Connection(name=link_request_name, - trustAnchor=link_request_name, - remoteIdentifier=remoteIdentifier, - remoteEndPoint=remoteEndPoint, - request_nonce=linkNonce, - proofRequests=proofRequests, - remote_verkey=remote_verkey) - - self.wallet.addConnection(li) - return li - - def load_request_file(self, filePath): - with open(filePath) as data_file: - request_data = json.load( - data_file, object_pairs_hook=collections.OrderedDict) - link_request = request_data.get("connection-request") - if not link_request: - raise ConnectionNotFound - linkName = link_request["name"] - existingLinkInvites = self.wallet. \ - getMatchingConnections(linkName) - if len(existingLinkInvites) >= 1: - return self._merge_request(request_data) - Connection.validate(request_data) - link = self.load_request(request_data) - return link - - def _merge_request(self, request_data): - link_request = request_data.get('connection-request') - linkName = link_request['name'] - link = self.wallet.getConnection(linkName) - request_proof_requests = request_data.get('proof-requests', - None) - nonce = link_request.get(NONCE) - if request_proof_requests: - for icr in request_proof_requests: - # match is found if name and version are same - matchedProofRequest = next( - (cr for cr in link.proofRequests - if (cr.name == icr[NAME] and cr.version == icr[VERSION])), - None - ) - - # if link.requestedProofs contains any claim request - if matchedProofRequest: - # merge 'attributes' and 'verifiableAttributes' - matchedProofRequest.attributes = { - **matchedProofRequest.attributes, - **icr[ATTRIBUTES] - } - matchedProofRequest.verifiableAttributes = dict( - matchedProofRequest.verifiableAttributes, **icr[VERIFIABLE_ATTRIBUTES]) - - else: - # otherwise append proof request to link - link.proofRequests.append( - ProofRequest( - icr[NAME], - icr[VERSION], - getNonceForProof(nonce), - attributes=icr[ATTRIBUTES], - verifiableAttributes=icr[VERIFIABLE_ATTRIBUTES])) - - return link - else: - raise 
ConnectionAlreadyExists - - def accept_request(self, link: Union[str, Connection]): - if isinstance(link, str): - link = self.wallet.getConnection(link, required=True) - elif isinstance(link, Connection): - pass - else: - raise TypeError( - "Type of connection must be either string or Link but " - "provided {}".format( - type(link))) - # TODO should move to wallet in a method like accept(link) - if not link.localIdentifier: - self.create_identifier_for_link(link) - msg = { - TYPE: ACCEPT_INVITE, - # TODO should not send this... because origin should be the sender - NONCE: link.request_nonce, - VERKEY: self.wallet.getVerkey(link.localIdentifier) - } - logger.debug("{} accepting request from {} with id {}". - format(self.name, link.name, link.remoteIdentifier)) - self.logger.info('Accepting request with nonce {} from id {}' - .format(link.request_nonce, link.remoteIdentifier)) - self.signAndSendToLink(msg, link.name) - - # def _handleSyncNymResp(self, link, additionalCallback): - # def _(reply, err): - # if err: - # raise RuntimeError(err) - # reqId = self._updateLinkWithLatestInfo(link, reply) - # if reqId: - # self.loop.call_later(.2, - # self.executeWhenResponseRcvd, - # time.time(), 8000, - # self.loop, reqId, PONG, True, - # additionalCallback, reply, err) - # else: - # additionalCallback(reply, err) - # - # return _ - - def create_identifier_for_link(self, link): - signer = DidSigner() - self.wallet.addIdentifier(signer=signer) - link.localIdentifier = signer.identifier - link.localVerkey = signer.verkey - - def _handleSyncResp(self, link, additionalCallback): - def _(reply, err): - if err: - raise RuntimeError(err) - reqId = self._updateLinkWithLatestInfo(link, reply) - if reqId: - self.loop.call_later(.2, - self.executeWhenResponseRcvd, - time.time(), 8000, - self.loop, reqId, PONG, True, - additionalCallback, reply, err) - else: - if callable(additionalCallback): - additionalCallback(reply, err) - - return _ - - def _updateLinkWithLatestInfo(self, link: 
Connection, reply): - if DATA in reply and reply[DATA]: - data = json.loads(reply[DATA]) - - verkey = data.get(VERKEY) - if verkey is not None: - link.remoteVerkey = data[VERKEY] - - ep = data.get(ENDPOINT) - if isinstance(ep, dict): - # TODO: Validate its an IP port pair or a malicious entity - # can crash the code - if 'ha' in ep: - ip, port = ep['ha'].split(":") - link.remoteEndPoint = (ip, int(port)) - if PUBKEY in ep: - link.remotePubkey = ep[PUBKEY] - else: - link.remotePubkey = friendlyVerkeyToPubkey( - link.full_remote_verkey) if link.full_remote_verkey else None - - link.connection_last_synced = datetime.now() - self.notifyMsgListener( - " Connection {} synced".format(link.name)) - - def _pingToEndpoint(self, name, endpoint): - self.notifyMsgListener("\nPinging target endpoint: {}". - format(endpoint)) - reqId = self.sendPing(linkName=name) - return reqId - - def sync(self, linkName, doneCallback=None): - if not self.client.isReady(): - raise NotConnectedToNetwork - link = self.wallet.getConnection(linkName, required=True) - identifier = link.remoteIdentifier - identity = Identity(identifier=identifier) - req = self.wallet.requestIdentity(identity, - sender=self.wallet.defaultId) - - self.client.submitReqs(req) - - self.loop.call_later(.2, - ensureReqCompleted, - self.loop, - (req.identifier, req.reqId), - self.client, - self._handleSyncResp(link, None)) - - attrib = Attribute(name=ENDPOINT, - value=None, - dest=identifier, - ledgerStore=LedgerStore.RAW) - - req = self.wallet.requestAttribute( - attrib, sender=self.wallet.defaultId) - self.client.submitReqs(req) - - self.loop.call_later(.2, - ensureReqCompleted, - self.loop, - (req.identifier, req.reqId), - self.client, - self._handleSyncResp(link, doneCallback)) - - def executeWhenResponseRcvd(self, startTime, maxCheckForMillis, - loop, reqId, respType, - checkIfLinkExists, clbk, *args): - - if isMaxCheckTimeExpired(startTime, maxCheckForMillis): - clbk( - None, "No response received within specified time 
({} mills). " - "Retry the command and see if that works.\n". format(maxCheckForMillis)) - else: - found = False - rcvdResponses = self.rcvdMsgStore.get(reqId) - if rcvdResponses: - for msg in rcvdResponses: - body, frm = msg - if body.get(TYPE) == respType: - if checkIfLinkExists: - identifier = body.get(IDENTIFIER) - li = self._getLinkByTarget( - getCryptonym(identifier)) - linkCheckOk = li is not None - else: - linkCheckOk = True - - if linkCheckOk: - found = True - break - - if found: - clbk(*args) - else: - loop.call_later(.2, self.executeWhenResponseRcvd, - startTime, maxCheckForMillis, loop, - reqId, respType, checkIfLinkExists, clbk, *args) diff --git a/indy_client/agent/walleted_agent.py b/indy_client/agent/walleted_agent.py deleted file mode 100644 index 33f21b549..000000000 --- a/indy_client/agent/walleted_agent.py +++ /dev/null @@ -1,173 +0,0 @@ -import errno -import os - -from plenum.client.wallet import WalletStorageHelper -from plenum.common.util import normalizedWalletFileName, \ - getLastSavedWalletFileName, getWalletFilePath -from indy_client.agent.agent import Agent -from indy_client.agent.caching import Caching -from indy_client.agent.walleted import Walleted -from indy_client.anon_creds.indy_issuer import IndyIssuer -from indy_client.anon_creds.indy_prover import IndyProver -from indy_client.anon_creds.indy_verifier import IndyVerifier -from indy_client.client.client import Client -from indy_client.client.wallet.wallet import Wallet -from indy_common.config_util import getConfig - -from anoncreds.protocol.repo.attributes_repo import AttributeRepoInMemory - - -class WalletedAgent(Walleted, Agent, Caching): - def __init__(self, - name: str = None, - basedirpath: str = None, - client: Client = None, - wallet: Wallet = None, - port: int = None, - loop=None, - attrRepo=None, - config=None, - endpointArgs=None): - - Agent.__init__(self, name, basedirpath, client, port, loop=loop, - config=config, endpointArgs=endpointArgs) - - self.config = 
getConfig(basedirpath) - - self._wallet = None - self._walletSaver = None - - # restore any active wallet belonging to this agent - self._restoreWallet() - - # if no persisted wallet is restored and a wallet is passed, - # then use given wallet, else ignore the given wallet - if not self.wallet and wallet: - self.wallet = wallet - - # if wallet is not yet set, then create a wallet - if not self.wallet: - self.wallet = Wallet(name) - - self._attrRepo = attrRepo or AttributeRepoInMemory() - - Walleted.__init__(self) - - if self.client: - self._initIssuerProverVerifier() - - self._restoreIssuerWallet() - - def _initIssuerProverVerifier(self): - self.issuer = IndyIssuer(client=self.client, wallet=self._wallet, - attrRepo=self._attrRepo) - self.prover = IndyProver(client=self.client, wallet=self._wallet) - self.verifier = IndyVerifier(client=self.client, wallet=self._wallet) - - @property - def wallet(self): - return self._wallet - - @wallet.setter - def wallet(self, newWallet): - self._wallet = newWallet - - @property - def walletSaver(self): - if self._walletSaver is None: - self._walletSaver = WalletStorageHelper( - self.getWalletsBaseDir(), - dmode=self.config.WALLET_DIR_MODE, - fmode=self.config.WALLET_FILE_MODE) - return self._walletSaver - - @Agent.client.setter - def client(self, client): - Agent.client.fset(self, client) - if self.client: - self._initIssuerProverVerifier() - - def start(self, loop): - super().start(loop) - - def stop(self, *args, **kwargs): - self._saveAllWallets() - super().stop(*args, **kwargs) - - def getWalletsBaseDir(self): - return os.path.expanduser(os.path.join(self.config.CLI_BASE_DIR, self.config.walletsDir)) - - def getContextDir(self): - return os.path.join( - self.getWalletsBaseDir(), - "agents", self.name.lower().replace(" ", "-")) - - def _getIssuerWalletContextDir(self): - return os.path.join(self.getContextDir(), "issuer") - - def _saveAllWallets(self): - self._saveWallet(self._wallet, self.getContextDir()) - 
self._saveIssuerWallet() - # TODO: There are some other wallets for prover and verifier, - # which we may also have to persist/restore as need arises - - def _saveIssuerWallet(self): - if self.issuer: - self.issuer.prepareForWalletPersistence() - self._saveWallet( - self.issuer.wallet, - self._getIssuerWalletContextDir(), - walletName="issuer") - - def _saveWallet(self, wallet: Wallet, contextDir, walletName=None): - try: - walletName = walletName or wallet.name - fileName = normalizedWalletFileName(walletName) - walletFilePath = self.walletSaver.saveWallet( - wallet, getWalletFilePath(contextDir, fileName)) - self.logger.info('Active wallet "{}" saved ({})'. - format(walletName, walletFilePath)) - except IOError as ex: - self.logger.info("Error occurred while saving wallet. " + - "error no.{}, error.{}" - .format(ex.errno, ex.strerror)) - - def _restoreWallet(self): - restoredWallet, walletFilePath = self._restoreLastActiveWallet( - self.getContextDir()) - if restoredWallet: - self.wallet = restoredWallet - self.logger.info('Saved wallet "{}" restored ({})'. - format(self.wallet.name, walletFilePath)) - - def _restoreIssuerWallet(self): - if self.issuer: - restoredWallet, walletFilePath = self._restoreLastActiveWallet( - self._getIssuerWalletContextDir()) - if restoredWallet: - self.issuer.restorePersistedWallet(restoredWallet) - self.logger.info('Saved wallet "issuer" restored ({})'. - format(walletFilePath)) - - def _restoreLastActiveWallet(self, contextDir): - walletFilePath = None - try: - walletFileName = getLastSavedWalletFileName(contextDir) - walletFilePath = os.path.join(contextDir, walletFileName) - wallet = self.walletSaver.loadWallet(walletFilePath) - # TODO: What about current wallet if any? 
- return wallet, walletFilePath - except ValueError as e: - if not str(e) == "max() arg is an empty sequence": - self.logger.info("No wallet to restore") - except (ValueError, AttributeError) as e: - self.logger.info( - "error occurred while restoring wallet {}: {}". - format(walletFilePath, e)) - except IOError as exc: - if exc.errno == errno.ENOENT: - self.logger.debug("no such wallet file exists ({})". - format(walletFilePath)) - else: - raise exc - return None, None diff --git a/indy_client/anon_creds/constant.py b/indy_client/anon_creds/constant.py deleted file mode 100644 index 6401216c1..000000000 --- a/indy_client/anon_creds/constant.py +++ /dev/null @@ -1,21 +0,0 @@ - -ISSUER = "issuer" -NONCE = "nonce" -PROOF = "proof" - -ATTRS = "attrs" -REVEALED_ATTRS = "revealedAttrs" - -CRED_A = "A" -CRED_E = "e" -CRED_V = "v" -C_VALUE = "c" - -EVECT = "evect" -MVECT = "mvect" -VVECT = "vvect" - -A_PRIME = "Aprime" -V_PRIME_PRIME = "vprimeprime" -ENCODED_ATTRS = "encodedAttrs" -ZERO_INDEX = '0' diff --git a/indy_client/anon_creds/indy_issuer.py b/indy_client/anon_creds/indy_issuer.py deleted file mode 100644 index a72bf2ff8..000000000 --- a/indy_client/anon_creds/indy_issuer.py +++ /dev/null @@ -1,53 +0,0 @@ -from anoncreds.protocol.issuer import Issuer -from anoncreds.protocol.repo.attributes_repo import AttributeRepo -from anoncreds.protocol.repo.public_repo import PublicRepo -from anoncreds.protocol.wallet.issuer_wallet import IssuerWalletInMemory -from indy_client.anon_creds.indy_public_repo import IndyPublicRepo -from indy_client.client.wallet.wallet import Wallet - - -class IndyIssuer(Issuer): - def __init__(self, client, wallet: Wallet, attrRepo: AttributeRepo, - publicRepo: PublicRepo = None): - publicRepo = publicRepo or IndyPublicRepo(client=client, - wallet=wallet) - issuerWallet = IndyIssuerWalletInMemory(wallet.name, publicRepo) - - super().__init__(issuerWallet, attrRepo) - - def prepareForWalletPersistence(self): - # TODO: If we don't set 
self.wallet._repo.client to None, - # it hangs during wallet persistence, based on findings, it seems, - # somewhere it hangs during persisting client._ledger and - # client.ledgerManager - self.wallet._repo.client = None - - def restorePersistedWallet(self, issuerWallet): - curRepoClient = self.wallet._repo.client - self.wallet = issuerWallet - self._primaryIssuer._wallet = issuerWallet - self._nonRevocationIssuer._wallet = issuerWallet - self.wallet._repo.client = curRepoClient - - -class IndyIssuerWalletInMemory(IssuerWalletInMemory): - - def __init__(self, name, pubRepo): - - IssuerWalletInMemory.__init__(self, name, pubRepo) - - # available claims to anyone whose connection is accepted by the agent - self.availableClaimsToAll = [] - - # available claims only for certain invitation (by nonce) - self.availableClaimsByNonce = {} - - # available claims only for certain invitation (by nonce) - self.availableClaimsByInternalId = {} - - # mapping between specific identifier and available claims which would - # have been available once they have provided requested information - # like proof etc. 
- self.availableClaimsByIdentifier = {} - - self._proofRequestsSchema = {} # Dict[str, Dict[str, any]] diff --git a/indy_client/anon_creds/indy_prover.py b/indy_client/anon_creds/indy_prover.py deleted file mode 100644 index ee0881f5d..000000000 --- a/indy_client/anon_creds/indy_prover.py +++ /dev/null @@ -1,13 +0,0 @@ -from anoncreds.protocol.prover import Prover -from anoncreds.protocol.repo.public_repo import PublicRepo -from anoncreds.protocol.wallet.prover_wallet import ProverWalletInMemory -from indy_client.anon_creds.indy_public_repo import IndyPublicRepo -from indy_client.client.wallet.wallet import Wallet - - -class IndyProver(Prover): - def __init__(self, client, wallet: Wallet, publicRepo: PublicRepo = None): - publicRepo = publicRepo or IndyPublicRepo( - client=client, wallet=wallet) - proverWallet = ProverWalletInMemory(wallet.name, publicRepo) - super().__init__(proverWallet) diff --git a/indy_client/anon_creds/indy_public_repo.py b/indy_client/anon_creds/indy_public_repo.py deleted file mode 100644 index 073377f25..000000000 --- a/indy_client/anon_creds/indy_public_repo.py +++ /dev/null @@ -1,232 +0,0 @@ -import json -from typing import Optional - -from plenum.common.txn_util import get_seq_no, get_from, get_payload_data, get_type -from plenum.common.types import f - -from anoncreds.protocol.exceptions import SchemaNotFoundError -from ledger.util import F -from stp_core.loop.eventually import eventually -from plenum.common.exceptions import NoConsensusYet, OperationError -from stp_core.common.log import getlogger -from plenum.common.constants import TXN_TYPE, DATA, NAME, \ - VERSION, CURRENT_PROTOCOL_VERSION, \ - DOMAIN_LEDGER_ID - -from indy_common.constants import GET_SCHEMA, SCHEMA, \ - GET_CLAIM_DEF, CLAIM_DEF, PRIMARY, REVOCATION, GET_TXNS, CLAIM_DEF_SCHEMA_REF, CLAIM_DEF_FROM, \ - CLAIM_DEF_SIGNATURE_TYPE, CLAIM_DEF_TAG, CLAIM_DEF_TAG_DEFAULT, SCHEMA_FROM, SCHEMA_NAME, SCHEMA_VERSION, \ - SCHEMA_ATTR_NAMES, CLAIM_DEF_PUBLIC_KEYS - -from 
anoncreds.protocol.repo.public_repo import PublicRepo -from anoncreds.protocol.types import Schema, ID, PublicKey, \ - RevocationPublicKey, AccumulatorPublicKey, \ - Accumulator, Tails, TimestampType -from indy_common.types import Request -from indy_common.util import get_reply_if_confirmed - - -def _ensureReqCompleted(reqKey, client, clbk): - reply, err = get_reply_if_confirmed(client, *reqKey) - if err: - raise OperationError(err) - - if reply is None: - raise NoConsensusYet('not completed') - - return clbk(reply, err) - - -def _getData(result, error): - data = result.get(DATA, {}) - # TODO: we have an old txn in the live pool where DATA is stored a json string. - # We can get rid of the code above once we create a versioning support in - # txns - if isinstance(data, str): - data = json.loads(data) - seqNo = result.get(F.seqNo.name) - return data, seqNo - - -def _submitData(result, error): - data = get_payload_data(result) - seqNo = get_seq_no(result) - return data, seqNo - - -logger = getlogger() - - -class IndyPublicRepo(PublicRepo): - def __init__(self, client, wallet): - self.client = client - self.wallet = wallet - self.displayer = print - - async def getSchema(self, id: ID) -> Optional[Schema]: - data = None - issuer_id = None - if id.schemaKey: - issuer_id = id.schemaKey.issuerId - op = { - SCHEMA_FROM: issuer_id, - TXN_TYPE: GET_SCHEMA, - DATA: { - SCHEMA_NAME: id.schemaKey.name, - SCHEMA_VERSION: id.schemaKey.version, - } - } - data, seqNo = await self._sendGetReq(op) - - else: - op = { - f.LEDGER_ID.nm: DOMAIN_LEDGER_ID, - TXN_TYPE: GET_TXNS, - DATA: id.schemaId - } - res, seqNo = await self._sendGetReq(op) - if res and get_type(res) == SCHEMA: - issuer_id = get_from(res) - data = get_payload_data(res)[DATA] - - if not data or SCHEMA_ATTR_NAMES not in data: - raise SchemaNotFoundError( - 'No schema with ID={} and key={}'.format( - id.schemaId, - id.schemaKey)) - - return Schema(name=data[NAME], - version=data[VERSION], - 
attrNames=data[SCHEMA_ATTR_NAMES], - issuerId=issuer_id, - seqId=seqNo) - - async def getPublicKey(self, id: ID = None, signatureType='CL') -> Optional[PublicKey]: - op = { - TXN_TYPE: GET_CLAIM_DEF, - CLAIM_DEF_SCHEMA_REF: id.schemaId, - CLAIM_DEF_FROM: id.schemaKey.issuerId, - CLAIM_DEF_SIGNATURE_TYPE: signatureType, - CLAIM_DEF_TAG: CLAIM_DEF_TAG_DEFAULT - } - - data, seqNo = await self._sendGetReq(op) - if not data: - raise ValueError( - 'No CLAIM_DEF for schema with ID={} and key={}'.format( - id.schemaId, id.schemaKey)) - data = data[PRIMARY] - pk = PublicKey.from_str_dict(data)._replace(seqId=seqNo) - return pk - - async def getPublicKeyRevocation(self, id: ID, - signatureType='CL') -> Optional[RevocationPublicKey]: - op = { - TXN_TYPE: GET_CLAIM_DEF, - CLAIM_DEF_SCHEMA_REF: id.schemaId, - CLAIM_DEF_FROM: id.schemaKey.issuerId, - CLAIM_DEF_SIGNATURE_TYPE: signatureType, - CLAIM_DEF_TAG: CLAIM_DEF_TAG_DEFAULT - } - data, seqNo = await self._sendGetReq(op) - if not data: - raise ValueError( - 'No CLAIM_DEF for schema with ID={} and key={}'.format( - id.schemaId, id.schemaKey)) - if REVOCATION not in data: - return None - data = data[REVOCATION] - pkR = RevocationPublicKey.fromStrDict(data)._replace(seqId=seqNo) - return pkR - - async def getPublicKeyAccumulator(self, id: ID) -> AccumulatorPublicKey: - raise NotImplementedError - - async def getAccumulator(self, id: ID) -> Accumulator: - raise NotImplementedError - - async def getTails(self, id: ID) -> Tails: - raise NotImplementedError - - # SUBMIT - - async def submitSchema(self, - schema: Schema) -> Schema: - data = { - SCHEMA_NAME: schema.name, - SCHEMA_VERSION: schema.version, - SCHEMA_ATTR_NAMES: schema.attrNames - } - op = { - TXN_TYPE: SCHEMA, - DATA: data - } - _, seqNo = await self._sendSubmitReq(op) - if seqNo: - schema = schema._replace(issuerId=self.wallet.defaultId, - seqId=seqNo) - return schema - - async def submitPublicKeys(self, - id: ID, - pk: PublicKey, - pkR: RevocationPublicKey = None, - 
signatureType='CL') -> \ - (PublicKey, RevocationPublicKey): - - data = {} - if pk is not None: - data[PRIMARY] = pk.to_str_dict() - if pkR is not None: - data[REVOCATION] = pkR.toStrDict() - - op = { - TXN_TYPE: CLAIM_DEF, - CLAIM_DEF_SCHEMA_REF: id.schemaId, - CLAIM_DEF_PUBLIC_KEYS: data, - CLAIM_DEF_SIGNATURE_TYPE: signatureType, - CLAIM_DEF_TAG: CLAIM_DEF_TAG_DEFAULT - } - - _, seqNo = await self._sendSubmitReq(op) - - if seqNo: - pk = pk._replace(seqId=seqNo) - - if pkR is not None: - pkR = pkR._replace(seqId=seqNo) - - return pk, pkR - - async def submitAccumulator(self, id: ID, accumPK: AccumulatorPublicKey, - accum: Accumulator, tails: Tails): - raise NotImplementedError - - async def submitAccumUpdate(self, id: ID, accum: Accumulator, - timestampMs: TimestampType): - raise NotImplementedError - - async def _sendSubmitReq(self, op): - return await self._sendReq(op, _submitData) - - async def _sendGetReq(self, op): - return await self._sendReq(op, _getData) - - async def _sendReq(self, op, clbk): - req = Request(identifier=self.wallet.defaultId, - operation=op, - protocolVersion=CURRENT_PROTOCOL_VERSION) - req = self.wallet.prepReq(req) - self.client.submitReqs(req) - try: - # TODO: Come up with an explanation, why retryWait had to be - # increases to 1 from .5 to pass some tests and from 1 to 2 to - # pass some other tests. The client was not getting a chance to - # service its stack, we need to find a way to stop this starvation. 
- resp = await eventually(_ensureReqCompleted, - (req.identifier, req.reqId), - self.client, clbk, - timeout=20, retryWait=2) - except NoConsensusYet: - raise TimeoutError('Request timed out') - return resp diff --git a/indy_client/anon_creds/indy_verifier.py b/indy_client/anon_creds/indy_verifier.py deleted file mode 100644 index e8d028d69..000000000 --- a/indy_client/anon_creds/indy_verifier.py +++ /dev/null @@ -1,15 +0,0 @@ -from anoncreds.protocol.repo.public_repo import PublicRepo - -from anoncreds.protocol.verifier import Verifier -from anoncreds.protocol.wallet.wallet import WalletInMemory - -from indy_client.anon_creds.indy_public_repo import IndyPublicRepo -from indy_client.client.wallet.wallet import Wallet - - -class IndyVerifier(Verifier): - def __init__(self, client, wallet: Wallet, publicRepo: PublicRepo = None): - publicRepo = publicRepo or IndyPublicRepo( - client=client, wallet=wallet) - verifierWallet = WalletInMemory(wallet.defaultId, publicRepo) - super().__init__(verifierWallet) diff --git a/indy_client/cli/cli.py b/indy_client/cli/cli.py deleted file mode 100644 index a6a1be118..000000000 --- a/indy_client/cli/cli.py +++ /dev/null @@ -1,2183 +0,0 @@ -import ast -import asyncio -import datetime -import importlib -import json -import os -import traceback -from collections import OrderedDict -from functools import partial -from hashlib import sha256 -from operator import itemgetter -from typing import Dict, Any, Tuple, Callable, NamedTuple - -import base58 -from libnacl import randombytes -from prompt_toolkit import prompt -from prompt_toolkit.contrib.completers import WordCompleter -from prompt_toolkit.layout.lexers import SimpleLexer -from pygments.token import Token - -from anoncreds.protocol.exceptions import SchemaNotFoundError -from anoncreds.protocol.globals import KEYS, ATTR_NAMES -from anoncreds.protocol.types import Schema, ID, ProofRequest -from indy_client.agent.constants import EVENT_NOTIFY_MSG, EVENT_POST_ACCEPT_INVITE, \ - 
EVENT_NOT_CONNECTED_TO_ANY_ENV -from indy_client.agent.msg_constants import ERR_NO_PROOF_REQUEST_SCHEMA_FOUND -from indy_client.agent.walleted_agent import WalletedAgent -from indy_client.cli.command import acceptConnectionCmd, connectToCmd, \ - disconnectCmd, loadFileCmd, newDIDCmd, pingTargetCmd, reqClaimCmd, \ - sendAttribCmd, sendProofCmd, sendGetNymCmd, sendClaimDefCmd, sendNodeCmd, \ - sendGetAttrCmd, sendGetSchemaCmd, sendGetClaimDefCmd, \ - sendNymCmd, sendPoolUpgCmd, sendSchemaCmd, setAttrCmd, showClaimCmd, \ - listClaimsCmd, showFileCmd, showConnectionCmd, syncConnectionCmd, addGenesisTxnCmd, \ - sendProofRequestCmd, showProofRequestCmd, reqAvailClaimsCmd, listConnectionsCmd, sendPoolConfigCmd, changeKeyCmd -from indy_client.cli.helper import getNewClientGrams, \ - USAGE_TEXT, NEXT_COMMANDS_TO_TRY_TEXT -from indy_client.client.client import Client -from indy_client.client.wallet.attribute import Attribute, LedgerStore -from indy_client.client.wallet.connection import Connection -from indy_client.client.wallet.node import Node -from indy_client.client.wallet.pool_config import PoolConfig -from indy_client.client.wallet.upgrade import Upgrade -from indy_client.client.wallet.wallet import Wallet -from indy_client.utils.migration import combined_migration -from indy_client.utils.migration.combined_migration import \ - is_cli_base_dir_untouched, legacy_base_dir_exists -from indy_common.auth import Authoriser -from indy_common.config_util import getConfig -from indy_common.constants import TARGET_NYM, ROLE, TXN_TYPE, NYM, REF, \ - ACTION, SHA256, TIMEOUT, SCHEDULE, START, JUSTIFICATION, NULL, WRITES, \ - REINSTALL, SCHEMA_ATTR_NAMES, PACKAGE, APP_NAME -from indy_common.exceptions import InvalidConnectionException, ConnectionAlreadyExists, \ - ConnectionNotFound, NotConnectedToNetwork -from indy_common.identity import Identity -from indy_common.roles import Roles -from indy_common.txn_util import getTxnOrderedFields -from indy_common.util import 
ensureReqCompleted, getIndex, \ - invalidate_config_caches -from indy_node.__metadata__ import __version__ -from ledger.genesis_txn.genesis_txn_file_util import genesis_txn_file -from plenum.cli.cli import Cli as PlenumCli -from plenum.cli.constants import PROMPT_ENV_SEPARATOR, NO_ENV -from plenum.cli.helper import getClientGrams -from plenum.cli.phrase_word_completer import PhraseWordCompleter -from plenum.common.constants import NAME, VERSION, VERKEY, DATA, TXN_ID, FORCE, RAW -from plenum.common.exceptions import OperationError -from plenum.common.member.member import Member -from plenum.common.signer_did import DidSigner -from plenum.common.txn_util import createGenesisTxnFile, get_payload_data -from plenum.common.util import randomString, getWalletFilePath -from stp_core.crypto.signer import Signer -from stp_core.crypto.util import cleanSeed -from stp_core.network.port_dispenser import genHa - -try: - nodeMod = importlib.import_module('indy_node.server.node') - nodeClass = nodeMod.Node -except ImportError: - nodeClass = None - -""" -Objective -The plenum cli bootstraps client keys by just adding them to the nodes. -Indy needs the client nyms to be added as transactions first. -I'm thinking maybe the cli needs to support something like this: -new node all - -new client steward with DID (nym matches the genesis transactions) -client steward add bob (cli creates a signer and an ADDNYM for that signer's -cryptonym, and then an alias for bobto that cryptonym.) 
-new client bob (cli uses the signer previously stored for this client) -""" - -Context = NamedTuple("Context", [("link", Connection), - ("proofRequest", Any), - ("selfAttestedAttrs", Any)]) - - -class IndyCli(PlenumCli): - name = 'indy' - properName = 'Indy' - fullName = 'Indy Identity platform' - githubUrl = 'https://github.com/hyperledger/indy-node' - - NodeClass = nodeClass - ClientClass = Client - _genesisTransactions = [] - - override_file_path = None - - def __init__(self, *args, **kwargs): - IndyCli._migrate_legacy_app_data_if_just_upgraded_and_user_agrees() - - self.aliases = {} # type: Dict[str, Signer] - self.trustAnchors = set() - self.users = set() - self._agent = None - - # This specifies which environment the cli is connected to test or live - self.activeEnv = None - - super().__init__(*args, **kwargs) - - # Load available environments - self.envs = self.get_available_networks() - - # TODO bad code smell - self.curContext = Context(None, None, {}) # type: Context - - @staticmethod - def _migrate_legacy_app_data_if_just_upgraded_and_user_agrees(): - if is_cli_base_dir_untouched() and legacy_base_dir_exists(): - print('Application data from previous Indy version has been found') - answer = prompt('Do you want to migrate it? 
[Y/n] ') - - if not answer or answer.upper().startswith('Y'): - try: - combined_migration.migrate() - # Invalidate config caches to pick up overridden config - # parameters from migrated application data - invalidate_config_caches() - print('Application data has been migrated') - - except Exception as e: - print('Error occurred when trying to migrate' - ' application data: {}'.format(e)) - traceback.print_exc() - print('Application data has not been migrated') - - else: - print('Application data was not migrated') - - @staticmethod - def getCliVersion(): - return __version__ - - @property - def pool_ledger_dir(self): - if not self.activeEnv: - return self.ledger_base_dir - return os.path.join(self.ledger_base_dir, self.activeEnv) - - @property - def lexers(self): - lexerNames = [ - 'send_nym', - 'send_get_nym', - 'send_attrib', - 'send_get_attr', - 'send_schema', - 'send_get_schema', - 'send_claim_def', - 'send_get_claim_def', - 'send_node', - 'send_pool_upg', - 'add_genesis', - 'show_file', - 'conn', - 'disconn', - 'load_file', - 'show_connection', - 'sync_connection', - 'ping_target' - 'show_claim', - 'list_claims', - 'list_connections', - # 'show_claim_req', - 'show_proof_request', - 'request_claim', - 'accept_connection_request', - 'set_attr', - 'send_proof_request' - 'send_proof', - 'new_id', - 'request_avail_claims', - 'change_ckey' - ] - lexers = {n: SimpleLexer(Token.Keyword) for n in lexerNames} - # Add more lexers to base class lexers - return {**super().lexers, **lexers} - - @property - def completers(self): - completers = {} - completers["nym"] = WordCompleter([]) - completers["role"] = WordCompleter( - [Roles.TRUST_ANCHOR.name, Roles.STEWARD.name]) - completers["send_nym"] = PhraseWordCompleter(sendNymCmd.id) - completers["send_get_nym"] = PhraseWordCompleter(sendGetNymCmd.id) - completers["send_attrib"] = PhraseWordCompleter(sendAttribCmd.id) - completers["send_get_attr"] = PhraseWordCompleter(sendGetAttrCmd.id) - completers["send_schema"] = 
PhraseWordCompleter(sendSchemaCmd.id) - completers["send_get_schema"] = PhraseWordCompleter( - sendGetSchemaCmd.id) - completers["send_claim_def"] = PhraseWordCompleter(sendClaimDefCmd.id) - completers["send_get_claim_def"] = PhraseWordCompleter( - sendGetClaimDefCmd.id) - completers["send_node"] = PhraseWordCompleter(sendNodeCmd.id) - completers["send_pool_upg"] = PhraseWordCompleter(sendPoolUpgCmd.id) - completers["send_pool_config"] = PhraseWordCompleter( - sendPoolConfigCmd.id) - completers["add_genesis"] = PhraseWordCompleter( - addGenesisTxnCmd.id) - completers["show_file"] = WordCompleter([showFileCmd.id]) - completers["load_file"] = WordCompleter([loadFileCmd.id]) - completers["show_connection"] = PhraseWordCompleter( - showConnectionCmd.id) - completers["conn"] = WordCompleter([connectToCmd.id]) - completers["disconn"] = WordCompleter([disconnectCmd.id]) - completers["env_name"] = WordCompleter(self.get_available_networks()) - completers["sync_connection"] = WordCompleter([syncConnectionCmd.id]) - completers["ping_target"] = WordCompleter([pingTargetCmd.id]) - completers["show_claim"] = PhraseWordCompleter(showClaimCmd.id) - completers["request_claim"] = PhraseWordCompleter(reqClaimCmd.id) - completers["accept_connection_request"] = PhraseWordCompleter( - acceptConnectionCmd.id) - completers["set_attr"] = WordCompleter([setAttrCmd.id]) - completers["new_id"] = PhraseWordCompleter(newDIDCmd.id) - completers["list_claims"] = PhraseWordCompleter(listClaimsCmd.id) - completers["list_connections"] = PhraseWordCompleter( - listConnectionsCmd.id) - completers["show_proof_request"] = PhraseWordCompleter( - showProofRequestCmd.id) - completers["send_proof_request"] = PhraseWordCompleter( - sendProofRequestCmd.id) - completers["send_proof"] = PhraseWordCompleter(sendProofCmd.id) - completers["request_avail_claims"] = PhraseWordCompleter( - reqAvailClaimsCmd.id) - completers["change_ckey"] = PhraseWordCompleter(changeKeyCmd.id) - - return {**super().completers, 
**completers} - - def initializeGrammar(self): - self.clientGrams = getClientGrams() + getNewClientGrams() - super().initializeGrammar() - - @property - def actions(self): - actions = super().actions - # Add more actions to base class for indy CLI - if self._sendNymAction not in actions: - actions.extend([self._sendNymAction, - self._sendGetNymAction, - self._sendAttribAction, - self._sendGetAttrAction, - self._sendNodeAction, - self._sendPoolUpgAction, - self._sendPoolConfigAction, - self._sendSchemaAction, - self._sendGetSchemaAction, - self._sendClaimDefAction, - self._sendGetClaimDefAction, - self._addGenTxnAction, - self._showFile, - self._loadFile, - self._showConnection, - self._connectTo, - self._disconnect, - self._syncConnection, - self._pingTarget, - self._showClaim, - self._listClaims, - self._listConnections, - self._reqClaim, - self._showProofRequest, - self._accept_request_connection, - self._setAttr, - self._sendProofRequest, - self._sendProof, - self._newDID, - self._reqAvailClaims, - self._change_current_key_req - ]) - return actions - - @PlenumCli.activeWallet.setter - def activeWallet(self, wallet): - PlenumCli.activeWallet.fset(self, wallet) - if self._agent: - self._agent.wallet = self._activeWallet - - @staticmethod - def _getSetAttrUsage(): - return ['{} to '.format(setAttrCmd.id)] - - @staticmethod - def _getSendProofUsage(proofRequest: ProofRequest=None, - inviter: Connection=None): - return ['{} "{}" to "{}"'.format( - sendProofCmd.id, - proofRequest.name or "", - inviter.name or "")] - - @staticmethod - def _getShowFileUsage(filePath=None): - return ['{} {}'.format(showFileCmd.id, filePath or "")] - - @staticmethod - def _getLoadFileUsage(filePath=None): - return ['{} {}'.format( - loadFileCmd.id, - filePath or "")] - - @staticmethod - def _getShowProofRequestUsage(proofRequest: ProofRequest=None): - return ['{} "{}"'.format( - showProofRequestCmd.id, - (proofRequest and proofRequest.name) or '')] - - @staticmethod - def 
_getShowClaimUsage(claimName=None): - return ['{} "{}"'.format( - showClaimCmd.id, - claimName or "")] - - @staticmethod - def _getReqClaimUsage(claimName=None): - return ['{} "{}"'.format( - reqClaimCmd.id, - claimName or "")] - - @staticmethod - def _getShowConnectionUsage(connectionName=None): - return ['{} "{}"'.format( - showConnectionCmd.id, - connectionName or "")] - - @staticmethod - def _getSyncConnectionUsage(connectionName=None): - return ['{} "{}"'.format( - syncConnectionCmd.id, - connectionName or "")] - - @staticmethod - def _getAcceptConnectionUsage(connectionName=None): - return ['{} "{}"'.format( - acceptConnectionCmd.id, - connectionName or "")] - - @staticmethod - def _getPromptUsage(): - return ["prompt "] - - @property - def allEnvNames(self): - return "|".join(sorted(self.envs, reverse=True)) - - def _getConnectUsage(self): - return ["{} <{}>".format( - connectToCmd.id, - self.allEnvNames)] - - def _printMsg(self, notifier, msg): - self.print(msg) - - def _printSuggestionPostAcceptConnection(self, notifier, - connection: Connection): - suggestions = [] - if len(connection.availableClaims) > 0: - claimName = "|".join([n.name for n in connection.availableClaims]) - claimName = claimName or "" - suggestions += self._getShowClaimUsage(claimName) - suggestions += self._getReqClaimUsage(claimName) - if len(connection.proofRequests) > 0: - for pr in connection.proofRequests: - suggestions += self._getShowProofRequestUsage(pr) - suggestions += self._getSendProofUsage(pr, connection) - if suggestions: - self.printSuggestion(suggestions) - else: - self.print("") - - def sendToAgent(self, msg: Any, connection: Connection): - if not self.agent: - return - - endpoint = connection.remoteEndPoint - self.agent.sendMessage(msg, ha=endpoint) - - @property - def walletClass(self): - return Wallet - - @property - def genesisTransactions(self): - return self._genesisTransactions - - def reset(self): - self._genesisTransactions = [] - - def newNode(self, nodeName: 
str): - createGenesisTxnFile(self.genesisTransactions, self.basedirpath, - self.config.domainTransactionsFile, - getTxnOrderedFields(), reset=False) - nodesAdded = super().newNode(nodeName) - return nodesAdded - - def _printCannotSyncSinceNotConnectedEnvMessage(self): - - self.print("Cannot sync because not connected. Please connect first.") - self._printConnectUsage() - - def _printNotConnectedEnvMessage(self, - prefix="Not connected to Indy network"): - - self.print("{}. Please connect first.".format(prefix)) - self._printConnectUsage() - - def _printConnectUsage(self): - self.printUsage(self._getConnectUsage()) - - def newClient(self, clientName, - config=None): - if not self.activeEnv: - self._printNotConnectedEnvMessage() - # TODO: Return a dummy object that catches all attributes and - # method calls and does nothing. Alo the dummy object should - # initialise to null - return DummyClient() - - client = super().newClient(clientName, config=config) - if self.activeWallet: - client.registerObserver(self.activeWallet.handleIncomingReply) - self.activeWallet.pendSyncRequests() - prepared = self.activeWallet.preparePending() - client.submitReqs(*prepared) - - # If agent was created before the user connected to a test environment - if self._agent: - self._agent.client = client - return client - - def registerAgentListeners(self, agent): - agent.registerEventListener(EVENT_NOTIFY_MSG, self._printMsg) - agent.registerEventListener(EVENT_POST_ACCEPT_INVITE, - self._printSuggestionPostAcceptConnection) - agent.registerEventListener(EVENT_NOT_CONNECTED_TO_ANY_ENV, - self._handleNotConnectedToAnyEnv) - - def deregisterAgentListeners(self, agent): - agent.deregisterEventListener(EVENT_NOTIFY_MSG, self._printMsg) - agent.deregisterEventListener( - EVENT_POST_ACCEPT_INVITE, - self._printSuggestionPostAcceptConnection) - agent.deregisterEventListener(EVENT_NOT_CONNECTED_TO_ANY_ENV, - self._handleNotConnectedToAnyEnv) - - @property - def agent(self) -> WalletedAgent: - if 
self._agent is None: - _, port = genHa() - agent = WalletedAgent( - name=randomString(6), - basedirpath=self.basedirpath, - client=self.activeClient if self.activeEnv else None, - wallet=self.activeWallet, - loop=self.looper.loop, - port=port) - self.agent = agent - return self._agent - - @agent.setter - def agent(self, agent): - if self._agent is not None: - self.deregisterAgentListeners(self._agent) - self.looper.removeProdable(self._agent) - - self._agent = agent - - if agent is not None: - self.registerAgentListeners(self._agent) - self.looper.add(self._agent) - self._activeWallet = self._agent.wallet - self.wallets[self._agent.wallet.name] = self._agent.wallet - - def _handleNotConnectedToAnyEnv(self, notifier, msg): - self.print("\n{}\n".format(msg)) - self._printNotConnectedEnvMessage() - - @staticmethod - def bootstrapClientKeys(idr, verkey, nodes): - pass - - def _clientCommand(self, matchedVars): - if matchedVars.get('client') == 'client': - r = super()._clientCommand(matchedVars) - if r: - return True - - client_name = matchedVars.get('client_name') - if client_name not in self.clients: - self.print("{} cannot add a new user". - format(client_name), Token.BoldOrange) - return True - client_action = matchedVars.get('cli_action') - if client_action == 'add': - otherClientName = matchedVars.get('other_client_name') - role = self._getRole(matchedVars) - signer = DidSigner() - nym = signer.verstr - return self._addNym(nym, Identity.correctRole(role), - newVerKey=None, - otherClientName=otherClientName) - - def _getRole(self, matchedVars): - """ - :param matchedVars: - :return: NULL or the role's integer value - """ - role = matchedVars.get(ROLE) - if role is not None and role.strip() == '': - role = NULL - else: - valid = Authoriser.isValidRoleName(role) - if valid: - role = Authoriser.getRoleFromName(role) - else: - self.print("Invalid role. Valid roles are: {}". 
- format(", ".join(map(lambda r: r.name, Roles))), - Token.Error) - return False - return role - - def _getNym(self, nym): - identity = Identity(identifier=nym) - req = self.activeWallet.requestIdentity( - identity, sender=self.activeWallet.defaultId) - self.activeClient.submitReqs(req) - self.print("Getting nym {}".format(nym)) - - def getNymReply(reply, err, *args): - try: - if err: - self.print("Error: {}".format(err), Token.BoldOrange) - return - - if reply and reply[DATA]: - data = json.loads(reply[DATA]) - if data: - idr = base58.b58decode(nym) - if data.get(VERKEY) is None: - if len(idr) == 32: - self.print( - "Current verkey is same as DID {}" - .format(nym), Token.BoldBlue) - else: - self.print( - "No verkey ever assigned to the DID {}". - format(nym), Token.BoldBlue) - return - if data.get(VERKEY) == '': - self.print("No active verkey found for the DID {}". - format(nym), Token.BoldBlue) - else: - if data[ROLE] is not None and data[ROLE] != '': - self.print("Current verkey for NYM {} is {} with role {}" - .format(nym, data[VERKEY], - Roles.nameFromValue(data[ROLE])), - Token.BoldBlue) - else: - self.print("Current verkey for NYM {} is {}" - .format(nym, data[VERKEY]), Token.BoldBlue) - else: - self.print("NYM {} not found".format(nym), Token.BoldBlue) - except BaseException as e: - self.print("Error during fetching verkey: {}".format(e), - Token.BoldOrange) - - self.looper.loop.call_later(.2, self._ensureReqCompleted, - (req.identifier, req.reqId), - self.activeClient, getNymReply) - - def _addNym(self, nym, role, newVerKey=None, - otherClientName=None, custom_clb=None): - idy = Identity(nym, verkey=newVerKey, role=role) - try: - self.activeWallet.addTrustAnchoredIdentity(idy) - except Exception as e: - if e.args[0] == 'DID already added': - pass - else: - raise e - reqs = self.activeWallet.preparePending() - req = self.activeClient.submitReqs(*reqs)[0][0] - printStr = "Adding nym {}".format(nym) - - if otherClientName: - printStr = printStr + " for " + 
otherClientName - self.print(printStr) - - def out(reply, error, *args, **kwargs): - if error: - self.print("Error: {}".format(error), Token.BoldBlue) - else: - self.print("Nym {} added".format(get_payload_data(reply)[TARGET_NYM]), - Token.BoldBlue) - - self.looper.loop.call_later(.2, - self._ensureReqCompleted, - (req.identifier, req.reqId), - self.activeClient, - custom_clb or out) - return True - - def _addAttribToNym(self, nym, raw, enc, hsh): - attrib = self.activeWallet.build_attrib(nym, raw, enc, hsh) - # TODO: What is the purpose of this? - # if nym != self.activeWallet.defaultId: - # attrib.dest = nym - - self.activeWallet.addAttribute(attrib) - reqs = self.activeWallet.preparePending() - req, errs = self.activeClient.submitReqs(*reqs) - if errs: - for err in errs: - self.print("Request error: {}".format(err), Token.BoldOrange) - - if not req: - return - - req = req[0] - - self.print("Adding attributes {} for {}".format(attrib.value, nym)) - - def out(reply, error, *args, **kwargs): - if error: - self.print("Error: {}".format(error), Token.BoldOrange) - else: - self.print("Attribute added for nym {}". 
- format(get_payload_data(reply)[TARGET_NYM]), Token.BoldBlue) - - self.looper.loop.call_later(.2, self._ensureReqCompleted, - (req.identifier, req.reqId), - self.activeClient, out) - - def _getAttr(self, nym, raw, enc, hsh): - assert int(bool(raw)) + int(bool(enc)) + int(bool(hsh)) == 1 - if raw: - led_store = LedgerStore.RAW - data = raw - elif enc: - led_store = LedgerStore.ENC - data = enc - elif hsh: - led_store = LedgerStore.HASH - data = hsh - else: - raise RuntimeError('One of raw, enc, or hash are required.') - - attrib = Attribute(data, dest=nym, ledgerStore=led_store) - req = self.activeWallet.requestAttribute( - attrib, sender=self.activeWallet.defaultId) - self.activeClient.submitReqs(req) - self.print("Getting attr {}".format(nym)) - - def getAttrReply(reply, err, *args): - if reply and reply[DATA]: - data_to_print = None - if RAW in reply: - data = json.loads(reply[DATA]) - if data: - data_to_print = json.dumps(data) - else: - data_to_print = reply[DATA] - if data_to_print: - self.print("Found attribute {}".format(data_to_print)) - else: - self.print("Attr not found") - - self.looper.loop.call_later(.2, self._ensureReqCompleted, - (req.identifier, req.reqId), - self.activeClient, getAttrReply) - - def _getSchema(self, nym, name, version): - req = self.activeWallet.requestSchema( - nym, name, version, sender=self.activeWallet.defaultId) - self.activeClient.submitReqs(req) - self.print("Getting schema {}".format(nym)) - - def getSchema(reply, err, *args): - try: - if reply and reply[DATA] and SCHEMA_ATTR_NAMES in reply[DATA]: - self.print( - "Found schema {}" - .format(reply[DATA])) - else: - self.print("Schema not found") - except BaseException: - self.print('"data" must be in proper format', Token.Error) - - self.looper.loop.call_later(.2, self._ensureReqCompleted, - (req.identifier, req.reqId), - self.activeClient, getSchema) - - def _getClaimDef(self, seqNo, signature): - req = self.activeWallet.requestClaimDef( - seqNo, signature, 
sender=self.activeWallet.defaultId) - self.activeClient.submitReqs(req) - self.print("Getting claim def {}".format(seqNo)) - - def getClaimDef(reply, err, *args): - try: - if reply and reply[DATA]: - self.print( - "Found claim def {}" - .format(reply[DATA])) - else: - self.print("Claim def not found") - except BaseException: - self.print('"data" must be in proper format', Token.Error) - - self.looper.loop.call_later(.2, self._ensureReqCompleted, - (req.identifier, req.reqId), - self.activeClient, getClaimDef) - - def _sendNodeTxn(self, nym, data): - node = Node(nym, data, self.activeDID) - self.activeWallet.addNode(node) - reqs = self.activeWallet.preparePending() - req = self.activeClient.submitReqs(*reqs)[0][0] - self.print("Sending node request for node DID {} by {} " - "(request id: {})".format(nym, self.activeDID, - req.reqId)) - - def out(reply, error, *args, **kwargs): - if error: - self.print("Node request failed with error: {}".format( - error), Token.BoldOrange) - else: - self.print( - "Node request completed {}".format( - get_payload_data(reply)[TARGET_NYM]), - Token.BoldBlue) - - self.looper.loop.call_later(.2, self._ensureReqCompleted, - (req.identifier, req.reqId), - self.activeClient, out) - - def _sendPoolUpgTxn( - self, - name, - version, - action, - sha256, - schedule=None, - justification=None, - timeout=None, - force=False, - reinstall=False, - pkg_name=APP_NAME): - upgrade = Upgrade( - name, - version, - action, - sha256, - schedule=schedule, - trustee=self.activeDID, - timeout=timeout, - justification=justification, - force=force, - reinstall=reinstall, - package=pkg_name) - self.activeWallet.doPoolUpgrade(upgrade) - reqs = self.activeWallet.preparePending() - req = self.activeClient.submitReqs(*reqs)[0][0] - self.print("Sending pool upgrade {} for version {}". 
- format(name, version)) - - def out(reply, error, *args, **kwargs): - if error: - self.print( - "Pool upgrade failed: {}".format(error), - Token.BoldOrange) - else: - self.print("Pool Upgrade Transaction Scheduled", - Token.BoldBlue) - - self.looper.loop.call_later(.2, self._ensureReqCompleted, - (req.identifier, req.reqId), - self.activeClient, out) - - def _sendPoolConfigTxn(self, writes, force=False): - poolConfig = PoolConfig(trustee=self.activeDID, - writes=writes, force=force) - self.activeWallet.doPoolConfig(poolConfig) - reqs = self.activeWallet.preparePending() - req = self.activeClient.submitReqs(*reqs)[0][0] - self.print( - "Sending pool config writes={} force={}".format( - writes, force)) - - def out(reply, error, *args, **kwargs): - if error: - self.print("Pool config failed: {}".format( - error), Token.BoldOrange) - else: - self.print("Pool config successful", Token.BoldBlue) - - self.looper.loop.call_later(.2, self._ensureReqCompleted, - (req.identifier, req.reqId), - self.activeClient, out) - - @staticmethod - def parseAttributeString(attrs): - attrInput = {} - for attr in attrs.split(','): - name, value = attr.split('=') - name, value = name.strip(), value.strip() - attrInput[name] = value - return attrInput - - def _sendNymAction(self, matchedVars): - if matchedVars.get('send_nym') == sendNymCmd.id: - if not self.canMakeIndyRequest: - return True - nym = matchedVars.get('dest_id') - role = self._getRole(matchedVars) - newVerKey = matchedVars.get('new_ver_key') - if matchedVars.get('verkey') and newVerKey is None: - newVerKey = '' - elif newVerKey is not None: - newVerKey = newVerKey.strip() - self._addNym(nym, role, newVerKey=newVerKey) - return True - - def _sendGetNymAction(self, matchedVars): - if matchedVars.get('send_get_nym') == sendGetNymCmd.id: - if not self.hasAnyKey: - return True - if not self.canMakeIndyRequest: - return True - destId = matchedVars.get('dest_id') - self._getNym(destId) - return True - - def _sendAttribAction(self, 
matchedVars): - if matchedVars.get('send_attrib') == sendAttribCmd.id: - if not self.canMakeIndyRequest: - return True - nym = matchedVars.get('dest_id') - raw = matchedVars.get('raw', None) - enc = matchedVars.get('enc', None) - hsh = matchedVars.get('hash', None) - self._addAttribToNym(nym, raw, enc, hsh) - return True - - def _sendGetAttrAction(self, matchedVars): - if matchedVars.get('send_get_attr') == sendGetAttrCmd.id: - if not self.hasAnyKey: - return True - if not self.canMakeIndyRequest: - return True - nym = matchedVars.get('dest_id') - raw = matchedVars.get('raw', None) - enc = matchedVars.get('enc', None) - hsh = matchedVars.get('hash', None) - self._getAttr(nym, raw, enc, hsh) - return True - - def _sendGetSchemaAction(self, matchedVars): - if matchedVars.get('send_get_schema') == sendGetSchemaCmd.id: - if not self.canMakeIndyRequest: - return True - self.logger.debug("Processing GET_SCHEMA request") - nym = matchedVars.get('dest_id') - name = matchedVars.get('name') - version = matchedVars.get('version') - self._getSchema(nym, name, version) - return True - - def _sendGetClaimDefAction(self, matchedVars): - if matchedVars.get('send_get_claim_def') == sendGetClaimDefCmd.id: - if not self.canMakeIndyRequest: - return True - self.logger.debug("Processing GET_CLAIM_DEF request") - seqNo = int(matchedVars.get('ref')) - signature = matchedVars.get('signature_type') - self._getClaimDef(seqNo, signature) - return True - - def _sendNodeAction(self, matchedVars): - if matchedVars.get('send_node') == sendNodeCmd.id: - if not self.canMakeIndyRequest: - return True - nym = matchedVars.get('dest_id') - data = matchedVars.get('data').strip() - try: - data = ast.literal_eval(data) - self._sendNodeTxn(nym, data) - except BaseException: - self.print('"data" must be in proper format', Token.Error) - return True - - def _sendPoolUpgAction(self, matchedVars): - if matchedVars.get('send_pool_upg') == sendPoolUpgCmd.id: - if not self.canMakeIndyRequest: - return True - 
name = matchedVars.get(NAME).strip() - version = matchedVars.get(VERSION).strip() - action = matchedVars.get(ACTION).strip() - sha256 = matchedVars.get(SHA256).strip() - timeout = matchedVars.get(TIMEOUT) - schedule = matchedVars.get(SCHEDULE) - justification = matchedVars.get(JUSTIFICATION) - force = matchedVars.get(FORCE, "False") - reinstall = matchedVars.get(REINSTALL, "False") - package = matchedVars.get(PACKAGE, APP_NAME) - force = force == "True" - reinstall = reinstall == "True" - if action == START: - if not schedule: - self.print('{} need to be provided'.format(SCHEDULE), - Token.Error) - return True - if not timeout: - self.print('{} need to be provided'.format(TIMEOUT), - Token.Error) - return True - try: - if schedule: - schedule = ast.literal_eval(schedule.strip()) - except BaseException: - self.print('"schedule" must be in proper format', Token.Error) - return True - if timeout: - timeout = int(timeout.strip()) - self._sendPoolUpgTxn(name, version, action, sha256, - schedule=schedule, timeout=timeout, - justification=justification, force=force, - reinstall=reinstall, pkg_name=package) - return True - - def _sendPoolConfigAction(self, matchedVars): - if matchedVars.get('send_pool_config') == sendPoolConfigCmd.id: - if not self.canMakeIndyRequest: - return True - writes = matchedVars.get(WRITES, "False") - writes = writes == "True" - force = matchedVars.get(FORCE, "False") - force = force == "True" - self._sendPoolConfigTxn(writes, force=force) - return True - - def _sendSchemaAction(self, matchedVars): - if matchedVars.get('send_schema') == sendSchemaCmd.id: - self.agent.loop.call_soon(asyncio.ensure_future, - self._sendSchemaActionAsync(matchedVars)) - return True - - async def _sendSchemaActionAsync(self, matchedVars): - if not self.canMakeIndyRequest: - return True - - try: - schema = await self.agent.issuer.genSchema( - name=matchedVars.get(NAME), - version=matchedVars.get(VERSION), - attrNames=[s.strip() for s in 
matchedVars.get(KEYS).split(",")]) - except OperationError as ex: - self.print("Can not add SCHEMA {}".format(ex), - Token.BoldOrange) - return False - - self.print("The following schema is published " - "to the Indy distributed ledger\n", Token.BoldBlue, - newline=False) - self.print("{}".format(str(schema))) - self.print("Sequence number is {}".format(schema.seqId), - Token.BoldBlue) - - return True - - def _sendClaimDefAction(self, matchedVars): - if matchedVars.get('send_claim_def') == sendClaimDefCmd.id: - self.agent.loop.call_soon( - asyncio.ensure_future, - self._sendClaimDefActionAsync(matchedVars)) - return True - - async def _sendClaimDefActionAsync(self, matchedVars): - if not self.canMakeIndyRequest: - return True - reference = int(matchedVars.get(REF)) - id = ID(schemaId=reference) - try: - pk, pkR = await self.agent.issuer.genKeys(id) - except SchemaNotFoundError: - self.print("Schema with seqNo {} not found".format(reference), - Token.BoldOrange) - return False - - self.print("The claim definition was published to the" - " Indy distributed ledger:\n", Token.BoldBlue, - newline=False) - self.print("Sequence number is {}".format(pk[0].seqId), - Token.BoldBlue) - - return True - - def printUsageMsgs(self, msgs): - for m in msgs: - self.print(' {}'.format(m)) - self.print("\n") - - def printSuggestion(self, msgs): - self.print("\n{}".format(NEXT_COMMANDS_TO_TRY_TEXT)) - self.printUsageMsgs(msgs) - - def printUsage(self, msgs): - self.print("\n{}".format(USAGE_TEXT)) - self.printUsageMsgs(msgs) - - def _loadFile(self, matchedVars): - if matchedVars.get('load_file') == loadFileCmd.id: - if not self.agent: - self._printNotConnectedEnvMessage() - else: - givenFilePath = matchedVars.get('file_path') - filePath = IndyCli._getFilePath(givenFilePath) - try: - # TODO: Shouldn't just be the wallet be involved in loading - # a request. 
- connection = self.agent.load_request_file(filePath) - self._printShowAndAcceptConnectionUsage(connection.name) - except (FileNotFoundError, TypeError): - self.print("Given file does not exist") - msgs = self._getShowFileUsage() + self._getLoadFileUsage() - self.printUsage(msgs) - except ConnectionAlreadyExists: - self.print("Connection already exists") - except ConnectionNotFound: - self.print("No connection request found in the given file") - except ValueError: - self.print("Input is not a valid json" - "please check and try again") - except InvalidConnectionException as e: - self.print(e.args[0]) - return True - - @classmethod - def _getFilePath(cls, givenPath, caller_file=None): - curDirPath = os.path.dirname(os.path.abspath(caller_file or - cls.override_file_path or - __file__)) - sampleExplicitFilePath = curDirPath + "/../../" + givenPath - sampleImplicitFilePath = curDirPath + "/../../sample/" + givenPath - - if os.path.isfile(givenPath): - return givenPath - elif os.path.isfile(sampleExplicitFilePath): - return sampleExplicitFilePath - elif os.path.isfile(sampleImplicitFilePath): - return sampleImplicitFilePath - else: - return None - - def _get_request_matching_connections(self, connectionName): - exactMatched = {} - likelyMatched = {} - # if we want to search in all wallets, then, - # change [self.activeWallet] to self.wallets.values() - walletsToBeSearched = [self.activeWallet] # self.wallets.values() - for w in walletsToBeSearched: - # TODO: This should be moved to wallet - requests = w.getMatchingConnections(connectionName) - for i in requests: - if i.name == connectionName: - if w.name in exactMatched: - exactMatched[w.name].append(i) - else: - exactMatched[w.name] = [i] - else: - if w.name in likelyMatched: - likelyMatched[w.name].append(i) - else: - likelyMatched[w.name] = [i] - - # TODO: instead of a comment, this should be implemented as a test - # Here is how the return dictionary should look like: - # { - # "exactlyMatched": { - # "Default": 
[connectionWithExactName], - # "WalletOne" : [connectionWithExactName], - # }, "likelyMatched": { - # "Default": [similarMatches1, similarMatches2], - # "WalletOne": [similarMatches2, similarMatches3] - # } - # } - return { - "exactlyMatched": exactMatched, - "likelyMatched": likelyMatched - } - - def _syncConnectionPostEndPointRetrieval( - self, - postSync, - connection: Connection, - reply, - err, - **kwargs): - if err: - self.print(' {}'.format(err)) - return True - - postSync(connection) - - def _printUsagePostSync(self, connection): - self._printShowAndAcceptConnectionUsage(connection.name) - - def _getTargetEndpoint(self, li, postSync): - if not self.activeWallet.identifiers: - self.print("No key present in wallet for making request on Indy," - " so adding one") - self._newSigner(wallet=self.activeWallet) - if self._isConnectedToAnyEnv(): - self.print("\nSynchronizing...") - doneCallback = partial(self._syncConnectionPostEndPointRetrieval, - postSync, li) - try: - self.agent.sync(li.name, doneCallback) - except NotConnectedToNetwork: - self._printCannotSyncSinceNotConnectedEnvMessage() - else: - if not self.activeEnv: - self._printCannotSyncSinceNotConnectedEnvMessage() - - def _getOneConnectionForFurtherProcessing(self, connectionName): - totalFound, exactlyMatchedConnections, likelyMatchedConnections = \ - self._get_matching_requests_detail(connectionName) - - if totalFound == 0: - self._printNoConnectionFoundMsg() - return None - - if totalFound > 1: - self._printMoreThanOneConnectionFoundMsg( - connectionName, exactlyMatchedConnections, likelyMatchedConnections) - return None - li = self._getOneConnection( - exactlyMatchedConnections, likelyMatchedConnections) - if IndyCli.isNotMatching(connectionName, li.name): - self.print('Expanding {} to "{}"'.format(connectionName, li.name)) - return li - - def _sendAcceptInviteToTargetEndpoint(self, connection: Connection): - self.agent.accept_request(connection) - - def _acceptConnectionPostSync(self, connection: 
Connection): - if connection.isRemoteEndpointAvailable: - self._sendAcceptInviteToTargetEndpoint(connection) - else: - self.print("Remote endpoint ({}) not found, " - "can not connect to {}\n".format( - connection.remoteEndPoint, connection.name)) - self.logger.debug("{} has remote endpoint {}". - format(connection, connection.remoteEndPoint)) - - def _accept_connection_request(self, connectionName): - li = self._getOneConnectionForFurtherProcessing(connectionName) - - if li: - if li.isAccepted: - self._printConnectionAlreadyExcepted(li.name) - else: - self.print("Request not yet verified.") - if not li.connection_last_synced: - self.print("Connection not yet synchronized.") - - if self._isConnectedToAnyEnv(): - self.print("Attempting to sync...") - self._getTargetEndpoint(li, self._acceptConnectionPostSync) - else: - if li.isRemoteEndpointAvailable: - self._sendAcceptInviteToTargetEndpoint(li) - else: - self.print("Request acceptance aborted.") - self._printNotConnectedEnvMessage( - "Cannot sync because not connected") - - def _sync_connection_request(self, connectionName): - li = self._getOneConnectionForFurtherProcessing(connectionName) - if li: - self._getTargetEndpoint(li, self._printUsagePostSync) - - @staticmethod - def isNotMatching(source, target): - return source.lower() != target.lower() - - @staticmethod - def removeSpecialChars(name): - return name.replace('"', '').replace("'", "") if name else None - - def _printSyncConnectionUsage(self, connectionName): - msgs = self._getSyncConnectionUsage(connectionName) - self.printSuggestion(msgs) - - def _printSyncAndAcceptUsage(self, connectionName): - msgs = self._getSyncConnectionUsage(connectionName) + \ - self._getAcceptConnectionUsage(connectionName) - self.printSuggestion(msgs) - - def _printConnectionAlreadyExcepted(self, connectionName): - self.print( - "Connection {} is already accepted\n".format(connectionName)) - - def _printShowAndAcceptConnectionUsage(self, connectionName=None): - msgs = 
self._getShowConnectionUsage(connectionName) + \ - self._getAcceptConnectionUsage(connectionName) - self.printSuggestion(msgs) - - def _printShowAndLoadFileUsage(self): - msgs = self._getShowFileUsage() + self._getLoadFileUsage() - self.printUsage(msgs) - - def _printShowAndLoadFileSuggestion(self): - msgs = self._getShowFileUsage() + self._getLoadFileUsage() - self.printSuggestion(msgs) - - def _printNoConnectionFoundMsg(self): - self.print("No matching connection requests found in current wallet") - self._printShowAndLoadFileSuggestion() - - def _isConnectedToAnyEnv(self): - return self.activeEnv and self.activeClient and \ - self.activeClient.hasSufficientConnections - - def _accept_request_connection(self, matchedVars): - if matchedVars.get( - 'accept_connection_request') == acceptConnectionCmd.id: - connectionName = IndyCli.removeSpecialChars( - matchedVars.get('connection_name')) - self._accept_connection_request(connectionName) - return True - - def _pingTarget(self, matchedVars): - if matchedVars.get('ping') == pingTargetCmd.id: - connectionName = IndyCli.removeSpecialChars( - matchedVars.get('target_name')) - li = self._getOneConnectionForFurtherProcessing(connectionName) - if li: - if li.isRemoteEndpointAvailable: - self.agent._pingToEndpoint(li.name, li.remoteEndPoint) - else: - self.print("Please sync first to get target endpoint") - self._printSyncConnectionUsage(li.name) - return True - - def _syncConnection(self, matchedVars): - if matchedVars.get('sync_connection') == syncConnectionCmd.id: - # TODO: Shouldn't we remove single quotes too? 
- connectionName = IndyCli.removeSpecialChars( - matchedVars.get('connection_name')) - self._sync_connection_request(connectionName) - return True - - def _get_matching_requests_detail(self, connectionName): - connection_requests = self._get_request_matching_connections( - IndyCli.removeSpecialChars(connectionName)) - - exactlyMatchedConnections = connection_requests["exactlyMatched"] - likelyMatchedConnections = connection_requests["likelyMatched"] - - totalFound = sum([len(v) for v in {**exactlyMatchedConnections, - **likelyMatchedConnections}.values()]) - return totalFound, exactlyMatchedConnections, likelyMatchedConnections - - @staticmethod - def _getOneConnection(exactlyMatchedConnections, - likelyMatchedConnections) -> Connection: - li = None - if len(exactlyMatchedConnections) == 1: - li = list(exactlyMatchedConnections.values())[0][0] - else: - li = list(likelyMatchedConnections.values())[0][0] - return li - - def _printMoreThanOneConnectionFoundMsg( - self, - connectionName, - exactlyMatchedConnections, - likelyMatchedConnections): - self.print( - 'More than one connection matches "{}"'.format(connectionName)) - exactlyMatchedConnections.update(likelyMatchedConnections) - for k, v in exactlyMatchedConnections.items(): - for li in v: - self.print("{}".format(li.name)) - self.print("\nRe enter the command with more specific " - "connection request name") - self._printShowAndAcceptConnectionUsage() - - def _showConnection(self, matchedVars): - if matchedVars.get('show_connection') == showConnectionCmd.id: - connectionName = matchedVars.get( - 'connection_name').replace('"', '') - - totalFound, exactlyMatchedConnections, likelyMatchedConnections = \ - self._get_matching_requests_detail(connectionName) - - if totalFound == 0: - self._printNoConnectionFoundMsg() - return True - - if totalFound == 1: - li = self._getOneConnection( - exactlyMatchedConnections, likelyMatchedConnections) - - if IndyCli.isNotMatching(connectionName, li.name): - self.print('Expanding 
{} to "{}"'.format( - connectionName, li.name)) - - self.print("{}".format(str(li))) - if li.isAccepted: - self._printSuggestionPostAcceptConnection(self, li) - else: - self._printSyncAndAcceptUsage(li.name) - else: - self._printMoreThanOneConnectionFoundMsg( - connectionName, exactlyMatchedConnections, likelyMatchedConnections) - - return True - - # def _printNoClaimReqFoundMsg(self): - # self.print("No matching Claim Requests found in current wallet\n") - # - def _printNoProofReqFoundMsg(self): - self.print("No matching Proof Requests found in current wallet\n") - - def _printNoClaimFoundMsg(self): - self.print("No matching Claims found in " - "any connections in current wallet\n") - - def _printMoreThanOneConnectionFoundForRequest( - self, requestedName, connectionNames): - self.print( - 'More than one connection matches "{}"'.format(requestedName)) - for li in connectionNames: - self.print("{}".format(li)) - # TODO: Any suggestion in more than one connection? - - # TODO: Refactor following three methods - # as most of the pattern looks similar - - def _printRequestAlreadyMade(self, extra=""): - msg = "Request already made." - if extra: - msg += "Extra info: {}".format(extra) - self.print(msg) - - def _printMoreThanOneClaimFoundForRequest( - self, claimName, connectionAndClaimNames): - self.print('More than one match for "{}"'.format(claimName)) - for li, cl in connectionAndClaimNames: - self.print("{} in {}".format(li, cl)) - - def _findProofRequest(self, - claimReqName: str, - connectionName: str=None) -> (Connection, - ProofRequest): - matchingConnectionWithClaimReq = self.activeWallet. 
findAllProofRequests( - claimReqName, connectionName) # TODO rename claimReqName -> proofRequestName - - if len(matchingConnectionWithClaimReq) == 0: - self._printNoProofReqFoundMsg() - return None, None - - if len(matchingConnectionWithClaimReq) > 1: - connectionNames = [ml.name for ml, - cr in matchingConnectionWithClaimReq] - self._printMoreThanOneConnectionFoundForRequest( - claimReqName, connectionNames) - return None, None - - return matchingConnectionWithClaimReq[0] - - def _getOneConnectionAndAvailableClaim( - self, claimName, printMsgs: bool = True) -> (Connection, Schema): - matchingConnectionsWithAvailableClaim = self.activeWallet. \ - getMatchingConnectionsWithAvailableClaim(claimName) - - if len(matchingConnectionsWithAvailableClaim) == 0: - if printMsgs: - self._printNoClaimFoundMsg() - return None, None - - if len(matchingConnectionsWithAvailableClaim) > 1: - connectionNames = [ml.name for ml, - _ in matchingConnectionsWithAvailableClaim] - if printMsgs: - self._printMoreThanOneConnectionFoundForRequest( - claimName, connectionNames) - return None, None - - return matchingConnectionsWithAvailableClaim[0] - - async def _getOneConnectionAndReceivedClaim(self, claimName, printMsgs: bool = True) -> \ - (Connection, Tuple, Dict): - matchingConnectionsWithRcvdClaim = await self.agent.getMatchingConnectionsWithReceivedClaimAsync(claimName) - - if len(matchingConnectionsWithRcvdClaim) == 0: - if printMsgs: - self._printNoClaimFoundMsg() - return None, None, None - - if len(matchingConnectionsWithRcvdClaim) > 1: - connectionNames = [ml.name for ml, _, - _ in matchingConnectionsWithRcvdClaim] - if printMsgs: - self._printMoreThanOneConnectionFoundForRequest( - claimName, connectionNames) - return None, None, None - - return matchingConnectionsWithRcvdClaim[0] - - def _setAttr(self, matchedVars): - if matchedVars.get('set_attr') == setAttrCmd.id: - attrName = matchedVars.get('attr_name') - attrValue = matchedVars.get('attr_value') - curConnection, curProofReq, 
selfAttestedAttrs = self.curContext - if curProofReq: - selfAttestedAttrs[attrName] = attrValue - else: - self.print("No context, use below command to set the context") - self.printUsage(self._getShowProofRequestUsage()) - return True - - def _reqClaim(self, matchedVars): - if matchedVars.get('request_claim') == reqClaimCmd.id: - claimName = IndyCli.removeSpecialChars( - matchedVars.get('claim_name')) - matchingConnection, ac = \ - self._getOneConnectionAndAvailableClaim( - claimName, printMsgs=False) - if matchingConnection: - name, version, origin = ac - if IndyCli.isNotMatching(claimName, name): - self.print('Expanding {} to "{}"'.format( - claimName, name)) - self.print("Found claim {} in connection {}". - format(claimName, matchingConnection.name)) - if not self._isConnectedToAnyEnv(): - self._printNotConnectedEnvMessage() - return True - - schemaKey = (name, version, origin) - self.print("Requesting claim {} from {}...".format( - name, matchingConnection.name)) - - self.agent.sendReqClaim(matchingConnection, schemaKey) - else: - self._printNoClaimFoundMsg() - return True - - def _change_current_key_req(self, matchedVars): - if matchedVars.get('change_ckey') == changeKeyCmd.id: - if not self.canMakeIndyRequest: - return True - seed = matchedVars.get('seed') - self._change_current_key(seed=seed) - return True - - def _change_current_key(self, seed=None): - if not self.isValidSeedForNewKey(seed): - return True - - cur_id = self.activeWallet.requiredIdr() - cseed = cleanSeed(seed or randombytes(32)) - - dm = self.activeWallet.didMethods.get(None) - signer = dm.newSigner(identifier=cur_id, seed=cseed) - - def change_verkey_cb(reply, error, *args, **kwargs): - if error: - self.print("Error: {}".format(error), Token.BoldBlue) - else: - self.activeWallet.updateSigner(cur_id, signer) - self._saveActiveWallet() - self.print("Key changed for {}".format( - get_payload_data(reply)[TARGET_NYM]), Token.BoldBlue) - self.print("New verification key is {}".format( - 
signer.verkey), Token.BoldBlue) - - self._addNym(nym=cur_id, role=None, newVerKey=signer.verkey, - otherClientName=None, custom_clb=change_verkey_cb) - - def _createNewIdentifier(self, DID, seed, - alias=None): - if not self.isValidSeedForNewKey(seed): - return True - - if not seed: - seed = randombytes(32) - - cseed = cleanSeed(seed) - - signer = DidSigner(identifier=DID, seed=cseed, alias=alias) - - id, signer = self.activeWallet.addIdentifier(DID, - seed=cseed, alias=alias) - self.print("DID created in wallet {}".format(self.activeWallet)) - self.print("New DID is {}".format(signer.identifier)) - self.print("New verification key is {}".format(signer.verkey)) - self._setActiveIdentifier(id) - - def _reqAvailClaims(self, matchedVars): - if matchedVars.get('request_avail_claims') == reqAvailClaimsCmd.id: - connectionName = IndyCli.removeSpecialChars( - matchedVars.get('connection_name')) - li = self._getOneConnectionForFurtherProcessing(connectionName) - if li: - self.agent.sendRequestForAvailClaims(li) - return True - - def _newDID(self, matchedVars): - if matchedVars.get('new_id') == newDIDCmd.id: - DID = matchedVars.get('id') - alias = matchedVars.get('alias') - - seed = matchedVars.get('seed') - self._createNewIdentifier(DID, seed, alias) - return True - - def _sendProof(self, matchedVars): - if matchedVars.get('send_proof') == sendProofCmd.id: - proofName = IndyCli.removeSpecialChars( - matchedVars.get('proof_name').strip()) - connectionName = IndyCli.removeSpecialChars( - matchedVars.get('connection_name').strip()) - - li, proofReq = self._findProofRequest(proofName, connectionName) - - if not li or not proofReq: - return False - - self.logger.debug("Building proof using {} for {}". 
- format(proofReq, li)) - - # if connection or proof request doesn't match with context information, - # then set the context accordingly - if li != self.curContext.link or \ - proofReq != self.curContext.proofRequest: - self.curContext = Context(li, proofReq, {}) - - self.agent.sendProof(li, proofReq) - - return True - - def _sendProofRequest(self, matchedVars): - if matchedVars.get('send_proof_request') == 'send proof-request': - proofRequestName = IndyCli.removeSpecialChars( - matchedVars.get('proof_request_name').strip()) - target = IndyCli.removeSpecialChars( - matchedVars.get('target').strip()) - - li = self._getOneConnectionForFurtherProcessing(target) - - if li: - result = self.agent.sendProofReq(li, proofRequestName) - if result != ERR_NO_PROOF_REQUEST_SCHEMA_FOUND: - self.print('Sent proof request "{}" to {}' - .format(proofRequestName, target)) - else: - self.print(ERR_NO_PROOF_REQUEST_SCHEMA_FOUND) - else: - self.print('No connection found with name {}'.format(target)) - - return True - - async def _showReceivedOrAvailableClaim(self, claimName): - matchingConnection, rcvdClaim, attributes = \ - await self._getOneConnectionAndReceivedClaim(claimName) - if matchingConnection: - self.print("Found claim {} in connection {}". 
- format(claimName, matchingConnection.name)) - - # TODO: Figure out how to get time of issuance - issued = None not in attributes.values() - - if issued: - self.print("Status: {}".format(datetime.datetime.now())) - else: - self.print("Status: available (not yet issued)") - - self.print('Name: {}\nVersion: {}'.format(claimName, rcvdClaim[1])) - self.print("Attributes:") - for n, v in attributes.items(): - if v: - self.print(' {}: {}'.format(n, v)) - else: - self.print(' {}'.format(n)) - - if not issued: - self._printRequestClaimMsg(claimName) - else: - self.print("") - return rcvdClaim - else: - self.print("No matching Claims found " - "in any connections in current wallet") - - def _printRequestClaimMsg(self, claimName): - self.printSuggestion(self._getReqClaimUsage(claimName)) - - @staticmethod - def _formatProofRequestAttribute(attributes, verifiableAttributes, - matchingConnectionAndReceivedClaim): - getClaim = itemgetter(2) - - def containsAttr(key): - return lambda t: key in getClaim(t) - - formatted = 'Attributes:\n' - - for k, v in attributes.items(): - # determine if we need to show number for claims which - # were participated in proof generation - attrClaimIndex = getIndex(containsAttr( - k), matchingConnectionAndReceivedClaim) - showClaimNumber = attrClaimIndex > -1 and \ - len(matchingConnectionAndReceivedClaim) > 1 - - formatted += (' ' + - ('[{}] '.format(attrClaimIndex + 1) - if showClaimNumber else '') + - str(k) + - (' (V)' if k in verifiableAttributes else '') + - ': ' + str(v) + '\n') - - return formatted - - @staticmethod - def _printClaimsUsedInProofConstruction( - filteredMatchingClaims, proofRequestAttrs): - toPrint = '\nThe Proof is constructed from the following claims:\n' - showClaimNumber = len(filteredMatchingClaims) > 1 - claimNumber = 1 - alreadyFulfilledAttrs = {} - - for li, (name, ver, _), issuedAttrs in filteredMatchingClaims: - toPrint += '\n Claim {}({} v{} from {})\n'.format( - '[{}] '.format(claimNumber) if showClaimNumber 
else '', - name, ver, li.name - ) - for k, v in issuedAttrs.items(): - toPrint += (' {}'.format( - '* ' if k in proofRequestAttrs and - k not in alreadyFulfilledAttrs else ' ') + - k + ': ' + - '{}\n'.format('None' if v is None else v) - ) - if k not in alreadyFulfilledAttrs: - alreadyFulfilledAttrs[k] = True - - claimNumber += 1 - - return toPrint - - async def _fulfillProofRequestByContext(self, c: Context): - matchingConnectionAndReceivedClaim = await self.agent.getClaimsUsedForAttrs( - c.proofRequest.attributes) - - # filter all those claims who has some None value - # since they are not yet requested - filteredMatchingClaims = [] - for li, cl, issuedAttrs in matchingConnectionAndReceivedClaim: - if None not in [v for k, v in issuedAttrs.items()]: - filteredMatchingClaims.append((li, cl, issuedAttrs)) - - attributesWithValue = c.proofRequest.attributes - c.proofRequest.selfAttestedAttrs = {} - for k, v in c.proofRequest.attributes.items(): - for li, cl, issuedAttrs in filteredMatchingClaims: - if k in issuedAttrs: - attributesWithValue[k] = issuedAttrs[k] - else: - defaultValue = attributesWithValue[k] or v - selfAttestedValue = c.selfAttestedAttrs.get(k) - if selfAttestedValue: - attributesWithValue[k] = selfAttestedValue - c.proofRequest.selfAttestedAttrs[k] = selfAttestedValue - else: - attributesWithValue[k] = defaultValue - - c.proofRequest.attributes = attributesWithValue - c.proofRequest.fulfilledByClaims = filteredMatchingClaims - return True - - async def fulfillProofRequest(self, proofReqName): - proof_req_name = IndyCli.removeSpecialChars(proofReqName) - matchingConnection, proofRequest = self._findProofRequest( - proof_req_name) - - if matchingConnection and proofRequest: - if matchingConnection != self.curContext.link or \ - proofRequest != self.curContext.proofRequest: - self.curContext = Context(matchingConnection, proofRequest, {}) - self.print('Found proof request "{}" in connection "{}"'. 
- format(proofRequest.name, matchingConnection.name)) - - return await self._fulfillProofRequestByContext(self.curContext) - else: - return False - - async def _showProofWithMatchingClaims(self, c: Context): - self.print( - c.proofRequest.fixedInfo + - self._formatProofRequestAttribute( - c.proofRequest.attributes, - [ - v.name for k, - v in c.proofRequest.verifiableAttributes.items()], - c.proofRequest.fulfilledByClaims)) - - self.print(self._printClaimsUsedInProofConstruction( - c.proofRequest.fulfilledByClaims, c.proofRequest.attributes)) - - self.printSuggestion( - self._getSetAttrUsage() + - self._getSendProofUsage(c.proofRequest, c.link)) - - async def _fulfillAndShowConstructedProof(self, proof_request_name): - fulfilled = await self.fulfillProofRequest(proof_request_name) - if fulfilled: - await self._showProofWithMatchingClaims(self.curContext) - - def _showProofRequest(self, matchedVars): - if matchedVars.get('show_proof_request') == showProofRequestCmd.id: - proof_request_name = IndyCli.removeSpecialChars( - matchedVars.get('proof_request_name')) - - self.agent.loop.call_soon(asyncio.ensure_future, - self._fulfillAndShowConstructedProof( - proof_request_name)) - return True - - def _showClaim(self, matchedVars): - if matchedVars.get('show_claim') == showClaimCmd.id: - claimName = IndyCli.removeSpecialChars( - matchedVars.get('claim_name')) - self.agent.loop.call_soon( - asyncio.ensure_future, - self._showReceivedOrAvailableClaim(claimName)) - - return True - - def _listClaims(self, matchedVars): - if matchedVars.get('list_claims') == listClaimsCmd.id: - connection_name = IndyCli.removeSpecialChars( - matchedVars.get('connection_name')) - - li = self._getOneConnectionForFurtherProcessing(connection_name) - if li: - # TODO sync if needed, send msg to agent - self._printAvailClaims(li) - return True - - def _listConnections(self, matchedVars): - if matchedVars.get('list_connections') == listConnectionsCmd.id: - connections = 
self.activeWallet.getConnectionNames() - if len(connections) == 0: - self.print("No connections exists") - else: - for connection in connections: - self.print(connection + "\n") - return True - - def _printAvailClaims(self, connection): - self.print(connection.avail_claims_str()) - - def _showFile(self, matchedVars): - if matchedVars.get('show_file') == showFileCmd.id: - givenFilePath = matchedVars.get('file_path') - filePath = IndyCli._getFilePath(givenFilePath) - if not filePath: - self.print("Given file does not exist") - self.printUsage(self._getShowFileUsage()) - else: - with open(filePath, 'r') as fin: - self.print(fin.read()) - msgs = self._getLoadFileUsage(givenFilePath) - self.printSuggestion(msgs) - return True - - def canConnectToEnv(self, envName: str): - if envName == self.activeEnv: - return "Already connected to {}".format(envName) - if envName not in self.envs: - return "Unknown environment {}".format(envName) - - new_base_path = os.path.join(self.ledger_base_dir, envName) - - if not os.path.exists(os.path.join(new_base_path, - genesis_txn_file(self.config.poolTransactionsFile))): - return "Do not have information to connect to {}".format(envName) - - def _disconnect(self, matchedVars): - if matchedVars.get('disconn') == disconnectCmd.id: - self._disconnectFromCurrentEnv() - return True - - def _disconnectFromCurrentEnv(self, toConnectToNewEnv=None): - oldEnv = self.activeEnv - if not oldEnv and not toConnectToNewEnv: - self.print("Not connected to any environment.") - return True - - if not toConnectToNewEnv: - self.print("Disconnecting from {} ...".format(self.activeEnv)) - - self._saveActiveWallet() - self._wallets = {} - self._activeWallet = None - self._activeClient = None - self.activeEnv = None - self._setPrompt(self.currPromptText.replace("{}{}".format( - PROMPT_ENV_SEPARATOR, oldEnv), "")) - - if not toConnectToNewEnv: - self.print("Disconnected from {}".format(oldEnv), Token.BoldGreen) - - if toConnectToNewEnv is None: - 
self.restoreLastActiveWallet() - - def printWarningIfActiveWalletIsIncompatible(self): - if self._activeWallet: - if not self.checkIfWalletBelongsToCurrentContext( - self._activeWallet): - self.print(self.getWalletContextMistmatchMsg, Token.BoldOrange) - self.print("Any changes made to this wallet won't " - "be persisted.", Token.BoldOrange) - - def moveActiveWalletToNewContext(self, newEnv): - if self._activeWallet: - if not self._activeWallet.env or self._activeWallet.env == NO_ENV: - currentWalletName = self._activeWallet.name - self._activeWallet.env = newEnv - randomSuffix = '' - sourceWalletFilePath = getWalletFilePath( - self.getContextBasedWalletsBaseDir(), self.walletFileName) - targetContextDir = os.path.join(self.getWalletsBaseDir(), - newEnv) - if os.path.exists(sourceWalletFilePath): - while True: - targetWalletName = currentWalletName + randomSuffix - toBeTargetWalletFileExists = self.checkIfPersistentWalletExists( - targetWalletName, inContextDir=targetContextDir) - if not toBeTargetWalletFileExists: - self._activeWallet.name = targetWalletName - break - randomSuffix = "-{}".format(randomString(6)) - self._saveActiveWalletInDir(contextDir=targetContextDir, - printMsgs=False) - os.remove(sourceWalletFilePath) - targetWalletFilePath = getWalletFilePath( - targetContextDir, self.walletFileName) - - self.print("Current active wallet got moved to '{}' " - "environment. 
Here is the detail:".format(newEnv), - Token.BoldBlue) - self.print(" wallet name: {}".format( - currentWalletName), Token.BoldBlue) - self.print(" old location: {}".format( - sourceWalletFilePath), Token.BoldBlue) - self.print(" new location: {}".format( - targetWalletFilePath), Token.BoldBlue) - if randomSuffix != '': - self.print(" new wallet name: {}".format( - self._activeWallet.name), Token.BoldBlue) - self.print(" Note:\n Target environment " - "already had a wallet with name '{}', so we " - "renamed current active wallet to '{}'.\n " - " You can always rename any wallet with more " - "meaningful name with 'rename wallet' command.". - format(currentWalletName, - self._activeWallet.name), - Token.BoldBlue) - self._activeWallet = None - - def _connectTo(self, matchedVars): - if matchedVars.get('conn') == connectToCmd.id: - envName = matchedVars.get('env_name') - envError = self.canConnectToEnv(envName) - - if envError: - self.print(envError, token=Token.Error) - self._printConnectUsage() - return False - - oldEnv = self.activeEnv - - self._saveActiveWallet() - - if not oldEnv: - self.moveActiveWalletToNewContext(envName) - - isAnyWalletExistsForNewEnv = \ - self.isAnyWalletFileExistsForGivenEnv(envName) - - if oldEnv or isAnyWalletExistsForNewEnv: - self._disconnectFromCurrentEnv(envName) - - # Prompt has to be changed, so it show the environment too - self.activeEnv = envName - self._setPrompt(self.currPromptText.replace("{}{}".format( - PROMPT_ENV_SEPARATOR, oldEnv), "")) - - if isAnyWalletExistsForNewEnv: - self.restoreLastActiveWallet() - - self.printWarningIfActiveWalletIsIncompatible() - - self._buildClientIfNotExists(self.config) - self.print("Connecting to {}...".format( - envName), Token.BoldGreen) - - self.ensureClientConnected() - - if not self.activeClient or not self.activeClient.nodeReg: - msg = '\nThe information required to connect this client to the nodes cannot be found. ' \ - '\nThis is an error. 
To correct the error, get the file containing genesis transactions ' \ - '\n(the file name is `{}`) from the github repository and place ' \ - '\nit in directory `{}`.\n' \ - '\nThe github url is {}.\n'.format(genesis_txn_file(self.config.poolTransactionsFile), - self.ledger_base_dir, - self.githubUrl) - self.print(msg) - return False - - return True - - @property - def getActiveEnv(self): - prompt, env = PlenumCli.getPromptAndEnv(self.name, - self.currPromptText) - return env - - def get_available_networks(self): - return [check_dir for check_dir in os.listdir(self.ledger_base_dir) - if os.path.isdir( - os.path.join(self.ledger_base_dir, check_dir))] - - def getAllSubDirNamesForKeyrings(self): - lst = self.get_available_networks() - lst.append(NO_ENV) - return lst - - def updateEnvNameInWallet(self): - if not self._activeWallet.getEnvName: - self._activeWallet.env = self.activeEnv if self.activeEnv \ - else NO_ENV - - def getStatus(self): - # TODO: This needs to show active wallet and active DID - if not self.activeEnv: - self._printNotConnectedEnvMessage() - else: - if self.activeClient.hasSufficientConnections: - msg = "Connected to {} Indy network".format(self.activeEnv) - else: - msg = "Attempting connection to {} Indy network". 
\ - format(self.activeEnv) - self.print(msg) - - def _setPrompt(self, promptText): - if self.activeEnv: - if not promptText.endswith("{}{}".format(PROMPT_ENV_SEPARATOR, - self.activeEnv)): - promptText = "{}{}{}".format(promptText, PROMPT_ENV_SEPARATOR, - self.activeEnv) - - super()._setPrompt(promptText) - - def _addGenTxnAction(self, matchedVars): - if matchedVars.get('add_genesis'): - nym = matchedVars.get('dest_id') - role = Identity.correctRole(self._getRole(matchedVars)) - if role: - role = role.upper() - txn = Member.nym_txn(nym=nym, - role=role, - txn_id=sha256(randomString(6).encode()).hexdigest()) - # TODO: need to check if this needs to persist as well - self.genesisTransactions.append(txn) - self.print('Genesis transaction added.') - return True - - @staticmethod - def bootstrapClientKey(client, node, identifier=None): - pass - - def ensureClientConnected(self): - if self._isConnectedToAnyEnv(): - self.print("Connected to {}.".format( - self.activeEnv), Token.BoldBlue) - else: - self.looper.loop.call_later(.2, self.ensureClientConnected) - - def ensureAgentConnected(self, otherAgentHa, clbk: Callable = None, - *args): - if not self.agent: - return - if self.agent.endpoint.isConnectedTo(ha=otherAgentHa): - # TODO: Remove this print - self.logger.debug("Agent {} connected to {}". 
- format(self.agent, otherAgentHa)) - if clbk: - clbk(*args) - else: - self.looper.loop.call_later(.2, self.ensureAgentConnected, - otherAgentHa, clbk, *args) - - def _ensureReqCompleted(self, reqKey, client, clbk=None, pargs=None, - kwargs=None, cond=None): - ensureReqCompleted(self.looper.loop, reqKey, client, clbk, pargs=pargs, - kwargs=kwargs, cond=cond) - - def addAlias(self, reply, err, client, alias, signer): - if not self.canMakeIndyRequest: - return True - - txnId = reply[TXN_ID] - op = { - TARGET_NYM: alias, - TXN_TYPE: NYM, - # TODO: Should REFERENCE be symmetrically encrypted and the key - # should then be disclosed in another transaction - REF: txnId - } - self.print("Adding alias {}".format(alias), Token.BoldBlue) - self.aliases[alias] = signer - client.submit(op, identifier=self.activeSigner.identifier) - - def print(self, msg, token=None, newline=True): - super().print(msg, token=token, newline=newline) - - def createFunctionMappings(self): - from collections import defaultdict - - def promptHelper(): - self.print("Changes the prompt to provided principal name") - self.printUsage(self._getPromptUsage()) - - def principalsHelper(): - self.print("A person like Alice, " - "an organization like Faber College, " - "or an IoT-style thing") - - def loadHelper(): - self.print("Creates the connection, generates DID and signing keys") - self.printUsage(self._getLoadFileUsage("")) - - def showHelper(): - self.print("Shows the info about the connection") - self.printUsage(self._getShowFileUsage("")) - - def showConnectionHelper(): - self.print( - "Shows connection info in case of one matching connection, " - "otherwise shows all the matching connections") - self.printUsage(self._getShowConnectionUsage()) - - def connectHelper(): - self.print("Lets you connect to the respective environment") - self.printUsage(self._getConnectUsage()) - - def syncHelper(): - self.print("Synchronizes the connection between the endpoints") - 
self.printUsage(self._getSyncConnectionUsage()) - - def defaultHelper(): - self.printHelp() - - mappings = { - 'show': showHelper, - 'prompt': promptHelper, - 'principals': principalsHelper, - 'load': loadHelper, - 'show connection': showConnectionHelper, - 'connect': connectHelper, - 'sync': syncHelper - } - - return defaultdict(lambda: defaultHelper, **mappings) - - def getTopComdMappingKeysForHelp(self): - return ['helpAction', 'connectTo', 'disconnect', 'statusAction'] - - def getHelpCmdIdsToShowUsage(self): - return ["help", "connect"] - - def cmdHandlerToCmdMappings(self): - # The 'key' of 'mappings' dictionary is action handler function name - # without leading underscore sign. Each such funcation name should be - # mapped here, its other thing that if you don't want to display it - # in help, map it to None, but mapping should be present, that way it - # will force developer to either write help message for those cli - # commands or make a decision to not show it in help message. 
- - mappings = OrderedDict() - mappings.update(super().cmdHandlerToCmdMappings()) - mappings['connectTo'] = connectToCmd - mappings['disconnect'] = disconnectCmd - mappings['addGenTxnAction'] = addGenesisTxnCmd - mappings['newDID'] = newDIDCmd - mappings['sendNymAction'] = sendNymCmd - mappings['sendGetNymAction'] = sendGetNymCmd - mappings['sendAttribAction'] = sendAttribCmd - mappings['sendGetAttrAction'] = sendGetAttrCmd - mappings['sendNodeAction'] = sendNodeCmd - mappings['sendPoolUpgAction'] = sendPoolUpgCmd - mappings['sendPoolConfigAction'] = sendPoolConfigCmd - mappings['sendSchemaAction'] = sendSchemaCmd - mappings['sendGetSchemaAction'] = sendGetSchemaCmd - mappings['sendClaimDefAction'] = sendClaimDefCmd - mappings['sendGetClaimDefAction'] = sendGetClaimDefCmd - mappings['showFile'] = showFileCmd - mappings['loadFile'] = loadFileCmd - mappings['showConnection'] = showConnectionCmd - mappings['syncConnection'] = syncConnectionCmd - mappings['pingTarget'] = pingTargetCmd - mappings['acceptrequestconnection'] = acceptConnectionCmd - mappings['showClaim'] = showClaimCmd - mappings['listClaims'] = listClaimsCmd - mappings['listConnections'] = listConnectionsCmd - mappings['reqClaim'] = reqClaimCmd - mappings['showProofRequest'] = showProofRequestCmd - mappings['addGenTxnAction'] = addGenesisTxnCmd - mappings['setAttr'] = setAttrCmd - mappings['sendProofRequest'] = sendProofRequestCmd - mappings['sendProof'] = sendProofCmd - mappings['reqAvailClaims'] = reqAvailClaimsCmd - mappings['changecurrentkeyreq'] = changeKeyCmd - - # TODO: These seems to be obsolete, so either we need to remove these - # command handlers or let it point to None - mappings['addGenesisAction'] = None # overriden by addGenTxnAction - - return mappings - - @property - def canMakeIndyRequest(self): - if not self.hasAnyKey: - return False - if not self.activeEnv: - self._printNotConnectedEnvMessage() - return False - if not self.checkIfWalletBelongsToCurrentContext(self._activeWallet): - 
self.print(self.getWalletContextMistmatchMsg, Token.BoldOrange) - return False - - return True - - def getConfig(self, homeDir=None): - return getConfig(homeDir) - - -class DummyClient: - def submitReqs(self, *reqs): - pass - - @property - def hasSufficientConnections(self): - pass diff --git a/indy_client/cli/command.py b/indy_client/cli/command.py deleted file mode 100644 index 2dd75d793..000000000 --- a/indy_client/cli/command.py +++ /dev/null @@ -1,267 +0,0 @@ -from plenum.cli.command import Command -from indy_common.roles import Roles -from indy_common.transactions import IndyTransactions - -nymName = IndyTransactions.NYM.name -getNymName = IndyTransactions.GET_NYM.name -attribName = IndyTransactions.ATTRIB.name -getAttrName = IndyTransactions.GET_ATTR.name -nodeName = IndyTransactions.NODE.name -schemaName = IndyTransactions.SCHEMA.name -getSchemaName = IndyTransactions.GET_SCHEMA.name -poolUpgradeName = IndyTransactions.POOL_UPGRADE.name -claimDefName = IndyTransactions.CLAIM_DEF.name -getClaimDefName = IndyTransactions.GET_CLAIM_DEF.name -poolConfigName = IndyTransactions.POOL_CONFIG.name -changeKeyName = IndyTransactions.CHANGE_KEY.name - -sendNymCmd = Command( - id="send {nym}".format( - nym=nymName), - title="Adds given DID to indy", - usage="send {nym} dest= role= [verkey=]".format( - nym=nymName), - examples=[ - "send {nym} dest=BiCMHDqC5EjheFHumZX9nuAoVEp8xyuBgiRi5JcY5whi role={role}".format( - nym=nymName, - role=Roles.TRUST_ANCHOR.name), - "send {nym} dest=33A18XMqWqTzDpLHXLR5nT verkey=~Fem61Q5SnYhGVVHByQNxHj".format( - nym=nymName)]) - -sendGetNymCmd = Command( - id="send {getNym}".format( - getNym=getNymName), - title="Get NYM from indy", - usage="send {getNym} dest=".format( - getNym=getNymName), - examples="send {getNym} dest=33A18XMqWqTzDpLHXLR5nT".format( - getNym=getNymName)) - -sendAttribCmd = Command( - id="send {attrib}".format(attrib=attribName), - title="Adds attributes to existing DID", - usage="send {attrib} dest= [raw={{}}] [hash=] 
[enc=]".format( - attrib=attribName), - examples='send {attrib} dest=33A18XMqWqTzDpLHXLR5nT raw={{"endpoint": "127.0.0.1:5555"}}'.format(attrib=attribName)) - -sendGetAttrCmd = Command( - id="send {getAttr}".format( - getAttr=getAttrName), - title="Get ATTR from indy", - usage="send {getAttr} dest= [raw=] [hash=] [enc=]".format( - getAttr=getAttrName), - examples="send {getAttr} dest=33A18XMqWqTzDpLHXLR5nT raw=endpoint".format( - getAttr=getAttrName)) - - -sendNodeCmd = Command( - id="send {node}".format(node=nodeName), - title="Adds a node to the pool", - usage="send {node} dest= data={{}}".format( - node=nodeName), - note="Only Steward (must be already added on indy) can execute this command to add new node to the pool", - examples='send {node} dest=87Ys5T2eZfau4AATsBZAYvqwvD8XL5xYCHgg2o1ffjqg data={{"services":["VALIDATOR"], ' - '"node_ip": "127.0.0.1", "node_port": 9711, "client_ip": "127.0.0.1", "client_port": 9712, ' - '"alias": "Node101", "blskey": "00000000000000000000000000000000"}}'.format(node=nodeName)) - -sendPoolUpgCmd = Command( - id="send {poolUpgrade}".format(poolUpgrade=poolUpgradeName), - title="Sends instructions to nodes to update themselves", - usage="send {poolUpgrade} name= version= sha256= action= schedule= timeout= force= reinstall= package=".format( - poolUpgrade=poolUpgradeName), - examples="send {poolUpgrade} name=upgrade-01 " - "version=0.0.1 sha256=e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 action=start " - "schedule={{'AtDfpKFe1RPgcr5nnYBw1Wxkgyn8Zjyh5MzFoEUTeoV3': " - "'2017-01-25T12:49:05.258870+00:00', " - "'4yC546FFzorLPgTNTc6V43DnpFrR8uHvtunBxb2Suaa2': " - "'2017-01-25T12:33:53.258870+00:00', " - "'JpYerf4CssDrH76z7jyQPJLnZ1vwYgvKbvcp16AB5RQ': " - "'2017-01-25T12:44:01.258870+00:00', " - "'DG5M4zFm33Shrhjj6JB7nmx9BoNJUq219UXDfvwBDPe2': " - "'2017-01-25T12:38:57.258870+00:00'}} " - "timeout=10 " - "force=False " - "reinstall=False " - "package=indy-node".format(poolUpgrade=poolUpgradeName)) - -sendSchemaCmd 
= Command( - id="send {schema}".format( - schema=schemaName), - title="Adds schema to indy", - usage="send {schema} name= version= keys=".format( - schema=schemaName), - examples="send {schema} name=Degree version=1.0 keys=undergrad,last_name,first_name,birth_date,postgrad,expiry_date".format( - schema=schemaName)) - -sendGetSchemaCmd = Command( - id="send {getSchema}".format( - getSchema=getSchemaName), - title="Gets schema from indy", - usage="send {getSchema} dest= name= version=".format( - getSchema=getSchemaName), - examples="send {getSchema} dest=33A18XMqWqTzDpLHXLR5nT name=Degree version=1.0".format( - getSchema=getSchemaName)) - - -sendClaimDefCmd = Command( - id="send {claimDef}".format(claimDef=claimDefName), - title="Adds claim definition for given schema", - usage="send {claimDef} ref= signature_type=".format( - claimDef=claimDefName), - examples="send {claimDef} ref=10 signature_type=CL".format(claimDef=claimDefName)) - -sendGetClaimDefCmd = Command( - id="send {getClaimDef}".format( - getClaimDef=getClaimDefName), - title="Gets claim definition from indy", - usage="send {getClaimDef} ref= signature_type=".format( - getClaimDef=getClaimDefName), - examples="send {getClaimDef} ref=10 signature_type=CL".format( - getClaimDef=getClaimDefName)) - -sendProofRequestCmd = Command( - id="send proof request", - title="Send a proof request", - usage="send proofreq to ", - examples="send proofreq Over-21 to JaneDo") - -showFileCmd = Command( - id="show", - title="Shows content of given file", - usage="show ", - examples="show sample/faber-request.indy") - -loadFileCmd = Command( - id="load", - title="Creates the connection", - usage="load ", - examples="load sample/faber-request.indy") - -showConnectionCmd = Command( - id="show connection", - title="Shows connection info in case of one matching connection, otherwise shows all the matching connection names", - usage="show connection ", - examples="show connection faber") - -connectToCmd = Command( - id="connect", 
- title="Lets you connect to the respective environment", - usage="connect sandbox|live", - examples=["connect sandbox", "connect live"]) - -disconnectCmd = Command( - id="disconnect", - title="Disconnects from currently connected environment", - usage="disconnect") - -syncConnectionCmd = Command( - id="sync", - title="Synchronizes the connection between the endpoints", - usage="sync connection ", - examples="sync connection faber") - -pingTargetCmd = Command( - id="ping", - title="Pings given remote's endpoint", - usage="ping ", - examples="ping faber") - -showClaimCmd = Command( - id="show claim", - title="Shows given claim information", - usage="show claim ", - examples="show claim Transcript") - -listClaimsCmd = Command( - id="list claims", - title="Refresh the list of claims", - usage="list claims ", - examples="list claims faber") - -listConnectionsCmd = Command( - id='list connections', - title='List available connections in active wallet', - usage='list connections', - examples='list connections' -) - -reqClaimCmd = Command( - id="request claim", - title="Request given claim", - usage="request claim ", - examples="request claim Transcript") - -showProofRequestCmd = Command( - id="show proof request", - title="Shows given proof request", - usage="show proof request ", - examples="show proof request Transcription") - -acceptConnectionCmd = Command( - id="accept request from", - title="Accept request from given remote", - usage="accept request from ", - examples="accept request from Faber") - -setAttrCmd = Command( - id="set", - title="Sets given value to given attribute name", - usage="set to ", - examples="set first_name to Alice") - -sendProofCmd = Command( - id="send proof", - title="Sends given proof to given remote", - usage="send proof to ", - examples="send proof Job-Application to Acme Corp") - -addGenesisTxnCmd = Command( - id="add genesis transaction", - title="Adds given genesis transaction", - usage="add genesis transaction {nym} dest= 
[role=]".format( - nym=nymName), - examples=[ - 'add genesis transaction {nym} dest=2ru5PcgeQzxF7QZYwQgDkG2K13PRqyigVw99zMYg8eML'.format( - nym=nymName), - 'add genesis transaction {nym} dest=2ru5PcgeQzxF7QZYwQgDkG2K13PRqyigVw99zMYg8eML role={role}'.format( - nym=nymName, role=Roles.STEWARD.name), - 'add genesis transaction {node} for 2ru5PcgeQzxF7QZYwQgDkG2K13PRqyigVw99zMYg8eML by FvDi9xQZd1CZitbK15BNKFbA7izCdXZjvxf91u3rQVzW with data ' - '{{"node_ip": "localhost", "node_port": "9701", "client_ip": "localhost", "client_port": "9702", "alias": "AliceNode"}}'.format(node=nodeName)]) - -newDIDCmd = Command( - id="new DID", - title="Creates new DID", - usage="new DID [|abbr|crypto] [with seed ] [as ]", - note="crypto = cryptographic DID, abbr = abbreviated verkey", - examples=[ - "new DID", - "new DID abbr", - "new DID 4QxzWk3ajdnEA37NdNU5Kt", - "new DID with seed aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", - "new DID abbr with seed aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", - "new DID 4QxzWk3ajdnEA37NdNU5Kt with seed aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"]) - -reqAvailClaimsCmd = Command( - id="request available claims from", - title="Requests all available claims from given connection", - usage="request available claims from ", - examples="request available claims from Faber" -) - -sendPoolConfigCmd = Command( - id="send {poolConfig}".format(poolConfig=poolConfigName), - title="Sends write configuration to pool", - usage="send {poolConfig} writes= force=".format( - poolConfig=poolConfigName), - examples="send {poolConfig} writes=True force=False".format( - poolConfig=poolConfigName) -) - -changeKeyCmd = Command( - id="change current key", - title="Changes key for the current identifier", - usage="change current key [with seed ]", - examples=[ - "change current key", - "change current key with seed aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"] -) diff --git a/indy_client/cli/constants.py b/indy_client/cli/constants.py deleted file mode 100644 index ee54cc155..000000000 --- 
a/indy_client/cli/constants.py +++ /dev/null @@ -1,213 +0,0 @@ -from plenum.cli.constants import CLIENT_GRAMS_CLIENT_COMMAND_REG_EX, relist, \ - CLI_CMDS, getPipedRegEx, CLIENT_GRAMS_USE_KEYPAIR_REG_EX -from indy_common.roles import Roles -from indy_common.transactions import IndyTransactions - -CLIENT_GRAMS_CLIENT_WITH_DID_FORMATTED_REG_EX = getPipedRegEx( - CLIENT_GRAMS_CLIENT_COMMAND_REG_EX + - "\s+ (?Pwith\s+DID) " - "\s+ (?P[a-zA-Z0-9=]+) \s*") \ - .format(relist(CLI_CMDS)) - -CLIENT_GRAMS_CLIENT_ADD_FORMATTED_REG_EX = getPipedRegEx( - "(\s* (?Pclient) \s+ (?P[a-zA-Z0-9]+) " - "\s+ (?Padd) \s+ (?P{trustAnchor}|user) " - "\s+ (?P[a-zA-Z0-9]+) \s*)".format(trustAnchor=Roles.TRUST_ANCHOR.name)) - -CLIENT_GRAMS_USE_KEYPAIR_FORMATTED_REG_EX = getPipedRegEx( - CLIENT_GRAMS_USE_KEYPAIR_REG_EX) - -# TODO we can genericize the other TXN types in the same way -TXN_NYM = "(\s* (?P<{{cmdName}}>{{cmd}}\s+{nym}) " \ - "\s+ (?Pdest=) \s* (?P[A-Za-z0-9+=/]*)" \ - "(\s+ (?Prole=) \s* (?P{trustee}|{trustAnchor}|{steward}|))?" \ - "(\s+ (?Pverkey=) \s* (?P[~A-Za-z0-9+=/]*))?)".format(nym=IndyTransactions.NYM.name, - trustee=Roles.TRUSTEE.name, - trustAnchor=Roles.TRUST_ANCHOR.name, - steward=Roles.STEWARD.name) - -SEND_NYM_REG_EX = TXN_NYM.format(cmdName='send_nym', cmd='send') - -ADD_GENESIS_NYM_REG_EX = TXN_NYM.format(cmdName='add_genesis', - cmd='add \s+ genesis \s+ transaction') - -NEW_ID_REG_EX = "(\s* (?Pnew\s+DID)" \ - "\s? (?P([A-Za-z0-9+=/]+))? " \ - "\s? (with\s+seed\s+(?P[a-zA-Z0-9]+))? " \ - "\s? 
(as\s+(?P[a-zA-Z0-9-]+))?)" - -GET_NYM_REG_EX = "(\s* (?Psend\s+{getNym}) " \ - "\s+ (?Pdest=)\s*(?P[A-Za-z0-9+=/]+) \s*) ".format( - getNym=IndyTransactions.GET_NYM.name) - -GET_ATTR_REG_EX = \ - "(\s* (?Psend\s+{attrib}) " \ - "\s+ dest=\s*(?P[A-Za-z0-9+=/]+) " \ - "\s+ ((raw=(?P[A-Za-z0-9+=/]+))|(hash=(?P[A-Fa-f0-9]+))|(enc=(?P[A-Za-z0-9+=/]+)) \s*) \s*) ".format( - attrib=IndyTransactions.GET_ATTR.name) - -ADD_ATTRIB_REG_EX = \ - "(\s* (?Psend\s+{attrib}) " \ - "\s+ dest=\s*(?P[A-Za-z0-9+=/]+) " \ - "\s+ ((raw=(?P\{{\s*.*\}}))|(hash=(?P[A-Fa-f0-9]+))|(enc=(?P[A-Za-z0-9+=/]+))) \s*) ".format( - attrib=IndyTransactions.ATTRIB.name) - -SEND_SCHEMA_REG_EX = "(\s*(?Psend\s+{schema})" \ - "\s+(?Pname=)\s*(?P[A-Za-z0-9-_]+)" \ - "\s*(?Pversion=)\s*(?P[0-9.]+)" \ - "\s+(?Pkeys=)\s*(?P[0-9a-zA-Z-_,\s]+)\s*)".format( - schema=IndyTransactions.SCHEMA.name) - -GET_SCHEMA_REG_EX = "(\s*(?Psend\s+{getSchema})" \ - "\s+(?Pdest=) \s* (?P[A-Za-z0-9+=/]*)" \ - "\s+(?Pname=)\s*(?P[A-Za-z0-9-_]+)" \ - "\s*(?Pversion=)\s*(?P[0-9.]+)\s*)".format( - getSchema=IndyTransactions.GET_SCHEMA.name) - - -SEND_CLAIM_DEF_REG_EX = "(\s*(?Psend\s+{issKey})" \ - "\s+(?Pref=)\s*(?P[0-9]+)\s*)"\ - "\s*(?Psignature_type=)\s*(?P[A-Z0-9]+)" \ - .format(issKey=IndyTransactions.CLAIM_DEF.name) - -GET_CLAIM_DEF_REG_EX = "(\s*(?Psend\s+{issKey})" \ - "\s+(?Pref=)\s*(?P[0-9]+)\s*)"\ - "\s*(?Psignature_type=)\s*(?P[A-Z0-9]+)" \ - .format(issKey=IndyTransactions.GET_CLAIM_DEF.name) - - -ADD_ATTRS_PROVER_REG_EX = "(\s*(?Pattribute \s+ known \s+ to) " \ - "\s+ (?P[A-Za-z0-9+=/]+) " \ - "\s+ (?P[A-Za-z0-9_,+=/ ]+) \s*)" - -INIT_ATTR_REPO_REG_EX = "(\s*(?Pinitialize " \ - "\s+ mock \s+ attribute \s+ repo)\s*)" - -ADD_ATTRS_REG_EX = "(\s*(?Padd \s+ attribute) " \ - "\s+ (?P[A-Za-z0-9_,+=/ ]+) " \ - "\s+ for \s+ (?P[a-zA-Z0-9\-_]+) \s*)" - -SHOW_FILE_REG_EX = "(\s*(?Pshow) " \ - "\s+ (?P[A-Za-z0-9+-.=/]+)\s*)" - -CONNECT_REG_EX = "(\s*(?Pconnect) \s+ (?P[A-Za-z0-9-]+)\s*)" - -DISCONNECT_REG_EX = 
"(\s*(?Pdisconnect))" - -LOAD_FILE_REG_EX = "(\s*(?Pload) " \ - "\s+ (?P[A-Za-z0-9+-.=/]+)\s*)" - -SHOW_LINK_REG_EX = '(\s*(?Pshow \s+ connection) ' \ - '\s+ (?P[A-Za-z0-9-." ]+) \s*)' - -SYNC_LINK_REG_EX = '(\s*(?Psync) ' \ - '\s+ (?P[A-Za-z0-9-." ]+) \s*)' - -PING_TARGET_REG_EX = '(\s*(?Pping) ' \ - '\s+ (?P[A-Za-z0-9-." ]+) \s*)' - -ACCEPT_LINK_REG_EX = \ - '(\s*(?Paccept \s+ request \s+ from) ' \ - '\s+ (?P[A-Za-z0-9-." ]+) \s*)' - -SHOW_CLAIM_REG_EX = '(\s*(?Pshow \s+ claim) ' \ - '\s+ (?P[A-Za-z0-9-." ]+) ' \ - '\s*)' - -LIST_CLAIMS_REG_EX = '(\s*(?Plist \s+ claims) ' \ - '\s+ (?P[A-Za-z0-9-." ]+) \s*)' - -LIST_LINKS_REG_EX = '(\s*(?Plist \s+ connections))' - -REQUEST_CLAIM_REG_EX = '(\s*(?Prequest \s+ claim) ' \ - '\s+ (?P[A-Za-z0-9-." ]+) ' \ - '\s*)' - -# SHOW_CLAIM_REQ_REG_EX = '(\s*(?Pshow \s+ claim \s+ request) ' \ -# '\s+ (?P[A-Za-z0-9-." ]+) ' \ -# '\s*)' -# -SHOW_PROOF_REQ_REG_EX = '(\s*(?Pshow \s+ proof \s+ request) ' \ - '\s+ (?P[A-Za-z0-9-." ]+) ' \ - '\s*)' - -SET_ATTRIBUTE_REG_EX = '(\s*(?Pset) ' \ - '\s+ (?P[A-Za-z-_0-9]+) ' \ - '\s+ to \s+ (?P[A-Za-z0-9+-_,." /]+)' \ - '\s*)' - -SEND_PROOF_REG_EX = '(\s*(?Psend \s+ proof) ' \ - '\s+ (?P[A-Za-z0-9-." ]+) ' \ - '\s+ to \s+ (?P[A-Za-z0-9-." ]+) \s*)' - -SEND_PROOF_REQ_REG_EX = '(\s*(?Psend \s+ proof-request) ' \ - '\s+ (?P[A-Za-z0-9-." ]+) ' \ - '\s+ to (?P[A-Za-z0-9-." ]+) \s*)' - -SEND_NODE_REG_EX = "(\s* (?Psend\s+{node}) " \ - "\s+ dest=\s*(?P[A-Za-z0-9+/]+) " \ - "\s+ data=(?P\{{\s*.*\}}) \s*) ".format( - node=IndyTransactions.NODE.name) - -SEND_POOL_UPG_REG_EX = "(\s*(?Psend\s+{poolUpgrade})" \ - "\s+(?Pname=)\s*(?P[A-Za-z0-9-_]+)" \ - "\s*(?Pversion=)\s*(?P[0-9.]+)" \ - "\s*(?Psha256=)\s*(?P[a-f0-9]+)" \ - "(\s+ (?Paction=)\s*(?Pstart|cancel))" \ - '(\s+ (?Pjustification=)\s*(?P\"[a-zA-z0-9-_\s]+\") \s*)? ' \ - "(\s+ (?Pschedule=)\s*(?P\{{\s*.*\}}) \s*)? " \ - "(\s+ (?Ptimeout=)\s*(?P[0-9+]+))?)" \ - "(\s+ (?Pforce=)\s*(?PTrue|False))?" \ - "(\s+ (?Preinstall=)\s*(?PTrue|False))?" 
\ - "(\s+ (?Ppackage=)\s*(?P.+))?".format( - poolUpgrade=IndyTransactions.POOL_UPGRADE.name) - - -REQ_AVAIL_CLAIMS_REG_EX = '(\s*(?Prequest \s+ available \s+ claims \s+ from) ' \ - '\s+ (?P[A-Za-z0-9-." ]+) \s*)' - - -SEND_POOL_CONFIG_REG_EX = "(\s*(?Psend\s+{poolConfig})" \ - "\s+(?Pwrites=)\s*(?PTrue|False))" \ - "(\s+(?Pforce=)\s*(?PTrue|False))?".format( - poolConfig=IndyTransactions.POOL_CONFIG.name) - -CHANGE_CURENT_KEY_REG_EX = "(\s*(?Pchange\s+current\s+key))" \ - "(\s? with\s+seed\s+(?P[a-zA-Z0-9]+))?" - - -SEND_NYM_FORMATTED_REG_EX = getPipedRegEx(SEND_NYM_REG_EX) -GET_NYM_FORMATTED_REG_EX = getPipedRegEx(GET_NYM_REG_EX) -GET_ATTR_FORMATTED_REG_EX = getPipedRegEx(GET_ATTR_REG_EX) -ADD_ATTRIB_FORMATTED_REG_EX = getPipedRegEx(ADD_ATTRIB_REG_EX) -SEND_SCHEMA_FORMATTED_REG_EX = getPipedRegEx(SEND_SCHEMA_REG_EX) -GET_SCHEMA_FORMATTED_REG_EX = getPipedRegEx(GET_SCHEMA_REG_EX) -SEND_CLAIM_DEF_FORMATTED_REG_EX = getPipedRegEx(SEND_CLAIM_DEF_REG_EX) -GET_CLAIM_DEF_FORMATTED_REG_EX = getPipedRegEx(GET_CLAIM_DEF_REG_EX) -ADD_GENESIS_FORMATTED_REG_EX = getPipedRegEx(ADD_GENESIS_NYM_REG_EX) -INIT_ATTR_REPO_FORMATTED_REG_EX = getPipedRegEx(INIT_ATTR_REPO_REG_EX) -ADD_ATTRS_FORMATTED_REG_EX = getPipedRegEx(ADD_ATTRS_REG_EX) -SHOW_FILE_FORMATTED_REG_EX = getPipedRegEx(SHOW_FILE_REG_EX) -LOAD_FILE_FORMATTED_REG_EX = getPipedRegEx(LOAD_FILE_REG_EX) -SHOW_LINK_FORMATTED_REG_EX = getPipedRegEx(SHOW_LINK_REG_EX) -ADD_ATTRS_PROVER_FORMATTED_REG_EX = getPipedRegEx(ADD_ATTRS_PROVER_REG_EX) -CONNECT_FORMATTED_REG_EX = getPipedRegEx(CONNECT_REG_EX) -DISCONNECT_FORMATTED_REG_EX = getPipedRegEx(DISCONNECT_REG_EX) -SYNC_LINK_FORMATTED_REG_EX = getPipedRegEx(SYNC_LINK_REG_EX) -ACCEPT_LINK_FORMATTED_REG_EX = getPipedRegEx(ACCEPT_LINK_REG_EX) -SHOW_CLAIM_FORMATTED_REG_EX = getPipedRegEx(SHOW_CLAIM_REG_EX) -LIST_CLAIMS_FORMATTED_REG_EX = getPipedRegEx(LIST_CLAIMS_REG_EX) -LIST_LINKS_FORMATTED_REG_EX = getPipedRegEx(LIST_LINKS_REG_EX) -REQUEST_CLAIM_FORMATTED_REG_EX = 
getPipedRegEx(REQUEST_CLAIM_REG_EX) -# SHOW_CLAIM_REQ_FORMATTED_REG_EX = getPipedRegEx(SHOW_CLAIM_REQ_REG_EX) -SHOW_PROOF_REQ_FORMATTED_REG_EX = getPipedRegEx(SHOW_PROOF_REQ_REG_EX) -SET_ATTRIBUTE_FORMATTED_REG_EX = getPipedRegEx(SET_ATTRIBUTE_REG_EX) -PING_TARGET_FORMATTED_REG_EX = getPipedRegEx(PING_TARGET_REG_EX) -SEND_PROOF_FORMATTED_REG_EX = getPipedRegEx(SEND_PROOF_REG_EX) -SEND_PROOF_REQ_FORMATTED_REG_EX = getPipedRegEx(SEND_PROOF_REQ_REG_EX) -SEND_NODE_FORMATTED_REG_EX = getPipedRegEx(SEND_NODE_REG_EX) -SEND_POOL_UPG_FORMATTED_REG_EX = getPipedRegEx(SEND_POOL_UPG_REG_EX) -SEND_POOL_CONFIG_FORMATTED_REG_EX = getPipedRegEx(SEND_POOL_CONFIG_REG_EX) -REQ_AVAIL_CLAIMS_FORMATTED_REG_EX = getPipedRegEx(REQ_AVAIL_CLAIMS_REG_EX) -NEW_ID_FORMATTED_REG_EX = getPipedRegEx(NEW_ID_REG_EX) -CHANGE_CURENT_KEY_FORMATTED_REG_EX = getPipedRegEx(CHANGE_CURENT_KEY_REG_EX) diff --git a/indy_client/cli/genesisTxns.py b/indy_client/cli/genesisTxns.py deleted file mode 100644 index c99724529..000000000 --- a/indy_client/cli/genesisTxns.py +++ /dev/null @@ -1,30 +0,0 @@ -from plenum.common.constants import STEWARD, TXN_ID -from plenum.common.types import f - -from indy_common.constants import TXN_TYPE, TARGET_NYM, ROLE, NYM, TRUST_ANCHOR - -STEWARD_SEED = b'steward seed used for signer....' -TRUST_ANCHOR_SEED = b'sponsors are people too.........' 
- -GENESIS_TRANSACTIONS = [ - { - TXN_TYPE: NYM, - TARGET_NYM: 'bx3ePPiBdRywm16OOmZdtlzF5FGmX06Fj2sAYbMdF18=', - TXN_ID: '6b86b273ff34fce19d6b804eff5a3f5747ada4eaa22f1d49c01e52ddb7875b4b', - ROLE: STEWARD - }, - { - TXN_TYPE: NYM, - f.IDENTIFIER.nm: 'bx3ePPiBdRywm16OOmZdtlzF5FGmX06Fj2sAYbMdF18=', - TARGET_NYM: 'MnT3cFlVvVu7QO+QzPp5seU14pkOT7go1PsqDWZSrbo=', - ROLE: TRUST_ANCHOR, - TXN_ID: '6b86b273ff34fce19d6b804eff5a3f5747ada4eaa22f1d49c01e52ddb7875b4c' - }, - # { - # TXN_TYPE: NYM, - # f.IDENTIFIER.nm: 'OP2h59vBVQerRi6FjoOoMhSTv4CAemeEg4LPtDHaEWw=', - # TARGET_NYM: 'ARyM91PzDKveCuqkV9B6TJ5f9YxI8Aw/cz5eDAduNUs=', - # ROLE: TRUST_ANCHOR, - # TXN_ID: '6b86b273ff34fce19d6b804eff5a3f5747ada4eaa22f1d49c01e52ddb7875b4d' - # } -] diff --git a/indy_client/cli/helper.py b/indy_client/cli/helper.py deleted file mode 100644 index 9680e775d..000000000 --- a/indy_client/cli/helper.py +++ /dev/null @@ -1,74 +0,0 @@ -from indy_client.cli.constants import \ - CLIENT_GRAMS_CLIENT_WITH_DID_FORMATTED_REG_EX, \ - CLIENT_GRAMS_CLIENT_ADD_FORMATTED_REG_EX, SEND_NYM_FORMATTED_REG_EX, \ - GET_NYM_FORMATTED_REG_EX, \ - GET_ATTR_FORMATTED_REG_EX, GET_SCHEMA_FORMATTED_REG_EX, \ - GET_CLAIM_DEF_FORMATTED_REG_EX, \ - ADD_ATTRIB_FORMATTED_REG_EX, SEND_SCHEMA_FORMATTED_REG_EX, \ - ADD_GENESIS_FORMATTED_REG_EX, \ - INIT_ATTR_REPO_FORMATTED_REG_EX, ADD_ATTRS_FORMATTED_REG_EX, \ - ADD_ATTRS_PROVER_FORMATTED_REG_EX, CONNECT_FORMATTED_REG_EX, \ - SHOW_FILE_FORMATTED_REG_EX, LOAD_FILE_FORMATTED_REG_EX, \ - SHOW_LINK_FORMATTED_REG_EX, SYNC_LINK_FORMATTED_REG_EX, \ - ACCEPT_LINK_FORMATTED_REG_EX, SHOW_CLAIM_FORMATTED_REG_EX, \ - LIST_CLAIMS_FORMATTED_REG_EX, REQUEST_CLAIM_FORMATTED_REG_EX, \ - SET_ATTRIBUTE_FORMATTED_REG_EX, SHOW_PROOF_REQ_FORMATTED_REG_EX, \ - SEND_CLAIM_DEF_FORMATTED_REG_EX, SEND_PROOF_FORMATTED_REG_EX, \ - PING_TARGET_FORMATTED_REG_EX, SEND_NODE_FORMATTED_REG_EX, \ - SEND_POOL_UPG_FORMATTED_REG_EX, DISCONNECT_FORMATTED_REG_EX, \ - NEW_ID_FORMATTED_REG_EX, 
SEND_PROOF_REQ_FORMATTED_REG_EX, \ - REQ_AVAIL_CLAIMS_FORMATTED_REG_EX, LIST_LINKS_FORMATTED_REG_EX, SEND_POOL_CONFIG_FORMATTED_REG_EX, \ - CHANGE_CURENT_KEY_FORMATTED_REG_EX -# SHOW_CLAIM_REQ_FORMATTED_REG_EX - - -def getNewClientGrams(): - # TODO: Why do we have to manually pipe each regex except the last - # one? Fix this - return [ - ADD_GENESIS_FORMATTED_REG_EX, - # Regex for `new client steward with DID ` - CLIENT_GRAMS_CLIENT_WITH_DID_FORMATTED_REG_EX, - # Regex for `client steward add TRUST ANCHOR bob` or `client steward - # add user bob` - CLIENT_GRAMS_CLIENT_ADD_FORMATTED_REG_EX, - SEND_NYM_FORMATTED_REG_EX, - GET_NYM_FORMATTED_REG_EX, - ADD_ATTRIB_FORMATTED_REG_EX, - GET_ATTR_FORMATTED_REG_EX, - SEND_SCHEMA_FORMATTED_REG_EX, - GET_SCHEMA_FORMATTED_REG_EX, - SEND_CLAIM_DEF_FORMATTED_REG_EX, - GET_CLAIM_DEF_FORMATTED_REG_EX, - INIT_ATTR_REPO_FORMATTED_REG_EX, - ADD_ATTRS_FORMATTED_REG_EX, - SHOW_LINK_FORMATTED_REG_EX, - SHOW_FILE_FORMATTED_REG_EX, - LOAD_FILE_FORMATTED_REG_EX, - ADD_ATTRS_PROVER_FORMATTED_REG_EX, - CONNECT_FORMATTED_REG_EX, - DISCONNECT_FORMATTED_REG_EX, - SYNC_LINK_FORMATTED_REG_EX, - ACCEPT_LINK_FORMATTED_REG_EX, - # SHOW_CLAIM_REQ_FORMATTED_REG_EX, - SHOW_PROOF_REQ_FORMATTED_REG_EX, - SHOW_CLAIM_FORMATTED_REG_EX, - LIST_CLAIMS_FORMATTED_REG_EX, - LIST_LINKS_FORMATTED_REG_EX, - REQUEST_CLAIM_FORMATTED_REG_EX, - SET_ATTRIBUTE_FORMATTED_REG_EX, - PING_TARGET_FORMATTED_REG_EX, - SEND_PROOF_FORMATTED_REG_EX, - SEND_NODE_FORMATTED_REG_EX, - SEND_POOL_UPG_FORMATTED_REG_EX, - SEND_POOL_CONFIG_FORMATTED_REG_EX, - REQ_AVAIL_CLAIMS_FORMATTED_REG_EX, - NEW_ID_FORMATTED_REG_EX, - SEND_PROOF_REQ_FORMATTED_REG_EX, - REQ_AVAIL_CLAIMS_FORMATTED_REG_EX, - CHANGE_CURENT_KEY_FORMATTED_REG_EX - ] - - -NEXT_COMMANDS_TO_TRY_TEXT = "Try Next:" -USAGE_TEXT = "Usage:" diff --git a/indy_client/client/__init__.py b/indy_client/client/__init__.py deleted file mode 100644 index 2b2910f01..000000000 --- a/indy_client/client/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ 
-ISSUER = "issuer" -PROVER = "prover" -VERIFIER = "verifier" -roles = [ISSUER, PROVER, VERIFIER] diff --git a/indy_client/client/client.py b/indy_client/client/client.py deleted file mode 100644 index 71eaf522e..000000000 --- a/indy_client/client/client.py +++ /dev/null @@ -1,205 +0,0 @@ -import os -import json -import uuid -from collections import deque -from typing import Dict, Union, Tuple, Optional, Callable - -from base58 import b58decode, b58encode - -from plenum.client.client import Client as PlenumClient -from plenum.common.error import fault -from plenum.common.txn_util import get_type -from stp_core.common.log import getlogger -from plenum.common.startable import Status - -from plenum.common.constants import REPLY, NAME, VERSION, REQACK, REQNACK, \ - TXN_ID, TARGET_NYM, NONCE, STEWARD, OP_FIELD_NAME, REJECT, TYPE -from plenum.common.types import f -from plenum.common.util import libnacl -from plenum.server.router import Router -from stp_core.network.auth_mode import AuthMode -from stp_zmq.simple_zstack import SimpleZStack - -from indy_common.constants import TXN_TYPE, ATTRIB, DATA, GET_NYM, ROLE, \ - NYM, GET_TXNS, LAST_TXN, TXNS, SCHEMA, CLAIM_DEF, SKEY, DISCLO, \ - GET_ATTR, TRUST_ANCHOR, GET_CLAIM_DEF, GET_SCHEMA - -from indy_client.persistence.client_req_rep_store_file import ClientReqRepStoreFile -from indy_client.persistence.client_txn_log import ClientTxnLog -from indy_common.config_util import getConfig -from stp_core.types import HA -from indy_common.state import domain - -from indy_client.agent.jsonpickle_util import setUpJsonpickle -from indy_client.client.wallet.migration import migrate_indy_wallet_raw - -from indy_common.plugin_helper import writeAnonCredPlugin -from plenum.client.wallet import WALLET_RAW_MIGRATORS - - -logger = getlogger() - - -class Client(PlenumClient): - anoncredsAreSetUp = False - - def __init__(self, - name: str=None, - nodeReg: Dict[str, HA]=None, - ha: Union[HA, Tuple[str, int]]=None, - peerHA: Union[HA, Tuple[str, 
int]]=None, - basedirpath: str=None, - config=None, - sighex: str=None): - self.config = config or getConfig() - self.setupAnoncreds() - - basedirpath = basedirpath or os.path.join(self.config.CLI_NETWORK_DIR, self.config.NETWORK_NAME) - super().__init__(name, - nodeReg, - ha, - basedirpath, - config=config, - sighex=sighex) - self.autoDiscloseAttributes = False - self.requestedPendingTxns = False - self.hasAnonCreds = bool(peerHA) - if self.hasAnonCreds: - self.peerHA = peerHA if isinstance(peerHA, HA) else HA(*peerHA) - - stackargs = dict(name=self.stackName, - ha=peerHA, - main=True, - auth_mode=AuthMode.ALLOW_ANY.value) - - self.peerMsgRoutes = [] - self.peerMsgRouter = Router(*self.peerMsgRoutes) - self.peerStack = self.peerStackClass( - stackargs, msgHandler=self.handlePeerMessage) - self.peerStack.sign = self.sign - self.peerInbox = deque() - - # To let client send this transactions to just one node - self._read_only_requests = {GET_NYM, - GET_ATTR, - GET_CLAIM_DEF, - GET_SCHEMA} - - @property - def peerStackClass(self): - return SimpleZStack - - def setupAnoncreds(self): - if self.anoncredsAreSetUp is False: - writeAnonCredPlugin(os.path.expanduser(self.config.CLI_BASE_DIR)) - # This is to setup anoncreds wallet related custom jsonpickle handlers to - # serialize/deserialize it properly - setUpJsonpickle() - WALLET_RAW_MIGRATORS.append(migrate_indy_wallet_raw) - self.anoncredsAreSetUp = True - - def handlePeerMessage(self, msg): - """ - Use the peerMsgRouter to pass the messages to the correct - function that handles them - - :param msg: the P2P client message. 
- """ - return self.peerMsgRouter.handle(msg) - - def getReqRepStore(self): - return ClientReqRepStoreFile(self.ledger_dir) - - def getTxnLogStore(self): - return ClientTxnLog(self.ledger_dir) - - def handleOneNodeMsg(self, wrappedMsg, excludeFromCli=None) -> None: - msg, sender = wrappedMsg - # excludeGetTxns = (msg.get(OP_FIELD_NAME) == REPLY and - # msg[f.RESULT.nm].get(TXN_TYPE) == GET_TXNS) - excludeReqAcks = msg.get(OP_FIELD_NAME) == REQACK - excludeReqNacks = msg.get(OP_FIELD_NAME) == REQNACK - excludeReply = msg.get(OP_FIELD_NAME) == REPLY - excludeReject = msg.get(OP_FIELD_NAME) == REJECT - excludeFromCli = excludeFromCli or excludeReqAcks or excludeReqNacks \ - or excludeReply or excludeReject - super().handleOneNodeMsg(wrappedMsg, excludeFromCli) - if OP_FIELD_NAME not in msg: - logger.error("Op absent in message {}".format(msg)) - - def requestConfirmed(self, key) -> bool: - return self.txnLog.hasTxnWithReqId(key) - - def hasConsensus(self, identifier: str, reqId: int) -> Optional[str]: - return super().hasConsensus(identifier, reqId) - - def prepare_for_state(self, result): - request_type = result[TYPE] - if request_type == GET_NYM: - return domain.prepare_get_nym_for_state(result) - if request_type == GET_ATTR: - attr_type, path, value, hashed_value, value_bytes = \ - domain.prepare_get_attr_for_state(result) - return path, value_bytes - if request_type == GET_CLAIM_DEF: - return domain.prepare_get_claim_def_for_state(result) - if request_type == GET_SCHEMA: - return domain.prepare_get_schema_for_state(result) - raise ValueError("Cannot make state key for " - "request of type {}" - .format(request_type)) - - def getTxnsByType(self, txnType): - return self.txnLog.getTxnsByType(txnType) - - # TODO: Just for now. 
Remove it later - def doAttrDisclose(self, origin, target, txnId, key): - box = libnacl.public.Box(b58decode(origin), b58decode(target)) - - data = json.dumps({TXN_ID: txnId, SKEY: key}) - nonce, boxedMsg = box.encrypt(data.encode(), pack_nonce=False) - - op = { - TARGET_NYM: target, - TXN_TYPE: DISCLO, - NONCE: b58encode(nonce).decode("utf-8"), - DATA: b58encode(boxedMsg).decode("utf-8") - } - self.submit(op, identifier=origin) - - def doGetAttributeTxn(self, identifier, attrName): - op = { - TARGET_NYM: identifier, - TXN_TYPE: GET_ATTR, - DATA: json.dumps({"name": attrName}) - } - self.submit(op, identifier=identifier) - - @staticmethod - def _getDecryptedData(encData, key): - data = bytes(bytearray.fromhex(encData)) - rawKey = bytes(bytearray.fromhex(key)) - box = libnacl.secret.SecretBox(rawKey) - decData = box.decrypt(data).decode() - return json.loads(decData) - - def hasNym(self, nym): - for txn in self.txnLog.getTxnsByType(NYM): - if get_type(txn) == NYM: - return True - return False - - def _statusChanged(self, old, new): - super()._statusChanged(old, new) - - def start(self, loop): - super().start(loop) - if self.hasAnonCreds and self.status not in Status.going(): - self.peerStack.start() - - async def prod(self, limit) -> int: - s = await super().prod(limit) - if self.hasAnonCreds: - return s + await self.peerStack.service(limit) - else: - return s diff --git a/indy_client/client/lib/Cargo.lock b/indy_client/client/lib/Cargo.lock deleted file mode 100644 index c2a5903e9..000000000 --- a/indy_client/client/lib/Cargo.lock +++ /dev/null @@ -1,112 +0,0 @@ -[root] -name = "sovclient" -version = "0.1.0" -dependencies = [ - "libc 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)", - "rust-base58 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "libc" -version = "0.2.20" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "num" -version = "0.1.36" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "num-bigint 0.1.35 (registry+https://github.com/rust-lang/crates.io-index)", - "num-complex 0.1.35 (registry+https://github.com/rust-lang/crates.io-index)", - "num-integer 0.1.32 (registry+https://github.com/rust-lang/crates.io-index)", - "num-iter 0.1.32 (registry+https://github.com/rust-lang/crates.io-index)", - "num-rational 0.1.35 (registry+https://github.com/rust-lang/crates.io-index)", - "num-traits 0.1.36 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "num-bigint" -version = "0.1.35" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "num-integer 0.1.32 (registry+https://github.com/rust-lang/crates.io-index)", - "num-traits 0.1.36 (registry+https://github.com/rust-lang/crates.io-index)", - "rand 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)", - "rustc-serialize 0.3.22 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "num-complex" -version = "0.1.35" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "num-traits 0.1.36 (registry+https://github.com/rust-lang/crates.io-index)", - "rustc-serialize 0.3.22 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "num-integer" -version = "0.1.32" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "num-traits 0.1.36 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "num-iter" -version = "0.1.32" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "num-integer 0.1.32 (registry+https://github.com/rust-lang/crates.io-index)", - "num-traits 0.1.36 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "num-rational" -version = "0.1.35" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "num-bigint 
0.1.35 (registry+https://github.com/rust-lang/crates.io-index)", - "num-integer 0.1.32 (registry+https://github.com/rust-lang/crates.io-index)", - "num-traits 0.1.36 (registry+https://github.com/rust-lang/crates.io-index)", - "rustc-serialize 0.3.22 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "num-traits" -version = "0.1.36" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "rand" -version = "0.3.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "libc 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "rust-base58" -version = "0.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "num 0.1.36 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "rustc-serialize" -version = "0.3.22" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[metadata] -"checksum libc 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)" = "684f330624d8c3784fb9558ca46c4ce488073a8d22450415c5eb4f4cfb0d11b5" -"checksum num 0.1.36 (registry+https://github.com/rust-lang/crates.io-index)" = "bde7c03b09e7c6a301ee81f6ddf66d7a28ec305699e3d3b056d2fc56470e3120" -"checksum num-bigint 0.1.35 (registry+https://github.com/rust-lang/crates.io-index)" = "88b14378471f7c2adc5262f05b4701ef53e8da376453a8d8fee48e51db745e49" -"checksum num-complex 0.1.35 (registry+https://github.com/rust-lang/crates.io-index)" = "f0c78e054dd19c3fd03419ade63fa661e9c49bb890ce3beb4eee5b7baf93f92f" -"checksum num-integer 0.1.32 (registry+https://github.com/rust-lang/crates.io-index)" = "fb24d9bfb3f222010df27995441ded1e954f8f69cd35021f6bef02ca9552fb92" -"checksum num-iter 0.1.32 (registry+https://github.com/rust-lang/crates.io-index)" = "287a1c9969a847055e1122ec0ea7a5c5d6f72aad97934e131c83d5c08ab4e45c" -"checksum num-rational 0.1.35 
(registry+https://github.com/rust-lang/crates.io-index)" = "54ff603b8334a72fbb27fe66948aac0abaaa40231b3cecd189e76162f6f38aaf" -"checksum num-traits 0.1.36 (registry+https://github.com/rust-lang/crates.io-index)" = "a16a42856a256b39c6d3484f097f6713e14feacd9bfb02290917904fae46c81c" -"checksum rand 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)" = "022e0636ec2519ddae48154b028864bdce4eaf7d35226ab8e65c611be97b189d" -"checksum rust-base58 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "b313b91fcdc6719ad41fa2dad2b7e810b03833fae4bf911950e15529a5f04439" -"checksum rustc-serialize 0.3.22 (registry+https://github.com/rust-lang/crates.io-index)" = "237546c689f20bb44980270c73c3b9edd0891c1be49cc1274406134a66d3957b" diff --git a/indy_client/client/lib/Cargo.toml b/indy_client/client/lib/Cargo.toml deleted file mode 100644 index 0f9811fc6..000000000 --- a/indy_client/client/lib/Cargo.toml +++ /dev/null @@ -1,17 +0,0 @@ -[package] -name = "sovclient" -version = "0.1.0" -authors = ["Daniel Hardman "] - -[lib] -name = "sovclient" -crate-type = ["dylib"] - -[dependencies] -libc = "0.2.0" -rust-base58 = "0.0.4" -time = "0.1" - -#[dependencies.fixture] -#git = "https://github.com/Jenselme/rust-fixture" -#tag = "0.1.5" \ No newline at end of file diff --git a/indy_client/client/lib/src/constants.rs b/indy_client/client/lib/src/constants.rs deleted file mode 100644 index 8b763ca11..000000000 --- a/indy_client/client/lib/src/constants.rs +++ /dev/null @@ -1,3 +0,0 @@ -pub const BAD_FIRST_ARG: i32 = -1; -pub const BAD_SECOND_ARG: i32 = -2; -pub const BAD_THIRD_ARG: i32 = -3; diff --git a/indy_client/client/lib/src/internal/mod.rs b/indy_client/client/lib/src/internal/mod.rs deleted file mode 100644 index 579bc000f..000000000 --- a/indy_client/client/lib/src/internal/mod.rs +++ /dev/null @@ -1,253 +0,0 @@ -extern crate libc; -extern crate time; - - -use libc::{c_char}; -use std::ffi::{CStr}; -use std::panic; -use std::ptr; - - -/// Allow state to be owned 
inside this object. C-callable functions exposed by this library -/// are not object-oriented, so we create a static list of these objects and then refer to them -/// by id as the first parameter of each function. This allows us to look up state without -/// requiring the consumer of the lib to manage it in an object that crosses the lib boundary. -pub struct Client { - /// When current request should time out, in nanoseconds since - request_timeout_after: SteadyTime, - response_callback: fn(client_id: i32, callback_arg: u64, error_num: i32, data: *mut c_char), - callback_arg: u64, - error_num: i32, - data: [u8] -} - -impl Client { - - pub fn new(host_and_port: &str) -> Client { - Client {} - } -} - -// Temporary measure until we can maintain a mutexed list of clients. -static THE_ONLY_CLIENT_RIGHT_NOW: Client = Client {}; - -pub fn get_client_from_id(client_id: i32) -> Option<&'static Client> { - // Right now, this next line is a useless optimization. But when we have a client array, - // it will save a mutex on error cases. - if client_id < 0 { return None } - if client_id == 0 { return Some(&THE_ONLY_CLIENT_RIGHT_NOW) } - None -} - -/// Rust's ptr::null() returns a const null ptr; this function gives us a null, mutable char * -/// in a single step. -pub fn null_ptr_as_c_str() -> *mut c_char { - let p: *const c_char = ptr::null(); - p as *mut c_char -} - -// this next function is a stub where we need to call libsodium to wrap messages, unless we decide -// to use CurveCP instead. - -// Example of how to link to a system library. Libsodium will follow this pattern. -/* -#[link(name = "snappy")] -extern { - fn snappy_compress(input: *const u8, - input_length: size_t, - compressed: *mut u8, - compressed_length: *mut size_t) -> c_int; -} -*/ - - -/// Convert a c-style string into a Rust-style String, owned on the Rust side of the lib boundary. -/// If the input value is a null ptr, contains invalid utf-8, or has other panic-worthy problems, -/// return None. 
An empty string (a C-style array where the first byte is null) is considered -/// valid. -pub fn string_from_c_ptr(cstr: *const c_char) -> Option { - if !cstr.is_null() { - - // Catch any panics that may happen inside Rust over the next few lines of logic, so we don't - // attempt to propagate the panics across a lib boundary; that would yield undefined behavior. - // This mechanis is not foolproof--some panics in Rust abort a process instead of unwinding - // the stack. It is not intended to work like a generic try...catch. But it does make our - // library more robust. See http://bit.ly/2koEXss. - let result = panic::catch_unwind(|| { - - // Now wrap arg in a CStr, which gives a Rust-style object interface to the ptr. - // This is unsafe for several reasons; the ones we can't protect against are bogus ptrs - // or ptrs to data that isn't null-terminated. In such cases, the next line will cause an - // access violation because the current impl of from_ptr() attempts to find the null - // terminator. This behavior may change; it is an accident rather than a contract of the - // from_ptr method. - let cstr = unsafe { CStr::from_ptr(cstr) }; - - // Now, attempt to get a reference to the string slice (&str). This will only succeed if - // the text is valid utf8; otherwise an error is returned. - let x = cstr.to_str(); - if x.is_err() { return Err(x) } - - // Convert Result<> to &str. We know this will succeed, if we got this far. - let slice = x.unwrap(); - - // We have a lifetime/ownership challenge. The CStr that provides the &str is about to - // go out of scope as this closure exits. Although the underlying memory is not going - // to be harvested (because CStr doesn't do that), the &str becomes invalid. So we - // have to take ownership (make a copy) of the buffer to keep our value outside the - // closure's scope. We do this by returning a String instead of a CStr. - Ok(slice.to_string()) - }); - - // See what our closure gave us. 
If a valid value, return it; otherwise, return None. - if result.is_ok() { - return Some(result.unwrap().unwrap()); - } - } - None -} - - -macro_rules! check_client { - ($x:ident, $e:expr) => { - let client = get_client_from_id($x); - if client.is_none() { return $e } - } -} - - -macro_rules! check_client_with_null_as_error { - ($x:ident) => { check_client!($x, null_ptr_as_c_str()) } -} - - -macro_rules! check_client_with_num_as_error { - ($x:ident) => { check_client!($x, BAD_FIRST_ARG) } -} - -macro_rules! check_useful_str { - ($x:ident, $e:expr) => { - let $x = match string_from_c_ptr($x) { - None => return $e, - Some(val) => val - }; - if $x.is_empty() { return $e } - } -} - -macro_rules! check_useful_str_with_null_as_error { - ($x:ident) => { check_useful_str!($x, null_ptr_as_c_str()) } -} - - -/// Use public key cryptography to encrypt a message for a particular recipient. -pub fn encrypt_msg(msg: &[u8], src_priv_key: &[u8], tgt_pub_key: &[u8]) { -/* - Sample C code from libsodium: - - #define MESSAGE (const unsigned char *) "test" - #define MESSAGE_LEN 4 - #define CIPHERTEXT_LEN (crypto_box_MACBYTES + MESSAGE_LEN) - - unsigned char alice_publickey[crypto_box_PUBLICKEYBYTES]; - unsigned char alice_secretkey[crypto_box_SECRETKEYBYTES]; - crypto_box_keypair(alice_publickey, alice_secretkey); - - unsigned char bob_publickey[crypto_box_PUBLICKEYBYTES]; - unsigned char bob_secretkey[crypto_box_SECRETKEYBYTES]; - crypto_box_keypair(bob_publickey, bob_secretkey); - - unsigned char nonce[crypto_box_NONCEBYTES]; - unsigned char ciphertext[CIPHERTEXT_LEN]; - randombytes_buf(nonce, sizeof nonce); - if (crypto_box_easy(ciphertext, MESSAGE, MESSAGE_LEN, nonce, - bob_publickey, alice_secretkey) != 0) { - /* error */ - } - - unsigned char decrypted[MESSAGE_LEN]; - if (crypto_box_open_easy(decrypted, ciphertext, CIPHERTEXT_LEN, nonce, - alice_publickey, bob_secretkey) != 0) { - /* message for Bob pretending to be from Alice has been forged! 
*/ - } -*/ -} - -/* -use std::thread; -use std::net; - -fn socket(listen_on: net::SocketAddr) -> net::UdpSocket { - let attempt = net::UdpSocket::bind(listen_on); - let mut socket; - match attempt { - Ok(sock) => { - println!("Bound socket to {}", listen_on); - socket = sock; - }, - Err(err) => panic!("Could not bind: {}", err) - } - socket -} - -fn read_message(socket: net::UdpSocket) -> Vec { - let mut buf: [u8; 1] = [0; 1]; - println!("Reading data"); - let result = socket.recv_from(&mut buf); - drop(socket); - let mut data; - match result { - Ok((amt, src)) => { - println!("Received data from {}", src); - data = Vec::from(&buf[0..amt]); - }, - Err(err) => panic!("Read error: {}", err) - } - data -} - -pub fn send_message(send_addr: net::SocketAddr, target: net::SocketAddr, data: Vec) { - let socket = socket(send_addr); - println!("Sending data"); - let result = socket.send_to(&data, target); - drop(socket); - match result { - Ok(amt) => println!("Sent {} bytes", amt), - Err(err) => panic!("Write error: {}", err) - } -} - -pub fn listen(listen_on: net::SocketAddr) -> thread::JoinHandle> { - let socket = socket(listen_on); - let handle = thread::spawn(move || { - read_message(socket) - }); - handle -} - -#[cfg(test)] -mod test { - use std::net; - use std::thread; - use super::*; - - #[test] - fn test_udp() { - println!("UDP"); - let ip = net::Ipv4Addr::new(127, 0, 0, 1); - let listen_addr = net::SocketAddrV4::new(ip, 8888); - let send_addr = net::SocketAddrV4::new(ip, 8889); - let future = listen(net::SocketAddr::V4(listen_addr)); - let message: Vec = vec![10]; - // give the thread 3s to open the socket - thread::sleep_ms(3000); - send_message(net::SocketAddr::V4(send_addr), net::SocketAddr::V4(listen_addr), message); - println!("Waiting"); - let received = future.join().unwrap(); - println!("Got {} bytes", received.len()); - assert_eq!(1, received.len()); - assert_eq!(10, received[0]); - } -} -*/ - diff --git a/indy_client/client/lib/src/lib.rs 
b/indy_client/client/lib/src/lib.rs deleted file mode 100644 index 2496b76c5..000000000 --- a/indy_client/client/lib/src/lib.rs +++ /dev/null @@ -1,213 +0,0 @@ -#![crate_type = "lib"] - -// Jan 25, 2017: Turn off certain warnings while we're experimenting with techniques, features, and -// tests. For the time being, we don't want the noise. Remove these attributes when we are ready to -// be serious about implementation; we do NOT want to ignore these for more than a few days. -#![allow(dead_code)] -#![allow(unused_variables)] - - -// To make it easy to use C data types, import the libc crate. -extern crate libc; - - -use libc::{c_char}; -use std::ffi::{CString}; - -#[macro_use] -mod internal; - -mod tests; -mod constants; -mod strutil; - -use constants::*; -use internal::*; -//use strutil::*; - - -/// Create a client handle that manages state such as a connection to the ledger, propagated errors, -/// and so forth. All calls to the ledger require a client as context. -/// -/// An individual client is NOT inherently threadsafe; callers should ensure either that a client -/// is only accessed from a single thread, or that it is mutexed appropriately. Clients are cheap -/// and easy to create, so creating one per thread is perfectly reasonable. You can have as many -/// clients working in parallel as you like. -/// -/// @return the id of the client on success (in which case the number will be non-negative), -/// or an error code on failure (in which case the number will be negative). -#[no_mangle] -pub extern fn init_client(host_and_port: *const c_char) -> i32 { - check_useful_str!(host_and_port, BAD_FIRST_ARG); - - // All error conditions have been tested; add the client to our internal list and return - // its index. - - 0 // for now, hard-code the index of 0. -} - -/// Release a client to free its resources. This call is idempotent. On success, return 0. -/// On failure, return an error. 
-#[no_mangle] -pub extern fn release_client(client_id: i32) -> i32 { - let client = get_client_from_id(client_id); - 0 -} - -/// Write a new DID to the ledger, or update an existing DID's attributes. -/// @param dest: the DID that will be created or modified--or a DID alias. -/// @param verkey: the verkey for the new DID. Optional; if empty/null, defaults to same value as dest. -/// @param xref: if dest is an alias, this is the DID it refers to. Otherwise, ignored. -/// @param data: Optional. The alias for the DID. -/// @param role: Optional. One of "USER", "TRUST_ANCHOR", "STEWARD", "TRUSTEE", or null/empty. -/// Assigns a role to the DID, or removes all roles (and thus all privileges for writing) if -/// null empty. (The latter can only be one by a trustee.) -/// Only a steward can create new trust anchors; only other trustees can create a new trustee. -#[no_mangle] -pub extern fn set_did(client_id: i32, did: *const c_char, verkey: *const c_char, xref: *const c_char, data: *const c_char, role: *const c_char) -> i32 { - check_client_with_num_as_error!(client_id); - check_useful_str!(did, BAD_SECOND_ARG); - 0 -} - -/// Find the current verification key for a given DID. Returns a base-58-encoded string on success, -/// an empty string if there is no current key for the DID (it is under guardianship), or null if -/// the client or DID is invalid. -/// -/// @param client_id: An opaque numeric handle returned by init_client() and not yet closed by -/// release_client(). -/// -/// Returns a C-style const char * that was allocated by the lib and must be freed by it. The caller -/// must call free_str() once the string has been read and is no longer needed, else memory will leak. 
-#[no_mangle] -pub extern fn get_verkey(client_id: i32, did: *const c_char) -> *mut c_char { - check_client_with_null_as_error!(client_id); - check_useful_str_with_null_as_error!(did); - if did.len() != 40 { return null_ptr_as_c_str() } - - let s = CString::new(r#"MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCABMC"#).unwrap(); - // Transfer ownership of this string to the c caller; Rust is no longer responsible. - s.into_raw() -} - -/// Look up information about a DID; return a full DDO if the DID exists, or null if the client or -/// DID are invalid. -/// -/// This answers the same question as get_verkey(), and many more. It is substantially less -/// efficient because the data it returns is "heavy" and requires parsing, so it should only be -/// used if the extra data is necessary. -/// -/// Returns a C-style const char * that was allocated by the lib and must be freed by it. The caller -/// must call free_str() once the string has been read and is no longer needed, else memory will leak. -#[no_mangle] -pub extern fn get_ddo(client_id: i32, did: *const c_char) -> *mut c_char { - check_client_with_null_as_error!(client_id); - check_useful_str_with_null_as_error!(did); - - let s = CString::new(r#"{ - "@context": "https://example.org/did/v1", - "id": "did:sov:21tDAKCERh95uGgKbJNHYp", - "equiv-id": [ - "did:sov:33ad7beb1abc4a26b89246", - "did:sov:f336a645f5a941b7ab8oac" - ], - "verkey": "MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCABMC", - "control": [ - "self", - "did:sov:bsAdB81oHKaCmLTsgajtp9AoAHE9ei4", - "did:sov:21tDAKCERh95uGgKbJNHYpE8WEogrsf" - ], - "service": { - "openid": "https://openid.example.com/456", - "xdi": "https://xdi.example.com/123" - }, - "type": "http://schema.org/Person", - "creator": "did:sov:21tDAKCERh95uGgKbJNHYpE8WEogrsf", - "created": "2002-10-10T17:00:00Z", - "updated": "2016-10-17T02:41:00Z", - "signature": { - "type": "LinkedDataSignature2015", - "created": "2016-02-08T16:02:20Z", - "Creator": "did:sov:21tDAKCERh95uGgKbJNHYpE8WEogrsf/keys/1", - 
"signatureValue": "IOmA4R7TfhkYTYW87z640O3GYFldw0yqie9Wl1kZ5OBYNAKOwG5uOsPRK8/2C4STOWF+83cMcbZ3CBMq2/gi25s=" - } -}"#).unwrap(); - s.into_raw() -} - -/// Free a pointer previously allocated by a function that returns a string from the library. -/// Calling this function with a null pointer is a no-op. -#[no_mangle] -pub extern fn free_str(c_ptr: *mut c_char) { - if !c_ptr.is_null() { - // convert the pointer back to `CString` - // it will be automatically dropped immediately - unsafe { CString::from_raw(c_ptr); } - } -} - -/// Set an arbitrary attribute for a DID. -/// @param hash: the sha256 hash of the attribute value. Required. -/// @param raw: the raw bytes of the attribute value. Optional and often omitted--in which case -/// what's recorded on the ledger is just proof of existence, with the value stored elsewhere. -/// This param is used to record public data such as the mailing address of a government -/// office; it should be null for data that has any privacy constraints. -/// @param enc: the encrypted bytes of the attribute value. -#[no_mangle] -pub extern fn set_attr(client_id: i32, did: *const c_char, hash: &[u8], raw: &[u8], enc: &[u8]) -> i32 { - check_client_with_num_as_error!(client_id); - 0 -} - -/// Get an arbitrary attribute for a DID. -/// -/// Returns a C-style const char * that was allocated by the lib and must be freed by it. The caller -/// must call free_str() once the string has been read and is no longer needed, else memory will leak. -#[no_mangle] -pub extern fn get_attr(client_id: i32, did: *const c_char, attr_name: *const c_char) -> *mut c_char { - check_client_with_null_as_error!(client_id); - check_useful_str_with_null_as_error!(did); - check_useful_str_with_null_as_error!(attr_name); - let s = CString::new(r#"attrval"#).unwrap(); - return s.into_raw(); -} - -/// Define a schema on the ledger (e.g., for a claim type or proof type). -/// @param schema: json in the style of schema.org, json-ld, etc. 
-#[no_mangle] -pub extern fn set_schema(client_id: i32, schema: *const c_char) -> i32 { - check_client_with_num_as_error!(client_id); - check_useful_str!(schema, BAD_SECOND_ARG); - 0 -} - -/// Retrieve the definition for a particular schema, as stored on the ledger. -/// -/// Returns a C-style const char * that was allocated by the lib and must be freed by it. The caller -/// must call free_str() once the string has been read and is no longer needed, else memory will leak. -#[no_mangle] -pub extern fn get_schema(client_id: i32) -> *mut c_char { - check_client_with_null_as_error!(client_id); - let s = CString::new(r#"schema"#).unwrap(); - return s.into_raw(); -} - -#[no_mangle] -pub extern fn set_issuer_key(client_id: i32, issuer_key: &[u8]) -> i32 { - check_client_with_num_as_error!(client_id); - 0 -} - -/// Gets the key for the issuer of a claim? Not sure how this fits. It's a transaction type in the -/// overall transaction catalog; need research on use case. -/// -/// Returns a C-style const char * that was allocated by the lib and must be freed by it. The caller -/// must call free_str() once the string has been read and is no longer needed, else memory will leak. 
-#[no_mangle] -pub extern fn get_issuer_key(client_id: i32) -> *mut c_char { - check_client_with_null_as_error!(client_id); - let s = CString::new(r#"issuerkey"#).unwrap(); - return s.into_raw(); -} - -// TODO: NODE, PROPOSE, CANCEL, EXECUTE, VOTE, CONFIG, DECRY diff --git a/indy_client/client/lib/src/strutil.rs b/indy_client/client/lib/src/strutil.rs deleted file mode 100644 index 63a57b993..000000000 --- a/indy_client/client/lib/src/strutil.rs +++ /dev/null @@ -1,20 +0,0 @@ -extern crate rust_base58; - -/*use rust_base58::{ToBase58, FromBase58}; - -fn main() { - let x = &[1, 2, 3]; - - // to_base58() returns a String - let x_b58 = x.to_base58(); - assert_eq!("Ldp", x_b58); - - // from_base58() returns a Vec - let x_again = x_b58.from_base58().unwrap(); - assert_eq!(x, &x_again[..]); - - // from_base58() can fail, for example due to the input string - // containing an invalid base58 character like "I": - assert!("I".from_base58().is_err()); -} -*/ \ No newline at end of file diff --git a/indy_client/client/lib/src/tests/mod.rs b/indy_client/client/lib/src/tests/mod.rs deleted file mode 100644 index 3ecf6b1ca..000000000 --- a/indy_client/client/lib/src/tests/mod.rs +++ /dev/null @@ -1,260 +0,0 @@ -#![cfg(test)] - -use super::*; // refer to exported functions the same way lib consumers do. -use internal::*; // refer to internal impl details without decoration -use constants::*; - -extern crate libc; -use libc::{c_int, size_t, c_char}; -use std::ffi::{CStr, CString}; -use std::ptr; - - -// A macro to make stubs less verbose. There's probably a more idiomatic way to do this... -macro_rules! 
ximpl { - () => {{ panic!("not implemented"); }} -} - - -// ---- tests that exercise the client's ability to do transactions and handle associated errors ---- -// (Some of these may be unnecessary because the ledger tests should already prove correctness, -// but a certain amount of redundancy may be useful, especially if the client lib has distinct -// codepaths for different transaction and parameter inputs.) - -/* -#[test] -fn new_nym_succeeds() { - // We might just do one simple create scenario and call it good. Alternatively, we could get - // exhaustive here, creating variations of this test to exercise scenarios where the actor - // in question is a trust anchor, a steward, or a trustee, permuted by values in the "role" param, - // which can be USER, TRUST_ANCHOR, STEWARD, TRUSTEE. The ledger will already handle all the - // permutations--we only need to test them at this layer if something in the client handles - // them differently. - ximpl!() -} - -#[test] -fn updated_nym_succeeds() { - // We might just do one simple create scenario and call it good. Alternatively, we could get - // exhaustive here, creating variations of this test to exercise scenarios where the actor - // in question is a trust anchor, a steward, or a trustee, permuted by values in the "role" param, - // which can be USER, TRUST_ANCHOR, STEWARD, TRUSTEE. The ledger will already handle all the - // permutations--we only need to test them at this layer if something in the client handles - // them differently. - ximpl!() -} - -/* -#[test] -fn create_existing_nym_fails() { - // Is it worth catching this at the client layer? If not, this test is unnecessary. - ximpl!() -} -*/ - -#[test] -fn nym_with_malformed_request_fails() { - // Ledger tests will already catch this. However, should some malformedness be caught - // client-side to further insulate the ledger from useless load? 
If so, we might want to - // catch malformed json in the data param, a bad hash, or a nym that we don't own or control. - ximpl!() -} - -#[test] -fn attr_with_valid_params_succeeds() { - ximpl!() -} - -#[test] -fn attr_with_invalid_params_fails() { - // Things to check: non-existent NYM or a NYM that we don't own; an attrib that's null. - ximpl!() -} - -#[test] -fn get_attr_with_valid_params_succeeds() { - ximpl!() -} - -#[test] -fn get_attr_with_invalid_params_fails() { - // Things to check: non-existent NYM or a NYM that we don't own; an attrib that's null. - ximpl!() -} - -#[test] -fn get_existing_nym_succeeds() { - // Call GET_NYM with a valid value; should get answer that we accept, including all the - // characteristics of the NYM that matter (its verkey, etc). We might need several variations - // of this test to cover the following scenarios: - // - lookup full CID - // - lookup DID only - // - lookup where we already have the current verkey and just want to confirm - // - lookup where we want to get back everything from the DDO - // - lookup where we want to get back a subset - ximpl!() -} - -#[test] -fn get_existing_nym_rejects_bad_proof() { - // Call GET_NYM with a valid value, but arrange for response to contain a proof that's - // invalid because: A) lacks signature(s); B) proofs don't add up. Either way, client - // should reject. - ximpl!() -} - -#[test] -fn get_nonexistent_nym_fails() { - ximpl!() -} - -// Do we need the DISCLO txn tested? - -#[test] -fn get_schema_succeeds() { - ximpl!() -} - -#[test] -fn set_schema_succeeds() { - ximpl!() -} - -#[test] -fn set_invalid_schema_fails() { - // bad data json: should we catch client-side? - ximpl!() -} - -#[test] -fn set_issuer_key_succeeds() { - ximpl!() -} - -/* -#[test] -fn set_issuer_key_with_bad_schema_seq_num_fails() { - // Is this worth testing, client-side? I think not. - ximpl!() -} -*/ - -#[test] -fn set_issuer_key_with_bad_data_json_fails() { - // Is this worth testing, client-side? 
Maybe, if we want client to do wellformedness or - // json schema validation. - ximpl!() -} - -#[test] -fn get_issuer_key_succeeds() { - ximpl!() -} - -#[test] -fn get_issuer_key_with_bad_schema_seq_num_fails() { - ximpl!() -} - -#[test] -fn rotate_verkey_with_current_verkey_succeeds() { - ximpl!() -} - -#[test] -fn rotate_verkey_with_revoked_verkey_fails() { - ximpl!() -} - -#[test] -fn revoke_verkey_with_current_verkey_succeeds() { - // This is just like rotating the verkey, except new verkey is null -- which you'd do if you - // wanted to permanently terminate an identity. That should be legal. - ximpl!() -} - -#[test] -fn revoke_verkey_with_revoked_verkey_fails() { - ximpl!() -} - -#[test] -fn trustee_can_change_nyms_role_to_none_whereas_others_cant() { - ximpl!() -} - -#[test] -fn calling_any_external_func_with_bad_client_id_fails() { - // Almost all our C-callable functions require a client id as the first param. If caller gives - // an invalid value, we should fail immediately and gracefully. This is a test of our plumbing. - ximpl!() -} -*/ - -#[test] -fn allocate_and_free_str_round_trip() { - let sample_did = CString::new("x").unwrap(); - let raw = sample_did.into_raw(); // Rust stops tracking ownership - let s = get_verkey(0, raw); - free_str(s); -} - -#[test] -fn init_client_with_empty_str() { - let empty = CString::new("").unwrap(); - let n = init_client(empty.as_ptr()); - assert_eq!(n, BAD_FIRST_ARG); -} - -#[test] -fn init_client_with_null_ptr() { - let p: *const c_char = ptr::null(); - let n = init_client(p); - assert_eq!(n, BAD_FIRST_ARG); -} - -/* -#[test] -fn base_58_matches_ledger_base_58() { - // Prove that - ximpl!() -} -*/ - - -// --------- About Fixtures --------- - -// For now, assume a test fixture that sets up a fake validator pool once (before first test) -// and creates a single client with id=0, that is targeted at that pool. This allows us to -// exercise our C-callable functions with client id=0 across all test functions. 
See notes -// about fancier test fixtures at the bottom of the module. - -// Ideal future state: convert to use fixtures in the same style that we know and love from pytest and -// similar frameworks. Figure out why https://github.com/Jenselme/rust-fixture/blob/master/src/lib.rs -// no longer compiles; it looks like Rust's syntax has changed in the last 2 years. When -// this is fixed, uncomment the [dependencies.fixture] section in Cargo.toml, plus the following 3 lines: -// #![feature(phase)] -// #[phase(plugin)] -// extern crate fixture; - -// As an intermediate step between the no-fixture world and the nice-fixture world, we could -// use the older xUnit-style approach to test fixtures. Rust has a nice testrunner (cargo), -// but it doesn't assume object-oriented, so you have to call the setup and teardown functions -// yourself unless you use a solution like the one at http://bit.ly/2jrhqq5. That solution -// doesn't seem mature yet, so this mechanism is more primitive; to use it, we'd have to call -// setup and teardown directly, in each test. - -/*fn start_simulated_cluster() { - -} - -fn setup() -> Client { - // Create a cluster that is accessible on localhost. 
- start_simulated_cluster(); - Client::new("localhost:12345") -} - -fn teardown() { - -} -*/ diff --git a/indy_client/client/wallet/attribute.py b/indy_client/client/wallet/attribute.py deleted file mode 100644 index ac1a7ca55..000000000 --- a/indy_client/client/wallet/attribute.py +++ /dev/null @@ -1,109 +0,0 @@ -from enum import unique, IntEnum -from typing import Optional, TypeVar - -from plenum.common.constants import TXN_TYPE, TARGET_NYM, RAW, ENC, HASH, ORIGIN, CURRENT_PROTOCOL_VERSION -from indy_common.generates_request import GeneratesRequest -from indy_common.constants import ATTRIB, GET_ATTR -from indy_common.types import Request -from stp_core.types import Identifier - -Value = TypeVar('Value', str, dict) - - -class AttributeKey: - def __init__(self, - name: str, - origin: Identifier, - dest: Optional[Identifier]=None): - self.name = name - self.origin = origin - self.dest = dest - - def key(self): - return self.name, self.origin, self.dest - - -@unique -class LedgerStore(IntEnum): - """ - How to store an attribute on the distributed ledger. - - 1. DONT: don't store on public ledger - 2. HASH: store just a hash - 3. ENC: store encrypted - 4. 
RAW: store in plain text - """ - DONT = 1 - HASH = 2 - ENC = 3 - RAW = 4 - - @property - def isWriting(self) -> bool: - """ - Return whether this transaction needs to be written - """ - return self != self.DONT - - -class Attribute(AttributeKey, GeneratesRequest): - # TODO we want to store a history of the attribute changes - def __init__(self, - name: str, # local human friendly name - value: Value=None, # None when we gt the attribute - origin: Identifier=None, # authoring of the attribute - dest: Optional[Identifier]=None, # target - ledgerStore: LedgerStore=LedgerStore.DONT, - encKey: Optional[str]=None, # encryption key - seqNo: Optional[int]=None): # ledger sequence number - super().__init__(name, origin, dest) - self.value = value - self.ledgerStore = ledgerStore - self.encKey = encKey - self.seqNo = seqNo - - def _op_fill_attr_type_and_data(self, op, data): - if self.ledgerStore == LedgerStore.RAW: - op[RAW] = data - elif self.ledgerStore == LedgerStore.ENC: - op[ENC] = data - elif self.ledgerStore == LedgerStore.HASH: - op[HASH] = data - elif self.ledgerStore == LedgerStore.DONT: - raise RuntimeError("This attribute cannot be stored externally") - else: - raise RuntimeError("Unknown ledgerStore: {}". 
- format(self.ledgerStore)) - - def _op(self): - op = { - TXN_TYPE: ATTRIB - } - if self.dest: - op[TARGET_NYM] = self.dest - self._op_fill_attr_type_and_data(op, self.value) - - return op - - def ledgerRequest(self): - if self.ledgerStore.isWriting and not self.seqNo: - assert self.origin is not None - return Request(identifier=self.origin, - operation=self._op(), - protocolVersion=CURRENT_PROTOCOL_VERSION) - - def _opForGet(self): - op = { - TARGET_NYM: self.dest, - TXN_TYPE: GET_ATTR, - } - self._op_fill_attr_type_and_data(op, self.name) - if self.origin: - op[ORIGIN] = self.origin - return op - - def getRequest(self, requestAuthor: Identifier): - if not self.seqNo: - return Request(identifier=requestAuthor, - operation=self._opForGet(), - protocolVersion=CURRENT_PROTOCOL_VERSION) diff --git a/indy_client/client/wallet/connection.py b/indy_client/client/wallet/connection.py deleted file mode 100644 index 22991208e..000000000 --- a/indy_client/client/wallet/connection.py +++ /dev/null @@ -1,265 +0,0 @@ - -from plenum.common.constants import NAME, NONCE -from plenum.common.signer_did import DidIdentity -from plenum.common.types import f -from plenum.common.util import prettyDateDifference, friendlyToRaw -from plenum.common.verifier import DidVerifier -from anoncreds.protocol.types import AvailableClaim - -from indy_common.exceptions import InvalidConnectionException, \ - RemoteEndpointNotFound, NotFound - - -class constant: - TRUST_ANCHOR = "Trust Anchor" - SIGNER_IDENTIFIER = "Identifier" - SIGNER_VER_KEY = "Verification Key" - SIGNER_VER_KEY_EMPTY = '' - - REMOTE_IDENTIFIER = "Remote" - REMOTE_VER_KEY = "Remote Verification Key" - REMOTE_VER_KEY_SAME_AS_ID = '' - REMOTE_END_POINT = "Remote endpoint" - SIGNATURE = "Signature" - CLAIM_REQUESTS = "Claim Requests" - AVAILABLE_CLAIMS = "Available Claims" - RECEIVED_CLAIMS = "Received Claims" - - CONNECTION_NONCE = "Nonce" - CONNECTION_STATUS = "Request status" - CONNECTION_LAST_SYNCED = "Last Synced" - 
CONNECTION_LAST_SEQ_NO = "Last Sync no" - CONNECTION_STATUS_ACCEPTED = "Accepted" - - CONNECTION_NOT_SYNCHRONIZED = "" - UNKNOWN_WAITING_FOR_SYNC = "" - - CONNECTION_ITEM_PREFIX = '\n ' - - NOT_AVAILABLE = "Not Available" - - NOT_ASSIGNED = "not yet assigned" - - -class Connection: - def __init__(self, - name, - localIdentifier=None, - localVerkey=None, - trustAnchor=None, - remoteIdentifier=None, - remoteEndPoint=None, - remotePubkey=None, - request_nonce=None, - proofRequests=None, - internalId=None, - remote_verkey=None): - self.name = name - self.localIdentifier = localIdentifier - self.localVerkey = localVerkey - self.trustAnchor = trustAnchor - self.remoteIdentifier = remoteIdentifier - self.remoteEndPoint = remoteEndPoint - self.remotePubkey = remotePubkey - self.request_nonce = request_nonce - - # for optionally storing a reference to an identifier in another system - # for example, a college may already have a student ID for a particular - # person, and that student ID can be put in this field - self.internalId = internalId - - self.proofRequests = proofRequests or [] # type: List[ProofRequest] - self.verifiedClaimProofs = [] - self.availableClaims = [] # type: List[AvailableClaim] - - self.remoteVerkey = remote_verkey - self.connection_status = None - self.connection_last_synced = None - self.connection_last_sync_no = None - - def __repr__(self): - return self.key - - @property - def key(self): - return self.name - - @property - def isRemoteEndpointAvailable(self): - return self.remoteEndPoint and self.remoteEndPoint != \ - constant.NOT_AVAILABLE - - @property - def isAccepted(self): - return self.connection_status == constant.CONNECTION_STATUS_ACCEPTED - - def __str__(self): - localIdr = self.localIdentifier if self.localIdentifier \ - else constant.NOT_ASSIGNED - trustAnchor = self.trustAnchor or "" - trustAnchorStatus = '(not yet written to Indy)' - if self.remoteVerkey is not None: - if self.remoteIdentifier == self.remoteVerkey: - remoteVerKey = 
constant.REMOTE_VER_KEY_SAME_AS_ID - else: - remoteVerKey = self.remoteVerkey - else: - remoteVerKey = constant.UNKNOWN_WAITING_FOR_SYNC - - remoteEndPoint = self.remoteEndPoint or \ - constant.UNKNOWN_WAITING_FOR_SYNC - if isinstance(remoteEndPoint, tuple): - remoteEndPoint = "{}:{}".format(*remoteEndPoint) - connectionStatus = 'not verified, remote verkey unknown' - connection_last_synced = prettyDateDifference( - self.connection_last_synced) or constant.CONNECTION_NOT_SYNCHRONIZED - - if connection_last_synced != constant.CONNECTION_NOT_SYNCHRONIZED and \ - remoteEndPoint == constant.UNKNOWN_WAITING_FOR_SYNC: - remoteEndPoint = constant.NOT_AVAILABLE - - if self.isAccepted: - trustAnchorStatus = '(confirmed)' - if self.remoteVerkey is None: - remoteVerKey = constant.REMOTE_VER_KEY_SAME_AS_ID - connectionStatus = self.connection_status - - # TODO: The verkey would be same as the local identifier until we - # support key rotation - # TODO: This should be set as verkey in case of DID but need it from - # wallet - verKey = self.localVerkey if self.localVerkey else constant.SIGNER_VER_KEY_EMPTY - fixed_connection_heading = "Connection" - if not self.isAccepted: - fixed_connection_heading += " (not yet accepted)" - - # TODO: Refactor to use string interpolation - # try: - fixed_connection_items = \ - '\n' \ - 'Name: ' + self.name + '\n' \ - 'DID: ' + localIdr + '\n' \ - 'Trust anchor: ' + trustAnchor + ' ' + trustAnchorStatus + '\n' \ - 'Verification key: ' + verKey + '\n' \ - 'Signing key: ' '\n' \ - 'Remote: ' + (self.remoteIdentifier or - constant.UNKNOWN_WAITING_FOR_SYNC) + '\n' \ - 'Remote Verification key: ' + remoteVerKey + '\n' \ - 'Remote endpoint: ' + remoteEndPoint + '\n' \ - 'Request nonce: ' + self.request_nonce + '\n' \ - 'Request status: ' + connectionStatus + '\n' - - optional_connection_items = "" - if len(self.proofRequests) > 0: - optional_connection_items += "Proof Request(s): {}". 
\ - format(", ".join([cr.name for cr in self.proofRequests])) \ - + '\n' - - if self.availableClaims: - optional_connection_items += self.avail_claims_str() - - if self.connection_last_sync_no: - optional_connection_items += 'Last sync seq no: ' + \ - self.connection_last_sync_no + '\n' - - fixedEndingLines = 'Last synced: ' + connection_last_synced - - connection_items = fixed_connection_items + \ - optional_connection_items + fixedEndingLines - indented_connection_items = constant.CONNECTION_ITEM_PREFIX.join( - connection_items.splitlines()) - return fixed_connection_heading + indented_connection_items - - def avail_claims_str(self): - claim_names = [name for name, _, _ in self.availableClaims] - return "Available Claim(s): {}".\ - format(", ".join(claim_names)) + '\n' - - @staticmethod - def validate(request_data): - - def checkIfFieldPresent(msg, searchInName, fieldName): - if not msg.get(fieldName): - raise InvalidConnectionException( - "Field not found in {}: {}".format( - searchInName, fieldName)) - - checkIfFieldPresent(request_data, 'given input', 'sig') - checkIfFieldPresent(request_data, 'given input', 'connection-request') - connection_request = request_data.get("connection-request") - connection_request_req_fields = [f.IDENTIFIER.nm, NAME, NONCE] - for fn in connection_request_req_fields: - checkIfFieldPresent(connection_request, 'connection-request', fn) - - def getRemoteEndpoint(self, required=False): - if not self.remoteEndPoint and required: - raise RemoteEndpointNotFound - - if isinstance(self.remoteEndPoint, tuple): - return self.remoteEndPoint - elif isinstance(self.remoteEndPoint, str): - ip, port = self.remoteEndPoint.split(":") - return ip, int(port) - elif self.remoteEndPoint is None: - return None - else: - raise ValueError('Cannot convert endpoint {} to HA'. 
- format(self.remoteEndPoint)) - - @property - def remoteVerkey(self): - if not hasattr(self, '_remoteVerkey'): - return None - - if self._remoteVerkey is None: - return None - - # This property should be used to fetch verkey compared to - # remoteVerkey, its a more consistent name and takes care of - # abbreviated verkey - i = DidIdentity(self.remoteIdentifier, verkey=self._remoteVerkey) - - return i.verkey - - @property - def full_remote_verkey(self): - verkey = self.remoteVerkey - if verkey is None: - return None - - i = DidIdentity(self.remoteIdentifier, verkey=verkey) - full_verkey = i.full_verkey - return full_verkey - - @remoteVerkey.setter - def remoteVerkey(self, new_val): - self._remoteVerkey = new_val - - def find_available_claims(self, name=None, version=None, origin=None): - return [ac for ac in self.availableClaims - if (not name or name == ac.name) and - (not version or version == ac.version) and - (not origin or origin == ac.origin)] - - def find_available_claim(self, name=None, version=None, origin=None, - max_one=True, required=True): - _ = self.find_available_claims(name, version, origin) - assert not max_one or len(_) <= 1, \ - 'more than one matching available claim found' - if required and len(_) == 0: - raise NotFound - return _[0] if _ else None - - def find_proof_requests(self, name=None, version=None): - return [pr for pr in self.proofRequests - if (not name or name == pr.name) and - (not version or version == pr.version)] - - def find_proof_request(self, name=None, version=None, - max_one=True, required=True): - _ = self.find_proof_requests(name, version) - assert not max_one or len(_) <= 1, \ - 'more than one matching available claim found' - if required and len(_) == 0: - raise NotFound - return _[0] if _ else None diff --git a/indy_client/client/wallet/migration.py b/indy_client/client/wallet/migration.py deleted file mode 100644 index dbfa95760..000000000 --- a/indy_client/client/wallet/migration.py +++ /dev/null @@ -1,104 +0,0 @@ 
-from abc import ABCMeta - -from jsonpickle import tags - - -class BaseWalletRawMigration(metaclass=ABCMeta): - - def _traverse_dict(self, d): - for key in d: - self._traverse_object(d[key]) - - def _traverse_list(self, l): - for item in l: - self._traverse_object(item) - - def _traverse_object(self, v): - if isinstance(v, dict): - self._traverse_dict(v) - elif isinstance(v, list): - self._traverse_list(v) - - def try_apply(self, raw): - self._traverse_object(raw) - - -class TerminologyWalletRawMigration(BaseWalletRawMigration): - - _LINK_FIELD_RENAMINGS = { - 'linkStatus': 'connection_status', - 'linkLastSynced': 'connection_last_synced', - 'linkLastSyncNo': 'connection_last_sync_no', - 'invitationNonce': 'request_nonce', - - # rule for the intermediate renaming state (MGL version) - 'connectionLastSynced': 'connection_last_synced' - } - - def __process_wallet(self, wallet): - if '_links' in wallet: - wallet['_connections'] = wallet.pop('_links') - - def __process_link(self, link): - link[tags.OBJECT] = \ - 'sovrin_client.client.wallet.connection.Connection' - for key in link: - if key in self._LINK_FIELD_RENAMINGS: - link[self._LINK_FIELD_RENAMINGS[key]] = link.pop(key) - - def _traverse_dict(self, d): - if d.get(tags.OBJECT) == 'sovrin_client.client.wallet.wallet.Wallet': - self.__process_wallet(d) - if d.get(tags.OBJECT) == 'sovrin_client.client.wallet.link.Link': - self.__process_link(d) - super()._traverse_dict(d) - - -class RebrandingWalletRawMigration(BaseWalletRawMigration): - - def __process_did_methods(self, didMethods): - if 'd' in didMethods: - d = didMethods['d'] - if isinstance(d, dict) and 'sovrin' in d: - d['indy'] = d.pop('sovrin') - - def __process_did_method(self, didMethod): - if 'name' in didMethod and isinstance(didMethod['name'], str): - didMethod['name'] = \ - didMethod['name'].replace('sovrin', 'indy') - if 'pattern' in didMethod and isinstance(didMethod['pattern'], str): - didMethod['pattern'] = \ - didMethod['pattern'].replace('sovrin', 
'indy') - - def _traverse_dict(self, d): - if tags.OBJECT in d: - if d[tags.OBJECT] == 'plenum.common.did_method.DidMethods': - self.__process_did_methods(d) - if d[tags.OBJECT] == 'plenum.common.did_method.DidMethod': - self.__process_did_method(d) - - if isinstance(d[tags.OBJECT], str): - d[tags.OBJECT] = \ - d[tags.OBJECT].replace('sovrin', 'indy') - d[tags.OBJECT] = \ - d[tags.OBJECT].replace('Sovrin', 'Indy') - - super()._traverse_dict(d) - - -class MultiNetworkWalletRawMigration(BaseWalletRawMigration): - - def __process_wallet(self, wallet): - if wallet.get('env') == 'test': - wallet['env'] = 'sandbox' - - def _traverse_dict(self, d): - if d.get(tags.OBJECT) == 'indy_client.client.wallet.wallet.Wallet': - self.__process_wallet(d) - super()._traverse_dict(d) - - -def migrate_indy_wallet_raw(raw): - TerminologyWalletRawMigration().try_apply(raw) - RebrandingWalletRawMigration().try_apply(raw) - MultiNetworkWalletRawMigration().try_apply(raw) diff --git a/indy_client/client/wallet/node.py b/indy_client/client/wallet/node.py deleted file mode 100644 index ad7743e9f..000000000 --- a/indy_client/client/wallet/node.py +++ /dev/null @@ -1,27 +0,0 @@ -from plenum.common.constants import TXN_TYPE, TARGET_NYM, NODE, DATA, CURRENT_PROTOCOL_VERSION -from indy_common.generates_request import GeneratesRequest -from indy_common.types import Request -from stp_core.types import Identifier - - -class Node(GeneratesRequest): - def __init__(self, id: Identifier, data: dict, steward: Identifier): - self.id = id - self.data = data - self.steward = steward - self.seqNo = None - - def _op(self): - op = { - TXN_TYPE: NODE, - TARGET_NYM: self.id, - DATA: self.data - } - return op - - def ledgerRequest(self): - if not self.seqNo: - assert self.id is not None - return Request(identifier=self.steward, - operation=self._op(), - protocolVersion=CURRENT_PROTOCOL_VERSION) diff --git a/indy_client/client/wallet/pool_config.py b/indy_client/client/wallet/pool_config.py deleted file mode 100644 
index 4ae326ae6..000000000 --- a/indy_client/client/wallet/pool_config.py +++ /dev/null @@ -1,34 +0,0 @@ -import random - -from stp_core.types import Identifier -from plenum.common.constants import TXN_TYPE, FORCE, CURRENT_PROTOCOL_VERSION -from indy_common.generates_request import GeneratesRequest -from indy_common.constants import POOL_CONFIG, WRITES -from indy_common.types import Request - - -class PoolConfig(GeneratesRequest): - def __init__(self, trustee: Identifier, writes=True, force=False): - self.trustee = trustee - self.writes = writes - self.force = force - self.seqNo = None - - def _op(self): - op = { - TXN_TYPE: POOL_CONFIG, - WRITES: self.writes, - FORCE: self.force - } - return op - - @property - def key(self): - return '.'.join([str(self.writes), str(self.force)]) - - def ledgerRequest(self): - if not self.seqNo: - return Request(identifier=self.trustee, - operation=self._op(), - protocolVersion=CURRENT_PROTOCOL_VERSION, - reqId=random.randint(10, 100000)) diff --git a/indy_client/client/wallet/trustAnchoring.py b/indy_client/client/wallet/trustAnchoring.py deleted file mode 100644 index f9ba1a7fc..000000000 --- a/indy_client/client/wallet/trustAnchoring.py +++ /dev/null @@ -1,40 +0,0 @@ - -from indy_common.identity import Identity -from stp_core.types import Identifier - - -class TrustAnchoring: - """ - Mixin to add trust anchoring behaviors to a Wallet - """ - - def __init__(self): - self._trustAnchored = {} # type: Dict[Identifier, Identity] - - def createIdInWallet(self, idy: Identity): - if idy.identifier in self._trustAnchored: - del self._trustAnchored[idy.identifier] - self._trustAnchored[idy.identifier] = idy - - def addTrustAnchoredIdentity(self, idy: Identity): - self.createIdInWallet(idy) - self._sendIdReq(idy) - - def _sendIdReq(self, idy): - req = idy.ledgerRequest() - if req: - if not req._identifier: - req._identifier = self.defaultId - self.pendRequest(req, idy.identifier) - return len(self._pending) - - def 
updateTrustAnchoredIdentity(self, idy): - storedId = self._trustAnchored.get(idy.identifier) - if storedId: - storedId.seqNo = None - else: - self.createIdInWallet(idy) - self._sendIdReq(idy) - - def getTrustAnchoredIdentity(self, idr): - return self._trustAnchored.get(idr) diff --git a/indy_client/client/wallet/upgrade.py b/indy_client/client/wallet/upgrade.py deleted file mode 100644 index 31b690e0a..000000000 --- a/indy_client/client/wallet/upgrade.py +++ /dev/null @@ -1,51 +0,0 @@ -from stp_core.types import Identifier -from plenum.common.constants import TXN_TYPE, NAME, VERSION, FORCE, CURRENT_PROTOCOL_VERSION -from indy_common.generates_request import GeneratesRequest -from indy_common.constants import POOL_UPGRADE, ACTION, SCHEDULE, \ - SHA256, TIMEOUT, START, JUSTIFICATION, REINSTALL, APP_NAME, PACKAGE -from indy_common.types import Request - - -class Upgrade(GeneratesRequest): - def __init__(self, name: str, version: str, action: str, sha256: str, - trustee: Identifier, schedule: dict=None, timeout=None, - justification=None, force=False, reinstall=False, package=APP_NAME): - self.name = name - self.version = version - self.action = action - self.schedule = schedule - self.sha256 = sha256 - self.timeout = timeout - self.justification = justification - self.trustee = trustee - self.seqNo = None - self.force = force - self.reinstall = reinstall - self.package = package - - def _op(self): - op = { - TXN_TYPE: POOL_UPGRADE, - NAME: self.name, - VERSION: self.version, - ACTION: self.action, - SHA256: self.sha256, - FORCE: self.force, - PACKAGE: self.package, - SCHEDULE: self.schedule, - TIMEOUT: self.timeout, - JUSTIFICATION: self.justification, - REINSTALL: self.reinstall, - } - - return op - - @property - def key(self): - return '.'.join([self.name, self.version, self.action]) - - def ledgerRequest(self): - if not self.seqNo: - return Request(identifier=self.trustee, - operation=self._op(), - protocolVersion=CURRENT_PROTOCOL_VERSION) diff --git 
a/indy_client/client/wallet/wallet.py b/indy_client/client/wallet/wallet.py deleted file mode 100644 index 96a5cdb14..000000000 --- a/indy_client/client/wallet/wallet.py +++ /dev/null @@ -1,447 +0,0 @@ -import datetime -import json -import operator -from collections import OrderedDict -from collections import deque -from typing import List -from typing import Optional - -from indy_common.serialization import attrib_raw_data_serializer -from indy_common.state import domain -from ledger.util import F -from plenum.client.wallet import Wallet as PWallet -from plenum.common.did_method import DidMethods -from plenum.common.txn_util import get_seq_no, get_reply_identifier, get_reply_txntype, get_reply_nym, get_payload_data, \ - get_from -from plenum.common.util import randomString -from stp_core.common.log import getlogger -from plenum.common.constants import TXN_TYPE, TARGET_NYM, DATA, \ - IDENTIFIER, NYM, ROLE, VERKEY, NODE, NAME, VERSION, ORIGIN, CURRENT_PROTOCOL_VERSION, RAW, ENC, HASH -from plenum.common.types import f - -from indy_client.client.wallet.attribute import Attribute, AttributeKey, \ - LedgerStore -from indy_client.client.wallet.connection import Connection -from indy_client.client.wallet.node import Node -from indy_client.client.wallet.trustAnchoring import TrustAnchoring -from indy_client.client.wallet.upgrade import Upgrade -from indy_client.client.wallet.pool_config import PoolConfig -from indy_common.did_method import DefaultDidMethods -from indy_common.exceptions import ConnectionNotFound -from indy_common.types import Request -from indy_common.identity import Identity -from indy_common.constants import ATTRIB, GET_TXNS, GET_ATTR, \ - GET_NYM, POOL_UPGRADE, GET_SCHEMA, GET_CLAIM_DEF, POOL_CONFIG, CLAIM_DEF_TAG, \ - CLAIM_DEF_SIGNATURE_TYPE, CLAIM_DEF_SCHEMA_REF, CLAIM_DEF_FROM, CLAIM_DEF_TAG_DEFAULT -from stp_core.types import Identifier - -ENCODING = "utf-8" - -logger = getlogger() - - -# TODO: Maybe we should have a thinner wallet which should not 
have -# ProverWallet -class Wallet(PWallet, TrustAnchoring): - - clientNotPresentMsg = "The wallet does not have a client associated with it" - - def __init__(self, - name: str=None, - supportedDidMethods: DidMethods=None): - PWallet.__init__(self, - name, - supportedDidMethods or DefaultDidMethods) - TrustAnchoring.__init__(self) - - self._attributes = {} # type: Dict[(str, Identifier, - # Optional[Identifier]), Attribute] - - self.env = None # Helps to know associated environment - self._nodes = {} - self._upgrades = {} - self._pconfigs = {} - - self._connections = OrderedDict() # type: Dict[str, Connection] - # Note, ordered dict to make iteration deterministic - - self.knownIds = {} # type: Dict[str, Identifier] - - # transactions not yet submitted - self._pending = deque() # type Tuple[Request, Tuple[str, Identifier, - # Optional[Identifier]] - - # pending transactions that have been prepared (probably submitted) - self._prepared = {} # type: Dict[(Identifier, int), Request] - self.lastKnownSeqs = {} # type: Dict[str, int] - - self.replyHandler = { - ATTRIB: self._attribReply, - GET_ATTR: self._getAttrReply, - NYM: self._nymReply, - GET_NYM: self._getNymReply, - GET_TXNS: self._getTxnsReply, - NODE: self._nodeReply, - POOL_UPGRADE: self._poolUpgradeReply, - POOL_CONFIG: self._poolConfigReply - } - - @property - def pendingCount(self): - return len(self._pending) - - @staticmethod - def _isMatchingName(needle, haystack): - return needle.lower() in haystack.lower() - - # TODO: The names getMatchingLinksWithAvailableClaim and - # getMatchingLinksWithReceivedClaim should be fixed. Difference between - # `AvailableClaim` and `ReceivedClaim` is that for ReceivedClaim we - # have attribute values from issuer. 
- - # TODO: Few of the below methods have duplicate code, need to refactor it - def getMatchingConnectionsWithAvailableClaim(self, claimName=None): - matchingConnectionsAndAvailableClaim = [] - for k, li in self._connections.items(): - for cl in li.availableClaims: - if not claimName or Wallet._isMatchingName(claimName, cl[0]): - matchingConnectionsAndAvailableClaim.append((li, cl)) - return matchingConnectionsAndAvailableClaim - - def findAllProofRequests(self, claimReqName, connectionName=None): - matches = [] - for k, li in self._connections.items(): - for cpr in li.proofRequests: - if Wallet._isMatchingName(claimReqName, cpr.name): - if connectionName is None or Wallet._isMatchingName( - connectionName, li.name): - matches.append((li, cpr)) - return matches - - def getMatchingConnectionsWithProofReq( - self, proofReqName, connectionName=None): - matchingConnectionAndProofReq = [] - for k, li in self._connections.items(): - for cpr in li.proofRequests: - if Wallet._isMatchingName(proofReqName, cpr.name): - if connectionName is None or Wallet._isMatchingName( - connectionName, li.name): - matchingConnectionAndProofReq.append((li, cpr)) - return matchingConnectionAndProofReq - - def addAttribute(self, attrib: Attribute): - """ - Used to create a new attribute on Indy - :param attrib: attribute to add - :return: number of pending txns - """ - self._attributes[attrib.key()] = attrib - req = attrib.ledgerRequest() - if req: - self.pendRequest(req, attrib.key()) - return len(self._pending) - - def addNode(self, node: Node): - """ - Used to add a new node on Indy - :param node: Node - :return: number of pending txns - """ - self._nodes[node.id] = node - req = node.ledgerRequest() - if req: - self.pendRequest(req, node.id) - return len(self._pending) - - def doPoolUpgrade(self, upgrade: Upgrade): - """ - Used to send a new code upgrade - :param upgrade: upgrade data - :return: number of pending txns - """ - key = upgrade.key - self._upgrades[key] = upgrade - req = 
upgrade.ledgerRequest() - if req: - self.pendRequest(req, key) - return len(self._pending) - - def doPoolConfig(self, pconfig: PoolConfig): - """ - Used to send a new code upgrade - :param PoolConfig: upgrade data - :return: number of pending txns - """ - key = pconfig.key - self._pconfigs[key] = pconfig - req = pconfig.ledgerRequest() - if req: - self.pendRequest(req, key) - return len(self._pending) - - def hasAttribute(self, key: AttributeKey) -> bool: - """ - Checks if attribute is present in the wallet - @param key: Attribute unique key - @return: - """ - return bool(self.getAttribute(key)) - - def getAttribute(self, key: AttributeKey): - return self._attributes.get(key.key()) - - def getNode(self, id: Identifier): - return self._nodes.get(id) - - def getPoolUpgrade(self, key: str): - return self._upgrades.get(key) - - def getPoolConfig(self, key: str): - return self._pconfigs.get(key) - - def getAttributesForNym(self, idr: Identifier): - return [a for a in self._attributes.values() if a.dest == idr] - - def addConnection(self, connection: Connection): - self._connections[connection.key] = connection - - def addLastKnownSeqs(self, identifier, seqNo): - self.lastKnownSeqs[identifier] = seqNo - - def getLastKnownSeqs(self, identifier): - return self.lastKnownSeqs.get(identifier) - - def pendSyncRequests(self): - # pendingTxnsReqs = self.getPendingTxnRequests() - # for req in pendingTxnsReqs: - # self.pendRequest(req) - - # GET_TXNS is discontinued - pass - - def preparePending(self, limit=None): - new = {} - count = 0 - while self._pending and (limit is None or count < limit): - req, key = self._pending.pop() - sreq = self.signRequest(req) - new[req.identifier, req.reqId] = sreq, key - count += 1 - self._prepared.update(new) - # Return request in the order they were submitted - return sorted([req for req, _ in new.values()], - key=operator.attrgetter("reqId")) - - def handleIncomingReply(self, observer_name, reqId, frm, result, - numReplies): - """ - Called by 
an external entity, like a Client, to notify of incoming - replies - :return: - """ - preparedReq = self._prepared.get(get_reply_identifier(result), reqId) - if not preparedReq: - raise RuntimeError('no matching prepared value for {},{}'. - format(get_reply_identifier(result), reqId)) - typ = get_reply_txntype(result) - if typ and typ in self.replyHandler: - self.replyHandler[typ](result, preparedReq) - # else: - # raise NotImplementedError('No handler for {}'.format(typ)) - - def _attrib_data_from_reply(self, result): - dt = get_payload_data(result) - dest = dt[TARGET_NYM] - origin = get_from(result) - val = None - if RAW in dt: - val = json.loads(dt[RAW]) - elif ENC in dt: - val = dt[ENC] - elif HASH in dt: - val = dt[HASH] - return origin, dest, val - - def _attribReply(self, result, preparedReq): - origin, dest, val = self._attrib_data_from_reply(result) - for attrib in self.getAttributesForNym(dest): - attrib_val = attrib.value - if attrib.ledgerStore == LedgerStore.RAW: - attrib_val = json.loads(attrib_val) - if attrib.origin == origin and attrib_val == val: - attrib.seqNo = get_seq_no(result) - - def _getAttrReply(self, result, preparedReq): - # TODO: Confirm if we need to add the retrieved attribute to the wallet. - # If yes then change the graph query on node to return the sequence - # number of the attribute txn too. 
- attr_type, attr_key = domain._extract_attr_typed_value(result) - for attrib in self.getAttributesForNym(result[TARGET_NYM]): - if attrib.name == attr_key: - attrib.seqNo = result[f.SEQ_NO.nm] - attrib.value = result[DATA] - if attr_type == 'raw': - attrib.value = attrib_raw_data_serializer.deserialize(attrib.value) - attrib.value = attrib_raw_data_serializer.serialize(attrib.value, toBytes=False) - - def _nymReply(self, result, preparedReq): - target = get_reply_nym(result) - idy = self._trustAnchored.get(target) - if idy: - idy.seqNo = get_seq_no(result) - else: - logger.warning( - "Target {} not found in trust anchored".format(target)) - - def _nodeReply(self, result, preparedReq): - _, nodeKey = preparedReq - node = self.getNode(nodeKey) - node.seqNo = get_seq_no(result) - - def _poolUpgradeReply(self, result, preparedReq): - _, upgKey = preparedReq - upgrade = self.getPoolUpgrade(upgKey) - upgrade.seqNo = get_seq_no(result) - - def _poolConfigReply(self, result, preparedReq): - _, cfgKey = preparedReq - pconf = self.getPoolConfig(cfgKey) - pconf.seqNo = get_seq_no(result) - - def _getNymReply(self, result, preparedReq): - jsonData = result.get(DATA) - if jsonData: - data = json.loads(jsonData) - nym = data.get(TARGET_NYM) - idy = self.knownIds.get(nym) - if idy: - idy.role = data.get(ROLE) or None - idy.trustAnchor = data.get(f.IDENTIFIER.nm) - idy.last_synced = datetime.datetime.utcnow() - idy.verkey = data.get(VERKEY) - # TODO: THE GET_NYM reply should contain the sequence number of - # the NYM transaction - - def _getTxnsReply(self, result, preparedReq): - # TODO - pass - - def pendRequest(self, req, key=None): - self._pending.appendleft((req, key)) - - def getConnectionInvitation(self, name: str): - return self._connections.get(name) - - def getMatchingConnections(self, name: str) -> List[Connection]: - allMatched = [] - for k, v in self._connections.items(): - if self._isMatchingName(name, k): - allMatched.append(v) - return allMatched - - # TODO: sender 
by default should be `self.defaultId` - def requestAttribute(self, attrib: Attribute, sender): - """ - Used to get a raw attribute from Indy - :param attrib: attribute to add - :return: number of pending txns - """ - self._attributes[attrib.key()] = attrib - req = attrib.getRequest(sender) - if req: - return self.prepReq(req, key=attrib.key()) - - def requestSchema(self, nym, name, version, sender): - """ - Used to get a schema from Indy - :param nym: nym that schema is attached to - :param name: name of schema - :param version: version of schema - :return: req object - """ - operation = {TARGET_NYM: nym, - TXN_TYPE: GET_SCHEMA, - DATA: {NAME: name, - VERSION: version} - } - - req = Request(sender, - operation=operation, - protocolVersion=CURRENT_PROTOCOL_VERSION) - return self.prepReq(req) - - def requestClaimDef(self, seqNo, signature, sender): - """ - Used to get a claim def from Indy - :param seqNo: reference number of schema - :param signature: CL is only supported option currently - :return: req object - """ - operation = {TXN_TYPE: GET_CLAIM_DEF, - CLAIM_DEF_FROM: sender, - CLAIM_DEF_SCHEMA_REF: seqNo, - CLAIM_DEF_SIGNATURE_TYPE: signature, - CLAIM_DEF_TAG: CLAIM_DEF_TAG_DEFAULT - } - - req = Request(sender, - operation=operation, - protocolVersion=CURRENT_PROTOCOL_VERSION) - return self.prepReq(req) - - # TODO: sender by default should be `self.defaultId` - def requestIdentity(self, identity: Identity, sender): - # Used to get a nym from Indy - self.knownIds[identity.identifier] = identity - req = identity.getRequest(sender) - if req: - return self.prepReq(req) - - def prepReq(self, req, key=None): - self.pendRequest(req, key=key) - return self.preparePending(limit=1)[0] - - def getConnection(self, name, required=False) -> Connection: - con_name = self._connections.get(name) - if not con_name and required: - logger.debug("Wallet has connections {}".format(self._connections)) - raise ConnectionNotFound(name) - return con_name - - def getConnectionBy(self, - 
remote: Identifier=None, - nonce=None, - internalId=None, - required=False) -> Optional[Connection]: - for _, li in self._connections.items(): - if (not remote or li.remoteIdentifier == remote) and \ - (not nonce or li.request_nonce == nonce) and \ - (not internalId or li.internalId == internalId): - return li - if required: - raise ConnectionNotFound - - def getIdentity(self, idr): - # TODO, Question: Should it consider self owned identities too or - # should it just have identities that are retrieved from the DL - return self.knownIds.get(idr) - - def getConnectionNames(self): - return list(self._connections.keys()) - - def build_attrib(self, nym, raw=None, enc=None, hsh=None): - assert int(bool(raw)) + int(bool(enc)) + int(bool(hsh)) == 1 - if raw: - store = LedgerStore.RAW - data = raw - elif enc: - store = LedgerStore.ENC - data = enc - elif hsh: - store = LedgerStore.HASH - data = hsh - else: - raise RuntimeError('One of raw, enc, or hash are required.') - - return Attribute(randomString(5), data, self.defaultId, - dest=nym, ledgerStore=store) diff --git a/indy_client/persistence/client_req_rep_store.py b/indy_client/persistence/client_req_rep_store.py deleted file mode 100644 index 1c86934e3..000000000 --- a/indy_client/persistence/client_req_rep_store.py +++ /dev/null @@ -1,18 +0,0 @@ -from abc import abstractmethod - -from plenum.persistence.client_req_rep_store import ClientReqRepStore as \ - PClientReqRepStore - - -class ClientReqRepStore(PClientReqRepStore): - @abstractmethod - def __init__(self, *args, **kwargs): - pass - - @abstractmethod - def setLastTxnForIdentifier(self, identifier, value: str): - pass - - @abstractmethod - def getLastTxnForIdentifier(self, identifier): - pass diff --git a/indy_client/persistence/client_req_rep_store_file.py b/indy_client/persistence/client_req_rep_store_file.py deleted file mode 100644 index 064245081..000000000 --- a/indy_client/persistence/client_req_rep_store_file.py +++ /dev/null @@ -1,41 +0,0 @@ -import json 
-import os - -from plenum.common.util import updateFieldsWithSeqNo -from plenum.persistence.client_req_rep_store_file import ClientReqRepStoreFile \ - as PClientReqRepStoreFile - -from indy_common.txn_util import getTxnOrderedFields - - -class ClientReqRepStoreFile(PClientReqRepStoreFile): - def __init__(self, dataLocation): - super().__init__(dataLocation) - self.lastTxnsFileName = "last_txn_for_id" - - @property - def txnFieldOrdering(self): - fields = getTxnOrderedFields() - return updateFieldsWithSeqNo(fields) - - def setLastTxnForIdentifier(self, identifier, value: str): - filePath = os.path.join(self.dataLocation, self.lastTxnsFileName) - if not os.path.exists(filePath): - open(filePath, 'w').close() - with open(filePath, "r+") as f: - data = f.read().strip() - data = json.loads(data) if data else {} - data[identifier] = value - f.seek(0) - f.write(json.dumps(data)) - - def getLastTxnForIdentifier(self, identifier): - try: - data = {} - with open(os.path.join(self.dataLocation, self.lastTxnsFileName), - "r") as f: - data = f.read().strip() - data = json.loads(data) if data else {} - return data.get(identifier) - except FileNotFoundError: - return None diff --git a/indy_client/persistence/client_txn_log.py b/indy_client/persistence/client_txn_log.py deleted file mode 100644 index 87d52b4e4..000000000 --- a/indy_client/persistence/client_txn_log.py +++ /dev/null @@ -1,25 +0,0 @@ -from typing import List - -from plenum.common.txn_util import get_type -from plenum.common.util import updateFieldsWithSeqNo -from plenum.persistence.client_txn_log import ClientTxnLog as PClientTxnLog - -from indy_common.txn_util import getTxnOrderedFields - - -class ClientTxnLog(PClientTxnLog): - - @property - def txnFieldOrdering(self): - fields = getTxnOrderedFields() - return updateFieldsWithSeqNo(fields) - - def getTxnsByType(self, txnType: str) -> List: - txns = [] - for val in self.transactionLog.iterator(include_key=False, - include_value=True): - txn = 
self.serializer.deserialize( - val, fields=self.txnFieldOrdering) - if get_type(txn) == txnType: - txns.append(txn) - return txns diff --git a/indy_client/script_helper.py b/indy_client/script_helper.py deleted file mode 100644 index 9cf2d68bb..000000000 --- a/indy_client/script_helper.py +++ /dev/null @@ -1,32 +0,0 @@ -import os -import shutil - -from plenum.common.util import randomString - -keepFilesInClientReset = [ - 'pool_transactions_sandbox', - 'indy_config.py', - 'sample', - 'pool_transactions_local', - 'pool_transactions_live' -] - - -def performIndyBaseDirCleanup(baseDir): - backupDir = None - while True: - backupDir = baseDir + "-" + randomString(6) - if not os.path.exists(backupDir): - shutil.copytree(baseDir, backupDir) - print("\nIndy base directory {} backed up at: {}". - format(baseDir, backupDir)) - break - - for filename in os.listdir(baseDir): - filepath = os.path.join(baseDir, filename) - if filename not in keepFilesInClientReset: - if os.path.isdir(filepath): - shutil.rmtree(filepath) - else: - os.remove(filepath) - return backupDir diff --git a/indy_client/test/__init__.py b/indy_client/test/__init__.py deleted file mode 100644 index c0b8f3464..000000000 --- a/indy_client/test/__init__.py +++ /dev/null @@ -1,7 +0,0 @@ -def run(): - import pytest - pytest.main() - - -if __name__ == "__main__": - run() diff --git a/indy_client/test/__main__.py b/indy_client/test/__main__.py deleted file mode 100644 index ebd27b440..000000000 --- a/indy_client/test/__main__.py +++ /dev/null @@ -1,3 +0,0 @@ -from indy_client import test - -test.run() diff --git a/indy_client/test/agent/__init__.py b/indy_client/test/agent/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/indy_client/test/agent/acme.py b/indy_client/test/agent/acme.py deleted file mode 100644 index 1d41b8d6f..000000000 --- a/indy_client/test/agent/acme.py +++ /dev/null @@ -1,187 +0,0 @@ -import os - -from indy_common.config_util import getConfig -from 
plenum.common.signer_did import DidSigner -from indy_client.agent.helper import bootstrap_schema -from indy_client.client.wallet.wallet import Wallet -from stp_core.common.log import getlogger -from indy_client.agent.runnable_agent import RunnableAgent -from indy_client.agent.agent import create_client -from indy_client.test.agent.mock_backend_system import MockBackendSystem - -from indy_client.agent.walleted_agent import WalletedAgent -from indy_client.test.constants import primes -from indy_client.test.agent.helper import buildAcmeWallet -from indy_client.test.client.TestClient import TestClient - -from anoncreds.protocol.types import AttribType, AttribDef, ID - -logger = getlogger() - -schema_id = None - -ACME_SEED = b'Acme0000000000000000000000000000' -ACME_ID = DidSigner(seed=ACME_SEED).identifier -ACME_VERKEY = DidSigner(seed=ACME_SEED).verkey -ACME_SIGNER = DidSigner(seed=ACME_SEED) - - -class AcmeAgent(WalletedAgent): - async def postProofVerif(self, claimName, link, frm): - if claimName == "Job-Application": - - for schema in await self.issuer.wallet.getAllSchemas(): - - if schema.name == 'Job-Certificate': - await self._set_available_claim_by_internal_id(link.internalId, - ID(schemaKey=schema.getKey(), - schemaId=schema.seqId)) - - claims = self.get_available_claim_list(link) - self.sendNewAvailableClaimsData(claims, frm, link) - - -def create_acme(name=None, wallet=None, base_dir_path=None, - port=6666, client=None): - if client is None: - client = create_client(base_dir_path=base_dir_path, client_class=TestClient) - - endpoint_args = {'onlyListener': True} - if wallet: - endpoint_args['seed'] = wallet._signerById(wallet.defaultId).seed - else: - wallet = Wallet(name) - wallet.addIdentifier(signer=ACME_SIGNER) - endpoint_args['seed'] = ACME_SEED - - agent = AcmeAgent(name=name or "Acme Corp", - basedirpath=base_dir_path, - client=client, - wallet=wallet, - port=port, - endpointArgs=endpoint_args) - - # maps request nonces to internal ids - agent._invites 
= { - "57fbf9dc8c8e6acde33de98c6d747b28c": (1, "Alice"), - "3a2eb72eca8b404e8d412c5bf79f2640": (2, "Carol"), - "8513d1397e87cada4214e2a650f603eb": (3, "Frank"), - "810b78be79f29fc81335abaa4ee1c5e8": (4, "Bob") - } - - job_cert_def = AttribDef('Job-Certificate', - [AttribType('first_name', encode=True), - AttribType('last_name', encode=True), - AttribType('employee_status', encode=True), - AttribType('experience', encode=True), - AttribType('salary_bracket', encode=True)]) - - job_appl_def = AttribDef('Job-Application', - [AttribType('first_name', encode=True), - AttribType('last_name', encode=True), - AttribType('phone_number', encode=True), - AttribType('degree', encode=True), - AttribType('status', encode=True), - AttribType('ssn', encode=True)]) - - agent.add_attribute_definition(job_cert_def) - agent.add_attribute_definition(job_appl_def) - - backend = MockBackendSystem(job_cert_def) - backend.add_record(1, - first_name="Alice", - last_name="Garcia", - employee_status="Permanent", - experience="3 years", - salary_bracket="between $50,000 to $100,000") - - backend.add_record(2, - first_name="Carol", - last_name="Atkinson", - employee_status="Permanent", - experience="2 years", - salary_bracket="between $60,000 to $90,000") - - backend.add_record(3, - first_name="Frank", - last_name="Jeffrey", - employee_status="Temporary", - experience="4 years", - salary_bracket="between $40,000 to $80,000") - - backend.add_record(4, - first_name="Bob", - last_name="Richards", - employee_status="On Contract", - experience="3 years", - salary_bracket="between $50,000 to $70,000") - - agent.set_issuer_backend(backend) - - agent._proofRequestsSchema = { - "Job-Application-v0.2": { - "name": "Job-Application", - "version": "0.2", - "attributes": { - "first_name": "string", - "last_name": "string", - "phone_number": "string", - "degree": "string", - "status": "string", - "ssn": "string" - }, - "verifiableAttributes": ["degree", "status", "ssn"] - }, - "Job-Application-v0.3": { - 
"name": "Job-Application-2", - "version": "0.3", - "attributes": { - "first_name": "string", - "last_name": "string", - "phone_number": "string", - "degree": "string", - "status": "string", - "ssn": "string" - }, - "verifiableAttributes": ["degree", "status"] - } - } - - return agent - - -async def bootstrap_acme(agent): - await bootstrap_schema(agent, - 'Job-Certificate', - 'Job-Certificate', - '0.2', - primes["prime1"][0], - primes["prime1"][1]) - - await bootstrap_schema(agent, - 'Job-Application', - 'Job-Application', - '0.2', - primes["prime2"][0], - primes["prime2"][1]) - - -if __name__ == "__main__": - args = RunnableAgent.parser_cmd_args() - name = 'Acme Corp' - port = args.port - if port is None: - port = 6666 - network = args.network or 'sandbox' - with_cli = args.withcli - - config = getConfig() - base_dir_path = os.path.expanduser( - os.path.join( - config.CLI_NETWORK_DIR, network - )) - - agent = create_acme(name=name, wallet=buildAcmeWallet(), - base_dir_path=base_dir_path, port=port) - RunnableAgent.run_agent(agent, bootstrap=bootstrap_acme(agent), - with_cli=with_cli) diff --git a/indy_client/test/agent/agent_wallet_from_minimal_go_live b/indy_client/test/agent/agent_wallet_from_minimal_go_live deleted file mode 100755 index 34ee1201e..000000000 --- a/indy_client/test/agent/agent_wallet_from_minimal_go_live +++ /dev/null @@ -1,1195 +0,0 @@ -{ - "_trustAnchored": { - - }, - "defaultId": "EqwgnuqynKnRRQ6q5Lzov8", - "_prepared": { - "json://{\"py/tuple\": [\"EqwgnuqynKnRRQ6q5Lzov8\", 1503393560772137]}": { - "py/tuple": [{ - "py/object": "sovrin_common.types.Request", - "py/state": { - "identifier": "EqwgnuqynKnRRQ6q5Lzov8", - "reqId": 1503393560772137, - "signature": "2V7Wn6ttVs2duQdwk3PQcy7QWepsEJP5JfvCYBueMrFtvzxdPfFVu6FDT7rbryWrBxELqudnDp4fgvaYVKFGk1rA", - "digest": "a9e2ee0f09931317a70d66d9c4b7ec60239207ce761c2110d61960401c940aac", - "operation": { - "type": "105", - "dest": "ULtgFQJe6bjiFbs7ke3NJD" - } - } - }, - null] - }, - 
"json://{\"py/tuple\": [\"EqwgnuqynKnRRQ6q5Lzov8\", 1503393560775165]}": { - "py/tuple": [{ - "py/object": "sovrin_common.types.Request", - "py/state": { - "identifier": "EqwgnuqynKnRRQ6q5Lzov8", - "reqId": 1503393560775165, - "signature": "5e3bm44RSwmCfY1YxdcP9RYnqk6x87rrnqBYBK3hcZpinR5CwcSsWipjoJmgApfDVaNLQ9MPuPrRyLSMd5LZeiWB", - "digest": "00bdd941ab3aba82db9823088400bf1cf5f9bfa7fe32ea41b440a864e2cd02c6", - "operation": { - "raw": "endpoint", - "type": "104", - "dest": "ULtgFQJe6bjiFbs7ke3NJD" - } - } - }, - { - "py/tuple": ["endpoint", - null, - "ULtgFQJe6bjiFbs7ke3NJD"] - }] - }, - "json://{\"py/tuple\": [\"EqwgnuqynKnRRQ6q5Lzov8\", 1503394271312644]}": { - "py/tuple": [{ - "py/object": "sovrin_common.types.Request", - "py/state": { - "identifier": "EqwgnuqynKnRRQ6q5Lzov8", - "reqId": 1503394271312644, - "signature": "4gswnonQKmTMBgEPHkoJcV7PSoQaBZmGE9AbEvxEXhDyvNCUCXiv68MndpSnLqPLTdVeqgX83xEKzeqCdH8B8rv7", - "digest": "12bbf4070b7947ff04a7c8fcd14d49a5c9fb155fbe57f2a07bfab83d776cf07f", - "operation": { - "type": "105", - "dest": "H2aKRiDeq8aLZSydQMDbtf" - } - } - }, - null] - }, - "json://{\"py/tuple\": [\"EqwgnuqynKnRRQ6q5Lzov8\", 1503394162422414]}": { - "py/tuple": [{ - "py/object": "sovrin_common.types.Request", - "py/state": { - "identifier": "EqwgnuqynKnRRQ6q5Lzov8", - "reqId": 1503394162422414, - "signature": "5zjYPBSXPARb2SafWMPWo7gtxZ4HaFHJ4FLZ6phSdbRFEvXetSpYyeqjMZfDtL9czsh8tKiHbyA2Y6Qhkbvypymu", - "digest": "bb97a580530e3c6e12d905c71f03355eeaa7702858c38d1912db02afeaa18782", - "operation": { - "signature_type": "CL", - "origin": "CzkavE58zgX7rUMrzSinLr", - "ref": 25, - "type": "108" - } - } - }, - null] - }, - "json://{\"py/tuple\": [\"EqwgnuqynKnRRQ6q5Lzov8\", 1503393863127405]}": { - "py/tuple": [{ - "py/object": "sovrin_common.types.Request", - "py/state": { - "identifier": "EqwgnuqynKnRRQ6q5Lzov8", - "reqId": 1503393863127405, - "signature": "ytjpREq53xGrQ6BrKWA1waHCUx9szFxTcKLtdikjJimBiFLDE9vAkPw8WqX86xqiQJCEUM2ovBLcPAbEZGRGK4L", - "digest": 
"1963349fd52a613ed6144a7e06c26bc11112ed0dd8dafa115c9f5d6a788e16b3", - "operation": { - "type": "107", - "dest": "ULtgFQJe6bjiFbs7ke3NJD", - "data": { - "version": "1.2", - "name": "Transcript" - } - } - } - }, - null] - }, - "json://{\"py/tuple\": [\"EqwgnuqynKnRRQ6q5Lzov8\", 1503393880969848]}": { - "py/tuple": [{ - "py/object": "sovrin_common.types.Request", - "py/state": { - "identifier": "EqwgnuqynKnRRQ6q5Lzov8", - "reqId": 1503393880969848, - "signature": "3rrEpTStmVjn3Xvbdpr43TUo2TKynjK9V9DrvCphgfm4dkpn8qHtkP3NvBXVvp7ACfBYRMZnjLZnhEafb3csqs9v", - "digest": "e33da6465da0ac0e5282bee3723f6aff9a9f78e77537394e5e3e1248919b2340", - "operation": { - "signature_type": "CL", - "origin": "ULtgFQJe6bjiFbs7ke3NJD", - "ref": 23, - "type": "108" - } - } - }, - null] - }, - "json://{\"py/tuple\": [\"EqwgnuqynKnRRQ6q5Lzov8\", 1503393964832777]}": { - "py/tuple": [{ - "py/object": "sovrin_common.types.Request", - "py/state": { - "identifier": "EqwgnuqynKnRRQ6q5Lzov8", - "reqId": 1503393964832777, - "signature": "48Di1kbwoptkT94K5fioucDbGrityYieHXimQMeFdokVE9MJt9SPRmFjvpYGANwneZ9nr2Tz88PXygF9kfmvHfwk", - "digest": "114fa090c927acb2457a3725a455e4b371b2a89775efa0eaba1f7ad8b429c236", - "operation": { - "raw": "endpoint", - "type": "104", - "dest": "CzkavE58zgX7rUMrzSinLr" - } - } - }, - { - "py/tuple": ["endpoint", - null, - "CzkavE58zgX7rUMrzSinLr"] - }] - }, - "json://{\"py/tuple\": [\"EqwgnuqynKnRRQ6q5Lzov8\", 1503393966193786]}": { - "py/tuple": [{ - "py/object": "sovrin_common.types.Request", - "py/state": { - "identifier": "EqwgnuqynKnRRQ6q5Lzov8", - "reqId": 1503393966193786, - "signature": "HVANkfC1eBJWciFLbFR2UbrAtfx2hefLiMsnDDRecsENZ2qFWYhrJTc2EEtxsMiQpsg6fmhVk89Fgywbqj34eNs", - "digest": "2c7739b9745fdacc4c1b1f52ac357ca71ff0c31fad357070a6fc4d7c16349bc5", - "operation": { - "type": "105", - "dest": "CkQZNWaAQSLyY5iqw6KTx" - } - } - }, - null] - }, - "json://{\"py/tuple\": [\"EqwgnuqynKnRRQ6q5Lzov8\", 1503393562115741]}": { - "py/tuple": [{ - "py/object": 
"sovrin_common.types.Request", - "py/state": { - "identifier": "EqwgnuqynKnRRQ6q5Lzov8", - "reqId": 1503393562115741, - "signature": "22PH7P298PC5BokAKrGHPrFGTnMAVB5ktZnER7hKo93g9HcucVpwiERpFRFb5HPT5eyoK1bbDxN4gtBnqpQ3nTmR", - "digest": "4cde0c80da76289617c5bb0a1780713e6367f90076f4027dda172796323abf09", - "operation": { - "type": "105", - "dest": "BpbaEBJ16MoXpv7GU4y9u1" - } - } - }, - null] - }, - "json://{\"py/tuple\": [\"EqwgnuqynKnRRQ6q5Lzov8\", 1503393964831448]}": { - "py/tuple": [{ - "py/object": "sovrin_common.types.Request", - "py/state": { - "identifier": "EqwgnuqynKnRRQ6q5Lzov8", - "reqId": 1503393964831448, - "signature": "2Z1tTLCvBQcNvpv6UysYjr3VRWPoyP6bNG9ou1kPjFxhJzsDSNhuwTXzQfnuGedfwSoE2QaWsrowdFfZxSTmDtvu", - "digest": "2d2ec6bcc0cfcd3080622ec01fe7e11c37cd3352bb7dd6d8f3559e4b2f12d091", - "operation": { - "type": "105", - "dest": "CzkavE58zgX7rUMrzSinLr" - } - } - }, - null] - }, - "json://{\"py/tuple\": [\"EqwgnuqynKnRRQ6q5Lzov8\", 1503394272580665]}": { - "py/tuple": [{ - "py/object": "sovrin_common.types.Request", - "py/state": { - "identifier": "EqwgnuqynKnRRQ6q5Lzov8", - "reqId": 1503394272580665, - "signature": "4eXTotvTstApaasdjaY7jxWytSrEd6WK1Sd7cmx7KmtPTubboPP84TXN3BS1c8HGn7ZkgNfj6hbMKCDPVYpZoS1p", - "digest": "071ca10cac748707babdff1554610db1b78b018b30f3d70f84fde3df3dfbd903", - "operation": { - "type": "105", - "dest": "6KYi5Pb7rcvJb8ZwKb2nvm" - } - } - }, - null] - }, - "json://{\"py/tuple\": [\"EqwgnuqynKnRRQ6q5Lzov8\", 1503394135766499]}": { - "py/tuple": [{ - "py/object": "sovrin_common.types.Request", - "py/state": { - "identifier": "EqwgnuqynKnRRQ6q5Lzov8", - "reqId": 1503394135766499, - "signature": "4D63kic9Yktbk9ETeNCaRRptCoBb7ULR6Qak4vWpCeFX3RSW3V8odA3idxqHipupuez8iCvYS4rbfrnAjTMwaZm3", - "digest": "610d3a81fff0444906a729662386396f22307f3270b012a48264b8a3e04d6783", - "operation": { - "type": "107", - "dest": "CzkavE58zgX7rUMrzSinLr", - "data": { - "version": "0.2", - "name": "Job-Certificate" - } - } - } - }, - null] - }, - 
"json://{\"py/tuple\": [\"EqwgnuqynKnRRQ6q5Lzov8\", 1503394271313999]}": { - "py/tuple": [{ - "py/object": "sovrin_common.types.Request", - "py/state": { - "identifier": "EqwgnuqynKnRRQ6q5Lzov8", - "reqId": 1503394271313999, - "signature": "GH5bFtcozc7mXX1QxmhE3BLnGrfyVk9GPUvCpNHwQzg3yd3YbL7p17PiYJjx3U6JtjFPiNhSnTb53JRDePpfo45", - "digest": "e6a5dcd5794ef39b40aaca7e7084dbbf7123a5a0b9fb2a7f8b1d7598733d2f0a", - "operation": { - "raw": "endpoint", - "type": "104", - "dest": "H2aKRiDeq8aLZSydQMDbtf" - } - } - }, - { - "py/tuple": ["endpoint", - null, - "H2aKRiDeq8aLZSydQMDbtf"] - }] - } - }, - "knownIds": { - "CkQZNWaAQSLyY5iqw6KTx": { - "last_synced": { - "__reduce__": [{ - "py/type": "datetime.datetime" - }, - ["B+EIFgkaBgS9vQ=="]], - "py/object": "datetime.datetime" - }, - "identity": { - "_verkey": "EbzRNZFQKP5jsXrKXrLWkC", - "py/object": "plenum.common.signer_did.DidIdentity", - "abbreviated": true, - "_identifier": "CkQZNWaAQSLyY5iqw6KTx" - }, - "trustAnchor": "CzkavE58zgX7rUMrzSinLr", - "_role": null, - "seqNo": null, - "py/object": "sovrin_common.identity.Identity" - }, - "6KYi5Pb7rcvJb8ZwKb2nvm": { - "last_synced": { - "__reduce__": [{ - "py/type": "datetime.datetime" - }, - ["B+EIFgkfDAqr5Q=="]], - "py/object": "datetime.datetime" - }, - "identity": { - "_verkey": "6BFmCCvvtJBjYtYLCDVj37", - "py/object": "plenum.common.signer_did.DidIdentity", - "abbreviated": true, - "_identifier": "6KYi5Pb7rcvJb8ZwKb2nvm" - }, - "trustAnchor": "H2aKRiDeq8aLZSydQMDbtf", - "_role": null, - "seqNo": null, - "py/object": "sovrin_common.identity.Identity" - }, - "BpbaEBJ16MoXpv7GU4y9u1": { - "last_synced": { - "__reduce__": [{ - "py/type": "datetime.datetime" - }, - ["B+EIFgkTFgPa5g=="]], - "py/object": "datetime.datetime" - }, - "identity": { - "_verkey": "JkkV9B3Z2vsKWwThFLG2cN", - "py/object": "plenum.common.signer_did.DidIdentity", - "abbreviated": true, - "_identifier": "BpbaEBJ16MoXpv7GU4y9u1" - }, - "trustAnchor": "ULtgFQJe6bjiFbs7ke3NJD", - "_role": null, - "seqNo": 
null, - "py/object": "sovrin_common.identity.Identity" - }, - "ULtgFQJe6bjiFbs7ke3NJD": { - "last_synced": { - "__reduce__": [{ - "py/type": "datetime.datetime" - }, - ["B+EIFgkTFA2q1Q=="]], - "py/object": "datetime.datetime" - }, - "identity": { - "_verkey": "5kh3FB4H3NKq7tUDqeqHc1", - "py/object": "plenum.common.signer_did.DidIdentity", - "abbreviated": true, - "_identifier": "ULtgFQJe6bjiFbs7ke3NJD" - }, - "trustAnchor": "Th7MpTaRZVRYnPiabds81Y", - "_role": "101", - "seqNo": null, - "py/object": "sovrin_common.identity.Identity" - }, - "H2aKRiDeq8aLZSydQMDbtf": { - "last_synced": { - "__reduce__": [{ - "py/type": "datetime.datetime" - }, - ["B+EIFgkfCwejBQ=="]], - "py/object": "datetime.datetime" - }, - "identity": { - "_verkey": "3sphzTb2itL2mwSeJ1Ji28", - "py/object": "plenum.common.signer_did.DidIdentity", - "abbreviated": true, - "_identifier": "H2aKRiDeq8aLZSydQMDbtf" - }, - "trustAnchor": "Th7MpTaRZVRYnPiabds81Y", - "_role": "101", - "seqNo": null, - "py/object": "sovrin_common.identity.Identity" - }, - "CzkavE58zgX7rUMrzSinLr": { - "last_synced": { - "__reduce__": [{ - "py/type": "datetime.datetime" - }, - ["B+EIFgkaBA622w=="]], - "py/object": "datetime.datetime" - }, - "identity": { - "_verkey": "WjXEvZ9xj4Tz9sLtzf7HVP", - "py/object": "plenum.common.signer_did.DidIdentity", - "abbreviated": true, - "_identifier": "CzkavE58zgX7rUMrzSinLr" - }, - "trustAnchor": "Th7MpTaRZVRYnPiabds81Y", - "_role": "101", - "seqNo": null, - "py/object": "sovrin_common.identity.Identity" - } - }, - "idsToSigners": { - "BpbaEBJ16MoXpv7GU4y9u1": { - "seed": { - "py/b64": "snguqTspwh7eX5XiHoFcJsO3fwWNFMo8+0UP3Z8XtI4=\n" - }, - "sk": { - "py/id": 77 - }, - "naclSigner": { - "keyhex": { - "py/b64": "YjI3ODJlYTkzYjI5YzIxZWRlNWY5NWUyMWU4MTVjMjZjM2I3N2YwNThkMTRjYTNjZmI0NTBmZGQ5\nZjE3YjQ4ZQ==\n" - }, - "keyraw": { - "py/b64": "snguqTspwh7eX5XiHoFcJsO3fwWNFMo8+0UP3Z8XtI4=\n" - }, - "verhex": { - "py/b64": 
"NTdhMGE2NmMwNzU0MDA0M2NlMjUwZjA2YmUxMDFjNzg4ZmM3MjM1NWUyMWViOThjY2ZiMzUzZjRh\nMTYzZGY2Zg==\n" - }, - "key": { - "_signing_key": { - "py/b64": "snguqTspwh7eX5XiHoFcJsO3fwWNFMo8+0UP3Z8XtI5XoKZsB1QAQ84lDwa+EBx4j8cjVeIeuYzP\ns1P0oWPfbw==\n" - }, - "verify_key": { - "_key": { - "py/b64": "V6CmbAdUAEPOJQ8GvhAceI/HI1XiHrmMz7NT9KFj328=\n" - }, - "py/object": "stp_core.crypto.nacl_wrappers.VerifyKey" - }, - "py/object": "stp_core.crypto.nacl_wrappers.SigningKey", - "_seed": { - "py/b64": "snguqTspwh7eX5XiHoFcJsO3fwWNFMo8+0UP3Z8XtI4=\n" - } - }, - "py/object": "stp_core.crypto.nacl_wrappers.Signer", - "verraw": { - "py/b64": "V6CmbAdUAEPOJQ8GvhAceI/HI1XiHrmMz7NT9KFj328=\n" - } - }, - "_alias": null, - "py/object": "plenum.common.signer_did.DidSigner", - "_verkey": "JkkV9B3Z2vsKWwThFLG2cN", - "abbreviated": true, - "_identifier": "BpbaEBJ16MoXpv7GU4y9u1" - }, - "EqwgnuqynKnRRQ6q5Lzov8": { - "py/id": 67 - }, - "6KYi5Pb7rcvJb8ZwKb2nvm": { - "seed": { - "py/b64": "2JWwi9rB6gKH3tZ+zhBy1YBED1ex5vvLxx4N8y0z1g4=\n" - }, - "sk": { - "py/id": 73 - }, - "naclSigner": { - "keyhex": { - "py/b64": "ZDg5NWIwOGJkYWMxZWEwMjg3ZGVkNjdlY2UxMDcyZDU4MDQ0MGY1N2IxZTZmYmNiYzcxZTBkZjMy\nZDMzZDYwZQ==\n" - }, - "keyraw": { - "py/b64": "2JWwi9rB6gKH3tZ+zhBy1YBED1ex5vvLxx4N8y0z1g4=\n" - }, - "verhex": { - "py/b64": "MmIxNDlmM2Q5M2NhYTMxMzE2ZTZiYzYyNGI5NjhmOGEyOWVjM2E3NmQ4OTI5Nzc4OGQzY2ZjMDRm\nOTczNmNhMg==\n" - }, - "key": { - "_signing_key": { - "py/b64": "2JWwi9rB6gKH3tZ+zhBy1YBED1ex5vvLxx4N8y0z1g4rFJ89k8qjExbmvGJLlo+KKew6dtiSl3iN\nPPwE+XNsog==\n" - }, - "verify_key": { - "_key": { - "py/b64": "KxSfPZPKoxMW5rxiS5aPiinsOnbYkpd4jTz8BPlzbKI=\n" - }, - "py/object": "stp_core.crypto.nacl_wrappers.VerifyKey" - }, - "py/object": "stp_core.crypto.nacl_wrappers.SigningKey", - "_seed": { - "py/b64": "2JWwi9rB6gKH3tZ+zhBy1YBED1ex5vvLxx4N8y0z1g4=\n" - } - }, - "py/object": "stp_core.crypto.nacl_wrappers.Signer", - "verraw": { - "py/b64": "KxSfPZPKoxMW5rxiS5aPiinsOnbYkpd4jTz8BPlzbKI=\n" - } - }, - "_alias": null, - 
"py/object": "plenum.common.signer_did.DidSigner", - "_verkey": "6BFmCCvvtJBjYtYLCDVj37", - "abbreviated": true, - "_identifier": "6KYi5Pb7rcvJb8ZwKb2nvm" - }, - "CkQZNWaAQSLyY5iqw6KTx": { - "seed": { - "py/b64": "bSVU7UUzWzTjQc66W2+rz/GOf7bjdzzfbxbPtjAuR/w=\n" - }, - "sk": { - "py/id": 81 - }, - "naclSigner": { - "keyhex": { - "py/b64": "NmQyNTU0ZWQ0NTMzNWIzNGUzNDFjZWJhNWI2ZmFiY2ZmMThlN2ZiNmUzNzczY2RmNmYxNmNmYjYz\nMDJlNDdmYw==\n" - }, - "keyraw": { - "py/b64": "bSVU7UUzWzTjQc66W2+rz/GOf7bjdzzfbxbPtjAuR/w=\n" - }, - "verhex": { - "py/b64": "MDFhM2VlNzUyY2QyYTczMDViODFlNTFmY2E4NjQyMGI2ZTI5N2EyYWQzZDMzN2I0NDUyMDVmNTQw\nMDg0NmRhNQ==\n" - }, - "key": { - "_signing_key": { - "py/b64": "bSVU7UUzWzTjQc66W2+rz/GOf7bjdzzfbxbPtjAuR/wBo+51LNKnMFuB5R/KhkILbil6KtPTN7RF\nIF9UAIRtpQ==\n" - }, - "verify_key": { - "_key": { - "py/b64": "AaPudSzSpzBbgeUfyoZCC24peirT0ze0RSBfVACEbaU=\n" - }, - "py/object": "stp_core.crypto.nacl_wrappers.VerifyKey" - }, - "py/object": "stp_core.crypto.nacl_wrappers.SigningKey", - "_seed": { - "py/b64": "bSVU7UUzWzTjQc66W2+rz/GOf7bjdzzfbxbPtjAuR/w=\n" - } - }, - "py/object": "stp_core.crypto.nacl_wrappers.Signer", - "verraw": { - "py/b64": "AaPudSzSpzBbgeUfyoZCC24peirT0ze0RSBfVACEbaU=\n" - } - }, - "_alias": null, - "py/object": "plenum.common.signer_did.DidSigner", - "_verkey": "EbzRNZFQKP5jsXrKXrLWkC", - "abbreviated": true, - "_identifier": "CkQZNWaAQSLyY5iqw6KTx" - } - }, - "_attributes": { - "json://{\"py/tuple\": [\"endpoint\", null, \"H2aKRiDeq8aLZSydQMDbtf\"]}": { - "value": "{\"endpoint\": {\"ha\": \"10.0.0.4:7777\", \"pubkey\": \"AGBjYvyM3SFnoiDGAEzkSLHvqyzVkXeMZfKDvdpEsC2x\"}}", - "origin": null, - "seqNo": 22, - "encKey": null, - "name": "endpoint", - "dest": "H2aKRiDeq8aLZSydQMDbtf", - "py/object": "sovrin_client.client.wallet.attribute.Attribute", - "ledgerStore": { - "py/id": 2 - } - }, - "json://{\"py/tuple\": [\"endpoint\", null, \"CzkavE58zgX7rUMrzSinLr\"]}": { - "value": "{\"endpoint\": {\"ha\": \"10.0.0.3:6666\", \"pubkey\": 
\"C5eqjU7NMVMGGfGfx2ubvX5H9X346bQt5qeziVAo3naQ\"}}", - "origin": null, - "seqNo": 19, - "encKey": null, - "name": "endpoint", - "dest": "CzkavE58zgX7rUMrzSinLr", - "py/object": "sovrin_client.client.wallet.attribute.Attribute", - "ledgerStore": { - "py/object": "sovrin_client.client.wallet.attribute.LedgerStore", - "py/enumvalue": 4 - } - }, - "json://{\"py/tuple\": [\"endpoint\", null, \"ULtgFQJe6bjiFbs7ke3NJD\"]}": { - "value": "{\"endpoint\": {\"ha\": \"10.0.0.2:5555\", \"pubkey\": \"5hmMA64DDQz5NzGJNVtRzNwpkZxktNQds21q3Wxxa62z\"}}", - "origin": null, - "seqNo": 17, - "encKey": null, - "name": "endpoint", - "dest": "ULtgFQJe6bjiFbs7ke3NJD", - "py/object": "sovrin_client.client.wallet.attribute.Attribute", - "ledgerStore": { - "py/id": 2 - } - } - }, - "aliasesToIds": { - - }, - "_upgrades": { - - }, - "env": "test", - "replyHandler": { - - }, - "_name": "Default", - "didMethods": { - "default": { - "py/id": 65 - }, - "py/object": "plenum.common.did_method.DidMethods", - "d": { - "sovrin": { - "signerConstructor": { - "py/type": "plenum.common.signer_did.DidSigner" - }, - "pattern": "did:sovrin:", - "py/object": "plenum.common.did_method.DidMethod", - "name": "sovrin" - } - } - }, - "_pending": { - "py/reduce": [{ - "py/type": "collections.deque" - }, - { - "py/tuple": [[]] - }, - null, - null, - null] - }, - "py/object": "sovrin_client.client.wallet.wallet.Wallet", - "_pconfigs": { - - }, - "_nodes": { - - }, - "lastKnownSeqs": { - - }, - "_connections": { - "py/reduce": [{ - "py/type": "collections.OrderedDict" - }, - { - "py/tuple": [] - }, - null, - null, - { - "py/tuple": [{ - "py/tuple": ["Faber College", - { - "remotePubkey": "5hmMA64DDQz5NzGJNVtRzNwpkZxktNQds21q3Wxxa62z", - "remoteIdentifier": "ULtgFQJe6bjiFbs7ke3NJD", - "linkStatus": "Accepted", - "localIdentifier": "BpbaEBJ16MoXpv7GU4y9u1", - "trustAnchor": "Faber College", - "linkLastSyncNo": null, - "verifiedClaimProofs": [], - "name": "Faber College", - "internalId": null, - "availableClaims": [{ - 
"py/seq": ["Transcript", - "1.2", - "ULtgFQJe6bjiFbs7ke3NJD"], - "py/object": "anoncreds.protocol.types.AvailableClaim", - "py/newargs": { - "py/tuple": ["Transcript", - "1.2", - "ULtgFQJe6bjiFbs7ke3NJD"] - } - }], - "connectionLastSynced": { - "__reduce__": [{ - "py/type": "datetime.datetime" - }, - ["B+EIFgwTFA8NmQ=="]], - "py/object": "datetime.datetime" - }, - "invitationNonce": "b1134a647eb818069c089e7694f63e6d", - "localVerkey": "~JkkV9B3Z2vsKWwThFLG2cN", - "_remoteVerkey": "~5kh3FB4H3NKq7tUDqeqHc1", - "proofRequests": [], - "remoteEndPoint": { - "py/tuple": ["10.0.0.2", - 5555] - }, - "py/object": "sovrin_client.client.wallet.link.Link" - }] - }, - { - "py/tuple": ["Acme Corp", - { - "remotePubkey": "C5eqjU7NMVMGGfGfx2ubvX5H9X346bQt5qeziVAo3naQ", - "remoteIdentifier": "CzkavE58zgX7rUMrzSinLr", - "linkStatus": "Accepted", - "localIdentifier": "CkQZNWaAQSLyY5iqw6KTx", - "trustAnchor": "Acme Corp", - "linkLastSyncNo": null, - "verifiedClaimProofs": [], - "name": "Acme Corp", - "internalId": null, - "availableClaims": [{ - "py/seq": ["Job-Certificate", - "0.2", - "CzkavE58zgX7rUMrzSinLr"], - "py/object": "anoncreds.protocol.types.AvailableClaim", - "py/newargs": { - "py/tuple": ["Job-Certificate", - "0.2", - "CzkavE58zgX7rUMrzSinLr"] - } - }], - "connectionLastSynced": { - "__reduce__": [{ - "py/type": "datetime.datetime" - }, - ["B+EIFgwaBQDmfw=="]], - "py/object": "datetime.datetime" - }, - "invitationNonce": "57fbf9dc8c8e6acde33de98c6d747b28c", - "localVerkey": "~EbzRNZFQKP5jsXrKXrLWkC", - "_remoteVerkey": "~WjXEvZ9xj4Tz9sLtzf7HVP", - "proofRequests": [{ - "ts": null, - "attributes": { - "py/reduce": [{ - "py/type": "collections.OrderedDict" - }, - { - "py/tuple": [] - }, - null, - null, - { - "py/tuple": [{ - "py/tuple": ["first_name", - "Alice"] - }, - { - "py/tuple": ["last_name", - "Garcia"] - }, - { - "py/tuple": ["phone_number", - "123-45-6789"] - }, - { - "py/tuple": ["degree", - "Bachelor of Science, Marketing"] - }, - { - "py/tuple": ["status", - 
"graduated"] - }, - { - "py/tuple": ["ssn", - "123-45-6789"] - }] - }] - }, - "verifiableAttributes": { - "10a7586a-42ce-4a6b-b94f-185be4cddcaf": { - "py/seq": ["status", - null, - null], - "py/object": "anoncreds.protocol.types.AttributeInfo", - "py/newargs": { - "py/tuple": ["status", - null, - null] - } - }, - "7f1eec2c-2fbb-43f3-9daa-249b4682d364": { - "py/seq": ["ssn", - null, - null], - "py/object": "anoncreds.protocol.types.AttributeInfo", - "py/newargs": { - "py/tuple": ["ssn", - null, - null] - } - }, - "aa241ad1-b3cc-476e-9ad4-01b91f9d4441": { - "py/seq": ["degree", - null, - null], - "py/object": "anoncreds.protocol.types.AttributeInfo", - "py/newargs": { - "py/tuple": ["degree", - null, - null] - } - } - }, - "selfAttestedAttrs": { - "first_name": "Alice", - "last_name": "Garcia", - "phone_number": "123-45-6789" - }, - "name": "Job-Application", - "fulfilledByClaims": [{ - "py/tuple": [{ - "py/id": 6 - }, - { - "py/id": 8 - }, - { - "py/reduce": [{ - "py/type": "collections.OrderedDict" - }, - { - "py/tuple": [] - }, - null, - null, - { - "py/tuple": [{ - "py/tuple": ["student_name", - "Alice Garcia"] - }, - { - "py/tuple": ["ssn", - "123-45-6789"] - }, - { - "py/tuple": ["degree", - "Bachelor of Science, Marketing"] - }, - { - "py/tuple": ["year", - "2015"] - }, - { - "py/tuple": ["status", - "graduated"] - }] - }] - }] - }], - "nonce": 1871218719015472932666560146158750511756, - "predicates": { - - }, - "version": "0.2", - "py/object": "anoncreds.protocol.types.ProofRequest", - "seqNo": null - }], - "remoteEndPoint": { - "py/tuple": ["10.0.0.3", - 6666] - }, - "py/object": "sovrin_client.client.wallet.link.Link" - }] - }, - { - "py/tuple": ["Thrift Bank", - { - "remotePubkey": "AGBjYvyM3SFnoiDGAEzkSLHvqyzVkXeMZfKDvdpEsC2x", - "remoteIdentifier": "H2aKRiDeq8aLZSydQMDbtf", - "linkStatus": "Accepted", - "localIdentifier": "6KYi5Pb7rcvJb8ZwKb2nvm", - "trustAnchor": "Thrift Bank", - "linkLastSyncNo": null, - "verifiedClaimProofs": [], - "name": "Thrift 
Bank", - "internalId": null, - "availableClaims": [], - "connectionLastSynced": { - "__reduce__": [{ - "py/type": "datetime.datetime" - }, - ["B+EIFgwfCwgQDg=="]], - "py/object": "datetime.datetime" - }, - "invitationNonce": "77fbf9dc8c8e6acde33de98c6d747b28c", - "localVerkey": "~6BFmCCvvtJBjYtYLCDVj37", - "_remoteVerkey": "~3sphzTb2itL2mwSeJ1Ji28", - "proofRequests": [{ - "ts": null, - "attributes": { - "py/reduce": [{ - "py/type": "collections.OrderedDict" - }, - { - "py/tuple": [] - }, - null, - null, - { - "py/tuple": [{ - "py/tuple": ["salary_bracket", - "between $50,000 to $100,000"] - }, - { - "py/tuple": ["employee_status", - "Permanent"] - }] - }] - }, - "verifiableAttributes": { - "a702a318-07ed-4d6e-a60f-57677d8fcb84": { - "py/seq": ["salary_bracket", - null, - null], - "py/object": "anoncreds.protocol.types.AttributeInfo", - "py/newargs": { - "py/tuple": ["salary_bracket", - null, - null] - } - }, - "d980fcb4-5d02-4081-a67b-d301f55d27d9": { - "py/seq": ["employee_status", - null, - null], - "py/object": "anoncreds.protocol.types.AttributeInfo", - "py/newargs": { - "py/tuple": ["employee_status", - null, - null] - } - } - }, - "selfAttestedAttrs": { - - }, - "name": "Loan-Application-Basic", - "fulfilledByClaims": [{ - "py/tuple": [{ - "py/id": 12 - }, - { - "py/id": 14 - }, - { - "py/reduce": [{ - "py/type": "collections.OrderedDict" - }, - { - "py/tuple": [] - }, - null, - null, - { - "py/tuple": [{ - "py/tuple": ["first_name", - "Alice"] - }, - { - "py/tuple": ["last_name", - "Garcia"] - }, - { - "py/tuple": ["employee_status", - "Permanent"] - }, - { - "py/tuple": ["experience", - "3 years"] - }, - { - "py/tuple": ["salary_bracket", - "between $50,000 to $100,000"] - }] - }] - }] - }], - "nonce": 2551783452857349859593309361022286934668, - "predicates": { - - }, - "version": "0.1", - "py/object": "anoncreds.protocol.types.ProofRequest", - "seqNo": null - }, - { - "ts": null, - "attributes": { - "py/reduce": [{ - "py/type": "collections.OrderedDict" - 
}, - { - "py/tuple": [] - }, - null, - null, - { - "py/tuple": [{ - "py/tuple": ["first_name", - "Alice"] - }, - { - "py/tuple": ["last_name", - "Garcia"] - }, - { - "py/tuple": ["ssn", - "123-45-6789"] - }] - }] - }, - "verifiableAttributes": { - "5fabdc0c-012e-45b4-b76a-d0992e9a32d8": { - "py/seq": ["ssn", - null, - null], - "py/object": "anoncreds.protocol.types.AttributeInfo", - "py/newargs": { - "py/tuple": ["ssn", - null, - null] - } - }, - "3d41a82b-a2cf-413e-a3e1-18df836750a5": { - "py/seq": ["first_name", - null, - null], - "py/object": "anoncreds.protocol.types.AttributeInfo", - "py/newargs": { - "py/tuple": ["first_name", - null, - null] - } - }, - "dbe499d2-9cce-488c-8d8c-83e233c538ed": { - "py/seq": ["last_name", - null, - null], - "py/object": "anoncreds.protocol.types.AttributeInfo", - "py/newargs": { - "py/tuple": ["last_name", - null, - null] - } - } - }, - "selfAttestedAttrs": { - - }, - "name": "Loan-Application-KYC", - "fulfilledByClaims": [{ - "py/tuple": [{ - "py/id": 6 - }, - { - "py/id": 8 - }, - { - "py/reduce": [{ - "py/type": "collections.OrderedDict" - }, - { - "py/tuple": [] - }, - null, - null, - { - "py/tuple": [{ - "py/tuple": ["student_name", - "Alice Garcia"] - }, - { - "py/tuple": ["ssn", - "123-45-6789"] - }, - { - "py/tuple": ["degree", - "Bachelor of Science, Marketing"] - }, - { - "py/tuple": ["year", - "2015"] - }, - { - "py/tuple": ["status", - "graduated"] - }] - }] - }] - }, - { - "py/tuple": [{ - "py/id": 12 - }, - { - "py/id": 14 - }, - { - "py/reduce": [{ - "py/type": "collections.OrderedDict" - }, - { - "py/tuple": [] - }, - null, - null, - { - "py/tuple": [{ - "py/tuple": ["first_name", - "Alice"] - }, - { - "py/tuple": ["last_name", - "Garcia"] - }, - { - "py/tuple": ["employee_status", - "Permanent"] - }, - { - "py/tuple": ["experience", - "3 years"] - }, - { - "py/tuple": ["salary_bracket", - "between $50,000 to $100,000"] - }] - }] - }] - }], - "nonce": 2551783452857349859593309361022286934668, - "predicates": { - 
- }, - "version": "0.1", - "py/object": "anoncreds.protocol.types.ProofRequest", - "seqNo": null - }, - { - "ts": null, - "attributes": { - "py/reduce": [{ - "py/type": "collections.OrderedDict" - }, - { - "py/tuple": [] - }, - null, - null, - { - "py/tuple": [{ - "py/tuple": ["first_name", - "string"] - }, - { - "py/tuple": ["last_name", - "string"] - }] - }] - }, - "verifiableAttributes": { - "f2510aa7-6274-431d-8a57-69d8307f7c11": { - "py/seq": ["first_name", - null, - null], - "py/object": "anoncreds.protocol.types.AttributeInfo", - "py/newargs": { - "py/tuple": ["first_name", - null, - null] - } - }, - "d6ddc5f2-e272-47d9-96d5-777b4d89e835": { - "py/seq": ["last_name", - null, - null], - "py/object": "anoncreds.protocol.types.AttributeInfo", - "py/newargs": { - "py/tuple": ["last_name", - null, - null] - } - } - }, - "selfAttestedAttrs": { - - }, - "name": "Name-Proof", - "fulfilledByClaims": [], - "nonce": 2551783452857349859593309361022286934668, - "predicates": { - - }, - "version": "0.1", - "py/object": "anoncreds.protocol.types.ProofRequest", - "seqNo": null - }], - "remoteEndPoint": { - "py/tuple": ["10.0.0.4", - 7777] - }, - "py/object": "sovrin_client.client.wallet.link.Link" - }] - }] - }] - }, - "ids": { - "EqwgnuqynKnRRQ6q5Lzov8": { - "py/seq": [{ - "py/id": 67 - }, - 1503394272580665], - "py/object": "plenum.client.wallet.IdData", - "py/newargs": { - "py/tuple": [{ - "seed": { - "py/b64": "wAoEN+cjMPjdPkUuvI378QXO16jC1ZUUHll95hICJ1U=\n" - }, - "sk": { - "py/id": 69 - }, - "naclSigner": { - "keyhex": { - "py/b64": "YzAwYTA0MzdlNzIzMzBmOGRkM2U0NTJlYmM4ZGZiZjEwNWNlZDdhOGMyZDU5NTE0MWU1OTdkZTYx\nMjAyMjc1NQ==\n" - }, - "keyraw": { - "py/b64": "wAoEN+cjMPjdPkUuvI378QXO16jC1ZUUHll95hICJ1U=\n" - }, - "verhex": { - "py/b64": "NzAxYzM0N2M1ZTk1ZGFjNmU1MDAyYjkxNTQ0OWQ5OTk5ZjAyOGJkZGFlMzljZmMxMGY5MGMyODM5\nYmFiYTU2ZQ==\n" - }, - "key": { - "_signing_key": { - "py/b64": "wAoEN+cjMPjdPkUuvI378QXO16jC1ZUUHll95hICJ1VwHDR8XpXaxuUAK5FUSdmZnwKL3a45z8EP\nkMKDm6ulbg==\n" 
- }, - "verify_key": { - "_key": { - "py/b64": "cBw0fF6V2sblACuRVEnZmZ8Ci92uOc/BD5DCg5urpW4=\n" - }, - "py/object": "stp_core.crypto.nacl_wrappers.VerifyKey" - }, - "py/object": "stp_core.crypto.nacl_wrappers.SigningKey", - "_seed": { - "py/b64": "wAoEN+cjMPjdPkUuvI378QXO16jC1ZUUHll95hICJ1U=\n" - } - }, - "py/object": "stp_core.crypto.nacl_wrappers.Signer", - "verraw": { - "py/b64": "cBw0fF6V2sblACuRVEnZmZ8Ci92uOc/BD5DCg5urpW4=\n" - } - }, - "_alias": null, - "py/object": "plenum.common.signer_did.DidSigner", - "_verkey": "LdquEm7MuXuUCeT9vC58P7", - "abbreviated": true, - "_identifier": "EqwgnuqynKnRRQ6q5Lzov8" - }, - 1503394272580665] - } - } - } -} diff --git a/indy_client/test/agent/base_agent.py b/indy_client/test/agent/base_agent.py deleted file mode 100644 index bef06683d..000000000 --- a/indy_client/test/agent/base_agent.py +++ /dev/null @@ -1,197 +0,0 @@ -import os -import signal -from os.path import expanduser - -from ioflo.base.consoling import Console -from plenum.cli.cli import Exit - -from stp_core.common.log import Logger, getlogger -from indy_client.agent.run_agent import runBootstrap - -from indy_client.test.agent.test_walleted_agent import TestWalletedAgent - -from plenum.common.constants import NAME, VERSION - -from anoncreds.protocol.types import ID -from indy_client.agent.exception import NonceNotFound -from indy_client.client.client import Client -from indy_client.client.wallet.wallet import Wallet -from indy_client.test.constants import primes -from indy_common.config import agentLoggingLevel -from indy_common.config_util import getConfig - - -class BaseAgent(TestWalletedAgent): - def __init__(self, - name: str, - basedirpath: str, - client: Client = None, - wallet: Wallet = None, - port: int = None, - loop=None, - config=None, - endpointArgs=None): - - config = config or getConfig() - basedirpath = basedirpath or os.path.expanduser(config.CLI_BASE_DIR) - - portParam, _ = self.getPassedArgs() - - self.logger = getlogger() - - 
super().__init__(name, basedirpath, client, wallet, - portParam or port, loop=loop, config=config, - endpointArgs=endpointArgs) - - self.claimVersionNumber = 0.01 - - self._invites = {} - - self.updateClaimVersionFile(self.getClaimVersionFileName()) - - signal.signal(signal.SIGTERM, self.exit_gracefully) - - self.setupLogging(self.getLoggerFilePath()) - - def getLoggerFilePath(self, name=None): - config = getConfig() - path = expanduser('{}'.format(config.CLI_BASE_DIR)) - return '{}/{}.log'.format(path, - (name or self.name).replace(" ", - "-").lower()) - - def getClaimVersionFileName(self): - return self.name.replace(" ", "-").lower() + "-schema-version.txt" - - def updateClaimVersionFile(self, fileName,): - claimVersionFilePath = '{}/{}'.format(self.basedirpath, fileName) - # get version number from file - if os.path.isfile(claimVersionFilePath): - try: - with open(claimVersionFilePath, mode='r+') as file: - self.claimVersionNumber = float(file.read()) + 0.001 - file.seek(0) - # increment version and update file - file.write(str(self.claimVersionNumber)) - file.truncate() - except OSError as e: - self.logger.warning( - 'Error occurred while reading version file: ' - 'error:{}'.format(e)) - raise e - except ValueError as e: - self.logger.warning('Invalid version number') - raise e - else: - try: - with open(claimVersionFilePath, mode='w') as file: - file.write(str(self.claimVersionNumber)) - except OSError as e: - self.logger.warning('Error creating version file {}'.format(e)) - raise e - - def setupLogging(self, filePath): - Logger().setLogLevel(agentLoggingLevel) - Logger().enableFileLogging(filePath) - - def getInternalIdByInvitedNonce(self, nonce): - if nonce in self._invites: - return self._invites[nonce] - else: - raise NonceNotFound - - def getAvailableClaimList(self, link): - assert link - assert link.request_nonce - assert link.remoteIdentifier - return self.issuer.wallet.availableClaimsToAll + \ - 
self.issuer.wallet.availableClaimsByNonce.get(link.request_nonce, []) + \ - self.issuer.wallet.availableClaimsByIdentifier.get( - link.remoteIdentifier, []) - - def isClaimAvailable(self, link, claimName): - return claimName in [cl.get("name") for cl in - self.getAvailableClaimList(link)] - - def getSchemaKeysToBeGenerated(self): - raise NotImplemented - - def getSchemaKeysForClaimsAvailableToAll(self): - return self.getSchemaKeysToBeGenerated() - - def getSchemaKeysForClaimsAvailableToSpecificNonce(self): - return {} - - def getAttrDefs(self): - raise NotImplemented - - def getAttrs(self): - raise NotImplemented - - async def postProofVerif(self, claimName, link, frm): - pass - - async def initAvailableClaimList(self): - async def getSchema(schemaKey): - schema = await self.issuer.wallet.getSchema(ID(schemaKey)) - return { - NAME: schema.name, - VERSION: schema.version, - "schemaSeqNo": schema.seqId - } - - for schemaKey in self.getSchemaKeysForClaimsAvailableToAll(): - schema = await getSchema(schemaKey) - self.issuer.wallet.availableClaimsToAll.append(schema) - - for nonce, schemaNames in self.getSchemaKeysForClaimsAvailableToSpecificNonce().items(): - for schemaName in schemaNames: - schemaKeys = list( - filter( - lambda sk: sk.name == schemaName, - self.getSchemaKeysToBeGenerated())) - assert len(schemaKeys) == 1, \ - "no such schema name found in generated schema keys" - schema = await getSchema(schemaKeys[0]) - oldAvailClaims = self.issuer.wallet.availableClaimsByNonce.get(nonce, [ - ]) - oldAvailClaims.append(schema) - self.issuer.wallet.availableClaimsByNonce[nonce] = oldAvailClaims - - def _addAttribute(self, schemaKey, proverId, link): - attr = self.getAttrs()[self.getInternalIdByInvitedNonce(proverId)] - self.issuer._attrRepo.addAttributes(schemaKey=schemaKey, - userId=proverId, - attributes=attr) - - async def addSchemasToWallet(self): - for schemaKey in self.getSchemaKeysToBeGenerated(): - matchedAttrDefs = list(filter(lambda ad: ad.name == 
schemaKey.name, - self.getAttrDefs())) - assert len(matchedAttrDefs) == 1, \ - "check if agent has attrib def and it's name is equivalent " \ - "to it's corresponding schema key name" - attrDef = matchedAttrDefs[0] - if not self.issuer.isSchemaExists(schemaKey): - self.logger.info("schema not found in wallet, will go and " - "get id from repo: {}".format(str(schemaKey))) - schema = await self.issuer.genSchema(schemaKey.name, - schemaKey.version, - attrDef.attribNames()) - if schema: - schemaId = ID(schemaKey=schema.getKey(), - schemaId=schema.seqId, seqId=schema.seqId) - p_prime, q_prime = primes["prime2"] - await self.issuer.genKeys(schemaId, p_prime=p_prime, q_prime=q_prime) - await self.issuer.issueAccumulator(schemaId=schemaId, iA='110', L=5) - else: - self.logger.info( - "schema is already loaded in wallet: {}".format( - str(schemaKey))) - await self.initAvailableClaimList() - - async def bootstrap(self): - await runBootstrap(self.addSchemasToWallet) - - def exit_gracefully(self, signum, frame): - raise Exit("OS process terminated/stopped") diff --git a/indy_client/test/agent/conftest.py b/indy_client/test/agent/conftest.py deleted file mode 100644 index 34ad33e5d..000000000 --- a/indy_client/test/agent/conftest.py +++ /dev/null @@ -1,424 +0,0 @@ -from indy_client.test import waits - -from plenum.common.signer_did import DidSigner - -from indy_client.test.agent.test_walleted_agent import TestWalletedAgent -from indy_common.strict_types import strict_types -from stp_core.network.port_dispenser import genHa - -strict_types.defaultShouldCheck = True - -# def pytest_configure(config): -# setattr(sys, '_called_from_test', True) -# -# -# def pytest_unconfigure(config): -# delattr(sys, '_called_from_test') -# -# -import json -import os - -import pytest - -import sample -from stp_core.loop.looper import Looper -from plenum.common.util import randomString -from stp_core.loop.eventually import eventually -from plenum.test.helper import assertFunc -from 
indy_client.agent.walleted_agent import WalletedAgent -from indy_client.client.wallet.attribute import Attribute, LedgerStore -from indy_client.client.wallet.wallet import Wallet -from indy_common.constants import ENDPOINT, TRUST_ANCHOR -from indy_client.test.agent.acme import create_acme, bootstrap_acme -from indy_client.test.agent.faber import create_faber, bootstrap_faber -from indy_client.test.agent.helper import ensureAgentConnected, buildFaberWallet, \ - buildAcmeWallet, buildThriftWallet, startAgent -from indy_client.test.agent.thrift import create_thrift -from indy_node.test.helper import addAttributeAndCheck -from indy_client.test.helper import createNym, TestClient - -# noinspection PyUnresolvedReferences -from indy_node.test.conftest import nodeSet, genesisTxns - -# noinspection PyUnresolvedReferences -from plenum.test.conftest import poolTxnStewardData, poolTxnStewardNames - - -@pytest.fixture(scope="module") -def emptyLooper(): - with Looper() as l: - yield l - - -@pytest.fixture(scope="module") -def walletBuilder(): - def _(name): - wallet = Wallet(name) - wallet.addIdentifier(signer=DidSigner()) - return wallet - - return _ - - -@pytest.fixture(scope="module") -def aliceWallet(walletBuilder): - return walletBuilder("Alice") - - -@pytest.fixture(scope="module") -def faberWallet(): - return buildFaberWallet() - - -@pytest.fixture(scope="module") -def acmeWallet(): - return buildAcmeWallet() - - -@pytest.fixture(scope="module") -def thriftWallet(): - return buildThriftWallet() - - -@pytest.fixture(scope="module") -def agentBuilder(tdirWithClientPoolTxns): - def _(wallet, basedir=None): - basedir = basedir or tdirWithClientPoolTxns - _, port = genHa() - _, clientPort = genHa() - client = TestClient(randomString(6), - ha=("0.0.0.0", clientPort), - basedirpath=basedir) - - agent = WalletedAgent(name=wallet.name, - basedirpath=basedir, - client=client, - wallet=wallet, - port=port) - - return agent - - return _ - - -@pytest.fixture(scope="module") -def 
aliceAgent(aliceWallet, agentBuilder): - agent = agentBuilder(aliceWallet) - return agent - - -@pytest.fixture(scope="module") -def aliceAdded(nodeSet, steward, stewardWallet, - emptyLooper, aliceAgent): - addAgent(emptyLooper, aliceAgent, steward, stewardWallet) - - -@pytest.fixture(scope="module") -def aliceIsRunning(emptyLooper, aliceAgent, aliceAdded): - emptyLooper.add(aliceAgent) - return aliceAgent - - -@pytest.fixture(scope="module") -def aliceAgentConnected(nodeSet, - aliceAgent, - aliceIsRunning, - emptyLooper): - emptyLooper.run( - eventually( - assertFunc, aliceAgent.client.isReady)) - return aliceAgent - - -@pytest.fixture(scope="module") -def agentIpAddress(): - return "127.0.0.1" - - -@pytest.fixture(scope="module") -def faberAgentPort(): - return genHa()[1] - - -@pytest.fixture(scope="module") -def acmeAgentPort(): - return genHa()[1] - - -@pytest.fixture(scope="module") -def thriftAgentPort(): - return genHa()[1] - - -@pytest.fixture(scope="module") -def faberAgent(tdirWithClientPoolTxns, faberAgentPort, faberWallet): - return create_faber(faberWallet.name, faberWallet, - base_dir_path=tdirWithClientPoolTxns, - port=faberAgentPort) - - -@pytest.fixture(scope="module") -def faberBootstrap(faberAgent): - return bootstrap_faber(faberAgent) - - -@pytest.fixture(scope="module") -def acmeBootstrap(acmeAgent): - return bootstrap_acme(acmeAgent) - - -@pytest.fixture(scope="module") -def faberAdded(nodeSet, - steward, - stewardWallet, - emptyLooper, - faberAgent): - return addAgent(emptyLooper, faberAgent, steward, stewardWallet) - - -@pytest.fixture(scope="module") -def faberIsRunning(emptyLooper, tdirWithPoolTxns, faberWallet, - faberAgent, faberAdded, faberBootstrap): - return startAgent(emptyLooper, faberAgent, faberWallet, - bootstrap=faberBootstrap) - - -@pytest.fixture(scope="module") -def acmeAgent(tdirWithClientPoolTxns, acmeAgentPort, acmeWallet): - return create_acme(acmeWallet.name, acmeWallet, - base_dir_path=tdirWithClientPoolTxns, - 
port=acmeAgentPort) - - -@pytest.fixture(scope="module") -def acmeAdded(nodeSet, - steward, - stewardWallet, - emptyLooper, - acmeAgent): - return addAgent(emptyLooper, acmeAgent, steward, stewardWallet) - - -@pytest.fixture(scope="module") -def acmeIsRunning(emptyLooper, tdirWithPoolTxns, acmeWallet, acmeAgent, - acmeAdded, acmeBootstrap): - return startAgent(emptyLooper, acmeAgent, acmeWallet, - bootstrap=acmeBootstrap) - - -@pytest.fixture(scope="module") -def thriftAgent(tdirWithClientPoolTxns, thriftAgentPort, thriftWallet): - return create_thrift(thriftWallet.name, thriftWallet, - base_dir_path=tdirWithClientPoolTxns, - port=thriftAgentPort) - - -@pytest.fixture(scope="module") -def thfiftAdded(nodeSet, - steward, - stewardWallet, - emptyLooper, - thriftAgent): - return addAgent(emptyLooper, thriftAgent, steward, stewardWallet) - - -@pytest.fixture(scope="module") -def thriftIsRunning(emptyLooper, tdirWithPoolTxns, thriftWallet, - thriftAgent, thriftAdded): - return startAgent(emptyLooper, thriftAgent, thriftWallet) - - -# TODO: Rename it, not clear whether link is added to which wallet and -# who is adding -@pytest.fixture(scope="module") -def faberLinkAdded(faberIsRunning): - pass - - -@pytest.fixture(scope="module") -def acmeLinkAdded(acmeIsRunning): - pass - - -@pytest.fixture(scope="module") -def faberNonceForAlice(): - return 'b1134a647eb818069c089e7694f63e6d' - - -@pytest.fixture(scope="module") -def acmeNonceForAlice(): - return '57fbf9dc8c8e6acde33de98c6d747b28c' - - -@pytest.fixture(scope="module") -def aliceAcceptedFaber(faberIsRunning, faberNonceForAlice, faberAdded, - aliceIsRunning, emptyLooper, - alice_faber_request_loaded, - alice_faber_request_link_synced): - """ - Faber creates a Link object, generates a link request file. - Start FaberAgent - Start AliceAgent and send a ACCEPT_INVITE to FaberAgent. 
- """ - - check_accept_request(emptyLooper, - faberNonceForAlice, - aliceIsRunning, - faberIsRunning, - linkName='Faber College') - - -@pytest.fixture(scope="module") -def faber_request(): - return get_request_file('faber-request.indy') - - -@pytest.fixture(scope="module") -def acme_request(): - return get_request_file('acme-job-application.indy') - - -@pytest.fixture(scope="module") -def alice_faber_request_loaded(aliceAgent, faber_request): - link = agent_request_loaded(aliceAgent, faber_request) - assert link.name == 'Faber College' - return link - - -@pytest.fixture(scope="module") -def alice_faber_request_link_synced(alice_faber_request_loaded, - aliceAgentConnected, - aliceAgent: WalletedAgent, - emptyLooper, - faberAdded): - agent_request_link_synced(aliceAgent, - alice_faber_request_loaded.name, - emptyLooper) - - -@pytest.fixture(scope="module") -def alice_acme_request_loaded(aliceAgent, acme_request): - link = agent_request_loaded(aliceAgent, acme_request) - assert link.name == 'Acme Corp' - return link - - -@pytest.fixture(scope="module") -def alice_acme_request_link_synced(alice_acme_request_loaded, - aliceAgentConnected, - aliceAgent: WalletedAgent, - emptyLooper, - acmeAdded): - agent_request_link_synced(aliceAgent, alice_acme_request_loaded.name, - emptyLooper) - - -@pytest.fixture(scope="module") -def aliceAcceptedAcme(acmeIsRunning, acmeNonceForAlice, acmeAdded, - aliceIsRunning, emptyLooper, - alice_acme_request_link_synced): - """ - Faber creates a Link object, generates a link request file. - Start FaberAgent - Start AliceAgent and send a ACCEPT_INVITE to FaberAgent. 
- """ - - check_accept_request(emptyLooper, - acmeNonceForAlice, - aliceIsRunning, - acmeIsRunning, - linkName='Acme Corp') - - -def check_accept_request(emptyLooper, - nonce, - inviteeAgent: WalletedAgent, - inviterAgentAndWallet, - linkName): - """ - Assumes link identified by linkName is already created - """ - assert nonce - inviterAgent, inviterWallet = inviterAgentAndWallet # type: WalletedAgent, Wallet - - inviteeAgent.connectTo(linkName) - inviteeAcceptanceLink = inviteeAgent.wallet.getConnection(linkName, - required=True) - ensureAgentConnected(emptyLooper, inviteeAgent, inviteeAcceptanceLink) - - inviteeAgent.accept_request(linkName) - internalId = inviterAgent.get_internal_id_by_nonce(nonce) - - def chk(): - assert inviteeAcceptanceLink.remoteEndPoint[1] == inviterAgent.endpoint.ha[1] - assert inviteeAcceptanceLink.isAccepted - - link = inviterAgent.wallet.getConnectionBy(internalId=internalId) - assert link - assert link.remoteIdentifier == inviteeAcceptanceLink.localIdentifier - - timeout = waits.expected_accept_request() - emptyLooper.run(eventually(chk, timeout=timeout)) - - -def addAgent(looper, agent, steward, stewardWallet): - # 1. add Agent's NYM (by Steward) - agentNym = agent.wallet.defaultId - createNym(looper, - agentNym, - steward, - stewardWallet, - role=TRUST_ANCHOR, - verkey=agent.wallet.getVerkey()) - - # 2. add client to the loop - looper.add(agent.client) - - # 3. 
add attribute to the Agent's NYM with endpoint information (by - # Agent's client) - ep = '127.0.0.1:{}'.format(agent.port) - attributeData = json.dumps({ENDPOINT: {'ha': ep}}) - - attrib = Attribute(name='{}_endpoint'.format(agentNym), - origin=agentNym, - value=attributeData, - dest=agentNym, - ledgerStore=LedgerStore.RAW) - addAttributeAndCheck(looper, agent.client, agent.wallet, attrib) - return attrib - - -def get_request_file(fileName): - sampleDir = os.path.dirname(sample.__file__) - return os.path.join(sampleDir, fileName) - - -def agent_request_loaded(agent, request): - link = agent.load_request_file(request) - assert link - return link - - -def agent_request_link_synced(agent, - linkName, - looper): - done = False - - def cb(reply, err): - nonlocal done - assert reply - assert not err - done = True - - def checkDone(): - assert done, 'never got reply for agent connection sync' - - agent.sync(linkName, cb) - looper.run(eventually(checkDone)) - - link = agent.wallet.getConnection(linkName, required=True) - assert link - ep = link.remoteEndPoint - assert ep - assert len(ep) == 2 diff --git a/indy_client/test/agent/faber.py b/indy_client/test/agent/faber.py deleted file mode 100644 index b9ff344ad..000000000 --- a/indy_client/test/agent/faber.py +++ /dev/null @@ -1,137 +0,0 @@ -import os -from anoncreds.protocol.exceptions import SchemaNotFoundError - -from indy_common.config_util import getConfig -from plenum.common.signer_did import DidSigner -from indy_client.agent.helper import bootstrap_schema, buildAgentWallet -from indy_client.client.wallet.wallet import Wallet -from indy_client.test.client.TestClient import TestClient -from indy_client.test.constants import primes - -from stp_core.common.log import getlogger -from indy_client.agent.runnable_agent import RunnableAgent -from indy_client.agent.agent import create_client -from indy_client.test.agent.mock_backend_system import MockBackendSystem - -from anoncreds.protocol.types import AttribType, AttribDef, 
ID, SchemaKey -from indy_client.agent.walleted_agent import WalletedAgent - -logger = getlogger() - -FABER_SEED = b'Faber000000000000000000000000000' -FABER_SIGNER = DidSigner(seed=FABER_SEED) -FABER_ID = FABER_SIGNER.identifier -FABER_VERKEY = FABER_SIGNER.verkey - - -def create_faber(name=None, wallet=None, base_dir_path=None, - port=5555, client=None): - - if client is None: - client = create_client(base_dir_path=base_dir_path, client_class=TestClient) - - endpoint_args = {'onlyListener': True} - if wallet: - endpoint_args['seed'] = wallet._signerById(wallet.defaultId).seed - else: - wallet = Wallet(name) - wallet.addIdentifier(signer=FABER_SIGNER) - endpoint_args['seed'] = FABER_SEED - - agent = WalletedAgent(name=name or "Faber College", - basedirpath=base_dir_path, - client=client, - wallet=wallet, - port=port, - endpointArgs=endpoint_args) - - agent._invites = { - "b1134a647eb818069c089e7694f63e6d": (1, "Alice"), - "2a2eb72eca8b404e8d412c5bf79f2640": (2, "Carol"), - "7513d1397e87cada4214e2a650f603eb": (3, "Frank"), - "710b78be79f29fc81335abaa4ee1c5e8": (4, "Bob") - } - - transcript_def = AttribDef('Transcript', - [AttribType('student_name', encode=True), - AttribType('ssn', encode=True), - AttribType('degree', encode=True), - AttribType('year', encode=True), - AttribType('status', encode=True)]) - - agent.add_attribute_definition(transcript_def) - - backend = MockBackendSystem(transcript_def) - - backend.add_record(1, - student_name="Alice Garcia", - ssn="123-45-6789", - degree="Bachelor of Science, Marketing", - year="2015", - status="graduated") - - backend.add_record(2, - student_name="Carol Atkinson", - ssn="783-41-2695", - degree="Bachelor of Science, Physics", - year="2012", - status="graduated") - - backend.add_record(3, - student_name="Frank Jeffrey", - ssn="996-54-1211", - degree="Bachelor of Arts, History", - year="2013", - status="dropped") - - backend.add_record(4, - student_name="Bob Richards", - ssn="151-44-5876", - degree="MBA, Finance", - 
year="2015", - status="graduated") - - agent.set_issuer_backend(backend) - - return agent - - -async def bootstrap_faber(agent): - schema_id = ID(SchemaKey("Transcript", "1.2", - "FuN98eH2eZybECWkofW6A9BKJxxnTatBCopfUiNxo6ZB")) - - try: - schema = await agent.issuer.wallet.getSchema(schema_id) - except SchemaNotFoundError: - schema_id = await bootstrap_schema(agent, - 'Transcript', - 'Transcript', - '1.2', - primes["prime1"][0], - primes["prime1"][1]) - - await agent._set_available_claim_by_internal_id(1, schema_id) - await agent._set_available_claim_by_internal_id(2, schema_id) - await agent._set_available_claim_by_internal_id(3, schema_id) - await agent._set_available_claim_by_internal_id(4, schema_id) - - -if __name__ == "__main__": - args = RunnableAgent.parser_cmd_args() - name = "Faber College" - port = args.port - if port is None: - port = 5555 - network = args.network or 'sandbox' - with_cli = args.withcli - - config = getConfig() - base_dir_path = os.path.expanduser( - os.path.join( - config.CLI_NETWORK_DIR, network - )) - - agent = create_faber(name=name, wallet=buildAgentWallet( - name, FABER_SEED), base_dir_path=base_dir_path, port=port) - RunnableAgent.run_agent( - agent, bootstrap=bootstrap_faber(agent), with_cli=with_cli) diff --git a/indy_client/test/agent/helper.py b/indy_client/test/agent/helper.py deleted file mode 100644 index 77848b6ea..000000000 --- a/indy_client/test/agent/helper.py +++ /dev/null @@ -1,66 +0,0 @@ -import argparse -import sys - -from indy_client.agent.helper import buildAgentWallet -from indy_client.test import waits -from stp_core.loop.eventually import eventually -from indy_client.agent.run_agent import runAgent - - -def connectAgents(agent1, agent2): - e1 = agent1.endpoint - e2 = agent2.endpoint - e1.connectTo(e2.ha) - - -def ensureAgentConnected(looper, agent, link): - linkHa = link.getRemoteEndpoint(required=True) - - def _checkConnected(): - assert agent.endpoint.isConnectedTo(ha=linkHa) - - timeout = 
waits.expectedAgentConnected() - looper.run(eventually(_checkConnected, timeout=timeout)) - - -def getAgentCmdLineParams(): - if sys.stdin.isatty(): - parser = argparse.ArgumentParser( - description="Starts agents with given port, cred def and issuer seq") - - parser.add_argument('--port', required=False, - help='port where agent will listen') - - parser.add_argument('--withcli', - help='if given, agent will start in cli mode', - action='store_true') - - args = parser.parse_args() - port = int(args.port) if args.port else None - with_cli = args.withcli - return port, with_cli - else: - return None, False - - -def buildFaberWallet(): - return buildAgentWallet( - "FaberCollege", b'Faber000000000000000000000000000') - - -def buildAcmeWallet(): - return buildAgentWallet("AcmeCorp", b'Acme0000000000000000000000000000') - - -def buildThriftWallet(): - return buildAgentWallet("ThriftBank", b'Thrift00000000000000000000000000') - - -def startAgent(looper, agent, wallet, bootstrap=None): - agent = agent - wallet.pendSyncRequests() - prepared = wallet.preparePending() - agent.client.submitReqs(*prepared) - - runAgent(agent, looper, bootstrap=bootstrap) - return agent, wallet diff --git a/indy_client/test/agent/issuer_wallet_from_minimal_go_live b/indy_client/test/agent/issuer_wallet_from_minimal_go_live deleted file mode 100755 index d0c2a35b8..000000000 --- a/indy_client/test/agent/issuer_wallet_from_minimal_go_live +++ /dev/null @@ -1,1252 +0,0 @@ -{ - "_pks": { - - }, - "_schemasByKey": { - - }, - "_tails": { - - }, - "_sks": { - - }, - "_accumPks": { - - }, - "_pkRs": { - - }, - "_schemasById": { - - }, - "_attributes": { - - }, - "_repo": { - "client": null, - "py/object": "sovrin_client.anon_creds.sovrin_public_repo.SovrinPublicRepo", - "wallet": { - "_trustAnchored": { - - }, - "defaultId": "EqwgnuqynKnRRQ6q5Lzov8", - "_prepared": { - "json://{\"py/tuple\": [\"EqwgnuqynKnRRQ6q5Lzov8\", 1503393560772137]}": { - "py/tuple": [{ - "py/object": 
"sovrin_common.types.Request", - "py/state": { - "identifier": "EqwgnuqynKnRRQ6q5Lzov8", - "reqId": 1503393560772137, - "signature": "2V7Wn6ttVs2duQdwk3PQcy7QWepsEJP5JfvCYBueMrFtvzxdPfFVu6FDT7rbryWrBxELqudnDp4fgvaYVKFGk1rA", - "digest": "a9e2ee0f09931317a70d66d9c4b7ec60239207ce761c2110d61960401c940aac", - "operation": { - "type": "105", - "dest": "ULtgFQJe6bjiFbs7ke3NJD" - } - } - }, - null] - }, - "json://{\"py/tuple\": [\"EqwgnuqynKnRRQ6q5Lzov8\", 1503393560775165]}": { - "py/tuple": [{ - "py/object": "sovrin_common.types.Request", - "py/state": { - "identifier": "EqwgnuqynKnRRQ6q5Lzov8", - "reqId": 1503393560775165, - "signature": "5e3bm44RSwmCfY1YxdcP9RYnqk6x87rrnqBYBK3hcZpinR5CwcSsWipjoJmgApfDVaNLQ9MPuPrRyLSMd5LZeiWB", - "digest": "00bdd941ab3aba82db9823088400bf1cf5f9bfa7fe32ea41b440a864e2cd02c6", - "operation": { - "raw": "endpoint", - "type": "104", - "dest": "ULtgFQJe6bjiFbs7ke3NJD" - } - } - }, - { - "py/tuple": ["endpoint", - null, - "ULtgFQJe6bjiFbs7ke3NJD"] - }] - }, - "json://{\"py/tuple\": [\"EqwgnuqynKnRRQ6q5Lzov8\", 1503394271312644]}": { - "py/tuple": [{ - "py/object": "sovrin_common.types.Request", - "py/state": { - "identifier": "EqwgnuqynKnRRQ6q5Lzov8", - "reqId": 1503394271312644, - "signature": "4gswnonQKmTMBgEPHkoJcV7PSoQaBZmGE9AbEvxEXhDyvNCUCXiv68MndpSnLqPLTdVeqgX83xEKzeqCdH8B8rv7", - "digest": "12bbf4070b7947ff04a7c8fcd14d49a5c9fb155fbe57f2a07bfab83d776cf07f", - "operation": { - "type": "105", - "dest": "H2aKRiDeq8aLZSydQMDbtf" - } - } - }, - null] - }, - "json://{\"py/tuple\": [\"EqwgnuqynKnRRQ6q5Lzov8\", 1503394162422414]}": { - "py/tuple": [{ - "py/object": "sovrin_common.types.Request", - "py/state": { - "identifier": "EqwgnuqynKnRRQ6q5Lzov8", - "reqId": 1503394162422414, - "signature": "5zjYPBSXPARb2SafWMPWo7gtxZ4HaFHJ4FLZ6phSdbRFEvXetSpYyeqjMZfDtL9czsh8tKiHbyA2Y6Qhkbvypymu", - "digest": "bb97a580530e3c6e12d905c71f03355eeaa7702858c38d1912db02afeaa18782", - "operation": { - "signature_type": "CL", - "origin": "CzkavE58zgX7rUMrzSinLr", - 
"ref": 25, - "type": "108" - } - } - }, - null] - }, - "json://{\"py/tuple\": [\"EqwgnuqynKnRRQ6q5Lzov8\", 1503393863127405]}": { - "py/tuple": [{ - "py/object": "sovrin_common.types.Request", - "py/state": { - "identifier": "EqwgnuqynKnRRQ6q5Lzov8", - "reqId": 1503393863127405, - "signature": "ytjpREq53xGrQ6BrKWA1waHCUx9szFxTcKLtdikjJimBiFLDE9vAkPw8WqX86xqiQJCEUM2ovBLcPAbEZGRGK4L", - "digest": "1963349fd52a613ed6144a7e06c26bc11112ed0dd8dafa115c9f5d6a788e16b3", - "operation": { - "type": "107", - "dest": "ULtgFQJe6bjiFbs7ke3NJD", - "data": { - "version": "1.2", - "name": "Transcript" - } - } - } - }, - null] - }, - "json://{\"py/tuple\": [\"EqwgnuqynKnRRQ6q5Lzov8\", 1503393880969848]}": { - "py/tuple": [{ - "py/object": "sovrin_common.types.Request", - "py/state": { - "identifier": "EqwgnuqynKnRRQ6q5Lzov8", - "reqId": 1503393880969848, - "signature": "3rrEpTStmVjn3Xvbdpr43TUo2TKynjK9V9DrvCphgfm4dkpn8qHtkP3NvBXVvp7ACfBYRMZnjLZnhEafb3csqs9v", - "digest": "e33da6465da0ac0e5282bee3723f6aff9a9f78e77537394e5e3e1248919b2340", - "operation": { - "signature_type": "CL", - "origin": "ULtgFQJe6bjiFbs7ke3NJD", - "ref": 23, - "type": "108" - } - } - }, - null] - }, - "json://{\"py/tuple\": [\"EqwgnuqynKnRRQ6q5Lzov8\", 1503393964832777]}": { - "py/tuple": [{ - "py/object": "sovrin_common.types.Request", - "py/state": { - "identifier": "EqwgnuqynKnRRQ6q5Lzov8", - "reqId": 1503393964832777, - "signature": "48Di1kbwoptkT94K5fioucDbGrityYieHXimQMeFdokVE9MJt9SPRmFjvpYGANwneZ9nr2Tz88PXygF9kfmvHfwk", - "digest": "114fa090c927acb2457a3725a455e4b371b2a89775efa0eaba1f7ad8b429c236", - "operation": { - "raw": "endpoint", - "type": "104", - "dest": "CzkavE58zgX7rUMrzSinLr" - } - } - }, - { - "py/tuple": ["endpoint", - null, - "CzkavE58zgX7rUMrzSinLr"] - }] - }, - "json://{\"py/tuple\": [\"EqwgnuqynKnRRQ6q5Lzov8\", 1503393966193786]}": { - "py/tuple": [{ - "py/object": "sovrin_common.types.Request", - "py/state": { - "identifier": "EqwgnuqynKnRRQ6q5Lzov8", - "reqId": 1503393966193786, - 
"signature": "HVANkfC1eBJWciFLbFR2UbrAtfx2hefLiMsnDDRecsENZ2qFWYhrJTc2EEtxsMiQpsg6fmhVk89Fgywbqj34eNs", - "digest": "2c7739b9745fdacc4c1b1f52ac357ca71ff0c31fad357070a6fc4d7c16349bc5", - "operation": { - "type": "105", - "dest": "CkQZNWaAQSLyY5iqw6KTx" - } - } - }, - null] - }, - "json://{\"py/tuple\": [\"EqwgnuqynKnRRQ6q5Lzov8\", 1503393562115741]}": { - "py/tuple": [{ - "py/object": "sovrin_common.types.Request", - "py/state": { - "identifier": "EqwgnuqynKnRRQ6q5Lzov8", - "reqId": 1503393562115741, - "signature": "22PH7P298PC5BokAKrGHPrFGTnMAVB5ktZnER7hKo93g9HcucVpwiERpFRFb5HPT5eyoK1bbDxN4gtBnqpQ3nTmR", - "digest": "4cde0c80da76289617c5bb0a1780713e6367f90076f4027dda172796323abf09", - "operation": { - "type": "105", - "dest": "BpbaEBJ16MoXpv7GU4y9u1" - } - } - }, - null] - }, - "json://{\"py/tuple\": [\"EqwgnuqynKnRRQ6q5Lzov8\", 1503393964831448]}": { - "py/tuple": [{ - "py/object": "sovrin_common.types.Request", - "py/state": { - "identifier": "EqwgnuqynKnRRQ6q5Lzov8", - "reqId": 1503393964831448, - "signature": "2Z1tTLCvBQcNvpv6UysYjr3VRWPoyP6bNG9ou1kPjFxhJzsDSNhuwTXzQfnuGedfwSoE2QaWsrowdFfZxSTmDtvu", - "digest": "2d2ec6bcc0cfcd3080622ec01fe7e11c37cd3352bb7dd6d8f3559e4b2f12d091", - "operation": { - "type": "105", - "dest": "CzkavE58zgX7rUMrzSinLr" - } - } - }, - null] - }, - "json://{\"py/tuple\": [\"EqwgnuqynKnRRQ6q5Lzov8\", 1503394272580665]}": { - "py/tuple": [{ - "py/object": "sovrin_common.types.Request", - "py/state": { - "identifier": "EqwgnuqynKnRRQ6q5Lzov8", - "reqId": 1503394272580665, - "signature": "4eXTotvTstApaasdjaY7jxWytSrEd6WK1Sd7cmx7KmtPTubboPP84TXN3BS1c8HGn7ZkgNfj6hbMKCDPVYpZoS1p", - "digest": "071ca10cac748707babdff1554610db1b78b018b30f3d70f84fde3df3dfbd903", - "operation": { - "type": "105", - "dest": "6KYi5Pb7rcvJb8ZwKb2nvm" - } - } - }, - null] - }, - "json://{\"py/tuple\": [\"EqwgnuqynKnRRQ6q5Lzov8\", 1503394135766499]}": { - "py/tuple": [{ - "py/object": "sovrin_common.types.Request", - "py/state": { - "identifier": 
"EqwgnuqynKnRRQ6q5Lzov8", - "reqId": 1503394135766499, - "signature": "4D63kic9Yktbk9ETeNCaRRptCoBb7ULR6Qak4vWpCeFX3RSW3V8odA3idxqHipupuez8iCvYS4rbfrnAjTMwaZm3", - "digest": "610d3a81fff0444906a729662386396f22307f3270b012a48264b8a3e04d6783", - "operation": { - "type": "107", - "dest": "CzkavE58zgX7rUMrzSinLr", - "data": { - "version": "0.2", - "name": "Job-Certificate" - } - } - } - }, - null] - }, - "json://{\"py/tuple\": [\"EqwgnuqynKnRRQ6q5Lzov8\", 1503394271313999]}": { - "py/tuple": [{ - "py/object": "sovrin_common.types.Request", - "py/state": { - "identifier": "EqwgnuqynKnRRQ6q5Lzov8", - "reqId": 1503394271313999, - "signature": "GH5bFtcozc7mXX1QxmhE3BLnGrfyVk9GPUvCpNHwQzg3yd3YbL7p17PiYJjx3U6JtjFPiNhSnTb53JRDePpfo45", - "digest": "e6a5dcd5794ef39b40aaca7e7084dbbf7123a5a0b9fb2a7f8b1d7598733d2f0a", - "operation": { - "raw": "endpoint", - "type": "104", - "dest": "H2aKRiDeq8aLZSydQMDbtf" - } - } - }, - { - "py/tuple": ["endpoint", - null, - "H2aKRiDeq8aLZSydQMDbtf"] - }] - } - }, - "knownIds": { - "CkQZNWaAQSLyY5iqw6KTx": { - "last_synced": { - "__reduce__": [{ - "py/type": "datetime.datetime" - }, - ["B+EIFgkaBgS9vQ=="]], - "py/object": "datetime.datetime" - }, - "identity": { - "_verkey": "EbzRNZFQKP5jsXrKXrLWkC", - "py/object": "plenum.common.signer_did.DidIdentity", - "abbreviated": true, - "_identifier": "CkQZNWaAQSLyY5iqw6KTx" - }, - "trustAnchor": "CzkavE58zgX7rUMrzSinLr", - "_role": null, - "seqNo": null, - "py/object": "sovrin_common.identity.Identity" - }, - "6KYi5Pb7rcvJb8ZwKb2nvm": { - "last_synced": { - "__reduce__": [{ - "py/type": "datetime.datetime" - }, - ["B+EIFgkfDAqr5Q=="]], - "py/object": "datetime.datetime" - }, - "identity": { - "_verkey": "6BFmCCvvtJBjYtYLCDVj37", - "py/object": "plenum.common.signer_did.DidIdentity", - "abbreviated": true, - "_identifier": "6KYi5Pb7rcvJb8ZwKb2nvm" - }, - "trustAnchor": "H2aKRiDeq8aLZSydQMDbtf", - "_role": null, - "seqNo": null, - "py/object": "sovrin_common.identity.Identity" - }, - 
"BpbaEBJ16MoXpv7GU4y9u1": { - "last_synced": { - "__reduce__": [{ - "py/type": "datetime.datetime" - }, - ["B+EIFgkTFgPa5g=="]], - "py/object": "datetime.datetime" - }, - "identity": { - "_verkey": "JkkV9B3Z2vsKWwThFLG2cN", - "py/object": "plenum.common.signer_did.DidIdentity", - "abbreviated": true, - "_identifier": "BpbaEBJ16MoXpv7GU4y9u1" - }, - "trustAnchor": "ULtgFQJe6bjiFbs7ke3NJD", - "_role": null, - "seqNo": null, - "py/object": "sovrin_common.identity.Identity" - }, - "ULtgFQJe6bjiFbs7ke3NJD": { - "last_synced": { - "__reduce__": [{ - "py/type": "datetime.datetime" - }, - ["B+EIFgkTFA2q1Q=="]], - "py/object": "datetime.datetime" - }, - "identity": { - "_verkey": "5kh3FB4H3NKq7tUDqeqHc1", - "py/object": "plenum.common.signer_did.DidIdentity", - "abbreviated": true, - "_identifier": "ULtgFQJe6bjiFbs7ke3NJD" - }, - "trustAnchor": "Th7MpTaRZVRYnPiabds81Y", - "_role": "101", - "seqNo": null, - "py/object": "sovrin_common.identity.Identity" - }, - "H2aKRiDeq8aLZSydQMDbtf": { - "last_synced": { - "__reduce__": [{ - "py/type": "datetime.datetime" - }, - ["B+EIFgkfCwejBQ=="]], - "py/object": "datetime.datetime" - }, - "identity": { - "_verkey": "3sphzTb2itL2mwSeJ1Ji28", - "py/object": "plenum.common.signer_did.DidIdentity", - "abbreviated": true, - "_identifier": "H2aKRiDeq8aLZSydQMDbtf" - }, - "trustAnchor": "Th7MpTaRZVRYnPiabds81Y", - "_role": "101", - "seqNo": null, - "py/object": "sovrin_common.identity.Identity" - }, - "CzkavE58zgX7rUMrzSinLr": { - "last_synced": { - "__reduce__": [{ - "py/type": "datetime.datetime" - }, - ["B+EIFgkaBA622w=="]], - "py/object": "datetime.datetime" - }, - "identity": { - "_verkey": "WjXEvZ9xj4Tz9sLtzf7HVP", - "py/object": "plenum.common.signer_did.DidIdentity", - "abbreviated": true, - "_identifier": "CzkavE58zgX7rUMrzSinLr" - }, - "trustAnchor": "Th7MpTaRZVRYnPiabds81Y", - "_role": "101", - "seqNo": null, - "py/object": "sovrin_common.identity.Identity" - } - }, - "idsToSigners": { - "BpbaEBJ16MoXpv7GU4y9u1": { - "seed": { - 
"py/b64": "snguqTspwh7eX5XiHoFcJsO3fwWNFMo8+0UP3Z8XtI4=\n" - }, - "sk": { - "py/id": 79 - }, - "naclSigner": { - "keyhex": { - "py/b64": "YjI3ODJlYTkzYjI5YzIxZWRlNWY5NWUyMWU4MTVjMjZjM2I3N2YwNThkMTRjYTNjZmI0NTBmZGQ5\nZjE3YjQ4ZQ==\n" - }, - "keyraw": { - "py/b64": "snguqTspwh7eX5XiHoFcJsO3fwWNFMo8+0UP3Z8XtI4=\n" - }, - "verhex": { - "py/b64": "NTdhMGE2NmMwNzU0MDA0M2NlMjUwZjA2YmUxMDFjNzg4ZmM3MjM1NWUyMWViOThjY2ZiMzUzZjRh\nMTYzZGY2Zg==\n" - }, - "key": { - "_signing_key": { - "py/b64": "snguqTspwh7eX5XiHoFcJsO3fwWNFMo8+0UP3Z8XtI5XoKZsB1QAQ84lDwa+EBx4j8cjVeIeuYzP\ns1P0oWPfbw==\n" - }, - "verify_key": { - "_key": { - "py/b64": "V6CmbAdUAEPOJQ8GvhAceI/HI1XiHrmMz7NT9KFj328=\n" - }, - "py/object": "stp_core.crypto.nacl_wrappers.VerifyKey" - }, - "py/object": "stp_core.crypto.nacl_wrappers.SigningKey", - "_seed": { - "py/b64": "snguqTspwh7eX5XiHoFcJsO3fwWNFMo8+0UP3Z8XtI4=\n" - } - }, - "py/object": "stp_core.crypto.nacl_wrappers.Signer", - "verraw": { - "py/b64": "V6CmbAdUAEPOJQ8GvhAceI/HI1XiHrmMz7NT9KFj328=\n" - } - }, - "_alias": null, - "py/object": "plenum.common.signer_did.DidSigner", - "_verkey": "JkkV9B3Z2vsKWwThFLG2cN", - "abbreviated": true, - "_identifier": "BpbaEBJ16MoXpv7GU4y9u1" - }, - "EqwgnuqynKnRRQ6q5Lzov8": { - "py/id": 69 - }, - "6KYi5Pb7rcvJb8ZwKb2nvm": { - "seed": { - "py/b64": "2JWwi9rB6gKH3tZ+zhBy1YBED1ex5vvLxx4N8y0z1g4=\n" - }, - "sk": { - "py/id": 75 - }, - "naclSigner": { - "keyhex": { - "py/b64": "ZDg5NWIwOGJkYWMxZWEwMjg3ZGVkNjdlY2UxMDcyZDU4MDQ0MGY1N2IxZTZmYmNiYzcxZTBkZjMy\nZDMzZDYwZQ==\n" - }, - "keyraw": { - "py/b64": "2JWwi9rB6gKH3tZ+zhBy1YBED1ex5vvLxx4N8y0z1g4=\n" - }, - "verhex": { - "py/b64": "MmIxNDlmM2Q5M2NhYTMxMzE2ZTZiYzYyNGI5NjhmOGEyOWVjM2E3NmQ4OTI5Nzc4OGQzY2ZjMDRm\nOTczNmNhMg==\n" - }, - "key": { - "_signing_key": { - "py/b64": "2JWwi9rB6gKH3tZ+zhBy1YBED1ex5vvLxx4N8y0z1g4rFJ89k8qjExbmvGJLlo+KKew6dtiSl3iN\nPPwE+XNsog==\n" - }, - "verify_key": { - "_key": { - "py/b64": "KxSfPZPKoxMW5rxiS5aPiinsOnbYkpd4jTz8BPlzbKI=\n" - }, - "py/object": 
"stp_core.crypto.nacl_wrappers.VerifyKey" - }, - "py/object": "stp_core.crypto.nacl_wrappers.SigningKey", - "_seed": { - "py/b64": "2JWwi9rB6gKH3tZ+zhBy1YBED1ex5vvLxx4N8y0z1g4=\n" - } - }, - "py/object": "stp_core.crypto.nacl_wrappers.Signer", - "verraw": { - "py/b64": "KxSfPZPKoxMW5rxiS5aPiinsOnbYkpd4jTz8BPlzbKI=\n" - } - }, - "_alias": null, - "py/object": "plenum.common.signer_did.DidSigner", - "_verkey": "6BFmCCvvtJBjYtYLCDVj37", - "abbreviated": true, - "_identifier": "6KYi5Pb7rcvJb8ZwKb2nvm" - }, - "CkQZNWaAQSLyY5iqw6KTx": { - "seed": { - "py/b64": "bSVU7UUzWzTjQc66W2+rz/GOf7bjdzzfbxbPtjAuR/w=\n" - }, - "sk": { - "py/id": 83 - }, - "naclSigner": { - "keyhex": { - "py/b64": "NmQyNTU0ZWQ0NTMzNWIzNGUzNDFjZWJhNWI2ZmFiY2ZmMThlN2ZiNmUzNzczY2RmNmYxNmNmYjYz\nMDJlNDdmYw==\n" - }, - "keyraw": { - "py/b64": "bSVU7UUzWzTjQc66W2+rz/GOf7bjdzzfbxbPtjAuR/w=\n" - }, - "verhex": { - "py/b64": "MDFhM2VlNzUyY2QyYTczMDViODFlNTFmY2E4NjQyMGI2ZTI5N2EyYWQzZDMzN2I0NDUyMDVmNTQw\nMDg0NmRhNQ==\n" - }, - "key": { - "_signing_key": { - "py/b64": "bSVU7UUzWzTjQc66W2+rz/GOf7bjdzzfbxbPtjAuR/wBo+51LNKnMFuB5R/KhkILbil6KtPTN7RF\nIF9UAIRtpQ==\n" - }, - "verify_key": { - "_key": { - "py/b64": "AaPudSzSpzBbgeUfyoZCC24peirT0ze0RSBfVACEbaU=\n" - }, - "py/object": "stp_core.crypto.nacl_wrappers.VerifyKey" - }, - "py/object": "stp_core.crypto.nacl_wrappers.SigningKey", - "_seed": { - "py/b64": "bSVU7UUzWzTjQc66W2+rz/GOf7bjdzzfbxbPtjAuR/w=\n" - } - }, - "py/object": "stp_core.crypto.nacl_wrappers.Signer", - "verraw": { - "py/b64": "AaPudSzSpzBbgeUfyoZCC24peirT0ze0RSBfVACEbaU=\n" - } - }, - "_alias": null, - "py/object": "plenum.common.signer_did.DidSigner", - "_verkey": "EbzRNZFQKP5jsXrKXrLWkC", - "abbreviated": true, - "_identifier": "CkQZNWaAQSLyY5iqw6KTx" - } - }, - "_attributes": { - "json://{\"py/tuple\": [\"endpoint\", null, \"H2aKRiDeq8aLZSydQMDbtf\"]}": { - "value": "{\"endpoint\": {\"ha\": \"10.0.0.4:7777\", \"pubkey\": \"AGBjYvyM3SFnoiDGAEzkSLHvqyzVkXeMZfKDvdpEsC2x\"}}", - "origin": null, - 
"seqNo": 22, - "encKey": null, - "name": "endpoint", - "dest": "H2aKRiDeq8aLZSydQMDbtf", - "py/object": "sovrin_client.client.wallet.attribute.Attribute", - "ledgerStore": { - "py/id": 4 - } - }, - "json://{\"py/tuple\": [\"endpoint\", null, \"CzkavE58zgX7rUMrzSinLr\"]}": { - "value": "{\"endpoint\": {\"ha\": \"10.0.0.3:6666\", \"pubkey\": \"C5eqjU7NMVMGGfGfx2ubvX5H9X346bQt5qeziVAo3naQ\"}}", - "origin": null, - "seqNo": 19, - "encKey": null, - "name": "endpoint", - "dest": "CzkavE58zgX7rUMrzSinLr", - "py/object": "sovrin_client.client.wallet.attribute.Attribute", - "ledgerStore": { - "py/object": "sovrin_client.client.wallet.attribute.LedgerStore", - "py/enumvalue": 4 - } - }, - "json://{\"py/tuple\": [\"endpoint\", null, \"ULtgFQJe6bjiFbs7ke3NJD\"]}": { - "value": "{\"endpoint\": {\"ha\": \"10.0.0.2:5555\", \"pubkey\": \"5hmMA64DDQz5NzGJNVtRzNwpkZxktNQds21q3Wxxa62z\"}}", - "origin": null, - "seqNo": 17, - "encKey": null, - "name": "endpoint", - "dest": "ULtgFQJe6bjiFbs7ke3NJD", - "py/object": "sovrin_client.client.wallet.attribute.Attribute", - "ledgerStore": { - "py/id": 4 - } - } - }, - "aliasesToIds": { - - }, - "_upgrades": { - - }, - "env": "test", - "replyHandler": { - - }, - "_name": "Default", - "didMethods": { - "default": { - "py/id": 67 - }, - "py/object": "plenum.common.did_method.DidMethods", - "d": { - "sovrin": { - "signerConstructor": { - "py/type": "plenum.common.signer_did.DidSigner" - }, - "pattern": "did:sovrin:", - "py/object": "plenum.common.did_method.DidMethod", - "name": "sovrin" - } - } - }, - "_pending": { - "py/reduce": [{ - "py/type": "collections.deque" - }, - { - "py/tuple": [[]] - }, - null, - null, - null] - }, - "py/object": "sovrin_client.client.wallet.wallet.Wallet", - "_pconfigs": { - - }, - "_nodes": { - - }, - "lastKnownSeqs": { - - }, - "_connections": { - "py/reduce": [{ - "py/type": "collections.OrderedDict" - }, - { - "py/tuple": [] - }, - null, - null, - { - "py/tuple": [{ - "py/tuple": ["Faber College", - { - 
"remotePubkey": "5hmMA64DDQz5NzGJNVtRzNwpkZxktNQds21q3Wxxa62z", - "remoteIdentifier": "ULtgFQJe6bjiFbs7ke3NJD", - "linkStatus": "Accepted", - "localIdentifier": "BpbaEBJ16MoXpv7GU4y9u1", - "trustAnchor": "Faber College", - "linkLastSyncNo": null, - "verifiedClaimProofs": [], - "name": "Faber College", - "internalId": null, - "availableClaims": [{ - "py/seq": ["Transcript", - "1.2", - "ULtgFQJe6bjiFbs7ke3NJD"], - "py/object": "anoncreds.protocol.types.AvailableClaim", - "py/newargs": { - "py/tuple": ["Transcript", - "1.2", - "ULtgFQJe6bjiFbs7ke3NJD"] - } - }], - "connectionLastSynced": { - "__reduce__": [{ - "py/type": "datetime.datetime" - }, - ["B+EIFgwTFA8NmQ=="]], - "py/object": "datetime.datetime" - }, - "invitationNonce": "b1134a647eb818069c089e7694f63e6d", - "localVerkey": "~JkkV9B3Z2vsKWwThFLG2cN", - "_remoteVerkey": "~5kh3FB4H3NKq7tUDqeqHc1", - "proofRequests": [], - "remoteEndPoint": { - "py/tuple": ["10.0.0.2", - 5555] - }, - "py/object": "sovrin_client.client.wallet.link.Link" - }] - }, - { - "py/tuple": ["Acme Corp", - { - "remotePubkey": "C5eqjU7NMVMGGfGfx2ubvX5H9X346bQt5qeziVAo3naQ", - "remoteIdentifier": "CzkavE58zgX7rUMrzSinLr", - "linkStatus": "Accepted", - "localIdentifier": "CkQZNWaAQSLyY5iqw6KTx", - "trustAnchor": "Acme Corp", - "linkLastSyncNo": null, - "verifiedClaimProofs": [], - "name": "Acme Corp", - "internalId": null, - "availableClaims": [{ - "py/seq": ["Job-Certificate", - "0.2", - "CzkavE58zgX7rUMrzSinLr"], - "py/object": "anoncreds.protocol.types.AvailableClaim", - "py/newargs": { - "py/tuple": ["Job-Certificate", - "0.2", - "CzkavE58zgX7rUMrzSinLr"] - } - }], - "connectionLastSynced": { - "__reduce__": [{ - "py/type": "datetime.datetime" - }, - ["B+EIFgwaBQDmfw=="]], - "py/object": "datetime.datetime" - }, - "invitationNonce": "57fbf9dc8c8e6acde33de98c6d747b28c", - "localVerkey": "~EbzRNZFQKP5jsXrKXrLWkC", - "_remoteVerkey": "~WjXEvZ9xj4Tz9sLtzf7HVP", - "proofRequests": [{ - "ts": null, - "attributes": { - "py/reduce": [{ - 
"py/type": "collections.OrderedDict" - }, - { - "py/tuple": [] - }, - null, - null, - { - "py/tuple": [{ - "py/tuple": ["first_name", - "Alice"] - }, - { - "py/tuple": ["last_name", - "Garcia"] - }, - { - "py/tuple": ["phone_number", - "123-45-6789"] - }, - { - "py/tuple": ["degree", - "Bachelor of Science, Marketing"] - }, - { - "py/tuple": ["status", - "graduated"] - }, - { - "py/tuple": ["ssn", - "123-45-6789"] - }] - }] - }, - "verifiableAttributes": { - "10a7586a-42ce-4a6b-b94f-185be4cddcaf": { - "py/seq": ["status", - null, - null], - "py/object": "anoncreds.protocol.types.AttributeInfo", - "py/newargs": { - "py/tuple": ["status", - null, - null] - } - }, - "7f1eec2c-2fbb-43f3-9daa-249b4682d364": { - "py/seq": ["ssn", - null, - null], - "py/object": "anoncreds.protocol.types.AttributeInfo", - "py/newargs": { - "py/tuple": ["ssn", - null, - null] - } - }, - "aa241ad1-b3cc-476e-9ad4-01b91f9d4441": { - "py/seq": ["degree", - null, - null], - "py/object": "anoncreds.protocol.types.AttributeInfo", - "py/newargs": { - "py/tuple": ["degree", - null, - null] - } - } - }, - "selfAttestedAttrs": { - "first_name": "Alice", - "last_name": "Garcia", - "phone_number": "123-45-6789" - }, - "name": "Job-Application", - "fulfilledByClaims": [{ - "py/tuple": [{ - "py/id": 8 - }, - { - "py/id": 10 - }, - { - "py/reduce": [{ - "py/type": "collections.OrderedDict" - }, - { - "py/tuple": [] - }, - null, - null, - { - "py/tuple": [{ - "py/tuple": ["student_name", - "Alice Garcia"] - }, - { - "py/tuple": ["ssn", - "123-45-6789"] - }, - { - "py/tuple": ["degree", - "Bachelor of Science, Marketing"] - }, - { - "py/tuple": ["year", - "2015"] - }, - { - "py/tuple": ["status", - "graduated"] - }] - }] - }] - }], - "nonce": 1871218719015472932666560146158750511756, - "predicates": { - - }, - "version": "0.2", - "py/object": "anoncreds.protocol.types.ProofRequest", - "seqNo": null - }], - "remoteEndPoint": { - "py/tuple": ["10.0.0.3", - 6666] - }, - "py/object": 
"sovrin_client.client.wallet.link.Link" - }] - }, - { - "py/tuple": ["Thrift Bank", - { - "remotePubkey": "AGBjYvyM3SFnoiDGAEzkSLHvqyzVkXeMZfKDvdpEsC2x", - "remoteIdentifier": "H2aKRiDeq8aLZSydQMDbtf", - "linkStatus": "Accepted", - "localIdentifier": "6KYi5Pb7rcvJb8ZwKb2nvm", - "trustAnchor": "Thrift Bank", - "linkLastSyncNo": null, - "verifiedClaimProofs": [], - "name": "Thrift Bank", - "internalId": null, - "availableClaims": [], - "connectionLastSynced": { - "__reduce__": [{ - "py/type": "datetime.datetime" - }, - ["B+EIFgwfCwgQDg=="]], - "py/object": "datetime.datetime" - }, - "invitationNonce": "77fbf9dc8c8e6acde33de98c6d747b28c", - "localVerkey": "~6BFmCCvvtJBjYtYLCDVj37", - "_remoteVerkey": "~3sphzTb2itL2mwSeJ1Ji28", - "proofRequests": [{ - "ts": null, - "attributes": { - "py/reduce": [{ - "py/type": "collections.OrderedDict" - }, - { - "py/tuple": [] - }, - null, - null, - { - "py/tuple": [{ - "py/tuple": ["salary_bracket", - "between $50,000 to $100,000"] - }, - { - "py/tuple": ["employee_status", - "Permanent"] - }] - }] - }, - "verifiableAttributes": { - "a702a318-07ed-4d6e-a60f-57677d8fcb84": { - "py/seq": ["salary_bracket", - null, - null], - "py/object": "anoncreds.protocol.types.AttributeInfo", - "py/newargs": { - "py/tuple": ["salary_bracket", - null, - null] - } - }, - "d980fcb4-5d02-4081-a67b-d301f55d27d9": { - "py/seq": ["employee_status", - null, - null], - "py/object": "anoncreds.protocol.types.AttributeInfo", - "py/newargs": { - "py/tuple": ["employee_status", - null, - null] - } - } - }, - "selfAttestedAttrs": { - - }, - "name": "Loan-Application-Basic", - "fulfilledByClaims": [{ - "py/tuple": [{ - "py/id": 14 - }, - { - "py/id": 16 - }, - { - "py/reduce": [{ - "py/type": "collections.OrderedDict" - }, - { - "py/tuple": [] - }, - null, - null, - { - "py/tuple": [{ - "py/tuple": ["first_name", - "Alice"] - }, - { - "py/tuple": ["last_name", - "Garcia"] - }, - { - "py/tuple": ["employee_status", - "Permanent"] - }, - { - "py/tuple": 
["experience", - "3 years"] - }, - { - "py/tuple": ["salary_bracket", - "between $50,000 to $100,000"] - }] - }] - }] - }], - "nonce": 2551783452857349859593309361022286934668, - "predicates": { - - }, - "version": "0.1", - "py/object": "anoncreds.protocol.types.ProofRequest", - "seqNo": null - }, - { - "ts": null, - "attributes": { - "py/reduce": [{ - "py/type": "collections.OrderedDict" - }, - { - "py/tuple": [] - }, - null, - null, - { - "py/tuple": [{ - "py/tuple": ["first_name", - "Alice"] - }, - { - "py/tuple": ["last_name", - "Garcia"] - }, - { - "py/tuple": ["ssn", - "123-45-6789"] - }] - }] - }, - "verifiableAttributes": { - "5fabdc0c-012e-45b4-b76a-d0992e9a32d8": { - "py/seq": ["ssn", - null, - null], - "py/object": "anoncreds.protocol.types.AttributeInfo", - "py/newargs": { - "py/tuple": ["ssn", - null, - null] - } - }, - "3d41a82b-a2cf-413e-a3e1-18df836750a5": { - "py/seq": ["first_name", - null, - null], - "py/object": "anoncreds.protocol.types.AttributeInfo", - "py/newargs": { - "py/tuple": ["first_name", - null, - null] - } - }, - "dbe499d2-9cce-488c-8d8c-83e233c538ed": { - "py/seq": ["last_name", - null, - null], - "py/object": "anoncreds.protocol.types.AttributeInfo", - "py/newargs": { - "py/tuple": ["last_name", - null, - null] - } - } - }, - "selfAttestedAttrs": { - - }, - "name": "Loan-Application-KYC", - "fulfilledByClaims": [{ - "py/tuple": [{ - "py/id": 8 - }, - { - "py/id": 10 - }, - { - "py/reduce": [{ - "py/type": "collections.OrderedDict" - }, - { - "py/tuple": [] - }, - null, - null, - { - "py/tuple": [{ - "py/tuple": ["student_name", - "Alice Garcia"] - }, - { - "py/tuple": ["ssn", - "123-45-6789"] - }, - { - "py/tuple": ["degree", - "Bachelor of Science, Marketing"] - }, - { - "py/tuple": ["year", - "2015"] - }, - { - "py/tuple": ["status", - "graduated"] - }] - }] - }] - }, - { - "py/tuple": [{ - "py/id": 14 - }, - { - "py/id": 16 - }, - { - "py/reduce": [{ - "py/type": "collections.OrderedDict" - }, - { - "py/tuple": [] - }, - null, 
- null, - { - "py/tuple": [{ - "py/tuple": ["first_name", - "Alice"] - }, - { - "py/tuple": ["last_name", - "Garcia"] - }, - { - "py/tuple": ["employee_status", - "Permanent"] - }, - { - "py/tuple": ["experience", - "3 years"] - }, - { - "py/tuple": ["salary_bracket", - "between $50,000 to $100,000"] - }] - }] - }] - }], - "nonce": 2551783452857349859593309361022286934668, - "predicates": { - - }, - "version": "0.1", - "py/object": "anoncreds.protocol.types.ProofRequest", - "seqNo": null - }, - { - "ts": null, - "attributes": { - "py/reduce": [{ - "py/type": "collections.OrderedDict" - }, - { - "py/tuple": [] - }, - null, - null, - { - "py/tuple": [{ - "py/tuple": ["first_name", - "string"] - }, - { - "py/tuple": ["last_name", - "string"] - }] - }] - }, - "verifiableAttributes": { - "f2510aa7-6274-431d-8a57-69d8307f7c11": { - "py/seq": ["first_name", - null, - null], - "py/object": "anoncreds.protocol.types.AttributeInfo", - "py/newargs": { - "py/tuple": ["first_name", - null, - null] - } - }, - "d6ddc5f2-e272-47d9-96d5-777b4d89e835": { - "py/seq": ["last_name", - null, - null], - "py/object": "anoncreds.protocol.types.AttributeInfo", - "py/newargs": { - "py/tuple": ["last_name", - null, - null] - } - } - }, - "selfAttestedAttrs": { - - }, - "name": "Name-Proof", - "fulfilledByClaims": [], - "nonce": 2551783452857349859593309361022286934668, - "predicates": { - - }, - "version": "0.1", - "py/object": "anoncreds.protocol.types.ProofRequest", - "seqNo": null - }], - "remoteEndPoint": { - "py/tuple": ["10.0.0.4", - 7777] - }, - "py/object": "sovrin_client.client.wallet.link.Link" - }] - }] - }] - }, - "ids": { - "EqwgnuqynKnRRQ6q5Lzov8": { - "py/seq": [{ - "py/id": 69 - }, - 1503394272580665], - "py/object": "plenum.client.wallet.IdData", - "py/newargs": { - "py/tuple": [{ - "seed": { - "py/b64": "wAoEN+cjMPjdPkUuvI378QXO16jC1ZUUHll95hICJ1U=\n" - }, - "sk": { - "py/id": 71 - }, - "naclSigner": { - "keyhex": { - "py/b64": 
"YzAwYTA0MzdlNzIzMzBmOGRkM2U0NTJlYmM4ZGZiZjEwNWNlZDdhOGMyZDU5NTE0MWU1OTdkZTYx\nMjAyMjc1NQ==\n" - }, - "keyraw": { - "py/b64": "wAoEN+cjMPjdPkUuvI378QXO16jC1ZUUHll95hICJ1U=\n" - }, - "verhex": { - "py/b64": "NzAxYzM0N2M1ZTk1ZGFjNmU1MDAyYjkxNTQ0OWQ5OTk5ZjAyOGJkZGFlMzljZmMxMGY5MGMyODM5\nYmFiYTU2ZQ==\n" - }, - "key": { - "_signing_key": { - "py/b64": "wAoEN+cjMPjdPkUuvI378QXO16jC1ZUUHll95hICJ1VwHDR8XpXaxuUAK5FUSdmZnwKL3a45z8EP\nkMKDm6ulbg==\n" - }, - "verify_key": { - "_key": { - "py/b64": "cBw0fF6V2sblACuRVEnZmZ8Ci92uOc/BD5DCg5urpW4=\n" - }, - "py/object": "stp_core.crypto.nacl_wrappers.VerifyKey" - }, - "py/object": "stp_core.crypto.nacl_wrappers.SigningKey", - "_seed": { - "py/b64": "wAoEN+cjMPjdPkUuvI378QXO16jC1ZUUHll95hICJ1U=\n" - } - }, - "py/object": "stp_core.crypto.nacl_wrappers.Signer", - "verraw": { - "py/b64": "cBw0fF6V2sblACuRVEnZmZ8Ci92uOc/BD5DCg5urpW4=\n" - } - }, - "_alias": null, - "py/object": "plenum.common.signer_did.DidSigner", - "_verkey": "LdquEm7MuXuUCeT9vC58P7", - "abbreviated": true, - "_identifier": "EqwgnuqynKnRRQ6q5Lzov8" - }, - 1503394272580665] - } - } - } - } - }, - "_accumSks": { - - }, - "walletId": "Default", - "_accums": { - - }, - "_proofRequestsSchema": { - - }, - "availableClaimsByNonce": { - - }, - "availableClaimsByIdentifier": { - - }, - "_m2s": { - - }, - "_skRs": { - - }, - "availableClaimsToAll": [], - "py/object": "sovrin_client.anon_creds.sovrin_issuer.SovrinIssuerWalletInMemory", - "availableClaimsByInternalId": { - - } -} diff --git a/indy_client/test/agent/messages.py b/indy_client/test/agent/messages.py deleted file mode 100644 index bb8239aa0..000000000 --- a/indy_client/test/agent/messages.py +++ /dev/null @@ -1,86 +0,0 @@ -from anoncreds.protocol.utils import crypto_int_to_str, isCryptoInteger, intToArrayBytes - - -def get_claim_request_libindy_msg(claim_req, schema_seq_no): - return ({ - 'type': 'CLAIM_REQUEST', - 'data': { - 'issuer_did': 'FuN98eH2eZybECWkofW6A9BKJxxnTatBCopfUiNxo6ZB', - 'blinded_ms': { - 
'prover_did': 'b1134a647eb818069c089e7694f63e6d', - 'u': str(crypto_int_to_str(claim_req.U)), - 'ur': None - }, - 'schema_seq_no': schema_seq_no - }, - 'nonce': 'b1134a647eb818069c089e7694f63e6d', - }) - - -def get_claim_libindy_msg(signature, schema_seq_no): - return ({'type': 'CLAIM', - 'refRequestId': 1498207862797639, - 'data': { - 'claim': '{' - '"ssn": ["123-45-6789", "744326867119662813058574151710572260086480987778735990385444735594385781152"], ' - '"student_name": ["Alice Garcia", "42269428060847300013074105341288624461740820166347597208920185513943254001053"], ' - '"year": ["2015", "76155730627064255622230347398579434243999717245284701820698087443021519005597"],' - '"status": ["graduated", "79954080701401061138041003494589205197191732193019334789897013390726508263804"], ' - '"degree": ["Bachelor of Science, Marketing", "111351644242834420607747624840774158853435703856237568018084128306949040580032"]}', - 'schema_seq_no': schema_seq_no, - 'revoc_reg_seq_no': None, - 'issuer_did': 'FuN98eH2eZybECWkofW6A9BKJxxnTatBCopfUiNxo6ZB', - 'signature': { - 'non_revocation_claim': None, - 'primary_claim': { - 'm2': '{}'.format(crypto_int_to_str(signature.primaryClaim.m2)), - 'e': '{}'.format(str(signature.primaryClaim.e)), - 'v': '{}'.format(str(signature.primaryClaim.v)), - 'a': '{}'.format(crypto_int_to_str(signature.primaryClaim.A))} - } - }, - 'reqId': 1498207879197729, - 'signature': '3v4CJnCpFv3on9DJKzourd9RfvX3gz5yXY1jkhxc8FktHVbvx1ghBJC7DUYMAJzApPUAYMyTzyMB6Dm8HEzhAtvM', - 'identifier': 'ULtgFQJe6bjiFbs7ke3NJD'}, ('Faber College', ('127.0.0.1', 6918))) - - -def get_proof_libindy_msg(link, proof_req, proof, uuid, schema_seq_no): - eqProof = proof.proofs[str(uuid)].proof.primaryProof.eqProof - - return ({'type': 'PROOF', - 'nonce': '{}'.format(link.request_nonce), - 'proof_request': proof_req.to_str_dict(), - 'proof': { - 'proofs': { - uuid: { - 'proof': { - 'primary_proof': { - 'eq_proof': { - 'revealed_attrs': {k: str(v) for k, v in 
eqProof.revealedAttrs.items()}, - 'a_prime': '{}'.format(crypto_int_to_str(eqProof.Aprime)), - 'e': '{}'.format(crypto_int_to_str(eqProof.e)), - 'v': '{}'.format(crypto_int_to_str(eqProof.v)), - 'm': {k: str(crypto_int_to_str(v)) for k, v in eqProof.m.items()}, - 'm1': '{}'.format(crypto_int_to_str(eqProof.m1)), - 'm2': '{}'.format(crypto_int_to_str(eqProof.m2)) - }, - 'ge_proofs': {} - }, - 'non_revoc_proof': None - }, - 'issuer_did': 'FuN98eH2eZybECWkofW6A9BKJxxnTatBCopfUiNxo6ZB', - 'schema_seq_no': schema_seq_no, - 'revoc_reg_seq_no': None - } - }, - 'aggregated_proof': { - 'c_hash': '{}'.format(str(proof.aggregatedProof.cHash)), - 'c_list': [intToArrayBytes(v) for v in proof.aggregatedProof.CList if isCryptoInteger(v)] - }, - 'requested_proof': { - 'revealed_attrs': proof.requestedProof.revealed_attrs, - 'unrevealed_attrs': proof.requestedProof.unrevealed_attrs, - 'self_attested_attrs': proof.requestedProof.self_attested_attrs, - 'predicates': proof.requestedProof.predicates - } - }}) diff --git a/indy_client/test/agent/mock_backend_system.py b/indy_client/test/agent/mock_backend_system.py deleted file mode 100644 index df1e20793..000000000 --- a/indy_client/test/agent/mock_backend_system.py +++ /dev/null @@ -1,14 +0,0 @@ -from indy_client.agent.backend import BackendSystem - - -class MockBackendSystem(BackendSystem): - - def __init__(self, attrDef): - self._attrDef = attrDef - self._attrs = {} # type: Dict[int, AttribDef] - - def add_record(self, internal_id, **vals): - self._attrs[internal_id] = self._attrDef.attribs(**vals) - - def get_record_by_internal_id(self, internal_id): - return self._attrs[internal_id] diff --git a/indy_client/test/agent/test_accept_invitation.py b/indy_client/test/agent/test_accept_invitation.py deleted file mode 100644 index 3b0fd86a9..000000000 --- a/indy_client/test/agent/test_accept_invitation.py +++ /dev/null @@ -1,82 +0,0 @@ -import logging - -import pytest - -from indy_client.test.agent.conftest import check_accept_request - 
-concerningLogLevels = [logging.WARNING, - logging.ERROR, - logging.CRITICAL] - -whitelist = ["is not connected - message will not be sent immediately. " - "If this problem does not resolve itself - " - "check your firewall settings", - "with invalid state proof from"] - - -def testFaberCreateLink(faberLinkAdded): - pass - - -def test_alice_loads_faber_request(alice_faber_request_loaded): - pass - - -def test_alice_syncs_faber_request_link(alice_faber_request_link_synced): - pass - - -def testFaberAdded(faberAdded): - pass - - -def testAliceAgentConnected(faberAdded, aliceAgentConnected): - pass - - -@pytest.mark.skipif('sys.platform == "win32"', reason='SOV-332') -def test_alice_accept_faber_request(aliceAcceptedFaber): - pass - - -@pytest.mark.skipif('sys.platform == "win32"', reason='SOV-332') -def test_alice_accept_acme_request(aliceAcceptedAcme): - pass - - -@pytest.mark.skip(reason="SOV-562. Not yet implemented") -def testAddSchema(): - raise NotImplementedError - - -@pytest.mark.skip(reason="SOV-562. Not yet implemented") -def testAddClaimDefs(): - raise NotImplementedError - - -@pytest.mark.skip(reason="SOV-563. Incomplete implementation") -def testMultipleAcceptance(aliceAcceptedFaber, - faberIsRunning, - faberLinkAdded, - faberAdded, - walletBuilder, - agentBuilder, - emptyLooper, - faberNonceForAlice): - """ - For the test agent, Faber. Any invite nonce is acceptable. 
- """ - faberAgent, _ = faberIsRunning - assert len(faberAgent.wallet._connections) == 1 - link = next(faberAgent.wallet._connections.values()) - wallet = walletBuilder("Bob") - otherAgent = agentBuilder(wallet) - emptyLooper.add(otherAgent) - - check_accept_request(emptyLooper, - nonce=faberNonceForAlice, - inviteeAgent=otherAgent, - inviterAgentAndWallet=faberIsRunning, - linkName=link.name) - - assert len(faberAgent.wallet._connections) == 2 diff --git a/indy_client/test/agent/test_anoncreds_agent.py b/indy_client/test/agent/test_anoncreds_agent.py deleted file mode 100644 index 0deeecb39..000000000 --- a/indy_client/test/agent/test_anoncreds_agent.py +++ /dev/null @@ -1,36 +0,0 @@ -from indy_client.test import waits - -from stp_core.loop.eventually import eventually - -from anoncreds.protocol.types import SchemaKey, ID - - -def testAnonCreds(aliceAgent, aliceAcceptedFaber, aliceAcceptedAcme, - acmeAgent, emptyLooper): - # 1. request Claims from Faber - faberLink = aliceAgent.wallet.getConnection('Faber College') - name, version, origin = faberLink.availableClaims[0] - schemaKey = SchemaKey(name, version, origin) - aliceAgent.sendReqClaim(faberLink, schemaKey) - - # 2. check that claim is received from Faber - async def chkClaims(): - claim = await aliceAgent.prover.wallet.getClaimSignature(ID(schemaKey)) - assert claim.primaryClaim - timeout = waits.expectedClaimsReceived() - emptyLooper.run(eventually(chkClaims, timeout=timeout)) - - # 3. send proof to Acme - acme_link, acme_proof_req = aliceAgent.wallet.getMatchingConnectionsWithProofReq( - "Job-Application", "Acme Corp")[0] - aliceAgent.sendProof(acme_link, acme_proof_req) - - # 4. 
check that proof is verified by Acme - def chkProof(): - internalId = acmeAgent.get_internal_id_by_nonce( - acme_link.request_nonce) - link = acmeAgent.wallet.getConnectionBy(internalId=internalId) - assert "Job-Application" in link.verifiedClaimProofs - - timeout = waits.expectedClaimsReceived() - emptyLooper.run(eventually(chkProof, timeout=timeout)) diff --git a/indy_client/test/agent/test_anoncreds_claim.py b/indy_client/test/agent/test_anoncreds_claim.py deleted file mode 100644 index df1935fb8..000000000 --- a/indy_client/test/agent/test_anoncreds_claim.py +++ /dev/null @@ -1,62 +0,0 @@ -from indy_client.test import waits -from stp_core.loop.eventually import eventually -from anoncreds.protocol.types import SchemaKey, ID -from indy_client.test.agent.messages import get_claim_libindy_msg - - -def test_claim_from_libindy_works( - aliceAgent, - aliceAcceptedFaber, - aliceAcceptedAcme, - acmeAgent, - emptyLooper, - faberAgent): - faberLink = aliceAgent.wallet.getConnection('Faber College') - name, version, origin = faberLink.availableClaims[0] - schemaKey = SchemaKey(name, version, origin) - timeout = waits.expectedClaimsReceived() - - schema = faberAgent.issuer.wallet._schemasByKey[schemaKey] - - async def create_claim_and_send_to_prover(): - claimReq = await aliceAgent.prover.createClaimRequest( - schemaId=ID(schemaKey), - proverId='b1134a647eb818069c089e7694f63e6d', - reqNonRevoc=False) - - assert claimReq - - attr = faberAgent.issuer_backend.get_record_by_internal_id(1) - faberAgent.issuer._attrRepo.addAttributes(schemaKey=schemaKey, - userId=claimReq.userId, - attributes=attr) - claim_signature, claim_attributes = await faberAgent.issuer.issueClaim(ID(schemaKey=schemaKey), claimReq) - - msg = get_claim_libindy_msg(claim_signature, schema.seqId) - - await aliceAgent.handleReqClaimResponse(msg) - - emptyLooper.run(eventually( - create_claim_and_send_to_prover, timeout=timeout)) - - # 2. 
check that claim is received from Faber - async def chkClaims(): - claim = await aliceAgent.prover.wallet.getClaimSignature(ID(schemaKey)) - assert claim.primaryClaim - - emptyLooper.run(eventually(chkClaims, timeout=timeout)) - - # 3. send proof to Acme - acme_link, acme_proof_req = aliceAgent.wallet.getMatchingConnectionsWithProofReq( - "Job-Application", "Acme Corp")[0] - aliceAgent.sendProof(acme_link, acme_proof_req) - - # 4. check that proof is verified by Acme - def chkProof(): - internalId = acmeAgent.get_internal_id_by_nonce( - acme_link.request_nonce) - link = acmeAgent.wallet.getConnectionBy(internalId=internalId) - assert "Job-Application" in link.verifiedClaimProofs - - timeout = waits.expectedClaimsReceived() - emptyLooper.run(eventually(chkProof, timeout=timeout)) diff --git a/indy_client/test/agent/test_anoncreds_claim_request.py b/indy_client/test/agent/test_anoncreds_claim_request.py deleted file mode 100644 index e7a97349d..000000000 --- a/indy_client/test/agent/test_anoncreds_claim_request.py +++ /dev/null @@ -1,56 +0,0 @@ -from indy_client.test import waits -from stp_core.loop.eventually import eventually -from anoncreds.protocol.types import SchemaKey, ID -from indy_client.test.agent.messages import get_claim_request_libindy_msg - - -def test_claim_request_from_libindy_works( - aliceAgent, - aliceAcceptedFaber, - aliceAcceptedAcme, - acmeAgent, - emptyLooper, - faberAgent): - faberLink = aliceAgent.wallet.getConnection('Faber College') - name, version, origin = faberLink.availableClaims[0] - schemaKey = SchemaKey(name, version, origin) - timeout = waits.expectedClaimsReceived() - - schema = faberAgent.issuer.wallet._schemasByKey[schemaKey] - - async def create_claim_init_data_and_send_msg(): - claimReq = await aliceAgent.prover.createClaimRequest( - schemaId=ID(schemaKey), - proverId='b1134a647eb818069c089e7694f63e6d', - reqNonRevoc=False) - - assert claimReq - - msg = get_claim_request_libindy_msg(claimReq, schema.seqId) - - 
aliceAgent.signAndSendToLink(msg=msg, linkName=faberLink.name) - - emptyLooper.run(eventually( - create_claim_init_data_and_send_msg, timeout=timeout)) - - # 2. check that claim is received from Faber - async def chkClaims(): - claim = await aliceAgent.prover.wallet.getClaimSignature(ID(schemaKey)) - assert claim.primaryClaim - - emptyLooper.run(eventually(chkClaims, timeout=timeout)) - - # 3. send proof to Acme - acme_link, acme_proof_req = aliceAgent.wallet.getMatchingConnectionsWithProofReq( - "Job-Application", "Acme Corp")[0] - aliceAgent.sendProof(acme_link, acme_proof_req) - - # 4. check that proof is verified by Acme - def chkProof(): - internalId = acmeAgent.get_internal_id_by_nonce( - acme_link.request_nonce) - link = acmeAgent.wallet.getConnectionBy(internalId=internalId) - assert "Job-Application" in link.verifiedClaimProofs - - timeout = waits.expectedClaimsReceived() - emptyLooper.run(eventually(chkProof, timeout=timeout)) diff --git a/indy_client/test/agent/test_anoncreds_proof.py b/indy_client/test/agent/test_anoncreds_proof.py deleted file mode 100644 index 62f0f3767..000000000 --- a/indy_client/test/agent/test_anoncreds_proof.py +++ /dev/null @@ -1,57 +0,0 @@ -from indy_client.test import waits -from stp_core.loop.eventually import eventually -from anoncreds.protocol.types import SchemaKey, ID, ProofRequest -from indy_client.test.agent.messages import get_proof_libindy_msg - - -def test_proof_from_libindy_works( - aliceAgent, - aliceAcceptedFaber, - aliceAcceptedAcme, - acmeAgent, - emptyLooper, - faberAgent): - # 1. request Claims from Faber - faberLink = aliceAgent.wallet.getConnection('Faber College') - name, version, origin = faberLink.availableClaims[0] - schemaKey = SchemaKey(name, version, origin) - aliceAgent.sendReqClaim(faberLink, schemaKey) - - schema = faberAgent.issuer.wallet._schemasByKey[schemaKey] - - # 2. 
check that claim is received from Faber - async def chkClaims(): - claim = await aliceAgent.prover.wallet.getClaimSignature(ID(schemaKey)) - assert claim.primaryClaim - - timeout = waits.expectedClaimsReceived() - emptyLooper.run(eventually(chkClaims, timeout=timeout)) - - # 3. send proof to Acme - acme_link, acme_proof_req = aliceAgent.wallet.getMatchingConnectionsWithProofReq( - "Job-Application", "Acme Corp")[0] - - async def create_proof(): - proofRequest = ProofRequest("proof1", - "1.0", - int(acme_proof_req.nonce), - verifiableAttributes=acme_proof_req.verifiableAttributes, - predicates=acme_proof_req.predicates) - - proof = await aliceAgent.prover.presentProof(proofRequest) - - msg = get_proof_libindy_msg( - acme_link, acme_proof_req, proof, str(schema.seqId), schema.seqId) - - aliceAgent.signAndSendToLink(msg=msg, linkName=acme_link.name) - - emptyLooper.run(eventually(create_proof, timeout=timeout)) - - # 4. check that proof is verified by Acme - def chkProof(): - internalId = acmeAgent.get_internal_id_by_nonce( - acme_link.request_nonce) - link = acmeAgent.wallet.getConnectionBy(internalId=internalId) - assert "Job-Application" in link.verifiedClaimProofs - - emptyLooper.run(eventually(chkProof, timeout=timeout)) diff --git a/indy_client/test/agent/test_anoncreds_send_proof_request.py b/indy_client/test/agent/test_anoncreds_send_proof_request.py deleted file mode 100644 index 512576c79..000000000 --- a/indy_client/test/agent/test_anoncreds_send_proof_request.py +++ /dev/null @@ -1,46 +0,0 @@ -from indy_client.test import waits -from stp_core.loop.eventually import eventually -from anoncreds.protocol.types import SchemaKey, ID - - -def test_send_proof_works(aliceAgent, aliceAcceptedFaber, aliceAcceptedAcme, - acmeAgent, emptyLooper): - # 1. 
request Claims from Faber - faberLink = aliceAgent.wallet.getConnection('Faber College') - name, version, origin = faberLink.availableClaims[0] - schemaKey = SchemaKey(name, version, origin) - aliceAgent.sendReqClaim(faberLink, schemaKey) - - # 2. check that claim is received from Faber - async def chkClaims(): - claim = await aliceAgent.prover.wallet.getClaimSignature(ID(schemaKey)) - assert claim.primaryClaim - - emptyLooper.run(eventually( - chkClaims, timeout=waits.expectedClaimsReceived())) - - # 3. send Proof Request to Alice - alice_link = acmeAgent.wallet.getConnection('Alice') - acmeAgent.sendProofReq(alice_link, 'Job-Application-v0.3') - - def chkProofRequest(): - assert len(aliceAgent.wallet.getMatchingConnectionsWithProofReq( - "Job-Application-2", "Acme Corp")) > 0 - - emptyLooper.run(eventually(chkProofRequest, - timeout=waits.expectedClaimsReceived())) - - # 4. send proof to Acme - acme_link, acme_proof_req = aliceAgent.wallet.getMatchingConnectionsWithProofReq( - "Job-Application-2", "Acme Corp")[0] - aliceAgent.sendProof(acme_link, acme_proof_req) - - # 5. check that proof is verified by Acme - def chkProof(): - internalId = acmeAgent.get_internal_id_by_nonce( - acme_link.request_nonce) - link = acmeAgent.wallet.getConnectionBy(internalId=internalId) - assert "Job-Application-2" in link.verifiedClaimProofs - - emptyLooper.run(eventually( - chkProof, timeout=waits.expectedClaimsReceived())) diff --git a/indy_client/test/agent/test_connection.py b/indy_client/test/agent/test_connection.py deleted file mode 100644 index 110b779fc..000000000 --- a/indy_client/test/agent/test_connection.py +++ /dev/null @@ -1,10 +0,0 @@ -import pytest - - -@pytest.mark.skip("SOV-564. Not yet implemented") -def test_connect(): - """ - Connect to the indy network and ensure we have the latest keys for all of - the owner's identifiers. 
- """ - raise NotImplementedError diff --git a/indy_client/test/agent/test_general_use_case.py b/indy_client/test/agent/test_general_use_case.py deleted file mode 100644 index 2477ab376..000000000 --- a/indy_client/test/agent/test_general_use_case.py +++ /dev/null @@ -1,236 +0,0 @@ -import json -import pytest - -from indy_client.agent.walleted_agent import WalletedAgent -from indy_client.test.agent.mock_backend_system import MockBackendSystem - -import anoncreds.protocol.types -from indy_client.client.wallet.wallet import Wallet -from indy_client.test.constants import primes -from indy_common.identity import Identity -from indy_common.constants import TRUST_ANCHOR -from indy_node.pool.local_pool import create_local_pool - -# noinspection PyUnresolvedReferences -from indy_node.test.conftest import tdir, nodeSet, tconf, \ - updatedPoolTxnData, txnPoolNodeSet, poolTxnData, \ - dirName, tdirWithPoolTxns, \ - domainTxnOrderedFields, genesisTxns, stewardWallet, poolTxnStewardData, \ - poolTxnStewardNames, trusteeWallet, trusteeData, poolTxnTrusteeNames, \ - patchPluginManager, txnPoolNodesLooper, \ - poolTxnNodeNames, allPluginsPath, tdirWithNodeKeepInited, testNodeClass, \ - genesisTxns - -BANK_SEED = b'BANK0000000000000000000000000000' - - -class RefAgent(WalletedAgent): - - def create_connection_request(self, internal_id, name): - - nonce = str(self.verifier.generateNonce()) - # endpoint = self.endpoint.host_address() - # TODO: this should be done by endpoint - endpoint = "127.0.0.1" + ":" + str(self.endpoint.ha[1]) - - msg = {'connection-request': { - 'name': self.name, - 'identifier': self._wallet.defaultId, - 'nonce': nonce, - 'endpoint': endpoint, - 'verkey': self._wallet.getVerkey(self.wallet.defaultId) - }, - 'sig': None - } - - self._invites[nonce] = (internal_id, name) - - signature = self.wallet.signMsg(msg, self.wallet.defaultId) - - msg['sig'] = signature - - return json.dumps(msg) - - -@pytest.mark.skip("Broken logic of placing of nodes and clients.") -def 
test_end_to_end(tconf): - base_dir = tconf.CLI_BASE_DIR - - print('*' * 20) - print(base_dir) - print('*' * 20) - - with create_local_pool(base_dir) as network: - - print(network.genesis_transactions) - - network.runFor(5) - - client = network.create_client(5555) - - bank_wallet = Wallet() - bank_agent = RefAgent(name="bank", - basedirpath=base_dir, - client=client, - wallet=bank_wallet, - port=8787, - endpointArgs={'seed': BANK_SEED, - 'onlyListener': True}) - - network.add(bank_agent) - - bank_id, bank_verkey = bank_agent.new_identifier(seed=BANK_SEED) - - print(bank_id) - print(bank_verkey) - - s1_agent = network.steward_agent() - - s1_agent.publish_trust_anchor(Identity(identifier=bank_id, - verkey=bank_verkey, - role=TRUST_ANCHOR)) - network.runFor(5) - - # this allows calling asynchronous functions from a synchronous context - run_async = network.run - - bank_attribute_definition = anoncreds.protocol.types.AttribDef( - 'basic', [ - anoncreds.protocol.types.AttribType( - 'title', encode=True), anoncreds.protocol.types.AttribType( - 'first_name', encode=True), anoncreds.protocol.types.AttribType( - 'last_name', encode=True), anoncreds.protocol.types.AttribType( - 'address_1', encode=True), anoncreds.protocol.types.AttribType( - 'address_2', encode=True), anoncreds.protocol.types.AttribType( - 'address_3', encode=True), anoncreds.protocol.types.AttribType( - 'postcode_zip', encode=True), anoncreds.protocol.types.AttribType( - 'date_of_birth', encode=True), anoncreds.protocol.types.AttribType( - 'account_type', encode=True), anoncreds.protocol.types.AttribType( - 'year_opened', encode=True), anoncreds.protocol.types.AttribType( - 'account_status', encode=True)]) - - bank_agent.add_attribute_definition(bank_attribute_definition) - - backend = MockBackendSystem(bank_attribute_definition) - - alices_id_in_banks_system = 1999891343 - bobs_id_in_banks_system = 2911891343 - - backend.add_record(alices_id_in_banks_system, - title='Mrs.', - first_name='Alicia', - 
last_name='Garcia', - address_1='H-301', - address_2='Street 1', - address_3='UK', - postcode_zip='G61 3NR', - date_of_birth='December 28, 1990', - account_type='savings', - year_opened='2000', - account_status='active') - backend.add_record(bobs_id_in_banks_system, - title='Mrs.', - first_name='Jay', - last_name='Raj', - address_1='222', - address_2='Baker Street', - address_3='UK', - postcode_zip='G61 3NR', - date_of_birth='January 15, 1980', - account_type='savings', - year_opened='1999', - account_status='active') - - bank_agent.set_issuer_backend(backend) - - schema_id = run_async( - bank_agent.publish_schema('basic', - schema_name='Bank Membership', - schema_version='1.0')) - - # NOTE: do NOT use known primes in a non-test environment - - issuer_pub_key, revocation_pub_key = run_async( - bank_agent.publish_issuer_keys(schema_id, - p_prime=primes["prime1"][0], - q_prime=primes["prime1"][1])) - print(issuer_pub_key) - print(revocation_pub_key) - - # TODO: Not implemented yet - # accPK = run_async(bank_agent.publish_revocation_registry( - # schema_id=schema_id)) - - # print(accPK) - - run_async(bank_agent._set_available_claim_by_internal_id( - alices_id_in_banks_system, schema_id)) - run_async(bank_agent._set_available_claim_by_internal_id( - bobs_id_in_banks_system, schema_id)) - - alice_wallet = Wallet() - alice_agent = RefAgent(name="Alice", - basedirpath=base_dir, - client=client, - wallet=alice_wallet, - port=8786) - - network.add(alice_agent) - - network.runFor(1) - - request = bank_agent.create_connection_request( - alices_id_in_banks_system, "Alice") - - # Transfer of this request happens out-of-band (website, QR code, etc) - - alices_link_to_bank = alice_agent.load_request_str(request) - - # notice the link is not accepted - print(alices_link_to_bank) - - alice_agent.accept_request(alices_link_to_bank) - - network.runFor(10) - - # notice that the link is accepted - print(alices_link_to_bank) - - banks_link_to_alice = bank_agent.get_link_by_name( - 
alices_id_in_banks_system) - - # note the available claims are now there - print(banks_link_to_alice) - - claim_to_request = alices_link_to_bank.find_available_claim( - name='Bank Membership') - - print(claim_to_request) - - run_async(alice_agent.send_claim(alices_link_to_bank, - claim_to_request)) - network.runFor(5) - - claim = run_async(alice_agent.get_claim(schema_id)) - print(claim) - - # ######## - # # PROOF - # ######## - # bank_agent._proofRequestsSchema['Address'] = { - # "name": "Address", - # "version": "0.2", - # "attributes": { - # "address_1": "string", - # "address_2": "string", - # "address_3": "string", - # "state": "string", - # "postcode_zip": "string", - # }, - # "verifiableAttributes": ["postcode_zip"] - # } - # - # bank_agent.sendProofReq(banks_link_to_alice, 'Address') - # - # network.runFor(3) - # print() diff --git a/indy_client/test/agent/test_owner_identifiers.py b/indy_client/test/agent/test_owner_identifiers.py deleted file mode 100644 index 0d8a80684..000000000 --- a/indy_client/test/agent/test_owner_identifiers.py +++ /dev/null @@ -1,28 +0,0 @@ -import pytest - - -@pytest.mark.skip("SOV-565. Not yet implemented") -def test_add_identifier(): - """ - Add an owner identifier. - Ensure the appropriate values are set for the associated synced key. - """ - raise NotImplementedError - - -@pytest.mark.skip("SOV-565. Not yet implemented") -def test_add_second_identifier(): - """ - Add another owner identifier. - Ensure agent.ownerIdentifiers is set up properly. - """ - raise NotImplementedError - - -@pytest.mark.skip("SOV-565. Not yet implemented") -def test_sync_identifier_keys(): - """ - Connect to the indy network and ensure we have the latest keys for all of - the owner's identifiers. 
- """ - raise NotImplementedError diff --git a/indy_client/test/agent/test_owner_request_handling.py b/indy_client/test/agent/test_owner_request_handling.py deleted file mode 100644 index 875059d8a..000000000 --- a/indy_client/test/agent/test_owner_request_handling.py +++ /dev/null @@ -1,25 +0,0 @@ -import pytest - - -@pytest.mark.skip("SOV-566. Not yet implemented") -def testUnsignedRequest(): - """ - Ensure an unsigned request is not allowed. - """ - raise NotImplementedError - - -@pytest.mark.skip("SOV-566. Not yet implemented") -def testRequestSignedByUnknownIdentifier(): - """ - Ensure a request signed by an unknown party is not allowed. - """ - raise NotImplementedError - - -@pytest.mark.skip("SOV-566. Not yet implemented") -def testRequestSignedByKnownIdentifier(): - """ - Ensure a properly signed request is allowed. - """ - raise NotImplementedError diff --git a/indy_client/test/agent/test_ping.py b/indy_client/test/agent/test_ping.py deleted file mode 100644 index 5c075be79..000000000 --- a/indy_client/test/agent/test_ping.py +++ /dev/null @@ -1,31 +0,0 @@ -from indy_client.test import waits -from stp_core.loop.eventually import eventually -from plenum.test.testable import spy, SpyLog - -from indy_client.agent.constants import PING, PONG - -whitelist = ["is not connected - message will not be sent immediately.If this problem does not resolve itself - check your firewall settings"] - - -def testPing(aliceAcceptedFaber, faberIsRunning, aliceAgent, emptyLooper): - faberAgent, _ = faberIsRunning - - faber_log = SpyLog() - alice_log = SpyLog() - faberAgent.msgHandlers[PING] = spy( - faberAgent._handlePing, False, True, spy_log=faber_log) - aliceAgent.msgHandlers[PONG] = spy( - aliceAgent._handlePong, False, True, spy_log=alice_log) - - recvd_pings = 0 - recvd_pongs = 0 - aliceAgent.sendPing('Faber College') - - def chk(): - assert (recvd_pings + 1) == faber_log.count( - faberAgent._handlePing.__name__) - assert (recvd_pongs + 1) == alice_log.count( - 
aliceAgent._handlePong.__name__) - - timeout = waits.expectedAgentPing() - emptyLooper.run(eventually(chk, retryWait=1, timeout=timeout)) diff --git a/indy_client/test/agent/test_restore_agent_wallets_from_mgl.py b/indy_client/test/agent/test_restore_agent_wallets_from_mgl.py deleted file mode 100644 index ead69632b..000000000 --- a/indy_client/test/agent/test_restore_agent_wallets_from_mgl.py +++ /dev/null @@ -1,54 +0,0 @@ -import os -import shutil - -from indy_client.agent.walleted_agent import WalletedAgent -from indy_client.anon_creds.indy_issuer import IndyIssuerWalletInMemory -from indy_client.anon_creds.indy_public_repo import IndyPublicRepo -from indy_client.client.wallet.wallet import Wallet -from indy_client.test.client.TestClient import TestClient -from stp_core.network.port_dispenser import genHa - - -def test_restore_agent_wallets_from_minimal_go_live(tconf, tdirWithClientPoolTxns): - source_dir = os.path.dirname(os.path.realpath(__file__)) - agent_wallet_source_path = os.path.join( - source_dir, 'agent_wallet_from_minimal_go_live') - issuer_wallet_source_path = os.path.join( - source_dir, 'issuer_wallet_from_minimal_go_live') - - agent_wallets_dir = os.path.join(tconf.CLI_BASE_DIR, tconf.walletsDir, - 'agents', 'test-agent') - issuer_wallet_dir = os.path.join(agent_wallets_dir, 'issuer') - - os.makedirs(issuer_wallet_dir) - shutil.copy(agent_wallet_source_path, - os.path.join(agent_wallets_dir, 'default.wallet')) - shutil.copy(issuer_wallet_source_path, - os.path.join(issuer_wallet_dir, 'issuer.wallet')) - - client = TestClient('test-client', - ha=genHa(), - basedirpath=tdirWithClientPoolTxns) - agent = WalletedAgent('test-agent', - basedirpath=tdirWithClientPoolTxns, - client=client) - - agent_wallet = agent.wallet - assert isinstance(agent_wallet, Wallet) - - agent_connections = agent_wallet.getConnectionNames() - assert len(agent_connections) == 3 - assert 'Acme Corp' in agent_connections - assert 'Faber College' in agent_connections - assert 
'Thrift Bank' in agent_connections - - issuer_wallet = agent.issuer.wallet - assert isinstance(issuer_wallet, IndyIssuerWalletInMemory) - assert isinstance(issuer_wallet._repo, IndyPublicRepo) - assert isinstance(issuer_wallet._repo.wallet, Wallet) - - issuer_connections = issuer_wallet._repo.wallet.getConnectionNames() - assert len(issuer_connections) == 3 - assert 'Acme Corp' in issuer_connections - assert 'Faber College' in issuer_connections - assert 'Thrift Bank' in issuer_connections diff --git a/indy_client/test/agent/test_startup_shutdown.py b/indy_client/test/agent/test_startup_shutdown.py deleted file mode 100644 index c90204b18..000000000 --- a/indy_client/test/agent/test_startup_shutdown.py +++ /dev/null @@ -1,28 +0,0 @@ -from plenum.common.startable import Status -from pytest import fixture - -from indy_client.agent.agent import Agent - - -@fixture(scope="module") -def agent(tdir): - return Agent('agent1', tdir) - - -@fixture(scope="module") -def startedAgent(emptyLooper, agent): - emptyLooper.add(agent) - return agent - - -def testStartup(startedAgent, emptyLooper): - assert startedAgent.isGoing() is True - assert startedAgent.get_status() is Status.starting - emptyLooper.runFor(.1) - assert startedAgent.get_status() is Status.started - - -def testShutdown(startedAgent): - startedAgent.stop() - assert startedAgent.isGoing() is False - assert startedAgent.get_status() is Status.stopped diff --git a/indy_client/test/agent/test_walleted_agent.py b/indy_client/test/agent/test_walleted_agent.py deleted file mode 100644 index 06279cfe0..000000000 --- a/indy_client/test/agent/test_walleted_agent.py +++ /dev/null @@ -1,9 +0,0 @@ -from plenum.test.testable import spyable -from indy_client.agent.walleted_agent import WalletedAgent -from indy_client.agent.runnable_agent import RunnableAgent - - -# @spyable( -# methods=[WalletedAgent._handlePing, WalletedAgent._handlePong]) -class TestWalletedAgent(WalletedAgent, RunnableAgent): - pass diff --git 
a/indy_client/test/agent/thrift.py b/indy_client/test/agent/thrift.py deleted file mode 100644 index 73dddfbce..000000000 --- a/indy_client/test/agent/thrift.py +++ /dev/null @@ -1,85 +0,0 @@ -import os - -from indy_common.config_util import getConfig -from plenum.common.signer_did import DidSigner -from indy_client.agent.constants import EVENT_NOTIFY_MSG -from indy_client.client.wallet.wallet import Wallet -from stp_core.common.log import getlogger -from indy_client.agent.runnable_agent import RunnableAgent -from indy_client.agent.agent import create_client - -from indy_client.agent.walleted_agent import WalletedAgent -from indy_client.test.agent.helper import buildThriftWallet -from indy_client.test.client.TestClient import TestClient - -logger = getlogger() - -THRIFT_SEED = b'Thrift00000000000000000000000000' -THRIFT_SIGNER = DidSigner(seed=THRIFT_SEED) -THRIFT_ID = THRIFT_SIGNER.identifier -THRIFT_VERKEY = THRIFT_SIGNER.verkey - - -class ThriftAgent(WalletedAgent): - async def postProofVerif(self, claimName, link, frm): - if claimName == "Loan-Application-Basic": - self.notifyToRemoteCaller( - EVENT_NOTIFY_MSG, - " Loan eligibility criteria satisfied," - " please send another claim " - "'Loan-Application-KYC'\n", - self.wallet.defaultId, - frm) - - -def create_thrift(name=None, wallet=None, base_dir_path=None, - port=7777, client=None): - endpoint_args = {'onlyListener': True} - if wallet: - endpoint_args['seed'] = wallet._signerById(wallet.defaultId).seed - else: - wallet = Wallet(name) - wallet.addIdentifier(signer=THRIFT_SIGNER) - endpoint_args['seed'] = THRIFT_SEED - - if client is None: - client = create_client(base_dir_path=base_dir_path, client_class=TestClient) - - agent = ThriftAgent(name=name or 'Thrift Bank', - basedirpath=base_dir_path, - client=client, - wallet=wallet, - port=port, - endpointArgs=endpoint_args) - - agent._invites = { - "77fbf9dc8c8e6acde33de98c6d747b28c": (1, "Alice"), - "ousezru20ic4yz3j074trcgthwlsnfsef": (2, "Bob") - } - - 
return agent - - -async def bootstrap_thrift(agent): - pass - - -if __name__ == "__main__": - args = RunnableAgent.parser_cmd_args() - name = 'Thrift Bank' - port = args.port - if port is None: - port = 7777 - network = args.network or 'sandbox' - with_cli = args.withcli - - config = getConfig() - base_dir_path = os.path.expanduser( - os.path.join( - config.CLI_NETWORK_DIR, network - )) - - agent = create_thrift(name=name, wallet=buildThriftWallet(), - base_dir_path=base_dir_path, port=port) - RunnableAgent.run_agent(agent, bootstrap=bootstrap_thrift(agent), - with_cli=with_cli) diff --git a/indy_client/test/anon_creds/__init__.py b/indy_client/test/anon_creds/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/indy_client/test/anon_creds/conftest.py b/indy_client/test/anon_creds/conftest.py deleted file mode 100644 index ce39a5463..000000000 --- a/indy_client/test/anon_creds/conftest.py +++ /dev/null @@ -1,104 +0,0 @@ -import pytest -from anoncreds.protocol.issuer import Issuer -from anoncreds.protocol.repo.attributes_repo import AttributeRepoInMemory -from anoncreds.protocol.types import AttribType, AttribDef, Schema, ID -from anoncreds.protocol.wallet.issuer_wallet import IssuerWalletInMemory - -from indy_client.anon_creds.indy_public_repo import IndyPublicRepo - -GVT = AttribDef('gvt', - [AttribType('name', encode=True), - AttribType('age', encode=False), - AttribType('height', encode=False), - AttribType('sex', encode=True)]) - -# We perform all tests twice: -# - no revocation case (primary key only) -# - revocation case (both primary and revocation keys) -# There are two Schemas generated (one for each branch of tests) -revoc_params = ['revocation', 'no_revocation'] - - -@pytest.fixture(scope="module") -def public_repo(steward, stewardWallet): - return IndyPublicRepo(steward, stewardWallet) - - -@pytest.fixture(scope="module") -def public_repo_2(trustee, trusteeWallet): - return IndyPublicRepo(trustee, trusteeWallet) - - 
-@pytest.fixture(scope="module") -def public_repo_for_client(client1, added_client_without_role): - return IndyPublicRepo(client1, added_client_without_role) - - -@pytest.fixture(scope="module") -def issuer(public_repo): - return Issuer(IssuerWalletInMemory('issuer1', public_repo), - AttributeRepoInMemory()) - - -@pytest.fixture(scope="module", params=revoc_params) -def schema(request, stewardWallet): - return Schema(name='GVT', - version='1.0' if request.param == 'revocation' else '2.0', - attrNames=GVT.attribNames(), - issuerId=stewardWallet.defaultId, - seqId=None) - - -@pytest.fixture(scope="module") -def submitted_schema(public_repo, schema, looper): - return looper.run(public_repo.submitSchema(schema)) - - -@pytest.fixture(scope="module") -def submitted_schema_ID(submitted_schema): - return ID(schemaKey=submitted_schema.getKey(), - schemaId=submitted_schema.seqId) - - -@pytest.fixture(scope="module") -def public_secret_key(submitted_schema_ID, issuer, primes1, looper): - return looper.run( - issuer._primaryIssuer.genKeys(submitted_schema_ID, **primes1)) - - -@pytest.fixture(scope="module") -def public_secret_revocation_key(issuer, looper, schema): - if schema.version == '2.0': - return (None, None) - return looper.run(issuer._nonRevocationIssuer.genRevocationKeys()) - - -@pytest.fixture(scope="module") -def public_key(public_secret_key): - return public_secret_key[0] - - -@pytest.fixture(scope="module") -def public_revocation_key(public_secret_revocation_key): - return public_secret_revocation_key[0] - - -@pytest.fixture(scope="module") -def submitted_claim_def(submitted_schema_ID, public_repo, public_secret_key, - public_secret_revocation_key, looper): - pk, sk = public_secret_key - pkR, skR = public_secret_revocation_key - return looper.run(public_repo.submitPublicKeys(id=submitted_schema_ID, - pk=pk, - pkR=pkR, - signatureType='CL')) - - -@pytest.fixture(scope="module") -def submitted_public_key(submitted_claim_def): - return submitted_claim_def[0] - - 
-@pytest.fixture(scope="module") -def submitted_public_revocation_key(submitted_claim_def): - return submitted_claim_def[1] diff --git a/indy_client/test/anon_creds/test_anoncreds_usage.py b/indy_client/test/anon_creds/test_anoncreds_usage.py deleted file mode 100644 index 6084af6c3..000000000 --- a/indy_client/test/anon_creds/test_anoncreds_usage.py +++ /dev/null @@ -1,67 +0,0 @@ -import pytest - -from anoncreds.protocol.repo.attributes_repo import AttributeRepoInMemory -from anoncreds.protocol.types import ID, PredicateGE, AttributeInfo, ProofRequest -from indy_client.anon_creds.indy_issuer import IndyIssuer -from indy_client.anon_creds.indy_prover import IndyProver -from indy_client.anon_creds.indy_verifier import IndyVerifier -from indy_client.test.anon_creds.conftest import GVT - - -@pytest.fixture(scope="module") -def attrRepo(): - return AttributeRepoInMemory() - - -@pytest.fixture(scope="module") -def issuer(steward, stewardWallet, attrRepo): - return IndyIssuer(steward, stewardWallet, attrRepo) - - -@pytest.fixture(scope="module") -def prover(userClientA, userWalletA): - return IndyProver(userClientA, userWalletA) - - -@pytest.fixture(scope="module") -def verifier(userClientB, userWalletB): - return IndyVerifier(userClientB, userWalletB) - - -def testAnonCredsPrimaryOnly( - issuer, prover, verifier, attrRepo, primes1, looper): - async def doTestAnonCredsPrimaryOnly(): - # 1. Create a Schema - schema = await issuer.genSchema('GVT', '1.0', GVT.attribNames()) - schemaId = ID(schemaKey=schema.getKey(), schemaId=schema.seqId) - - # 2. Create keys for the Schema - await issuer.genKeys(schemaId, **primes1) - - # 3. Issue accumulator - # TODO: Not implemented yet - # await issuer.issueAccumulator(schemaId=schemaId, iA='110', L=5) - - # 4. set attributes for user1 - attrs = GVT.attribs(name='Alex', age=28, height=175, sex='male') - proverId = str(prover.proverId) - attrRepo.addAttributes(schema.getKey(), proverId, attrs) - - # 5. 
request Claims - claimsReq = await prover.createClaimRequest(schemaId, proverId, False) - (claim_signature, claim_attributes) = await issuer.issueClaim(schemaId, claimsReq) - await prover.processClaim(schemaId, claim_attributes, claim_signature) - - # 6. proof Claims - proofRequest = ProofRequest( - "proof1", "1.0", verifier.generateNonce(), verifiableAttributes={ - 'attr_uuid': AttributeInfo( - 'name', schema.seqId)}, predicates={ - 'predicate_uuid': PredicateGE( - 'age', 18)}) - - proof = await prover.presentProof(proofRequest) - assert proof.requestedProof.revealed_attrs['attr_uuid'][1] == 'Alex' - assert await verifier.verify(proofRequest, proof) - - looper.run(doTestAnonCredsPrimaryOnly) diff --git a/indy_client/test/anon_creds/test_claim_def.py b/indy_client/test/anon_creds/test_claim_def.py deleted file mode 100644 index 70ad0d242..000000000 --- a/indy_client/test/anon_creds/test_claim_def.py +++ /dev/null @@ -1,102 +0,0 @@ -import random -import sys - -import pytest - -from plenum.common.exceptions import OperationError -from stp_core.common.log import getlogger - -logger = getlogger() - - -def test_can_not_submit_claim_def_by_identity_owner(submitted_schema_ID, - public_key, - public_revocation_key, - looper, - public_repo_for_client): - with pytest.raises(OperationError) as ex_info: - looper.run(public_repo_for_client.submitPublicKeys(id=submitted_schema_ID, - pk=public_key, - pkR=public_revocation_key, - signatureType='CL')) - assert "role cannot add claim def" in ex_info[0] - - -def test_submit_claim_def(submitted_claim_def): - assert submitted_claim_def - - -def test_submit_claim_def_same_schema_different_signature_type( - submitted_claim_def, - looper, public_repo, - submitted_schema_ID, - public_key, public_revocation_key): - assert submitted_claim_def - looper.run(public_repo.submitPublicKeys(id=submitted_schema_ID, - pk=public_key, - pkR=public_revocation_key, - signatureType='CL2')) - - -def test_submit_same_claim_def_by_different_issuer( - 
submitted_claim_def, - looper, public_repo_2, - submitted_schema_ID, - public_key, public_revocation_key): - assert submitted_claim_def - looper.run(public_repo_2.submitPublicKeys(id=submitted_schema_ID, - pk=public_key, - pkR=public_revocation_key, - signatureType='CL')) - - -def test_get_primary_public_key(submitted_schema_ID, submitted_public_key, - public_repo, looper): - pk = looper.run(public_repo.getPublicKey(id=submitted_schema_ID, - signatureType='CL')) - assert pk == submitted_public_key - - -def test_get_primary_public_key_non_existent(submitted_schema_ID, - public_repo, looper): - schemaId = submitted_schema_ID._replace( - schemaId=random.randint(100, 300)) - with pytest.raises(ValueError): - looper.run(public_repo.getPublicKey(id=schemaId, signatureType='CL')) - - -def test_get_revocation_public_key(submitted_schema_ID, - submitted_public_revocation_key, - public_repo, looper): - pk = looper.run( - public_repo.getPublicKeyRevocation(id=submitted_schema_ID, - signatureType='CL')) - - if sys.platform == 'win32': - assert pk - logger.warning("Gotten public revocation key is not verified " - "on Windows for matching against submitted public " - "revocation key since they are different on Windows " - "due to an issue in charm-crypto package.") - else: - assert pk == submitted_public_revocation_key - - -def test_get_revocation_public_key_non_existent(submitted_schema_ID, - public_repo, looper): - schemaId = submitted_schema_ID._replace( - schemaId=random.randint(100, 300)) - with pytest.raises(ValueError): - looper.run(public_repo.getPublicKeyRevocation(id=schemaId, - signatureType='CL')) - - -def test_submit_claim_def_same_schema_and_signature_type(submitted_claim_def, - looper, public_repo, - submitted_schema_ID, - public_key, public_revocation_key): - assert submitted_claim_def - looper.run(public_repo.submitPublicKeys(id=submitted_schema_ID, - pk=public_key, - pkR=public_revocation_key, - signatureType='CL')) diff --git 
a/indy_client/test/anon_creds/test_schema.py b/indy_client/test/anon_creds/test_schema.py deleted file mode 100644 index c1fa6d1a7..000000000 --- a/indy_client/test/anon_creds/test_schema.py +++ /dev/null @@ -1,85 +0,0 @@ -from random import randint - -import pytest -from anoncreds.protocol.exceptions import SchemaNotFoundError -from anoncreds.protocol.types import ID, Schema - -from plenum.common.exceptions import OperationError -from plenum.common.util import randomString -from stp_core.common.log import getlogger - -logger = getlogger() -whitelist = ['Consensus for ReqId:'] - - -def test_submit_schema(submitted_schema, schema): - assert submitted_schema - assert submitted_schema.seqId - - # initial schema has stub seqno - excluding seqno from comparison - def withNoSeqId(schema): - return schema._replace(seqId=None) - - assert withNoSeqId(submitted_schema) == withNoSeqId(schema) - - -def test_submit_same_schema_twice(looper, public_repo, - schema, - submitted_schema): - assert submitted_schema - with pytest.raises(OperationError) as ex_info: - looper.run( - public_repo.submitSchema(schema) - ) - ex_info.match("can have one and only one SCHEMA with name GVT and version 1.0'") # This line is unreachable in case of an exception - - -def test_can_not_submit_schema_by_identity_owner(looper, - public_repo_for_client, - schema): - with pytest.raises(OperationError) as ex_info: - looper.run( - public_repo_for_client.submitSchema(schema) - ) - ex_info.match("role cannot add claim def") # This line is unreachable in case of an exception - - -def test_can_not_submit_schema_with_empty_attr_names(looper, - public_repo, - stewardWallet): - schema = Schema(name='newSchema', - version='4.0', - attrNames=[], - issuerId=stewardWallet.defaultId, - seqId=None) - - with pytest.raises(OperationError) as ex_info: - looper.run( - public_repo.submitSchema(schema) - ) - ex_info.match("attr_names in schema can not be empty") # This line is unreachable in case of an exception - - -def 
test_get_schema(submitted_schema, public_repo, looper): - key = submitted_schema.getKey() - schema = looper.run(public_repo.getSchema(ID(schemaKey=key))) - assert schema == submitted_schema - - -def test_get_schema_by_seqno(submitted_schema, public_repo, looper): - schema = looper.run(public_repo.getSchema( - ID(schemaId=submitted_schema.seqId))) - assert schema == submitted_schema - - -def test_get_schema_by_invalid_seqno(submitted_schema, public_repo, looper): - with pytest.raises(SchemaNotFoundError): - looper.run(public_repo.getSchema( - ID(schemaId=(submitted_schema.seqId + randint(100, 1000))))) - - -def test_get_schema_non_existent(submitted_schema, public_repo, looper): - key = submitted_schema.getKey() - key = key._replace(name=key.name + randomString(5)) - with pytest.raises(SchemaNotFoundError): - looper.run(public_repo.getSchema(ID(schemaKey=key))) diff --git a/indy_client/test/cli/__init__.py b/indy_client/test/cli/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/indy_client/test/cli/conftest.py b/indy_client/test/cli/conftest.py deleted file mode 100644 index 7714c55bc..000000000 --- a/indy_client/test/cli/conftest.py +++ /dev/null @@ -1,1473 +0,0 @@ -import json -import os -import re -import tempfile -from typing import List - -import base58 -import pytest -from plenum.bls.bls_crypto_factory import create_default_bls_crypto_factory -from plenum.common.signer_did import DidSigner -from indy_client.test.agent.acme import ACME_ID, ACME_SEED -from indy_client.test.agent.acme import ACME_VERKEY -from indy_client.test.agent.faber import FABER_ID, FABER_VERKEY, FABER_SEED -from indy_client.test.agent.thrift import THRIFT_ID, THRIFT_VERKEY, THRIFT_SEED -from indy_common.config_helper import NodeConfigHelper -from ledger.genesis_txn.genesis_txn_file_util import create_genesis_txn_init_ledger -from plenum.common.txn_util import get_type - -from stp_core.crypto.util import randomSeed -from stp_core.network.port_dispenser import genHa 
- -import plenum -from plenum.common import util -from plenum.common.constants import ALIAS, NODE_IP, NODE_PORT, CLIENT_IP, \ - CLIENT_PORT, SERVICES, VALIDATOR, BLS_KEY, TXN_TYPE, NODE, NYM, \ - BLS_KEY_PROOF -from plenum.common.constants import CLIENT_STACK_SUFFIX -from plenum.common.exceptions import BlowUp -from plenum.common.signer_simple import SimpleSigner -from plenum.common.util import randomString -from plenum.test import waits -from plenum.test.test_node import checkNodesConnected, ensureElectionsDone - -from plenum.test.conftest import txnPoolNodeSet, patchPluginManager, tdirWithNodeKeepInited -from stp_core.loop.eventually import eventually -from stp_core.common.log import getlogger -from plenum.test.conftest import tdirWithPoolTxns -from indy_client.cli.helper import USAGE_TEXT, NEXT_COMMANDS_TO_TRY_TEXT -from indy_client.test.helper import createNym, buildStewardClient -from indy_common.constants import ENDPOINT, TRUST_ANCHOR -from indy_common.roles import Roles -from indy_common.test.conftest import poolTxnTrusteeNames -from indy_common.test.conftest import domainTxnOrderedFields -from indy_node.test.helper import TestNode -from plenum.common.keygen_utils import initNodeKeysForBothStacks - -# plenum.common.util.loggingConfigured = False - -from stp_core.loop.looper import Looper -from plenum.test.cli.helper import newKeyPair, doByCtx - -from indy_client.test.cli.helper import ensureNodesCreated, get_connection_request, \ - getPoolTxnData, newCLI, getCliBuilder, P, prompt_is, addAgent, doSendNodeCmd, addNym -from indy_client.test.agent.conftest import faberIsRunning as runningFaber, \ - acmeIsRunning as runningAcme, thriftIsRunning as runningThrift, emptyLooper,\ - faberWallet, acmeWallet, thriftWallet, agentIpAddress, \ - faberAgentPort, acmeAgentPort, thriftAgentPort, faberAgent, acmeAgent, \ - thriftAgent, faberBootstrap, acmeBootstrap -from indy_client.test.cli.helper import connect_and_check_output, disconnect_and_check_output -from 
indy_common.config_helper import ConfigHelper -from stp_core.crypto.util import randomSeed - - -@pytest.fixture("module") -def ledger_base_dir(tconf): - return tconf.CLI_NETWORK_DIR - - -@pytest.yield_fixture(scope="session") -def cliTempLogger(): - file_name = "indy_cli_test.log" - file_path = os.path.join(tempfile.tempdir, file_name) - with open(file_path, 'w'): - pass - return file_path - - -@pytest.yield_fixture(scope="module") -def looper(): - with Looper(debug=False) as l: - yield l - - -@pytest.fixture("module") -def cli(looper, client_tdir): - return newCLI(looper, client_tdir) - - -@pytest.fixture(scope="module") -def newKeyPairCreated(cli): - return newKeyPair(cli) - - -@pytest.fixture(scope="module") -def CliBuilder(tdir, tdirWithPoolTxns, tdirWithDomainTxns, - txnPoolNodesLooper, tconf, cliTempLogger): - return getCliBuilder( - tdir, - tconf, - tdirWithPoolTxns, - tdirWithDomainTxns, - logFileName=cliTempLogger, - def_looper=txnPoolNodesLooper) - - -def getDefaultUserMap(name): - return { - 'wallet-name': name, - } - - -@pytest.fixture(scope="module") -def aliceMap(): - return getDefaultUserMap("Alice") - - -@pytest.fixture(scope="module") -def earlMap(): - return getDefaultUserMap("Earl") - - -@pytest.fixture(scope="module") -def bobMap(): - return getDefaultUserMap("Bob") - - -@pytest.fixture(scope="module") -def susanMap(): - return getDefaultUserMap("Susan") - - -@pytest.fixture(scope="module") # noqa -def faberMap(agentIpAddress, faberAgentPort): - ha = "{}:{}".format(agentIpAddress, faberAgentPort) - return {'inviter': 'Faber College', - 'invite': "sample/faber-request.indy", - 'invite-not-exists': "sample/faber-request.indy.not.exists", - 'inviter-not-exists': "non-existing-inviter", - 'seed': FABER_SEED.decode(), - "remote": FABER_ID, - "remote-verkey": FABER_VERKEY, - "nonce": "b1134a647eb818069c089e7694f63e6d", - ENDPOINT: ha, - "invalidEndpointAttr": json.dumps({ENDPOINT: {'ha': ' 127.0.0.1:11'}}), - "endpointAttr": json.dumps({ENDPOINT: 
{'ha': ha}}), - "claims": "Transcript", - "claim-to-show": "Transcript", - "proof-req-to-match": "Transcript", - 'wallet-name': 'Faber'} - - -@pytest.fixture(scope="module") # noqa -def acmeMap(agentIpAddress, acmeAgentPort): - ha = "{}:{}".format(agentIpAddress, acmeAgentPort) - return {'inviter': 'Acme Corp', - ENDPOINT: ha, - "endpointAttr": json.dumps({ENDPOINT: {'ha': ha}}), - "invalidEndpointAttr": json.dumps({ENDPOINT: {'ha': '127.0.0.1: 11'}}), - 'invite': 'sample/acme-job-application.indy', - 'invite-no-pr': 'sample/acme-job-application-no-pr.indy', - 'invite-not-exists': 'sample/acme-job-application.indy.not.exists', - 'inviter-not-exists': 'non-existing-inviter', - 'seed': ACME_SEED.decode(), - "remote": ACME_ID, - "remote-verkey": ACME_VERKEY, - 'nonce': '57fbf9dc8c8e6acde33de98c6d747b28c', - 'proof-requests': 'Job-Application', - 'proof-request-to-show': 'Job-Application', - 'claim-ver-req-to-show': '0.2', - 'proof-req-to-match': 'Job-Application', - 'claims': '', - 'rcvd-claim-transcript-provider': 'Faber College', - 'rcvd-claim-transcript-name': 'Transcript', - 'rcvd-claim-transcript-version': '1.2', - 'send-proof-target': 'Alice', - 'pr-name': 'Job-Application', - 'pr-schema-version': '0.2', - 'wallet-name': 'Acme'} - - -@pytest.fixture(scope="module") # noqa -def thriftMap(agentIpAddress, thriftAgentPort): - ha = "{}:{}".format(agentIpAddress, thriftAgentPort) - return {'inviter': 'Thrift Bank', - 'invite': "sample/thrift-loan-application.indy", - 'invite-not-exists': "sample/thrift-loan-application.indy.not.exists", - 'inviter-not-exists': "non-existing-inviter", - 'seed': THRIFT_SEED.decode(), - "remote": THRIFT_ID, - "remote-verkey": THRIFT_VERKEY, - "nonce": "77fbf9dc8c8e6acde33de98c6d747b28c", - ENDPOINT: ha, - "endpointAttr": json.dumps({ENDPOINT: {'ha': ha}}), - "invalidEndpointAttr": json.dumps({ENDPOINT: {'ha': '127.0.0.1:4A78'}}), - "proof-requests": "Loan-Application-Basic, Loan-Application-KYC", - "rcvd-claim-job-certificate-name": 
"Job-Certificate", - "rcvd-claim-job-certificate-version": "0.2", - "rcvd-claim-job-certificate-provider": "Acme Corp", - "claim-ver-req-to-show": "0.1", - 'wallet-name': 'Thrift'} - - -@pytest.fixture(scope="module") -def loadInviteOut(nextCommandsToTryUsageLine): - return ["1 connection request found for {inviter}.", - "Creating connection for {inviter}.", - ''] + \ - nextCommandsToTryUsageLine + \ - [' show connection "{inviter}"', - ' accept request from "{inviter}"', - '', - ''] - - -@pytest.fixture(scope="module") -def fileNotExists(): - return ["Given file does not exist"] - - -@pytest.fixture(scope="module") -def canNotSyncMsg(): - return ["Cannot sync because not connected"] - - -@pytest.fixture(scope="module") -def syncWhenNotConnected(canNotSyncMsg, connectUsage): - return canNotSyncMsg + connectUsage - - -@pytest.fixture(scope="module") -def canNotAcceptMsg(): - return ["Cannot accept because not connected"] - - -@pytest.fixture(scope="module") -def acceptWhenNotConnected(canNotAcceptMsg, connectUsage): - return canNotAcceptMsg + connectUsage - - -@pytest.fixture(scope="module") -def acceptUnSyncedWithoutEndpointWhenConnected( - common_accept_request_msgs, syncedInviteAcceptedOutWithoutClaims): - return common_accept_request_msgs + \ - syncedInviteAcceptedOutWithoutClaims - - -@pytest.fixture(scope="module") -def common_accept_requests_msgs(): - return ["Request not yet verified", - "Connection not yet synchronized.", - ] - - -@pytest.fixture(scope="module") -def acceptUnSyncedWhenNotConnected(common_accept_requests_msgs, - canNotSyncMsg, connectUsage): - return common_accept_requests_msgs + \ - ["Request acceptance aborted."] + \ - canNotSyncMsg + connectUsage - - -@pytest.fixture(scope="module") -def usageLine(): - return [USAGE_TEXT] - - -@pytest.fixture(scope="module") -def nextCommandsToTryUsageLine(): - return [NEXT_COMMANDS_TO_TRY_TEXT] - - -@pytest.fixture(scope="module") -def connectUsage(usageLine): - return usageLine + [" connect"] - - 
-@pytest.fixture(scope="module") -def notConnectedStatus(connectUsage): - return ['Not connected to Indy network. Please connect first.', ''] +\ - connectUsage +\ - ['', ''] - - -@pytest.fixture(scope="module") -def newKeyringOut(): - return ["New wallet {wallet-name} created", - 'Active wallet set to "{wallet-name}"' - ] - - -@pytest.fixture(scope="module") -def connectionAlreadyExists(): - return ["Connection already exists"] - - -@pytest.fixture(scope="module") -def jobApplicationProofRequestMap(): - return { - 'proof-request-version': '0.2', - 'proof-request-attr-first_name': 'first_name', - 'proof-request-attr-last_name': 'last_name', - 'proof-request-attr-phone_number': 'phone_number', - 'proof-request-attr-degree': 'degree', - 'proof-request-attr-status': 'status', - 'proof-request-attr-ssn': 'ssn' - } - - -@pytest.fixture(scope="module") -def unsyncedInviteAcceptedWhenNotConnected(availableClaims): - return [ - "Response from {inviter}", - "Trust established.", - "DID created in Indy." - ] + availableClaims + [ - "Cannot check if DID is written to Indy." - ] - - -@pytest.fixture(scope="module") -def syncedInviteAcceptedOutWithoutClaims(): - return [ - "Signature accepted.", - "Trust established.", - "DID created in Indy.", - "Synchronizing...", - "Confirmed DID written to Indy." 
- ] - - -@pytest.fixture(scope="module") -def availableClaims(): - return ["Available Claim(s): {claims}"] - - -@pytest.fixture(scope="module") -def syncedInviteAcceptedWithClaimsOut( - syncedInviteAcceptedOutWithoutClaims, availableClaims): - return syncedInviteAcceptedOutWithoutClaims + availableClaims - - -@pytest.fixture(scope="module") -def unsycedAcceptedInviteWithoutClaimOut(syncedInviteAcceptedOutWithoutClaims): - return [ - "Request not yet verified", - "Attempting to sync...", - "Synchronizing...", - ] + syncedInviteAcceptedOutWithoutClaims + \ - ["Confirmed DID written to Indy."] - - -@pytest.fixture(scope="module") -def unsyced_already_accepted_request_accepted_out(): - return [ - "Request not yet verified", - "Attempting to sync...", - "Synchronizing..." - ] - - -@pytest.fixture(scope="module") -def showTranscriptProofOut(): - return [ - "Claim ({rcvd-claim-transcript-name} " - "v{rcvd-claim-transcript-version} " - "from {rcvd-claim-transcript-provider})", - " student_name: {attr-student_name}", - "* ssn: {attr-ssn}", - "* degree: {attr-degree}", - " year: {attr-year}", - "* status: {attr-status}", - ] - - -@pytest.fixture(scope="module") -def showJobCertificateClaimInProofOut(): - return [ - "The Proof is constructed from the following claims:", - "Claim ({rcvd-claim-job-certificate-name} " - "v{rcvd-claim-job-certificate-version} " - "from {rcvd-claim-job-certificate-provider})", - "* first_name: {attr-first_name}", - "* last_name: {attr-last_name}", - " employee_status: {attr-employee_status}", - " experience: {attr-experience}", - " salary_bracket: {attr-salary_bracket}" - ] - - -@pytest.fixture(scope="module") -def proofConstructedMsg(): - return ["The Proof is constructed from the following claims:"] - - -@pytest.fixture(scope="module") -def showJobAppProofRequestOut(proofConstructedMsg, showTranscriptProofOut): - return [ - 'Found proof request "{proof-req-to-match}" in connection "{inviter}"', - "Status: Requested", - "Name: 
{proof-request-to-show}", - "Version: {proof-request-version}", - "Attributes:", - "{proof-request-attr-first_name}: {set-attr-first_name}", - "{proof-request-attr-last_name}: {set-attr-last_name}", - "{proof-request-attr-phone_number}: {set-attr-phone_number}", - "{proof-request-attr-degree} (V): {attr-degree}", - "{proof-request-attr-status} (V): {attr-status}", - "{proof-request-attr-ssn} (V): {attr-ssn}" - ] + proofConstructedMsg + showTranscriptProofOut - - -@pytest.fixture(scope="module") -def showNameProofRequestOut(showJobCertificateClaimInProofOut): - return [ - 'Found proof request "{proof-req-to-match}" in connection "{inviter}"', - "Name: {proof-req-to-match}", - "Version: {proof-request-version}", - "Status: Requested", - "Attributes:", - "{proof-request-attr-first_name} (V): {set-attr-first_name}", - "{proof-request-attr-last_name} (V): {set-attr-last_name}", - ] + showJobCertificateClaimInProofOut + [ - "Try Next:", - "set to ", - 'send proof "{proof-req-to-match}" to "{inviter}"' - ] - - -@pytest.fixture(scope="module") -def showBankingProofOut(): - return [ - "Claim ({rcvd-claim-banking-name} " - "v{rcvd-claim-banking-version} " - "from {rcvd-claim-banking-provider})", - "title: {attr-title}", - "first_name: {attr-first_name}", - "last_name: {attr-last_name}", - "address_1: {attr-address_1}", - "address_2: {attr-address_2}", - "address_3: {attr-address_3}", - "postcode_zip: {attr-postcode_zip}", - "date_of_birth: {attr-date_of_birth}", - "account_type: {attr-account_type}", - "year_opened: {attr-year_opened}", - "account_status: {attr-account_status}" - ] - - -@pytest.fixture(scope="module") -def proofRequestNotExists(): - return ["No matching Proof Requests found in current wallet"] - - -@pytest.fixture(scope="module") -def connectionNotExists(): - return ["No matching connection requests found in current wallet"] - - -@pytest.fixture(scope="module") -def faberInviteLoaded(aliceCLI, be, do, faberMap, loadInviteOut): - be(aliceCLI) - do("load 
{invite}", expect=loadInviteOut, mapper=faberMap) - - -@pytest.fixture(scope="module") -def acmeInviteLoaded(aliceCLI, be, do, acmeMap, loadInviteOut): - be(aliceCLI) - do("load {invite}", expect=loadInviteOut, mapper=acmeMap) - - -@pytest.fixture(scope="module") -def attrAddedOut(): - return ["Attribute added for nym {remote}"] - - -@pytest.fixture(scope="module") -def nymAddedOut(): - return ["Nym {remote} added"] - - -@pytest.fixture(scope="module") -def unSyncedEndpointOut(): - return ["Remote endpoint: "] - - -@pytest.fixture(scope="module") -def showConnectionOutWithoutEndpoint(showConnectionOut, unSyncedEndpointOut): - return showConnectionOut + unSyncedEndpointOut - - -@pytest.fixture(scope="module") -def endpointReceived(): - return ["Endpoint received:"] - - -@pytest.fixture(scope="module") -def endpointNotAvailable(): - return ["Endpoint not available"] - - -@pytest.fixture(scope="module") -def syncConnectionOutEndsWith(): - return ["Connection {inviter} synced"] - - -@pytest.fixture(scope="module") -def syncConnectionOutStartsWith(): - return ["Synchronizing..."] - - -@pytest.fixture(scope="module") -def syncConnectionOutWithEndpoint(syncConnectionOutStartsWith, - syncConnectionOutEndsWith): - return syncConnectionOutStartsWith + syncConnectionOutEndsWith - - -@pytest.fixture(scope="module") -def syncConnectionOutWithoutEndpoint(syncConnectionOutStartsWith): - return syncConnectionOutStartsWith - - -@pytest.fixture(scope="module") -def showSyncedConnectionWithEndpointOut( - acceptedConnectionHeading, showConnectionOut): - return acceptedConnectionHeading + showConnectionOut + \ - ["Last synced: "] - - -@pytest.fixture(scope="module") -def showSyncedConnectionWithoutEndpointOut(showConnectionOut): - return showConnectionOut - - -@pytest.fixture(scope="module") -def connectionNotYetSynced(): - return [" Last synced: "] - - -@pytest.fixture(scope="module") -def acceptedConnectionHeading(): - return ["Connection"] - - -@pytest.fixture(scope="module") -def 
unAcceptedConnectionHeading(): - return ["Connection (not yet accepted)"] - - -@pytest.fixture(scope="module") -def showUnSyncedConnectionOut(unAcceptedConnectionHeading, showConnectionOut): - return unAcceptedConnectionHeading + showConnectionOut - - -@pytest.fixture(scope="module") -def showClaimNotFoundOut(): - return ["No matching Claims found in any connections in current wallet"] - - -@pytest.fixture(scope="module") -def transcriptClaimAttrValueMap(): - return { - "attr-student_name": "Alice Garcia", - "attr-ssn": "123-45-6789", - "attr-degree": "Bachelor of Science, Marketing", - "attr-year": "2015", - "attr-status": "graduated" - } - - -@pytest.fixture(scope="module") -def transcriptClaimValueMap(transcriptClaimAttrValueMap): - basic = { - 'inviter': 'Faber College', - 'name': 'Transcript', - "version": "1.2", - 'status': "available (not yet issued)" - } - basic.update(transcriptClaimAttrValueMap) - return basic - - -@pytest.fixture(scope="module") -def bankingRelationshipClaimAttrValueMap(): - return { - "attr-title": "Mrs.", - "attr-first_name": "Alicia", - "attr-last_name": "Garcia", - "attr-address_1": "H-301", - "attr-address_2": "Street 1", - "attr-address_3": "UK", - "attr-postcode_zip": "G61 3NR", - "attr-date_of_birth": "December 28, 1990", - "attr-account_type": "savings", - "attr-year_opened": "2000", - "attr-account_status": "active" - } - - -@pytest.fixture(scope="module") -def transcriptClaimMap(): - return { - 'inviter': 'Faber College', - 'name': 'Transcript', - 'status': "available (not yet issued)", - "version": "1.2", - "attr-student_name": "string", - "attr-ssn": "string", - "attr-degree": "string", - "attr-year": "string", - "attr-status": "string" - } - - -@pytest.fixture(scope="module") -def jobCertClaimAttrValueMap(): - return { - "attr-first_name": "Alice", - "attr-last_name": "Garcia", - "attr-employee_status": "Permanent", - "attr-experience": "3 years", - "attr-salary_bracket": "between $50,000 to $100,000" - } - - 
-@pytest.fixture(scope="module") -def jobCertificateClaimValueMap(jobCertClaimAttrValueMap): - basic = { - 'inviter': 'Acme Corp', - 'name': 'Job-Certificate', - 'status': "available (not yet issued)", - "version": "0.2" - } - basic.update(jobCertClaimAttrValueMap) - return basic - - -@pytest.fixture(scope="module") -def jobCertificateClaimMap(): - return { - 'inviter': 'Acme Corp', - 'name': 'Job-Certificate', - 'status': "available (not yet issued)", - "version": "0.2", - "attr-first_name": "string", - "attr-last_name": "string", - "attr-employee_status": "string", - "attr-experience": "string", - "attr-salary_bracket": "string" - } - - -@pytest.fixture(scope="module") -def reqClaimOut(): - return ["Found claim {name} in connection {inviter}", - "Requesting claim {name} from {inviter}..."] - - -# TODO Change name -@pytest.fixture(scope="module") -def reqClaimOut1(): - return ["Found claim {name} in connection {inviter}", - "Requesting claim {name} from {inviter}...", - "Signature accepted.", - 'Received claim "{name}".'] - - -@pytest.fixture(scope="module") -def rcvdTranscriptClaimOut(): - return ["Found claim {name} in connection {inviter}", - "Name: {name}", - "Status: ", - "Version: {version}", - "Attributes:", - "student_name: {attr-student_name}", - "ssn: {attr-ssn}", - "degree: {attr-degree}", - "year: {attr-year}", - "status: {attr-status}" - ] - - -@pytest.fixture(scope="module") -def rcvdBankingRelationshipClaimOut(): - return ["Found claim {name} in connection {inviter}", - "Name: {name}", - "Status: ", - "Version: {version}", - "Attributes:", - "title: {attr-title}", - "first_name: {attr-first_name}", - "last_name: {attr-last_name}", - "address_1: {attr-address_1}", - "address_2: {attr-address_2}", - "address_3: {attr-address_3}", - "postcode_zip: {attr-postcode_zip}", - "date_of_birth: {attr-date_of_birth}", - "year_opened: {attr-year_opened}", - "account_status: {attr-account_status}" - ] - - -@pytest.fixture(scope="module") -def 
rcvdJobCertClaimOut(): - return ["Found claim {name} in connection {inviter}", - "Name: {name}", - "Status: ", - "Version: {version}", - "Attributes:", - "first_name: {attr-first_name}", - "last_name: {attr-last_name}", - "employee_status: {attr-employee_status}", - "experience: {attr-experience}", - "salary_bracket: {attr-salary_bracket}" - ] - - -@pytest.fixture(scope="module") -def showTranscriptClaimOut(nextCommandsToTryUsageLine): - return ["Found claim {name} in connection {inviter}", - "Name: {name}", - "Status: {status}", - "Version: {version}", - "Attributes:", - "student_name", - "ssn", - "degree", - "year", - "status" - ] + nextCommandsToTryUsageLine + \ - ['request claim "{name}"'] - - -@pytest.fixture(scope="module") -def showJobCertClaimOut(nextCommandsToTryUsageLine): - return ["Found claim {name} in connection {inviter}", - "Name: {name}", - "Status: {status}", - "Version: {version}", - "Attributes:", - "first_name", - "last_name", - "employee_status", - "experience", - "salary_bracket" - ] + nextCommandsToTryUsageLine + \ - ['request claim "{name}"'] - - -@pytest.fixture(scope="module") -def showBankingRelationshipClaimOut(nextCommandsToTryUsageLine): - return ["Found claim {name} in connection {inviter}", - "Name: {name}", - "Status: {status}", - "Version: {version}", - "Attributes:", - "title", - "first_name", - "last_name", - "address_1", - "address_2", - "address_3", - "postcode_zip", - "date_of_birth", - "account_type", - "year_opened", - "account_status" - ] + nextCommandsToTryUsageLine + \ - ['request claim "{name}"'] - - -@pytest.fixture(scope="module") -def showConnectionWithProofRequestsOut(): - return ["Proof Request(s): {proof-requests}"] - - -@pytest.fixture(scope="module") -def showConnectionWithAvailableClaimsOut(): - return ["Available Claim(s): {claims}"] - - -@pytest.fixture(scope="module") -def showAcceptedConnectionWithClaimReqsOut( - showAcceptedConnectionOut, - showConnectionWithProofRequestsOut, - 
showConnectionWithAvailableClaimsOut, - showConnectionSuggestion): - return showAcceptedConnectionOut + showConnectionWithProofRequestsOut + \ - showConnectionWithAvailableClaimsOut + \ - showConnectionSuggestion - - -@pytest.fixture(scope="module") -def showAcceptedConnectionWithoutAvailableClaimsOut( - showAcceptedConnectionOut, - showConnectionWithProofRequestsOut): - return showAcceptedConnectionOut + showConnectionWithProofRequestsOut - - -@pytest.fixture(scope="module") -def showAcceptedConnectionWithAvailableClaimsOut( - showAcceptedConnectionOut, - showConnectionWithProofRequestsOut, - showConnectionWithAvailableClaimsOut): - return showAcceptedConnectionOut + showConnectionWithProofRequestsOut + \ - showConnectionWithAvailableClaimsOut - - -@pytest.fixture(scope="module") -def showConnectionSuggestion(nextCommandsToTryUsageLine): - return nextCommandsToTryUsageLine + \ - ['show claim "{claims}"', - 'request claim "{claims}"'] - - -@pytest.fixture(scope="module") -def showAcceptedConnectionOut(): - return [ - "Connection", - "Name: {inviter}", - "DID: {DID}", - "Verification key: {verkey}", - "Remote: {remote}", - "Remote Verification key: {remote-verkey}", - "Trust anchor: {inviter} (confirmed)", - "Request nonce: {nonce}", - "Request status: Accepted"] - - -@pytest.fixture(scope="module") -def showConnectionOut(nextCommandsToTryUsageLine, connectionNotYetSynced): - return [ - " Name: {inviter}", - " DID: not yet assigned", - " Trust anchor: {inviter} (not yet written to Indy)", - " Verification key: ", - " Signing key: ", - " Remote: {remote}", - " Remote endpoint: {endpoint}", - " Request nonce: {nonce}", - " Request status: not verified, remote verkey unknown", - " Last synced: {last_synced}"] + \ - [""] + \ - nextCommandsToTryUsageLine + \ - [' sync "{inviter}"', - ' accept request from "{inviter}"', - '', - ''] - - -@pytest.fixture(scope="module") -def showAcceptedSyncedConnectionOut(nextCommandsToTryUsageLine): - return [ - "Connection", - "Name: 
{inviter}", - "Trust anchor: {inviter} (confirmed)", - "Verification key: ~", - "Signing key: ", - "Remote: {remote}", - "Remote Verification key: ", - "Request nonce: {nonce}", - "Request status: Accepted", - "Proof Request(s): {proof-requests}", - "Available Claim(s): {claims}"] + \ - nextCommandsToTryUsageLine + \ - ['show claim "{claim-to-show}"', - 'send proof "{proof-requests}"'] - - -@pytest.yield_fixture(scope="module") -def poolCLI_baby(CliBuilder): - yield from CliBuilder("pool") - - -@pytest.yield_fixture(scope="module") -def aliceCLI(CliBuilder): - yield from CliBuilder("alice") - - -@pytest.yield_fixture(scope="module") -def devinCLI(CliBuilder): - yield from CliBuilder("devin") - - -@pytest.yield_fixture(scope="module") -def bobCLI(CliBuilder): - yield from CliBuilder("bob") - - -@pytest.yield_fixture(scope="module") -def earlCLI(CliBuilder): - yield from CliBuilder("earl") - - -@pytest.yield_fixture(scope="module") -def susanCLI(CliBuilder): - yield from CliBuilder("susan") - - -@pytest.yield_fixture(scope="module") -def philCLI(CliBuilder): - yield from CliBuilder("phil") - - -@pytest.yield_fixture(scope="module") -def faberCLI(CliBuilder): - yield from CliBuilder("faber") - - -@pytest.yield_fixture(scope="module") -def acmeCLI(CliBuilder): - yield from CliBuilder("acme") - - -@pytest.yield_fixture(scope="module") -def thriftCLI(CliBuilder): - yield from CliBuilder("thrift") - - -@pytest.fixture(scope="module") -def poolCLI(tdir, tconf, poolCLI_baby, poolTxnData, poolTxnNodeNames, txnPoolNodeSet): - seeds = poolTxnData["seeds"] - for nName in poolTxnNodeNames: - seed = seeds[nName] - use_bls = nName in poolTxnData['nodesWithBls'] - config_helper = NodeConfigHelper(nName, tconf, chroot=tdir) - initNodeKeysForBothStacks(nName, config_helper.keys_dir, - seed, override=True, use_bls=use_bls) - for node in txnPoolNodeSet: - poolCLI_baby.nodes[node.name] = node - return poolCLI_baby - - -@pytest.fixture(scope="module") -def poolNodesCreated(poolCLI, 
poolTxnNodeNames): - #ensureNodesCreated(poolCLI, poolTxnNodeNames) - return poolCLI - - -class TestMultiNode: - def __init__(self, name, poolTxnNodeNames, tdir, tconf, - poolTxnData, tdirWithPoolTxns, tdirWithDomainTxns, poolCli): - self.name = name - self.poolTxnNodeNames = poolTxnNodeNames - self.tdir = tdir - self.tconf = tconf - self.poolTxnData = poolTxnData - self.tdirWithPoolTxns = tdirWithPoolTxns - self.tdirWithDomainTxns = tdirWithDomainTxns - self.poolCli = poolCli - - -def custom_tdir_with_pool_txns(pool_txn_data, tdir_for_pool_txns, pool_transactions_file_name): - ledger = create_genesis_txn_init_ledger(tdir_for_pool_txns, pool_transactions_file_name) - - for item in pool_txn_data["txns"]: - if get_type(item) == NODE: - ledger.add(item) - ledger.stop() - return tdir_for_pool_txns - - -def custom_tdir_with_domain_txns(pool_txn_data, tdir_for_domain_txns, - domain_txn_ordered_fields, domain_transactions_file_name): - ledger = create_genesis_txn_init_ledger(tdir_for_domain_txns, domain_transactions_file_name) - - for item in pool_txn_data["txns"]: - if get_type(item) == NYM: - ledger.add(item) - ledger.stop() - return tdir_for_domain_txns - - -@pytest.yield_fixture(scope="module") -def multiPoolNodesCreated(request, tconf, looper, tdir, - cliTempLogger, namesOfPools=("pool1", "pool2")): - - multiNodes = [] - for poolName in namesOfPools: - newPoolTxnNodeNames = [poolName + n for n - in ("Alpha", "Beta", "Gamma", "Delta")] - config_helper = ConfigHelper(tconf, chroot=tdir) - ledger_dir = os.path.join(config_helper.ledger_base_dir, poolName) - newPoolTxnData = getPoolTxnData(poolName, newPoolTxnNodeNames) - newTdirWithPoolTxns = custom_tdir_with_pool_txns(newPoolTxnData, ledger_dir, - tconf.poolTransactionsFile) - newTdirWithDomainTxns = custom_tdir_with_domain_txns( - newPoolTxnData, ledger_dir, domainTxnOrderedFields(), tconf.domainTransactionsFile) - testPoolNode = TestMultiNode( - poolName, newPoolTxnNodeNames, tdir, tconf, - newPoolTxnData, 
newTdirWithPoolTxns, newTdirWithDomainTxns, None) - - poolCLIBabyGen = CliBuilder(tdir, newTdirWithPoolTxns, - newTdirWithDomainTxns, looper, tconf, - cliTempLogger) - poolCLIBaby = next(poolCLIBabyGen(poolName)) - - # Ugly hack to build several networks - network_bak = tconf.NETWORK_NAME - tconf.NETWORK_NAME = poolName - tdirWithNodeKeepInited(tdir, tconf, NodeConfigHelper, newPoolTxnData, newPoolTxnNodeNames) - - nodes = [] - for nm in newPoolTxnNodeNames: - config_helper = NodeConfigHelper(nm, tconf, chroot=tdir) - node = TestNode(nm, - config_helper=config_helper, - config=tconf, - pluginPaths=None) - looper.add(node) - nodes.append(node) - looper.run(checkNodesConnected(nodes)) - ensureElectionsDone(looper=looper, nodes=nodes) - - poolCli = poolCLI(tdir, tconf, poolCLIBaby, newPoolTxnData, - newPoolTxnNodeNames, nodes) - testPoolNode.poolCli = poolCli - multiNodes.append(testPoolNode) - tconf.NETWORK_NAME = network_bak - - return multiNodes - - -@pytest.fixture("module") -def ctx(): - """ - Provides a simple container for test context. Assists with 'be' and 'do'. - """ - return {} - - -@pytest.fixture("module") -def be(ctx): - """ - Fixture that is a 'be' function that closes over the test context. - 'be' allows to change the current cli in the context. - """ - def _(cli): - ctx['current_cli'] = cli - return _ - - -@pytest.fixture("module") -def do(ctx): - """ - Fixture that is a 'do' function that closes over the test context - 'do' allows to call the do method of the current cli from the context. 
- """ - return doByCtx(ctx) - - -@pytest.fixture(scope="module") -def dump(ctx): - - def _dump(): - logger = getlogger() - - cli = ctx['current_cli'] - nocli = {"cli": False} - wrts = ''.join(cli.cli.output.writes) - logger.info('=========================================', extra=nocli) - logger.info('| OUTPUT DUMP |', extra=nocli) - logger.info('-----------------------------------------', extra=nocli) - for w in wrts.splitlines(): - logger.info('> ' + w, extra=nocli) - logger.info('=========================================', extra=nocli) - return _dump - - -@pytest.fixture(scope="module") -def bookmark(ctx): - BM = '~bookmarks~' - if BM not in ctx: - ctx[BM] = {} - return ctx[BM] - - -@pytest.fixture(scope="module") -def current_cli(ctx): - def _(): - return ctx['current_cli'] - return _ - - -@pytest.fixture(scope="module") -def get_bookmark(bookmark, current_cli): - def _(): - return bookmark.get(current_cli(), 0) - return _ - - -@pytest.fixture(scope="module") -def set_bookmark(bookmark, current_cli): - def _(val): - bookmark[current_cli()] = val - return _ - - -@pytest.fixture(scope="module") -def inc_bookmark(get_bookmark, set_bookmark): - def _(inc): - val = get_bookmark() - set_bookmark(val + inc) - return _ - - -@pytest.fixture(scope="module") -def expect(current_cli, get_bookmark, inc_bookmark): - - def _expect(expected, mapper=None, line_no=None, - within=None, ignore_extra_lines=None): - cur_cli = current_cli() - - def _(): - expected_ = expected if not mapper \ - else [s.format(**mapper) for s in expected] - assert isinstance(expected_, List) - bm = get_bookmark() - actual = ''.join(cur_cli.cli.output.writes).splitlines()[bm:] - assert isinstance(actual, List) - explanation = '' - expected_index = 0 - for i in range(min(len(expected_), len(actual))): - e = expected_[expected_index] - assert isinstance(e, str) - a = actual[i] - assert isinstance(a, str) - is_p = isinstance(e, P) - if (not is_p and a != e) or (is_p and not e.match(a)): - if 
ignore_extra_lines: - continue - explanation += "line {} doesn't match\n"\ - " expected: {}\n"\ - " actual: {}\n".format(i, e, a) - expected_index += 1 - - if len(expected_) > len(actual): - for e in expected_: - try: - p = re.compile(e) if isinstance(e, P) else None - except Exception as err: - explanation += "ERROR COMPILING REGEX for {}: {}\n".\ - format(e, err) - for a in actual: - if (p and p.fullmatch(a)) or a == e: - break - else: - explanation += "missing: {}\n".format(e) - - if len(expected_) < len(actual) and ignore_extra_lines is None: - for a in actual: - for e in expected_: - p = re.compile(e) if isinstance(e, P) else None - if (p and p.fullmatch(a)) or a == e: - break - else: - explanation += "extra: {}\n".format(a) - - if explanation: - explanation += "\nexpected:\n" - for x in expected_: - explanation += " > {}\n".format(x) - explanation += "\nactual:\n" - for x in actual: - explanation += " > {}\n".format(x) - if line_no: - explanation += "section ends line number: {}\n".format( - line_no) - pytest.fail(''.join(explanation)) - else: - inc_bookmark(len(actual)) - if within: - cur_cli.looper.run(eventually(_, timeout=within)) - else: - _() - - return _expect - - -@pytest.fixture(scope="module") -def steward(poolNodesCreated, looper, tdir, stewardWallet): - return buildStewardClient(looper, tdir, stewardWallet) - - -@pytest.fixture(scope="module") -def faberAdded(poolNodesCreated, - looper, - aliceCLI, - faberInviteLoaded, - aliceConnected, - steward, stewardWallet): - li = get_connection_request("Faber", aliceCLI.activeWallet) - createNym(looper, li.remoteIdentifier, steward, stewardWallet, - role=TRUST_ANCHOR) - - -@pytest.fixture(scope="module") # noqa -def faberIsRunningWithoutNymAdded(emptyLooper, tdirWithPoolTxns, faberWallet, - faberAgent): - faber, faberWallet = runningFaber(emptyLooper, tdirWithPoolTxns, - faberWallet, faberAgent, None) - return faber, faberWallet - - -@pytest.fixture(scope="module") # noqa -def faberIsRunning(emptyLooper, 
tdirWithPoolTxns, faberWallet, - faberAddedByPhil, faberAgent, faberBootstrap): - faber, faberWallet = runningFaber( - emptyLooper, tdirWithPoolTxns, faberWallet, faberAgent, faberAddedByPhil, faberBootstrap) - return faber, faberWallet - - -@pytest.fixture(scope="module") # noqa -def acmeIsRunning(emptyLooper, tdirWithPoolTxns, acmeWallet, - acmeAddedByPhil, acmeAgent, acmeBootstrap): - acme, acmeWallet = runningAcme( - emptyLooper, tdirWithPoolTxns, acmeWallet, acmeAgent, acmeAddedByPhil, acmeBootstrap) - - return acme, acmeWallet - - -@pytest.fixture(scope="module") # noqa -def thriftIsRunning(emptyLooper, tdirWithPoolTxns, thriftWallet, - thriftAddedByPhil, thriftAgent): - thrift, thriftWallet = runningThrift(emptyLooper, tdirWithPoolTxns, - thriftWallet, thriftAgent, - thriftAddedByPhil) - - return thrift, thriftWallet - - -@pytest.fixture(scope='module') -def savedKeyringRestored(): - return ['Saved wallet {wallet-name} restored'] - - -# TODO: Need to refactor following three fixture to reuse code -@pytest.yield_fixture(scope="module") -def cliForMultiNodePools(request, multiPoolNodesCreated, tdir, - tdirWithPoolTxns, tdirWithDomainTxns, tconf, - cliTempLogger): - yield from getCliBuilder(tdir, tconf, - tdirWithPoolTxns, tdirWithDomainTxns, - cliTempLogger, multiPoolNodesCreated)("susan") - - -@pytest.yield_fixture(scope="module") -def aliceMultiNodePools(request, multiPoolNodesCreated, tdir, - tdirWithPoolTxns, tdirWithDomainTxns, tconf, - cliTempLogger): - yield from getCliBuilder(tdir, tconf, - tdirWithPoolTxns, tdirWithDomainTxns, - cliTempLogger, multiPoolNodesCreated)("alice") - - -@pytest.yield_fixture(scope="module") -def earlMultiNodePools(request, multiPoolNodesCreated, tdir, - tdirWithPoolTxns, tdirWithDomainTxns, tconf, - cliTempLogger): - yield from getCliBuilder(tdir, tconf, - tdirWithPoolTxns, tdirWithDomainTxns, - cliTempLogger, multiPoolNodesCreated)("earl") - - -@pytest.yield_fixture(scope="module") # noqa -def trusteeCLI(CliBuilder, 
poolTxnTrusteeNames): - yield from CliBuilder(poolTxnTrusteeNames[0]) - - -@pytest.fixture(scope="module") -def trusteeMap(trusteeWallet): - return { - 'trusteeSeed': bytes(trusteeWallet._signerById( - trusteeWallet.defaultId).sk).decode(), - 'trusteeIdr': trusteeWallet.defaultId, - } - - -@pytest.fixture(scope="module") -def trusteeCli(be, do, trusteeMap, poolNodesStarted, nymAddedOut, trusteeCLI): - be(trusteeCLI) - do('new key with seed {trusteeSeed}', expect=[ - 'DID for key is {trusteeIdr}', - 'Current DID set to {trusteeIdr}'], - mapper=trusteeMap) - - if not trusteeCLI._isConnectedToAnyEnv(): - connect_and_check_output(do, trusteeCLI.txn_dir) - - return trusteeCLI - - -@pytest.fixture(scope="module") -def poolNodesStarted(be, do, poolCLI): - be(poolCLI) - return poolCLI - - -@pytest.fixture(scope="module") -def philCli(be, do, philCLI, trusteeCli, poolTxnData): - - be(philCLI) - - do('prompt Phil', expect=prompt_is('Phil')) - - do('new wallet Phil', expect=['New wallet Phil created', - 'Active wallet set to "Phil"']) - - phil_seed = randomSeed() - phil_signer = DidSigner(seed=phil_seed) - - mapper = { - 'seed': phil_seed.decode(), - 'idr': phil_signer.identifier} - do('new key with seed {seed}', expect=['Key created in wallet Phil', - 'DID for key is {idr}', - 'Current DID set to {idr}'], - mapper=mapper) - - addNym(be, do, trusteeCli, - phil_signer.identifier, - verkey=phil_signer.verkey, - role=Roles.TRUSTEE.name) - - return philCLI - - -@pytest.fixture(scope="module") -def faberAddedByPhil(be, do, poolNodesStarted, philCli, - nymAddedOut, faberMap): - return addAgent(be, do, philCli, faberMap) - - -@pytest.fixture(scope="module") -def acmeAddedByPhil(be, do, poolNodesStarted, philCli, - nymAddedOut, acmeMap): - return addAgent(be, do, philCli, acmeMap) - - -@pytest.fixture(scope="module") -def thriftAddedByPhil(be, do, poolNodesStarted, philCli, - nymAddedOut, thriftMap): - return addAgent(be, do, philCli, thriftMap) - - -@pytest.fixture(scope='module') 
-def newStewardVals(): - newStewardSeed = randomSeed() - signer = DidSigner(seed=newStewardSeed) - return { - 'newStewardSeed': newStewardSeed.decode(), - 'newStewardIdr': signer.identifier, - 'newStewardVerkey': signer.verkey - } - - -@pytest.fixture(scope='function') -def new_bls_keys(): - _, bls_key, key_proof = create_default_bls_crypto_factory().generate_bls_keys() - return bls_key, key_proof - - -@pytest.fixture(scope='module') -def newNodeVals(): - newNodeSeed = randomSeed() - nodeIp, nodePort = genHa() - clientIp, clientPort = genHa() - _, bls_key, key_proof = create_default_bls_crypto_factory().generate_bls_keys() - - newNodeData = { - NODE_IP: nodeIp, - NODE_PORT: nodePort, - CLIENT_IP: clientIp, - CLIENT_PORT: clientPort, - ALIAS: randomString(6), - SERVICES: [VALIDATOR], - BLS_KEY: bls_key, - BLS_KEY_PROOF: key_proof - } - - return { - 'newNodeSeed': newNodeSeed.decode(), - 'newNodeIdr': SimpleSigner(seed=newNodeSeed).identifier, - 'newNodeData': newNodeData - } - -@pytest.fixture(scope='module') -def nodeValsEmptyData(newNodeVals): - node_vals = {} - node_vals['newNodeData'] = {} - node_vals['newNodeIdr'] = newNodeVals['newNodeIdr'] - return node_vals - -@pytest.yield_fixture(scope="module") -def cliWithNewStewardName(CliBuilder): - yield from CliBuilder("newSteward") - - -@pytest.fixture(scope='module') -def newStewardCli(be, do, poolNodesStarted, trusteeCli, - cliWithNewStewardName, newStewardVals): - be(trusteeCli) - if not trusteeCli._isConnectedToAnyEnv(): - connect_and_check_output(do, trusteeCli.txn_dir) - - do('send NYM dest={{newStewardIdr}} role={role} verkey={{newStewardVerkey}}' - .format(role=Roles.STEWARD.name), - within=3, - expect='Nym {newStewardIdr} added', - mapper=newStewardVals) - - be(cliWithNewStewardName) - - do('new key with seed {newStewardSeed}', expect=[ - 'DID for key is {newStewardIdr}', - 'Current DID set to {newStewardIdr}'], - mapper=newStewardVals) - - if not cliWithNewStewardName._isConnectedToAnyEnv(): - 
connect_and_check_output(do, cliWithNewStewardName.txn_dir) - - return cliWithNewStewardName - - -@pytest.fixture(scope="module") -def newNodeAdded(be, do, poolNodesStarted, philCli, newStewardCli, - newNodeVals): - be(newStewardCli) - doSendNodeCmd(do, newNodeVals) - newNodeData = newNodeVals["newNodeData"] - - def checkClientConnected(client): - name = newNodeData[ALIAS] + CLIENT_STACK_SUFFIX - assert name in client.nodeReg - - def checkNodeConnected(nodes): - for node in nodes: - name = newNodeData[ALIAS] - assert name in node.nodeReg - - # Reconnect steward's CLI to get new pool membership info. - disconnect_and_check_output(do) - connect_and_check_output(do, newStewardCli.txn_dir) - - timeout = waits.expectedClientToPoolConnectionTimeout( - len(newStewardCli.activeClient.nodeReg)) - - newStewardCli.looper.run(eventually(checkClientConnected, - newStewardCli.activeClient, - timeout=timeout)) - - be(philCli) - - # Reconnect Phil's CLI if needed to get new pool membership info. - if philCli._isConnectedToAnyEnv(): - disconnect_and_check_output(do) - connect_and_check_output(do, philCli.txn_dir) - - philCli.looper.run(eventually(checkClientConnected, - philCli.activeClient, - timeout=timeout)) - - poolNodesStarted.looper.run( - eventually( - checkNodeConnected, - list( - poolNodesStarted.nodes.values()), - timeout=timeout)) - return newNodeVals - - -@pytest.fixture(scope='module') -def nodeIds(poolNodesStarted): - return next(iter(poolNodesStarted.nodes.values())).poolManager.nodeIds diff --git a/indy_client/test/cli/constants.py b/indy_client/test/cli/constants.py deleted file mode 100644 index 525376c8a..000000000 --- a/indy_client/test/cli/constants.py +++ /dev/null @@ -1,12 +0,0 @@ -NODE_REQUEST_COMPLETED = "Node request completed" -NODE_REQUEST_FAILED = "Node request failed" -SCHEMA_ADDED = [ - 'The following schema is published to the Indy distributed ledger', - 'Sequence number is'] -SCHEMA_NOT_ADDED_DUPLICATE = [ - 'can have one and only one SCHEMA with 
name'] -CLAIM_DEF_ADDED = [ - 'The claim definition was published to the Indy distributed ledger', - 'Sequence number is'] -INVALID_SYNTAX = "Invalid syntax" -ERROR = 'Error:' diff --git a/indy_client/test/cli/helper.py b/indy_client/test/cli/helper.py deleted file mode 100644 index 561401ec3..000000000 --- a/indy_client/test/cli/helper.py +++ /dev/null @@ -1,515 +0,0 @@ -import json -import os -import re -from _sha256 import sha256 -from typing import Dict - -from libnacl import randombytes - -from indy_client.cli.cli import IndyCli -from indy_client.client.wallet.connection import Connection -from indy_client.test.client.TestClient import TestClient -from indy_common.constants import NYM -from indy_common.roles import Roles -from indy_common.txn_util import getTxnOrderedFields -from ledger.genesis_txn.genesis_txn_file_util import create_genesis_txn_init_ledger -from plenum.bls.bls_crypto_factory import create_default_bls_crypto_factory -from plenum.common.constants import TARGET_NYM, ROLE, TXN_TYPE, ALIAS, TXN_ID, VALIDATOR, STEWARD -from plenum.common.member.member import Member -from plenum.common.member.steward import Steward -from plenum.common.signer_did import DidSigner -from plenum.common.signer_simple import SimpleSigner -from plenum.common.util import rawToFriendly -from plenum.test import waits -from plenum.test.cli.helper import TestCliCore, assertAllNodesCreated, \ - waitAllNodesStarted, newCLI as newPlenumCLI -from plenum.test.helper import initDirWithGenesisTxns -from plenum.test.testable import spyable -from stp_core.common.log import getlogger -from stp_core.loop.eventually import eventually -from stp_core.loop.looper import Looper -from stp_core.network.port_dispenser import genHa - -logger = getlogger() - - -@spyable(methods=[IndyCli.print, IndyCli.printTokens]) -class TestCLI(IndyCli, TestCliCore): - pass - # def __init__(self, *args, **kwargs): - # super().__init__(*args, **kwargs) - # # new = logging.StreamHandler(sys.stdout) - # # 
Logger()._setHandler('std', new) - # Logger().enableStdLogging() - - -def sendNym(cli, nym, role): - cli.enterCmd("send NYM {}={} " - "{}={}".format(TARGET_NYM, nym, - ROLE, role)) - - -def checkGetNym(cli, nym): - printeds = ["Getting nym {}".format(nym), "Sequence number for NYM {} is " - .format(nym)] - checks = [x in cli.lastCmdOutput for x in printeds] - assert all(checks) - # TODO: These give NameError, don't know why - # assert all([x in cli.lastCmdOutput for x in printeds]) - # assert all(x in cli.lastCmdOutput for x in printeds) - - -def checkAddAttr(cli): - assert "Adding attributes" in cli.lastCmdOutput - - -def chkNymAddedOutput(cli, nym): - checks = [x['msg'] == "Nym {} added".format(nym) for x in cli.printeds] - assert any(checks) - - -def checkConnectedToEnv(cli): - # TODO: Improve this - assert "now connected to" in cli.lastCmdOutput - - -def ensureConnectedToTestEnv(be, do, cli): - be(cli) - if not cli._isConnectedToAnyEnv(): - timeout = waits.expectedClientToPoolConnectionTimeout(len(cli.nodeReg)) - connect_and_check_output(do, cli.txn_dir, timeout) - - -def connect_and_check_output(do, netwotk, timeout=3, expect=None, mapper=None): - if expect is None: - expect = 'Connected to {}'.format(netwotk) - do('connect {}'.format(netwotk), within=timeout, - expect=expect, mapper=mapper) - - -def disconnect_and_check_output(do, timeout=3, expect=None, mapper=None): - if expect is None: - expect = 'Disconnected from' - do('disconnect', within=timeout, expect=expect, mapper=mapper) - - -def ensureNymAdded(be, do, cli, nym, role=None): - ensureConnectedToTestEnv(be, do, cli) - cmd = "send NYM {dest}={nym}".format(dest=TARGET_NYM, nym=nym) - if role: - cmd += " {ROLE}={role}".format(ROLE=ROLE, role=role) - cli.enterCmd(cmd) - timeout = waits.expectedTransactionExecutionTime(len(cli.nodeReg)) - cli.looper.run( - eventually(chkNymAddedOutput, cli, nym, retryWait=1, timeout=timeout)) - - timeout = waits.expectedTransactionExecutionTime(len(cli.nodeReg)) - 
cli.enterCmd("send GET_NYM {dest}={nym}".format(dest=TARGET_NYM, nym=nym)) - cli.looper.run(eventually(checkGetNym, cli, nym, - retryWait=1, timeout=timeout)) - - cli.enterCmd('send ATTRIB {dest}={nym} raw={raw}'. - format(dest=TARGET_NYM, nym=nym, - # raw='{\"attrName\":\"attrValue\"}')) - raw=json.dumps({"attrName": "attrValue"}))) - timeout = waits.expectedTransactionExecutionTime(len(cli.nodeReg)) - cli.looper.run(eventually(checkAddAttr, cli, retryWait=1, timeout=timeout)) - - -def ensureNodesCreated(cli, nodeNames): - # cli.enterCmd("new node all") - # TODO: Why 2 different interfaces one with list and one with varags - assertAllNodesCreated(cli, nodeNames) - waitAllNodesStarted(cli, *nodeNames) - - -def getFileLines(path, caller_file=None): - filePath = IndyCli._getFilePath(path, caller_file) - with open(filePath, 'r') as fin: - lines = fin.read().splitlines() - return lines - - -def doubleBraces(lines): - # TODO this is needed to accommodate mappers in 'do' fixture; this can be - # removed when refactoring to the new 'expect' fixture is complete - alteredLines = [] - for line in lines: - alteredLines.append(line.replace('{', '{{').replace('}', '}}')) - return alteredLines - - -def get_connection_request(name, wallet) -> Connection: - existingLinkInvites = wallet.getMatchingConnections(name) - li = existingLinkInvites[0] - return li - - -def getPoolTxnData(poolId, newPoolTxnNodeNames): - data = {} - data["seeds"] = {} - data["txns"] = [] - data['nodesWithBls'] = {} - for index, n in enumerate(newPoolTxnNodeNames, start=1): - newStewardAlias = poolId + "Steward" + str(index) - stewardSeed = (newStewardAlias + "0" * - (32 - len(newStewardAlias))).encode() - data["seeds"][newStewardAlias] = stewardSeed - stewardSigner = SimpleSigner(seed=stewardSeed) - data["txns"].append( - Member.nym_txn(nym=stewardSigner.identifier, - verkey=stewardSigner.verkey, - role=STEWARD, - name=poolId + "Steward" + str(index), - seq_no=index, - 
txn_id=sha256("{}".format(stewardSigner.verkey).encode()).hexdigest())) - - newNodeAlias = n - nodeSeed = (newNodeAlias + "0" * (32 - len(newNodeAlias))).encode() - data["seeds"][newNodeAlias] = nodeSeed - nodeSigner = SimpleSigner(seed=nodeSeed) - - _, bls_key, key_proof = create_default_bls_crypto_factory().generate_bls_keys( - seed=data['seeds'][n]) - data['nodesWithBls'][n] = True - - node_txn = Steward.node_txn( - steward_nym=stewardSigner.verkey, - node_name=newNodeAlias, - nym=nodeSigner.verkey, - ip="127.0.0.1", - node_port=genHa()[1], - client_port=genHa()[1], - client_ip="127.0.0.1", - blskey=bls_key, - bls_key_proof=key_proof, - services=[VALIDATOR], - txn_id=sha256("{}".format(nodeSigner.verkey).encode()).hexdigest() - ) - - data["txns"].append(node_txn) - - return data - - -def prompt_is(prompt): - def x(cli): - assert cli.currPromptText == prompt, \ - "expected prompt: {}, actual prompt: {}". \ - format(prompt, cli.currPromptText) - - return x - - -def addTxnsToGenesisFile(dir, file, txns, fields=getTxnOrderedFields()): - ledger = create_genesis_txn_init_ledger(dir, file) - for txn in txns: - ledger.add(txn) - ledger.stop() - - -def addTrusteeTxnsToGenesis(trusteeList, trusteeData, txnDir, txnFileName): - added = 0 - if trusteeList and len(trusteeList) and trusteeData: - txns = [] - for trusteeToAdd in trusteeList: - try: - trusteeData = next( - (data for data in trusteeData if data[0] == trusteeToAdd)) - name, seed, txn = trusteeData - txns.append(txn) - except StopIteration as e: - logger.debug( - '{} not found in trusteeData'.format(trusteeToAdd)) - addTxnsToGenesisFile(txnDir, txnFileName, txns) - return added - - -def newCLI(looper, client_tdir, network='sandbox', conf=None, poolDir=None, - domainDir=None, multiPoolNodes=None, unique_name=None, - logFileName=None, cliClass=TestCLI, name=None, agent=None, - nodes_chroot: str = None): - ledger_base_dir = os.path.join(client_tdir, 'networks') - tempDir = os.path.join(ledger_base_dir, network) - 
os.makedirs(tempDir, exist_ok=True) - if poolDir or domainDir: - initDirWithGenesisTxns(tempDir, conf, poolDir, domainDir) - - if multiPoolNodes: - for pool in multiPoolNodes: - initDirWithGenesisTxns( - os.path.join(ledger_base_dir, pool.name), - conf, - pool.tdirWithPoolTxns, - pool.tdirWithDomainTxns - ) - from indy_node.test.helper import TestNode - new_cli = newPlenumCLI( - looper, - client_tdir, - ledger_base_dir, - cliClass=cliClass, - nodeClass=TestNode, - clientClass=TestClient, - config=conf, - unique_name=unique_name, - logFileName=logFileName, - name=name, - agentCreator=True, - nodes_chroot=nodes_chroot) - if isinstance(new_cli, IndyCli) and agent is not None: - new_cli.agent = agent - new_cli.txn_dir = network - return new_cli - - -def getCliBuilder(tdir, tconf, tdirWithPoolTxns, tdirWithDomainTxns, - logFileName=None, multiPoolNodes=None, cliClass=TestCLI, - name=None, agent=None, def_looper=None): - def _(space, - looper=None, - unique_name=None): - def new(): - client_tdir = os.path.join(tdir, 'home', space) - c = newCLI(looper, - client_tdir, - conf=tconf, - poolDir=tdirWithPoolTxns, - domainDir=tdirWithDomainTxns, - multiPoolNodes=multiPoolNodes, - unique_name=unique_name or space, - logFileName=logFileName, - cliClass=cliClass, - name=name, - agent=agent, - nodes_chroot=tdir) - return c - - if not looper: - looper = def_looper - if looper: - yield new() - else: - with Looper(debug=False) as looper: - yield new() - - return _ - - -# marker class for regex pattern -class P(str): - def match(self, other): - return re.match('^{}$'.format(self), other) - - -def check_wallet(cli, - totalLinks=None, - totalAvailableClaims=None, - totalSchemas=None, - totalClaimsRcvd=None, - within=None): - async def check(): - actualLinks = len(cli.activeWallet._connections) - assert (totalLinks is None or (totalLinks == actualLinks)), \ - 'connections expected to be {} but is {}'.format( - totalLinks, actualLinks) - - tac = 0 - for li in 
cli.activeWallet._connections.values(): - tac += len(li.availableClaims) - - assert (totalAvailableClaims is None or - totalAvailableClaims == tac), \ - 'available claims {} must be equal to {}'. \ - format(tac, totalAvailableClaims) - - if cli.agent.prover is None: - assert (totalSchemas + totalClaimsRcvd) == 0 - else: - w = cli.agent.prover.wallet - actualSchemas = len(await w.getAllSchemas()) - assert (totalSchemas is None or - totalSchemas == actualSchemas), \ - 'schemas expected to be {} but is {}'. \ - format(totalSchemas, actualSchemas) - - assert (totalClaimsRcvd is None or - totalClaimsRcvd == len((await w.getAllClaimsSignatures()).keys())) - - if within: - cli.looper.run(eventually(check, timeout=within)) - else: - cli.looper.run(check) - - -def wallet_state(totalLinks=0, - totalAvailableClaims=0, - totalSchemas=0, - totalClaimsRcvd=0): - return locals() - - -def addAgent(be, do, userCli, mapper): - addNym(be, - do, - userCli, - mapper['remote'], - verkey=mapper.get('remote-verkey', None), - role=Roles.TRUST_ANCHOR.name) - return userCli - - -def addNym(be, do, userCli, idr, verkey=None, role=None): - be(userCli) - - ensureConnectedToTestEnv(be, do, userCli) - - cmd = 'send NYM dest={}'.format(idr) - if role is not None: - cmd += ' role={}'.format(role) - if verkey is not None: - cmd += ' verkey={}'.format(verkey) - - do(cmd, expect='Nym {} added'.format(idr), within=2) - - -def newKey(be, do, userCli, seed=None): - be(userCli) - cmd = 'new key' - if seed is not None: - cmd += ' with seed {}'.format(seed) - - do(cmd, expect='Current DID set to') - - -def getAgentCliHelpString(): - return """Indy-CLI, a simple command-line interface for a Indy Identity platform. 
- Commands: - help - Shows this or specific help message for given command - Usage: - help [] - prompt - Changes the prompt to given principal (a person like Alice, an organization like Faber College, or an IoT-style thing) - list wallets - Lists all wallets - list ids - Lists all DIDs of active wallet - show - Shows content of given file - show connection - Shows connection info in case of one matching connection, otherwise shows all the matching connection names - ping - Pings given remote's endpoint - list connections - List available connections in active wallet - send proof request - Send a proof request - license - Shows the license - exit - Exit the command-line interface ('quit' also works)""" - - -def getTotalConnections(userCli): - return len(userCli.activeWallet._connections) - - -def getTotalAvailableClaims(userCli): - availableClaimsCount = 0 - for li in userCli.activeWallet._connections.values(): - availableClaimsCount += len(li.availableClaims) - return availableClaimsCount - - -def getTotalSchemas(userCli): - async def getTotalSchemasCoro(): - return 0 if userCli.agent.prover is None \ - else len(await userCli.agent.prover.wallet.getAllSchemas()) - - return userCli.looper.run(getTotalSchemasCoro) - - -def getTotalClaimsRcvd(userCli): - async def getTotalClaimsRcvdCoro(): - return 0 if userCli.agent.prover is None \ - else len((await userCli.agent.prover.wallet.getAllClaimsSignatures()).keys()) - - return userCli.looper.run(getTotalClaimsRcvdCoro) - - -def getWalletState(userCli): - totalLinks = getTotalLinks(userCli) - totalAvailClaims = getTotalAvailableClaims(userCli) - totalSchemas = getTotalSchemas(userCli) - totalClaimsRcvd = getTotalClaimsRcvd(userCli) - return wallet_state(totalLinks, totalAvailClaims, totalSchemas, - totalClaimsRcvd) - - -def doSendNodeCmd(do, nodeVals, expMsgs=None): - expect = expMsgs or ['Node request completed'] - do('send NODE dest={newNodeIdr} data={newNodeData}', - within=15, expect=expect, mapper=nodeVals) - - -def 
createUuidIdentifier(): - return rawToFriendly(randombytes(16)) - - -def createUuidIdentifierAndFullVerkey(seed=None): - didSigner = DidSigner(identifier=createUuidIdentifier(), seed=seed) - return didSigner.identifier, didSigner.verkey - - -def createHalfKeyIdentifierAndAbbrevVerkey(seed=None): - didSigner = DidSigner(seed=seed) - return didSigner.identifier, didSigner.verkey - - -def createCryptonym(seed=None): - return SimpleSigner(seed=seed).identifier - - -def compareAgentIssuerWallet(unpersistedWallet, restoredWallet): - def compare(old, new): - if isinstance(old, Dict): - for k, v in old.items(): - assert v == new.get(k) - else: - assert old == new - - compareList = [ - # from anoncreds wallet - (unpersistedWallet.walletId, restoredWallet.walletId), - (unpersistedWallet._repo.wallet.name, restoredWallet._repo.wallet.name), - - # from indy-issuer-wallet-in-memory - (unpersistedWallet.availableClaimsToAll, - restoredWallet.availableClaimsToAll), - (unpersistedWallet.availableClaimsByNonce, - restoredWallet.availableClaimsByNonce), - (unpersistedWallet.availableClaimsByIdentifier, - restoredWallet.availableClaimsByIdentifier), - (unpersistedWallet._proofRequestsSchema, - restoredWallet._proofRequestsSchema), - - # from anoncreds issuer-wallet-in-memory - (unpersistedWallet._sks, restoredWallet._sks), - (unpersistedWallet._skRs, restoredWallet._skRs), - (unpersistedWallet._accumSks, restoredWallet._accumSks), - (unpersistedWallet._m2s, restoredWallet._m2s), - (unpersistedWallet._attributes, restoredWallet._attributes), - - # from anoncreds wallet-in-memory - (unpersistedWallet._schemasByKey, restoredWallet._schemasByKey), - (unpersistedWallet._schemasById, restoredWallet._schemasById), - (unpersistedWallet._pks, restoredWallet._pks), - (unpersistedWallet._pkRs, restoredWallet._pkRs), - (unpersistedWallet._accums, restoredWallet._accums), - (unpersistedWallet._accumPks, restoredWallet._accumPks), - # TODO: need to check for _tails, it is little bit different than 
- # others (Dict instead of namedTuple or class) - ] - - assert unpersistedWallet._repo.client is None - assert restoredWallet._repo.client is not None - for oldDict, newDict in compareList: - compare(oldDict, newDict) - - -def getSeqNoFromCliOutput(cli): - seqPat = re.compile("Sequence number is ([0-9]+)") - m = seqPat.search(cli.lastCmdOutput) - assert m - seqNo, = m.groups() - return int(seqNo) diff --git a/indy_client/test/cli/test_accept_invitation_base58_as_pubkey.py b/indy_client/test/cli/test_accept_invitation_base58_as_pubkey.py deleted file mode 100644 index 8fbb945c2..000000000 --- a/indy_client/test/cli/test_accept_invitation_base58_as_pubkey.py +++ /dev/null @@ -1,35 +0,0 @@ -import json - -import pytest - -from plenum.common.constants import PUBKEY - -# noinspection PyUnresolvedReferences -from indy_client.test.cli.conftest \ - import faberMap as faberMapWithoutEndpointPubkey - -# noinspection PyUnresolvedReferences -from indy_client.test.cli.test_tutorial import alice_accepted_faber_request, \ - aliceCli, preRequisite, faberCli, acmeCli, thriftCli, faberWithEndpointAdded, acmeWithEndpointAdded, \ - thriftWithEndpointAdded, walletCreatedForTestEnv, \ - faberInviteSyncedWithEndpoint, faberInviteSyncedWithoutEndpoint, \ - faberInviteLoadedByAlice, accept_request, preRequisite - -from indy_common.constants import ENDPOINT - - -@pytest.fixture(scope="module") -def faberMap(faberMapWithoutEndpointPubkey): - fbrMap = faberMapWithoutEndpointPubkey - endpointAttr = json.loads(fbrMap["endpointAttr"]) - base58Key = '5hmMA64DDQz5NzGJNVtRzNwpkZxktNQds21q3Wxxa62z' - endpointAttr[ENDPOINT][PUBKEY] = base58Key - fbrMap["endpointAttr"] = json.dumps(endpointAttr) - return fbrMap - - -def testInvitationAcceptedIfAgentWasAddedUsingBase58AsPubkey( - be, do, aliceCli, faberMap, preRequisite, - syncedInviteAcceptedWithClaimsOut, faberInviteSyncedWithEndpoint): - accept_request(be, do, aliceCli, faberMap, - syncedInviteAcceptedWithClaimsOut) diff --git 
a/indy_client/test/cli/test_add_genesis_transaction.py b/indy_client/test/cli/test_add_genesis_transaction.py deleted file mode 100644 index 2fddd5642..000000000 --- a/indy_client/test/cli/test_add_genesis_transaction.py +++ /dev/null @@ -1,68 +0,0 @@ -import json - -from plenum.common.constants import VERKEY, DATA, NODE, TYPE -from plenum.common.txn_util import get_payload_data, get_type -from plenum.test.cli.helper import checkCmdValid - -from indy_common.constants import NYM -from indy_common.constants import TARGET_NYM, ROLE -from indy_common.roles import Roles -from indy_common.transactions import IndyTransactions - - -def executeAndCheckGenTxn(cli, cmd, typ, nym, role=None, data=None): - checkCmdValid(cli, cmd) - nymCorrect = False - roleCorrect = False if role else True - dataCorrect = False if data else True - typeCorrect = False if typ else True - - role = Roles[role].value if role else role - for txn in cli.genesisTransactions: - txn_data = get_payload_data(txn) - if txn_data.get(TARGET_NYM) == nym: - nymCorrect = True - if get_type(txn) == typ: - typeCorrect = True - if txn_data.get(ROLE) == role: - roleCorrect = True - if data and txn_data.get(DATA) == json.loads(data): - dataCorrect = True - - assert typeCorrect and nymCorrect and roleCorrect and dataCorrect - assert "Genesis transaction added" in cli.lastCmdOutput - - -def prepareCmdAndCheckGenTxn( - cli, typ: IndyTransactions, nym, role=None, data=None): - cmd = "add genesis transaction {} dest={}".format(typ.name, nym) - if role: - cmd += " role={}".format(role) - if data: - cmd += " with data {}".format(data) - executeAndCheckGenTxn(cli, cmd, typ.value, nym, role, data) - - -def testAddGenTxnBasic(cli): - nym = "2ru5PcgeQzxF7QZYwQgDkG2K13PRqyigVw99zMYg8eML" - role = None - typ = IndyTransactions.NYM - prepareCmdAndCheckGenTxn(cli, typ, nym, role) - - -def testAddGenTxnWithRole(cli): - nym = "2ru5PcgeQzxF7QZYwQgDkG2K13PRqyigVw99zMYg8eML" - role = Roles.STEWARD.name - typ = IndyTransactions.NYM - 
prepareCmdAndCheckGenTxn(cli, typ, nym, role) - - -def testAddGenTxnForNode(cli): - nym = "2ru5PcgeQzxF7QZYwQgDkG2K13PRqyigVw99zMYg8eML" - by = "FvDi9xQZd1CZitbK15BNKFbA7izCdXZjvxf91u3rQVzW" - role = None - typ = NODE - data = '{"node_ip": "localhost", "node_port": "9701", "client_ip": "localhost", "client_port": "9702", "alias": "AliceNode"}' - cmd = 'add genesis transaction {} for {} by {} with data {}'.format( - typ, nym, by, data) - executeAndCheckGenTxn(cli, cmd, typ, nym, role, data) diff --git a/indy_client/test/cli/test_agent_cli.py b/indy_client/test/cli/test_agent_cli.py deleted file mode 100644 index 1a7578111..000000000 --- a/indy_client/test/cli/test_agent_cli.py +++ /dev/null @@ -1,130 +0,0 @@ -from functools import partial - -import pytest - -from plenum.test.cli.helper import TestCliCore -from plenum.test.testable import spyable -from indy_client.agent.agent_cli import AgentCli -from indy_client.test.agent.acme import create_acme, bootstrap_acme -from indy_client.test.agent.helper import buildAcmeWallet -from indy_client.test.cli.helper import getCliBuilder, getAgentCliHelpString -from indy_client.test.cli.test_tutorial import acmeWithEndpointAdded,\ - connectIfNotAlreadyConnected, faberCli, acmeCli, thriftCli - - -@spyable(methods=[AgentCli.print, AgentCli.printTokens]) -class TestAgentCLI(AgentCli, TestCliCore): - pass - - -@pytest.fixture(scope='module') -def agentCliBuilder(tdir, tdirWithPoolTxns, tdirWithDomainTxns, tconf, - txnPoolNodesLooper, cliTempLogger, txnPoolNodeSet): - return partial(getCliBuilder, tdir=tdir, tconf=tconf, - tdirWithPoolTxns=tdirWithPoolTxns, - tdirWithDomainTxns=tdirWithDomainTxns, - logFileName=cliTempLogger, cliClass=TestAgentCLI) - - -@pytest.fixture(scope='module') -def acmeAgentCli(agentCliBuilder, acmeAgentPort, tdirWithClientPoolTxns): - agent = create_acme(port=acmeAgentPort, base_dir_path=tdirWithClientPoolTxns, - wallet=buildAcmeWallet()) - cliBuild = agentCliBuilder(name='Acme-Agent', agent=agent) - cli = 
cliBuild('Acme-Agent') - yield from cli - - -@pytest.fixture(scope='module') -def acmeAgentCliRunning(acmeWithEndpointAdded, acmeAgentCli, looper): - looper.run(bootstrap_acme(acmeAgentCli.agent)) - return acmeAgentCli - - -def test_acme_cli_started_successfully(be, acmeAgentCliRunning): - be(acmeAgentCliRunning) - assert acmeAgentCliRunning.currPromptText == 'Acme-Agent' - - -def testAgentCliHelp(be, do, acmeAgentCliRunning): - be(acmeAgentCliRunning) - do('help', expect=[getAgentCliHelpString()]) - - -def testAgentCliForInvalidCommand(be, do, acmeAgentCliRunning): - be(acmeAgentCliRunning) - do('set Attr1 to Value1', expect=[ - "Invalid command: 'set Attr1 to Value1'", - getAgentCliHelpString()]) - - -def sendProofRequest(be, do, agentCli, userMap): - be(agentCli) - userMap['pr-name-version'] = '{}-v{}'.format( - userMap['pr-name'], userMap['pr-schema-version']) - do('send proof-request {pr-name-version} to {send-proof-target}', - within=5, - mapper=userMap, - expect=[ - 'Sent proof request "{pr-name-version}" to {send-proof-target}' - ]) - - -def checkProofRequestReceived(be, do, userCli, commandMap): - be(userCli) - do(None, within=3, mapper=commandMap, - expect=['Proof request {pr-name} received from {inviter}.']) - - -def getProofRequestsCount(userCli, target): - li = userCli.activeWallet.getConnectionBy(remote=target) - return len(li.proofRequests) - - -@pytest.fixture(scope='module') -def aliceAcceptedAcmeInvitationNoProofReq( - acmeAgentCliRunning, be, do, aliceCLI, acmeMap, loadInviteOut, - unsycedAcceptedInviteWithoutClaimOut, - syncConnectionOutWithEndpoint, newKeyringOut): - def _(invitationFile, wallet): - be(aliceCLI) - connectIfNotAlreadyConnected(do, aliceCLI, acmeMap) - walletMapper = { - 'wallet-name': wallet - } - do('new wallet {}'.format(wallet), - expect=newKeyringOut, - mapper=walletMapper) - do('load {}'.format(invitationFile), - mapper=acmeMap, - expect=loadInviteOut) - do('sync {inviter}', - mapper=acmeMap, - 
expect=syncConnectionOutWithEndpoint, - within=15) - do('accept request from {inviter}', - within=15, - mapper=acmeMap, - expect=unsycedAcceptedInviteWithoutClaimOut) - - proofRequestsBefore = getProofRequestsCount( - aliceCLI, acmeMap['remote']) - - sendProofRequest(be, do, acmeAgentCliRunning, acmeMap) - - checkProofRequestReceived(be, do, aliceCLI, acmeMap) - - proofRequestsAfter = getProofRequestsCount(aliceCLI, acmeMap['remote']) - - return proofRequestsBefore, proofRequestsAfter - - return _ - - -def test_acme_cli_send_proof_request( - be, do, acmeAgentCliRunning, aliceCLI, acmeMap, - aliceAcceptedAcmeInvitationNoProofReq): - proofRequestsBefore, proofRequestsAfter = aliceAcceptedAcmeInvitationNoProofReq( - acmeMap['invite-no-pr'], 'aliceNoPR') - - assert proofRequestsBefore + 1 == proofRequestsAfter diff --git a/indy_client/test/cli/test_agent_cli2.py b/indy_client/test/cli/test_agent_cli2.py deleted file mode 100644 index 2c5e52c7e..000000000 --- a/indy_client/test/cli/test_agent_cli2.py +++ /dev/null @@ -1,116 +0,0 @@ -from functools import partial - -import pytest - -from plenum.test.cli.helper import TestCliCore -from plenum.test.testable import spyable -from indy_client.agent.agent_cli import AgentCli -from indy_client.test.agent.acme import create_acme, bootstrap_acme -from indy_client.test.agent.helper import buildAcmeWallet -from indy_client.test.cli.helper import getCliBuilder, getAgentCliHelpString -from indy_client.test.cli.test_tutorial import acmeWithEndpointAdded,\ - connectIfNotAlreadyConnected, faberCli, acmeCli, thriftCli - - -@spyable(methods=[AgentCli.print, AgentCli.printTokens]) -class TestAgentCLI(AgentCli, TestCliCore): - pass - - -@pytest.fixture(scope='module') -def agentCliBuilder(tdir, tdirWithPoolTxns, tdirWithDomainTxns, tconf, - txnPoolNodesLooper, cliTempLogger, txnPoolNodeSet): - return partial(getCliBuilder, tdir=tdir, tconf=tconf, - tdirWithPoolTxns=tdirWithPoolTxns, - tdirWithDomainTxns=tdirWithDomainTxns, - 
logFileName=cliTempLogger, cliClass=TestAgentCLI) - - -@pytest.fixture(scope='module') -def acmeAgentCli(agentCliBuilder, acmeAgentPort, tdirWithClientPoolTxns): - agent = create_acme(port=acmeAgentPort, base_dir_path=tdirWithClientPoolTxns, - wallet=buildAcmeWallet()) - cliBuild = agentCliBuilder(name='Acme-Agent', agent=agent) - cli = cliBuild('Acme-Agent') - yield from cli - - -@pytest.fixture(scope='module') -def acmeAgentCliRunning(acmeWithEndpointAdded, acmeAgentCli, looper): - looper.run(bootstrap_acme(acmeAgentCli.agent)) - return acmeAgentCli - - - - -def sendProofRequest(be, do, agentCli, userMap): - be(agentCli) - userMap['pr-name-version'] = '{}-v{}'.format( - userMap['pr-name'], userMap['pr-schema-version']) - do('send proof-request {pr-name-version} to {send-proof-target}', - within=5, - mapper=userMap, - expect=[ - 'Sent proof request "{pr-name-version}" to {send-proof-target}' - ]) - - -def checkProofRequestReceived(be, do, userCli, commandMap): - be(userCli) - do(None, within=3, mapper=commandMap, - expect=['Proof request {pr-name} received from {inviter}.']) - - -def getProofRequestsCount(userCli, target): - li = userCli.activeWallet.getConnectionBy(remote=target) - return len(li.proofRequests) - - -@pytest.fixture(scope='module') -def aliceAcceptedAcmeInvitationNoProofReq( - acmeAgentCliRunning, be, do, aliceCLI, acmeMap, loadInviteOut, - unsycedAcceptedInviteWithoutClaimOut, - syncConnectionOutWithEndpoint, newKeyringOut): - def _(invitationFile, wallet): - be(aliceCLI) - connectIfNotAlreadyConnected(do, aliceCLI, acmeMap) - walletMapper = { - 'wallet-name': wallet - } - do('new wallet {}'.format(wallet), - expect=newKeyringOut, - mapper=walletMapper) - do('load {}'.format(invitationFile), - mapper=acmeMap, - expect=loadInviteOut) - do('sync {inviter}', - mapper=acmeMap, - expect=syncConnectionOutWithEndpoint, - within=15) - do('accept request from {inviter}', - within=15, - mapper=acmeMap, - expect=unsycedAcceptedInviteWithoutClaimOut) - - 
proofRequestsBefore = getProofRequestsCount( - aliceCLI, acmeMap['remote']) - - sendProofRequest(be, do, acmeAgentCliRunning, acmeMap) - - checkProofRequestReceived(be, do, aliceCLI, acmeMap) - - proofRequestsAfter = getProofRequestsCount(aliceCLI, acmeMap['remote']) - - return proofRequestsBefore, proofRequestsAfter - return _ - - - -def test_acme_cli_send_proof_request_already_exist( - be, do, acmeAgentCliRunning, aliceCLI, acmeMap, - aliceAcceptedAcmeInvitationNoProofReq): - - proofRequestsBefore, proofRequestsAfter = aliceAcceptedAcmeInvitationNoProofReq( - acmeMap['invite'], 'aliceWithPR') - - assert proofRequestsBefore == proofRequestsAfter diff --git a/indy_client/test/cli/test_agent_startup.py b/indy_client/test/cli/test_agent_startup.py deleted file mode 100644 index f8909b41c..000000000 --- a/indy_client/test/cli/test_agent_startup.py +++ /dev/null @@ -1,87 +0,0 @@ -import pytest - -# it is fixture - do not remove -from indy_client.test.cli.conftest import acmeAddedByPhil as agentAddedBySponsor -from indy_common.exceptions import NotConnectedToNetwork - -from plenum.common.exceptions import OperationError, NoConsensusYet - -from stp_core.network.exceptions import PortNotAvailable -from stp_core.network.port_dispenser import genHa -from plenum.common.types import HA -from plenum.common.util import randomString -from stp_core.network.util import checkPortAvailable -from indy_client.test.agent.conftest import startAgent - -from indy_client.test.agent.acme import create_acme as createAgent -from indy_client.test.agent.acme import bootstrap_acme as bootstrap_agent -from indy_client.test.agent.helper import buildAcmeWallet as agentWallet - - -agentPort = genHa()[1] - - -def getNewAgent(name, basedir, port, wallet): - return createAgent(name, wallet, base_dir_path=basedir, port=port) - - -def runAgent(looper, basedir, port, name=None, agent=None): - wallet = agentWallet() - name = name or "Agent" + randomString(5) - agent = agent or getNewAgent(name, basedir, 
port, wallet) - agent._name = name - return startAgent(looper, agent, wallet, bootstrap_agent(agent)) - - -def stopAgent(looper, name): - agent = looper.removeProdable(name=name) - if agent: - agent.stop() - - -@pytest.fixture(scope="module") -def agentStarted(emptyLooper, tdirWithPoolTxns): - runAgent(emptyLooper, tdirWithPoolTxns, agentPort, "Agent0") - - -def testCreateAgentDoesNotAllocatePort(tdirWithPoolTxns): - for i in range(2): - checkPortAvailable(HA("0.0.0.0", agentPort)) - agent = getNewAgent("Agent0", tdirWithPoolTxns, - agentPort, agentWallet()) - checkPortAvailable(HA("0.0.0.0", agentPort)) - agent.stop() - - -def testAgentStartedWithoutPoolStarted(emptyLooper, tdirWithClientPoolTxns): - import indy_client.agent.run_agent - indy_client.agent.run_agent.CONNECTION_TIMEOUT = 10 - newAgentName = "Agent2" - with pytest.raises(NotConnectedToNetwork): - runAgent(emptyLooper, tdirWithClientPoolTxns, agentPort, - name=newAgentName) - stopAgent(emptyLooper, newAgentName) - - -def testStartNewAgentOnUsedPort(poolNodesStarted, tdirWithClientPoolTxns, - emptyLooper, agentAddedBySponsor, - agentStarted): - with pytest.raises(PortNotAvailable): - runAgent(emptyLooper, tdirWithClientPoolTxns, agentPort, name='Agent4') - - stopAgent(emptyLooper, 'Agent4') - - -def testStartAgentChecksForPortAvailability(poolNodesStarted, tdirWithClientPoolTxns, - emptyLooper, agentAddedBySponsor): - newAgentName1 = "Agent11" - newAgentName2 = "Agent12" - with pytest.raises(PortNotAvailable): - agent = getNewAgent(newAgentName1, tdirWithClientPoolTxns, agentPort, - agentWallet()) - runAgent(emptyLooper, tdirWithClientPoolTxns, agentPort, - name=newAgentName2) - runAgent(emptyLooper, tdirWithClientPoolTxns, agentPort, - name=newAgentName1, agent=agent) - - stopAgent(emptyLooper, newAgentName2) diff --git a/indy_client/test/cli/test_agent_wallet_persistence.py b/indy_client/test/cli/test_agent_wallet_persistence.py deleted file mode 100644 index a304f96b3..000000000 --- 
a/indy_client/test/cli/test_agent_wallet_persistence.py +++ /dev/null @@ -1,101 +0,0 @@ -import os -import stat - -import pytest - -from plenum.common.util import randomString, normalizedWalletFileName -from plenum.test.conftest import tdirWithPoolTxns -from indy_client.agent.agent import createAgent -from indy_client.test.agent.conftest import emptyLooper, startAgent - -from indy_client.test.agent.acme import create_acme as createAcmeAgent, AcmeAgent -from indy_client.test.agent.helper import buildAcmeWallet as agentWallet -from indy_client.test.cli.conftest \ - import acmeAddedByPhil as agentAddedBySponsor -from indy_client.test.cli.helper import compareAgentIssuerWallet -from indy_client.test.client.TestClient import TestClient -from stp_core.network.port_dispenser import genHa - -agentPort = genHa()[1] - - -def getNewAgent(name, basedir, port, wallet): - return createAcmeAgent(name, wallet, base_dir_path=basedir, port=port) - - -def runAgent(looper, basedir, port, name=None, agent=None): - wallet = agentWallet() - wallet.name = name - name = name or "Agent" + randomString(5) - agent = agent or getNewAgent(name, basedir, port, wallet) - return startAgent(looper, agent, wallet) - - -def _startAgent(looper, base_dir, port, name): - agent, wallet = runAgent(looper, base_dir, port, name) - return agent, wallet - - -@pytest.fixture(scope="module") -def agentStarted(emptyLooper, tdirWithClientPoolTxns): - return _startAgent(emptyLooper, tdirWithClientPoolTxns, agentPort, "Agent0") - - -def changeAndPersistWallet(agent, emptyLooper): - walletName = normalizedWalletFileName(agent._wallet.name) - expectedFilePath = os.path.join(agent.getContextDir(), walletName) - assert "agents" in expectedFilePath - assert agent.name.lower().replace(" ", "-") in expectedFilePath - walletToBePersisted = agent._wallet - walletToBePersisted.idsToSigners = {} - agent.stop() - emptyLooper.runFor(.5) - assert os.path.isfile(expectedFilePath) - assert 
stat.S_IMODE(os.stat(agent.getContextDir() - ).st_mode) == agent.config.WALLET_DIR_MODE - assert stat.S_IMODE( - os.stat(expectedFilePath).st_mode) == agent.config.WALLET_FILE_MODE - return walletToBePersisted - - -def changePersistAndRestoreWallet(agent, emptyLooper): - assert agent - changeAndPersistWallet(agent, emptyLooper) - agent.start(emptyLooper) - assert agent._wallet.idsToSigners == {} - - -def testAgentPersistsWalletWhenStopped(poolNodesStarted, emptyLooper, - agentAddedBySponsor, agentStarted): - agent, _ = agentStarted - changePersistAndRestoreWallet(agent, emptyLooper) - - -def testAgentUsesRestoredWalletIfItHas( - poolNodesStarted, emptyLooper, tdirWithClientPoolTxns, - agentAddedBySponsor, agentStarted): - agent, wallet = agentStarted - changeAndPersistWallet(agent, emptyLooper) - - newAgent = getNewAgent(agent.name, tdirWithClientPoolTxns, agentPort, - agentWallet()) - assert newAgent._wallet.idsToSigners == {} - - -def testAgentCreatesWalletIfItDoesntHaveOne(tdirWithClientPoolTxns): - agent = createAgent(AcmeAgent, "Acme Corp", - wallet=None, basedirpath=tdirWithClientPoolTxns, - port=genHa()[1], clientClass=TestClient) - assert agent._wallet is not None - - -def testAgentWalletRestoration(poolNodesStarted, tdirWithClientPoolTxns, emptyLooper, - agentAddedBySponsor, agentStarted): - agent, wallet = agentStarted - unpersistedIssuerWallet = agent.issuer.wallet - agent.stop() - emptyLooper.removeProdable(agent) - newAgent, newWallet = _startAgent(emptyLooper, tdirWithClientPoolTxns, - agentPort, "Agent0") - restoredIssuerWallet = newAgent.issuer.wallet - compareAgentIssuerWallet(unpersistedIssuerWallet, restoredIssuerWallet) diff --git a/indy_client/test/cli/test_change_key.py b/indy_client/test/cli/test_change_key.py deleted file mode 100644 index 85e854fb9..000000000 --- a/indy_client/test/cli/test_change_key.py +++ /dev/null @@ -1,80 +0,0 @@ -import pytest -from indy_client.test.cli.helper import connect_and_check_output - - -def 
test_change_key(be, do, susanCLI, newStewardCli): - # Generate new key in the wallet - be(susanCLI) - connect_and_check_output(do, susanCLI.txn_dir) - do('new key', within=3, expect=["Key created in wallet"]) - - # check that id cannot be used for the time - id = susanCLI.activeDID - verk = susanCLI.activeWallet.getVerkey(id) - do('send NYM dest={}'.format(id), within=3, expect=[ - "Error: client request invalid: CouldNotAuthenticate("]) - - # Add verkey - be(newStewardCli) - do('send NYM dest={} verkey={}'.format(id, verk), - within=3, expect=["Nym {} added".format(id)]) - - # check Susan's key is ok - be(susanCLI) - do('send NYM dest={}'.format(id), within=3, - expect=["Nym {} added".format(id)]) - do('send GET_NYM dest={}'.format(id), within=3, expect=[ - "Current verkey for NYM {} is {}".format(id, verk)]) - - # change key - do('change current key', within=3, expect=["Adding nym {}".format( - id), "Key changed for {}".format(id), "New verification key is"]) - - # check new key - assert id == susanCLI.activeDID - assert verk != susanCLI.activeWallet.getVerkey(id) - do('send NYM dest={}'.format(id), within=3, - expect=["Nym {} added".format(id)]) - do('send GET_NYM dest={}'.format(id), within=3, expect=[ - "Current verkey for NYM {} is {}".format(id, susanCLI.activeWallet.getVerkey(id))]) - - -def test_change_key_with_seed(be, do, philCli, newStewardCli): - # Generate new key in the wallet - be(philCli) - connect_and_check_output(do, philCli.txn_dir) - do('new key', within=3, expect=["Key created in wallet"]) - - # check that id cannot be used for the time - id = philCli.activeDID - verk = philCli.activeWallet.getVerkey(id) - do('send NYM dest={}'.format(id), within=3, expect=[ - "Error: client request invalid: CouldNotAuthenticate("]) - - # Add verkey - be(newStewardCli) - do('send NYM dest={} verkey={}'.format(id, verk), - within=3, expect=["Nym {} added".format(id)]) - - # check Susan's key is ok - be(philCli) - do('send NYM dest={}'.format(id), within=3, - 
expect=["Nym {} added".format(id)]) - do('send GET_NYM dest={}'.format(id), within=3, expect=[ - "Current verkey for NYM {} is {}".format(id, verk)]) - - # change key - seed = "8" * 32 - do('change current key with seed {}'.format(seed), - within=3, - expect=["Adding nym {}".format(id), - "Key changed for {}".format(id), - "New verification key is"]) - - # check new key - assert id == philCli.activeDID - assert verk != philCli.activeWallet.getVerkey(id) - do('send NYM dest={}'.format(id), within=3, - expect=["Nym {} added".format(id)]) - do('send GET_NYM dest={}'.format(id), within=3, expect=[ - "Current verkey for NYM {} is {}".format(id, philCli.activeWallet.getVerkey(id))]) diff --git a/indy_client/test/cli/test_cli_exit.py b/indy_client/test/cli/test_cli_exit.py deleted file mode 100644 index 4b756e9c9..000000000 --- a/indy_client/test/cli/test_cli_exit.py +++ /dev/null @@ -1,36 +0,0 @@ -import pytest - -from indy_client.test.cli.helper import connect_and_check_output -from indy_client.test.cli.test_tutorial import prompt_is -from stp_core.loop.eventually import eventually -from plenum.cli.cli import Exit - - -def testCliExitCommand(be, do, poolNodesStarted, aliceCLI, CliBuilder, - aliceMap, newKeyringOut, - savedKeyringRestored, aliceKeyringMap): - within = 3 - name = 'Alice' - be(aliceCLI) - do('prompt {}'.format(name), expect=prompt_is(name)) - do('new wallet {}'.format(name), expect=newKeyringOut, mapper=aliceMap) - connect_and_check_output(do, aliceCLI.txn_dir) - with pytest.raises(Exit): - do('exit') - - def checkWalletRestore(): - # open cli again - aliceCliNew = yield from CliBuilder(name) - # check message of saved wallet alice restored - be(aliceCliNew) - connect_and_check_output(do, aliceCliNew.txn_dir, expect=savedKeyringRestored, mapper=aliceKeyringMap) - - # check wallet restore - aliceCLI.looper.run(eventually(checkWalletRestore, timeout=within)) - - -@pytest.fixture(scope='module') -def aliceKeyringMap(): - return { - 'wallet-name': 'Alice' - } 
diff --git a/indy_client/test/cli/test_command_reg_ex.py b/indy_client/test/cli/test_command_reg_ex.py deleted file mode 100644 index cd3ef8cf6..000000000 --- a/indy_client/test/cli/test_command_reg_ex.py +++ /dev/null @@ -1,383 +0,0 @@ -import pytest -from plenum.cli.helper import getClientGrams -from plenum.common.constants import NAME, VERSION, TYPE, KEYS -from plenum.test.cli.helper import assertCliTokens -from plenum.test.cli.test_command_reg_ex import getMatchedVariables -from prompt_toolkit.contrib.regular_languages.compiler import compile - -from indy_client.cli.helper import getNewClientGrams -from indy_common.constants import REF, CLAIM_DEF_SCHEMA_REF -from indy_common.roles import Roles - - -@pytest.fixture("module") -def grammar(): - grams = getClientGrams() + getNewClientGrams() - return compile("".join(grams)) - - -def testSendNymWithRole(grammar): - dest = "LNAyBZUjvLF7duhrNtOWgdAKs18nHdbJUxJLT39iEGU=" - role = Roles.TRUST_ANCHOR.name - matchedVars = getMatchedVariables( - grammar, "send NYM dest={} role={}".format(dest, role)) - assertCliTokens(matchedVars, { - "send_nym": "send NYM", "dest_id": dest, "role": role}) - - -def testSendNymWithoutRole(grammar): - dest = "LNAyBZUjvLF7duhrNtOWgdAKs18nHdbJUxJLT39iEGU=" - matchedVars = getMatchedVariables(grammar, 'send NYM dest={}'.format(dest)) - assertCliTokens(matchedVars, { - "send_nym": "send NYM", "dest_id": dest}) - - -def testSendNymVerkey(grammar): - dest = "LNAyBZUjvLF7duhrNtOWgdAKs18nHdbJUxJLT39iEGU=" - role = Roles.TRUST_ANCHOR.name - verkey = "LNAyBZUjvLF7duhrNtOWgdAKs18nHdbJUxJLT39iEGU=" - - # Test with verkey - matchedVars = getMatchedVariables( - grammar, "send NYM dest={} role={} verkey={}".format( - dest, role, verkey)) - assertCliTokens(matchedVars, { - "send_nym": "send NYM", "dest_id": dest, - "role": role, "new_ver_key": verkey - }) - - # Test without verkey - matchedVars = getMatchedVariables( - grammar, - "send NYM dest={} role={}".format(dest, role)) - assertCliTokens(matchedVars, 
{ - "send_nym": "send NYM", "dest_id": dest, "role": role - }) - - # Verkey being empty string is supported - matchedVars = getMatchedVariables( - grammar, - "send NYM dest={} role={} verkey={}".format(dest, role, '')) - assertCliTokens(matchedVars, { - "send_nym": "send NYM", "dest_id": dest, "role": role, "new_ver_key": '' - }) - - -def testGetNym(grammar): - dest = "LNAyBZUjvLF7duhrNtOWgdAKs18nHdbJUxJLT39iEGU=" - matchedVars = getMatchedVariables( - grammar, "send GET_NYM dest={}".format(dest)) - assertCliTokens(matchedVars, { - "send_get_nym": "send GET_NYM", "dest_id": dest}) - - -def testSendSchema(grammar): - name = "Degree" - version = "1.0" - keys = "undergrad,last_name,first_name,birth_date,postgrad,expiry_date" - matchedVars = getMatchedVariables(grammar, - 'send SCHEMA name={} version={} keys={}' - .format(name, version, keys)) - assertCliTokens(matchedVars, - {"send_schema": "send SCHEMA", - NAME: name, - VERSION: version, - KEYS: keys}) - - -def test_send_get_schema(grammar): - dest = "LNAyBZUjvLF7duhrNtOWgdAKs18nHdbJUxJLT39iEGU=" - name = "Degree" - version = "1.0" - matchedVars = getMatchedVariables( - grammar, 'send GET_SCHEMA dest={} name={} version={}' .format( - dest, name, version)) - assertCliTokens(matchedVars, { - "send_get_schema": "send GET_SCHEMA", NAME: name, VERSION: version}) - - -def testSendAttribRegEx(grammar): - dest = "LNAyBZUjvLF7duhrNtOWgdAKs18nHdbJUxJLT39iEGU=" - raw = '{"legal org": "BRIGHAM YOUNG UNIVERSITY, PROVO, UT", ' \ - '"email": "mail@byu.edu"}' - matchedVars = getMatchedVariables( - grammar, 'send ATTRIB dest={} raw={}'.format( - dest, raw)) - assertCliTokens(matchedVars, { - "send_attrib": "send ATTRIB", "dest_id": dest, "raw": raw}) - - -def test_send_get_attrib_regex(grammar): - dest = "LNAyBZUjvLF7duhrNtOWgdAKs18nHdbJUxJLT39iEGU=" - raw = 'legal' - matchedVars = getMatchedVariables( - grammar, 'send GET_ATTR dest={} raw={}'.format( - dest, raw)) - assertCliTokens(matchedVars, { - "send_get_attr": "send GET_ATTR", 
"dest_id": dest, "raw": raw}) - - -def testAddAttrRegEx(grammar): - getMatchedVariables( - grammar, - "add attribute first_name=Tyler,last_name=Ruff,birth_date=12/17/1991,undergrad=True,postgrad=True,expiry_date=12/31/2101 for Tyler") - - -def testAddAttrProverRegEx(grammar): - getMatchedVariables( - grammar, - "attribute known to BYU first_name=Tyler, last_name=Ruff, birth_date=12/17/1991, undergrad=True, postgrad=True, expiry_date=12/31/2101") - - -def testSendClaimDefRegEx(grammar): - matchedVars = getMatchedVariables( - grammar, "send CLAIM_DEF ref=15 signature_type=CL") - from indy_common.constants import CLAIM_DEF_SIGNATURE_TYPE - assertCliTokens(matchedVars, { - "send_claim_def": "send CLAIM_DEF", CLAIM_DEF_SCHEMA_REF: "15", CLAIM_DEF_SIGNATURE_TYPE: "CL"}) - - -def test_send_get_claim_def_regex(grammar): - matchedVars = getMatchedVariables( - grammar, "send GET_CLAIM_DEF ref=15 signature_type=CL") - from indy_common.constants import CLAIM_DEF_SIGNATURE_TYPE - assertCliTokens(matchedVars, { - "send_get_claim_def": "send GET_CLAIM_DEF", CLAIM_DEF_SCHEMA_REF: "15", CLAIM_DEF_SIGNATURE_TYPE: "CL"}) - - -def testShowFileCommandRegEx(grammar): - matchedVars = getMatchedVariables(grammar, - "show sample/faber-request.indy") - assertCliTokens(matchedVars, { - "show_file": "show", "file_path": "sample/faber-request.indy"}) - - matchedVars = getMatchedVariables(grammar, - "show sample/faber-request.indy ") - assertCliTokens(matchedVars, { - "show_file": "show", "file_path": "sample/faber-request.indy"}) - - -def testLoadFileCommandRegEx(grammar): - matchedVars = getMatchedVariables(grammar, - "load sample/faber-request.indy") - assertCliTokens(matchedVars, { - "load_file": "load", "file_path": "sample/faber-request.indy"}) - - matchedVars = getMatchedVariables(grammar, - "load sample/faber-request.indy ") - assertCliTokens(matchedVars, { - "load_file": "load", "file_path": "sample/faber-request.indy"}) - - -def testShowLinkRegEx(grammar): - matchedVars = 
getMatchedVariables(grammar, "show connection faber") - assertCliTokens(matchedVars, {"show_connection": "show connection", - "connection_name": "faber"}) - - matchedVars = getMatchedVariables(grammar, "show connection faber college") - assertCliTokens(matchedVars, {"show_connection": "show connection", - "connection_name": "faber college"}) - - matchedVars = getMatchedVariables( - grammar, "show connection faber college ") - assertCliTokens(matchedVars, {"show_connection": "show connection", - "connection_name": "faber college "}) - - -def testConnectRegEx(grammar): - getMatchedVariables(grammar, "connect dummy") - getMatchedVariables(grammar, "connect test") - getMatchedVariables(grammar, "connect live") - - -def testSyncLinkRegEx(grammar): - matchedVars = getMatchedVariables(grammar, "sync faber") - assertCliTokens( - matchedVars, {"sync_connection": "sync", "connection_name": "faber"}) - - matchedVars = getMatchedVariables(grammar, 'sync "faber"') - assertCliTokens( - matchedVars, {"sync_connection": "sync", "connection_name": '"faber"'}) - - matchedVars = getMatchedVariables(grammar, 'sync "faber" ') - assertCliTokens(matchedVars, - {"sync_connection": "sync", - "connection_name": '"faber" '}) - - -def testPingTargetRegEx(grammar): - matchedVars = getMatchedVariables(grammar, "ping faber") - assertCliTokens(matchedVars, {"ping": "ping", "target_name": "faber"}) - - -def testAcceptInvitationLinkRegEx(grammar): - matchedVars = getMatchedVariables(grammar, "accept request from faber") - assertCliTokens(matchedVars, - {"accept_connection_request": "accept request from", - "connection_name": "faber"}) - - matchedVars = getMatchedVariables(grammar, 'accept request from "faber"') - assertCliTokens(matchedVars, - {"accept_connection_request": "accept request from", - "connection_name": '"faber"'}) - - matchedVars = getMatchedVariables(grammar, 'accept request from "faber" ') - assertCliTokens(matchedVars, - {"accept_connection_request": "accept request from", - 
"connection_name": '"faber" '}) - - -def testShowClaimRegEx(grammar): - matchedVars = getMatchedVariables(grammar, "show claim Transcript") - assertCliTokens(matchedVars, {"show_claim": "show claim", - "claim_name": "Transcript"}) - - matchedVars = getMatchedVariables(grammar, 'show claim "Transcript"') - assertCliTokens(matchedVars, {"show_claim": "show claim", - "claim_name": '"Transcript"'}) - - -def testRequestClaimRegEx(grammar): - matchedVars = getMatchedVariables(grammar, "request claim Transcript") - assertCliTokens(matchedVars, {"request_claim": "request claim", - "claim_name": "Transcript"}) - - matchedVars = getMatchedVariables(grammar, 'request claim "Transcript"') - assertCliTokens(matchedVars, {"request_claim": "request claim", - "claim_name": '"Transcript"'}) - - -def testProofReqRegEx(grammar): - matchedVars = getMatchedVariables(grammar, - "show proof request Transcript") - assertCliTokens(matchedVars, {"show_proof_request": "show proof request", - "proof_request_name": "Transcript"}) - - matchedVars = getMatchedVariables(grammar, - "show proof request Transcript ") - assertCliTokens(matchedVars, {"show_proof_request": "show proof request", - "proof_request_name": "Transcript "}) - - -def testSendProofReqRegEx(grammar): - matchedVars = getMatchedVariables(grammar, - "send proof-request Over-21 to JaneDoe") - assertCliTokens(matchedVars, {"send_proof_request": "send proof-request", - "proof_request_name": "Over-21", - "target": " JaneDoe"}) - - -def testSetAttribute(grammar): - matchedVars = getMatchedVariables( - grammar, "set first_name to Alice") - assertCliTokens(matchedVars, { - "set_attr": "set", "attr_name": "first_name", "attr_value": "Alice"}) - - -def testSendProof(grammar): - getMatchedVariables(grammar, 'send proof Job-Application to Acme') - - -def testSendPoolUpgrade(grammar): - # Testing for start - getMatchedVariables( - grammar, "send POOL_UPGRADE name=upgrade-13 " - "version=0.0.6 
sha256=f284bdc3c1c9e24a494e285cb387c69510f28de51c15bb93179d9c7f28705398 action=start " - "schedule={'AtDfpKFe1RPgcr5nnYBw1Wxkgyn8Zjyh5MzFoEUTeoV3': " - "'2017-01-25T12:49:05.258870+00:00', " - "'4yC546FFzorLPgTNTc6V43DnpFrR8uHvtunBxb2Suaa2': " - "'2017-01-25T12:33:53.258870+00:00', " - "'JpYerf4CssDrH76z7jyQPJLnZ1vwYgvKbvcp16AB5RQ': " - "'2017-01-25T12:44:01.258870+00:00', " - "'DG5M4zFm33Shrhjj6JB7nmx9BoNJUq219UXDfvwBDPe2': " - "'2017-01-25T12:38:57.258870+00:00'} " - "timeout=10 " - "force=True " - "reinstall=True") - - # Testing for cancel - getMatchedVariables( - grammar, 'send POOL_UPGRADE name=upgrade-13 version=0.0.6 ' - 'sha256=aad1242 action=cancel ' - 'justification="not gonna give you"') - - -def testDisconnect(grammar): - matchedVars = getMatchedVariables( - grammar, "disconnect") - assertCliTokens(matchedVars, {"disconn": "disconnect"}) - - -def testNewIdentifier(grammar): - matchedVars = getMatchedVariables( - grammar, "new DID") - assertCliTokens(matchedVars, {"new_id": "new DID", - "id": None, - "seed": None, "alias": None}) - - matchedVars = getMatchedVariables( - grammar, "new DID as myalis") - assertCliTokens(matchedVars, - {"new_id": "new DID", "id": None, - "seed": None, "alias": "myalis"}) - - matchedVars = getMatchedVariables( - grammar, "new DID 4QxzWk3ajdnEA37NdNU5Kt") - assertCliTokens(matchedVars, {"new_id": "new DID", - "id": "4QxzWk3ajdnEA37NdNU5Kt", - "seed": None, "alias": None}) - - matchedVars = getMatchedVariables( - grammar, "new DID 4QxzWk3ajdnEA37NdNU5Kt " - "with seed aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa") - assertCliTokens(matchedVars, {"new_id": "new DID", - "id": "4QxzWk3ajdnEA37NdNU5Kt", - "seed": "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", - "alias": None}) - - -def testAddGenTxnRegEx(grammar): - matchedVars = getMatchedVariables( - grammar, - "add genesis transaction NYM dest=2ru5PcgeQzxF7QZYwQgDkG2K13PRqyigVw99zMYg8eML") - assertCliTokens(matchedVars, - {"add_genesis": "add genesis transaction NYM", - "dest": "dest=", - "dest_id": 
"2ru5PcgeQzxF7QZYwQgDkG2K13PRqyigVw99zMYg8eML", - "role": None, - "ver_key": None}) - - matchedVars = getMatchedVariables( - grammar, - "add genesis transaction NYM dest=2ru5PcgeQzxF7QZYwQgDkG2K13PRqyigVw99zMYg8eML role={role}".format( - role=Roles.STEWARD.name)) - assertCliTokens(matchedVars, - {"add_genesis": "add genesis transaction NYM", - "dest": "dest=", - "dest_id": "2ru5PcgeQzxF7QZYwQgDkG2K13PRqyigVw99zMYg8eML", - "role": Roles.STEWARD.name, - "ver_key": None}) - - matchedVars = getMatchedVariables( - grammar, 'add genesis transaction NODE for 2ru5PcgeQzxF7QZYwQgDkG2K13PRqyigVw99zMYg8eML ' - 'by FvDi9xQZd1CZitbK15BNKFbA7izCdXZjvxf91u3rQVzW with data ' - '{"node_ip": "localhost", "node_port": "9701", "client_ip": "localhost", "client_port": "9702", "alias": "AliceNode"}') - assertCliTokens( - matchedVars, - { - "add_gen_txn": "add genesis transaction", - "type": "NODE", - "dest": "2ru5PcgeQzxF7QZYwQgDkG2K13PRqyigVw99zMYg8eML", - "identifier": "FvDi9xQZd1CZitbK15BNKFbA7izCdXZjvxf91u3rQVzW", - "role": None, - "data": '{"node_ip": "localhost", "node_port": "9701", "client_ip": "localhost", "client_port": "9702", "alias": "AliceNode"}'}) - - -def testReqAvailClaims(grammar): - matchedVars = getMatchedVariables(grammar, - "request available claims from Faber") - - assertCliTokens(matchedVars, { - "request_avail_claims": "request available claims from", - "connection_name": "Faber" - }) diff --git a/indy_client/test/cli/test_connect_env.py b/indy_client/test/cli/test_connect_env.py deleted file mode 100644 index 81223e374..000000000 --- a/indy_client/test/cli/test_connect_env.py +++ /dev/null @@ -1,92 +0,0 @@ -import pytest - -from indy_client.test import waits -from stp_core.loop.eventually import eventually - -from indy_client.test.cli.helper import checkConnectedToEnv, prompt_is - - -@pytest.fixture(scope="module") -def alice(aliceCLI): - return aliceCLI - - -def test_disconnect_when_not_connected(alice, be, do): - be(alice) - do(None, expect=prompt_is("indy")) 
- do('disconnect', within=1, expect=['Not connected to any environment.']) - do(None, expect=prompt_is("indy")) - - -@pytest.fixture(scope="module") -def alice_connected(alice, be, do, poolNodesCreated): - be(alice) - do(None, expect=prompt_is("indy")) - do('connect sandbox', within=5, expect=["Connected to sandbox"]) - do(None, expect=prompt_is("indy@sandbox")) - - -def test_connect_to_test(alice_connected): - pass - - -@pytest.fixture(scope="module") -def alice_disconnected(alice, be, do, alice_connected): - be(alice) - do(None, expect=prompt_is("indy@sandbox")) - do('disconnect', within=1, expect=[ - 'Disconnecting from sandbox ...', - 'Disconnected from sandbox' - ]) - do(None, expect=prompt_is("indy")) - - -def test_disconnect_when_connected(do, be, alice_disconnected): - pass - - -def testConnectEnv(poolNodesCreated, looper, notConnectedStatus): - poolCLI = poolNodesCreated - notConnectedMsgs = notConnectedStatus - # Done to initialise a wallet. - poolCLI.enterCmd("new key") - - poolCLI.enterCmd("status") - for msg in notConnectedMsgs: - assert msg in poolCLI.lastCmdOutput - - poolCLI.enterCmd("connect dummy") - assert "Unknown environment dummy" in poolCLI.lastCmdOutput - - poolCLI.enterCmd("connect sandbox") - assert "Connecting to sandbox" in poolCLI.lastCmdOutput - timeout = waits.expectedAgentConnected() - looper.run(eventually(checkConnectedToEnv, poolCLI, retryWait=1, - timeout=timeout)) - poolCLI.enterCmd("status") - assert "Connected to sandbox Indy network" == poolCLI.lastCmdOutput - - -def testCreateMultiPoolNodes(multiPoolNodesCreated): - assert len(multiPoolNodesCreated) == 2 - - -@pytest.fixture(scope="module") -def pool1(multiPoolNodesCreated): - return multiPoolNodesCreated[0] - - -@pytest.fixture(scope="module") -def pool2(multiPoolNodesCreated): - return multiPoolNodesCreated[1] - - -def test_connect_to_different_pools(do, be, cliForMultiNodePools): - be(cliForMultiNodePools) - do(None, expect=prompt_is("indy")) - do('connect pool1', 
within=5, expect=["Connected to pool1"]) - do(None, expect=prompt_is("indy@pool1")) - do('connect pool2', within=5, expect=["Connected to pool2"]) - do(None, expect=prompt_is("indy@pool2")) - do('connect pool1', within=5, expect=["Connected to pool1"]) - do(None, expect=prompt_is("indy@pool1")) diff --git a/indy_client/test/cli/test_merge_invitation.py b/indy_client/test/cli/test_merge_invitation.py deleted file mode 100644 index 57f7b6bdb..000000000 --- a/indy_client/test/cli/test_merge_invitation.py +++ /dev/null @@ -1,67 +0,0 @@ -from indy_client.client.wallet.connection import Connection -from indy_client.client.wallet.wallet import Wallet -from indy_client.agent.walleted_agent import WalletedAgent -from plenum.common.constants import NAME, VERSION, ATTRIBUTES, VERIFIABLE_ATTRIBUTES, NONCE - - -def test_merge_invitation(): - nonce = "12345" - wallet1 = Wallet('wallet1') - connection_1 = Connection('connection1') - walleted_agent = WalletedAgent(name='wallet1') - - wallet1.addConnection(connection_1) - walleted_agent.wallet = wallet1 - connection = walleted_agent._wallet.getConnection('connection1') - assert len(connection.proofRequests) == 0 - request_data = {'connection-request': {NAME: 'connection1', - NONCE: nonce}, - 'proof-requests': [{NAME: 'proof1', - VERSION: '1', - ATTRIBUTES: {'att_key1': 'att_value1', - 'att_key2': 'att_value2'}, - VERIFIABLE_ATTRIBUTES: {'ver_att_key1': 'ver_att_value1'}}]} - - # test that a proof request with attributes can be merged into a connection - # that already exists but has no proof requests. 
- walleted_agent._merge_request(request_data) - assert len(connection.proofRequests) == 1 - assert len(connection.proofRequests[0].attributes.keys()) == 2 - assert connection.proofRequests[0].attributes['att_key1'] == 'att_value1' - - request_data2 = {'connection-request': {NAME: 'connection1', - NONCE: nonce}, - 'proof-requests': [{NAME: 'proof1', - VERSION: '1', - ATTRIBUTES: {'att_key1': 'att_value1', - 'att_key2': 'att_value2', - 'att_key3': 'att_value3'}, - VERIFIABLE_ATTRIBUTES: {'ver_att_key1': 'ver_att_value1', - 'ver_att_key2': 'ver_att_value2'}, - }]} - - # test that additional attributes and verifiable attributes can be - # merged into an already existing proof request - walleted_agent._merge_request(request_data2) - assert len(connection.proofRequests) == 1 - assert len(connection.proofRequests[0].attributes.keys()) == 3 - assert connection.proofRequests[0].attributes['att_key3'] == 'att_value3' - assert len(connection.proofRequests[0].verifiableAttributes.keys()) == 2 - - request_data3 = {'connection-request': {NAME: 'connection1', - NONCE: nonce}, - 'proof-requests': [{NAME: 'proof2', - VERSION: '1', - ATTRIBUTES: {'att_key1': 'att_value1', - 'att_key2': 'att_value2', - 'att_key3': 'att_value3'}, - VERIFIABLE_ATTRIBUTES: {'ver_att_key1': 'ver_att_value1', - 'ver_att_key2': 'ver_att_value2'}, - }]} - - # test that a second proof from the same connection can be merged - walleted_agent._merge_request(request_data3) - assert len(connection.proofRequests) == 2 - assert len(connection.proofRequests[1].attributes.keys()) == 3 - assert connection.proofRequests[1].attributes['att_key3'] == 'att_value3' - assert len(connection.proofRequests[1].verifiableAttributes.keys()) == 2 diff --git a/indy_client/test/cli/test_new_identifier.py b/indy_client/test/cli/test_new_identifier.py deleted file mode 100644 index 72e3ec814..000000000 --- a/indy_client/test/cli/test_new_identifier.py +++ /dev/null @@ -1,72 +0,0 @@ - -def checkWalletState(cli, totalIds, isAbbr, 
isCrypto): - - if cli._activeWallet: - assert len(cli._activeWallet.idsToSigners) == totalIds - - if totalIds > 0: - activeSigner = cli._activeWallet.idsToSigners[ - cli._activeWallet.defaultId] - - if isAbbr: - assert activeSigner.verkey.startswith("~"), \ - "verkey {} doesn't look like abbreviated verkey".\ - format(activeSigner.verkey) - - assert cli._activeWallet.defaultId != activeSigner.verkey, \ - "new DID should not be equal to abbreviated verkey" - - if isCrypto: - assert not activeSigner.verkey.startswith("~"), \ - "verkey {} doesn't look like cryptographic verkey". \ - format(activeSigner.verkey) - - assert cli._activeWallet.defaultId == activeSigner.verkey, \ - "new DID should be equal to verkey" - - -def getTotalIds(cli): - if cli._activeWallet: - return len(cli._activeWallet.idsToSigners) - else: - return 0 - - -def testNewIdWithIncorrectSeed(be, do, aliceCLI): - totalIds = getTotalIds(aliceCLI) - be(aliceCLI) - # Seed not of length 32 or 64 - do("new DID with seed aaaaaaaaaaa", - expect=["Seed needs to be 32 or 64 characters (if hex) long"]) - checkWalletState(aliceCLI, totalIds=totalIds, isAbbr=False, isCrypto=False) - - # Seed of length 64 but not hex - do("new DID with seed " - "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy", - expect=["Seed needs to be 32 or 64 characters (if hex) long"]) - checkWalletState(aliceCLI, totalIds=totalIds, isAbbr=False, - isCrypto=False) - - # Seed of length 64 and hex - do("new DID with seed " - "2af3d062450c942be50ee766ce2571a6c75c0aca0de322293e7e9f116959c9c3", - expect=["Current DID set to"]) - checkWalletState(aliceCLI, totalIds=totalIds + 1, isAbbr=False, - isCrypto=False) - - -def testNewIdIsNotInvalidCommand(be, do, aliceCLI): - totalIds = getTotalIds(aliceCLI) - be(aliceCLI) - do("new DID", not_expect=["Invalid command"]) - checkWalletState(aliceCLI, totalIds=totalIds + - 1, isAbbr=False, isCrypto=False) - - -def testNewId(be, do, aliceCLI): - totalIds = getTotalIds(aliceCLI) - be(aliceCLI) 
- do("new DID", - expect=["Current DID set to"]) - checkWalletState(aliceCLI, totalIds=totalIds + - 1, isAbbr=False, isCrypto=False) diff --git a/indy_client/test/cli/test_node.py b/indy_client/test/cli/test_node.py deleted file mode 100644 index 32bf0b460..000000000 --- a/indy_client/test/cli/test_node.py +++ /dev/null @@ -1,173 +0,0 @@ -import base58 -from plenum.bls.bls_crypto_factory import create_default_bls_crypto_factory - -from plenum.common.constants import NODE_IP, CLIENT_IP, CLIENT_PORT, NODE_PORT, \ - ALIAS, BLS_KEY, BLS_KEY_PROOF -from plenum.common.keygen_utils import init_bls_keys -from plenum.common.util import randomString -from plenum.test.cli.helper import exitFromCli -from stp_core.network.port_dispenser import genHa - -from indy_client.test.cli.helper import doSendNodeCmd - - -def test_add_new_node(newNodeAdded): - ''' - Checks adding of a new Nodes with all parameters (including BLS keys) - ''' - pass - - -def test_add_same_node_without_any_change(be, do, newStewardCli, - newNodeVals, newNodeAdded): - ''' - Checks that it's not possible to add the same node twice by owner - ''' - be(newStewardCli) - doSendNodeCmd(do, newNodeVals, - expMsgs=['node already has the same data as requested']) - exitFromCli(do) - - -def test_add_same_node_without_any_change_by_trustee(be, do, trusteeCli, - newNodeVals, newNodeAdded, - nodeValsEmptyData): - ''' - Checks that it's not possible to add the same node twice by Trustee - ''' - be(trusteeCli) - doSendNodeCmd(do, newNodeVals, - expMsgs=["node already has the same data as requested"]) - exitFromCli(do) - - -def test_add_same_node_with_changed_bls_by_trustee(be, do, trusteeCli, - newNodeVals, newNodeAdded, - nodeValsEmptyData, - new_bls_keys): - ''' - Checks that it's not possible to add the same node with different BLS key by Trustee - ''' - be(trusteeCli) - node_vals = newNodeVals - bls_key, key_proof = new_bls_keys - node_vals['newNodeData'][BLS_KEY] = bls_key - node_vals['newNodeData'][BLS_KEY_PROOF] = 
key_proof - doSendNodeCmd(do, node_vals, - expMsgs=["TRUSTEE not in allowed roles ['STEWARD']"]) - exitFromCli(do) - - -def test_update_node_and_client_port_same(be, do, newStewardCli, - newNodeVals, - newNodeAdded, - nodeValsEmptyData): - ''' - Checks that it's not possible to have node and client ports same (by owner) - ''' - be(newStewardCli) - nodeIp, nodePort = genHa() - - node_vals = nodeValsEmptyData - node_vals['newNodeData'][ALIAS] = newNodeVals['newNodeData'][ALIAS] - node_vals['newNodeData'][NODE_IP] = nodeIp - node_vals['newNodeData'][NODE_PORT] = nodePort - node_vals['newNodeData'][CLIENT_IP] = nodeIp - node_vals['newNodeData'][CLIENT_PORT] = nodePort - - doSendNodeCmd(do, node_vals, - expMsgs=["node and client ha cannot be same"]) - exitFromCli(do) - - -def test_update_ports_and_ips(be, do, newStewardCli, - newNodeVals, newNodeAdded, - nodeValsEmptyData): - ''' - Checks that it's possible to update node and client ports and IPs (by owner) - (just alias and ports/IPs are required) - ''' - be(newStewardCli) - nodeIp, nodePort = genHa() - clientIp, clientPort = genHa() - - node_vals = nodeValsEmptyData - node_vals['newNodeData'][ALIAS] = newNodeVals['newNodeData'][ALIAS] - node_vals['newNodeData'][NODE_IP] = nodeIp - node_vals['newNodeData'][NODE_PORT] = nodePort - node_vals['newNodeData'][CLIENT_IP] = clientIp - node_vals['newNodeData'][CLIENT_PORT] = clientPort - - doSendNodeCmd(do, node_vals, - expMsgs=['Node request completed']) - exitFromCli(do) - - -def test_update_bls(be, do, newStewardCli, - newNodeVals, newNodeAdded, - nodeValsEmptyData, - new_bls_keys): - ''' - Checks that it's possible to update BLS keys by owner (just alias and new key are required) - ''' - be(newStewardCli) - - node_vals = nodeValsEmptyData - bls_key, key_proof = new_bls_keys - node_vals['newNodeData'][BLS_KEY] = bls_key - node_vals['newNodeData'][BLS_KEY_PROOF] = key_proof - node_vals['newNodeData'][ALIAS] = newNodeVals['newNodeData'][ALIAS] - - doSendNodeCmd(do, node_vals, 
- expMsgs=['Node request completed']) - exitFromCli(do) - - -def test_update_bls_by_trustee(be, do, trusteeCli, - newNodeVals, newNodeAdded, - nodeValsEmptyData, - new_bls_keys): - ''' - Checks that it's not possible to update BLS keys by Trustee (just alias and new key are required) - ''' - be(trusteeCli) - - node_vals = nodeValsEmptyData - bls_key, key_proof = new_bls_keys - node_vals['newNodeData'][BLS_KEY] = bls_key - node_vals['newNodeData'][BLS_KEY_PROOF] = key_proof - node_vals['newNodeData'][ALIAS] = newNodeVals['newNodeData'][ALIAS] - - doSendNodeCmd(do, node_vals, - expMsgs=["TRUSTEE not in allowed roles ['STEWARD']"]) - exitFromCli(do) - - -def test_add_same_data_alias_changed(be, do, - newStewardCli, newNodeVals, - newNodeAdded): - ''' - Checks that it's not possible to add a new Node with the same alias (by owner) - ''' - be(newStewardCli) - newNodeVals['newNodeData'][ALIAS] = randomString(6) - doSendNodeCmd(do, newNodeVals, - expMsgs=['existing data has conflicts with request data']) - exitFromCli(do) - - -def test_update_alias(be, do, - newStewardCli, - newNodeAdded, - nodeValsEmptyData): - ''' - Checks that it's not possible to change alias of existing node (by owner) - ''' - be(newStewardCli) - - node_vals = nodeValsEmptyData - node_vals['newNodeData'][ALIAS] = randomString(6) - - doSendNodeCmd(do, node_vals, - expMsgs=['existing data has conflicts with request data']) - exitFromCli(do) diff --git a/indy_client/test/cli/test_node_demotion.py b/indy_client/test/cli/test_node_demotion.py deleted file mode 100644 index 5ff8782f0..000000000 --- a/indy_client/test/cli/test_node_demotion.py +++ /dev/null @@ -1,51 +0,0 @@ -import pytest -from plenum.common.signer_did import DidSigner -from plenum.common.txn_util import get_payload_data -from stp_core.crypto.util import randomSeed -from indy_client.test.cli.helper import addAgent -from plenum.common.constants import SERVICES, VALIDATOR, TARGET_NYM, DATA -from indy_client.test.cli.constants import 
NODE_REQUEST_COMPLETED, NODE_REQUEST_FAILED - - -def ensurePoolIsOperable(be, do, cli): - randomNymMapper = { - 'remote': DidSigner(seed=randomSeed()).identifier - } - addAgent(be, do, cli, randomNymMapper) - - -# this test messes with other tests so it goes in its own module -def test_steward_can_promote_and_demote_own_node( - be, do, poolNodesStarted, newStewardCli, trusteeCli, newNodeVals): - - ensurePoolIsOperable(be, do, newStewardCli) - - newNodeVals['newNodeData'][SERVICES] = [VALIDATOR] - - be(newStewardCli) - do('send NODE dest={newNodeIdr} data={newNodeData}', - mapper=newNodeVals, expect=NODE_REQUEST_COMPLETED, within=8) - - newNodeVals['newNodeData'][SERVICES] = [] - - do('send NODE dest={newNodeIdr} data={newNodeData}', - mapper=newNodeVals, expect=NODE_REQUEST_COMPLETED, within=8) - - ensurePoolIsOperable(be, do, newStewardCli) - - for node in poolNodesStarted.nodes.values(): - txn = [t for _, t in node.poolLedger.getAllTxn()][-1] - txn_data = get_payload_data(txn) - assert txn_data[TARGET_NYM] == newNodeVals['newNodeIdr'] - assert SERVICES in txn_data[DATA] and txn_data[DATA][SERVICES] == [] - - newNodeVals['newNodeData'][SERVICES] = [VALIDATOR] - - do('send NODE dest={newNodeIdr} data={newNodeData}', - mapper=newNodeVals, expect=NODE_REQUEST_COMPLETED, within=8) - - for node in poolNodesStarted.nodes.values(): - txn = [t for _, t in node.poolLedger.getAllTxn()][-1] - txn_data = get_payload_data(txn) - assert txn_data[TARGET_NYM] == newNodeVals['newNodeIdr'] - assert SERVICES in txn_data[DATA] and txn_data[DATA][SERVICES] == [VALIDATOR] diff --git a/indy_client/test/cli/test_node_suspension.py b/indy_client/test/cli/test_node_suspension.py deleted file mode 100644 index 2f9f8f545..000000000 --- a/indy_client/test/cli/test_node_suspension.py +++ /dev/null @@ -1,80 +0,0 @@ -from plenum.common.constants import SERVICES, VALIDATOR, TARGET_NYM, DATA -from indy_common.roles import Roles -from plenum.common.txn_util import get_payload_data -from 
stp_core.network.port_dispenser import genHa - -import pytest - -from indy_client.test.cli.helper import doSendNodeCmd - - -def testSuspendNode(be, do, trusteeCli, newNodeAdded): - """ - Suspend a node and then cancel suspension. Suspend while suspended - to test that there is no error - """ - newNodeVals = newNodeAdded - - be(trusteeCli) - - newNodeVals['newNodeData'][SERVICES] = [] - doSendNodeCmd(do, newNodeVals) - # Re-suspend node - newNodeVals['newNodeData'][SERVICES] = [] - doSendNodeCmd(do, newNodeVals, - expMsgs=['node already has the same data as requested']) - - # Cancel suspension - newNodeVals['newNodeData'][SERVICES] = [VALIDATOR] - doSendNodeCmd(do, newNodeVals) - - # Re-cancel suspension - newNodeVals['newNodeData'][SERVICES] = [VALIDATOR] - doSendNodeCmd(do, nodeVals=newNodeVals, - expMsgs=['node already has the same data as requested']) - - -@pytest.mark.skip(reason='INDY-133. Broken compatibility') -def testSuspendNodeWhichWasNeverActive(be, do, trusteeCli, nymAddedOut, - poolNodesStarted, trusteeMap): - """ - Add a node without services field and check that the ledger does not - contain the `services` field and check that it can be blacklisted and - the ledger has `services` as empty list - """ - newStewardSeed = '0000000000000000000KellySteward2' - newStewardIdr = 'DqCx7RFEpSUMZbV2mH89XPH6JT3jMvDNU55NTnBHsQCs' - be(trusteeCli) - do('send NYM dest={{remote}} role={role}'.format( - role=Roles.STEWARD.name), - within=5, - expect=nymAddedOut, mapper={'remote': newStewardIdr}) - do('new key with seed {}'.format(newStewardSeed)) - nport, cport = (_[1] for _ in genHa(2)) - nodeId = '6G9QhQa3HWjRKeRmEvEkLbWWf2t7cw6KLtafzi494G4G' - newNodeVals = { - 'newNodeIdr': nodeId, - 'newNodeData': {'client_port': cport, - 'client_ip': '127.0.0.1', - 'alias': 'Node6', - 'node_ip': '127.0.0.1', - 'node_port': nport - } - } - doSendNodeCmd(do, newNodeVals) - - for node in poolNodesStarted.nodes.values(): - txn = [t for _, t in node.poolLedger.getAllTxn()][-1] - 
txn_data = get_payload_data(txn) - assert txn_data[TARGET_NYM] == nodeId - assert SERVICES not in txn_data[DATA] - - do('new key with seed {}'.format(trusteeMap['trusteeSeed'])) - newNodeVals['newNodeData'][SERVICES] = [] - doSendNodeCmd(do, newNodeVals) - - for node in poolNodesStarted.nodes.values(): - txn = [t for _, t in node.poolLedger.getAllTxn()][-1] - txn_data = get_payload_data(txn) - assert txn_data[TARGET_NYM] == nodeId - assert SERVICES in txn_data[DATA] and txn_data[DATA][SERVICES] == [] diff --git a/indy_client/test/cli/test_pool_config.py b/indy_client/test/cli/test_pool_config.py deleted file mode 100644 index 93141bfda..000000000 --- a/indy_client/test/cli/test_pool_config.py +++ /dev/null @@ -1,60 +0,0 @@ -import pytest - -from indy_node.test.upgrade.conftest import validUpgrade -from indy_client.test.cli.constants import INVALID_SYNTAX -from plenum.common.constants import VERSION - - -def testPoolConfigInvalidSyntax(be, do, trusteeCli): - be(trusteeCli) - do('send POOL_CONFIG wites=True force=False', expect=INVALID_SYNTAX, within=10) - do('send POOL_CONFIG writes=Tue force=False', expect=INVALID_SYNTAX, within=10) - do('send POOL_CONFIG writes=True froce=False', - expect=INVALID_SYNTAX, within=10) - do('send POOL_CONFIG writes=True force=1', expect=INVALID_SYNTAX, within=10) - - -def testPoolConfigWritableFalse(be, do, trusteeCli): - be(trusteeCli) - do('send POOL_CONFIG writes=False force=False', - expect="Pool config successful", within=10) - do('send NYM dest=33333333333333333333333333333333333333333333', - expect="Pool is in readonly mode", within=10) - - -def testPoolConfigWritableTrue(be, do, trusteeCli): - be(trusteeCli) - do('send NYM dest=33333333333333333333333333333333333333333333', - expect="Pool is in readonly mode", within=10) - do('send POOL_CONFIG writes=True force=False', - expect="Pool config successful", within=10) - do('send NYM dest=33333333333333333333333333333333333333333333', - expect="added", within=10) - - -def 
testPoolConfigWritableFalseCanRead(be, do, trusteeCli): - be(trusteeCli) - do('send NYM dest=44444444444444444444444444444444444444444444', - expect="added", within=10) - do('send GET_NYM dest=44444444444444444444444444444444444444444444', - expect="Current verkey is same as DID", within=10) - do('send POOL_CONFIG writes=False force=False', - expect="Pool config successful", within=10) - do('send NYM dest=55555555555555555555555555555555555555555555', - expect="Pool is in readonly mode", within=10) - do('send GET_NYM dest=44444444444444444444444444444444444444444444', - expect="Current verkey is same as DID", within=10) - - -def testPoolUpgradeOnReadonlyPool( - poolNodesStarted, be, do, trusteeCli, validUpgrade): - be(trusteeCli) - do('send POOL_CONFIG writes=False force=False', - expect="Pool config successful", within=10) - do('send POOL_UPGRADE name={name} version={version} sha256={sha256} action={action} schedule={schedule} timeout={timeout}', - within=10, expect=['Sending pool upgrade', 'Pool Upgrade Transaction Scheduled'], mapper=validUpgrade) - - for node in poolNodesStarted.nodes.values(): - assert len(node.upgrader.aqStash) > 0 - assert node.upgrader.scheduledAction - assert node.upgrader.scheduledAction[0] == validUpgrade[VERSION] diff --git a/indy_client/test/cli/test_pool_upgrade.py b/indy_client/test/cli/test_pool_upgrade.py deleted file mode 100644 index 00a029a85..000000000 --- a/indy_client/test/cli/test_pool_upgrade.py +++ /dev/null @@ -1,137 +0,0 @@ -from copy import copy - -import pytest - -from indy_node.test import waits -from stp_core.loop.eventually import eventually -from plenum.common.constants import VERSION -from indy_common.constants import ACTION, CANCEL, JUSTIFICATION -from indy_node.test.upgrade.helper import checkUpgradeScheduled, \ - checkNoUpgradeScheduled -from indy_node.test.upgrade.conftest import validUpgrade, validUpgradeExpForceFalse, validUpgradeExpForceTrue - - -def send_upgrade_cmd(do, expect, upgrade_data): - do('send 
POOL_UPGRADE name={name} version={version} sha256={sha256} ' - 'action={action} schedule={schedule} timeout={timeout}', - within=10, - expect=expect, mapper=upgrade_data) - - -@pytest.fixture(scope="function") -def poolUpgradeSubmitted(be, do, trusteeCli, validUpgrade): - be(trusteeCli) - send_upgrade_cmd(do, - ['Sending pool upgrade', - 'Pool Upgrade Transaction Scheduled'], - validUpgrade) - - -@pytest.fixture(scope="function") -def poolUpgradeScheduled(poolUpgradeSubmitted, poolNodesStarted, validUpgrade): - nodes = poolNodesStarted.nodes.values() - timeout = waits.expectedUpgradeScheduled() - poolNodesStarted.looper.run( - eventually(checkUpgradeScheduled, nodes, - validUpgrade[VERSION], retryWait=1, timeout=timeout)) - - -@pytest.fixture(scope="function") -def poolUpgradeCancelled(poolUpgradeScheduled, be, do, trusteeCli, - validUpgrade): - cancelUpgrade = copy(validUpgrade) - cancelUpgrade[ACTION] = CANCEL - cancelUpgrade[JUSTIFICATION] = '"not gonna give you one"' - be(trusteeCli) - do('send POOL_UPGRADE name={name} version={version} sha256={sha256} action={action} ' - 'justification={justification} schedule={schedule} timeout={timeout} ', - within=10, - expect=['Sending pool upgrade', 'Pool Upgrade Transaction Scheduled'], - mapper=cancelUpgrade) - - -def test_pool_upgrade_rejected(be, do, newStewardCli, validUpgrade): - """ - Pool upgrade done by a non trustee is rejected - """ - be(newStewardCli) - err_msg = "Pool upgrade failed: client request invalid: " \ - "UnauthorizedClientRequest('STEWARD cannot do POOL_UPGRADE'" - send_upgrade_cmd(do, - ['Sending pool upgrade', - err_msg], - validUpgrade) - - -def testPoolUpgradeSent(poolUpgradeScheduled): - pass - - -def testPoolUpgradeCancelled(poolUpgradeCancelled, poolNodesStarted): - nodes = poolNodesStarted.nodes.values() - timeout = waits.expectedNoUpgradeScheduled() - poolNodesStarted.looper.run( - eventually(checkNoUpgradeScheduled, - nodes, retryWait=1, timeout=timeout)) - - -def 
send_force_false_upgrade_cmd(do, expect, upgrade_data): - do('send POOL_UPGRADE name={name} version={version} sha256={sha256} ' - 'action={action} schedule={schedule} timeout={timeout} force=False', - within=10, - expect=expect, mapper=upgrade_data) - - -def test_force_false_upgrade( - be, do, trusteeCli, poolNodesStarted, validUpgradeExpForceFalse): - be(trusteeCli) - send_force_false_upgrade_cmd(do, - ['Sending pool upgrade', - 'Pool Upgrade Transaction Scheduled'], - validUpgradeExpForceFalse) - poolNodesStarted.looper.run( - eventually( - checkUpgradeScheduled, - poolNodesStarted.nodes.values(), - validUpgradeExpForceFalse[VERSION], - retryWait=1, - timeout=10)) - - -def send_force_true_upgrade_cmd(do, expect, upgrade_data): - do('send POOL_UPGRADE name={name} version={version} sha256={sha256} ' - 'action={action} schedule={schedule} timeout={timeout} force=True package={package}', - within=10, expect=expect, mapper=upgrade_data) - - -def test_force_upgrade(be, do, trusteeCli, poolNodesStarted, - validUpgradeExpForceTrue): - nodes = poolNodesStarted.nodes.values() - for node in nodes: - if node.name in ["Delta", "Gamma"] and node in poolNodesStarted.looper.prodables: - node.stop() - poolNodesStarted.looper.removeProdable(node) - be(trusteeCli) - send_force_true_upgrade_cmd( - do, ['Sending pool upgrade'], validUpgradeExpForceTrue) - - def checksched(): - for node in nodes: - if node.name not in ["Delta", "Gamma"]: - assert node.upgrader.scheduledAction - assert node.upgrader.scheduledAction[0] == validUpgradeExpForceTrue[VERSION] - - poolNodesStarted.looper.run(eventually( - checksched, retryWait=1, timeout=10)) - - -def send_reinstall_true_upgrade_cmd(do, expect, upgrade_data): - do('send POOL_UPGRADE name={name} version={version} sha256={sha256} ' - 'action={action} schedule={schedule} timeout={timeout} reinstall=True package={package}', - within=10, expect=expect, mapper=upgrade_data) - - -def send_reinstall_false_upgrade_cmd(do, expect, upgrade_data): - 
do('send POOL_UPGRADE name={name} version={version} sha256={sha256} ' - 'action={action} schedule={schedule} timeout={timeout} reinstall=False package={package}', - within=10, expect=expect, mapper=upgrade_data) diff --git a/indy_client/test/cli/test_pool_upgrade_schedule.py b/indy_client/test/cli/test_pool_upgrade_schedule.py deleted file mode 100644 index b0ef60db0..000000000 --- a/indy_client/test/cli/test_pool_upgrade_schedule.py +++ /dev/null @@ -1,27 +0,0 @@ -import dateutil -import pytest -from datetime import timedelta, datetime - -from indy_client.test.cli.test_pool_upgrade import poolUpgradeSubmitted -from indy_client.test.cli.test_pool_upgrade import poolUpgradeScheduled -from indy_node.test.upgrade.conftest import validUpgrade as _validUpgrade - - -@pytest.fixture(scope='function') -def validUpgrade(_validUpgrade): - # Add 5 days to the time of the upgrade of each node in schedule parameter - # of send POOL_UPGRADE command - upgradeSchedule = _validUpgrade['schedule'] - for nodeId in upgradeSchedule: - nodeUpgradeDateTime = dateutil.parser.parse(upgradeSchedule[nodeId]) - nodeUpgradeDateTime += timedelta(days=5) - upgradeSchedule[nodeId] = nodeUpgradeDateTime.isoformat() - return _validUpgrade - - -def test_node_upgrade_scheduled_on_proper_date(poolNodesStarted, - poolUpgradeScheduled): - # Verify that the upgrade is scheduled in approximately 5 days for each node - now = datetime.utcnow().replace(tzinfo=dateutil.tz.tzutc()) - for node in poolNodesStarted.nodes.values(): - assert (node.upgrader.scheduledAction[1] - now).days == 5 diff --git a/indy_client/test/cli/test_pool_upgrade_validation.py b/indy_client/test/cli/test_pool_upgrade_validation.py deleted file mode 100644 index 029ed2885..000000000 --- a/indy_client/test/cli/test_pool_upgrade_validation.py +++ /dev/null @@ -1,44 +0,0 @@ -from copy import deepcopy - -from indy_node.test.upgrade.conftest import validUpgrade -from indy_client.test.cli.constants import INVALID_SYNTAX, ERROR -from 
indy_node.test.upgrade.helper import loweredVersion -from plenum.common.constants import VERSION -from plenum.common.util import randomString -from indy_common.constants import JUSTIFICATION, JUSTIFICATION_MAX_SIZE - - -def testPoolUpgradeFailsIfVersionIsLowerThanCurrent( - be, do, validUpgrade, trusteeCli): - upgrade = deepcopy(validUpgrade) - upgrade[VERSION] = loweredVersion() - - err_msg = "Pool upgrade failed: client request invalid: " \ - "InvalidClientRequest('Version is not upgradable'" - - be(trusteeCli) - do('send POOL_UPGRADE name={name} version={version} sha256={sha256} ' - 'action={action} schedule={schedule} timeout={timeout} package={package}', - mapper=upgrade, expect=['Sending pool upgrade', err_msg], within=10) - - -def testPoolUpgradeHasInvalidSyntaxIfJustificationIsEmpty( - be, do, validUpgrade, trusteeCli): - upgrade = deepcopy(validUpgrade) - upgrade[JUSTIFICATION] = '' - - be(trusteeCli) - do('send POOL_UPGRADE name={name} version={version} sha256={sha256} ' - 'action={action} schedule={schedule} timeout={timeout} justification={justification} package={package}', - mapper=upgrade, expect=INVALID_SYNTAX, within=10) - - -def testPoolUpgradeHasInvalidSyntaxIfJustificationIsVeryLong( - be, do, validUpgrade, trusteeCli): - upgrade = deepcopy(validUpgrade) - upgrade[JUSTIFICATION] = randomString(JUSTIFICATION_MAX_SIZE + 1) - - be(trusteeCli) - do('send POOL_UPGRADE name={name} version={version} sha256={sha256} ' - 'action={action} schedule={schedule} timeout={timeout} justification={justification}', - mapper=upgrade, expect=INVALID_SYNTAX, within=10) diff --git a/indy_client/test/cli/test_proof_request.py b/indy_client/test/cli/test_proof_request.py deleted file mode 100644 index 25bfe852f..000000000 --- a/indy_client/test/cli/test_proof_request.py +++ /dev/null @@ -1,4 +0,0 @@ -def test_show_nonexistant_proof_request(be, do, aliceCLI): - be(aliceCLI) - do("show proof request Transcript", expect=[ - "No matching Proof Requests found in current 
wallet"], within=1) diff --git a/indy_client/test/cli/test_restore_wallet_before_rebranding.py b/indy_client/test/cli/test_restore_wallet_before_rebranding.py deleted file mode 100644 index c4a007850..000000000 --- a/indy_client/test/cli/test_restore_wallet_before_rebranding.py +++ /dev/null @@ -1,18 +0,0 @@ -import os -import shutil - -from plenum.cli.constants import NO_ENV -from plenum.common.util import createDirIfNotExists -from indy_client.client.wallet.wallet import Wallet - - -def testRestoreWalletBeforeRebranding(aliceCLI): - fileName = "wallet_before_rebranding" - curPath = os.path.dirname(os.path.realpath(__file__)) - walletFilePath = os.path.join(curPath, fileName) - noEnvKeyringsDir = os.path.join(aliceCLI.getWalletsBaseDir(), NO_ENV) - createDirIfNotExists(noEnvKeyringsDir) - shutil.copy2(walletFilePath, noEnvKeyringsDir) - targetWalletFilePath = os.path.join(noEnvKeyringsDir, fileName) - restored = aliceCLI.restoreWalletByPath(targetWalletFilePath) - assert restored and isinstance(aliceCLI.activeWallet, Wallet) diff --git a/indy_client/test/cli/test_restore_wallet_before_renaming_link_to_connection.py b/indy_client/test/cli/test_restore_wallet_before_renaming_link_to_connection.py deleted file mode 100644 index 5bf403414..000000000 --- a/indy_client/test/cli/test_restore_wallet_before_renaming_link_to_connection.py +++ /dev/null @@ -1,18 +0,0 @@ -import os -import shutil - -from plenum.cli.constants import NO_ENV -from plenum.common.util import createDirIfNotExists -from indy_client.client.wallet.wallet import Wallet - - -def testRestoreWalletBeforeRenamingLinkToConnection(aliceCLI): - fileName = "wallet_before_renaming_link_to_connection" - curPath = os.path.dirname(os.path.realpath(__file__)) - walletFilePath = os.path.join(curPath, fileName) - noEnvKeyringsDir = os.path.join(aliceCLI.getWalletsBaseDir(), NO_ENV) - createDirIfNotExists(noEnvKeyringsDir) - shutil.copy2(walletFilePath, noEnvKeyringsDir) - targetWalletFilePath = 
os.path.join(noEnvKeyringsDir, fileName) - restored = aliceCLI.restoreWalletByPath(targetWalletFilePath) - assert restored and isinstance(aliceCLI.activeWallet, Wallet) diff --git a/indy_client/test/cli/test_restore_wallet_from_mgl.py b/indy_client/test/cli/test_restore_wallet_from_mgl.py deleted file mode 100644 index 3213bdc8b..000000000 --- a/indy_client/test/cli/test_restore_wallet_from_mgl.py +++ /dev/null @@ -1,18 +0,0 @@ -import os -import shutil - -from plenum.cli.constants import NO_ENV -from plenum.common.util import createDirIfNotExists -from indy_client.client.wallet.wallet import Wallet - - -def testRestoreWalletFromMinimalGoLive(aliceCLI): - fileName = "wallet_from_minimal_go_live" - curPath = os.path.dirname(os.path.realpath(__file__)) - walletFilePath = os.path.join(curPath, fileName) - noEnvKeyringsDir = os.path.join(aliceCLI.getWalletsBaseDir(), NO_ENV) - createDirIfNotExists(noEnvKeyringsDir) - shutil.copy2(walletFilePath, noEnvKeyringsDir) - targetWalletFilePath = os.path.join(noEnvKeyringsDir, fileName) - restored = aliceCLI.restoreWalletByPath(targetWalletFilePath) - assert restored and isinstance(aliceCLI.activeWallet, Wallet) diff --git a/indy_client/test/cli/test_save_and_restore_wallet.py b/indy_client/test/cli/test_save_and_restore_wallet.py deleted file mode 100644 index 02f6277fe..000000000 --- a/indy_client/test/cli/test_save_and_restore_wallet.py +++ /dev/null @@ -1,211 +0,0 @@ -import os -from time import sleep - -import pytest -import shutil - -from plenum.cli.cli import Exit, Cli -from plenum.cli.constants import NO_ENV -from plenum.common.util import createDirIfNotExists, normalizedWalletFileName, \ - getWalletFilePath -from plenum.test.cli.helper import checkWalletFilePersisted, checkWalletRestored, \ - createAndAssertNewCreation, createAndAssertNewKeyringCreation, \ - useAndAssertKeyring, exitFromCli, restartCliAndAssert -from stp_core.loop.eventually import eventually - -from indy_client.client.wallet.wallet import Wallet -from 
indy_client.test.cli.helper import prompt_is - - -def performExit(do): - with pytest.raises(Exit): - do('exit', within=3) - - -def testPersistentWalletName(): - # Connects to "test" environment - walletFileName = normalizedWalletFileName("test") - assert "test.wallet" == walletFileName - assert "test" == Cli.getWalletKeyName(walletFileName) - - # New default wallet gets created - walletFileName = normalizedWalletFileName("Default") - assert "default.wallet" == walletFileName - assert "default" == Cli.getWalletKeyName(walletFileName) - - # User creates new wallet - walletFileName = normalizedWalletFileName("MyVault") - assert "myvault.wallet" == walletFileName - assert "myvault" == Cli.getWalletKeyName(walletFileName) - - -def getActiveWalletFilePath(cli): - fileName = cli.getActiveWalletPersitentFileName() - return getWalletFilePath(cli.getContextBasedWalletsBaseDir(), fileName) - - -def _connectTo(envName, do, cli): - do('connect {}'.format(envName), within=10, - expect=["Connected to {}".format(envName)]) - prompt_is("{}@{}".format(cli.name, envName)) - - -def connectTo(envName, do, cli, activeWalletPresents, identifiers, - firstTimeConnect=False): - currActiveWallet = cli._activeWallet - _connectTo(envName, do, cli) - if currActiveWallet is None and firstTimeConnect: - do(None, expect=[ - "New wallet Default created", - 'Active wallet set to "Default"'] - ) - - if activeWalletPresents: - assert cli._activeWallet is not None - assert len(cli._activeWallet.identifiers) == identifiers - else: - assert cli._activeWallet is None - - -def switchEnv(newEnvName, do, cli, checkIfWalletRestored=False, - restoredWalletKeyName=None, restoredIdentifiers=0): - walletFilePath = getActiveWalletFilePath(cli) - _connectTo(newEnvName, do, cli) - - # check wallet should have been persisted - checkWalletFilePersisted(walletFilePath) - - if checkIfWalletRestored: - checkWalletRestored(cli, restoredWalletKeyName, restoredIdentifiers) - - -def restartCli(cli, be, do, 
expectedRestoredWalletName, - expectedIdentifiers): - be(cli) - _connectTo("pool1", do, cli) - restartCliAndAssert(cli, do, expectedRestoredWalletName, - expectedIdentifiers) - - -def restartCliWithCorruptedWalletFile(cli, be, do, filePath): - with open(filePath, "a") as myfile: - myfile.write("appended text to corrupt wallet file") - - be(cli) - _connectTo("pool1", do, cli) - do(None, - expect=[ - 'error occurred while restoring wallet', - 'New wallet Default_', - 'Active wallet set to "Default_', - ], - not_expect=[ - 'Saved wallet "Default" restored', - 'New wallet Default created', - 'Active wallet set to "Default"' - ], within=5) - - -def createNewKey(do, cli, walletName): - createAndAssertNewCreation(do, cli, walletName) - - -def createNewWallet(name, do, expectedMsgs=None): - createAndAssertNewKeyringCreation(do, name, expectedMsgs) - - -def useKeyring(name, do, expectedName=None, expectedMsgs=None): - useAndAssertKeyring(do, name, expectedName, expectedMsgs) - - -def testRestoreWalletFile(aliceCLI): - fileName = "tmp_wallet_restore_issue" - curPath = os.path.dirname(os.path.realpath(__file__)) - walletFilePath = os.path.join(curPath, fileName) - noEnvKeyringsDir = os.path.join(aliceCLI.getWalletsBaseDir(), NO_ENV) - createDirIfNotExists(noEnvKeyringsDir) - shutil.copy2(walletFilePath, noEnvKeyringsDir) - targetWalletFilePath = os.path.join(noEnvKeyringsDir, fileName) - restored = aliceCLI.restoreWalletByPath(targetWalletFilePath) - assert restored and isinstance(aliceCLI.activeWallet, Wallet) - - -@pytest.mark.skip(reason="Something goes wrong on Jenkins, need separate ticket.") -def testSaveAndRestoreWallet(do, be, cliForMultiNodePools, - aliceMultiNodePools, - earlMultiNodePools): - be(cliForMultiNodePools) - # No wallet should have been restored - assert cliForMultiNodePools._activeWallet is None - - connectTo("pool1", do, cliForMultiNodePools, - activeWalletPresents=True, identifiers=0, firstTimeConnect=True) - createNewKey(do, cliForMultiNodePools, 
walletName="Default") - - switchEnv("pool2", do, cliForMultiNodePools, checkIfWalletRestored=False) - createNewKey(do, cliForMultiNodePools, walletName="Default") - createNewWallet("mykr0", do) - createNewKey(do, cliForMultiNodePools, walletName="mykr0") - createNewKey(do, cliForMultiNodePools, walletName="mykr0") - useKeyring("Default", do) - createNewKey(do, cliForMultiNodePools, walletName="Default") - sleep(10) - switchEnv("pool1", do, cliForMultiNodePools, checkIfWalletRestored=True, - restoredWalletKeyName="Default", restoredIdentifiers=1) - createNewWallet("mykr1", do) - createNewKey(do, cliForMultiNodePools, walletName="mykr1") - - switchEnv("pool2", do, cliForMultiNodePools, checkIfWalletRestored=True, - restoredWalletKeyName="Default", restoredIdentifiers=2) - createNewWallet("mykr0", do, - expectedMsgs=[ - '"mykr0" conflicts with an existing wallet', - 'Please choose a new name.']) - - filePath = getWalletFilePath( - cliForMultiNodePools.getContextBasedWalletsBaseDir(), - cliForMultiNodePools.walletFileName) - switchEnv("pool1", do, cliForMultiNodePools, checkIfWalletRestored=True, - restoredWalletKeyName="mykr1", restoredIdentifiers=1) - useKeyring(filePath, do, expectedName="mykr0", - expectedMsgs=[ - "Given wallet file ({}) doesn't " - "belong to current context.".format(filePath), - "Please connect to 'pool2' environment and try again."]) - - # exit from current cli so that active wallet gets saved - exitFromCli(do) - - alice_wallets_dir = os.path.join(aliceMultiNodePools.getWalletsBaseDir(), "pool1") - earl_wallets_dir = os.path.join(earlMultiNodePools.getWalletsBaseDir(), "pool1") - - os.makedirs(alice_wallets_dir, exist_ok=True) - os.makedirs(earl_wallets_dir, exist_ok=True) - - alice_wallet_path = os.path.join(alice_wallets_dir, cliForMultiNodePools.walletFileName) - earl_wallet_path = os.path.join(earl_wallets_dir, cliForMultiNodePools.walletFileName) - - # different tests for restoring saved wallet - filePath = getWalletFilePath( - 
cliForMultiNodePools.getContextBasedWalletsBaseDir(), - "default.wallet") - - shutil.copy(filePath, alice_wallets_dir) - shutil.copy(filePath, earl_wallets_dir) - - filePath = getWalletFilePath( - cliForMultiNodePools.getContextBasedWalletsBaseDir(), - cliForMultiNodePools.walletFileName) - - shutil.copy(filePath, alice_wallet_path) - shutil.copy(filePath, earl_wallet_path) - - def _f(path): - if not os.path.exists(path): - raise FileNotFoundError("{}".format(path)) - - cliForMultiNodePools.looper.run(eventually(_f, alice_wallet_path)) - cliForMultiNodePools.looper.run(eventually(_f, earl_wallet_path)) - - restartCli(aliceMultiNodePools, be, do, "mykr1", 1) - restartCliWithCorruptedWalletFile(earlMultiNodePools, be, do, earl_wallet_path) diff --git a/indy_client/test/cli/test_send_attrib_validation.py b/indy_client/test/cli/test_send_attrib_validation.py deleted file mode 100644 index 89ac44956..000000000 --- a/indy_client/test/cli/test_send_attrib_validation.py +++ /dev/null @@ -1,1118 +0,0 @@ -import json -from base64 import b64encode -from binascii import hexlify -from hashlib import sha256 - -import pytest -from libnacl import randombytes -from libnacl.secret import SecretBox -from plenum.common.util import rawToFriendly, friendlyToRaw -from stp_core.crypto.util import randomSeed - -from indy_client.test.cli.conftest import trusteeCli -from indy_client.test.cli.constants import ERROR, INVALID_SYNTAX -from indy_client.test.cli.helper import addNym, newKey, \ - createUuidIdentifier, createCryptonym, createHalfKeyIdentifierAndAbbrevVerkey - -ATTRIBUTE_ADDED = 'Attribute added for nym {dest}' - - -@pytest.yield_fixture(scope='function') -def pureLocalCli(CliBuilder): - yield from CliBuilder('Local') - - -@pytest.fixture(scope='function') -def localTrusteeCli(be, do, trusteeMap, poolNodesStarted, - nymAddedOut, pureLocalCli): - return trusteeCli(be, do, trusteeMap, poolNodesStarted, - nymAddedOut, pureLocalCli) - - -def testSendAttribSucceedsForExistingUuidDest( - 
be, do, poolNodesStarted, trusteeCli): - - uuidIdentifier = createUuidIdentifier() - addNym(be, do, trusteeCli, idr=uuidIdentifier) - - parameters = { - 'dest': uuidIdentifier, - 'raw': json.dumps({ - 'name': 'Alice' - }) - } - - be(trusteeCli) - do('send ATTRIB dest={dest} raw={raw}', - mapper=parameters, expect=ATTRIBUTE_ADDED, within=2) - - -def testSendAttribFailsForNotExistingUuidDest( - be, do, poolNodesStarted, trusteeCli): - - uuidIdentifier = createUuidIdentifier() - - parameters = { - 'dest': uuidIdentifier, - 'raw': json.dumps({ - 'name': 'Alice' - }) - } - - be(trusteeCli) - do('send ATTRIB dest={dest} raw={raw}', - mapper=parameters, expect=ERROR, within=2) - - -def testSendAttribSucceedsForExistingDidDest( - be, do, poolNodesStarted, localTrusteeCli): - - seed = randomSeed() - idr, verkey = createHalfKeyIdentifierAndAbbrevVerkey(seed=seed) - - userCli = localTrusteeCli - addNym(be, do, userCli, idr=idr, verkey=verkey) - newKey(be, do, userCli, seed=seed.decode()) - - sendAttribParameters = { - 'dest': idr, - 'raw': json.dumps({ - 'name': 'Alice' - }) - } - - be(userCli) - do('send ATTRIB dest={dest} raw={raw}', - mapper=sendAttribParameters, expect=ATTRIBUTE_ADDED, within=2) - - -def testSendAttribFailsForNotExistingCryptonymDest( - be, do, poolNodesStarted, localTrusteeCli): - - seed = randomSeed() - cryptonym = createCryptonym(seed=seed) - - userCli = localTrusteeCli - newKey(be, do, userCli, seed=seed.decode()) - - sendAttribParameters = { - 'dest': cryptonym, - 'raw': json.dumps({ - 'name': 'Alice' - }) - } - - be(userCli) - do('send ATTRIB dest={dest} raw={raw}', - mapper=sendAttribParameters, expect=ERROR, within=2) - - -def testSendAttribFailsIfDestIsPassedInHexFormat( - be, do, poolNodesStarted, trusteeCli): - - uuidIdentifier = createUuidIdentifier() - addNym(be, do, trusteeCli, idr=uuidIdentifier) - - hexEncodedUuidIdentifier = hexlify(friendlyToRaw(uuidIdentifier)).decode() - - parameters = { - 'dest': hexEncodedUuidIdentifier, - 'raw': 
json.dumps({ - 'name': 'Alice' - }) - } - - be(trusteeCli) - do('send ATTRIB dest={dest} raw={raw}', - mapper=parameters, expect=ERROR, within=2) - - -def testSendAttribFailsIfDestIsInvalid( - be, do, poolNodesStarted, trusteeCli): - - cryptonym = createCryptonym() - invalidIdentifier = cryptonym[:-4] - - sendAttribParameters = { - 'dest': invalidIdentifier, - 'raw': json.dumps({ - 'name': 'Alice' - }) - } - - be(trusteeCli) - do('send ATTRIB dest={dest} raw={raw}', - mapper=sendAttribParameters, expect=ERROR, within=2) - - -def testSendAttribHasInvalidSyntaxIfDestIsEmpty( - be, do, poolNodesStarted, trusteeCli): - - parameters = { - 'dest': '', - 'raw': json.dumps({ - 'name': 'Alice' - }) - } - - be(trusteeCli) - do('send ATTRIB dest={dest} raw={raw}', - mapper=parameters, expect=INVALID_SYNTAX, within=2) - - -def testSendAttribHasInvalidSyntaxIfDestIsOmitted( - be, do, poolNodesStarted, trusteeCli): - - parameters = { - 'raw': json.dumps({ - 'name': 'Alice' - }) - } - - be(trusteeCli) - do('send ATTRIB raw={raw}', - mapper=parameters, expect=INVALID_SYNTAX, within=2) - - -def testSendAttribSucceedsForRawWithOneAttr( - be, do, poolNodesStarted, trusteeCli): - - uuidIdentifier = createUuidIdentifier() - addNym(be, do, trusteeCli, idr=uuidIdentifier) - - parameters = { - 'dest': uuidIdentifier, - 'raw': json.dumps({ - 'name': 'Alice' - }) - } - - be(trusteeCli) - do('send ATTRIB dest={dest} raw={raw}', - mapper=parameters, expect=ATTRIBUTE_ADDED, within=2) - - -def testSendAttribSucceedsForRawWithCompoundAttr( - be, do, poolNodesStarted, trusteeCli): - - uuidIdentifier = createUuidIdentifier() - addNym(be, do, trusteeCli, idr=uuidIdentifier) - - parameters = { - 'dest': uuidIdentifier, - 'raw': json.dumps({ - 'dateOfBirth': { - 'year': 1984, - 'month': 5, - 'dayOfMonth': 23 - } - }) - } - - be(trusteeCli) - do('send ATTRIB dest={dest} raw={raw}', - mapper=parameters, expect=ATTRIBUTE_ADDED, within=2) - - -def testSendAttribSucceedsForRawWithNullifiedAttr( - be, do, 
poolNodesStarted, trusteeCli): - - uuidIdentifier = createUuidIdentifier() - addNym(be, do, trusteeCli, idr=uuidIdentifier) - - parameters = { - 'dest': uuidIdentifier, - 'raw': json.dumps({ - 'name': None - }) - } - - be(trusteeCli) - do('send ATTRIB dest={dest} raw={raw}', - mapper=parameters, expect=ATTRIBUTE_ADDED, within=2) - - -def testSendAttribSucceedsForRawWithEndpointWithHaContainingIpAddrAndPort( - be, do, poolNodesStarted, trusteeCli): - - uuidIdentifier = createUuidIdentifier() - addNym(be, do, trusteeCli, idr=uuidIdentifier) - - parameters = { - 'dest': uuidIdentifier, - 'raw': json.dumps({ - 'endpoint': { - 'ha': '52.11.117.186:6321' - } - }) - } - - be(trusteeCli) - do('send ATTRIB dest={dest} raw={raw}', - mapper=parameters, expect=ATTRIBUTE_ADDED, within=2) - - -def testSendAttribSucceedsForRawWithEndpointWithHaBeingNull( - be, do, poolNodesStarted, trusteeCli): - - uuidIdentifier = createUuidIdentifier() - addNym(be, do, trusteeCli, idr=uuidIdentifier) - - parameters = { - 'dest': uuidIdentifier, - 'raw': json.dumps({ - 'endpoint': { - 'ha': None - } - }) - } - - be(trusteeCli) - do('send ATTRIB dest={dest} raw={raw}', - mapper=parameters, expect=ATTRIBUTE_ADDED, within=2) - - -def testSendAttribSucceedsForRawWithEndpointWithValidHaAndOtherProperties( - be, do, poolNodesStarted, trusteeCli): - - uuidIdentifier = createUuidIdentifier() - addNym(be, do, trusteeCli, idr=uuidIdentifier) - - parameters = { - 'dest': uuidIdentifier, - 'raw': json.dumps({ - 'endpoint': { - 'ha': '52.11.117.186:6321', - 'name': 'SOV Agent', - 'description': 'The SOV agent.' 
- } - }) - } - - be(trusteeCli) - do('send ATTRIB dest={dest} raw={raw}', - mapper=parameters, expect=ATTRIBUTE_ADDED, within=2) - - -def testSendAttribSucceedsForRawWithEndpointWithoutHaButWithOtherProperties( - be, do, poolNodesStarted, trusteeCli): - - uuidIdentifier = createUuidIdentifier() - addNym(be, do, trusteeCli, idr=uuidIdentifier) - - parameters = { - 'dest': uuidIdentifier, - 'raw': json.dumps({ - 'endpoint': { - 'name': 'SOV Agent', - 'description': 'The SOV agent.' - } - }) - } - - be(trusteeCli) - do('send ATTRIB dest={dest} raw={raw}', - mapper=parameters, expect=ATTRIBUTE_ADDED, within=2) - - -def testSendAttribSucceedsForRawWithEndpointWithoutProperties( - be, do, poolNodesStarted, trusteeCli): - - uuidIdentifier = createUuidIdentifier() - addNym(be, do, trusteeCli, idr=uuidIdentifier) - - parameters = { - 'dest': uuidIdentifier, - 'raw': json.dumps({ - 'endpoint': {} - }) - } - - be(trusteeCli) - do('send ATTRIB dest={dest} raw={raw}', - mapper=parameters, expect=ATTRIBUTE_ADDED, within=2) - - -def testSendAttribSucceedsForRawWithEndpointBeingNull( - be, do, poolNodesStarted, trusteeCli): - - uuidIdentifier = createUuidIdentifier() - addNym(be, do, trusteeCli, idr=uuidIdentifier) - - parameters = { - 'dest': uuidIdentifier, - 'raw': json.dumps({ - 'endpoint': None - }) - } - - be(trusteeCli) - do('send ATTRIB dest={dest} raw={raw}', - mapper=parameters, expect=ATTRIBUTE_ADDED, within=2) - - -def testSendAttribFailsForRawWithEndpointWithHaIfIpAddrHasWrongFormat( - be, do, poolNodesStarted, trusteeCli): - - uuidIdentifier = createUuidIdentifier() - addNym(be, do, trusteeCli, idr=uuidIdentifier) - - parameters = { - 'dest': uuidIdentifier, - 'raw': json.dumps({ - 'endpoint': { - 'ha': '52.11.117:6321' - } - }) - } - - be(trusteeCli) - do('send ATTRIB dest={dest} raw={raw}', - mapper=parameters, expect=ERROR, within=2) - - -def testSendAttribFailsForRawWithEndpointWithHaIfSomeIpComponentsAreNegative( - be, do, poolNodesStarted, trusteeCli): - - 
uuidIdentifier = createUuidIdentifier() - addNym(be, do, trusteeCli, idr=uuidIdentifier) - - parameters = { - 'dest': uuidIdentifier, - 'raw': json.dumps({ - 'endpoint': { - 'ha': '52.-1.117.186:6321' - } - }) - } - - be(trusteeCli) - do('send ATTRIB dest={dest} raw={raw}', - mapper=parameters, expect=ERROR, within=2) - - -def testSendAttribFailsForRawWithEndpointWithHaIfSomeIpCompHigherThanUpperBound( - be, do, poolNodesStarted, trusteeCli): - - uuidIdentifier = createUuidIdentifier() - addNym(be, do, trusteeCli, idr=uuidIdentifier) - - parameters = { - 'dest': uuidIdentifier, - 'raw': json.dumps({ - 'endpoint': { - 'ha': '52.11.256.186:6321' - } - }) - } - - be(trusteeCli) - do('send ATTRIB dest={dest} raw={raw}', - mapper=parameters, expect=ERROR, within=2) - - -def testSendAttribFailsForRawWithEndpointWithHaIfIpAddrIsEmpty( - be, do, poolNodesStarted, trusteeCli): - - uuidIdentifier = createUuidIdentifier() - addNym(be, do, trusteeCli, idr=uuidIdentifier) - - parameters = { - 'dest': uuidIdentifier, - 'raw': json.dumps({ - 'endpoint': { - 'ha': ':6321' - } - }) - } - - be(trusteeCli) - do('send ATTRIB dest={dest} raw={raw}', - mapper=parameters, expect=ERROR, within=2) - - -def testSendAttribFailsForRawWithEndpointWithHaIfPortIsNegative( - be, do, poolNodesStarted, trusteeCli): - - uuidIdentifier = createUuidIdentifier() - addNym(be, do, trusteeCli, idr=uuidIdentifier) - - parameters = { - 'dest': uuidIdentifier, - 'raw': json.dumps({ - 'endpoint': { - 'ha': '52.11.117.186:-1' - } - }) - } - - be(trusteeCli) - do('send ATTRIB dest={dest} raw={raw}', - mapper=parameters, expect=ERROR, within=2) - - -def testSendAttribFailsForRawWithEndpointWithHaIfPortIsHigherThanUpperBound( - be, do, poolNodesStarted, trusteeCli): - - uuidIdentifier = createUuidIdentifier() - addNym(be, do, trusteeCli, idr=uuidIdentifier) - - parameters = { - 'dest': uuidIdentifier, - 'raw': json.dumps({ - 'endpoint': { - 'ha': '52.11.117.186:65536' - } - }) - } - - be(trusteeCli) - do('send 
ATTRIB dest={dest} raw={raw}', - mapper=parameters, expect=ERROR, within=2) - - -def testSendAttribFailsForRawWithEndpointWithHaIfPortIsFloat( - be, do, poolNodesStarted, trusteeCli): - - uuidIdentifier = createUuidIdentifier() - addNym(be, do, trusteeCli, idr=uuidIdentifier) - - parameters = { - 'dest': uuidIdentifier, - 'raw': json.dumps({ - 'endpoint': { - 'ha': '52.11.117.186:6321.5' - } - }) - } - - be(trusteeCli) - do('send ATTRIB dest={dest} raw={raw}', - mapper=parameters, expect=ERROR, within=2) - - -def testSendAttribFailsForRawWithEndpointWithHaIfPortHasWrongFormat( - be, do, poolNodesStarted, trusteeCli): - - uuidIdentifier = createUuidIdentifier() - addNym(be, do, trusteeCli, idr=uuidIdentifier) - - parameters = { - 'dest': uuidIdentifier, - 'raw': json.dumps({ - 'endpoint': { - 'ha': '52.11.117.186:ninety' - } - }) - } - - be(trusteeCli) - do('send ATTRIB dest={dest} raw={raw}', - mapper=parameters, expect=ERROR, within=2) - - -def testSendAttribFailsForRawWithEndpointWithHaIfPortIsEmpty( - be, do, poolNodesStarted, trusteeCli): - - uuidIdentifier = createUuidIdentifier() - addNym(be, do, trusteeCli, idr=uuidIdentifier) - - parameters = { - 'dest': uuidIdentifier, - 'raw': json.dumps({ - 'endpoint': { - 'ha': '52.11.117.186:' - } - }) - } - - be(trusteeCli) - do('send ATTRIB dest={dest} raw={raw}', - mapper=parameters, expect=ERROR, within=2) - - -def testSendAttribFailsForRawWithEndpointWithHaContainingIpAddrOnly( - be, do, poolNodesStarted, trusteeCli): - - uuidIdentifier = createUuidIdentifier() - addNym(be, do, trusteeCli, idr=uuidIdentifier) - - parameters = { - 'dest': uuidIdentifier, - 'raw': json.dumps({ - 'endpoint': { - 'ha': '52.11.117.186' - } - }) - } - - be(trusteeCli) - do('send ATTRIB dest={dest} raw={raw}', - mapper=parameters, expect=ERROR, within=2) - - -def testSendAttribFailsForRawWithEndpointWithHaContainingPortOnly( - be, do, poolNodesStarted, trusteeCli): - - uuidIdentifier = createUuidIdentifier() - addNym(be, do, trusteeCli, 
idr=uuidIdentifier) - - parameters = { - 'dest': uuidIdentifier, - 'raw': json.dumps({ - 'endpoint': { - 'ha': '6321' - } - }) - } - - be(trusteeCli) - do('send ATTRIB dest={dest} raw={raw}', - mapper=parameters, expect=ERROR, within=2) - - -def testSendAttribFailsForRawWithEndpointWithHaContainingDomainNameAndPort( - be, do, poolNodesStarted, trusteeCli): - - uuidIdentifier = createUuidIdentifier() - addNym(be, do, trusteeCli, idr=uuidIdentifier) - - parameters = { - 'dest': uuidIdentifier, - 'raw': json.dumps({ - 'endpoint': { - 'ha': 'sovrin.org:6321' - } - }) - } - - be(trusteeCli) - do('send ATTRIB dest={dest} raw={raw}', - mapper=parameters, expect=ERROR, within=2) - - -def testSendAttribFailsForRawWithEndpointWithHaContainingDomainNameOnly( - be, do, poolNodesStarted, trusteeCli): - - uuidIdentifier = createUuidIdentifier() - addNym(be, do, trusteeCli, idr=uuidIdentifier) - - parameters = { - 'dest': uuidIdentifier, - 'raw': json.dumps({ - 'endpoint': { - 'ha': 'sovrin.org' - } - }) - } - - be(trusteeCli) - do('send ATTRIB dest={dest} raw={raw}', - mapper=parameters, expect=ERROR, within=2) - - -def testSendAttribFailsForRawWithEndpointWithHaBeingHumanReadableText( - be, do, poolNodesStarted, trusteeCli): - - uuidIdentifier = createUuidIdentifier() - addNym(be, do, trusteeCli, idr=uuidIdentifier) - - parameters = { - 'dest': uuidIdentifier, - 'raw': json.dumps({ - 'endpoint': { - 'ha': 'This is not a host address.' 
- } - }) - } - - be(trusteeCli) - do('send ATTRIB dest={dest} raw={raw}', - mapper=parameters, expect=ERROR, within=2) - - -def testSendAttribFailsForRawWithEndpointWithHaBeingDecimalNumber( - be, do, poolNodesStarted, trusteeCli): - - uuidIdentifier = createUuidIdentifier() - addNym(be, do, trusteeCli, idr=uuidIdentifier) - - parameters = { - 'dest': uuidIdentifier, - 'raw': json.dumps({ - 'endpoint': { - 'ha': 42 - } - }) - } - - be(trusteeCli) - do('send ATTRIB dest={dest} raw={raw}', - mapper=parameters, expect=ERROR, within=2) - - -def testSendAttribFailsForRawWithEndpointWithEmptyHa( - be, do, poolNodesStarted, trusteeCli): - - uuidIdentifier = createUuidIdentifier() - addNym(be, do, trusteeCli, idr=uuidIdentifier) - - parameters = { - 'dest': uuidIdentifier, - 'raw': json.dumps({ - 'endpoint': { - 'ha': '' - } - }) - } - - be(trusteeCli) - do('send ATTRIB dest={dest} raw={raw}', - mapper=parameters, expect=ERROR, within=2) - - -def testSendAttribFailsForRawWithEndpointBeingEmptyString( - be, do, poolNodesStarted, trusteeCli): - - uuidIdentifier = createUuidIdentifier() - addNym(be, do, trusteeCli, idr=uuidIdentifier) - - parameters = { - 'dest': uuidIdentifier, - 'raw': json.dumps({ - 'endpoint': '' - }) - } - - be(trusteeCli) - do('send ATTRIB dest={dest} raw={raw}', - mapper=parameters, expect=ERROR, within=2) - - -def testSendAttribFailsIfRawContainsMulipleAttrs( - be, do, poolNodesStarted, trusteeCli): - - uuidIdentifier = createUuidIdentifier() - addNym(be, do, trusteeCli, idr=uuidIdentifier) - - parameters = { - 'dest': uuidIdentifier, - 'raw': json.dumps({ - 'name': 'Alice', - 'dateOfBirth': '05/23/2017' - }) - } - - be(trusteeCli) - do('send ATTRIB dest={dest} raw={raw}', - mapper=parameters, expect=ERROR, within=2) - - -def testSendAttribFailsIfRawContainsNoAttrs( - be, do, poolNodesStarted, trusteeCli): - - uuidIdentifier = createUuidIdentifier() - addNym(be, do, trusteeCli, idr=uuidIdentifier) - - parameters = { - 'dest': uuidIdentifier, - 'raw': 
json.dumps({}) - } - - be(trusteeCli) - do('send ATTRIB dest={dest} raw={raw}', - mapper=parameters, expect=ERROR, within=2) - - -def testSendAttribFailsIfRawIsBrokenJson( - be, do, poolNodesStarted, trusteeCli): - - uuidIdentifier = createUuidIdentifier() - addNym(be, do, trusteeCli, idr=uuidIdentifier) - - validJson = json.dumps({ - 'name': 'Alice' - }) - - brokenJson = validJson[:-1] - - parameters = { - 'dest': uuidIdentifier, - 'raw': brokenJson - } - - be(trusteeCli) - do('send ATTRIB dest={dest} raw={raw}', - mapper=parameters, expect=INVALID_SYNTAX, within=2) - - brokenJson = validJson.replace(':', '-') - parameters['raw'] = brokenJson - do('send ATTRIB dest={dest} raw={raw}', - mapper=parameters, expect=ERROR, within=2) - - -def testSendAttribFailsIfRawIsHex( - be, do, poolNodesStarted, trusteeCli): - - uuidIdentifier = createUuidIdentifier() - addNym(be, do, trusteeCli, idr=uuidIdentifier) - - parameters = { - 'dest': uuidIdentifier, - 'raw': hexlify(randombytes(32)).decode() - } - - be(trusteeCli) - do('send ATTRIB dest={dest} raw={raw}', - mapper=parameters, expect=INVALID_SYNTAX, within=2) - - -def testSendAttribFailsIfRawIsHumanReadableText( - be, do, poolNodesStarted, trusteeCli): - - uuidIdentifier = createUuidIdentifier() - addNym(be, do, trusteeCli, idr=uuidIdentifier) - - parameters = { - 'dest': uuidIdentifier, - 'raw': 'This is not a json.' 
- } - - be(trusteeCli) - do('send ATTRIB dest={dest} raw={raw}', - mapper=parameters, expect=INVALID_SYNTAX, within=2) - - -def testSendAttribFailsIfRawIsDecimalNumber( - be, do, poolNodesStarted, trusteeCli): - - uuidIdentifier = createUuidIdentifier() - addNym(be, do, trusteeCli, idr=uuidIdentifier) - - parameters = { - 'dest': uuidIdentifier, - 'raw': 42 - } - - be(trusteeCli) - do('send ATTRIB dest={dest} raw={raw}', - mapper=parameters, expect=INVALID_SYNTAX, within=2) - - -def testSendAttribHasInvalidSyntaxIfRawIsEmptyString( - be, do, poolNodesStarted, trusteeCli): - - uuidIdentifier = createUuidIdentifier() - addNym(be, do, trusteeCli, idr=uuidIdentifier) - - parameters = { - 'dest': uuidIdentifier, - 'raw': '' - } - - be(trusteeCli) - do('send ATTRIB dest={dest} raw={raw}', - mapper=parameters, expect=INVALID_SYNTAX, within=2) - - -def testSendAttribSucceedsForHexSha256Hash( - be, do, poolNodesStarted, trusteeCli): - - uuidIdentifier = createUuidIdentifier() - addNym(be, do, trusteeCli, idr=uuidIdentifier) - - raw = json.dumps({ - 'name': 'Alice' - }) - - parameters = { - 'dest': uuidIdentifier, - 'hash': sha256(raw.encode()).hexdigest() - } - - be(trusteeCli) - do('send ATTRIB dest={dest} hash={hash}', - mapper=parameters, expect=ATTRIBUTE_ADDED, within=2) - - -def testSendAttribSucceedsForHexHashWithLettersInBothCases( - be, do, poolNodesStarted, trusteeCli): - - uuidIdentifier = createUuidIdentifier() - addNym(be, do, trusteeCli, idr=uuidIdentifier) - - parameters = { - 'dest': uuidIdentifier, - 'hash': '6d4a333838d0ef96756cccC680AF2531075C512502Fb68c5503c63d93de859b3' - } - - be(trusteeCli) - do('send ATTRIB dest={dest} hash={hash}', - mapper=parameters, expect=ATTRIBUTE_ADDED, within=2) - - -def testSendAttribFailsForHashShorterThanSha256( - be, do, poolNodesStarted, trusteeCli): - - uuidIdentifier = createUuidIdentifier() - addNym(be, do, trusteeCli, idr=uuidIdentifier) - - parameters = { - 'dest': uuidIdentifier, - 'hash': 
hexlify(randombytes(31)).decode() - } - - be(trusteeCli) - do('send ATTRIB dest={dest} hash={hash}', - mapper=parameters, expect=ERROR, within=2) - - -def testSendAttribFailsForHashLongerThanSha256( - be, do, poolNodesStarted, trusteeCli): - - uuidIdentifier = createUuidIdentifier() - addNym(be, do, trusteeCli, idr=uuidIdentifier) - - parameters = { - 'dest': uuidIdentifier, - 'hash': hexlify(randombytes(33)).decode() - } - - be(trusteeCli) - do('send ATTRIB dest={dest} hash={hash}', - mapper=parameters, expect=ERROR, within=2) - - -def testSendAttribFailsForBase58Hash( - be, do, poolNodesStarted, trusteeCli): - - uuidIdentifier = createUuidIdentifier() - addNym(be, do, trusteeCli, idr=uuidIdentifier) - - raw = json.dumps({ - 'name': 'Alice' - }) - - hash = sha256(raw.encode()).digest() - - parameters = { - 'dest': uuidIdentifier, - 'hash': rawToFriendly(hash) - } - - be(trusteeCli) - do('send ATTRIB dest={dest} hash={hash}', - mapper=parameters, expect=INVALID_SYNTAX, within=2) - - -def testSendAttribFailsForBase64Hash( - be, do, poolNodesStarted, trusteeCli): - - uuidIdentifier = createUuidIdentifier() - addNym(be, do, trusteeCli, idr=uuidIdentifier) - - raw = json.dumps({ - 'name': 'Alice' - }) - - hash = sha256(raw.encode()).digest() - - parameters = { - 'dest': uuidIdentifier, - 'hash': b64encode(hash).decode() - } - - be(trusteeCli) - do('send ATTRIB dest={dest} hash={hash}', - mapper=parameters, expect=INVALID_SYNTAX, within=2) - - -def testSendAttribHasInvalidSyntaxIfHashIsEmpty( - be, do, poolNodesStarted, trusteeCli): - - uuidIdentifier = createUuidIdentifier() - addNym(be, do, trusteeCli, idr=uuidIdentifier) - - parameters = { - 'dest': uuidIdentifier, - 'hash': '' - } - - be(trusteeCli) - do('send ATTRIB dest={dest} hash={hash}', - mapper=parameters, expect=INVALID_SYNTAX, within=2) - - -def testSendAttribSucceedsForNonEmptyEnc( - be, do, poolNodesStarted, trusteeCli): - - uuidIdentifier = createUuidIdentifier() - addNym(be, do, trusteeCli, 
idr=uuidIdentifier) - - raw = json.dumps({ - 'name': 'Alice' - }) - - secretBox = SecretBox() - - parameters = { - 'dest': uuidIdentifier, - 'enc': secretBox.encrypt(raw.encode()).hex() - } - - be(trusteeCli) - do('send ATTRIB dest={dest} enc={enc}', - mapper=parameters, expect=ATTRIBUTE_ADDED, within=2) - - -def testSendAttribHasInvalidSyntaxIfEncIsEmpty( - be, do, poolNodesStarted, trusteeCli): - - uuidIdentifier = createUuidIdentifier() - addNym(be, do, trusteeCli, idr=uuidIdentifier) - - parameters = { - 'dest': uuidIdentifier, - 'enc': '' - } - - be(trusteeCli) - do('send ATTRIB dest={dest} enc={enc}', - mapper=parameters, expect=INVALID_SYNTAX, within=2) - - -def testSendAttribHasInvalidSyntaxIfRawAndHashPassedAtSameTime( - be, do, poolNodesStarted, trusteeCli): - - uuidIdentifier = createUuidIdentifier() - addNym(be, do, trusteeCli, idr=uuidIdentifier) - - raw = json.dumps({ - 'name': 'Alice' - }) - - parameters = { - 'dest': uuidIdentifier, - 'raw': raw, - 'hash': sha256(raw.encode()).hexdigest() - } - - be(trusteeCli) - do('send ATTRIB dest={dest} raw={raw} hash={hash}', - mapper=parameters, expect=INVALID_SYNTAX, within=2) - - -def testSendAttribHasInvalidSyntaxIfRawAndEncPassedAtSameTime( - be, do, poolNodesStarted, trusteeCli): - - uuidIdentifier = createUuidIdentifier() - addNym(be, do, trusteeCli, idr=uuidIdentifier) - - raw = json.dumps({ - 'name': 'Alice' - }) - - secretBox = SecretBox() - - parameters = { - 'dest': uuidIdentifier, - 'raw': raw, - 'enc': secretBox.encrypt(raw.encode()).hex() - } - - be(trusteeCli) - do('send ATTRIB dest={dest} raw={raw} enc={enc}', - mapper=parameters, expect=INVALID_SYNTAX, within=2) - - -def testSendAttribHasInvalidSyntaxIfHashAndEncPassedAtSameTime( - be, do, poolNodesStarted, trusteeCli): - - uuidIdentifier = createUuidIdentifier() - addNym(be, do, trusteeCli, idr=uuidIdentifier) - - raw = json.dumps({ - 'name': 'Alice' - }) - - secretBox = SecretBox() - encryptedRaw = secretBox.encrypt(raw.encode()) - - 
parameters = { - 'dest': uuidIdentifier, - 'hash': sha256(encryptedRaw).hexdigest(), - 'enc': encryptedRaw.hex() - } - - be(trusteeCli) - do('send ATTRIB dest={dest} hash={hash} enc={enc}', - mapper=parameters, expect=INVALID_SYNTAX, within=2) - - -def testSendAttribHasInvalidSyntaxIfRawHashAndEncPassedAtSameTime( - be, do, poolNodesStarted, trusteeCli): - - uuidIdentifier = createUuidIdentifier() - addNym(be, do, trusteeCli, idr=uuidIdentifier) - - raw = json.dumps({ - 'name': 'Alice' - }) - - secretBox = SecretBox() - encryptedRaw = secretBox.encrypt(raw.encode()) - - parameters = { - 'dest': uuidIdentifier, - 'raw': raw, - 'hash': sha256(encryptedRaw).hexdigest(), - 'enc': encryptedRaw.hex() - } - - be(trusteeCli) - do('send ATTRIB dest={dest} raw={raw} hash={hash} enc={enc}', - mapper=parameters, expect=INVALID_SYNTAX, within=2) - - -def testSendAttribHasInvalidSyntaxIfUnknownParameterIsPassed( - be, do, poolNodesStarted, trusteeCli): - - uuidIdentifier = createUuidIdentifier() - addNym(be, do, trusteeCli, idr=uuidIdentifier) - - parameters = { - 'dest': uuidIdentifier, - 'raw': json.dumps({ - 'name': 'Alice' - }), - 'extra': 42 - } - - be(trusteeCli) - do('send ATTRIB dest={dest} raw={raw} extra={extra}', - mapper=parameters, expect=INVALID_SYNTAX, within=2) - - -def testSendAttribHasInvalidSyntaxIfParametersOrderIsWrong( - be, do, poolNodesStarted, trusteeCli): - - uuidIdentifier = createUuidIdentifier() - addNym(be, do, trusteeCli, idr=uuidIdentifier) - - parameters = { - 'dest': uuidIdentifier, - 'raw': json.dumps({ - 'name': 'Alice' - }) - } - - be(trusteeCli) - do('send ATTRIB raw={raw} dest={dest}', - mapper=parameters, expect=INVALID_SYNTAX, within=2) - - -def testSendAttribHasInvalidSyntaxIfAllParametersAreOmitted( - be, do, poolNodesStarted, trusteeCli): - - be(trusteeCli) - do('send ATTRIB', expect=INVALID_SYNTAX, within=2) diff --git a/indy_client/test/cli/test_send_claim_def.py b/indy_client/test/cli/test_send_claim_def.py deleted file mode 100644 
index 8ae7e0439..000000000 --- a/indy_client/test/cli/test_send_claim_def.py +++ /dev/null @@ -1,104 +0,0 @@ -from indy_client.test.cli.constants import SCHEMA_ADDED, CLAIM_DEF_ADDED -from indy_client.test.cli.helper import getSeqNoFromCliOutput - - -def test_send_claim_def_succeeds( - be, do, poolNodesStarted, trusteeCli): - be(trusteeCli) - - do('send SCHEMA name=Degree version=1.0' - ' keys=undergrad,last_name,first_name,birth_date,postgrad,expiry_date', - expect=SCHEMA_ADDED, - within=5) - - schemaTxnSeqNo = getSeqNoFromCliOutput(trusteeCli) - - do('send CLAIM_DEF ref={ref} signature_type=CL', - expect=CLAIM_DEF_ADDED, - mapper={'ref': schemaTxnSeqNo}, - within=239) - - -def test_send_claim_def_fails_if_ref_is_seqno_of_non_schema_txn( - be, do, poolNodesStarted, trusteeCli): - be(trusteeCli) - - do('send SCHEMA name=Degree version=1.1' - ' keys=undergrad,last_name,first_name,birth_date,postgrad,expiry_date', - expect=SCHEMA_ADDED, - within=5) - - schemaTxnSeqNo = getSeqNoFromCliOutput(trusteeCli) - - do('send CLAIM_DEF ref={ref} signature_type=CL', - expect=CLAIM_DEF_ADDED, - mapper={'ref': schemaTxnSeqNo}, - within=239) - - firstClaimDefSeqNo = getSeqNoFromCliOutput(trusteeCli) - - do('send CLAIM_DEF ref={ref} signature_type=CL', - expect='Schema with seqNo {ref} not found', - mapper={'ref': firstClaimDefSeqNo}, - within=5) - - -def test_send_claim_def_fails_if_ref_is_not_existing_seqno( - be, do, poolNodesStarted, trusteeCli): - be(trusteeCli) - - do('send SCHEMA name=Degree version=1.2' - ' keys=undergrad,last_name,first_name,birth_date,postgrad,expiry_date', - expect=SCHEMA_ADDED, within=5) - - schemaTxnSeqNo = getSeqNoFromCliOutput(trusteeCli) - - do('send CLAIM_DEF ref={ref} signature_type=CL', - expect='Schema with seqNo {ref} not found', - mapper={'ref': schemaTxnSeqNo + 1}, - within=5) - - -def test_update_claim_def_for_same_schema_and_signature_type( - be, do, trusteeCli): - be(trusteeCli) - - do('send SCHEMA name=Degree version=1.3' - ' 
keys=undergrad,last_name,first_name,birth_date,postgrad,expiry_date', - expect=SCHEMA_ADDED, - within=5) - - schemaTxnSeqNo = getSeqNoFromCliOutput(trusteeCli) - - do('send CLAIM_DEF ref={ref} signature_type=CL', - expect=CLAIM_DEF_ADDED, - mapper={'ref': schemaTxnSeqNo}, - within=239) - - do('send CLAIM_DEF ref={ref} signature_type=CL', - expect=CLAIM_DEF_ADDED, - mapper={'ref': schemaTxnSeqNo}, - within=239) - - -def test_can_send_same_claim_def_by_different_issuers( - be, do, poolNodesStarted, trusteeCli, newStewardCli): - be(trusteeCli) - - do('send SCHEMA name=Degree version=1.4' - ' keys=undergrad,last_name,first_name,birth_date,postgrad,expiry_date', - expect=SCHEMA_ADDED, - within=5) - - schemaTxnSeqNo = getSeqNoFromCliOutput(trusteeCli) - - do('send CLAIM_DEF ref={ref} signature_type=CL', - expect=CLAIM_DEF_ADDED, - mapper={'ref': schemaTxnSeqNo}, - within=239) - - be(newStewardCli) - do('send CLAIM_DEF ref={ref} signature_type=CL', - expect=CLAIM_DEF_ADDED, - mapper={'ref': schemaTxnSeqNo}, - within=239) diff --git a/indy_client/test/cli/test_send_get_attr.py b/indy_client/test/cli/test_send_get_attr.py deleted file mode 100644 index 2e51fbee4..000000000 --- a/indy_client/test/cli/test_send_get_attr.py +++ /dev/null @@ -1,154 +0,0 @@ -import pytest -import json - -from libnacl.secret import SecretBox -from hashlib import sha256 - -from indy_client.test.cli.constants import INVALID_SYNTAX -from indy_client.test.cli.helper import createUuidIdentifier, addNym - - -attrib_name = 'dateOfBirth' - -secretBox = SecretBox() -enc_data = secretBox.encrypt(json.dumps({'name': 'Alice'}).encode()).hex() -hash_data = sha256(json.dumps({'name': 'Alice'}).encode()).hexdigest() - -FOUND_ATTRIBUTE = 'Found attribute' -ATTRIBUTE_ADDED = 'Attribute added for nym {valid_dest}' -RETURNED_RAW_DATA = [FOUND_ATTRIBUTE, attrib_name, 'dayOfMonth', 'year', 'month'] -RETURNED_ENC_DATA = [FOUND_ATTRIBUTE, enc_data] -RETURNED_HASH_DATA = [FOUND_ATTRIBUTE, hash_data] -ATTR_NOT_FOUND = 
'Attr not found' - - -@pytest.fixture(scope="module") -def send_raw_attrib(be, do, poolNodesStarted, trusteeCli): - - valid_identifier = createUuidIdentifier() - invalid_identifier = createUuidIdentifier() - addNym(be, do, trusteeCli, idr=valid_identifier) - - parameters = { - 'attrib_name': attrib_name, - 'valid_dest': valid_identifier, - 'invalid_dest': invalid_identifier, - 'raw': json.dumps({ - attrib_name: { - 'dayOfMonth': 23, - 'year': 1984, - 'month': 5 - } - }) - } - - be(trusteeCli) - do('send ATTRIB dest={valid_dest} raw={raw}', - mapper=parameters, expect=ATTRIBUTE_ADDED, within=2) - - return parameters - - -@pytest.fixture(scope="module") -def send_enc_attrib(be, do, poolNodesStarted, trusteeCli): - - valid_identifier = createUuidIdentifier() - invalid_identifier = createUuidIdentifier() - addNym(be, do, trusteeCli, idr=valid_identifier) - - parameters = { - 'valid_dest': valid_identifier, - 'invalid_dest': invalid_identifier, - 'enc': enc_data - } - - be(trusteeCli) - do('send ATTRIB dest={valid_dest} enc={enc}', - mapper=parameters, expect=ATTRIBUTE_ADDED, within=2) - - return parameters - - -@pytest.fixture(scope="module") -def send_hash_attrib(be, do, poolNodesStarted, trusteeCli): - - valid_identifier = createUuidIdentifier() - invalid_identifier = createUuidIdentifier() - addNym(be, do, trusteeCli, idr=valid_identifier) - - parameters = { - 'valid_dest': valid_identifier, - 'invalid_dest': invalid_identifier, - 'hash': hash_data - } - - be(trusteeCli) - do('send ATTRIB dest={valid_dest} hash={hash}', - mapper=parameters, expect=ATTRIBUTE_ADDED, within=2) - - return parameters - - -def test_send_get_attr_succeeds_for_existing_uuid_dest( - be, do, poolNodesStarted, trusteeCli, send_raw_attrib): - - be(trusteeCli) - do('send GET_ATTR dest={valid_dest} raw={attrib_name}', - mapper=send_raw_attrib, expect=RETURNED_RAW_DATA, within=2) - - -def test_send_get_attr_fails_for_invalid_uuid_dest( - be, do, poolNodesStarted, trusteeCli, send_raw_attrib): - - 
do('send GET_ATTR dest={invalid_dest} raw={attrib_name}', - mapper=send_raw_attrib, expect=ATTR_NOT_FOUND, within=2) - - -def test_send_get_attr_fails_for_nonexistent_uuid_dest( - be, do, poolNodesStarted, trusteeCli, send_raw_attrib): - - with pytest.raises(AssertionError) as excinfo: - do('send GET_ATTR dest=this_is_not_valid raw={attrib_name}', - mapper=send_raw_attrib, expect=ATTR_NOT_FOUND, within=2) - assert(INVALID_SYNTAX in str(excinfo.value)) - - -def test_send_get_attr_fails_for_invalid_attrib( - be, do, poolNodesStarted, trusteeCli, send_raw_attrib): - - do('send GET_ATTR dest={valid_dest} raw=badname', - mapper=send_raw_attrib, expect=ATTR_NOT_FOUND, within=2) - - -def test_send_get_attr_fails_with_missing_dest( - be, do, poolNodesStarted, trusteeCli, send_raw_attrib): - - with pytest.raises(AssertionError) as excinfo: - do('send GET_ATTR raw={attrib_name}', - mapper=send_raw_attrib, expect=ATTR_NOT_FOUND, within=2) - assert(INVALID_SYNTAX in str(excinfo.value)) - - -def test_send_get_attr_fails_with_missing_attrib( - be, do, poolNodesStarted, trusteeCli, send_raw_attrib): - - with pytest.raises(AssertionError) as excinfo: - do('send GET_ATTR dest={valid_dest}', - mapper=send_raw_attrib, expect=ATTR_NOT_FOUND, within=2) - assert(INVALID_SYNTAX in str(excinfo.value)) - - -def test_send_get_attr_enc_succeeds_for_existing_uuid_dest( - be, do, poolNodesStarted, trusteeCli, send_enc_attrib): - - be(trusteeCli) - do('send GET_ATTR dest={valid_dest} enc={enc}', - mapper=send_enc_attrib, expect=RETURNED_ENC_DATA, within=2) - - -def test_send_get_attr_hash_succeeds_for_existing_uuid_dest( - be, do, poolNodesStarted, trusteeCli, send_hash_attrib): - - be(trusteeCli) - do('send GET_ATTR dest={valid_dest} hash={hash}', - mapper=send_hash_attrib, expect=RETURNED_HASH_DATA, within=2) diff --git a/indy_client/test/cli/test_send_get_claim_def.py b/indy_client/test/cli/test_send_get_claim_def.py deleted file mode 100644 index 669720338..000000000 --- 
a/indy_client/test/cli/test_send_get_claim_def.py +++ /dev/null @@ -1,88 +0,0 @@ -import pytest -from indy_client.test.cli.helper import connect_and_check_output -from indy_node.test.did.conftest import wallet -from indy_client.test.cli.constants import INVALID_SYNTAX, SCHEMA_ADDED, \ - CLAIM_DEF_ADDED - -CLAIM_DEF_FOUND = ['Found claim def', 'attrib1', 'attrib2', 'attrib3'] -CLAIM_DEF_NOT_FOUND = 'Claim def not found' - - -@pytest.fixture(scope="module") -def create_schema_and_claim_def(be, do, poolNodesStarted, trusteeCli): - - be(trusteeCli) - do('send SCHEMA name=Degree version=1.0 keys=attrib1,attrib2,attrib3', - expect=SCHEMA_ADDED, within=5) - - RefNo = 0 - for s in trusteeCli.lastPrintArgs['msg'].split(): - if s.isdigit(): - RefNo = int(s) - - assert(RefNo > 0) - - # genKeys can take a random amount of time (genPrime) - do('send CLAIM_DEF ref={} signature_type=CL'.format(RefNo), - expect=CLAIM_DEF_ADDED, within=239) - - return RefNo - - -@pytest.fixture(scope="module") -def aliceCli(be, do, poolNodesStarted, aliceCLI, wallet): - keyseed = 'a' * 32 - - be(aliceCLI) - addAndActivateCLIWallet(aliceCLI, wallet) - connect_and_check_output(do, aliceCLI.txn_dir) - do('new key with seed {}'.format(keyseed)) - - return aliceCLI - - -def addAndActivateCLIWallet(cli, wallet): - cli.wallets[wallet.name] = wallet - cli.activeWallet = wallet - - -def test_send_get_claim_def_succeeds(be, do, poolNodesStarted, - trusteeCli, create_schema_and_claim_def): - - be(trusteeCli) - RefNo = create_schema_and_claim_def - do('send GET_CLAIM_DEF ref={} signature_type=CL'.format(RefNo), - expect=CLAIM_DEF_FOUND, within=5) - - -def test_send_get_claim_def_as_alice_fails( - be, - do, - poolNodesStarted, - trusteeCli, - create_schema_and_claim_def, - aliceCli): - - be(aliceCli) - RefNo = create_schema_and_claim_def - do('send GET_CLAIM_DEF ref={} signature_type=CL'.format(RefNo), - expect=CLAIM_DEF_NOT_FOUND, within=5) - - -def test_send_get_claim_def_with_invalid_ref_fails(be, do, 
poolNodesStarted, - trusteeCli): - - be(trusteeCli) - do('send GET_CLAIM_DEF ref=500 signature_type=CL', - expect=CLAIM_DEF_NOT_FOUND, within=5) - - -def test_send_get_claim_def_with_invalid_signature_fails( - be, do, poolNodesStarted, trusteeCli, create_schema_and_claim_def): - - be(trusteeCli) - RefNo = create_schema_and_claim_def - with pytest.raises(AssertionError) as excinfo: - do('send GET_CLAIM_DEF ref={} signature_type=garbage'.format(RefNo), - expect=CLAIM_DEF_NOT_FOUND, within=5) - assert(INVALID_SYNTAX in str(excinfo.value)) diff --git a/indy_client/test/cli/test_send_get_nym_validation.py b/indy_client/test/cli/test_send_get_nym_validation.py deleted file mode 100644 index 2a691860a..000000000 --- a/indy_client/test/cli/test_send_get_nym_validation.py +++ /dev/null @@ -1,153 +0,0 @@ -from binascii import hexlify - -import pytest -from plenum.common.util import friendlyToRaw - -from indy_client.test.cli.constants import INVALID_SYNTAX -from indy_client.test.cli.helper import createUuidIdentifier, addNym, \ - createHalfKeyIdentifierAndAbbrevVerkey, createCryptonym -from indy_common.roles import Roles -from indy_node.test.helper import check_str_is_base58_compatible - -CURRENT_VERKEY_FOR_NYM = 'Current verkey for NYM {dest} is {verkey}' -CURRENT_VERKEY_FOR_NYM_WITH_ROLE = 'Current verkey for NYM {dest} is ' \ - '{verkey} with role {role}' -CURRENT_VERKEY_IS_SAME_AS_IDENTIFIER = \ - 'Current verkey is same as identifier {dest}' -NYM_NOT_FOUND = 'NYM {dest} not found' - - -def testSendGetNymSucceedsForExistingUuidDest( - be, do, poolNodesStarted, trusteeCli): - - uuidIdentifier, abbrevVerkey = createHalfKeyIdentifierAndAbbrevVerkey() - addNym(be, do, trusteeCli, idr=uuidIdentifier, verkey=abbrevVerkey) - - parameters = { - 'dest': uuidIdentifier, - 'verkey': abbrevVerkey - } - - be(trusteeCli) - do('send GET_NYM dest={dest}', - mapper=parameters, expect=CURRENT_VERKEY_FOR_NYM, within=2) - - -def testSendGetNymFailsForNotExistingUuidDest( - be, do, 
poolNodesStarted, trusteeCli): - - parameters = { - 'dest': createUuidIdentifier() - } - - be(trusteeCli) - do('send GET_NYM dest={dest}', - mapper=parameters, expect=NYM_NOT_FOUND, within=2) - - -def test_get_nym_returns_role( - be, do, poolNodesStarted, trusteeCli): - current_role = Roles.TRUST_ANCHOR - uuidIdentifier, abbrevVerkey = createHalfKeyIdentifierAndAbbrevVerkey() - addNym(be, do, trusteeCli, idr=uuidIdentifier, verkey=abbrevVerkey, - role=current_role) - - parameters = { - 'dest': uuidIdentifier, - 'verkey': abbrevVerkey, - 'role': current_role - } - - do('send GET_NYM dest={dest}', - mapper=parameters, expect=CURRENT_VERKEY_FOR_NYM_WITH_ROLE, within=2) - new_role = '' - addNym(be, do, trusteeCli, idr=uuidIdentifier, verkey=abbrevVerkey, - role=new_role) - do('send GET_NYM dest={dest}', - mapper=parameters, expect=CURRENT_VERKEY_FOR_NYM, within=2) - - -def testSendGetNymFailsIfCryptonymIsPassedAsDest( - be, do, poolNodesStarted, trusteeCli): - - parameters = { - 'dest': createCryptonym() - } - - be(trusteeCli) - do('send GET_NYM dest={dest}', - mapper=parameters, expect=NYM_NOT_FOUND, within=2) - - -def testSendGetNymFailsIfDestIsPassedInHexFormat( - be, do, poolNodesStarted, trusteeCli): - - # Sometimes hex representation can use only base58 compatible characters - while True: - uuidIdentifier, abbrevVerkey = createHalfKeyIdentifierAndAbbrevVerkey() - hexEncodedUuidIdentifier = hexlify( - friendlyToRaw(uuidIdentifier)).decode() - if not check_str_is_base58_compatible(hexEncodedUuidIdentifier): - break - - addNym(be, do, trusteeCli, idr=uuidIdentifier, verkey=abbrevVerkey) - - parameters = { - 'dest': hexEncodedUuidIdentifier - } - - be(trusteeCli) - do('send GET_NYM dest={dest}', - mapper=parameters, - expect="should not contain the following chars", - within=2) - - -def testSendGetNymFailsIfDestIsInvalid( - be, do, poolNodesStarted, trusteeCli): - - uuidIdentifier = createUuidIdentifier() - invalidIdentifier = uuidIdentifier[:-4] - - parameters = { - 
'dest': invalidIdentifier - } - - be(trusteeCli) - do('send GET_NYM dest={dest}', - mapper=parameters, expect="b58 decoded value length", within=2) - - -def testSendGetNymHasInvalidSyntaxIfDestIsEmpty( - be, do, poolNodesStarted, trusteeCli): - - parameters = { - 'dest': '' - } - - be(trusteeCli) - do('send GET_NYM dest={dest}', - mapper=parameters, expect=INVALID_SYNTAX, within=2) - - -def testSendGetNymHasInvalidSyntaxIfDestIsOmitted( - be, do, poolNodesStarted, trusteeCli): - - be(trusteeCli) - do('send GET_NYM', expect=INVALID_SYNTAX, within=2) - - -def testSendGetNymHasInvalidSyntaxIfUnknownParameterIsPassed( - be, do, poolNodesStarted, trusteeCli): - - uuidIdentifier, abbrevVerkey = createHalfKeyIdentifierAndAbbrevVerkey() - addNym(be, do, trusteeCli, idr=uuidIdentifier, verkey=abbrevVerkey) - - parameters = { - 'dest': uuidIdentifier, - 'extra': 42 - } - - be(trusteeCli) - do('send GET_NYM dest={dest} extra={extra}', - mapper=parameters, expect=INVALID_SYNTAX, within=2) diff --git a/indy_client/test/cli/test_send_get_schema.py b/indy_client/test/cli/test_send_get_schema.py deleted file mode 100644 index 2ea8cf476..000000000 --- a/indy_client/test/cli/test_send_get_schema.py +++ /dev/null @@ -1,107 +0,0 @@ -import pytest -from indy_client.test.cli.constants import INVALID_SYNTAX, SCHEMA_ADDED -from indy_client.test.cli.helper import createUuidIdentifier -from indy_node.test.did.conftest import wallet -from indy_client.test.cli.helper import connect_and_check_output - - -SCHEMA_FOUND = ['Found schema', 'Degree', - '1.0', 'attrib1', 'attrib2', 'attrib3'] -SCHEMA_NOT_FOUND = 'Schema not found' - - -@pytest.fixture(scope="module") -def aliceCli(be, do, poolNodesStarted, aliceCLI, wallet): - keyseed = 'a' * 32 - - be(aliceCLI) - addAndActivateCLIWallet(aliceCLI, wallet) - connect_and_check_output(do, aliceCLI.txn_dir) - do('new key with seed {}'.format(keyseed)) - - return aliceCLI - - -def addAndActivateCLIWallet(cli, wallet): - cli.wallets[wallet.name] = wallet 
- cli.activeWallet = wallet - - -@pytest.fixture(scope="module") -def send_schema(be, do, poolNodesStarted, trusteeCli): - - be(trusteeCli) - do('send SCHEMA name=Degree version=1.0 keys=attrib1,attrib2,attrib3', - expect=SCHEMA_ADDED, within=5) - - -def test_send_get_schema_succeeds( - be, do, poolNodesStarted, trusteeCli, send_schema): - - do('send GET_SCHEMA dest={} name=Degree version=1.0'.format( - trusteeCli.activeDID), expect=SCHEMA_FOUND, within=5) - - -def test_send_get_schema_as_alice( - be, do, poolNodesStarted, trusteeCli, send_schema, aliceCli): - - be(aliceCli) - do('send GET_SCHEMA dest={} name=Degree version=1.0'.format( - trusteeCli.activeDID), expect=SCHEMA_FOUND, within=5) - - -def test_send_get_schema_fails_with_invalid_name( - be, do, poolNodesStarted, trusteeCli, send_schema): - - do('send GET_SCHEMA dest={} name=invalid version=1.0'.format( - trusteeCli.activeDID), expect=SCHEMA_NOT_FOUND, within=5) - - -def test_send_get_schema_fails_with_invalid_dest( - be, do, poolNodesStarted, trusteeCli, send_schema): - - uuid_identifier = createUuidIdentifier() - do('send GET_SCHEMA dest={} name=invalid version=1.0'.format( - uuid_identifier), expect=SCHEMA_NOT_FOUND, within=5) - - -def test_send_get_schema_fails_with_invalid_version( - be, do, poolNodesStarted, trusteeCli, send_schema): - do('send GET_SCHEMA dest={} name=Degree version=2.0'.format( - trusteeCli.activeDID), expect=SCHEMA_NOT_FOUND, within=5) - - -def test_send_get_schema_fails_with_invalid_version_syntax( - be, do, poolNodesStarted, trusteeCli, send_schema): - - with pytest.raises(AssertionError) as excinfo: - do('send GET_SCHEMA dest={} name=Degree version=asdf'.format( - trusteeCli.activeDID), expect=SCHEMA_NOT_FOUND, within=5) - assert(INVALID_SYNTAX in str(excinfo.value)) - - -def test_send_get_schema_fails_without_version( - be, do, poolNodesStarted, trusteeCli, send_schema): - - with pytest.raises(AssertionError) as excinfo: - do('send GET_SCHEMA dest={} 
name=Degree'.format(trusteeCli.activeDID), - expect=SCHEMA_NOT_FOUND, within=5) - assert(INVALID_SYNTAX in str(excinfo.value)) - - -def test_send_get_schema_fails_without_name( - be, do, poolNodesStarted, trusteeCli, send_schema): - - with pytest.raises(AssertionError) as excinfo: - do('send GET_SCHEMA dest={} version=1.0'.format(trusteeCli.activeDID), - expect=SCHEMA_NOT_FOUND, within=5) - assert(INVALID_SYNTAX in str(excinfo.value)) - - -def test_send_get_schema_fails_without_dest( - be, do, poolNodesStarted, trusteeCli, send_schema): - - with pytest.raises(AssertionError) as excinfo: - do('send GET_SCHEMA name=Degree version=1.0', - expect=SCHEMA_NOT_FOUND, within=5) - assert(INVALID_SYNTAX in str(excinfo.value)) diff --git a/indy_client/test/cli/test_send_node_validation.py b/indy_client/test/cli/test_send_node_validation.py deleted file mode 100644 index 346ce98c4..000000000 --- a/indy_client/test/cli/test_send_node_validation.py +++ /dev/null @@ -1,609 +0,0 @@ -import pytest -from plenum.common.signer_did import DidSigner - -from stp_core.crypto.util import randomSeed - -from plenum.common.constants import NODE_IP, NODE_PORT, CLIENT_IP, CLIENT_PORT, \ - ALIAS, SERVICES, VALIDATOR -from plenum.common.signer_simple import SimpleSigner -from plenum.common.util import cryptonymToHex, randomString -from indy_client.test.cli.conftest import newStewardCli as getNewStewardCli, \ - newStewardVals as getNewStewardVals, newNodeVals as getNewNodeVals -from indy_client.test.cli.constants import NODE_REQUEST_COMPLETED, NODE_REQUEST_FAILED, INVALID_SYNTAX -from indy_client.test.cli.helper import addAgent - -NYM_ADDED = "Nym {remote} added" - - -@pytest.yield_fixture(scope="function") -def cliWithRandomName(CliBuilder): - yield from CliBuilder(randomString(6)) - - -@pytest.fixture(scope="function") -def newStewardVals(): - return getNewStewardVals() - - -@pytest.fixture(scope="function") -def newNodeVals(): - return getNewNodeVals() - - -@pytest.fixture(scope="function") 
-def newStewardCli(be, do, poolNodesStarted, trusteeCli, - cliWithRandomName, newStewardVals): - return getNewStewardCli(be, do, poolNodesStarted, trusteeCli, - cliWithRandomName, newStewardVals) - - -def ensurePoolIsOperable(be, do, cli): - randomNymMapper = { - 'remote': DidSigner(seed=randomSeed()).identifier - } - addAgent(be, do, cli, randomNymMapper) - - -def testSendNodeSucceedsIfServicesIsArrayWithValidatorValueOnly( - be, do, poolNodesStarted, newStewardCli, newNodeVals): - - newNodeVals['newNodeData'][SERVICES] = [VALIDATOR] - - be(newStewardCli) - do('send NODE dest={newNodeIdr} data={newNodeData}', - mapper=newNodeVals, expect=NODE_REQUEST_COMPLETED, within=8) - - ensurePoolIsOperable(be, do, newStewardCli) - - -def testSendNodeSucceedsIfServicesIsEmptyArray( - be, do, poolNodesStarted, newStewardCli, newNodeVals): - - newNodeVals['newNodeData'][SERVICES] = [] - - be(newStewardCli) - do('send NODE dest={newNodeIdr} data={newNodeData}', - mapper=newNodeVals, expect=NODE_REQUEST_COMPLETED, within=8) - - ensurePoolIsOperable(be, do, newStewardCli) - - -def testSendNodeFailsIfDestIsSmallDecimalNumber( - be, do, poolNodesStarted, newStewardCli, newNodeVals): - - newNodeVals['newNodeIdr'] = 42 - - be(newStewardCli) - do('send NODE dest={newNodeIdr} data={newNodeData}', - mapper=newNodeVals, expect=NODE_REQUEST_FAILED, within=8) - - ensurePoolIsOperable(be, do, newStewardCli) - - -def testSendNodeFailsIfDestIsShortReadableName( - be, do, poolNodesStarted, newStewardCli, newNodeVals): - - newNodeVals['newNodeIdr'] = 'TheNewNode' - - be(newStewardCli) - do('send NODE dest={newNodeIdr} data={newNodeData}', - mapper=newNodeVals, expect=NODE_REQUEST_FAILED, within=8) - - ensurePoolIsOperable(be, do, newStewardCli) - - -def testSendNodeFailsIfDestIsHexKey( - be, do, poolNodesStarted, newStewardCli, newNodeVals): - - newNodeVals['newNodeIdr'] = cryptonymToHex( - newNodeVals['newNodeIdr']).decode() - - be(newStewardCli) - do('send NODE dest={newNodeIdr} 
data={newNodeData}', - mapper=newNodeVals, expect=NODE_REQUEST_FAILED, within=8) - - ensurePoolIsOperable(be, do, newStewardCli) - - -@pytest.mark.skip(reason='SOV-1096') -def testSendNodeHasInvalidSyntaxIfDestIsEmpty( - be, do, poolNodesStarted, newStewardCli, newNodeVals): - - newNodeVals['newNodeIdr'] = '' - - be(newStewardCli) - do('send NODE dest={newNodeIdr} data={newNodeData}', - mapper=newNodeVals, expect=INVALID_SYNTAX, within=8) - - ensurePoolIsOperable(be, do, newStewardCli) - - -@pytest.mark.skip(reason='SOV-1096') -def testSendNodeHasInvalidSyntaxIfDestIsMissed( - be, do, poolNodesStarted, newStewardCli, newNodeVals): - - be(newStewardCli) - do('send NODE data={newNodeData}', - mapper=newNodeVals, expect=INVALID_SYNTAX, within=8) - - ensurePoolIsOperable(be, do, newStewardCli) - - -def testSendNodeFailsIfNodeIpContainsLeadingSpace( - be, do, poolNodesStarted, newStewardCli, newNodeVals): - - newNodeVals['newNodeData'][NODE_IP] = ' 122.62.52.13' - - be(newStewardCli) - do('send NODE dest={newNodeIdr} data={newNodeData}', - mapper=newNodeVals, expect=NODE_REQUEST_FAILED, within=8) - - ensurePoolIsOperable(be, do, newStewardCli) - - -def testSendNodeFailsIfNodeIpContainsTrailingSpace( - be, do, poolNodesStarted, newStewardCli, newNodeVals): - - newNodeVals['newNodeData'][NODE_IP] = '122.62.52.13 ' - - be(newStewardCli) - do('send NODE dest={newNodeIdr} data={newNodeData}', - mapper=newNodeVals, expect=NODE_REQUEST_FAILED, within=8) - - ensurePoolIsOperable(be, do, newStewardCli) - - -def testSendNodeFailsIfNodeIpHasWrongFormat( - be, do, poolNodesStarted, newStewardCli, newNodeVals): - - newNodeVals['newNodeData'][NODE_IP] = '122.62.52' - - be(newStewardCli) - do('send NODE dest={newNodeIdr} data={newNodeData}', - mapper=newNodeVals, expect=NODE_REQUEST_FAILED, within=8) - - ensurePoolIsOperable(be, do, newStewardCli) - - -def testSendNodeFailsIfSomeNodeIpComponentsAreNegative( - be, do, poolNodesStarted, newStewardCli, newNodeVals): - - 
newNodeVals['newNodeData'][NODE_IP] = '122.-1.52.13' - - be(newStewardCli) - do('send NODE dest={newNodeIdr} data={newNodeData}', - mapper=newNodeVals, expect=NODE_REQUEST_FAILED, within=8) - - ensurePoolIsOperable(be, do, newStewardCli) - - -def testSendNodeFailsIfSomeNodeIpComponentsAreHigherThanUpperBound( - be, do, poolNodesStarted, newStewardCli, newNodeVals): - - newNodeVals['newNodeData'][NODE_IP] = '122.62.256.13' - - be(newStewardCli) - do('send NODE dest={newNodeIdr} data={newNodeData}', - mapper=newNodeVals, expect=NODE_REQUEST_FAILED, within=8) - - ensurePoolIsOperable(be, do, newStewardCli) - - -def testSendNodeFailsIfNodeIpIsEmpty( - be, do, poolNodesStarted, newStewardCli, newNodeVals): - - newNodeVals['newNodeData'][NODE_IP] = '' - - be(newStewardCli) - do('send NODE dest={newNodeIdr} data={newNodeData}', - mapper=newNodeVals, expect=NODE_REQUEST_FAILED, within=8) - - ensurePoolIsOperable(be, do, newStewardCli) - - -def testSendNodeFailsIfNodeIpIsMissed( - be, do, poolNodesStarted, newStewardCli, newNodeVals): - - del newNodeVals['newNodeData'][NODE_IP] - - be(newStewardCli) - do('send NODE dest={newNodeIdr} data={newNodeData}', - mapper=newNodeVals, expect=NODE_REQUEST_FAILED, within=8) - - ensurePoolIsOperable(be, do, newStewardCli) - - -def testSendNodeFailsIfNodePortIsNegative( - be, do, poolNodesStarted, newStewardCli, newNodeVals): - - newNodeVals['newNodeData'][NODE_PORT] = -1 - - be(newStewardCli) - do('send NODE dest={newNodeIdr} data={newNodeData}', - mapper=newNodeVals, expect=NODE_REQUEST_FAILED, within=8) - - ensurePoolIsOperable(be, do, newStewardCli) - - -def testSendNodeFailsIfNodePortIsHigherThanUpperBound( - be, do, poolNodesStarted, newStewardCli, newNodeVals): - - newNodeVals['newNodeData'][NODE_PORT] = 65536 - - be(newStewardCli) - do('send NODE dest={newNodeIdr} data={newNodeData}', - mapper=newNodeVals, expect=NODE_REQUEST_FAILED, within=8) - - ensurePoolIsOperable(be, do, newStewardCli) - - -def 
testSendNodeFailsIfNodePortIsFloat( - be, do, poolNodesStarted, newStewardCli, newNodeVals): - - newNodeVals['newNodeData'][NODE_PORT] = 5555.5 - - be(newStewardCli) - do('send NODE dest={newNodeIdr} data={newNodeData}', - mapper=newNodeVals, expect=NODE_REQUEST_FAILED, within=8) - - ensurePoolIsOperable(be, do, newStewardCli) - - -def testSendNodeFailsIfNodePortHasWrongFormat( - be, do, poolNodesStarted, newStewardCli, newNodeVals): - - newNodeVals['newNodeData'][NODE_PORT] = 'ninety' - - be(newStewardCli) - do('send NODE dest={newNodeIdr} data={newNodeData}', - mapper=newNodeVals, expect=NODE_REQUEST_FAILED, within=8) - - ensurePoolIsOperable(be, do, newStewardCli) - - -def testSendNodeFailsIfNodePortIsEmpty( - be, do, poolNodesStarted, newStewardCli, newNodeVals): - - newNodeVals['newNodeData'][NODE_PORT] = '' - - be(newStewardCli) - do('send NODE dest={newNodeIdr} data={newNodeData}', - mapper=newNodeVals, expect=NODE_REQUEST_FAILED, within=8) - - ensurePoolIsOperable(be, do, newStewardCli) - - -def testSendNodeFailsIfNodePortIsMissed( - be, do, poolNodesStarted, newStewardCli, newNodeVals): - - del newNodeVals['newNodeData'][NODE_PORT] - - be(newStewardCli) - do('send NODE dest={newNodeIdr} data={newNodeData}', - mapper=newNodeVals, expect=NODE_REQUEST_FAILED, within=8) - - ensurePoolIsOperable(be, do, newStewardCli) - - -def testSendNodeFailsIfClientIpContainsLeadingSpace( - be, do, poolNodesStarted, newStewardCli, newNodeVals): - - newNodeVals['newNodeData'][CLIENT_IP] = ' 122.62.52.13' - - be(newStewardCli) - do('send NODE dest={newNodeIdr} data={newNodeData}', - mapper=newNodeVals, expect=NODE_REQUEST_FAILED, within=8) - - ensurePoolIsOperable(be, do, newStewardCli) - - -def testSendNodeFailsIfClientIpContainsTrailingSpace( - be, do, poolNodesStarted, newStewardCli, newNodeVals): - - newNodeVals['newNodeData'][CLIENT_IP] = '122.62.52.13 ' - - be(newStewardCli) - do('send NODE dest={newNodeIdr} data={newNodeData}', - mapper=newNodeVals, 
expect=NODE_REQUEST_FAILED, within=8) - - ensurePoolIsOperable(be, do, newStewardCli) - - -def testSendNodeFailsIfClientIpHasWrongFormat( - be, do, poolNodesStarted, newStewardCli, newNodeVals): - - newNodeVals['newNodeData'][CLIENT_IP] = '122.62.52' - - be(newStewardCli) - do('send NODE dest={newNodeIdr} data={newNodeData}', - mapper=newNodeVals, expect=NODE_REQUEST_FAILED, within=8) - - ensurePoolIsOperable(be, do, newStewardCli) - - -def testSendNodeFailsIfSomeClientIpComponentsAreNegative( - be, do, poolNodesStarted, newStewardCli, newNodeVals): - - newNodeVals['newNodeData'][CLIENT_IP] = '122.-1.52.13' - - be(newStewardCli) - do('send NODE dest={newNodeIdr} data={newNodeData}', - mapper=newNodeVals, expect=NODE_REQUEST_FAILED, within=8) - - ensurePoolIsOperable(be, do, newStewardCli) - - -def testSendNodeFailsIfSomeClientIpComponentsAreHigherThanUpperBound( - be, do, poolNodesStarted, newStewardCli, newNodeVals): - - newNodeVals['newNodeData'][CLIENT_IP] = '122.62.256.13' - - be(newStewardCli) - do('send NODE dest={newNodeIdr} data={newNodeData}', - mapper=newNodeVals, expect=NODE_REQUEST_FAILED, within=8) - - ensurePoolIsOperable(be, do, newStewardCli) - - -def testSendNodeFailsIfClientIpIsEmpty( - be, do, poolNodesStarted, newStewardCli, newNodeVals): - - newNodeVals['newNodeData'][CLIENT_IP] = '' - - be(newStewardCli) - do('send NODE dest={newNodeIdr} data={newNodeData}', - mapper=newNodeVals, expect=NODE_REQUEST_FAILED, within=8) - - ensurePoolIsOperable(be, do, newStewardCli) - - -def testSendNodeFailsIfClientIpIsMissed( - be, do, poolNodesStarted, newStewardCli, newNodeVals): - - del newNodeVals['newNodeData'][CLIENT_IP] - - be(newStewardCli) - do('send NODE dest={newNodeIdr} data={newNodeData}', - mapper=newNodeVals, expect=NODE_REQUEST_FAILED, within=8) - - ensurePoolIsOperable(be, do, newStewardCli) - - -def testSendNodeFailsIfClientPortIsNegative( - be, do, poolNodesStarted, newStewardCli, newNodeVals): - - newNodeVals['newNodeData'][CLIENT_PORT] = 
-1 - - be(newStewardCli) - do('send NODE dest={newNodeIdr} data={newNodeData}', - mapper=newNodeVals, expect=NODE_REQUEST_FAILED, within=8) - - ensurePoolIsOperable(be, do, newStewardCli) - - -def testSendNodeFailsIfClientPortIsHigherThanUpperBound( - be, do, poolNodesStarted, newStewardCli, newNodeVals): - - newNodeVals['newNodeData'][CLIENT_PORT] = 65536 - - be(newStewardCli) - do('send NODE dest={newNodeIdr} data={newNodeData}', - mapper=newNodeVals, expect=NODE_REQUEST_FAILED, within=8) - - ensurePoolIsOperable(be, do, newStewardCli) - - -def testSendNodeFailsIfClientPortIsFloat( - be, do, poolNodesStarted, newStewardCli, newNodeVals): - - newNodeVals['newNodeData'][CLIENT_PORT] = 5555.5 - - be(newStewardCli) - do('send NODE dest={newNodeIdr} data={newNodeData}', - mapper=newNodeVals, expect=NODE_REQUEST_FAILED, within=8) - - ensurePoolIsOperable(be, do, newStewardCli) - - -def testSendNodeFailsIfClientPortHasWrongFormat( - be, do, poolNodesStarted, newStewardCli, newNodeVals): - - newNodeVals['newNodeData'][CLIENT_PORT] = 'ninety' - - be(newStewardCli) - do('send NODE dest={newNodeIdr} data={newNodeData}', - mapper=newNodeVals, expect=NODE_REQUEST_FAILED, within=8) - - ensurePoolIsOperable(be, do, newStewardCli) - - -def testSendNodeFailsIfClientPortIsEmpty( - be, do, poolNodesStarted, newStewardCli, newNodeVals): - - newNodeVals['newNodeData'][CLIENT_PORT] = '' - - be(newStewardCli) - do('send NODE dest={newNodeIdr} data={newNodeData}', - mapper=newNodeVals, expect=NODE_REQUEST_FAILED, within=8) - - ensurePoolIsOperable(be, do, newStewardCli) - - -def testSendNodeFailsIfClientPortIsMissed( - be, do, poolNodesStarted, newStewardCli, newNodeVals): - - del newNodeVals['newNodeData'][CLIENT_PORT] - - be(newStewardCli) - do('send NODE dest={newNodeIdr} data={newNodeData}', - mapper=newNodeVals, expect=NODE_REQUEST_FAILED, within=8) - - ensurePoolIsOperable(be, do, newStewardCli) - - -def testSendNodeFailsIfAliasIsEmpty( - be, do, poolNodesStarted, newStewardCli, 
newNodeVals): - - newNodeVals['newNodeData'][ALIAS] = '' - - be(newStewardCli) - do('send NODE dest={newNodeIdr} data={newNodeData}', - mapper=newNodeVals, expect=NODE_REQUEST_FAILED, within=8) - - ensurePoolIsOperable(be, do, newStewardCli) - - -def testSendNodeFailsIfAliasIsMissed( - be, do, poolNodesStarted, newStewardCli, newNodeVals): - - del newNodeVals['newNodeData'][ALIAS] - - be(newStewardCli) - do('send NODE dest={newNodeIdr} data={newNodeData}', - mapper=newNodeVals, expect=NODE_REQUEST_FAILED, within=8) - - ensurePoolIsOperable(be, do, newStewardCli) - - -def testSendNodeFailsIfServicesContainsUnknownValue( - be, do, poolNodesStarted, newStewardCli, newNodeVals): - - newNodeVals['newNodeData'][SERVICES] = [VALIDATOR, 'DECIDER'] - - be(newStewardCli) - do('send NODE dest={newNodeIdr} data={newNodeData}', - mapper=newNodeVals, expect=NODE_REQUEST_FAILED, within=8) - - ensurePoolIsOperable(be, do, newStewardCli) - - -def testSendNodeFailsIfServicesIsValidatorValue( - be, do, poolNodesStarted, newStewardCli, newNodeVals): - - newNodeVals['newNodeData'][SERVICES] = VALIDATOR # just string, not array - - be(newStewardCli) - do('send NODE dest={newNodeIdr} data={newNodeData}', - mapper=newNodeVals, expect=NODE_REQUEST_FAILED, within=8) - - ensurePoolIsOperable(be, do, newStewardCli) - - -def testSendNodeFailsIfServicesIsEmptyString( - be, do, poolNodesStarted, newStewardCli, newNodeVals): - - newNodeVals['newNodeData'][SERVICES] = '' - - be(newStewardCli) - do('send NODE dest={newNodeIdr} data={newNodeData}', - mapper=newNodeVals, expect=NODE_REQUEST_FAILED, within=8) - - ensurePoolIsOperable(be, do, newStewardCli) - - -def testSendNodeSuccessIfDataContainsUnknownField( - be, do, poolNodesStarted, newStewardCli, newNodeVals): - - newNodeVals['newNodeData'][SERVICES] = [] - newNodeVals['newNodeData']['extra'] = 42 - - be(newStewardCli) - do('send NODE dest={newNodeIdr} data={newNodeData}', - mapper=newNodeVals, expect=NODE_REQUEST_COMPLETED, within=8) - - 
ensurePoolIsOperable(be, do, newStewardCli) - - -def testSendNodeFailsIfDataIsEmptyJson( - be, do, poolNodesStarted, newStewardCli, newNodeVals): - - newNodeVals['newNodeData'] = {} - - be(newStewardCli) - do('send NODE dest={newNodeIdr} data={newNodeData}', - mapper=newNodeVals, expect=NODE_REQUEST_FAILED, within=8) - - ensurePoolIsOperable(be, do, newStewardCli) - - -@pytest.mark.skip(reason='INDY-68') -def testSendNodeFailsIfDataIsBrokenJson( - be, do, poolNodesStarted, newStewardCli, newNodeVals): - - newNodeVals['newNodeData'] = "{'node_ip': '10.0.0.105', 'node_port': 9701" - - be(newStewardCli) - do('send NODE dest={newNodeIdr} data={newNodeData}', - mapper=newNodeVals, expect=NODE_REQUEST_FAILED, within=8) - - ensurePoolIsOperable(be, do, newStewardCli) - - -@pytest.mark.skip(reason='INDY-68') -def testSendNodeFailsIfDataIsNotJson( - be, do, poolNodesStarted, newStewardCli, newNodeVals): - - newNodeVals['newNodeData'] = 'not_json' - - be(newStewardCli) - do('send NODE dest={newNodeIdr} data={newNodeData}', - mapper=newNodeVals, expect=NODE_REQUEST_FAILED, within=8) - - ensurePoolIsOperable(be, do, newStewardCli) - - -@pytest.mark.skip(reason='SOV-1096') -def testSendNodeHasInvalidSyntaxIfDataIsEmptyString( - be, do, poolNodesStarted, newStewardCli, newNodeVals): - - newNodeVals['newNodeData'] = '' - - be(newStewardCli) - do('send NODE dest={newNodeIdr} data={newNodeData}', - mapper=newNodeVals, expect=INVALID_SYNTAX, within=8) - - ensurePoolIsOperable(be, do, newStewardCli) - - -@pytest.mark.skip(reason='SOV-1096') -def testSendNodeHasInvalidSyntaxIfDataIsMissed( - be, do, poolNodesStarted, newStewardCli, newNodeVals): - - be(newStewardCli) - do('send NODE dest={newNodeIdr}', - mapper=newNodeVals, expect=INVALID_SYNTAX, within=8) - - ensurePoolIsOperable(be, do, newStewardCli) - - -@pytest.mark.skip(reason='SOV-1096') -def testSendNodeHasInvalidSyntaxIfUnknownParameterIsPassed( - be, do, poolNodesStarted, newStewardCli, newNodeVals): - - be(newStewardCli) - 
do('send NODE dest={newNodeIdr} data={newNodeData} extra=42', - mapper=newNodeVals, expect=INVALID_SYNTAX, within=8) - - ensurePoolIsOperable(be, do, newStewardCli) - - -def testSendNodeHasInvalidSyntaxIfAllParametersAreMissed( - be, do, poolNodesStarted, newStewardCli): - - be(newStewardCli) - do('send NODE', expect=INVALID_SYNTAX, within=8) - - ensurePoolIsOperable(be, do, newStewardCli) - - -@pytest.mark.skip('INDY-88') -def testSendNodeSucceedsIfServicesIsMissed( - be, do, poolNodesStarted, newStewardCli, newNodeVals): - - del newNodeVals['newNodeData'][SERVICES] - - be(newStewardCli) - do('send NODE dest={newNodeIdr} data={newNodeData}', - mapper=newNodeVals, expect=NODE_REQUEST_COMPLETED, within=8) - - ensurePoolIsOperable(be, do, newStewardCli) diff --git a/indy_client/test/cli/test_send_nym_validation.py b/indy_client/test/cli/test_send_nym_validation.py deleted file mode 100644 index 743429e9f..000000000 --- a/indy_client/test/cli/test_send_nym_validation.py +++ /dev/null @@ -1,541 +0,0 @@ -import pytest -from libnacl import randombytes - -from plenum.common.util import rawToFriendly, friendlyToHexStr, friendlyToHex, \ - hexToFriendly -from indy_client.test.cli.constants import ERROR, INVALID_SYNTAX -from indy_client.test.cli.helper import createUuidIdentifier, \ - createHalfKeyIdentifierAndAbbrevVerkey, createUuidIdentifierAndFullVerkey, \ - createCryptonym -from indy_common.roles import Roles - -NYM_ADDED = 'Nym {dest} added' - - -def testSendNymSucceedsForUuidIdentifierAndOmittedVerkey( - be, do, poolNodesStarted, trusteeCli): - - parameters = { - 'dest': createUuidIdentifier(), - 'role': Roles.TRUST_ANCHOR.name - } - - be(trusteeCli) - do('send NYM dest={dest} role={role}', - mapper=parameters, expect=NYM_ADDED, within=2) - - -def testSendNymSucceedsForUuidIdentifierAndFullVerkey( - be, do, poolNodesStarted, trusteeCli): - - uuidIdentifier, fullVerkey = createUuidIdentifierAndFullVerkey() - - parameters = { - 'dest': uuidIdentifier, - 'verkey': 
fullVerkey, - 'role': Roles.TRUST_ANCHOR.name - } - - be(trusteeCli) - do('send NYM dest={dest} role={role} verkey={verkey}', - mapper=parameters, expect=NYM_ADDED, within=2) - - -def testSendNymSucceedsForHalfKeyIdentifierAndAbbrevVerkey( - be, do, poolNodesStarted, trusteeCli): - - halfKeyIdentifier, abbrevVerkey = createHalfKeyIdentifierAndAbbrevVerkey() - - parameters = { - 'dest': halfKeyIdentifier, - 'verkey': abbrevVerkey, - 'role': Roles.TRUST_ANCHOR.name - } - - be(trusteeCli) - do('send NYM dest={dest} role={role} verkey={verkey}', - mapper=parameters, expect=NYM_ADDED, within=2) - - -def testSendNymSucceedsForTrusteeRole( - be, do, poolNodesStarted, trusteeCli): - - halfKeyIdentifier, abbrevVerkey = createHalfKeyIdentifierAndAbbrevVerkey() - - parameters = { - 'dest': halfKeyIdentifier, - 'verkey': abbrevVerkey, - 'role': Roles.TRUSTEE.name - } - - be(trusteeCli) - do('send NYM dest={dest} role={role} verkey={verkey}', - mapper=parameters, expect=NYM_ADDED, within=2) - - -def testSendNymSucceedsForStewardRole( - be, do, poolNodesStarted, trusteeCli): - - halfKeyIdentifier, abbrevVerkey = createHalfKeyIdentifierAndAbbrevVerkey() - - parameters = { - 'dest': halfKeyIdentifier, - 'verkey': abbrevVerkey, - 'role': Roles.STEWARD.name - } - - be(trusteeCli) - do('send NYM dest={dest} role={role} verkey={verkey}', - mapper=parameters, expect=NYM_ADDED, within=2) - - -def testSendNymSucceedsForTrustAnchorRole( - be, do, poolNodesStarted, trusteeCli): - - halfKeyIdentifier, abbrevVerkey = createHalfKeyIdentifierAndAbbrevVerkey() - - parameters = { - 'dest': halfKeyIdentifier, - 'verkey': abbrevVerkey, - 'role': Roles.TRUST_ANCHOR.name - } - - be(trusteeCli) - do('send NYM dest={dest} role={role} verkey={verkey}', - mapper=parameters, expect=NYM_ADDED, within=2) - - -def testSendNymSucceedsForOmittedRole( - be, do, poolNodesStarted, trusteeCli): - - halfKeyIdentifier, abbrevVerkey = createHalfKeyIdentifierAndAbbrevVerkey() - - parameters = { - 'dest': 
halfKeyIdentifier, - 'verkey': abbrevVerkey - } - - be(trusteeCli) - do('send NYM dest={dest} verkey={verkey}', - mapper=parameters, expect=NYM_ADDED, within=2) - - -def testSendNymSucceedsForEmptyRole( - be, do, poolNodesStarted, trusteeCli): - - halfKeyIdentifier, abbrevVerkey = createHalfKeyIdentifierAndAbbrevVerkey() - - parameters = { - 'dest': halfKeyIdentifier, - 'verkey': abbrevVerkey, - 'role': '' - } - - be(trusteeCli) - do('send NYM dest={dest} role={role} verkey={verkey}', - mapper=parameters, expect=NYM_ADDED, within=2) - - -@pytest.mark.skip(reason='INDY-210') -def testSendNymFailsForCryptonymIdentifierAndOmittedVerkey( - be, do, poolNodesStarted, trusteeCli): - - parameters = { - 'dest': createCryptonym(), - 'role': Roles.TRUST_ANCHOR.name - } - - be(trusteeCli) - do('send NYM dest={dest} role={role}', - mapper=parameters, expect=ERROR, within=2) - - -@pytest.mark.skip(reason='INDY-210') -def testSendNymFailsForCryptonymIdentifierAndFullVerkey( - be, do, poolNodesStarted, trusteeCli): - - cryptonym = createCryptonym() - _, fullVerkey = createUuidIdentifierAndFullVerkey() - - parameters = { - 'dest': cryptonym, - 'verkey': fullVerkey, - 'role': Roles.TRUST_ANCHOR.name - } - - be(trusteeCli) - do('send NYM dest={dest} role={role} verkey={verkey}', - mapper=parameters, expect=ERROR, within=2) - - -def testSendNymFailsForCryptonymIdentifierAndMatchedAbbrevVerkey( - be, do, poolNodesStarted, trusteeCli): - - cryptonym = createCryptonym() - hexCryptonym = friendlyToHex(cryptonym) - abbrevVerkey = '~' + hexToFriendly(hexCryptonym[16:]) - - parameters = { - 'dest': cryptonym, - 'verkey': abbrevVerkey, - 'role': Roles.TRUST_ANCHOR.name - } - - be(trusteeCli) - do('send NYM dest={dest} role={role} verkey={verkey}', - mapper=parameters, expect=ERROR, within=2) - - -@pytest.mark.skip(reason='SOV-1108') -def testSendNymFailsIfIdentifierSizeIs15Bytes( - be, do, poolNodesStarted, trusteeCli): - - parameters = { - 'dest': rawToFriendly(randombytes(15)), - 'role': 
Roles.TRUST_ANCHOR.name - } - - be(trusteeCli) - do('send NYM dest={dest} role={role}', - mapper=parameters, expect=ERROR, within=2) - - -@pytest.mark.skip(reason='SOV-1108') -def testSendNymFailsIfIdentifierSizeIs17Bytes( - be, do, poolNodesStarted, trusteeCli): - - parameters = { - 'dest': rawToFriendly(randombytes(17)), - 'role': Roles.TRUST_ANCHOR.name - } - - be(trusteeCli) - do('send NYM dest={dest} role={role}', - mapper=parameters, expect=ERROR, within=2) - - -@pytest.mark.skip(reason='SOV-1108') -def testSendNymFailsIfFullVerkeySizeIs31Bytes( - be, do, poolNodesStarted, trusteeCli): - - parameters = { - 'dest': rawToFriendly(randombytes(16)), - 'verkey': rawToFriendly(randombytes(31)), - 'role': Roles.TRUST_ANCHOR.name - } - - be(trusteeCli) - do('send NYM dest={dest} role={role}', - mapper=parameters, expect=ERROR, within=2) - - -@pytest.mark.skip(reason='SOV-1108') -def testSendNymFailsIfFullVerkeySizeIs33Bytes( - be, do, poolNodesStarted, trusteeCli): - - parameters = { - 'dest': rawToFriendly(randombytes(16)), - 'verkey': rawToFriendly(randombytes(33)), - 'role': Roles.TRUST_ANCHOR.name - } - - be(trusteeCli) - do('send NYM dest={dest} role={role}', - mapper=parameters, expect=ERROR, within=2) - - -@pytest.mark.skip(reason='SOV-1108') -def testSendNymFailsIfAbbrevVerkeySizeIs15Bytes( - be, do, poolNodesStarted, trusteeCli): - - parameters = { - 'dest': rawToFriendly(randombytes(16)), - 'verkey': '~' + rawToFriendly(randombytes(15)), - 'role': Roles.TRUST_ANCHOR.name - } - - be(trusteeCli) - do('send NYM dest={dest} role={role}', - mapper=parameters, expect=ERROR, within=2) - - -@pytest.mark.skip(reason='SOV-1108') -def testSendNymFailsIfAbbrevVerkeySizeIs17Bytes( - be, do, poolNodesStarted, trusteeCli): - - parameters = { - 'dest': rawToFriendly(randombytes(16)), - 'verkey': '~' + rawToFriendly(randombytes(17)), - 'role': Roles.TRUST_ANCHOR.name - } - - be(trusteeCli) - do('send NYM dest={dest} role={role}', - mapper=parameters, expect=ERROR, within=2) 
- - -@pytest.mark.skip(reason='SOV-1109') -def testSendNymFailsIfUuidIdentifierIsHexEncoded( - be, do, poolNodesStarted, trusteeCli): - - parameters = { - 'dest': friendlyToHexStr(createUuidIdentifier()), - 'role': Roles.TRUST_ANCHOR.name - } - - be(trusteeCli) - do('send NYM dest={dest} role={role}', - mapper=parameters, expect=ERROR, within=2) - - -@pytest.mark.skip(reason='SOV-1109') -def testSendNymFailsIfFullVerkeyIsHexEncoded( - be, do, poolNodesStarted, trusteeCli): - - uuidIdentifier, fullVerkey = createUuidIdentifierAndFullVerkey() - - parameters = { - 'dest': uuidIdentifier, - 'verkey': friendlyToHexStr(fullVerkey), - 'role': Roles.TRUST_ANCHOR.name - } - - be(trusteeCli) - do('send NYM dest={dest} role={role} verkey={verkey}', - mapper=parameters, expect=ERROR, within=2) - - -@pytest.mark.skip(reason='SOV-1109') -def testSendNymFailsIfAbbrevVerkeyIsHexEncoded( - be, do, poolNodesStarted, trusteeCli): - - halfKeyIdentifier, abbrevVerkey = createHalfKeyIdentifierAndAbbrevVerkey() - - parameters = { - 'dest': halfKeyIdentifier, - 'verkey': '~' + friendlyToHexStr(abbrevVerkey.replace('~', '')), - 'role': Roles.TRUST_ANCHOR.name - } - - be(trusteeCli) - do('send NYM dest={dest} role={role} verkey={verkey}', - mapper=parameters, expect=ERROR, within=2) - - -@pytest.mark.skip(reason='SOV-1109') -def testSendNymFailsIfIdentifierContainsNonBase58Characters( - be, do, poolNodesStarted, trusteeCli): - - uuidIdentifier = createUuidIdentifier() - - parameters = { - 'dest': uuidIdentifier[:5] + '/' + uuidIdentifier[6:], - 'role': Roles.TRUST_ANCHOR.name - } - - be(trusteeCli) - do('send NYM dest={dest} role={role}', - mapper=parameters, expect=ERROR, within=2) - - -@pytest.mark.skip(reason='SOV-1109') -def testSendNymFailsIfFullVerkeyContainsNonBase58Characters( - be, do, poolNodesStarted, trusteeCli): - - uuidIdentifier, fullVerkey = createUuidIdentifierAndFullVerkey() - - parameters = { - 'dest': uuidIdentifier, - 'verkey': fullVerkey[:5] + '/' + fullVerkey[6:], - 
'role': Roles.TRUST_ANCHOR.name - } - - be(trusteeCli) - do('send NYM dest={dest} role={role} verkey={verkey}', - mapper=parameters, expect=ERROR, within=2) - - -@pytest.mark.skip(reason='SOV-1109') -def testSendNymFailsIfAbbrevVerkeyContainsNonBase58Characters( - be, do, poolNodesStarted, trusteeCli): - - halfKeyIdentifier, abbrevVerkey = createHalfKeyIdentifierAndAbbrevVerkey() - - parameters = { - 'dest': halfKeyIdentifier, - 'verkey': abbrevVerkey[:6] + '/' + abbrevVerkey[7:], - 'role': Roles.TRUST_ANCHOR.name - } - - be(trusteeCli) - do('send NYM dest={dest} role={role} verkey={verkey}', - mapper=parameters, expect=ERROR, within=2) - - -@pytest.mark.skip(reason='SOV-1109') -def testSendNymFailsIfFullVerkeyContainsTilde( - be, do, poolNodesStarted, trusteeCli): - - uuidIdentifier, fullVerkey = createUuidIdentifierAndFullVerkey() - - parameters = { - 'dest': uuidIdentifier, - 'verkey': '~' + fullVerkey, - 'role': Roles.TRUST_ANCHOR.name - } - - be(trusteeCli) - do('send NYM dest={dest} role={role} verkey={verkey}', - mapper=parameters, expect=ERROR, within=2) - - -@pytest.mark.skip(reason='SOV-1109') -def testSendNymFailsIfAbbrevVerkeyDoesNotContainTilde( - be, do, poolNodesStarted, trusteeCli): - - halfKeyIdentifier, abbrevVerkey = createHalfKeyIdentifierAndAbbrevVerkey() - - parameters = { - 'dest': halfKeyIdentifier, - 'verkey': abbrevVerkey.replace('~', ''), - 'role': Roles.TRUST_ANCHOR.name - } - - be(trusteeCli) - do('send NYM dest={dest} role={role} verkey={verkey}', - mapper=parameters, expect=ERROR, within=2) - - -@pytest.mark.skip(reason='SOV-1110') -def testSendNymFailsIfRoleIsUnknown(be, do, poolNodesStarted, trusteeCli): - - halfKeyIdentifier, abbrevVerkey = createHalfKeyIdentifierAndAbbrevVerkey() - - parameters = { - 'dest': halfKeyIdentifier, - 'verkey': abbrevVerkey, - 'role': 'SUPERVISOR' - } - - be(trusteeCli) - do('send NYM dest={dest} role={role} verkey={verkey}', - mapper=parameters, expect=ERROR, within=2) - - 
-@pytest.mark.skip(reason='SOV-1110') -def testSendNymFailsIfRoleIsSpecifiedUsingNumericCode( - be, do, poolNodesStarted, trusteeCli): - - halfKeyIdentifier, abbrevVerkey = createHalfKeyIdentifierAndAbbrevVerkey() - - parameters = { - 'dest': halfKeyIdentifier, - 'verkey': abbrevVerkey, - 'role': Roles.TRUST_ANCHOR.value - } - - be(trusteeCli) - do('send NYM dest={dest} role={role} verkey={verkey}', - mapper=parameters, expect=ERROR, within=2) - - -@pytest.mark.skip(reason='SOV-1111') -def testSendNymHasInvalidSyntaxIfParametersOrderIsWrong( - be, do, poolNodesStarted, trusteeCli): - - halfKeyIdentifier, abbrevVerkey = createHalfKeyIdentifierAndAbbrevVerkey() - - parameters = { - 'dest': halfKeyIdentifier, - 'verkey': abbrevVerkey, - 'role': Roles.TRUST_ANCHOR.name - } - - be(trusteeCli) - do('send NYM verkey={verkey} role={role} dest={dest}', - mapper=parameters, expect=INVALID_SYNTAX, within=2) - - -@pytest.mark.skip(reason='SOV-1111') -def testSendNymHasInvalidSyntaxIfIdentifierIsEmpty( - be, do, poolNodesStarted, trusteeCli): - - _, fullVerkey = createUuidIdentifierAndFullVerkey() - - parameters = { - 'dest': '', - 'verkey': fullVerkey, - 'role': Roles.TRUST_ANCHOR.name - } - - be(trusteeCli) - do('send NYM dest={dest} role={role} verkey={verkey}', - mapper=parameters, expect=INVALID_SYNTAX, within=2) - - -@pytest.mark.skip(reason='SOV-1111') -def testSendNymHasInvalidSyntaxIfIdentifierIsOmitted( - be, do, poolNodesStarted, trusteeCli): - - _, fullVerkey = createUuidIdentifierAndFullVerkey() - - parameters = { - 'verkey': fullVerkey, - 'role': Roles.TRUST_ANCHOR.name - } - - be(trusteeCli) - do('send NYM role={role} verkey={verkey}', - mapper=parameters, expect=INVALID_SYNTAX, within=2) - - -def testSendNymHasInvalidSyntaxForUuidIdentifierAndEmptyVerkey( - be, do, poolNodesStarted, trusteeCli): - - parameters = { - 'dest': createUuidIdentifier(), - 'verkey': '', - 'role': Roles.TRUST_ANCHOR.name - } - - be(trusteeCli) - do('send NYM dest={dest} role={role} 
verkey={verkey}', - mapper=parameters, expect=NYM_ADDED, within=2) - - -@pytest.mark.skip(reason='SOV-1111') -def testSendNymHasInvalidSyntaxIfIdentifierAndVerkeyAreOmitted( - be, do, poolNodesStarted, trusteeCli): - - parameters = { - 'role': Roles.TRUST_ANCHOR.name - } - - be(trusteeCli) - do('send NYM role={role}', - mapper=parameters, expect=INVALID_SYNTAX, within=2) - - -@pytest.mark.skip(reason='SOV-1111') -def testSendNymHasInvalidSyntaxIfUnknownParameterIsPassed( - be, do, poolNodesStarted, trusteeCli): - - uuidIdentifier, fullVerkey = createUuidIdentifierAndFullVerkey() - - parameters = { - 'dest': uuidIdentifier, - 'verkey': fullVerkey, - 'role': Roles.TRUST_ANCHOR.name, - 'extra': 42 - } - - be(trusteeCli) - do('send NYM dest={dest} role={role} verkey={verkey} extra={extra}', - mapper=parameters, expect=INVALID_SYNTAX, within=2) - - -def testSendNymHasInvalidSyntaxIfAllParametersAreOmitted( - be, do, poolNodesStarted, trusteeCli): - - be(trusteeCli) - do('send NYM', expect=INVALID_SYNTAX, within=2) diff --git a/indy_client/test/cli/test_send_schema.py b/indy_client/test/cli/test_send_schema.py deleted file mode 100644 index 3f3a15ad2..000000000 --- a/indy_client/test/cli/test_send_schema.py +++ /dev/null @@ -1,21 +0,0 @@ -from indy_client.test.cli.constants import SCHEMA_ADDED, SCHEMA_NOT_ADDED_DUPLICATE - - -def test_send_schema_multiple_attribs(be, do, poolNodesStarted, trusteeCli): - be(trusteeCli) - do('send SCHEMA name=Degree version=1.0 keys=attrib1,attrib2,attrib3', - expect=SCHEMA_ADDED, within=5) - - -def test_send_schema_one_attrib(be, do, poolNodesStarted, trusteeCli): - be(trusteeCli) - do('send SCHEMA name=Degree2 version=1.1 keys=attrib1', - expect=SCHEMA_ADDED, within=5) - - -def test_can_not_send_same_schema(be, do, poolNodesStarted, trusteeCli): - be(trusteeCli) - do('send SCHEMA name=Degree3 version=1.3 keys=attrib1', - expect=SCHEMA_ADDED, within=5) - do('send SCHEMA name=Degree3 version=1.3 keys=attrib1', - 
expect=SCHEMA_NOT_ADDED_DUPLICATE, within=5) diff --git a/indy_client/test/cli/test_show_proof_multi_claim.py b/indy_client/test/cli/test_show_proof_multi_claim.py deleted file mode 100644 index f62f588d6..000000000 --- a/indy_client/test/cli/test_show_proof_multi_claim.py +++ /dev/null @@ -1,63 +0,0 @@ -from collections import OrderedDict - -import pytest - -from indy_client.cli.cli import IndyCli -from indy_client.client.wallet.connection import Connection - - -@pytest.fixture() -def claimsUsedForProof(): - - claim1Attr = OrderedDict() - claim1Attr['first_name'] = 'Alice' - claim1Attr['last_name'] = 'Garcia' - claim1Attr['account_address_1'] = '321' - claim1Attr['date_of_birth'] = 'May 15, 1990' - - claim2Attr = OrderedDict() - claim2Attr['first_name'] = 'Alice' - claim2Attr['last_name'] = 'Garcia' - claim2Attr['account_status'] = 'active' - - return [ - ( - Connection(name='Issuer 1'), - ('TestClaim-1', '0.1', 'Other data'), - claim1Attr - ), - ( - Connection(name='Issuer 2'), - ('TestClaim-2', '0.1', 'Other claim 2 data'), - claim2Attr - ) - ] - - -@pytest.fixture() -def proofRequestAttrs(): - return { - 'first_name': None, - 'last_name': None, - 'account_status': None - } - - -def test_showProofOnlyUsedAttributesAreHighlighted( - claimsUsedForProof, proofRequestAttrs): - actualConstructionToPrint = IndyCli._printClaimsUsedInProofConstruction( - claimsUsedForProof, proofRequestAttrs - ) - - expectedPrint = '\nThe Proof is constructed from the following claims:\n' \ - '\n Claim [1] (TestClaim-1 v0.1 from Issuer 1)\n' \ - ' * first_name: Alice\n' \ - ' * last_name: Garcia\n' \ - ' account_address_1: 321\n' \ - ' date_of_birth: May 15, 1990\n' \ - '\n Claim [2] (TestClaim-2 v0.1 from Issuer 2)\n' \ - ' first_name: Alice\n' \ - ' last_name: Garcia\n' \ - ' * account_status: active\n' - - assert expectedPrint == actualConstructionToPrint diff --git a/indy_client/test/cli/test_tutorial.py b/indy_client/test/cli/test_tutorial.py deleted file mode 100644 index 
b015990ab..000000000 --- a/indy_client/test/cli/test_tutorial.py +++ /dev/null @@ -1,1071 +0,0 @@ -import json - -import pytest - -from indy_client.test import waits -from stp_core.loop.eventually import eventually -from plenum.test.cli.helper import exitFromCli, \ - createAndAssertNewKeyringCreation -from indy_common.exceptions import InvalidConnectionException -from indy_common.constants import ENDPOINT -from plenum.common.signer_did import DidSigner - -from indy_client.client.wallet.connection import Connection, constant -from indy_client.test.cli.helper import getFileLines, prompt_is, doubleBraces, \ - getTotalConnections, getTotalSchemas, getTotalClaimsRcvd, getTotalAvailableClaims, \ - newKey, ensureConnectedToTestEnv, connect_and_check_output - - -def getSampleConnectionInvitation(): - return { - "connection-request": { - "name": "Acme Corp", - "identifier": "CzkavE58zgX7rUMrzSinLr", - "nonce": "57fbf9dc8c8e6acde33de98c6d747b28c", - "endpoint": "127.0.0.1:1213" - }, - "proof-requests": [{ - "name": "Job-Application", - "version": "0.2", - "attributes": { - "first_name": "string", - "last_name": "string", - "phone_number": "string", - "degree": "string", - "status": "string", - "ssn": "string" - } - }], - "sig": "KDkI4XUePwEu1K01u0DpDsbeEfBnnBfwuw8e4DEPK+MdYXv" - "VsXdSmBJ7yEfQBm8bSJuj6/4CRNI39fFul6DcDA==" - } - - -def checkIfInvalidAttribIsRejected(do, map): - data = json.loads(map.get('invalidEndpointAttr')) - endpoint = data.get(ENDPOINT).get('ha') - errorMsg = 'client request invalid: InvalidClientRequest(' \ - '"invalid endpoint: \'{}\'",)'.format(endpoint) - - do("send ATTRIB dest={remote} raw={invalidEndpointAttr}", - within=5, - expect=[errorMsg], - mapper=map) - - -def checkIfValidEndpointIsAccepted(do, map, attribAdded): - validEndpoints = [] - validPorts = ["1", "3457", "65535"] - for validPort in validPorts: - validEndpoints.append("127.0.0.1:{}".format(validPort)) - - for validEndpoint in validEndpoints: - endpoint = json.dumps({ENDPOINT: {'ha': 
validEndpoint}}) - map["validEndpointAttr"] = endpoint - do("send ATTRIB dest={remote} raw={validEndpointAttr}", - within=5, - expect=attribAdded, - mapper=map) - - -def checkIfInvalidEndpointIsRejected(do, map): - invalidEndpoints = [] - invalidIps = [" 127.0.0.1", "127.0.0.1 ", " 127.0.0.1 ", "127.0.0", - "127.A.0.1"] - invalidPorts = [" 3456", "3457 ", "63AB", "0", "65536"] - for invalidIp in invalidIps: - invalidEndpoints.append(("{}:1234".format(invalidIp), 'address')) - for invalidPort in invalidPorts: - invalidEndpoints.append(("127.0.0.1:{}".format(invalidPort), 'port')) - - for invalidEndpoint, invalid_part in invalidEndpoints: - errorMsg = "client request invalid: InvalidClientRequest(" \ - "'validation error [ClientAttribOperation]: invalid endpoint {} (ha={})',)" \ - "".format(invalid_part, invalidEndpoint) - endpoint = json.dumps({ENDPOINT: {'ha': invalidEndpoint}}) - map["invalidEndpointAttr"] = endpoint - do("send ATTRIB dest={remote} raw={invalidEndpointAttr}", - within=5, - expect=[errorMsg], - mapper=map) - - -def agentWithEndpointAdded(be, do, agentCli, agentMap, attrAddedOut): - be(agentCli) - - ensureConnectedToTestEnv(be, do, agentCli) - - # TODO these belong in there own test, not - # TODO part of standing up agents - - # checkIfInvalidEndpointIsRejected(do, agentMap) - # checkIfValidEndpointIsAccepted(do, agentMap, attrAddedOut) - do('send ATTRIB dest={remote} raw={endpointAttr}', - within=5, - expect=attrAddedOut, - mapper=agentMap) - return agentCli - - -@pytest.fixture(scope="module") -def faberWithEndpointAdded(be, do, faberCli, faberAddedByPhil, - faberMap, attrAddedOut): - agentWithEndpointAdded(be, do, faberCli, faberMap, attrAddedOut) - - -@pytest.fixture(scope="module") -def acmeWithEndpointAdded(be, do, acmeCli, acmeAddedByPhil, - acmeMap, attrAddedOut): - agentWithEndpointAdded(be, do, acmeCli, acmeMap, attrAddedOut) - - -@pytest.fixture(scope="module") -def thriftWithEndpointAdded(be, do, thriftCli, thriftAddedByPhil, - 
thriftMap, attrAddedOut): - agentWithEndpointAdded(be, do, thriftCli, thriftMap, attrAddedOut) - - -def connectIfNotAlreadyConnected(do, userCli, userMap, expectMsgs=None): - # TODO: Shouldn't this be testing the cli command `status`? - if not userCli._isConnectedToAnyEnv(): - connect_and_check_output(do, userCli.txn_dir, expect=expectMsgs, mapper=userMap) - - -def setPromptAndKeyring(do, name, newKeyringOut, userMap): - do('prompt {}'.format(name), expect=prompt_is(name)) - do('new wallet {}'.format(name), expect=newKeyringOut, mapper=userMap) - - -@pytest.fixture(scope="module") -def preRequisite(poolNodesStarted, - faberIsRunning, acmeIsRunning, thriftIsRunning, - faberWithEndpointAdded, acmeWithEndpointAdded, - thriftWithEndpointAdded): - pass - - -@pytest.fixture(scope="module") -def walletCreatedForTestEnv(preRequisite, be, do, earlCLI): - be(earlCLI) - createAndAssertNewKeyringCreation(do, "default1") - createAndAssertNewKeyringCreation(do, "default2") - connectIfNotAlreadyConnected(do, earlCLI, {}) - createAndAssertNewKeyringCreation(do, "test2") - exitFromCli(do) - - -@pytest.fixture(scope="module") -def aliceCli(preRequisite, be, do, walletCreatedForTestEnv, - aliceCLI, newKeyringOut, aliceMap): - be(aliceCLI) - setPromptAndKeyring(do, "Alice", newKeyringOut, aliceMap) - return aliceCLI - - -@pytest.fixture(scope="module") -def susanCli(preRequisite, be, do, susanCLI, newKeyringOut, susanMap): - be(susanCLI) - setPromptAndKeyring(do, "Susan", newKeyringOut, susanMap) - return susanCLI - - -@pytest.fixture(scope="module") -def bobCli(preRequisite, be, do, bobCLI, newKeyringOut, bobMap): - be(bobCLI) - setPromptAndKeyring(do, "Bob", newKeyringOut, bobMap) - return bobCLI - - -@pytest.fixture(scope="module") -def faberCli(be, do, faberCLI, newKeyringOut, faberMap): - be(faberCLI) - setPromptAndKeyring(do, "Faber", newKeyringOut, faberMap) - newKey(be, do, faberCLI, seed=faberMap['seed']) - - return faberCLI - - -@pytest.fixture(scope="module") -def 
acmeCli(be, do, acmeCLI, newKeyringOut, acmeMap): - be(acmeCLI) - setPromptAndKeyring(do, "Acme", newKeyringOut, acmeMap) - newKey(be, do, acmeCLI, seed=acmeMap['seed']) - - return acmeCLI - - -@pytest.fixture(scope="module") -def thriftCli(be, do, thriftCLI, newKeyringOut, thriftMap): - be(thriftCLI) - setPromptAndKeyring(do, "Thrift", newKeyringOut, thriftMap) - newKey(be, do, thriftCLI, seed=thriftMap['seed']) - - return thriftCLI - - -def testNotConnected(be, do, aliceCli, notConnectedStatus): - be(aliceCli) - do('status', expect=notConnectedStatus) - - -def testShowInviteNotExists(be, do, aliceCli, fileNotExists, faberMap): - be(aliceCli) - do('show {invite-not-exists}', expect=fileNotExists, mapper=faberMap) - - -def testShowInviteWithDirPath(be, do, aliceCli, fileNotExists, faberMap): - be(aliceCli) - do('show sample', expect=fileNotExists, mapper=faberMap) - - -def testLoadConnectionInviteWithoutSig(): - li = getSampleConnectionInvitation() - del li["sig"] - with pytest.raises(InvalidConnectionException) as excinfo: - Connection.validate(li) - assert "Field not found in given input: sig" in str(excinfo.value) - - -def testShowFaberInvite(be, do, aliceCli, faberMap): - be(aliceCli) - inviteContents = doubleBraces(getFileLines(faberMap.get("invite"))) - do('show {invite}', expect=inviteContents, - mapper=faberMap) - - -def testLoadInviteNotExists(be, do, aliceCli, fileNotExists, faberMap): - be(aliceCli) - do('load {invite-not-exists}', expect=fileNotExists, mapper=faberMap) - - -@pytest.fixture(scope="module") -def faberInviteLoadedByAlice(be, do, aliceCli, loadInviteOut, faberMap): - totalConnectionsBefore = getTotalConnections(aliceCli) - be(aliceCli) - do('load {invite}', expect=loadInviteOut, mapper=faberMap) - assert totalConnectionsBefore + 1 == getTotalConnections(aliceCli) - return aliceCli - - -def testLoadFaberInvite(faberInviteLoadedByAlice): - pass - - -def testShowConnectionNotExists( - be, do, aliceCli, connectionNotExists, faberMap): - 
be(aliceCli) - do('show connection {inviter-not-exists}', - expect=connectionNotExists, - mapper=faberMap) - - -def testShowFaberConnection(be, do, aliceCli, faberInviteLoadedByAlice, - showUnSyncedConnectionOut, faberMap): - be(aliceCli) - cp = faberMap.copy() - cp.update(endpoint='', - last_synced='') - do('show connection {inviter}', - expect=showUnSyncedConnectionOut, mapper=cp) - - -def testSyncConnectionNotExists( - be, do, aliceCli, connectionNotExists, faberMap): - be(aliceCli) - do('sync {inviter-not-exists}', - expect=connectionNotExists, mapper=faberMap) - - -def testSyncFaberWhenNotConnected(be, do, aliceCli, faberMap, - faberInviteLoadedByAlice, - syncWhenNotConnected): - be(aliceCli) - do('sync {inviter}', expect=syncWhenNotConnected, - mapper=faberMap) - - -def testAcceptUnSyncedFaberInviteWhenNotConnected( - be, - do, - aliceCli, - faberInviteLoadedByAlice, - acceptUnSyncedWhenNotConnected, - faberMap): - be(aliceCli) - do('accept request from {inviter}', - expect=acceptUnSyncedWhenNotConnected, - mapper=faberMap) - - -# def testAcceptUnSyncedFaberInvite(be, do, aliceCli, preRequisite, -# faberInviteLoadedByAlice, -# acceptUnSyncedWithoutEndpointWhenConnected, -# faberMap): -# be(aliceCli) -# connectIfNotAlreadyConnected(do, aliceCli, faberMap) -# -# checkWalletStates(aliceCli, totalLinks=1, totalAvailableClaims=0, -# totalSchemas=0, totalClaimsRcvd=0) -# do('accept invitation from {inviter}', -# within=13, -# expect=acceptUnSyncedWithoutEndpointWhenConnected, -# mapper=faberMap) -# checkWalletStates(aliceCli, totalLinks=1, totalAvailableClaims=0, -# totalSchemas=0, totalClaimsRcvd=0) - - -@pytest.fixture(scope="module") -def faberInviteSyncedWithoutEndpoint(be, do, aliceCli, faberMap, - preRequisite, - faberInviteLoadedByAlice, - connectionNotYetSynced, - syncConnectionOutWithoutEndpoint): - be(aliceCli) - connectIfNotAlreadyConnected(do, aliceCli, faberMap) - - do('sync {inviter}', within=2, - expect=syncConnectionOutWithoutEndpoint, - 
mapper=faberMap) - return aliceCli - - -def testSyncFaberInviteWithoutEndpoint(faberInviteSyncedWithoutEndpoint): - pass - - -def testShowSyncedFaberInvite( - be, - do, - aliceCli, - faberMap, - connectionNotYetSynced, - faberInviteSyncedWithoutEndpoint, - showSyncedConnectionWithoutEndpointOut): - - be(aliceCli) - - cp = faberMap.copy() - cp.update(endpoint='', - last_synced='') - - do('show connection {inviter}', within=4, - expect=showSyncedConnectionWithoutEndpointOut, - # TODO, need to come back to not_expect - # not_expect=linkNotYetSynced, - mapper=cp) - - -def syncInvite(be, do, userCli, expectedMsgs, mapping): - be(userCli) - - do('sync {inviter}', within=3, - expect=expectedMsgs, - mapper=mapping) - - -@pytest.fixture(scope="module") -def faberInviteSyncedWithEndpoint(be, do, faberMap, aliceCLI, preRequisite, - faberInviteSyncedWithoutEndpoint, - syncConnectionOutWithEndpoint): - cp = faberMap.copy() - cp.update(last_synced='') - syncInvite(be, do, aliceCLI, syncConnectionOutWithEndpoint, cp) - return aliceCLI - - -def testSyncFaberInvite(faberInviteSyncedWithEndpoint): - pass - - -def testShowSyncedFaberInviteWithEndpoint(be, do, aliceCLI, faberMap, - faberInviteSyncedWithEndpoint, - showSyncedConnectionWithEndpointOut): - be(aliceCLI) - cp = faberMap.copy() - cp.update(last_synced='just now') - do('show connection {inviter}', - expect=showSyncedConnectionWithEndpointOut, mapper=cp, within=3) - - -def testPingBeforeAccept(be, do, aliceCli, faberMap, - faberInviteSyncedWithEndpoint): - be(aliceCli) - connectIfNotAlreadyConnected(do, aliceCli, faberMap) - do('ping {inviter}', - within=3, - expect=[ - 'Ping sent.', - 'Error processing ping. Connection is not yet created.' 
- ], - mapper=faberMap) - - -def testAcceptNotExistsConnection( - be, do, aliceCli, connectionNotExists, faberMap): - be(aliceCli) - do('accept request from {inviter-not-exists}', - expect=connectionNotExists, mapper=faberMap) - - -def getSignedRespMsg(msg, signer): - signature = signer.sign(msg) - msg["signature"] = signature - return msg - - -def accept_request(be, do, userCli, agentMap, expect): - be(userCli) - do("accept request from {inviter}", - within=15, - mapper=agentMap, - expect=expect, - not_expect=[ - "Observer threw an exception", - "DID is not yet written to Indy"] - ) - li = userCli.agent.wallet.getConnectionBy(nonce=agentMap['nonce']) - assert li - agentMap['DID'] = li.localIdentifier - agentMap['verkey'] = li.localVerkey - - -@pytest.fixture(scope="module") -def alice_accepted_faber_request(be, do, aliceCli, faberMap, - preRequisite, - syncedInviteAcceptedWithClaimsOut, - faberInviteSyncedWithEndpoint): - accept_request(be, do, aliceCli, faberMap, - syncedInviteAcceptedWithClaimsOut) - do("list connections", within=10, - mapper=faberMap, - expect="Faber College") - return aliceCli - - -def testAliceAcceptFaberInvitationFirstTime(alice_accepted_faber_request): - pass - - -def testPingFaber(be, do, aliceCli, faberMap, - alice_accepted_faber_request): - be(aliceCli) - do('ping {inviter}', - within=3, - expect=[ - "Ping sent.", - "Pong received."], - mapper=faberMap) - - -def test_alice_accept_faber_request_again( - be, - do, - aliceCli, - faberMap, - unsyced_already_accepted_request_accepted_out, - alice_accepted_faber_request): - li = aliceCli.activeWallet.getConnectionBy(remote=faberMap['remote']) - li.connection_status = None - be(aliceCli) - accept_request(be, do, aliceCli, faberMap, - unsyced_already_accepted_request_accepted_out) - li.connection_status = constant.CONNECTION_STATUS_ACCEPTED - - -# TODO: Write tests which sends request with invalid signature -# TODO: Write tests which receives response with invalid signature - -def 
testShowFaberConnectionAfterInviteAccept(be, do, aliceCli, faberMap, - showAcceptedConnectionOut, - alice_accepted_faber_request): - be(aliceCli) - - do("show connection {inviter}", expect=showAcceptedConnectionOut, - # not_expect="Link (not yet accepted)", - mapper=faberMap) - - -def testShowClaimNotExists(be, do, aliceCli, faberMap, showClaimNotFoundOut, - alice_accepted_faber_request): - be(aliceCli) - - do("show claim claim-to-show-not-exists", - expect=showClaimNotFoundOut, - mapper=faberMap, - within=3) - - -def testShowTranscriptClaim(be, do, aliceCli, transcriptClaimMap, - showTranscriptClaimOut, - alice_accepted_faber_request): - be(aliceCli) - totalSchemasBefore = getTotalSchemas(aliceCli) - do("show claim {name}", - expect=showTranscriptClaimOut, - mapper=transcriptClaimMap, - within=3) - assert totalSchemasBefore + 1 == getTotalSchemas(aliceCli) - - -def testReqClaimNotExists(be, do, aliceCli, faberMap, showClaimNotFoundOut, - alice_accepted_faber_request): - be(aliceCli) - - do("request claim claim-to-req-not-exists", - expect=showClaimNotFoundOut, - mapper=faberMap) - - -@pytest.fixture(scope="module") -def aliceRequestedTranscriptClaim(be, do, aliceCli, transcriptClaimMap, - reqClaimOut, preRequisite, - alice_accepted_faber_request): - be(aliceCli) - totalClaimsRcvdBefore = getTotalClaimsRcvd(aliceCli) - do("request claim {name}", within=5, - expect=reqClaimOut, - mapper=transcriptClaimMap) - - async def assertTotalClaimsRcvdIncreasedByOne(): - total_claims = len((await aliceCli.agent.prover.wallet.getAllClaimsSignatures()).keys()) - assert totalClaimsRcvdBefore + 1 == total_claims - - aliceCli.looper.runFor(10) - timeout = waits.expectedClaimsReceived() - aliceCli.looper.run( - eventually(assertTotalClaimsRcvdIncreasedByOne, timeout=timeout)) - - -def testAliceReqClaim(aliceRequestedTranscriptClaim): - pass - - -def testShowFaberClaimPostReqClaim(be, do, aliceCli, - aliceRequestedTranscriptClaim, - transcriptClaimValueMap, - rcvdTranscriptClaimOut): 
- be(aliceCli) - do("show claim {name}", - expect=rcvdTranscriptClaimOut, - mapper=transcriptClaimValueMap, - within=3) - - -def testShowAcmeInvite(be, do, aliceCli, acmeMap): - be(aliceCli) - inviteContents = doubleBraces(getFileLines(acmeMap.get("invite"))) - - do('show {invite}', expect=inviteContents, - mapper=acmeMap) - - -@pytest.fixture(scope="module") -def acmeInviteLoadedByAlice(be, do, aliceCli, loadInviteOut, acmeMap): - totalConnectionsBefore = getTotalConnections(aliceCli) - be(aliceCli) - do('load {invite}', expect=loadInviteOut, mapper=acmeMap) - connection = aliceCli.activeWallet.getConnectionInvitation( - acmeMap.get("inviter")) - connection.remoteEndPoint = acmeMap.get(ENDPOINT) - assert totalConnectionsBefore + 1 == getTotalConnections(aliceCli) - return aliceCli - - -def testLoadAcmeInvite(acmeInviteLoadedByAlice): - pass - - -def testShowAcmeConnection( - be, - do, - aliceCli, - acmeInviteLoadedByAlice, - showUnSyncedConnectionOut, - showConnectionWithProofRequestsOut, - acmeMap): - showUnSyncedConnectionWithClaimReqs = \ - showUnSyncedConnectionOut + showConnectionWithProofRequestsOut - be(aliceCli) - - cp = acmeMap.copy() - cp.update(last_synced='') - do('show connection {inviter}', - expect=showUnSyncedConnectionWithClaimReqs, mapper=cp) - - -@pytest.fixture(scope="module") -def aliceAcceptedAcmeJobInvitation(aliceCli, be, do, - unsycedAcceptedInviteWithoutClaimOut, - preRequisite, - aliceRequestedTranscriptClaim, - acmeInviteLoadedByAlice, - acmeMap): - be(aliceCli) - accept_request(be, do, aliceCli, acmeMap, - unsycedAcceptedInviteWithoutClaimOut) - return aliceCli - - -def testAliceAcceptAcmeJobInvitation(aliceAcceptedAcmeJobInvitation): - pass - - -def testSetAttrWithoutContext(be, do, aliceCli): - be(aliceCli) - do("set first_name to Alice", expect=[ - "No context, " - "use below command to " - "set the context"]) - - -def testShowAcmeConnectionAfterInviteAccept( - be, - do, - aliceCli, - acmeMap, - aliceAcceptedAcmeJobInvitation, - 
showAcceptedConnectionWithoutAvailableClaimsOut): - be(aliceCli) - - do("show connection {inviter}", - expect=showAcceptedConnectionWithoutAvailableClaimsOut, - not_expect="Connection (not yet accepted)", - mapper=acmeMap) - - -def testShowProofRequestNotExists(be, do, aliceCli, acmeMap, - proofRequestNotExists): - be(aliceCli) - do("show proof request proof-request-to-show-not-exists", - expect=proofRequestNotExists, - mapper=acmeMap, - within=3) - - -def proofRequestShown(be, do, userCli, agentMap, - proofRequestOut, - proofRequestMap, - claimAttrValueMap): - be(userCli) - - mapping = { - "set-attr-first_name": "", - "set-attr-last_name": "", - "set-attr-phone_number": "" - } - mapping.update(agentMap) - mapping.update(proofRequestMap) - mapping.update(claimAttrValueMap) - do("show proof request {proof-request-to-show}", - expect=proofRequestOut, - mapper=mapping, - within=3) - - -def testShowJobAppProofReqWithShortName(be, do, aliceCli, acmeMap, - showJobAppProofRequestOut, - jobApplicationProofRequestMap, - transcriptClaimAttrValueMap, - aliceAcceptedAcmeJobInvitation): - newAcmeMap = {} - newAcmeMap.update(acmeMap) - newAcmeMap["proof-request-to-show"] = "Job" - - proofRequestShown(be, do, aliceCli, newAcmeMap, - showJobAppProofRequestOut, - jobApplicationProofRequestMap, - transcriptClaimAttrValueMap) - - -def testShowJobAppilcationProofRequest(be, do, aliceCli, acmeMap, - showJobAppProofRequestOut, - jobApplicationProofRequestMap, - transcriptClaimAttrValueMap, - aliceAcceptedAcmeJobInvitation): - proofRequestShown(be, do, aliceCli, acmeMap, - showJobAppProofRequestOut, - jobApplicationProofRequestMap, - transcriptClaimAttrValueMap) - - -@pytest.fixture(scope="module") -def aliceSelfAttestsAttributes(be, do, aliceCli, acmeMap, - showJobAppProofRequestOut, - jobApplicationProofRequestMap, - transcriptClaimAttrValueMap, - aliceAcceptedAcmeJobInvitation): - be(aliceCli) - - mapping = { - "set-attr-first_name": "", - "set-attr-last_name": "", - 
"set-attr-phone_number": "" - } - mapping.update(acmeMap) - mapping.update(jobApplicationProofRequestMap) - mapping.update(transcriptClaimAttrValueMap) - do("show proof request {proof-request-to-show}", - expect=showJobAppProofRequestOut, - mapper=mapping, - within=3) - do("set first_name to Alice") - do("set last_name to Garcia") - do("set phone_number to 123-555-1212") - mapping.update({ - "set-attr-first_name": "Alice", - "set-attr-last_name": "Garcia", - "set-attr-phone_number": "123-555-1212" - }) - return mapping - - -def showProofReq(do, expectMsgs, mapper): - do("show proof request {proof-request-to-show}", - expect=expectMsgs, - mapper=mapper, - within=3) - - -def testShowJobApplicationProofReqAfterSetAttr(be, do, aliceCli, - showJobAppProofRequestOut, - aliceSelfAttestsAttributes): - be(aliceCli) - showProofReq(do, showJobAppProofRequestOut, aliceSelfAttestsAttributes) - - -# def testInvalidSigErrorResponse(be, do, aliceCli, faberMap, -# preRequisite, -# faberInviteSyncedWithoutEndpoint): -# -# msg = { -# f.REQ_ID.nm: getTimeBasedId(), -# TYPE: ACCEPT_INVITE, -# IDENTIFIER: faberMap['target'], -# NONCE: "unknown" -# } -# signature = aliceCli.activeWallet.signMsg(msg, -# aliceCli.activeWallet.defaultId) -# msg[f.SIG.nm] = signature -# link = aliceCli.activeWallet.getLink(faberMap['inviter'], required=True) -# aliceCli.sendToAgent(msg, link) -# -# be(aliceCli) -# do(None, within=3, -# expect=["Signature rejected.". 
-# format(msg)]) -# -# -# def testLinkNotFoundErrorResponse(be, do, aliceCli, faberMap, -# faberInviteSyncedWithoutEndpoint): -# -# msg = { -# f.REQ_ID.nm: getTimeBasedId(), -# TYPE: ACCEPT_INVITE, -# IDENTIFIER: aliceCli.activeWallet.defaultId, -# NONCE: "unknown" -# } -# signature = aliceCli.activeWallet.signMsg(msg, -# aliceCli.activeWallet.defaultId) -# msg[f.SIG.nm] = signature -# link = aliceCli.activeWallet.getLink(faberMap['inviter'], required=True) -# aliceCli.sendToAgent(msg, link) -# -# be(aliceCli) -# do(None, within=3, -# expect=["Nonce not found".format(msg)]) - - -def sendProof(be, do, userCli, agentMap, newAvailableClaims, extraMsgs=None): - be(userCli) - - expectMsgs = [ - "Your Proof {proof-req-to-match} " - "{claim-ver-req-to-show} was " - "received and verified" - ] - if extraMsgs: - expectMsgs.extend(extraMsgs) - mapping = {} - mapping.update(agentMap) - if newAvailableClaims: - mapping['new-available-claims'] = newAvailableClaims - expectMsgs.append("Available Claim(s): {new-available-claims}") - - do("send proof {proof-req-to-match} to {inviter}", - within=7, - expect=expectMsgs, - mapper=mapping) - - -@pytest.fixture(scope="module") -def jobApplicationProofSent(be, do, aliceCli, acmeMap, - aliceAcceptedAcmeJobInvitation, - aliceRequestedTranscriptClaim, - aliceSelfAttestsAttributes): - totalAvailableClaimsBefore = getTotalAvailableClaims(aliceCli) - sendProof(be, do, aliceCli, acmeMap, "Job-Certificate") - assert totalAvailableClaimsBefore + 1 == getTotalAvailableClaims(aliceCli) - - -def testAliceSendClaimProofToAcme(jobApplicationProofSent): - pass - - -# TODO: Need to uncomment below tests once above testAliceSendClaimProofToAcme -# test works correctly all the time and also we start supporting -# building and sending proofs from more than one claim - -def testShowAcmeConnectionAfterClaimSent( - be, - do, - aliceCli, - acmeMap, - jobApplicationProofSent, - showAcceptedConnectionWithAvailableClaimsOut): - be(aliceCli) - mapping = {} - 
mapping.update(acmeMap) - mapping["claims"] = "Job-Certificate" - - acmeMap.update(acmeMap) - do("show connection {inviter}", - expect=showAcceptedConnectionWithAvailableClaimsOut, - mapper=mapping) - - -def testShowJobCertClaim(be, do, aliceCli, jobCertificateClaimMap, - showJobCertClaimOut, - jobApplicationProofSent): - be(aliceCli) - totalSchemasBefore = getTotalSchemas(aliceCli) - do("show claim {name}", - within=3, - expect=showJobCertClaimOut, - mapper=jobCertificateClaimMap) - assert totalSchemasBefore + 1 == getTotalSchemas(aliceCli) - - -@pytest.fixture(scope="module") -def jobCertClaimRequested(be, do, aliceCli, preRequisite, - jobCertificateClaimMap, reqClaimOut1, - jobApplicationProofSent): - - def removeSchema(): - inviter = jobCertificateClaimMap["inviter"] - connections = aliceCli.activeWallet.getMatchingConnections(inviter) - assert len(connections) == 1 - faberId = connections[0].remoteIdentifier - name, version = jobCertificateClaimMap["name"], \ - jobCertificateClaimMap["version"] - aliceCli.activeWallet._schemas.pop((name, version, faberId)) - - # Removing schema to check if it fetches the schema again or not - # removeSchema() - - be(aliceCli) - - totalClaimsRcvdBefore = getTotalClaimsRcvd(aliceCli) - do("request claim {name}", within=7, - expect=reqClaimOut1, - mapper=jobCertificateClaimMap) - assert totalClaimsRcvdBefore + 1 == getTotalClaimsRcvd(aliceCli) - - -def testReqJobCertClaim(jobCertClaimRequested): - pass - - -def testShowAcmeClaimPostReqClaim(be, do, aliceCli, - jobCertClaimRequested, - jobCertificateClaimValueMap, - rcvdJobCertClaimOut): - be(aliceCli) - do("show claim {name}", - expect=rcvdJobCertClaimOut, - mapper=jobCertificateClaimValueMap, - within=3) - - -@pytest.fixture(scope="module") -def thriftInviteLoadedByAlice(be, do, aliceCli, loadInviteOut, thriftMap, - jobCertClaimRequested, - preRequisite): - be(aliceCli) - totalConnectionsBefore = getTotalConnections(aliceCli) - do('load {invite}', expect=loadInviteOut, 
mapper=thriftMap) - assert totalConnectionsBefore + 1 == getTotalConnections(aliceCli) - return aliceCli - - -def testAliceLoadedThriftLoanApplication(thriftInviteLoadedByAlice): - pass - - -@pytest.mark.skip('INDY-86. ' - 'Cannot ping if not synced since will not have public key') -def testPingThriftBeforeSync(be, do, aliceCli, thriftMap, - thriftInviteLoadedByAlice): - be(aliceCli) - do('ping {inviter}', expect=['Ping sent.'], mapper=thriftMap) - - -@pytest.fixture(scope="module") -def aliceAcceptedThriftLoanApplication(be, do, aliceCli, thriftMap, - preRequisite, - thriftInviteLoadedByAlice, - syncedInviteAcceptedOutWithoutClaims): - - connectIfNotAlreadyConnected(do, aliceCli, thriftMap) - accept_request(be, do, aliceCli, thriftMap, - syncedInviteAcceptedOutWithoutClaims) - return aliceCli - - -def testAliceAcceptsThriftLoanApplication(aliceAcceptedThriftLoanApplication): - pass - - -def testAliceShowProofIncludeSingleClaim( - aliceAcceptedThriftLoanApplication, be, do, aliceCli, thriftMap, - showNameProofRequestOut, jobApplicationProofRequestMap, - jobCertClaimAttrValueMap): - mapping = {} - mapping.update(thriftMap) - mapping.update(jobApplicationProofRequestMap) - mapping.update(jobCertClaimAttrValueMap) - mapping['proof-req-to-match'] = 'Name-Proof' - mapping['proof-request-version'] = '0.1' - mapping.update({ - "set-attr-first_name": "Alice", - "set-attr-last_name": "Garcia", - }) - do("show proof request {proof-req-to-match}", - expect=showNameProofRequestOut, - mapper=mapping, - within=3) - - -@pytest.fixture(scope="module") -def bankBasicProofSent(be, do, aliceCli, thriftMap, - aliceAcceptedThriftLoanApplication): - mapping = {} - mapping.update(thriftMap) - mapping["proof-req-to-match"] = "Loan-Application-Basic" - extraMsgs = ["Loan eligibility criteria satisfied, " - "please send another claim 'Loan-Application-KYC'"] - sendProof(be, do, aliceCli, mapping, None, extraMsgs) - - -def testAliceSendBankBasicClaim(bankBasicProofSent): - pass - - 
-@pytest.fixture(scope="module") -def bankKYCProofSent(be, do, aliceCli, thriftMap, - bankBasicProofSent): - mapping = {} - mapping.update(thriftMap) - mapping["proof-req-to-match"] = "Loan-Application-KYC" - sendProof(be, do, aliceCli, mapping, None) - - -def restartCliAndTestWalletRestoration(be, do, cli): - be(cli) - connectIfNotAlreadyConnected(do, cli, {}) - do(None, expect=[ - 'Saved wallet ', - 'Active wallet set to ' - ], within=5) - assert cli._activeWallet is not None - # assert len(cli._activeWallet._connections) == 3 - # assert len(cli._activeWallet.identifiers) == 4 - - -def testAliceSendBankKYCClaim(be, do, aliceCli, susanCli, bankKYCProofSent): - be(aliceCli) - exitFromCli(do) - restartCliAndTestWalletRestoration(be, do, susanCli) - - -def testAliceReqAvailClaimsFromNonExistentConnection( - be, do, aliceCli, bankKYCProofSent, faberMap): - be(aliceCli) - do('request available claims from dummy-connection', mapper=faberMap, - expect=["No matching connection requests found in current wallet"]) - - -def testAliceReqAvailClaimsFromFaber( - be, do, aliceCli, bankKYCProofSent, faberMap): - be(aliceCli) - do('request available claims from {inviter}', - mapper=faberMap, - expect=["Available Claim(s): {claim-to-show}"], - within=3) - - -def testAliceReqAvailClaimsFromAcme( - be, do, aliceCli, bankKYCProofSent, acmeMap): - be(aliceCli) - do('request available claims from {inviter}', - mapper=acmeMap, - expect=["Available Claim(s): Job-Certificate"], - within=3) - - -def testAliceReqAvailClaimsFromThrift( - be, do, aliceCli, bankKYCProofSent, thriftMap): - be(aliceCli) - do('request available claims from {inviter}', - mapper=thriftMap, - expect=["Available Claim(s): No available claims found"], - within=3) - - -def assertReqAvailClaims(be, do, userCli, agentMap, - inviteLoadedExpMsgs, - invitedAcceptedExpMsgs, - connectedToTestExpMsgs=None): - be(userCli) - connectIfNotAlreadyConnected(do, userCli, agentMap, expectMsgs=connectedToTestExpMsgs) - do('load 
{invite}', expect=inviteLoadedExpMsgs, mapper=agentMap) - accept_request(be, do, userCli, agentMap, - invitedAcceptedExpMsgs) - do('request available claims from {inviter}', - mapper=agentMap, - expect=["Available Claim(s): {claims}"], - within=3) - - -def testBobReqAvailClaimsFromAgents( - be, do, bobCli, loadInviteOut, faberMap, acmeMap, thriftMap, - syncedInviteAcceptedWithClaimsOut, - unsycedAcceptedInviteWithoutClaimOut): - userCli = bobCli - - # When new user/cli requests available claims from Faber, - # Transcript claim should be send as available claims - bob_faber_map = dict(faberMap) - bob_faber_map.update({'invite': 'sample/faber-bob-connection-request.indy', - 'nonce': '710b78be79f29fc81335abaa4ee1c5e8'}) - assertReqAvailClaims(be, do, userCli, bob_faber_map, - loadInviteOut, syncedInviteAcceptedWithClaimsOut) - - # When new user/cli requests available claims from Acme, - # No claims should be sent as available claims. 'Job-Certificate' claim - # should be only available when agent has received 'Job-Application' - # proof request and it is verified. - bob_acme_map = dict(acmeMap) - bob_acme_map.update({"claims": "No available claims found", - 'invite': 'sample/acme-bob-connection-request.indy', - 'nonce': '810b78be79f29fc81335abaa4ee1c5e8'}) - assertReqAvailClaims(be, do, userCli, bob_acme_map, - loadInviteOut, unsycedAcceptedInviteWithoutClaimOut) - - # When new user/cli requests available claims from Thrift, - # No claims should be sent as available claims. 
- bob_thrift_map = dict(thriftMap) - bob_thrift_map.update({"claims": "No available claims found", - 'invite': 'sample/thrift-bob-connection-request.indy', - 'nonce': 'ousezru20ic4yz3j074trcgthwlsnfsef'}) - assertReqAvailClaims(be, do, userCli, bob_thrift_map, - loadInviteOut, unsycedAcceptedInviteWithoutClaimOut) diff --git a/indy_client/test/cli/test_tutorial_manual.py b/indy_client/test/cli/test_tutorial_manual.py deleted file mode 100644 index cc612d036..000000000 --- a/indy_client/test/cli/test_tutorial_manual.py +++ /dev/null @@ -1,278 +0,0 @@ -import json -import logging - -import pytest -from plenum.common.constants import PUBKEY - -from anoncreds.protocol.types import SchemaKey, ID -from indy_client.test import waits -from indy_client.test.agent.faber import create_faber, bootstrap_faber, FABER_ID, FABER_VERKEY - -from stp_core.loop.eventually import eventually -from indy_common.roles import Roles -from indy_client.agent.walleted_agent import WalletedAgent -from indy_client.agent.runnable_agent import RunnableAgent -from indy_common.constants import ENDPOINT - -from indy_client.test.agent.acme import create_acme, bootstrap_acme, ACME_ID, ACME_VERKEY -from indy_client.test.agent.helper import buildFaberWallet, buildAcmeWallet, \ - buildThriftWallet -from indy_client.test.agent.thrift import create_thrift, bootstrap_thrift, THRIFT_ID, THRIFT_VERKEY -from indy_client.test.cli.conftest import faberMap, acmeMap, \ - thriftMap -from indy_client.test.cli.helper import newCLI, connect_and_check_output -from indy_client.test.cli.test_tutorial import syncInvite, accept_request, \ - aliceRequestedTranscriptClaim, jobApplicationProofSent, \ - jobCertClaimRequested, bankBasicProofSent, bankKYCProofSent, \ - setPromptAndKeyring - -concerningLogLevels = [logging.WARNING, - logging.ERROR, - logging.CRITICAL] - -whitelist = ["is not connected - message will not be sent immediately." 
- "If this problem does not resolve itself - " - "check your firewall settings", - "with invalid state proof from", - "is neither Trustee nor owner"] - - -class TestWalletedAgent(WalletedAgent, RunnableAgent): - pass - - -@pytest.fixture(scope="module") -def newGuyCLI(looper, client_tdir, tconf): - # FIXME: rework logic of setup because Setup.setupAll does not exist anymore - # Setup(tdir).setupAll() - return newCLI(looper, client_tdir, conf=tconf) - - -@pytest.mark.skip("SOV-569. Not yet implemented") -def testGettingStartedTutorialAgainstSandbox(newGuyCLI, be, do): - be(newGuyCLI) - connect_and_check_output(newGuyCLI.txn_dir) - # TODO finish the entire set of steps - - -@pytest.mark.skipif('sys.platform == "win32"', reason='SOV-384') -def testManual(do, be, poolNodesStarted, poolTxnStewardData, philCli, - nymAddedOut, attrAddedOut, - aliceCLI, newKeyringOut, aliceMap, - tdir, tdirWithClientPoolTxns, syncConnectionOutWithEndpoint, jobCertificateClaimMap, - syncedInviteAcceptedOutWithoutClaims, transcriptClaimMap, - reqClaimOut, reqClaimOut1, susanCLI, susanMap): - eventually.slowFactor = 3 - - # Create steward and add nyms and endpoint attributes of all agents - _, stewardSeed = poolTxnStewardData - be(philCli) - do('new wallet Steward', expect=['New wallet Steward created', - 'Active wallet set to "Steward"']) - - mapper = {'seed': stewardSeed.decode()} - do('new key with seed {seed}', expect=['Key created in wallet Steward'], - mapper=mapper) - connect_and_check_output(do, philCli.txn_dir) - - # Add nym and endpoint for Faber, Acme and Thrift - agentIpAddress = "127.0.0.1" - faberAgentPort = 7777 - acmeAgentPort = 8888 - thriftAgentPort = 9999 - - faberHa = "{}:{}".format(agentIpAddress, faberAgentPort) - acmeHa = "{}:{}".format(agentIpAddress, acmeAgentPort) - thriftHa = "{}:{}".format(agentIpAddress, thriftAgentPort) - faberId = FABER_ID - acmeId = ACME_ID - thriftId = THRIFT_ID - faberVerkey = FABER_VERKEY - acmeVerkey = ACME_VERKEY - thriftVerkey = 
THRIFT_VERKEY - faberPk = '5hmMA64DDQz5NzGJNVtRzNwpkZxktNQds21q3Wxxa62z' - acmePk = 'C5eqjU7NMVMGGfGfx2ubvX5H9X346bQt5qeziVAo3naQ' - thriftPk = 'AGBjYvyM3SFnoiDGAEzkSLHvqyzVkXeMZfKDvdpEsC2x' - for nym, verkey, ha, pk in [(faberId, faberVerkey, faberHa, faberPk), - (acmeId, acmeVerkey, acmeHa, acmePk), - (thriftId, thriftVerkey, thriftHa, thriftPk)]: - m = {'remote': nym, 'remote-verkey': verkey, - 'endpoint': json.dumps({ENDPOINT: {'ha': ha, PUBKEY: pk}})} - do('send NYM dest={{remote}} role={role}'.format( - role=Roles.TRUST_ANCHOR.name), within=5, expect=nymAddedOut, mapper=m) - do('send ATTRIB dest={remote} raw={endpoint}', within=5, - expect=attrAddedOut, mapper=m) - do('send NYM dest={{remote}} role={role} verkey={{remote-verkey}}'.format( - role=Roles.TRUST_ANCHOR.name), within=5, expect=nymAddedOut, mapper=m) - - # Start Faber Agent and Acme Agent - - fMap = faberMap(agentIpAddress, faberAgentPort) - aMap = acmeMap(agentIpAddress, acmeAgentPort) - tMap = thriftMap(agentIpAddress, thriftAgentPort) - - agentParams = [ - (create_faber, "Faber College", faberAgentPort, - buildFaberWallet, bootstrap_faber), - (create_acme, "Acme Corp", acmeAgentPort, - buildAcmeWallet, bootstrap_acme), - (create_thrift, "Thrift Bank", thriftAgentPort, - buildThriftWallet, bootstrap_thrift) - ] - - for create_agent_fuc, agentName, agentPort, buildAgentWalletFunc, bootstrap_func in agentParams: - agent = create_agent_fuc(name=agentName, wallet=buildAgentWalletFunc(), - base_dir_path=tdirWithClientPoolTxns, port=agentPort) - RunnableAgent.run_agent( - agent, bootstrap=bootstrap_func(agent), looper=philCli.looper) - - for p in philCli.looper.prodables: - if p.name == 'Faber College': - faberAgent = p - if p.name == 'Acme Corp': - acmeAgent = p - if p.name == 'Thrift Bank': - thriftAgent = p - - async def checkTranscriptWritten(): - faberId = faberAgent.wallet.defaultId - schemaId = ID(SchemaKey("Transcript", "1.2", faberId)) - schema = await 
faberAgent.issuer.wallet.getSchema(schemaId) - assert schema - assert schema.seqId - - issuerPublicKey = await faberAgent.issuer.wallet.getPublicKey(schemaId) - assert issuerPublicKey # TODO isinstance(issuerPublicKey, PublicKey) - - async def checkJobCertWritten(): - acmeId = acmeAgent.wallet.defaultId - schemaId = ID(SchemaKey("Job-Certificate", "0.2", acmeId)) - schema = await acmeAgent.issuer.wallet.getSchema(schemaId) - assert schema - assert schema.seqId - - issuerPublicKey = await acmeAgent.issuer.wallet.getPublicKey(schemaId) - assert issuerPublicKey - assert issuerPublicKey.seqId - - timeout = waits.expectedTranscriptWritten() - philCli.looper.run(eventually(checkTranscriptWritten, timeout=timeout)) - timeout = waits.expectedJobCertWritten() - philCli.looper.run(eventually(checkJobCertWritten, timeout=timeout)) - - # Defining inner method for closures - def executeGstFlow( - name, - userCLI, - userMap, - be, - do, - fMap, - aMap, - jobCertificateClaimMap, - newKeyringOut, - reqClaimOut, - reqClaimOut1, - syncConnectionOutWithEndpoint, - syncedInviteAcceptedOutWithoutClaims, - tMap, - transcriptClaimMap): - - async def getPublicKey(wallet, schemaId): - return await wallet.getPublicKey(schemaId) - - async def getClaim(schemaId): - return await userCLI.agent.prover.wallet.getClaimSignature(schemaId) - - # Start User cli - - be(userCLI) - setPromptAndKeyring(do, name, newKeyringOut, userMap) - connect_and_check_output(do, philCli.txn_dir) - # Accept faber - do('load sample/faber-request.indy') - syncInvite(be, do, userCLI, syncConnectionOutWithEndpoint, fMap) - do('show connection faber') - accept_request(be, do, userCLI, fMap, - syncedInviteAcceptedOutWithoutClaims) - # Request claim - do('show claim Transcript') - aliceRequestedTranscriptClaim(be, do, userCLI, transcriptClaimMap, - reqClaimOut, - None, # Passing None since its not used - None) # Passing None since its not used - - faberSchemaId = ID(SchemaKey('Transcript', '1.2', fMap['remote'])) - 
faberIssuerPublicKey = userCLI.looper.run( - getPublicKey(faberAgent.issuer.wallet, faberSchemaId)) - userFaberIssuerPublicKey = userCLI.looper.run( - getPublicKey(userCLI.agent.prover.wallet, faberSchemaId)) - assert faberIssuerPublicKey == userFaberIssuerPublicKey - - do('show claim Transcript') - assert userCLI.looper.run(getClaim(faberSchemaId)) - - # Accept acme - do('load sample/acme-job-application.indy') - syncInvite(be, do, userCLI, syncConnectionOutWithEndpoint, aMap) - accept_request(be, do, userCLI, aMap, - syncedInviteAcceptedOutWithoutClaims) - # Send claim - do('show claim request Job-Application') - do('set first_name to Alice') - do('set last_name to Garcia') - do('set phone_number to 123-45-6789') - do('show claim request Job-Application') - # Passing some args as None since they are not used in the method - jobApplicationProofSent(be, do, userCLI, aMap, None, None, None) - do('show claim Job-Certificate') - # Request new available claims Job-Certificate - jobCertClaimRequested(be, do, userCLI, None, - jobCertificateClaimMap, reqClaimOut1, None) - - acmeSchemaId = ID(SchemaKey('Job-Certificate', '0.2', aMap['remote'])) - acmeIssuerPublicKey = userCLI.looper.run(getPublicKey( - acmeAgent.issuer.wallet, acmeSchemaId)) - userAcmeIssuerPublicKey = userCLI.looper.run(getPublicKey( - userCLI.agent.prover.wallet, acmeSchemaId)) - assert acmeIssuerPublicKey == userAcmeIssuerPublicKey - - do('show claim Job-Certificate') - assert userCLI.looper.run(getClaim(acmeSchemaId)) - - # Accept thrift - do('load sample/thrift-loan-application.indy') - accept_request(be, do, userCLI, tMap, - syncedInviteAcceptedOutWithoutClaims) - # Send proofs - bankBasicProofSent(be, do, userCLI, tMap, None) - - thriftAcmeIssuerPublicKey = userCLI.looper.run(getPublicKey( - thriftAgent.issuer.wallet, acmeSchemaId)) - assert acmeIssuerPublicKey == thriftAcmeIssuerPublicKey - passed = False - try: - bankKYCProofSent(be, do, userCLI, tMap, None) - passed = True - except BaseException: 
- thriftFaberIssuerPublicKey = userCLI.looper.run(getPublicKey( - thriftAgent.issuer.wallet, faberSchemaId)) - assert faberIssuerPublicKey == thriftFaberIssuerPublicKey - assert passed - - executeGstFlow("Alice", aliceCLI, aliceMap, be, do, fMap, - aMap, jobCertificateClaimMap, newKeyringOut, reqClaimOut, - reqClaimOut1, syncConnectionOutWithEndpoint, - syncedInviteAcceptedOutWithoutClaims, tMap, - transcriptClaimMap) - - aliceCLI.looper.runFor(3) - - # Same flow is executed by different cli - # What is the purpose of this test? This should not work because its a different person - # with different data or it is the same person but from a different state - # executeGstFlow("Susan", susanCLI, susanMap, be, do, fMap, - # aMap, jobCertificateClaimMap, newKeyringOut, reqClaimOut, - # reqClaimOut1, syncConnectionOutWithEndpoint, - # syncedInviteAcceptedOutWithoutClaims, tMap, - # transcriptClaimMap) diff --git a/indy_client/test/cli/test_z_accept_invitation_hex_as_pubkey.py b/indy_client/test/cli/test_z_accept_invitation_hex_as_pubkey.py deleted file mode 100644 index a8bc4d462..000000000 --- a/indy_client/test/cli/test_z_accept_invitation_hex_as_pubkey.py +++ /dev/null @@ -1,36 +0,0 @@ -import json - -import pytest - -# noinspection PyUnresolvedReferences -from indy_client.test.cli.conftest \ - import faberMap as faberMapWithoutEndpointPubkey -# noinspection PyUnresolvedReferences -from indy_client.test.cli.test_tutorial import alice_accepted_faber_request, \ - aliceCli, preRequisite, faberCli, acmeCli, thriftCli, faberWithEndpointAdded, acmeWithEndpointAdded, \ - thriftWithEndpointAdded, walletCreatedForTestEnv, \ - faberInviteSyncedWithEndpoint, faberInviteSyncedWithoutEndpoint, \ - faberInviteLoadedByAlice, accept_request -from indy_common.constants import ENDPOINT -from plenum.common.constants import PUBKEY -from plenum.common.util import cryptonymToHex - -whitelist = ['Exception in callback ensureReqCompleted'] - - -@pytest.fixture(scope="module") -def 
faberMap(faberMapWithoutEndpointPubkey): - fbrMap = faberMapWithoutEndpointPubkey - endpointAttr = json.loads(fbrMap["endpointAttr"]) - base58Key = '5hmMA64DDQz5NzGJNVtRzNwpkZxktNQds21q3Wxxa62z' - hexKey = cryptonymToHex(base58Key).decode() - endpointAttr[ENDPOINT][PUBKEY] = hexKey - fbrMap["endpointAttr"] = json.dumps(endpointAttr) - return fbrMap - - -def test_request_not_accepted_if_agent_was_added_using_hex_as_pubkey( - be, do, aliceCli, faberMap, preRequisite, - syncedInviteAcceptedWithClaimsOut, faberInviteSyncedWithEndpoint): - accept_request(be, do, aliceCli, faberMap, - expect='Exception in callback ensureReqCompleted') diff --git a/indy_client/test/cli/tmp_wallet_restore_issue b/indy_client/test/cli/tmp_wallet_restore_issue deleted file mode 100644 index b9eeece4c..000000000 --- a/indy_client/test/cli/tmp_wallet_restore_issue +++ /dev/null @@ -1 +0,0 @@ -{"didMethods": {"py/object": "plenum.common.did_method.DidMethods", "default": {"py/id": 61}, "d": {"indy": {"py/object": "plenum.common.did_method.DidMethod", "name": "indy", "signerConstructor": {"py/type": "plenum.common.signer_did.DidSigner"}, "pattern": "did:indy:"}}}, "_nodes": {}, "_pending": {"py/reduce": [{"py/type": "collections.deque"}, {"py/tuple": [[]]}, null, null, null]}, "defaultId": "CzkavE58zgX7rUMrzSinLr", "_upgrades": {}, "_name": "Default", "aliasesToIds": {}, "_trustAnchored": {"ULtgFQJe6bjiFbs7ke3NJD": {"py/object": "indy_common.identity.Identity", "identity": {"py/object": "plenum.common.signer_did.DidIdentity", "abbreviated": true, "_identifier": "ULtgFQJe6bjiFbs7ke3NJD", "_verkey": "5kh3FB4H3NKq7tUDqeqHc1"}, "last_synced": null, "seqNo": null, "trustAnchor": null, "_role": "101"}, "CzkavE58zgX7rUMrzSinLr": {"py/object": "indy_common.identity.Identity", "identity": {"py/object": "plenum.common.signer_did.DidIdentity", "abbreviated": true, "_identifier": "CzkavE58zgX7rUMrzSinLr", "_verkey": "WjXEvZ9xj4Tz9sLtzf7HVP"}, "last_synced": null, "seqNo": null, "trustAnchor": null, "_role": 
"101"}}, "_pconfigs": {}, "py/object": "indy_client.client.wallet.wallet.Wallet", "ids": {"ULtgFQJe6bjiFbs7ke3NJD": {"py/object": "plenum.client.wallet.IdData", "py/newargs": {"py/tuple": [{"py/object": "plenum.common.signer_did.DidSigner", "sk": {"py/id": 75}, "naclSigner": {"py/object": "stp_core.crypto.nacl_wrappers.Signer", "verhex": {"py/b64": "ZGQ2ZGI4ZWI5MWNmZGNlNTBmMWE0ODhkOTUzMzI1ZGEyNjdlMzYyMzE5N2EwN2Q0MTQ4YjI1ZjM3\nZWZjMjg3ZQ==\n"}, "keyhex": {"py/b64": "NDY2MTYyNjU3MjMwMzAzMDMwMzAzMDMwMzAzMDMwMzAzMDMwMzAzMDMwMzAzMDMwMzAzMDMwMzAz\nMDMwMzAzMA==\n"}, "key": {"py/object": "stp_core.crypto.nacl_wrappers.SigningKey", "verify_key": {"py/object": "stp_core.crypto.nacl_wrappers.VerifyKey", "_key": {"py/b64": "3W2465HP3OUPGkiNlTMl2iZ+NiMZegfUFIsl8378KH4=\n"}}, "_signing_key": {"py/b64": "RmFiZXIwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDDdbbjrkc/c5Q8aSI2VMyXaJn42Ixl6B9QU\niyXzfvwofg==\n"}, "_seed": {"py/b64": "RmFiZXIwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDA=\n"}}, "verraw": {"py/b64": "3W2465HP3OUPGkiNlTMl2iZ+NiMZegfUFIsl8378KH4=\n"}, "keyraw": {"py/b64": "RmFiZXIwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDA=\n"}}, "_identifier": "ULtgFQJe6bjiFbs7ke3NJD", "_alias": null, "abbreviated": true, "seed": {"py/b64": "RmFiZXIwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDA=\n"}, "_verkey": "5kh3FB4H3NKq7tUDqeqHc1"}, 1502387001630568]}, "py/seq": [{"py/id": 73}, 1502387001630568]}, "Th7MpTaRZVRYnPiabds81Y": {"py/object": "plenum.client.wallet.IdData", "py/newargs": {"py/tuple": [{"py/object": "plenum.common.signer_did.DidSigner", "sk": {"py/id": 70}, "naclSigner": {"py/object": "stp_core.crypto.nacl_wrappers.Signer", "verhex": {"py/b64": "ZDgyNzQ2NThkMjNiYzJlNDE5NGQxMjMyZmZmNzBlMmIzNDRiYWY2MjEwNjdlYjZhYTkyYjJmY2Vm\nMGM5NGU4ZA==\n"}, "keyhex": {"py/b64": "MzAzMDMwMzAzMDMwMzAzMDMwMzAzMDMwMzAzMDMwMzAzMDMwMzAzMDMwMzAzMDMwNTM3NDY1Nzc2\nMTcyNjQzMQ==\n"}, "key": {"py/object": "stp_core.crypto.nacl_wrappers.SigningKey", "verify_key": {"py/object": "stp_core.crypto.nacl_wrappers.VerifyKey", "_key": {"py/b64": 
"2CdGWNI7wuQZTRIy//cOKzRLr2IQZ+tqqSsvzvDJTo0=\n"}}, "_signing_key": {"py/b64": "MDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwU3Rld2FyZDHYJ0ZY0jvC5BlNEjL/9w4rNEuvYhBn62qp\nKy/O8MlOjQ==\n"}, "_seed": {"py/b64": "MDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwU3Rld2FyZDE=\n"}}, "verraw": {"py/b64": "2CdGWNI7wuQZTRIy//cOKzRLr2IQZ+tqqSsvzvDJTo0=\n"}, "keyraw": {"py/b64": "MDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwU3Rld2FyZDE=\n"}}, "_identifier": "Th7MpTaRZVRYnPiabds81Y", "_alias": null, "abbreviated": true, "seed": {"py/b64": "MDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwU3Rld2FyZDE=\n"}, "_verkey": "7TYfekw4GUagBnBVCqPjiC"}, 1502385714960301]}, "py/seq": [{"py/id": 68}, 1502385714960301]}, "CzkavE58zgX7rUMrzSinLr": {"py/object": "plenum.client.wallet.IdData", "py/newargs": {"py/tuple": [{"py/object": "plenum.common.signer_did.DidSigner", "sk": {"py/id": 65}, "naclSigner": {"py/object": "stp_core.crypto.nacl_wrappers.Signer", "verhex": {"py/b64": "NjEyNGM3YmQxZmVjYzVkYmI4ZDYyODNkOTljYThjYWJmMGM4ZTU0NDkwMzE1NTM4OTI5NmJhNmE3\nMjYxYTJkZQ==\n"}, "keyhex": {"py/b64": "NDE2MzZkNjUzMDMwMzAzMDMwMzAzMDMwMzAzMDMwMzAzMDMwMzAzMDMwMzAzMDMwMzAzMDMwMzAz\nMDMwMzAzMA==\n"}, "key": {"py/object": "stp_core.crypto.nacl_wrappers.SigningKey", "verify_key": {"py/object": "stp_core.crypto.nacl_wrappers.VerifyKey", "_key": {"py/b64": "YSTHvR/sxdu41ig9mcqMq/DI5USQMVU4kpa6anJhot4=\n"}}, "_signing_key": {"py/b64": "QWNtZTAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDBhJMe9H+zF27jWKD2Zyoyr8MjlRJAxVTiS\nlrpqcmGi3g==\n"}, "_seed": {"py/b64": "QWNtZTAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDA=\n"}}, "verraw": {"py/b64": "YSTHvR/sxdu41ig9mcqMq/DI5USQMVU4kpa6anJhot4=\n"}, "keyraw": {"py/b64": "QWNtZTAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDA=\n"}}, "_identifier": "CzkavE58zgX7rUMrzSinLr", "_alias": null, "abbreviated": true, "seed": {"py/b64": "QWNtZTAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDA=\n"}, "_verkey": "WjXEvZ9xj4Tz9sLtzf7HVP"}, 1502387801276699]}, "py/seq": [{"py/id": 63}, 1502387801276699]}}, "_attributes": {"json://{\"py/tuple\": [\"73563\", 
\"CzkavE58zgX7rUMrzSinLr\", \"CzkavE58zgX7rUMrzSinLr\"]}": {"py/object": "indy_client.client.wallet.attribute.Attribute", "value": "{\"endpoint\": {\"ha\": \"127.0.0.1:6666\"}}", "origin": "CzkavE58zgX7rUMrzSinLr", "encKey": null, "name": "73563", "seqNo": null, "dest": "CzkavE58zgX7rUMrzSinLr", "ledgerStore": {"py/id": 2}}, "json://{\"py/tuple\": [\"endpoint\", null, \"CzkavE58zgX7rUMrzSinLr\"]}": {"py/object": "indy_client.client.wallet.attribute.Attribute", "value": null, "origin": null, "encKey": null, "name": "endpoint", "seqNo": null, "dest": "CzkavE58zgX7rUMrzSinLr", "ledgerStore": {"py/id": 2}}, "json://{\"py/tuple\": [\"endpoint\", null, \"ULtgFQJe6bjiFbs7ke3NJD\"]}": {"py/object": "indy_client.client.wallet.attribute.Attribute", "value": null, "origin": null, "encKey": null, "name": "endpoint", "seqNo": null, "dest": "ULtgFQJe6bjiFbs7ke3NJD", "ledgerStore": {"py/id": 2}}, "json://{\"py/tuple\": [\"a0641\", \"CzkavE58zgX7rUMrzSinLr\", \"ULtgFQJe6bjiFbs7ke3NJD\"]}": {"py/object": "indy_client.client.wallet.attribute.Attribute", "value": "{\"endpoint\": {\"ha\": \"127.0.0.1:5555\", \"pubkey\": \"5hmMA64DDQz5NzGJNVtRzNwpkZxktNQds21q3Wxxa62z\"}}", "origin": "CzkavE58zgX7rUMrzSinLr", "encKey": null, "name": "a0641", "seqNo": null, "dest": "ULtgFQJe6bjiFbs7ke3NJD", "ledgerStore": {"py/id": 2}}, "json://{\"py/tuple\": [\"45884\", \"ULtgFQJe6bjiFbs7ke3NJD\", \"ULtgFQJe6bjiFbs7ke3NJD\"]}": {"py/object": "indy_client.client.wallet.attribute.Attribute", "value": "{\"endpoint\": {\"ha\": \"10.0.0.202:5555\", \"pubkey\": \"5hmMA64DDQz5NzGJNVtRzNwpkZxktNQds21q3Wxxa62z\"}}", "origin": "ULtgFQJe6bjiFbs7ke3NJD", "encKey": null, "name": "45884", "seqNo": null, "dest": "ULtgFQJe6bjiFbs7ke3NJD", "ledgerStore": {"py/object": "indy_client.client.wallet.attribute.LedgerStore", "py/enumvalue": 4}}, "json://{\"py/tuple\": [\"917b5\", \"CzkavE58zgX7rUMrzSinLr\", \"CzkavE58zgX7rUMrzSinLr\"]}": {"py/object": "indy_client.client.wallet.attribute.Attribute", "value": "{\"endpoint\": 
{\"ha\": \"10.0.0.203:6666\"}}", "origin": "CzkavE58zgX7rUMrzSinLr", "encKey": null, "name": "917b5", "seqNo": null, "dest": "CzkavE58zgX7rUMrzSinLr", "ledgerStore": {"py/id": 2}}, "json://{\"py/tuple\": [\"4f3d1\", \"ULtgFQJe6bjiFbs7ke3NJD\", \"ULtgFQJe6bjiFbs7ke3NJD\"]}": {"py/object": "indy_client.client.wallet.attribute.Attribute", "value": "{\"endpoint\": {\"ha\": \"127.0.0.1:5555\", \"pubkey\": \"5hmMA64DDQz5NzGJNVtRzNwpkZxktNQds21q3Wxxa62z\"}}", "origin": "ULtgFQJe6bjiFbs7ke3NJD", "encKey": null, "name": "4f3d1", "seqNo": null, "dest": "ULtgFQJe6bjiFbs7ke3NJD", "ledgerStore": {"py/id": 2}}, "json://{\"py/tuple\": [\"881bb\", \"CzkavE58zgX7rUMrzSinLr\", \"CzkavE58zgX7rUMrzSinLr\"]}": {"py/object": "indy_client.client.wallet.attribute.Attribute", "value": "{\"endpoint\": {\"ha\": \"127.0.0.1:6666\", \"pubkey\": \"C5eqjU7NMVMGGfGfx2ub- vX5H9X346bQt5qeziVAo3naQ\"}}", "origin": "CzkavE58zgX7rUMrzSinLr", "encKey": null, "name": "881bb", "seqNo": null, "dest": "CzkavE58zgX7rUMrzSinLr", "ledgerStore": {"py/id": 2}}, "json://{\"py/tuple\": [\"6db7a\", \"CzkavE58zgX7rUMrzSinLr\", \"CzkavE58zgX7rUMrzSinLr\"]}": {"py/object": "indy_client.client.wallet.attribute.Attribute", "value": "{\"endpoint\": {\"ha\": \"10.0.0.203:6666\", \"pubkey\": \"C5eqjU7NMVMGGfGfx2ub- vX5H9X346bQt5qeziVAo3naQ\"}}", "origin": "CzkavE58zgX7rUMrzSinLr", "encKey": null, "name": "6db7a", "seqNo": null, "dest": "CzkavE58zgX7rUMrzSinLr", "ledgerStore": {"py/id": 2}}}, "idsToSigners": {"ULtgFQJe6bjiFbs7ke3NJD": {"py/id": 73}, "JYeHd6Zn3zFo1UbNVBd6U1": {"py/object": "plenum.common.signer_did.DidSigner", "sk": {"py/id": 79}, "naclSigner": {"py/object": "stp_core.crypto.nacl_wrappers.Signer", "verhex": {"py/b64": "OGUxNjY0OGM3Y2IzYWMxNDMzNTc2MmM1YzVhYTkwZDJkM2I1ODY2NGExOTkyNGM2YWUzNmM1ODQ1\nZDE1MTg4OA==\n"}, "keyhex": {"py/b64": "ZTFlZmRjYzRmNmJkN2M4ZjhmMTc2MGFiNTkwY2EzMjllNGJjYTYzNTU4M2E5ZDg5NjRkNDljNTJh\nYTRhM2Y4ZA==\n"}, "key": {"py/object": "stp_core.crypto.nacl_wrappers.SigningKey", "verify_key": 
{"py/object": "stp_core.crypto.nacl_wrappers.VerifyKey", "_key": {"py/b64": "jhZkjHyzrBQzV2LFxaqQ0tO1hmShmSTGrjbFhF0VGIg=\n"}}, "_signing_key": {"py/b64": "4e/cxPa9fI+PF2CrWQyjKeS8pjVYOp2JZNScUqpKP42OFmSMfLOsFDNXYsXFqpDS07WGZKGZJMau\nNsWEXRUYiA==\n"}, "_seed": {"py/b64": "4e/cxPa9fI+PF2CrWQyjKeS8pjVYOp2JZNScUqpKP40=\n"}}, "verraw": {"py/b64": "jhZkjHyzrBQzV2LFxaqQ0tO1hmShmSTGrjbFhF0VGIg=\n"}, "keyraw": {"py/b64": "4e/cxPa9fI+PF2CrWQyjKeS8pjVYOp2JZNScUqpKP40=\n"}}, "_identifier": "JYeHd6Zn3zFo1UbNVBd6U1", "_alias": null, "abbreviated": true, "seed": {"py/b64": "4e/cxPa9fI+PF2CrWQyjKeS8pjVYOp2JZNScUqpKP40=\n"}, "_verkey": "T9HBHeNSXBXZCBB8GrgjFm"}, "Th7MpTaRZVRYnPiabds81Y": {"py/id": 68}, "Siga5PyLFTZdpupPUXogjt": {"py/object": "plenum.common.signer_did.DidSigner", "sk": {"py/id": 83}, "naclSigner": {"py/object": "stp_core.crypto.nacl_wrappers.Signer", "verhex": {"py/b64": "ZDA0NjU4ZGNhMDQ4MjYyNjQ0MmNhZGFiYzZjZWNlMjM0ODc2YTY0MzhkNzBlNWIzNGQ4MTlmZmFj\nODY2MmVlZA==\n"}, "keyhex": {"py/b64": "YWJlZmEzOGNmYjVkYmYzMjljNTZjMjI4YmEzNjAzMzZjNmY2YWJjZWE0MjZlMDFlMTVkZTNiZGU1\nNjUzYjUxMQ==\n"}, "key": {"py/object": "stp_core.crypto.nacl_wrappers.SigningKey", "verify_key": {"py/object": "stp_core.crypto.nacl_wrappers.VerifyKey", "_key": {"py/b64": "0EZY3KBIJiZELK2rxs7OI0h2pkONcOWzTYGf+shmLu0=\n"}}, "_signing_key": {"py/b64": "q++jjPtdvzKcVsIoujYDNsb2q86kJuAeFd473lZTtRHQRljcoEgmJkQsravGzs4jSHamQ41w5bNN\ngZ/6yGYu7Q==\n"}, "_seed": {"py/b64": "q++jjPtdvzKcVsIoujYDNsb2q86kJuAeFd473lZTtRE=\n"}}, "verraw": {"py/b64": "0EZY3KBIJiZELK2rxs7OI0h2pkONcOWzTYGf+shmLu0=\n"}, "keyraw": {"py/b64": "q++jjPtdvzKcVsIoujYDNsb2q86kJuAeFd473lZTtRE=\n"}}, "_identifier": "Siga5PyLFTZdpupPUXogjt", "_alias": null, "abbreviated": true, "seed": {"py/b64": "q++jjPtdvzKcVsIoujYDNsb2q86kJuAeFd473lZTtRE=\n"}, "_verkey": "9wzQSoSNbLwRcNz9JkBFME"}, "CzkavE58zgX7rUMrzSinLr": {"py/id": 63}}, "_prepared": {"json://{\"py/tuple\": [\"ULtgFQJe6bjiFbs7ke3NJD\", 1502387001630568]}": {"py/tuple": [{"py/object": 
"indy_common.types.Request", "py/state": {"digest": "cfbec5716f611b627475f6a9ab69415630a3244a535f9cadd4fe196705bb3e09", "reqId": 1502387001630568, "signature": "SC1JYwuXnpruhGefBZKCyBvCESmAP9vx8SfUWPPmoR1NH2LDHQYj5XV78zBMCEFAXwuZdHmkpEH6Lu35kuxzMCZ", "identifier": "ULtgFQJe6bjiFbs7ke3NJD", "operation": {"type": "104", "raw": "endpoint", "dest": "CzkavE58zgX7rUMrzSinLr"}}}, {"py/tuple": ["endpoint", null, "CzkavE58zgX7rUMrzSinLr"]}]}, "json://{\"py/tuple\": [\"ULtgFQJe6bjiFbs7ke3NJD\", 1502385870898879]}": {"py/tuple": [{"py/object": "indy_common.types.Request", "py/state": {"digest": "f2c1c5733913f633e7fc58d5b2ec52859979149cdd06e10b578d162c01d25862", "reqId": 1502385870898879, "signature": "5A2BxVk8EowuBT9TKYKWhck9Ng3Bk7Cwo8ego36aWEc4NNx5hNeXwzu37q1FhvmJwZ6fDrRnKvepjCQHXLAL1DbN", "identifier": "ULtgFQJe6bjiFbs7ke3NJD", "operation": {"type": "100", "raw": "{\"endpoint\": {\"ha\": \"127.0.0.1:5555\", \"pubkey\": \"5hmMA64DDQz5NzGJNVtRzNwpkZxktNQds21q3Wxxa62z\"}}", "dest": "ULtgFQJe6bjiFbs7ke3NJD"}}}, {"py/tuple": ["4f3d1", "ULtgFQJe6bjiFbs7ke3NJD", "ULtgFQJe6bjiFbs7ke3NJD"]}]}, "json://{\"py/tuple\": [\"CzkavE58zgX7rUMrzSinLr\", 1502387071423304]}": {"py/tuple": [{"py/object": "indy_common.types.Request", "py/state": {"digest": "45d33c68b9aa557d7b1300656905a446d9e1924e51f55a07e125396202024e37", "reqId": 1502387071423304, "signature": "2dZMNC37ihe7EXjVxk6s1FENZ6Qr15i6TTk9vyT4kEsYig1D3QQLACrgz88vFwoaiycameMseateEEFPhxvFiarZ", "identifier": "CzkavE58zgX7rUMrzSinLr", "operation": {"type": "105", "dest": "CzkavE58zgX7rUMrzSinLr"}}}, null]}, "json://{\"py/tuple\": [\"ULtgFQJe6bjiFbs7ke3NJD\", 1502386049567645]}": {"py/tuple": [{"py/object": "indy_common.types.Request", "py/state": {"digest": "079b8ed2360cc6a2c686587a613f8c1109b1941154bcee5fcece4ad2464d7f40", "reqId": 1502386049567645, "signature": "2RNPTPq3K2rNb4MFmoYJEza3n7tmLaz696nRxEegASoyYdJDKNGvCAJLCCFYJvAxAh7Y66duQ7fX2byZWE3EWZAc", "identifier": "ULtgFQJe6bjiFbs7ke3NJD", "operation": {"type": "104", "raw": 
"endpoint", "dest": "ULtgFQJe6bjiFbs7ke3NJD"}}}, {"py/tuple": ["endpoint", null, "ULtgFQJe6bjiFbs7ke3NJD"]}]}, "json://{\"py/tuple\": [\"CzkavE58zgX7rUMrzSinLr\", 1502387057277920]}": {"py/tuple": [{"py/object": "indy_common.types.Request", "py/state": {"digest": "1cefcc959335319b7f9c5527e5f4dcbb49a7456ebed2c24fd33cc06ab045bbcb", "reqId": 1502387057277920, "signature": "2yzWAuxduX6dQfw1nJFafU6XTeFYDXqWnxyNrdfFM8cammiz4P6YpoKbWbq7kDVA22FS1Hz13DfLdJ5PEnXJZehs", "identifier": "CzkavE58zgX7rUMrzSinLr", "operation": {"type": "100", "raw": "{\"endpoint\": {\"ha\": \"10.0.0.203:6666\"}}", "dest": "CzkavE58zgX7rUMrzSinLr"}}}, {"py/tuple": ["917b5", "CzkavE58zgX7rUMrzSinLr", "CzkavE58zgX7rUMrzSinLr"]}]}, "json://{\"py/tuple\": [\"CzkavE58zgX7rUMrzSinLr\", 1502385848758045]}": {"py/tuple": [{"py/object": "indy_common.types.Request", "py/state": {"digest": "6d45682720c304d825573cd6b9c60bc47d95eda3a70987a4fc410bd6afeadc44", "reqId": 1502385848758045, "signature": "3NLdp4qD15N2CetMjGtrmpQWDKx6ycVVaRtLZ3gKMmzLzxBujvsqQutA7yts74pJPtRodaEPy2w5FvpS7yBzNsr", "identifier": "CzkavE58zgX7rUMrzSinLr", "operation": {"type": "100", "raw": "{\"endpoint\": {\"ha\": \"127.0.0.1:5555\", \"pubkey\": \"5hmMA64DDQz5NzGJNVtRzNwpkZxktNQds21q3Wxxa62z\"}}", "dest": "ULtgFQJe6bjiFbs7ke3NJD"}}}, {"py/tuple": ["a0641", "CzkavE58zgX7rUMrzSinLr", "ULtgFQJe6bjiFbs7ke3NJD"]}]}, "json://{\"py/tuple\": [\"CzkavE58zgX7rUMrzSinLr\", 1502387071424365]}": {"py/tuple": [{"py/object": "indy_common.types.Request", "py/state": {"digest": "0036694829240ca13b5a9108c2f16fe60213c30f054c9b335b35b95d14dec31c", "reqId": 1502387071424365, "signature": "4bf8VGMVGq6JtWeFXdvD4NhdAMseuZdxnvvxw6QaodiGKA54yRxfShbDG9ff2xrDUQHCPJ1PuYMwPmDanx1gpSoB", "identifier": "CzkavE58zgX7rUMrzSinLr", "operation": {"type": "104", "raw": "endpoint", "dest": "CzkavE58zgX7rUMrzSinLr"}}}, {"py/tuple": ["endpoint", null, "CzkavE58zgX7rUMrzSinLr"]}]}, "json://{\"py/tuple\": [\"ULtgFQJe6bjiFbs7ke3NJD\", 1502386144574978]}": {"py/tuple": [{"py/object": 
"indy_common.types.Request", "py/state": {"digest": "ca879f3ce42853d26300fae03dc39a5c27cc3e14e8f1578418729de3d19bcf4b", "reqId": 1502386144574978, "signature": "2u2C32ww9GkdBj78haTzKLvgaU4PqJ2N8VYmmoALSpZT5pTgvaGjgFniRPHd1DSwv8MVyt4vX8m26SgL1zDJVJVY", "identifier": "ULtgFQJe6bjiFbs7ke3NJD", "operation": {"type": "104", "raw": "endpoint", "dest": "CzkavE58zgX7rUMrzSinLr"}}}, {"py/tuple": ["endpoint", null, "CzkavE58zgX7rUMrzSinLr"]}]}, "json://{\"py/tuple\": [\"Th7MpTaRZVRYnPiabds81Y\", 1502385611413187]}": {"py/tuple": [{"py/object": "indy_common.types.Request", "py/state": {"digest": "949dd184333687d9e81d853dc0d981bfc87f56a1757fd4556e2ad2983573f0ea", "reqId": 1502385611413187, "signature": "5vpkHbSFdCJrRbFXe6sAKW8S1zB2QXqisp56gdFsPXRMrx7bD8Z5Q8yLpjcyszTsDePqy1JELgGHAvwZEPU8KvR4", "identifier": "Th7MpTaRZVRYnPiabds81Y", "operation": {"role": "101", "type": "1", "dest": "ULtgFQJe6bjiFbs7ke3NJD", "verkey": "~5kh3FB4H3NKq7tUDqeqHc1"}}}, "ULtgFQJe6bjiFbs7ke3NJD"]}, "json://{\"py/tuple\": [\"CzkavE58zgX7rUMrzSinLr\", 1502385732035030]}": {"py/tuple": [{"py/object": "indy_common.types.Request", "py/state": {"digest": "52c6dbfc7fdf04bddf4e6b0bb43d7316edcb2fc820eaab4c2cada0fe34b310da", "reqId": 1502385732035030, "signature": "4RyhbLSRPpahFjk5pthh3tM3de49G2fz77oP42VyRp4DWwMHWbXrRkD6SYVxGGgYaaa8Ep8xhMEQ2QgvDNmHkaDm", "identifier": "CzkavE58zgX7rUMrzSinLr", "operation": {"type": "100", "raw": "{\"endpoint\": {\"ha\": \"10.0.0.203:6666\", \"pubkey\": \"C5eqjU7NMVMGGfGfx2ub- vX5H9X346bQt5qeziVAo3naQ\"}}", "dest": "CzkavE58zgX7rUMrzSinLr"}}}, {"py/tuple": ["6db7a", "CzkavE58zgX7rUMrzSinLr", "CzkavE58zgX7rUMrzSinLr"]}]}, "json://{\"py/tuple\": [\"ULtgFQJe6bjiFbs7ke3NJD\", 1502386097385267]}": {"py/tuple": [{"py/object": "indy_common.types.Request", "py/state": {"digest": "0e116050b139e82912740bca170ac8b550a211c34f9bfc52ddd82bdf8a10460d", "reqId": 1502386097385267, "signature": "3GBVvm55r21RhdYYJS1FYKE67yxo9Dokiv4XePh2DTpfETKRKqi3Lap8BiZUcicwVFKi4RTYZi9sfC75iLuNvFWe", 
"identifier": "ULtgFQJe6bjiFbs7ke3NJD", "operation": {"type": "108", "origin": "ULtgFQJe6bjiFbs7ke3NJD", "ref": 15, "signature_type": "CL"}}}, null]}, "json://{\"py/tuple\": [\"ULtgFQJe6bjiFbs7ke3NJD\", 1502386050220665]}": {"py/tuple": [{"py/object": "indy_common.types.Request", "py/state": {"digest": "d3f84e2352faca0109598e86e8eb74d5fe4039939b88980a00af24bd26e95cab", "reqId": 1502386050220665, "signature": "3bqWZafGMZY384S5Zi2ugEpamTWamezPfLtHVBW7DH6Hgntpd1KTuLjmLoJLw8nh3f6kUV2w2NAgLuQn5ZEceMvP", "identifier": "ULtgFQJe6bjiFbs7ke3NJD", "operation": {"type": "105", "dest": "JYeHd6Zn3zFo1UbNVBd6U1"}}}, null]}, "json://{\"py/tuple\": [\"ULtgFQJe6bjiFbs7ke3NJD\", 1502385681114223]}": {"py/tuple": [{"py/object": "indy_common.types.Request", "py/state": {"digest": "c5c275f4963b859d3ca5f84e15e8f09b7bf7ec1fe7e615131daace478b8dee3e", "reqId": 1502385681114223, "signature": "38jKR2T4pQ7ckYqKriuZF2gGoxbr928FCZtMDns5zNVX35DNWFZ3EWNX7pBSiQKa58928oTq4sFFe5h5v6qojAW1", "identifier": "ULtgFQJe6bjiFbs7ke3NJD", "operation": {"type": "100", "raw": "{\"endpoint\": {\"ha\": \"10.0.0.202:5555\", \"pubkey\": \"5hmMA64DDQz5NzGJNVtRzNwpkZxktNQds21q3Wxxa62z\"}}", "dest": "ULtgFQJe6bjiFbs7ke3NJD"}}}, {"py/tuple": ["45884", "ULtgFQJe6bjiFbs7ke3NJD", "ULtgFQJe6bjiFbs7ke3NJD"]}]}, "json://{\"py/tuple\": [\"CzkavE58zgX7rUMrzSinLr\", 1502385835239707]}": {"py/tuple": [{"py/object": "indy_common.types.Request", "py/state": {"digest": "4bc95d41076677bbfd40407bba1d8ddb69818a79d6a9a24aa57daf0d06ed3770", "reqId": 1502385835239707, "signature": "5R1sAupQwqoee5muMmmud94KWgLvkqkPHxRKj7jPu4VQ5pDCZuvPPpXJXYYSNrtgsDvFurXGg9Jj82S8hEM1H6YH", "identifier": "CzkavE58zgX7rUMrzSinLr", "operation": {"type": "100", "raw": "{\"endpoint\": {\"ha\": \"127.0.0.1:6666\", \"pubkey\": \"C5eqjU7NMVMGGfGfx2ub- vX5H9X346bQt5qeziVAo3naQ\"}}", "dest": "CzkavE58zgX7rUMrzSinLr"}}}, {"py/tuple": ["881bb", "CzkavE58zgX7rUMrzSinLr", "CzkavE58zgX7rUMrzSinLr"]}]}, "json://{\"py/tuple\": [\"ULtgFQJe6bjiFbs7ke3NJD\", 
1502386188150095]}": {"py/tuple": [{"py/object": "indy_common.types.Request", "py/state": {"digest": "0e98ffb463973aa6873ca2cbb21b724b7c3711e30dc3f0177c42285ec022763e", "reqId": 1502386188150095, "signature": "3wB4HxjzsPhA74yv4Df4SZBdQYX7s8dAkhyKbdgWSfoMxyQo6tyFPmaHLb3AcE3r7rcngvLdQy9WbTQhxj3HYhBC", "identifier": "ULtgFQJe6bjiFbs7ke3NJD", "operation": {"type": "104", "raw": "endpoint", "dest": "CzkavE58zgX7rUMrzSinLr"}}}, {"py/tuple": ["endpoint", null, "CzkavE58zgX7rUMrzSinLr"]}]}, "json://{\"py/tuple\": [\"ULtgFQJe6bjiFbs7ke3NJD\", 1502385899839362]}": {"py/tuple": [{"py/object": "indy_common.types.Request", "py/state": {"digest": "e6b83a2dae024a32b5f3c057d3e3f2357291b8fc0e4dae481907c2a9f5e6d1d7", "reqId": 1502385899839362, "signature": "SySAHLvfYw9epanGaKhWsYGWNx7UJ99LTsum4izbgwPxLiMQkS6NhjDMfWMfeoD96rewiP22WJBRkSe2DyfvbPU", "identifier": "ULtgFQJe6bjiFbs7ke3NJD", "operation": {"type": "104", "raw": "endpoint", "dest": "ULtgFQJe6bjiFbs7ke3NJD"}}}, {"py/tuple": ["endpoint", null, "ULtgFQJe6bjiFbs7ke3NJD"]}]}, "json://{\"py/tuple\": [\"ULtgFQJe6bjiFbs7ke3NJD\", 1502386144573959]}": {"py/tuple": [{"py/object": "indy_common.types.Request", "py/state": {"digest": "73a605a704302242669e58cd2f28f58176ab2ed51783f8b3fcb6a06b2fa94fae", "reqId": 1502386144573959, "signature": "3Jz114XGYzyiSMXijXf9j8ttoZh3CsmbjMVwQj2KfFRuVjHosCBsVoPWeJtJfn4gK5MbpmScpyTu4VVFD2qNscWf", "identifier": "ULtgFQJe6bjiFbs7ke3NJD", "operation": {"type": "105", "dest": "CzkavE58zgX7rUMrzSinLr"}}}, null]}, "json://{\"py/tuple\": [\"ULtgFQJe6bjiFbs7ke3NJD\", 1502386085472327]}": {"py/tuple": [{"py/object": "indy_common.types.Request", "py/state": {"digest": "cd0692a09506d71387a4f034887e6ef266dc7cf45591f2b9ed3885589ee7d689", "reqId": 1502386085472327, "signature": "5j57ZRTFhD23fS3dHtYxEvUhK7HzA5UVrbmSszgt2JCw2Dyg228fTaiEtqPDgo7ybdKxXkGjyTfzQPe7c5wVgW9G", "identifier": "ULtgFQJe6bjiFbs7ke3NJD", "operation": {"type": "107", "dest": "ULtgFQJe6bjiFbs7ke3NJD", "data": {"name": "Transcript", "version": 
"1.2"}}}}, null]}, "json://{\"py/tuple\": [\"ULtgFQJe6bjiFbs7ke3NJD\", 1502387001627354]}": {"py/tuple": [{"py/object": "indy_common.types.Request", "py/state": {"digest": "adcffa4444661aaa7e0a35653126b616a1941fae3bc66e8fcafa663144a42d18", "reqId": 1502387001627354, "signature": "ybsbW5JHKn9JGKgbGeKEajUhAnCHMixEWuk9E9YUD7r8xgC5EaDaRhpuASSeyfFoymY65Gbmd3qcCNYuk7VuSrA", "identifier": "ULtgFQJe6bjiFbs7ke3NJD", "operation": {"type": "105", "dest": "CzkavE58zgX7rUMrzSinLr"}}}, null]}, "json://{\"py/tuple\": [\"ULtgFQJe6bjiFbs7ke3NJD\", 1502386049566532]}": {"py/tuple": [{"py/object": "indy_common.types.Request", "py/state": {"digest": "3a7bc6b517611dcfdf704b6903ddabec3507d4ecc272d328493b108ed04bde00", "reqId": 1502386049566532, "signature": "4ghzAiyLavPqsrEr6bYUZMyg87xYtHZ38vQHZrDU3CnJVTXBx9VXRM2qLsPwD8mEN86pMVAT3KdVaD2NJt8VhvgV", "identifier": "ULtgFQJe6bjiFbs7ke3NJD", "operation": {"type": "105", "dest": "ULtgFQJe6bjiFbs7ke3NJD"}}}, null]}, "json://{\"py/tuple\": [\"CzkavE58zgX7rUMrzSinLr\", 1502387801276699]}": {"py/tuple": [{"py/object": "indy_common.types.Request", "py/state": {"digest": "39ee07b502bd4d3c6f32b6d2beab3795e4312690514dd57288a3bac7358d82c4", "reqId": 1502387801276699, "signature": "2ByXW61ND5bxZDF9nV593ZoYWmShj7Ld1qYCyEJ6jthoE1BqDcuerUSqc4y1NCPuQy23BUGbT1BvRmPt9EVJfugW", "identifier": "CzkavE58zgX7rUMrzSinLr", "operation": {"type": "100", "raw": "{\"endpoint\": {\"ha\": \"127.0.0.1:6666\"}}", "dest": "CzkavE58zgX7rUMrzSinLr"}}}, {"py/tuple": ["73563", "CzkavE58zgX7rUMrzSinLr", "CzkavE58zgX7rUMrzSinLr"]}]}, "json://{\"py/tuple\": [\"Th7MpTaRZVRYnPiabds81Y\", 1502385714960301]}": {"py/tuple": [{"py/object": "indy_common.types.Request", "py/state": {"digest": "eaea017f0c21a8ff39a880853922748895a8495e61b4a51822a73dff7831c028", "reqId": 1502385714960301, "signature": "5AJvDyr6N4enKP31cyXyojoph1KstDGw9VkfNvGdHyY8b4ruEDHE8LU7JEvwxjwLyEVcEcu7j4QMnykjuF63PqA7", "identifier": "Th7MpTaRZVRYnPiabds81Y", "operation": {"role": "101", "type": "1", "dest": 
"CzkavE58zgX7rUMrzSinLr", "verkey": "~WjXEvZ9xj4Tz9sLtzf7HVP"}}}, "CzkavE58zgX7rUMrzSinLr"]}, "json://{\"py/tuple\": [\"Th7MpTaRZVRYnPiabds81Y\", 1502385655248947]}": {"py/tuple": [{"py/object": "indy_common.types.Request", "py/state": {"digest": "d1095621d406da7a2e95d744b8ba4016db73b75e3fe0faaf4a5fa82b94e065f2", "reqId": 1502385655248947, "signature": "3pY9d3kPfaf9g4L7WoeJ7L7eKQ86SRfzJfGfy4yZKn41WXHwvFdtAsGM7ne96GKUtbfJVknvbeCDo6AHxYX9j5Qm", "identifier": "Th7MpTaRZVRYnPiabds81Y", "operation": {"role": "101", "type": "1", "dest": "ULtgFQJe6bjiFbs7ke3NJD", "verkey": "~5kh3FB4H3NKq7tUDqeqHc1"}}}, "ULtgFQJe6bjiFbs7ke3NJD"]}, "json://{\"py/tuple\": [\"ULtgFQJe6bjiFbs7ke3NJD\", 1502385695560631]}": {"py/tuple": [{"py/object": "indy_common.types.Request", "py/state": {"digest": "74b080a5afc1beff66f42a87af9c4fcd032f863197c74883e7d302a8b0f18852", "reqId": 1502385695560631, "signature": "4myAj7GnGrUqb9WKtMbTD19rNvX1ToXZmWv4K7MZQP63DdYYzvW9b5n6mzYKnkdnUUQvPvdkweuUBVTJSMdy6bup", "identifier": "ULtgFQJe6bjiFbs7ke3NJD", "operation": {"role": "101", "type": "1", "dest": "CzkavE58zgX7rUMrzSinLr", "verkey": "~WjXEvZ9xj4Tz9sLtzf7HVP"}}}, "CzkavE58zgX7rUMrzSinLr"]}}, "_connections": {"py/reduce": [{"py/type": "collections.OrderedDict"}, {"py/tuple": []}, null, null, {"py/tuple": [{"py/tuple": ["Faber College", {"proofRequests": [], "internalId": null, "remoteEndPoint": {"py/tuple": ["127.0.0.1", 5555]}, "request_nonce": "b1134a647eb818069c089e7694f63e6d", "name": "Faber College", "connection_status": "Accepted", "trustAnchor": "Faber College", "verifiedClaimProofs": [], "py/object": "indy_client.client.wallet.connection.Connection", "connection_last_sync_no": null, "localIdentifier": "JYeHd6Zn3zFo1UbNVBd6U1", "localVerkey": "~T9HBHeNSXBXZCBB8GrgjFm", "remotePubkey": "5hmMA64DDQz5NzGJNVtRzNwpkZxktNQds21q3Wxxa62z", "availableClaims": [{"py/object": "anoncreds.protocol.types.AvailableClaim", "py/newargs": {"py/tuple": ["Transcript", "1.2", "ULtgFQJe6bjiFbs7ke3NJD"]}, "py/seq": 
["Transcript", "1.2", "ULtgFQJe6bjiFbs7ke3NJD"]}], "connection_last_synced": {"py/object": "datetime.datetime", "__reduce__": [{"py/type": "datetime.datetime"}, ["B+EIChQbHQvKxQ=="]]}, "_remoteVerkey": "~5kh3FB4H3NKq7tUDqeqHc1", "remoteIdentifier": "ULtgFQJe6bjiFbs7ke3NJD"}]}, {"py/tuple": ["Acme Corp", {"proofRequests": [{"py/object": "anoncreds.protocol.types.ProofRequest", "attributes": {"py/reduce": [{"py/type": "collections.OrderedDict"}, {"py/tuple": []}, null, null, {"py/tuple": [{"py/tuple": ["first_name", "string"]}, {"py/tuple": ["last_name", "string"]}, {"py/tuple": ["phone_number", "string"]}, {"py/tuple": ["degree", "Bachelor of Science, Marketing"]}, {"py/tuple": ["status", "graduated"]}, {"py/tuple": ["ssn", "123-45-6789"]}]}]}, "ts": null, "version": "0.2", "verifiableAttributes": {"777fb1af-a7a0-4f0c-81b8-9e09a8068d16": {"py/object": "anoncreds.protocol.types.AttributeInfo", "py/newargs": {"py/tuple": ["status", null, null]}, "py/seq": ["status", null, null]}, "d5743fcc-d287-4176-a634-3b721d962ea5": {"py/object": "anoncreds.protocol.types.AttributeInfo", "py/newargs": {"py/tuple": ["ssn", null, null]}, "py/seq": ["ssn", null, null]}, "4de5205f-720f-49f0-8397-0576875d904b": {"py/object": "anoncreds.protocol.types.AttributeInfo", "py/newargs": {"py/tuple": ["degree", null, null]}, "py/seq": ["degree", null, null]}}, "nonce": 1871218719015472932666560146158750511756, "seqNo": null, "name": "Job-Application", "predicates": {}, "fulfilledByClaims": [{"py/tuple": [{"py/id": 12}, {"py/id": 14}, {"py/reduce": [{"py/type": "collections.OrderedDict"}, {"py/tuple": []}, null, null, {"py/tuple": [{"py/tuple": ["student_name", "Alice Garcia"]}, {"py/tuple": ["ssn", "123-45-6789"]}, {"py/tuple": ["degree", "Bachelor of Science, Marketing"]}, {"py/tuple": ["year", "2015"]}, {"py/tuple": ["status", "graduated"]}]}]}]}], "selfAttestedAttrs": {}}], "internalId": null, "remoteEndPoint": {"py/tuple": ["10.0.0.203", 6666]}, "request_nonce": 
"57fbf9dc8c8e6acde33de98c6d747b28c", "name": "Acme Corp", "connection_status": null, "trustAnchor": "Acme Corp", "verifiedClaimProofs": [], "py/object": "indy_client.client.wallet.connection.Connection", "connection_last_sync_no": null, "localIdentifier": "Siga5PyLFTZdpupPUXogjt", "localVerkey": "~9wzQSoSNbLwRcNz9JkBFME", "remotePubkey": "C5eqjU7NMVMGGfGfx2ubvX5H9X346bQt5qeziVAo3naQ", "availableClaims": [], "connection_last_synced": {"py/object": "datetime.datetime", "__reduce__": [{"py/type": "datetime.datetime"}, ["B+EIChQsHwmWIA=="]]}, "_remoteVerkey": "~WjXEvZ9xj4Tz9sLtzf7HVP", "remoteIdentifier": "CzkavE58zgX7rUMrzSinLr"}]}]}]}, "replyHandler": {}, "knownIds": {"ULtgFQJe6bjiFbs7ke3NJD": {"py/object": "indy_common.identity.Identity", "identity": {"py/object": "plenum.common.signer_did.DidIdentity", "abbreviated": null, "_identifier": "ULtgFQJe6bjiFbs7ke3NJD", "_verkey": null}, "last_synced": null, "seqNo": null, "trustAnchor": null, "_role": null}, "JYeHd6Zn3zFo1UbNVBd6U1": {"py/object": "indy_common.identity.Identity", "identity": {"py/object": "plenum.common.signer_did.DidIdentity", "abbreviated": null, "_identifier": "JYeHd6Zn3zFo1UbNVBd6U1", "_verkey": null}, "last_synced": null, "seqNo": null, "trustAnchor": null, "_role": null}, "CzkavE58zgX7rUMrzSinLr": {"py/object": "indy_common.identity.Identity", "identity": {"py/object": "plenum.common.signer_did.DidIdentity", "abbreviated": null, "_identifier": "CzkavE58zgX7rUMrzSinLr", "_verkey": null}, "last_synced": null, "seqNo": null, "trustAnchor": null, "_role": null}}, "lastKnownSeqs": {}, "env": "no-env", "classver/indy_client.client.wallet.wallet.Wallet": 2} \ No newline at end of file diff --git a/indy_client/test/cli/wallet_before_rebranding b/indy_client/test/cli/wallet_before_rebranding deleted file mode 100644 index 84ef2b7f4..000000000 --- a/indy_client/test/cli/wallet_before_rebranding +++ /dev/null @@ -1 +0,0 @@ -{"_connections": {"py/reduce": [{"py/type": "collections.OrderedDict"}, {"py/tuple": 
[]}, null, null, {"py/tuple": [{"py/tuple": ["Faber College", {"verifiedClaimProofs": [], "internalId": null, "availableClaims": [{"py/newargs": {"py/tuple": ["Transcript", "1.2", "ULtgFQJe6bjiFbs7ke3NJD"]}, "py/object": "anoncreds.protocol.types.AvailableClaim", "py/seq": ["Transcript", "1.2", "ULtgFQJe6bjiFbs7ke3NJD"]}], "localVerkey": "~B8szPF41YG76ePciYUvG3W", "connection_last_sync_no": null, "connection_last_synced": {"__reduce__": [{"py/type": "datetime.datetime"}, ["B+EIHRAEKgP3/g=="]], "py/object": "datetime.datetime"}, "proofRequests": [], "py/object": "sovrin_client.client.wallet.connection.Connection", "localIdentifier": "BJWY59pnbdGrThaxgbpaWY", "trustAnchor": "Faber College", "request_nonce": "b1134a647eb818069c089e7694f63e6d", "connection_status": "Accepted", "_remoteVerkey": "~5kh3FB4H3NKq7tUDqeqHc1", "remotePubkey": "5hmMA64DDQz5NzGJNVtRzNwpkZxktNQds21q3Wxxa62z", "remoteIdentifier": "ULtgFQJe6bjiFbs7ke3NJD", "name": "Faber College", "remoteEndPoint": {"py/tuple": ["127.0.0.1", 5555]}}]}, {"py/tuple": ["Acme Corp", {"verifiedClaimProofs": [], "internalId": null, "availableClaims": [{"py/newargs": {"py/tuple": ["Job-Certificate", "0.2", "CzkavE58zgX7rUMrzSinLr"]}, "py/object": "anoncreds.protocol.types.AvailableClaim", "py/seq": ["Job-Certificate", "0.2", "CzkavE58zgX7rUMrzSinLr"]}], "localVerkey": "~VKF8RPxwwEM7BxkipZX83j", "connection_last_sync_no": null, "connection_last_synced": {"__reduce__": [{"py/type": "datetime.datetime"}, ["B+EIHRAHGw6Pbw=="]], "py/object": "datetime.datetime"}, "proofRequests": [{"selfAttestedAttrs": {"first_name": "Alice", "phone_number": "123-456-7890", "last_name": "Garcia"}, "nonce": 1871218719015472932666560146158750511756, "verifiableAttributes": {"4d646508-d49b-4a09-9809-7250d45cc2da": {"py/newargs": {"py/tuple": ["ssn", null, null]}, "py/object": "anoncreds.protocol.types.AttributeInfo", "py/seq": ["ssn", null, null]}, "aba32489-62cc-40f5-b733-7ab9bbcc46aa": {"py/newargs": {"py/tuple": ["status", null, null]}, 
"py/object": "anoncreds.protocol.types.AttributeInfo", "py/seq": ["status", null, null]}, "330a3131-c0ce-4766-bdee-b7a7560e0f89": {"py/newargs": {"py/tuple": ["degree", null, null]}, "py/object": "anoncreds.protocol.types.AttributeInfo", "py/seq": ["degree", null, null]}}, "predicates": {}, "ts": null, "fulfilledByClaims": [{"py/tuple": [{"py/id": 9}, {"py/id": 11}, {"py/reduce": [{"py/type": "collections.OrderedDict"}, {"py/tuple": []}, null, null, {"py/tuple": [{"py/tuple": ["student_name", "Alice Garcia"]}, {"py/tuple": ["ssn", "123-45-6789"]}, {"py/tuple": ["degree", "Bachelor of Science, Marketing"]}, {"py/tuple": ["year", "2015"]}, {"py/tuple": ["status", "graduated"]}]}]}]}], "attributes": {"py/reduce": [{"py/type": "collections.OrderedDict"}, {"py/tuple": []}, null, null, {"py/tuple": [{"py/tuple": ["first_name", "Alice"]}, {"py/tuple": ["last_name", "Garcia"]}, {"py/tuple": ["phone_number", "123-456-7890"]}, {"py/tuple": ["degree", "Bachelor of Science, Marketing"]}, {"py/tuple": ["status", "graduated"]}, {"py/tuple": ["ssn", "123-45-6789"]}]}]}, "py/object": "anoncreds.protocol.types.ProofRequest", "seqNo": null, "name": "Job-Application", "version": "0.2"}], "py/object": "sovrin_client.client.wallet.connection.Connection", "localIdentifier": "PE83fLnVVSihiq39bhD3gk", "trustAnchor": "Acme Corp", "request_nonce": "57fbf9dc8c8e6acde33de98c6d747b28c", "connection_status": "Accepted", "_remoteVerkey": "~WjXEvZ9xj4Tz9sLtzf7HVP", "remotePubkey": "C5eqjU7NMVMGGfGfx2ubvX5H9X346bQt5qeziVAo3naQ", "remoteIdentifier": "CzkavE58zgX7rUMrzSinLr", "name": "Acme Corp", "remoteEndPoint": {"py/tuple": ["127.0.0.1", 6666]}}]}, {"py/tuple": ["Thrift Bank", {"verifiedClaimProofs": [], "internalId": null, "availableClaims": [], "localVerkey": "~91MMTTVbqNdRQJ9uv7Hijt", "connection_last_sync_no": null, "connection_last_synced": {"__reduce__": [{"py/type": "datetime.datetime"}, ["B+EIHRAMIwh5WQ=="]], "py/object": "datetime.datetime"}, "proofRequests": [{"selfAttestedAttrs": {}, 
"nonce": 2551783452857349859593309361022286934668, "verifiableAttributes": {"c02eb394-df6e-4aa4-991a-c8479a7ac726": {"py/newargs": {"py/tuple": ["employee_status", null, null]}, "py/object": "anoncreds.protocol.types.AttributeInfo", "py/seq": ["employee_status", null, null]}, "ac32a218-3ad6-46f5-835b-a1e3a3523b46": {"py/newargs": {"py/tuple": ["salary_bracket", null, null]}, "py/object": "anoncreds.protocol.types.AttributeInfo", "py/seq": ["salary_bracket", null, null]}}, "predicates": {}, "ts": null, "fulfilledByClaims": [{"py/tuple": [{"py/id": 15}, {"py/id": 17}, {"py/reduce": [{"py/type": "collections.OrderedDict"}, {"py/tuple": []}, null, null, {"py/tuple": [{"py/tuple": ["first_name", "Alice"]}, {"py/tuple": ["last_name", "Garcia"]}, {"py/tuple": ["employee_status", "Permanent"]}, {"py/tuple": ["experience", "3 years"]}, {"py/tuple": ["salary_bracket", "between $50,000 to $100,000"]}]}]}]}], "attributes": {"py/reduce": [{"py/type": "collections.OrderedDict"}, {"py/tuple": []}, null, null, {"py/tuple": [{"py/tuple": ["salary_bracket", "between $50,000 to $100,000"]}, {"py/tuple": ["employee_status", "Permanent"]}]}]}, "py/object": "anoncreds.protocol.types.ProofRequest", "seqNo": null, "name": "Loan-Application-Basic", "version": "0.1"}, {"selfAttestedAttrs": {}, "nonce": 2551783452857349859593309361022286934668, "verifiableAttributes": {"31bd9935-a886-4814-b437-849c80368506": {"py/newargs": {"py/tuple": ["last_name", null, null]}, "py/object": "anoncreds.protocol.types.AttributeInfo", "py/seq": ["last_name", null, null]}, "a45a8fed-1695-4e95-a46b-a1ab44a88ad2": {"py/newargs": {"py/tuple": ["ssn", null, null]}, "py/object": "anoncreds.protocol.types.AttributeInfo", "py/seq": ["ssn", null, null]}, "0f25477f-88ed-4e93-a463-1ed2071d4ba3": {"py/newargs": {"py/tuple": ["first_name", null, null]}, "py/object": "anoncreds.protocol.types.AttributeInfo", "py/seq": ["first_name", null, null]}}, "predicates": {}, "ts": null, "fulfilledByClaims": [{"py/tuple": [{"py/id": 
9}, {"py/id": 11}, {"py/reduce": [{"py/type": "collections.OrderedDict"}, {"py/tuple": []}, null, null, {"py/tuple": [{"py/tuple": ["student_name", "Alice Garcia"]}, {"py/tuple": ["ssn", "123-45-6789"]}, {"py/tuple": ["degree", "Bachelor of Science, Marketing"]}, {"py/tuple": ["year", "2015"]}, {"py/tuple": ["status", "graduated"]}]}]}]}, {"py/tuple": [{"py/id": 15}, {"py/id": 17}, {"py/reduce": [{"py/type": "collections.OrderedDict"}, {"py/tuple": []}, null, null, {"py/tuple": [{"py/tuple": ["first_name", "Alice"]}, {"py/tuple": ["last_name", "Garcia"]}, {"py/tuple": ["employee_status", "Permanent"]}, {"py/tuple": ["experience", "3 years"]}, {"py/tuple": ["salary_bracket", "between $50,000 to $100,000"]}]}]}]}], "attributes": {"py/reduce": [{"py/type": "collections.OrderedDict"}, {"py/tuple": []}, null, null, {"py/tuple": [{"py/tuple": ["first_name", "Alice"]}, {"py/tuple": ["last_name", "Garcia"]}, {"py/tuple": ["ssn", "123-45-6789"]}]}]}, "py/object": "anoncreds.protocol.types.ProofRequest", "seqNo": null, "name": "Loan-Application-KYC", "version": "0.1"}, {"selfAttestedAttrs": {}, "nonce": 2551783452857349859593309361022286934668, "verifiableAttributes": {"8327f28c-0194-45df-9472-196d173ee493": {"py/newargs": {"py/tuple": ["first_name", null, null]}, "py/object": "anoncreds.protocol.types.AttributeInfo", "py/seq": ["first_name", null, null]}, "f58f77fa-894f-4a47-8aaa-7d586394e16c": {"py/newargs": {"py/tuple": ["last_name", null, null]}, "py/object": "anoncreds.protocol.types.AttributeInfo", "py/seq": ["last_name", null, null]}}, "predicates": {}, "ts": null, "fulfilledByClaims": [], "attributes": {"py/reduce": [{"py/type": "collections.OrderedDict"}, {"py/tuple": []}, null, null, {"py/tuple": [{"py/tuple": ["first_name", "string"]}, {"py/tuple": ["last_name", "string"]}]}]}, "py/object": "anoncreds.protocol.types.ProofRequest", "seqNo": null, "name": "Name-Proof", "version": "0.1"}], "py/object": "sovrin_client.client.wallet.connection.Connection", 
"localIdentifier": "DbMzrC2CsumhvkRkjj3RMA", "trustAnchor": "Thrift Bank", "request_nonce": "77fbf9dc8c8e6acde33de98c6d747b28c", "connection_status": "Accepted", "_remoteVerkey": "~3sphzTb2itL2mwSeJ1Ji28", "remotePubkey": "AGBjYvyM3SFnoiDGAEzkSLHvqyzVkXeMZfKDvdpEsC2x", "remoteIdentifier": "H2aKRiDeq8aLZSydQMDbtf", "name": "Thrift Bank", "remoteEndPoint": {"py/tuple": ["127.0.0.1", 7777]}}]}]}]}, "_pending": {"py/reduce": [{"py/type": "collections.deque"}, {"py/tuple": [[]]}, null, null, null]}, "_trustAnchored": {"H2aKRiDeq8aLZSydQMDbtf": {"identity": {"_identifier": "H2aKRiDeq8aLZSydQMDbtf", "abbreviated": true, "py/object": "plenum.common.signer_did.DidIdentity", "_verkey": "3sphzTb2itL2mwSeJ1Ji28"}, "last_synced": null, "_role": "101", "py/object": "sovrin_common.identity.Identity", "seqNo": 11, "trustAnchor": null}, "CzkavE58zgX7rUMrzSinLr": {"identity": {"_identifier": "CzkavE58zgX7rUMrzSinLr", "abbreviated": true, "py/object": "plenum.common.signer_did.DidIdentity", "_verkey": "WjXEvZ9xj4Tz9sLtzf7HVP"}, "last_synced": null, "_role": "101", "py/object": "sovrin_common.identity.Identity", "seqNo": 9, "trustAnchor": null}, "ULtgFQJe6bjiFbs7ke3NJD": {"identity": {"_identifier": "ULtgFQJe6bjiFbs7ke3NJD", "abbreviated": true, "py/object": "plenum.common.signer_did.DidIdentity", "_verkey": "5kh3FB4H3NKq7tUDqeqHc1"}, "last_synced": null, "_role": "101", "py/object": "sovrin_common.identity.Identity", "seqNo": 7, "trustAnchor": null}}, "didMethods": {"default": {"py/id": 80}, "d": {"sovrin": {"pattern": "did:sovrin:", "py/object": "plenum.common.did_method.DidMethod", "name": "sovrin", "signerConstructor": {"py/type": "plenum.common.signer_did.DidSigner"}}}, "py/object": "plenum.common.did_method.DidMethods"}, "env": "no-env", "classver/sovrin_client.client.wallet.wallet.Wallet": 1, "_attributes": {"json://{\"py/tuple\": [\"endpoint\", null, \"CzkavE58zgX7rUMrzSinLr\"]}": {"value": null, "dest": "CzkavE58zgX7rUMrzSinLr", "encKey": null, "ledgerStore": {"py/id": 2}, 
"py/object": "sovrin_client.client.wallet.attribute.Attribute", "seqNo": null, "name": "endpoint", "origin": null}, "json://{\"py/tuple\": [\"aeff8\", \"H2aKRiDeq8aLZSydQMDbtf\", \"H2aKRiDeq8aLZSydQMDbtf\"]}": {"value": "{\"endpoint\": {\"ha\": \"127.0.0.1:7777\", \"pubkey\": \"AGBjYvyM3SFnoiDGAEzkSLHvqyzVkXeMZfKDvdpEsC2x\"}}", "dest": "H2aKRiDeq8aLZSydQMDbtf", "encKey": null, "ledgerStore": {"py/id": 2}, "py/object": "sovrin_client.client.wallet.attribute.Attribute", "seqNo": 12, "name": "aeff8", "origin": "H2aKRiDeq8aLZSydQMDbtf"}, "json://{\"py/tuple\": [\"66b17\", \"ULtgFQJe6bjiFbs7ke3NJD\", \"ULtgFQJe6bjiFbs7ke3NJD\"]}": {"value": "{\"endpoint\": {\"ha\": \"127.0.0.1:5555\", \"pubkey\": \"5hmMA64DDQz5NzGJNVtRzNwpkZxktNQds21q3Wxxa62z\"}}", "dest": "ULtgFQJe6bjiFbs7ke3NJD", "encKey": null, "ledgerStore": {"py/id": 2}, "py/object": "sovrin_client.client.wallet.attribute.Attribute", "seqNo": 8, "name": "66b17", "origin": "ULtgFQJe6bjiFbs7ke3NJD"}, "json://{\"py/tuple\": [\"endpoint\", null, \"H2aKRiDeq8aLZSydQMDbtf\"]}": {"value": null, "dest": "H2aKRiDeq8aLZSydQMDbtf", "encKey": null, "ledgerStore": {"py/id": 2}, "py/object": "sovrin_client.client.wallet.attribute.Attribute", "seqNo": null, "name": "endpoint", "origin": null}, "json://{\"py/tuple\": [\"07867\", \"CzkavE58zgX7rUMrzSinLr\", \"CzkavE58zgX7rUMrzSinLr\"]}": {"value": "{\"endpoint\": {\"ha\": \"127.0.0.1:6666\", \"pubkey\": \"C5eqjU7NMVMGGfGfx2ubvX5H9X346bQt5qeziVAo3naQ\"}}", "dest": "CzkavE58zgX7rUMrzSinLr", "encKey": null, "ledgerStore": {"py/enumvalue": 4, "py/object": "sovrin_client.client.wallet.attribute.LedgerStore"}, "py/object": "sovrin_client.client.wallet.attribute.Attribute", "seqNo": 10, "name": "07867", "origin": "CzkavE58zgX7rUMrzSinLr"}, "json://{\"py/tuple\": [\"endpoint\", null, \"ULtgFQJe6bjiFbs7ke3NJD\"]}": {"value": null, "dest": "ULtgFQJe6bjiFbs7ke3NJD", "encKey": null, "ledgerStore": {"py/id": 2}, "py/object": "sovrin_client.client.wallet.attribute.Attribute", "seqNo": null, 
"name": "endpoint", "origin": null}}, "aliasesToIds": {}, "idsToSigners": {"BJWY59pnbdGrThaxgbpaWY": {"abbreviated": true, "seed": {"py/b64": "th1xDu+RHRGP7+dsGlDkZkGjK/Idt0/QWZwDMUmWfO4=\n"}, "naclSigner": {"keyhex": {"py/b64": "YjYxZDcxMGVlZjkxMWQxMThmZWZlNzZjMWE1MGU0NjY0MWEzMmJmMjFkYjc0ZmQwNTk5YzAzMzE0\nOTk2N2NlZQ==\n"}, "verraw": {"py/b64": "U207k69qxO8wGLhTTK2HbVIVBCygOEuuXrBpvLgCKsU=\n"}, "verhex": {"py/b64": "NTM2ZDNiOTNhZjZhYzRlZjMwMThiODUzNGNhZDg3NmQ1MjE1MDQyY2EwMzg0YmFlNWViMDY5YmNi\nODAyMmFjNQ==\n"}, "keyraw": {"py/b64": "th1xDu+RHRGP7+dsGlDkZkGjK/Idt0/QWZwDMUmWfO4=\n"}, "py/object": "stp_core.crypto.nacl_wrappers.Signer", "key": {"_signing_key": {"py/b64": "th1xDu+RHRGP7+dsGlDkZkGjK/Idt0/QWZwDMUmWfO5TbTuTr2rE7zAYuFNMrYdtUhUELKA4S65e\nsGm8uAIqxQ==\n"}, "_seed": {"py/b64": "th1xDu+RHRGP7+dsGlDkZkGjK/Idt0/QWZwDMUmWfO4=\n"}, "py/object": "stp_core.crypto.nacl_wrappers.SigningKey", "verify_key": {"_key": {"py/b64": "U207k69qxO8wGLhTTK2HbVIVBCygOEuuXrBpvLgCKsU=\n"}, "py/object": "stp_core.crypto.nacl_wrappers.VerifyKey"}}}, "_identifier": "BJWY59pnbdGrThaxgbpaWY", "_alias": null, "py/object": "plenum.common.signer_did.DidSigner", "_verkey": "B8szPF41YG76ePciYUvG3W", "sk": {"py/id": 103}}, "H2aKRiDeq8aLZSydQMDbtf": {"py/id": 87}, "PE83fLnVVSihiq39bhD3gk": {"abbreviated": true, "seed": {"py/b64": "BVoiFg4P8/334xSgcVafdHim/stDcuZZ6uYIUr7vpAI=\n"}, "naclSigner": {"keyhex": {"py/b64": "MDU1YTIyMTYwZTBmZjNmZGY3ZTMxNGEwNzE1NjlmNzQ3OGE2ZmVjYjQzNzJlNjU5ZWFlNjA4NTJi\nZWVmYTQwMg==\n"}, "verraw": {"py/b64": "s/4PzBQBVe6WYvm8ZEff6eVL+hw4zk3rrxmD6hP5zmo=\n"}, "verhex": {"py/b64": "YjNmZTBmY2MxNDAxNTVlZTk2NjJmOWJjNjQ0N2RmZTllNTRiZmExYzM4Y2U0ZGViYWYxOTgzZWEx\nM2Y5Y2U2YQ==\n"}, "keyraw": {"py/b64": "BVoiFg4P8/334xSgcVafdHim/stDcuZZ6uYIUr7vpAI=\n"}, "py/object": "stp_core.crypto.nacl_wrappers.Signer", "key": {"_signing_key": {"py/b64": "BVoiFg4P8/334xSgcVafdHim/stDcuZZ6uYIUr7vpAKz/g/MFAFV7pZi+bxkR9/p5Uv6HDjOTeuv\nGYPqE/nOag==\n"}, "_seed": {"py/b64": 
"BVoiFg4P8/334xSgcVafdHim/stDcuZZ6uYIUr7vpAI=\n"}, "py/object": "stp_core.crypto.nacl_wrappers.SigningKey", "verify_key": {"_key": {"py/b64": "s/4PzBQBVe6WYvm8ZEff6eVL+hw4zk3rrxmD6hP5zmo=\n"}, "py/object": "stp_core.crypto.nacl_wrappers.VerifyKey"}}}, "_identifier": "PE83fLnVVSihiq39bhD3gk", "_alias": null, "py/object": "plenum.common.signer_did.DidSigner", "_verkey": "VKF8RPxwwEM7BxkipZX83j", "sk": {"py/id": 111}}, "ULtgFQJe6bjiFbs7ke3NJD": {"py/id": 97}, "CzkavE58zgX7rUMrzSinLr": {"py/id": 82}, "DbMzrC2CsumhvkRkjj3RMA": {"abbreviated": true, "seed": {"py/b64": "7akoSfANDCiGx9yKH/H9S4V8Lj0MMM7myzZ15lpada0=\n"}, "naclSigner": {"keyhex": {"py/b64": "ZWRhOTI4NDlmMDBkMGMyODg2YzdkYzhhMWZmMWZkNGI4NTdjMmUzZDBjMzBjZWU2Y2IzNjc1ZTY1\nYTVhNzVhZA==\n"}, "verraw": {"py/b64": "ZfnkZvCT3+Rn1l5J/FJa4UDVpu4zvDL9RiyiLQogf3s=\n"}, "verhex": {"py/b64": "NjVmOWU0NjZmMDkzZGZlNDY3ZDY1ZTQ5ZmM1MjVhZTE0MGQ1YTZlZTMzYmMzMmZkNDYyY2EyMmQw\nYTIwN2Y3Yg==\n"}, "keyraw": {"py/b64": "7akoSfANDCiGx9yKH/H9S4V8Lj0MMM7myzZ15lpada0=\n"}, "py/object": "stp_core.crypto.nacl_wrappers.Signer", "key": {"_signing_key": {"py/b64": "7akoSfANDCiGx9yKH/H9S4V8Lj0MMM7myzZ15lpada1l+eRm8JPf5GfWXkn8UlrhQNWm7jO8Mv1G\nLKItCiB/ew==\n"}, "_seed": {"py/b64": "7akoSfANDCiGx9yKH/H9S4V8Lj0MMM7myzZ15lpada0=\n"}, "py/object": "stp_core.crypto.nacl_wrappers.SigningKey", "verify_key": {"_key": {"py/b64": "ZfnkZvCT3+Rn1l5J/FJa4UDVpu4zvDL9RiyiLQogf3s=\n"}, "py/object": "stp_core.crypto.nacl_wrappers.VerifyKey"}}}, "_identifier": "DbMzrC2CsumhvkRkjj3RMA", "_alias": null, "py/object": "plenum.common.signer_did.DidSigner", "_verkey": "91MMTTVbqNdRQJ9uv7Hijt", "sk": {"py/id": 107}}, "Th7MpTaRZVRYnPiabds81Y": {"py/id": 92}}, "_name": "Default", "py/object": "sovrin_client.client.wallet.wallet.Wallet", "_upgrades": {}, "_nodes": {}, "_prepared": {"json://{\"py/tuple\": [\"H2aKRiDeq8aLZSydQMDbtf\", 1504011883590259]}": {"py/tuple": [{"py/state": {"operation": {"dest": "BJWY59pnbdGrThaxgbpaWY", "type": "105"}, "signature": 
"364whYUWxD7ujejo4C5tcxXwD5gV9BsdrAEckzy3ucHuSpiSqGXgf28swPY33YsSTbr8ynXYBGFGdCWQ3dYL5n3K", "reqId": 1504011883590259, "identifier": "H2aKRiDeq8aLZSydQMDbtf", "digest": "fc76ffe6a9961de24b7c04bbced7d325a105fc9293745ede561ad45bc002b37e"}, "py/object": "sovrin_common.types.Request"}, null]}, "json://{\"py/tuple\": [\"CzkavE58zgX7rUMrzSinLr\", 1504010350751398]}": {"py/tuple": [{"py/state": {"operation": {"dest": "CzkavE58zgX7rUMrzSinLr", "type": "100", "raw": "{\"endpoint\": {\"ha\": \"127.0.0.1:6666\", \"pubkey\": \"C5eqjU7NMVMGGfGfx2ubvX5H9X346bQt5qeziVAo3naQ\"}}"}, "signature": "3qPia5g6BGx8vypLNtwWQRUmKvwa75ortax8gpeyYWSW91hZyjSavqKPJDUGkpTosFmBPtUhTnRyxihkujX2vLtw", "reqId": 1504010350751398, "identifier": "CzkavE58zgX7rUMrzSinLr", "digest": "4efe29e01a1d5759709f1568b17ca323a3adce3767a523ad943f1635d2324856"}, "py/object": "sovrin_common.types.Request"}, {"py/tuple": ["07867", "CzkavE58zgX7rUMrzSinLr", "CzkavE58zgX7rUMrzSinLr"]}]}, "json://{\"py/tuple\": [\"H2aKRiDeq8aLZSydQMDbtf\", 1504012188740538]}": {"py/tuple": [{"py/state": {"operation": {"dest": "CzkavE58zgX7rUMrzSinLr", "type": "107", "data": {"version": "0.2", "name": "Job-Certificate"}}, "signature": "2nCUmiFiW8PNnFTKj6bBofZTZ14uw7WGoBcnqY7fCfG882MHLH76X2YaWGTWKDU5hZxHPXPtyWFdCKCjy9nQfkaA", "reqId": 1504012188740538, "identifier": "H2aKRiDeq8aLZSydQMDbtf", "digest": "dfbfb8166a324fd9bd61ccbccb7c872827af6407415760ff1cca2b674ad535a1"}, "py/object": "sovrin_common.types.Request"}, null]}, "json://{\"py/tuple\": [\"Th7MpTaRZVRYnPiabds81Y\", 1504010398619692]}": {"py/tuple": [{"py/state": {"operation": {"role": "101", "verkey": "~3sphzTb2itL2mwSeJ1Ji28", "dest": "H2aKRiDeq8aLZSydQMDbtf", "type": "1"}, "signature": "3yKXjwJ5r3GgZBzEQ1NZyB5TJz7A7UYTrbpiDmSoy5qirWoZ3fBuezN2T4HGB3XHQDCSDJC9ziacSHGsTubyxLza", "reqId": 1504010398619692, "identifier": "Th7MpTaRZVRYnPiabds81Y", "digest": "e0808960a3543e452f83435dd142c67084763f272640388d3dd37294fd29a041"}, "py/object": "sovrin_common.types.Request"}, 
"H2aKRiDeq8aLZSydQMDbtf"]}, "json://{\"py/tuple\": [\"H2aKRiDeq8aLZSydQMDbtf\", 1504012048933581]}": {"py/tuple": [{"py/state": {"operation": {"dest": "PE83fLnVVSihiq39bhD3gk", "type": "105"}, "signature": "58CmzefeGey3ofK6jrRMVbwtWbjG13F5kbdqnw54We5g2z7o4D191HPH4kDW28PgpmHoKS6oWuCBfq3JCZD3xWZ4", "reqId": 1504012048933581, "identifier": "H2aKRiDeq8aLZSydQMDbtf", "digest": "8012bd04271496c7a06c83910089fb11fde11689a5ef86ebc3a35d25c9bcdaca"}, "py/object": "sovrin_common.types.Request"}, null]}, "json://{\"py/tuple\": [\"Th7MpTaRZVRYnPiabds81Y\", 1504010181315940]}": {"py/tuple": [{"py/state": {"operation": {"role": "101", "verkey": "~5kh3FB4H3NKq7tUDqeqHc1", "dest": "ULtgFQJe6bjiFbs7ke3NJD", "type": "1"}, "signature": "54SEC4pE41BYDyweHPcRRMtsaHttqJkSuGPxiTxy8eVK8KmvHFFzevkZ4HvAB4hAi9w7mdBXsofpZfNFeSDW8DdA", "reqId": 1504010181315940, "identifier": "Th7MpTaRZVRYnPiabds81Y", "digest": "f6bcf8b6d9a272bd7bf5d1787f37f3aa3ced590c00fdbf589b74a43552c7fe15"}, "py/object": "sovrin_common.types.Request"}, "ULtgFQJe6bjiFbs7ke3NJD"]}, "json://{\"py/tuple\": [\"ULtgFQJe6bjiFbs7ke3NJD\", 1504010242889826]}": {"py/tuple": [{"py/state": {"operation": {"dest": "ULtgFQJe6bjiFbs7ke3NJD", "type": "100", "raw": "{\"endpoint\": {\"ha\": \"127.0.0.1:5555\", \"pubkey\": \"5hmMA64DDQz5NzGJNVtRzNwpkZxktNQds21q3Wxxa62z\"}}"}, "signature": "2rJA57ukRizRu3SSQRE6mdrV7o4s3gTRDdFRn7ov3cTVtU6bidkboVG2hGRE9SECSpFyTe231yXEibio4MPxQ8k3", "reqId": 1504010242889826, "identifier": "ULtgFQJe6bjiFbs7ke3NJD", "digest": "85c79438fab5bece84b69325ae55bced2cd1dddab99ce5c3153db78740c18c28"}, "py/object": "sovrin_common.types.Request"}, {"py/tuple": ["66b17", "ULtgFQJe6bjiFbs7ke3NJD", "ULtgFQJe6bjiFbs7ke3NJD"]}]}, "json://{\"py/tuple\": [\"H2aKRiDeq8aLZSydQMDbtf\", 1504011967739666]}": {"py/tuple": [{"py/state": {"operation": {"type": "108", "signature_type": "CL", "origin": "ULtgFQJe6bjiFbs7ke3NJD", "ref": 13}, "signature": "4kr5ayEMn8Wc1S1tfEcfJqpNr14u4MFzZmsjZhPnXDYD8RqdretNoVHr8U7AGDgufjZgNpF3WGWY5KGag8kCACdb", 
"reqId": 1504011967739666, "identifier": "H2aKRiDeq8aLZSydQMDbtf", "digest": "427f12c8d3f30bbda02c09aa900f27c3d1bd657bd5f58078dc90c690e513ac89"}, "py/object": "sovrin_common.types.Request"}, null]}, "json://{\"py/tuple\": [\"H2aKRiDeq8aLZSydQMDbtf\", 1504012356595162]}": {"py/tuple": [{"py/state": {"operation": {"dest": "DbMzrC2CsumhvkRkjj3RMA", "type": "105"}, "signature": "4n2ZQXHrGPpzHQsEcHA8V3j1Tw5TF5aj1wm6m41tMBTLC1RXyhnFyF1BoAdRA9vZPXmMFhTWuSPD6V6UJuQ6vmwX", "reqId": 1504012356595162, "identifier": "H2aKRiDeq8aLZSydQMDbtf", "digest": "58bbbb186978c505de3c63b212f4af925ab9076e2d4d7f1af054c0764fa85400"}, "py/object": "sovrin_common.types.Request"}, null]}, "json://{\"py/tuple\": [\"Th7MpTaRZVRYnPiabds81Y\", 1504010285363587]}": {"py/tuple": [{"py/state": {"operation": {"role": "101", "verkey": "~WjXEvZ9xj4Tz9sLtzf7HVP", "dest": "CzkavE58zgX7rUMrzSinLr", "type": "1"}, "signature": "3UGgq7JWkfQEuLb9qrpjKk4cBL5gB2Xn4EivZLvwJ2ELvX4ksTPbDWBRpjcYVcnPNA9wwHxrfLhHssJrLQrGYEoe", "reqId": 1504010285363587, "identifier": "Th7MpTaRZVRYnPiabds81Y", "digest": "8be02e03b138f7039b8909feb4a34a7f5efea85c14033dac8631dbfe7db6ccba"}, "py/object": "sovrin_common.types.Request"}, "CzkavE58zgX7rUMrzSinLr"]}, "json://{\"py/tuple\": [\"H2aKRiDeq8aLZSydQMDbtf\", 1504011945667203]}": {"py/tuple": [{"py/state": {"operation": {"dest": "ULtgFQJe6bjiFbs7ke3NJD", "type": "107", "data": {"version": "1.2", "name": "Transcript"}}, "signature": "R4XW9CPcUdmPzK7e8vSA7KpWiX9BkTFPkMmQeDKMP3AySd45ScnaCKwwC51ZRd3BneZrBjkGUpqaZSzj39rgroL", "reqId": 1504011945667203, "identifier": "H2aKRiDeq8aLZSydQMDbtf", "digest": "24e1192a567f91a910f76105057fc37b1cd965cb6cfb69fcb7bbc7dcacbc1dd0"}, "py/object": "sovrin_common.types.Request"}, null]}, "json://{\"py/tuple\": [\"H2aKRiDeq8aLZSydQMDbtf\", 1504012047713360]}": {"py/tuple": [{"py/state": {"operation": {"dest": "CzkavE58zgX7rUMrzSinLr", "type": "105"}, "signature": "5iXwQDBQYQv7SUeJ2oeFuV7W9ryqVhVTxWYGPHKHz1GiTuLh8sNcn91iXHmB9Wsa4QbKC85tFQyacwqmr2T6Jb2A", 
"reqId": 1504012047713360, "identifier": "H2aKRiDeq8aLZSydQMDbtf", "digest": "aafdc7c965e5bc06e2861e11aaf2289b1a02c1272070a51cafc97a091ec584e6"}, "py/object": "sovrin_common.types.Request"}, null]}, "json://{\"py/tuple\": [\"H2aKRiDeq8aLZSydQMDbtf\", 1504012355315102]}": {"py/tuple": [{"py/state": {"operation": {"dest": "H2aKRiDeq8aLZSydQMDbtf", "type": "105"}, "signature": "5BsGV8Bt8P1jbaJmMfU24oXJQmxHQpCpp8JGem9Ge6k7ZmdXRB4qtWZS4qjqXQN8QmaUf7Dn6G8FobNv3jyfZQbr", "reqId": 1504012355315102, "identifier": "H2aKRiDeq8aLZSydQMDbtf", "digest": "7beaccec295b7457d7450a7ed2ce1f994e25ac82afa994c85d9d9a3c047a88c8"}, "py/object": "sovrin_common.types.Request"}, null]}, "json://{\"py/tuple\": [\"H2aKRiDeq8aLZSydQMDbtf\", 1504011882040323]}": {"py/tuple": [{"py/state": {"operation": {"dest": "ULtgFQJe6bjiFbs7ke3NJD", "type": "105"}, "signature": "2ErcvN45rsVV1YDMnoForoKA3iKvPCL7oLhnNToKEtPE5gvD2GL5Uwrh3GBWwX1Hk484MFQUkn4aoJjecQgo7wNw", "reqId": 1504011882040323, "identifier": "H2aKRiDeq8aLZSydQMDbtf", "digest": "15b7ec7667e1978c4fb0c3062f826d6eafb28eb370cf9f57f4159808990898e9"}, "py/object": "sovrin_common.types.Request"}, null]}, "json://{\"py/tuple\": [\"H2aKRiDeq8aLZSydQMDbtf\", 1504012047714732]}": {"py/tuple": [{"py/state": {"operation": {"dest": "CzkavE58zgX7rUMrzSinLr", "type": "104", "raw": "endpoint"}, "signature": "eQPkA7m2NJBTiuYfXtEEqWS8Tg6Zy2ByNQkmK6u3BKjC6wumimCquBEg5sq6yqWos3KbAEvvJ66ZtAjH2P8Dxgh", "reqId": 1504012047714732, "identifier": "H2aKRiDeq8aLZSydQMDbtf", "digest": "8b445a2bc31d4ad2eac3e73a8750fa943085d3f9d606bb93db8b2afa73930878"}, "py/object": "sovrin_common.types.Request"}, {"py/tuple": ["endpoint", null, "CzkavE58zgX7rUMrzSinLr"]}]}, "json://{\"py/tuple\": [\"H2aKRiDeq8aLZSydQMDbtf\", 1504012355316429]}": {"py/tuple": [{"py/state": {"operation": {"dest": "H2aKRiDeq8aLZSydQMDbtf", "type": "104", "raw": "endpoint"}, "signature": "3tyqA7ut7UTVU8TGWNHWDd7PT6fa7Qj2HkBe4t4Q5daRwTmQz4fhubaVGsLzPTz5SpHa3pwUvkm57qUTfCeLh2aX", "reqId": 1504012355316429, 
"identifier": "H2aKRiDeq8aLZSydQMDbtf", "digest": "7ecf3b0fbb7726a355e54ecb527a3faf23e0a2d2433ea3cbec23e35e26b4ff3f"}, "py/object": "sovrin_common.types.Request"}, {"py/tuple": ["endpoint", null, "H2aKRiDeq8aLZSydQMDbtf"]}]}, "json://{\"py/tuple\": [\"H2aKRiDeq8aLZSydQMDbtf\", 1504012207718128]}": {"py/tuple": [{"py/state": {"operation": {"type": "108", "signature_type": "CL", "origin": "CzkavE58zgX7rUMrzSinLr", "ref": 15}, "signature": "2G2dwnMHAo2DcbKmsPyoCmmYrqEeyEkyAMW3BG7ABRytCXbUJGuQazJpju32FDczAXei78VHD1UKNkNUZnkQ69of", "reqId": 1504012207718128, "identifier": "H2aKRiDeq8aLZSydQMDbtf", "digest": "c5cdb9d2bfb2f9da143238099f482708ae17c342f0ba05cad8eb90c2f9ad80b5"}, "py/object": "sovrin_common.types.Request"}, null]}, "json://{\"py/tuple\": [\"H2aKRiDeq8aLZSydQMDbtf\", 1504011882042186]}": {"py/tuple": [{"py/state": {"operation": {"dest": "ULtgFQJe6bjiFbs7ke3NJD", "type": "104", "raw": "endpoint"}, "signature": "5q2PR8fTvXFcotCn1KQdRw5geixyncGe4YKtpHdJhaRNPJy6mND1X7pm4gX4garStMes2tAJ5P818TKZCzjQz1HD", "reqId": 1504011882042186, "identifier": "H2aKRiDeq8aLZSydQMDbtf", "digest": "906b1b51ff7641001c057fecf3c4fe7b0663cc93f82f8bb2819fcafe68a06e8a"}, "py/object": "sovrin_common.types.Request"}, {"py/tuple": ["endpoint", null, "ULtgFQJe6bjiFbs7ke3NJD"]}]}, "json://{\"py/tuple\": [\"H2aKRiDeq8aLZSydQMDbtf\", 1504010486440323]}": {"py/tuple": [{"py/state": {"operation": {"dest": "H2aKRiDeq8aLZSydQMDbtf", "type": "100", "raw": "{\"endpoint\": {\"ha\": \"127.0.0.1:7777\", \"pubkey\": \"AGBjYvyM3SFnoiDGAEzkSLHvqyzVkXeMZfKDvdpEsC2x\"}}"}, "signature": "3YfkBsc7CqwpWBLcADCk4czcfkgbXvxYSCkRNrVbQhystCpmYU5XUC96o3sHzg5AxdZhFCc14SQwCwkK9yKK926a", "reqId": 1504010486440323, "identifier": "H2aKRiDeq8aLZSydQMDbtf", "digest": "0165abda3ba23da73fe2a1a969be08ab90949018be247a26c565ed4a32dcbd39"}, "py/object": "sovrin_common.types.Request"}, {"py/tuple": ["aeff8", "H2aKRiDeq8aLZSydQMDbtf", "H2aKRiDeq8aLZSydQMDbtf"]}]}}, "ids": {"H2aKRiDeq8aLZSydQMDbtf": {"py/newargs": {"py/tuple": 
[{"abbreviated": true, "seed": {"py/b64": "VGhyaWZ0MDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDA=\n"}, "naclSigner": {"keyhex": {"py/b64": "NTQ2ODcyNjk2Njc0MzAzMDMwMzAzMDMwMzAzMDMwMzAzMDMwMzAzMDMwMzAzMDMwMzAzMDMwMzAz\nMDMwMzAzMA==\n"}, "verraw": {"py/b64": "gcp+vfaMWkvmGXYQd3uE/BdK3btf/TR+8xuqhvOYDw0=\n"}, "verhex": {"py/b64": "ODFjYTdlYmRmNjhjNWE0YmU2MTk3NjEwNzc3Yjg0ZmMxNzRhZGRiYjVmZmQzNDdlZjMxYmFhODZm\nMzk4MGYwZA==\n"}, "keyraw": {"py/b64": "VGhyaWZ0MDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDA=\n"}, "py/object": "stp_core.crypto.nacl_wrappers.Signer", "key": {"_signing_key": {"py/b64": "VGhyaWZ0MDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDCByn699oxaS+YZdhB3e4T8F0rdu1/9NH7z\nG6qG85gPDQ==\n"}, "_seed": {"py/b64": "VGhyaWZ0MDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDA=\n"}, "py/object": "stp_core.crypto.nacl_wrappers.SigningKey", "verify_key": {"_key": {"py/b64": "gcp+vfaMWkvmGXYQd3uE/BdK3btf/TR+8xuqhvOYDw0=\n"}, "py/object": "stp_core.crypto.nacl_wrappers.VerifyKey"}}}, "_identifier": "H2aKRiDeq8aLZSydQMDbtf", "_alias": null, "py/object": "plenum.common.signer_did.DidSigner", "_verkey": "3sphzTb2itL2mwSeJ1Ji28", "sk": {"py/id": 89}}, 1504012356595162]}, "py/object": "plenum.client.wallet.IdData", "py/seq": [{"py/id": 87}, 1504012356595162]}, "CzkavE58zgX7rUMrzSinLr": {"py/newargs": {"py/tuple": [{"abbreviated": true, "seed": {"py/b64": "QWNtZTAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDA=\n"}, "naclSigner": {"keyhex": {"py/b64": "NDE2MzZkNjUzMDMwMzAzMDMwMzAzMDMwMzAzMDMwMzAzMDMwMzAzMDMwMzAzMDMwMzAzMDMwMzAz\nMDMwMzAzMA==\n"}, "verraw": {"py/b64": "YSTHvR/sxdu41ig9mcqMq/DI5USQMVU4kpa6anJhot4=\n"}, "verhex": {"py/b64": "NjEyNGM3YmQxZmVjYzVkYmI4ZDYyODNkOTljYThjYWJmMGM4ZTU0NDkwMzE1NTM4OTI5NmJhNmE3\nMjYxYTJkZQ==\n"}, "keyraw": {"py/b64": "QWNtZTAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDA=\n"}, "py/object": "stp_core.crypto.nacl_wrappers.Signer", "key": {"_signing_key": {"py/b64": "QWNtZTAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDBhJMe9H+zF27jWKD2Zyoyr8MjlRJAxVTiS\nlrpqcmGi3g==\n"}, "_seed": {"py/b64": 
"QWNtZTAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDA=\n"}, "py/object": "stp_core.crypto.nacl_wrappers.SigningKey", "verify_key": {"_key": {"py/b64": "YSTHvR/sxdu41ig9mcqMq/DI5USQMVU4kpa6anJhot4=\n"}, "py/object": "stp_core.crypto.nacl_wrappers.VerifyKey"}}}, "_identifier": "CzkavE58zgX7rUMrzSinLr", "_alias": null, "py/object": "plenum.common.signer_did.DidSigner", "_verkey": "WjXEvZ9xj4Tz9sLtzf7HVP", "sk": {"py/id": 84}}, 1504010350751398]}, "py/object": "plenum.client.wallet.IdData", "py/seq": [{"py/id": 82}, 1504010350751398]}, "Th7MpTaRZVRYnPiabds81Y": {"py/newargs": {"py/tuple": [{"abbreviated": true, "seed": {"py/b64": "MDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwU3Rld2FyZDE=\n"}, "naclSigner": {"keyhex": {"py/b64": "MzAzMDMwMzAzMDMwMzAzMDMwMzAzMDMwMzAzMDMwMzAzMDMwMzAzMDMwMzAzMDMwNTM3NDY1Nzc2\nMTcyNjQzMQ==\n"}, "verraw": {"py/b64": "2CdGWNI7wuQZTRIy//cOKzRLr2IQZ+tqqSsvzvDJTo0=\n"}, "verhex": {"py/b64": "ZDgyNzQ2NThkMjNiYzJlNDE5NGQxMjMyZmZmNzBlMmIzNDRiYWY2MjEwNjdlYjZhYTkyYjJmY2Vm\nMGM5NGU4ZA==\n"}, "keyraw": {"py/b64": "MDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwU3Rld2FyZDE=\n"}, "py/object": "stp_core.crypto.nacl_wrappers.Signer", "key": {"_signing_key": {"py/b64": "MDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwU3Rld2FyZDHYJ0ZY0jvC5BlNEjL/9w4rNEuvYhBn62qp\nKy/O8MlOjQ==\n"}, "_seed": {"py/b64": "MDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwU3Rld2FyZDE=\n"}, "py/object": "stp_core.crypto.nacl_wrappers.SigningKey", "verify_key": {"_key": {"py/b64": "2CdGWNI7wuQZTRIy//cOKzRLr2IQZ+tqqSsvzvDJTo0=\n"}, "py/object": "stp_core.crypto.nacl_wrappers.VerifyKey"}}}, "_identifier": "Th7MpTaRZVRYnPiabds81Y", "_alias": null, "py/object": "plenum.common.signer_did.DidSigner", "_verkey": "7TYfekw4GUagBnBVCqPjiC", "sk": {"py/id": 94}}, 1504010398619692]}, "py/object": "plenum.client.wallet.IdData", "py/seq": [{"py/id": 92}, 1504010398619692]}, "ULtgFQJe6bjiFbs7ke3NJD": {"py/newargs": {"py/tuple": [{"abbreviated": true, "seed": {"py/b64": "RmFiZXIwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDA=\n"}, "naclSigner": {"keyhex": {"py/b64": 
"NDY2MTYyNjU3MjMwMzAzMDMwMzAzMDMwMzAzMDMwMzAzMDMwMzAzMDMwMzAzMDMwMzAzMDMwMzAz\nMDMwMzAzMA==\n"}, "verraw": {"py/b64": "3W2465HP3OUPGkiNlTMl2iZ+NiMZegfUFIsl8378KH4=\n"}, "verhex": {"py/b64": "ZGQ2ZGI4ZWI5MWNmZGNlNTBmMWE0ODhkOTUzMzI1ZGEyNjdlMzYyMzE5N2EwN2Q0MTQ4YjI1ZjM3\nZWZjMjg3ZQ==\n"}, "keyraw": {"py/b64": "RmFiZXIwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDA=\n"}, "py/object": "stp_core.crypto.nacl_wrappers.Signer", "key": {"_signing_key": {"py/b64": "RmFiZXIwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDDdbbjrkc/c5Q8aSI2VMyXaJn42Ixl6B9QU\niyXzfvwofg==\n"}, "_seed": {"py/b64": "RmFiZXIwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDA=\n"}, "py/object": "stp_core.crypto.nacl_wrappers.SigningKey", "verify_key": {"_key": {"py/b64": "3W2465HP3OUPGkiNlTMl2iZ+NiMZegfUFIsl8378KH4=\n"}, "py/object": "stp_core.crypto.nacl_wrappers.VerifyKey"}}}, "_identifier": "ULtgFQJe6bjiFbs7ke3NJD", "_alias": null, "py/object": "plenum.common.signer_did.DidSigner", "_verkey": "5kh3FB4H3NKq7tUDqeqHc1", "sk": {"py/id": 99}}, 1504010242889826]}, "py/object": "plenum.client.wallet.IdData", "py/seq": [{"py/id": 97}, 1504010242889826]}}, "lastKnownSeqs": {}, "defaultId": "H2aKRiDeq8aLZSydQMDbtf", "knownIds": {"BJWY59pnbdGrThaxgbpaWY": {"identity": {"_identifier": "BJWY59pnbdGrThaxgbpaWY", "abbreviated": null, "py/object": "plenum.common.signer_did.DidIdentity", "_verkey": null}, "last_synced": null, "_role": null, "py/object": "sovrin_common.identity.Identity", "seqNo": null, "trustAnchor": null}, "H2aKRiDeq8aLZSydQMDbtf": {"identity": {"_identifier": "H2aKRiDeq8aLZSydQMDbtf", "abbreviated": null, "py/object": "plenum.common.signer_did.DidIdentity", "_verkey": null}, "last_synced": null, "_role": null, "py/object": "sovrin_common.identity.Identity", "seqNo": null, "trustAnchor": null}, "PE83fLnVVSihiq39bhD3gk": {"identity": {"_identifier": "PE83fLnVVSihiq39bhD3gk", "abbreviated": null, "py/object": "plenum.common.signer_did.DidIdentity", "_verkey": null}, "last_synced": null, "_role": null, "py/object": 
"sovrin_common.identity.Identity", "seqNo": null, "trustAnchor": null}, "ULtgFQJe6bjiFbs7ke3NJD": {"identity": {"_identifier": "ULtgFQJe6bjiFbs7ke3NJD", "abbreviated": null, "py/object": "plenum.common.signer_did.DidIdentity", "_verkey": null}, "last_synced": null, "_role": null, "py/object": "sovrin_common.identity.Identity", "seqNo": null, "trustAnchor": null}, "CzkavE58zgX7rUMrzSinLr": {"identity": {"_identifier": "CzkavE58zgX7rUMrzSinLr", "abbreviated": null, "py/object": "plenum.common.signer_did.DidIdentity", "_verkey": null}, "last_synced": null, "_role": null, "py/object": "sovrin_common.identity.Identity", "seqNo": null, "trustAnchor": null}, "DbMzrC2CsumhvkRkjj3RMA": {"identity": {"_identifier": "DbMzrC2CsumhvkRkjj3RMA", "abbreviated": null, "py/object": "plenum.common.signer_did.DidIdentity", "_verkey": null}, "last_synced": null, "_role": null, "py/object": "sovrin_common.identity.Identity", "seqNo": null, "trustAnchor": null}}, "replyHandler": {}} \ No newline at end of file diff --git a/indy_client/test/cli/wallet_before_renaming_link_to_connection b/indy_client/test/cli/wallet_before_renaming_link_to_connection deleted file mode 100755 index 8d651847e..000000000 --- a/indy_client/test/cli/wallet_before_renaming_link_to_connection +++ /dev/null @@ -1 +0,0 @@ -{"_prepared": {"json://{\"py/tuple\": [\"H2aKRiDeq8aLZSydQMDbtf\", 1504012047714732]}": {"py/tuple": [{"py/object": "sovrin_common.types.Request", "py/state": {"signature": "eQPkA7m2NJBTiuYfXtEEqWS8Tg6Zy2ByNQkmK6u3BKjC6wumimCquBEg5sq6yqWos3KbAEvvJ66ZtAjH2P8Dxgh", "operation": {"type": "104", "raw": "endpoint", "dest": "CzkavE58zgX7rUMrzSinLr"}, "digest": "8b445a2bc31d4ad2eac3e73a8750fa943085d3f9d606bb93db8b2afa73930878", "identifier": "H2aKRiDeq8aLZSydQMDbtf", "reqId": 1504012047714732}}, {"py/tuple": ["endpoint", null, "CzkavE58zgX7rUMrzSinLr"]}]}, "json://{\"py/tuple\": [\"H2aKRiDeq8aLZSydQMDbtf\", 1504012355315102]}": {"py/tuple": [{"py/object": "sovrin_common.types.Request", "py/state": 
{"signature": "5BsGV8Bt8P1jbaJmMfU24oXJQmxHQpCpp8JGem9Ge6k7ZmdXRB4qtWZS4qjqXQN8QmaUf7Dn6G8FobNv3jyfZQbr", "operation": {"type": "105", "dest": "H2aKRiDeq8aLZSydQMDbtf"}, "digest": "7beaccec295b7457d7450a7ed2ce1f994e25ac82afa994c85d9d9a3c047a88c8", "identifier": "H2aKRiDeq8aLZSydQMDbtf", "reqId": 1504012355315102}}, null]}, "json://{\"py/tuple\": [\"H2aKRiDeq8aLZSydQMDbtf\", 1504012188740538]}": {"py/tuple": [{"py/object": "sovrin_common.types.Request", "py/state": {"signature": "2nCUmiFiW8PNnFTKj6bBofZTZ14uw7WGoBcnqY7fCfG882MHLH76X2YaWGTWKDU5hZxHPXPtyWFdCKCjy9nQfkaA", "operation": {"type": "107", "data": {"name": "Job-Certificate", "version": "0.2"}, "dest": "CzkavE58zgX7rUMrzSinLr"}, "digest": "dfbfb8166a324fd9bd61ccbccb7c872827af6407415760ff1cca2b674ad535a1", "identifier": "H2aKRiDeq8aLZSydQMDbtf", "reqId": 1504012188740538}}, null]}, "json://{\"py/tuple\": [\"H2aKRiDeq8aLZSydQMDbtf\", 1504012356595162]}": {"py/tuple": [{"py/object": "sovrin_common.types.Request", "py/state": {"signature": "4n2ZQXHrGPpzHQsEcHA8V3j1Tw5TF5aj1wm6m41tMBTLC1RXyhnFyF1BoAdRA9vZPXmMFhTWuSPD6V6UJuQ6vmwX", "operation": {"type": "105", "dest": "DbMzrC2CsumhvkRkjj3RMA"}, "digest": "58bbbb186978c505de3c63b212f4af925ab9076e2d4d7f1af054c0764fa85400", "identifier": "H2aKRiDeq8aLZSydQMDbtf", "reqId": 1504012356595162}}, null]}, "json://{\"py/tuple\": [\"H2aKRiDeq8aLZSydQMDbtf\", 1504012047713360]}": {"py/tuple": [{"py/object": "sovrin_common.types.Request", "py/state": {"signature": "5iXwQDBQYQv7SUeJ2oeFuV7W9ryqVhVTxWYGPHKHz1GiTuLh8sNcn91iXHmB9Wsa4QbKC85tFQyacwqmr2T6Jb2A", "operation": {"type": "105", "dest": "CzkavE58zgX7rUMrzSinLr"}, "digest": "aafdc7c965e5bc06e2861e11aaf2289b1a02c1272070a51cafc97a091ec584e6", "identifier": "H2aKRiDeq8aLZSydQMDbtf", "reqId": 1504012047713360}}, null]}, "json://{\"py/tuple\": [\"Th7MpTaRZVRYnPiabds81Y\", 1504010181315940]}": {"py/tuple": [{"py/object": "sovrin_common.types.Request", "py/state": {"signature": 
"54SEC4pE41BYDyweHPcRRMtsaHttqJkSuGPxiTxy8eVK8KmvHFFzevkZ4HvAB4hAi9w7mdBXsofpZfNFeSDW8DdA", "operation": {"verkey": "~5kh3FB4H3NKq7tUDqeqHc1", "type": "1", "role": "101", "dest": "ULtgFQJe6bjiFbs7ke3NJD"}, "digest": "f6bcf8b6d9a272bd7bf5d1787f37f3aa3ced590c00fdbf589b74a43552c7fe15", "identifier": "Th7MpTaRZVRYnPiabds81Y", "reqId": 1504010181315940}}, "ULtgFQJe6bjiFbs7ke3NJD"]}, "json://{\"py/tuple\": [\"Th7MpTaRZVRYnPiabds81Y\", 1504010398619692]}": {"py/tuple": [{"py/object": "sovrin_common.types.Request", "py/state": {"signature": "3yKXjwJ5r3GgZBzEQ1NZyB5TJz7A7UYTrbpiDmSoy5qirWoZ3fBuezN2T4HGB3XHQDCSDJC9ziacSHGsTubyxLza", "operation": {"verkey": "~3sphzTb2itL2mwSeJ1Ji28", "type": "1", "role": "101", "dest": "H2aKRiDeq8aLZSydQMDbtf"}, "digest": "e0808960a3543e452f83435dd142c67084763f272640388d3dd37294fd29a041", "identifier": "Th7MpTaRZVRYnPiabds81Y", "reqId": 1504010398619692}}, "H2aKRiDeq8aLZSydQMDbtf"]}, "json://{\"py/tuple\": [\"Th7MpTaRZVRYnPiabds81Y\", 1504010285363587]}": {"py/tuple": [{"py/object": "sovrin_common.types.Request", "py/state": {"signature": "3UGgq7JWkfQEuLb9qrpjKk4cBL5gB2Xn4EivZLvwJ2ELvX4ksTPbDWBRpjcYVcnPNA9wwHxrfLhHssJrLQrGYEoe", "operation": {"verkey": "~WjXEvZ9xj4Tz9sLtzf7HVP", "type": "1", "role": "101", "dest": "CzkavE58zgX7rUMrzSinLr"}, "digest": "8be02e03b138f7039b8909feb4a34a7f5efea85c14033dac8631dbfe7db6ccba", "identifier": "Th7MpTaRZVRYnPiabds81Y", "reqId": 1504010285363587}}, "CzkavE58zgX7rUMrzSinLr"]}, "json://{\"py/tuple\": [\"H2aKRiDeq8aLZSydQMDbtf\", 1504011882042186]}": {"py/tuple": [{"py/object": "sovrin_common.types.Request", "py/state": {"signature": "5q2PR8fTvXFcotCn1KQdRw5geixyncGe4YKtpHdJhaRNPJy6mND1X7pm4gX4garStMes2tAJ5P818TKZCzjQz1HD", "operation": {"type": "104", "raw": "endpoint", "dest": "ULtgFQJe6bjiFbs7ke3NJD"}, "digest": "906b1b51ff7641001c057fecf3c4fe7b0663cc93f82f8bb2819fcafe68a06e8a", "identifier": "H2aKRiDeq8aLZSydQMDbtf", "reqId": 1504011882042186}}, {"py/tuple": ["endpoint", null, 
"ULtgFQJe6bjiFbs7ke3NJD"]}]}, "json://{\"py/tuple\": [\"H2aKRiDeq8aLZSydQMDbtf\", 1504011883590259]}": {"py/tuple": [{"py/object": "sovrin_common.types.Request", "py/state": {"signature": "364whYUWxD7ujejo4C5tcxXwD5gV9BsdrAEckzy3ucHuSpiSqGXgf28swPY33YsSTbr8ynXYBGFGdCWQ3dYL5n3K", "operation": {"type": "105", "dest": "BJWY59pnbdGrThaxgbpaWY"}, "digest": "fc76ffe6a9961de24b7c04bbced7d325a105fc9293745ede561ad45bc002b37e", "identifier": "H2aKRiDeq8aLZSydQMDbtf", "reqId": 1504011883590259}}, null]}, "json://{\"py/tuple\": [\"ULtgFQJe6bjiFbs7ke3NJD\", 1504010242889826]}": {"py/tuple": [{"py/object": "sovrin_common.types.Request", "py/state": {"signature": "2rJA57ukRizRu3SSQRE6mdrV7o4s3gTRDdFRn7ov3cTVtU6bidkboVG2hGRE9SECSpFyTe231yXEibio4MPxQ8k3", "operation": {"type": "100", "raw": "{\"endpoint\": {\"ha\": \"127.0.0.1:5555\", \"pubkey\": \"5hmMA64DDQz5NzGJNVtRzNwpkZxktNQds21q3Wxxa62z\"}}", "dest": "ULtgFQJe6bjiFbs7ke3NJD"}, "digest": "85c79438fab5bece84b69325ae55bced2cd1dddab99ce5c3153db78740c18c28", "identifier": "ULtgFQJe6bjiFbs7ke3NJD", "reqId": 1504010242889826}}, {"py/tuple": ["66b17", "ULtgFQJe6bjiFbs7ke3NJD", "ULtgFQJe6bjiFbs7ke3NJD"]}]}, "json://{\"py/tuple\": [\"H2aKRiDeq8aLZSydQMDbtf\", 1504011882040323]}": {"py/tuple": [{"py/object": "sovrin_common.types.Request", "py/state": {"signature": "2ErcvN45rsVV1YDMnoForoKA3iKvPCL7oLhnNToKEtPE5gvD2GL5Uwrh3GBWwX1Hk484MFQUkn4aoJjecQgo7wNw", "operation": {"type": "105", "dest": "ULtgFQJe6bjiFbs7ke3NJD"}, "digest": "15b7ec7667e1978c4fb0c3062f826d6eafb28eb370cf9f57f4159808990898e9", "identifier": "H2aKRiDeq8aLZSydQMDbtf", "reqId": 1504011882040323}}, null]}, "json://{\"py/tuple\": [\"H2aKRiDeq8aLZSydQMDbtf\", 1504012207718128]}": {"py/tuple": [{"py/object": "sovrin_common.types.Request", "py/state": {"signature": "2G2dwnMHAo2DcbKmsPyoCmmYrqEeyEkyAMW3BG7ABRytCXbUJGuQazJpju32FDczAXei78VHD1UKNkNUZnkQ69of", "operation": {"ref": 15, "type": "108", "signature_type": "CL", "origin": "CzkavE58zgX7rUMrzSinLr"}, "digest": 
"c5cdb9d2bfb2f9da143238099f482708ae17c342f0ba05cad8eb90c2f9ad80b5", "identifier": "H2aKRiDeq8aLZSydQMDbtf", "reqId": 1504012207718128}}, null]}, "json://{\"py/tuple\": [\"H2aKRiDeq8aLZSydQMDbtf\", 1504012355316429]}": {"py/tuple": [{"py/object": "sovrin_common.types.Request", "py/state": {"signature": "3tyqA7ut7UTVU8TGWNHWDd7PT6fa7Qj2HkBe4t4Q5daRwTmQz4fhubaVGsLzPTz5SpHa3pwUvkm57qUTfCeLh2aX", "operation": {"type": "104", "raw": "endpoint", "dest": "H2aKRiDeq8aLZSydQMDbtf"}, "digest": "7ecf3b0fbb7726a355e54ecb527a3faf23e0a2d2433ea3cbec23e35e26b4ff3f", "identifier": "H2aKRiDeq8aLZSydQMDbtf", "reqId": 1504012355316429}}, {"py/tuple": ["endpoint", null, "H2aKRiDeq8aLZSydQMDbtf"]}]}, "json://{\"py/tuple\": [\"H2aKRiDeq8aLZSydQMDbtf\", 1504012048933581]}": {"py/tuple": [{"py/object": "sovrin_common.types.Request", "py/state": {"signature": "58CmzefeGey3ofK6jrRMVbwtWbjG13F5kbdqnw54We5g2z7o4D191HPH4kDW28PgpmHoKS6oWuCBfq3JCZD3xWZ4", "operation": {"type": "105", "dest": "PE83fLnVVSihiq39bhD3gk"}, "digest": "8012bd04271496c7a06c83910089fb11fde11689a5ef86ebc3a35d25c9bcdaca", "identifier": "H2aKRiDeq8aLZSydQMDbtf", "reqId": 1504012048933581}}, null]}, "json://{\"py/tuple\": [\"H2aKRiDeq8aLZSydQMDbtf\", 1504011945667203]}": {"py/tuple": [{"py/object": "sovrin_common.types.Request", "py/state": {"signature": "R4XW9CPcUdmPzK7e8vSA7KpWiX9BkTFPkMmQeDKMP3AySd45ScnaCKwwC51ZRd3BneZrBjkGUpqaZSzj39rgroL", "operation": {"type": "107", "data": {"name": "Transcript", "version": "1.2"}, "dest": "ULtgFQJe6bjiFbs7ke3NJD"}, "digest": "24e1192a567f91a910f76105057fc37b1cd965cb6cfb69fcb7bbc7dcacbc1dd0", "identifier": "H2aKRiDeq8aLZSydQMDbtf", "reqId": 1504011945667203}}, null]}, "json://{\"py/tuple\": [\"H2aKRiDeq8aLZSydQMDbtf\", 1504010486440323]}": {"py/tuple": [{"py/object": "sovrin_common.types.Request", "py/state": {"signature": "3YfkBsc7CqwpWBLcADCk4czcfkgbXvxYSCkRNrVbQhystCpmYU5XUC96o3sHzg5AxdZhFCc14SQwCwkK9yKK926a", "operation": {"type": "100", "raw": "{\"endpoint\": {\"ha\": 
\"127.0.0.1:7777\", \"pubkey\": \"AGBjYvyM3SFnoiDGAEzkSLHvqyzVkXeMZfKDvdpEsC2x\"}}", "dest": "H2aKRiDeq8aLZSydQMDbtf"}, "digest": "0165abda3ba23da73fe2a1a969be08ab90949018be247a26c565ed4a32dcbd39", "identifier": "H2aKRiDeq8aLZSydQMDbtf", "reqId": 1504010486440323}}, {"py/tuple": ["aeff8", "H2aKRiDeq8aLZSydQMDbtf", "H2aKRiDeq8aLZSydQMDbtf"]}]}, "json://{\"py/tuple\": [\"CzkavE58zgX7rUMrzSinLr\", 1504010350751398]}": {"py/tuple": [{"py/object": "sovrin_common.types.Request", "py/state": {"signature": "3qPia5g6BGx8vypLNtwWQRUmKvwa75ortax8gpeyYWSW91hZyjSavqKPJDUGkpTosFmBPtUhTnRyxihkujX2vLtw", "operation": {"type": "100", "raw": "{\"endpoint\": {\"ha\": \"127.0.0.1:6666\", \"pubkey\": \"C5eqjU7NMVMGGfGfx2ubvX5H9X346bQt5qeziVAo3naQ\"}}", "dest": "CzkavE58zgX7rUMrzSinLr"}, "digest": "4efe29e01a1d5759709f1568b17ca323a3adce3767a523ad943f1635d2324856", "identifier": "CzkavE58zgX7rUMrzSinLr", "reqId": 1504010350751398}}, {"py/tuple": ["07867", "CzkavE58zgX7rUMrzSinLr", "CzkavE58zgX7rUMrzSinLr"]}]}, "json://{\"py/tuple\": [\"H2aKRiDeq8aLZSydQMDbtf\", 1504011967739666]}": {"py/tuple": [{"py/object": "sovrin_common.types.Request", "py/state": {"signature": "4kr5ayEMn8Wc1S1tfEcfJqpNr14u4MFzZmsjZhPnXDYD8RqdretNoVHr8U7AGDgufjZgNpF3WGWY5KGag8kCACdb", "operation": {"ref": 13, "type": "108", "signature_type": "CL", "origin": "ULtgFQJe6bjiFbs7ke3NJD"}, "digest": "427f12c8d3f30bbda02c09aa900f27c3d1bd657bd5f58078dc90c690e513ac89", "identifier": "H2aKRiDeq8aLZSydQMDbtf", "reqId": 1504011967739666}}, null]}}, "_upgrades": {}, "_nodes": {}, "_pending": {"py/reduce": [{"py/type": "collections.deque"}, {"py/tuple": [[]]}, null, null, null]}, "aliasesToIds": {}, "env": "no-env", "idsToSigners": {"Th7MpTaRZVRYnPiabds81Y": {"py/id": 92}, "CzkavE58zgX7rUMrzSinLr": {"py/id": 82}, "BJWY59pnbdGrThaxgbpaWY": {"seed": {"py/b64": "th1xDu+RHRGP7+dsGlDkZkGjK/Idt0/QWZwDMUmWfO4=\n"}, "naclSigner": {"keyraw": {"py/b64": "th1xDu+RHRGP7+dsGlDkZkGjK/Idt0/QWZwDMUmWfO4=\n"}, "verhex": {"py/b64": 
"NTM2ZDNiOTNhZjZhYzRlZjMwMThiODUzNGNhZDg3NmQ1MjE1MDQyY2EwMzg0YmFlNWViMDY5YmNi\nODAyMmFjNQ==\n"}, "keyhex": {"py/b64": "YjYxZDcxMGVlZjkxMWQxMThmZWZlNzZjMWE1MGU0NjY0MWEzMmJmMjFkYjc0ZmQwNTk5YzAzMzE0\nOTk2N2NlZQ==\n"}, "py/object": "stp_core.crypto.nacl_wrappers.Signer", "verraw": {"py/b64": "U207k69qxO8wGLhTTK2HbVIVBCygOEuuXrBpvLgCKsU=\n"}, "key": {"_seed": {"py/b64": "th1xDu+RHRGP7+dsGlDkZkGjK/Idt0/QWZwDMUmWfO4=\n"}, "py/object": "stp_core.crypto.nacl_wrappers.SigningKey", "verify_key": {"py/object": "stp_core.crypto.nacl_wrappers.VerifyKey", "_key": {"py/b64": "U207k69qxO8wGLhTTK2HbVIVBCygOEuuXrBpvLgCKsU=\n"}}, "_signing_key": {"py/b64": "th1xDu+RHRGP7+dsGlDkZkGjK/Idt0/QWZwDMUmWfO5TbTuTr2rE7zAYuFNMrYdtUhUELKA4S65e\nsGm8uAIqxQ==\n"}}}, "_verkey": "B8szPF41YG76ePciYUvG3W", "_identifier": "BJWY59pnbdGrThaxgbpaWY", "py/object": "plenum.common.signer_did.DidSigner", "abbreviated": true, "sk": {"py/id": 103}, "_alias": null}, "PE83fLnVVSihiq39bhD3gk": {"seed": {"py/b64": "BVoiFg4P8/334xSgcVafdHim/stDcuZZ6uYIUr7vpAI=\n"}, "naclSigner": {"keyraw": {"py/b64": "BVoiFg4P8/334xSgcVafdHim/stDcuZZ6uYIUr7vpAI=\n"}, "verhex": {"py/b64": "YjNmZTBmY2MxNDAxNTVlZTk2NjJmOWJjNjQ0N2RmZTllNTRiZmExYzM4Y2U0ZGViYWYxOTgzZWEx\nM2Y5Y2U2YQ==\n"}, "keyhex": {"py/b64": "MDU1YTIyMTYwZTBmZjNmZGY3ZTMxNGEwNzE1NjlmNzQ3OGE2ZmVjYjQzNzJlNjU5ZWFlNjA4NTJi\nZWVmYTQwMg==\n"}, "py/object": "stp_core.crypto.nacl_wrappers.Signer", "verraw": {"py/b64": "s/4PzBQBVe6WYvm8ZEff6eVL+hw4zk3rrxmD6hP5zmo=\n"}, "key": {"_seed": {"py/b64": "BVoiFg4P8/334xSgcVafdHim/stDcuZZ6uYIUr7vpAI=\n"}, "py/object": "stp_core.crypto.nacl_wrappers.SigningKey", "verify_key": {"py/object": "stp_core.crypto.nacl_wrappers.VerifyKey", "_key": {"py/b64": "s/4PzBQBVe6WYvm8ZEff6eVL+hw4zk3rrxmD6hP5zmo=\n"}}, "_signing_key": {"py/b64": "BVoiFg4P8/334xSgcVafdHim/stDcuZZ6uYIUr7vpAKz/g/MFAFV7pZi+bxkR9/p5Uv6HDjOTeuv\nGYPqE/nOag==\n"}}}, "_verkey": "VKF8RPxwwEM7BxkipZX83j", "_identifier": "PE83fLnVVSihiq39bhD3gk", "py/object": 
"plenum.common.signer_did.DidSigner", "abbreviated": true, "sk": {"py/id": 111}, "_alias": null}, "DbMzrC2CsumhvkRkjj3RMA": {"seed": {"py/b64": "7akoSfANDCiGx9yKH/H9S4V8Lj0MMM7myzZ15lpada0=\n"}, "naclSigner": {"keyraw": {"py/b64": "7akoSfANDCiGx9yKH/H9S4V8Lj0MMM7myzZ15lpada0=\n"}, "verhex": {"py/b64": "NjVmOWU0NjZmMDkzZGZlNDY3ZDY1ZTQ5ZmM1MjVhZTE0MGQ1YTZlZTMzYmMzMmZkNDYyY2EyMmQw\nYTIwN2Y3Yg==\n"}, "keyhex": {"py/b64": "ZWRhOTI4NDlmMDBkMGMyODg2YzdkYzhhMWZmMWZkNGI4NTdjMmUzZDBjMzBjZWU2Y2IzNjc1ZTY1\nYTVhNzVhZA==\n"}, "py/object": "stp_core.crypto.nacl_wrappers.Signer", "verraw": {"py/b64": "ZfnkZvCT3+Rn1l5J/FJa4UDVpu4zvDL9RiyiLQogf3s=\n"}, "key": {"_seed": {"py/b64": "7akoSfANDCiGx9yKH/H9S4V8Lj0MMM7myzZ15lpada0=\n"}, "py/object": "stp_core.crypto.nacl_wrappers.SigningKey", "verify_key": {"py/object": "stp_core.crypto.nacl_wrappers.VerifyKey", "_key": {"py/b64": "ZfnkZvCT3+Rn1l5J/FJa4UDVpu4zvDL9RiyiLQogf3s=\n"}}, "_signing_key": {"py/b64": "7akoSfANDCiGx9yKH/H9S4V8Lj0MMM7myzZ15lpada1l+eRm8JPf5GfWXkn8UlrhQNWm7jO8Mv1G\nLKItCiB/ew==\n"}}}, "_verkey": "91MMTTVbqNdRQJ9uv7Hijt", "_identifier": "DbMzrC2CsumhvkRkjj3RMA", "py/object": "plenum.common.signer_did.DidSigner", "abbreviated": true, "sk": {"py/id": 107}, "_alias": null}, "ULtgFQJe6bjiFbs7ke3NJD": {"py/id": 97}, "H2aKRiDeq8aLZSydQMDbtf": {"py/id": 87}}, "lastKnownSeqs": {}, "ids": {"Th7MpTaRZVRYnPiabds81Y": {"py/object": "plenum.client.wallet.IdData", "py/newargs": {"py/tuple": [{"seed": {"py/b64": "MDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwU3Rld2FyZDE=\n"}, "naclSigner": {"keyraw": {"py/b64": "MDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwU3Rld2FyZDE=\n"}, "verhex": {"py/b64": "ZDgyNzQ2NThkMjNiYzJlNDE5NGQxMjMyZmZmNzBlMmIzNDRiYWY2MjEwNjdlYjZhYTkyYjJmY2Vm\nMGM5NGU4ZA==\n"}, "keyhex": {"py/b64": "MzAzMDMwMzAzMDMwMzAzMDMwMzAzMDMwMzAzMDMwMzAzMDMwMzAzMDMwMzAzMDMwNTM3NDY1Nzc2\nMTcyNjQzMQ==\n"}, "py/object": "stp_core.crypto.nacl_wrappers.Signer", "verraw": {"py/b64": "2CdGWNI7wuQZTRIy//cOKzRLr2IQZ+tqqSsvzvDJTo0=\n"}, "key": {"_seed": {"py/b64": 
"MDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwU3Rld2FyZDE=\n"}, "py/object": "stp_core.crypto.nacl_wrappers.SigningKey", "verify_key": {"py/object": "stp_core.crypto.nacl_wrappers.VerifyKey", "_key": {"py/b64": "2CdGWNI7wuQZTRIy//cOKzRLr2IQZ+tqqSsvzvDJTo0=\n"}}, "_signing_key": {"py/b64": "MDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwU3Rld2FyZDHYJ0ZY0jvC5BlNEjL/9w4rNEuvYhBn62qp\nKy/O8MlOjQ==\n"}}}, "_verkey": "7TYfekw4GUagBnBVCqPjiC", "_identifier": "Th7MpTaRZVRYnPiabds81Y", "py/object": "plenum.common.signer_did.DidSigner", "abbreviated": true, "sk": {"py/id": 94}, "_alias": null}, 1504010398619692]}, "py/seq": [{"py/id": 92}, 1504010398619692]}, "CzkavE58zgX7rUMrzSinLr": {"py/object": "plenum.client.wallet.IdData", "py/newargs": {"py/tuple": [{"seed": {"py/b64": "QWNtZTAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDA=\n"}, "naclSigner": {"keyraw": {"py/b64": "QWNtZTAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDA=\n"}, "verhex": {"py/b64": "NjEyNGM3YmQxZmVjYzVkYmI4ZDYyODNkOTljYThjYWJmMGM4ZTU0NDkwMzE1NTM4OTI5NmJhNmE3\nMjYxYTJkZQ==\n"}, "keyhex": {"py/b64": "NDE2MzZkNjUzMDMwMzAzMDMwMzAzMDMwMzAzMDMwMzAzMDMwMzAzMDMwMzAzMDMwMzAzMDMwMzAz\nMDMwMzAzMA==\n"}, "py/object": "stp_core.crypto.nacl_wrappers.Signer", "verraw": {"py/b64": "YSTHvR/sxdu41ig9mcqMq/DI5USQMVU4kpa6anJhot4=\n"}, "key": {"_seed": {"py/b64": "QWNtZTAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDA=\n"}, "py/object": "stp_core.crypto.nacl_wrappers.SigningKey", "verify_key": {"py/object": "stp_core.crypto.nacl_wrappers.VerifyKey", "_key": {"py/b64": "YSTHvR/sxdu41ig9mcqMq/DI5USQMVU4kpa6anJhot4=\n"}}, "_signing_key": {"py/b64": "QWNtZTAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDBhJMe9H+zF27jWKD2Zyoyr8MjlRJAxVTiS\nlrpqcmGi3g==\n"}}}, "_verkey": "WjXEvZ9xj4Tz9sLtzf7HVP", "_identifier": "CzkavE58zgX7rUMrzSinLr", "py/object": "plenum.common.signer_did.DidSigner", "abbreviated": true, "sk": {"py/id": 84}, "_alias": null}, 1504010350751398]}, "py/seq": [{"py/id": 82}, 1504010350751398]}, "H2aKRiDeq8aLZSydQMDbtf": {"py/object": "plenum.client.wallet.IdData", "py/newargs": 
{"py/tuple": [{"seed": {"py/b64": "VGhyaWZ0MDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDA=\n"}, "naclSigner": {"keyraw": {"py/b64": "VGhyaWZ0MDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDA=\n"}, "verhex": {"py/b64": "ODFjYTdlYmRmNjhjNWE0YmU2MTk3NjEwNzc3Yjg0ZmMxNzRhZGRiYjVmZmQzNDdlZjMxYmFhODZm\nMzk4MGYwZA==\n"}, "keyhex": {"py/b64": "NTQ2ODcyNjk2Njc0MzAzMDMwMzAzMDMwMzAzMDMwMzAzMDMwMzAzMDMwMzAzMDMwMzAzMDMwMzAz\nMDMwMzAzMA==\n"}, "py/object": "stp_core.crypto.nacl_wrappers.Signer", "verraw": {"py/b64": "gcp+vfaMWkvmGXYQd3uE/BdK3btf/TR+8xuqhvOYDw0=\n"}, "key": {"_seed": {"py/b64": "VGhyaWZ0MDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDA=\n"}, "py/object": "stp_core.crypto.nacl_wrappers.SigningKey", "verify_key": {"py/object": "stp_core.crypto.nacl_wrappers.VerifyKey", "_key": {"py/b64": "gcp+vfaMWkvmGXYQd3uE/BdK3btf/TR+8xuqhvOYDw0=\n"}}, "_signing_key": {"py/b64": "VGhyaWZ0MDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDCByn699oxaS+YZdhB3e4T8F0rdu1/9NH7z\nG6qG85gPDQ==\n"}}}, "_verkey": "3sphzTb2itL2mwSeJ1Ji28", "_identifier": "H2aKRiDeq8aLZSydQMDbtf", "py/object": "plenum.common.signer_did.DidSigner", "abbreviated": true, "sk": {"py/id": 89}, "_alias": null}, 1504012356595162]}, "py/seq": [{"py/id": 87}, 1504012356595162]}, "ULtgFQJe6bjiFbs7ke3NJD": {"py/object": "plenum.client.wallet.IdData", "py/newargs": {"py/tuple": [{"seed": {"py/b64": "RmFiZXIwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDA=\n"}, "naclSigner": {"keyraw": {"py/b64": "RmFiZXIwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDA=\n"}, "verhex": {"py/b64": "ZGQ2ZGI4ZWI5MWNmZGNlNTBmMWE0ODhkOTUzMzI1ZGEyNjdlMzYyMzE5N2EwN2Q0MTQ4YjI1ZjM3\nZWZjMjg3ZQ==\n"}, "keyhex": {"py/b64": "NDY2MTYyNjU3MjMwMzAzMDMwMzAzMDMwMzAzMDMwMzAzMDMwMzAzMDMwMzAzMDMwMzAzMDMwMzAz\nMDMwMzAzMA==\n"}, "py/object": "stp_core.crypto.nacl_wrappers.Signer", "verraw": {"py/b64": "3W2465HP3OUPGkiNlTMl2iZ+NiMZegfUFIsl8378KH4=\n"}, "key": {"_seed": {"py/b64": "RmFiZXIwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDA=\n"}, "py/object": "stp_core.crypto.nacl_wrappers.SigningKey", "verify_key": {"py/object": 
"stp_core.crypto.nacl_wrappers.VerifyKey", "_key": {"py/b64": "3W2465HP3OUPGkiNlTMl2iZ+NiMZegfUFIsl8378KH4=\n"}}, "_signing_key": {"py/b64": "RmFiZXIwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDDdbbjrkc/c5Q8aSI2VMyXaJn42Ixl6B9QU\niyXzfvwofg==\n"}}}, "_verkey": "5kh3FB4H3NKq7tUDqeqHc1", "_identifier": "ULtgFQJe6bjiFbs7ke3NJD", "py/object": "plenum.common.signer_did.DidSigner", "abbreviated": true, "sk": {"py/id": 99}, "_alias": null}, 1504010242889826]}, "py/seq": [{"py/id": 97}, 1504010242889826]}}, "knownIds": {"CzkavE58zgX7rUMrzSinLr": {"trustAnchor": null, "_role": null, "seqNo": null, "py/object": "sovrin_common.identity.Identity", "identity": {"_identifier": "CzkavE58zgX7rUMrzSinLr", "py/object": "plenum.common.signer_did.DidIdentity", "abbreviated": null, "_verkey": null}, "last_synced": null}, "BJWY59pnbdGrThaxgbpaWY": {"trustAnchor": null, "_role": null, "seqNo": null, "py/object": "sovrin_common.identity.Identity", "identity": {"_identifier": "BJWY59pnbdGrThaxgbpaWY", "py/object": "plenum.common.signer_did.DidIdentity", "abbreviated": null, "_verkey": null}, "last_synced": null}, "PE83fLnVVSihiq39bhD3gk": {"trustAnchor": null, "_role": null, "seqNo": null, "py/object": "sovrin_common.identity.Identity", "identity": {"_identifier": "PE83fLnVVSihiq39bhD3gk", "py/object": "plenum.common.signer_did.DidIdentity", "abbreviated": null, "_verkey": null}, "last_synced": null}, "DbMzrC2CsumhvkRkjj3RMA": {"trustAnchor": null, "_role": null, "seqNo": null, "py/object": "sovrin_common.identity.Identity", "identity": {"_identifier": "DbMzrC2CsumhvkRkjj3RMA", "py/object": "plenum.common.signer_did.DidIdentity", "abbreviated": null, "_verkey": null}, "last_synced": null}, "ULtgFQJe6bjiFbs7ke3NJD": {"trustAnchor": null, "_role": null, "seqNo": null, "py/object": "sovrin_common.identity.Identity", "identity": {"_identifier": "ULtgFQJe6bjiFbs7ke3NJD", "py/object": "plenum.common.signer_did.DidIdentity", "abbreviated": null, "_verkey": null}, "last_synced": null}, 
"H2aKRiDeq8aLZSydQMDbtf": {"trustAnchor": null, "_role": null, "seqNo": null, "py/object": "sovrin_common.identity.Identity", "identity": {"_identifier": "H2aKRiDeq8aLZSydQMDbtf", "py/object": "plenum.common.signer_did.DidIdentity", "abbreviated": null, "_verkey": null}, "last_synced": null}}, "didMethods": {"d": {"sovrin": {"name": "sovrin", "py/object": "plenum.common.did_method.DidMethod", "pattern": "did:sovrin:", "signerConstructor": {"py/type": "plenum.common.signer_did.DidSigner"}}}, "py/object": "plenum.common.did_method.DidMethods", "default": {"py/id": 80}}, "_attributes": {"json://{\"py/tuple\": [\"endpoint\", null, \"H2aKRiDeq8aLZSydQMDbtf\"]}": {"value": null, "seqNo": null, "encKey": null, "name": "endpoint", "py/object": "sovrin_client.client.wallet.attribute.Attribute", "origin": null, "ledgerStore": {"py/id": 2}, "dest": "H2aKRiDeq8aLZSydQMDbtf"}, "json://{\"py/tuple\": [\"66b17\", \"ULtgFQJe6bjiFbs7ke3NJD\", \"ULtgFQJe6bjiFbs7ke3NJD\"]}": {"value": "{\"endpoint\": {\"ha\": \"127.0.0.1:5555\", \"pubkey\": \"5hmMA64DDQz5NzGJNVtRzNwpkZxktNQds21q3Wxxa62z\"}}", "seqNo": 8, "encKey": null, "name": "66b17", "py/object": "sovrin_client.client.wallet.attribute.Attribute", "origin": "ULtgFQJe6bjiFbs7ke3NJD", "ledgerStore": {"py/id": 2}, "dest": "ULtgFQJe6bjiFbs7ke3NJD"}, "json://{\"py/tuple\": [\"endpoint\", null, \"ULtgFQJe6bjiFbs7ke3NJD\"]}": {"value": null, "seqNo": null, "encKey": null, "name": "endpoint", "py/object": "sovrin_client.client.wallet.attribute.Attribute", "origin": null, "ledgerStore": {"py/id": 2}, "dest": "ULtgFQJe6bjiFbs7ke3NJD"}, "json://{\"py/tuple\": [\"aeff8\", \"H2aKRiDeq8aLZSydQMDbtf\", \"H2aKRiDeq8aLZSydQMDbtf\"]}": {"value": "{\"endpoint\": {\"ha\": \"127.0.0.1:7777\", \"pubkey\": \"AGBjYvyM3SFnoiDGAEzkSLHvqyzVkXeMZfKDvdpEsC2x\"}}", "seqNo": 12, "encKey": null, "name": "aeff8", "py/object": "sovrin_client.client.wallet.attribute.Attribute", "origin": "H2aKRiDeq8aLZSydQMDbtf", "ledgerStore": {"py/id": 2}, "dest": 
"H2aKRiDeq8aLZSydQMDbtf"}, "json://{\"py/tuple\": [\"endpoint\", null, \"CzkavE58zgX7rUMrzSinLr\"]}": {"value": null, "seqNo": null, "encKey": null, "name": "endpoint", "py/object": "sovrin_client.client.wallet.attribute.Attribute", "origin": null, "ledgerStore": {"py/id": 2}, "dest": "CzkavE58zgX7rUMrzSinLr"}, "json://{\"py/tuple\": [\"07867\", \"CzkavE58zgX7rUMrzSinLr\", \"CzkavE58zgX7rUMrzSinLr\"]}": {"value": "{\"endpoint\": {\"ha\": \"127.0.0.1:6666\", \"pubkey\": \"C5eqjU7NMVMGGfGfx2ubvX5H9X346bQt5qeziVAo3naQ\"}}", "seqNo": 10, "encKey": null, "name": "07867", "py/object": "sovrin_client.client.wallet.attribute.Attribute", "origin": "CzkavE58zgX7rUMrzSinLr", "ledgerStore": {"py/enumvalue": 4, "py/object": "sovrin_client.client.wallet.attribute.LedgerStore"}, "dest": "CzkavE58zgX7rUMrzSinLr"}}, "defaultId": "H2aKRiDeq8aLZSydQMDbtf", "replyHandler": {}, "py/object": "sovrin_client.client.wallet.wallet.Wallet", "_links": {"py/reduce": [{"py/type": "collections.OrderedDict"}, {"py/tuple": []}, null, null, {"py/tuple": [{"py/tuple": ["Faber College", {"trustAnchor": "Faber College", "localIdentifier": "BJWY59pnbdGrThaxgbpaWY", "linkStatus": "Accepted", "remoteEndPoint": {"py/tuple": ["127.0.0.1", 5555]}, "_remoteVerkey": "~5kh3FB4H3NKq7tUDqeqHc1", "availableClaims": [{"py/object": "anoncreds.protocol.types.AvailableClaim", "py/newargs": {"py/tuple": ["Transcript", "1.2", "ULtgFQJe6bjiFbs7ke3NJD"]}, "py/seq": ["Transcript", "1.2", "ULtgFQJe6bjiFbs7ke3NJD"]}], "remotePubkey": "5hmMA64DDQz5NzGJNVtRzNwpkZxktNQds21q3Wxxa62z", "linkLastSyncNo": null, "proofRequests": [], "invitationNonce": "b1134a647eb818069c089e7694f63e6d", "name": "Faber College", "localVerkey": "~B8szPF41YG76ePciYUvG3W", "py/object": "sovrin_client.client.wallet.link.Link", "linkLastSynced": {"py/object": "datetime.datetime", "__reduce__": [{"py/type": "datetime.datetime"}, ["B+EIHRAEKgP3/g=="]]}, "verifiedClaimProofs": [], "internalId": null, "remoteIdentifier": "ULtgFQJe6bjiFbs7ke3NJD"}]}, 
{"py/tuple": ["Acme Corp", {"trustAnchor": "Acme Corp", "localIdentifier": "PE83fLnVVSihiq39bhD3gk", "linkStatus": "Accepted", "remoteEndPoint": {"py/tuple": ["127.0.0.1", 6666]}, "_remoteVerkey": "~WjXEvZ9xj4Tz9sLtzf7HVP", "availableClaims": [{"py/object": "anoncreds.protocol.types.AvailableClaim", "py/newargs": {"py/tuple": ["Job-Certificate", "0.2", "CzkavE58zgX7rUMrzSinLr"]}, "py/seq": ["Job-Certificate", "0.2", "CzkavE58zgX7rUMrzSinLr"]}], "remotePubkey": "C5eqjU7NMVMGGfGfx2ubvX5H9X346bQt5qeziVAo3naQ", "linkLastSyncNo": null, "proofRequests": [{"nonce": 1871218719015472932666560146158750511756, "predicates": {}, "attributes": {"py/reduce": [{"py/type": "collections.OrderedDict"}, {"py/tuple": []}, null, null, {"py/tuple": [{"py/tuple": ["first_name", "Alice"]}, {"py/tuple": ["last_name", "Garcia"]}, {"py/tuple": ["phone_number", "123-456-7890"]}, {"py/tuple": ["degree", "Bachelor of Science, Marketing"]}, {"py/tuple": ["status", "graduated"]}, {"py/tuple": ["ssn", "123-45-6789"]}]}]}, "version": "0.2", "verifiableAttributes": {"330a3131-c0ce-4766-bdee-b7a7560e0f89": {"py/object": "anoncreds.protocol.types.AttributeInfo", "py/newargs": {"py/tuple": ["degree", null, null]}, "py/seq": ["degree", null, null]}, "4d646508-d49b-4a09-9809-7250d45cc2da": {"py/object": "anoncreds.protocol.types.AttributeInfo", "py/newargs": {"py/tuple": ["ssn", null, null]}, "py/seq": ["ssn", null, null]}, "aba32489-62cc-40f5-b733-7ab9bbcc46aa": {"py/object": "anoncreds.protocol.types.AttributeInfo", "py/newargs": {"py/tuple": ["status", null, null]}, "py/seq": ["status", null, null]}}, "seqNo": null, "name": "Job-Application", "ts": null, "py/object": "anoncreds.protocol.types.ProofRequest", "fulfilledByClaims": [{"py/tuple": [{"py/id": 9}, {"py/id": 11}, {"py/reduce": [{"py/type": "collections.OrderedDict"}, {"py/tuple": []}, null, null, {"py/tuple": [{"py/tuple": ["student_name", "Alice Garcia"]}, {"py/tuple": ["ssn", "123-45-6789"]}, {"py/tuple": ["degree", "Bachelor of Science, 
Marketing"]}, {"py/tuple": ["year", "2015"]}, {"py/tuple": ["status", "graduated"]}]}]}]}], "selfAttestedAttrs": {"phone_number": "123-456-7890", "last_name": "Garcia", "first_name": "Alice"}}], "invitationNonce": "57fbf9dc8c8e6acde33de98c6d747b28c", "name": "Acme Corp", "localVerkey": "~VKF8RPxwwEM7BxkipZX83j", "py/object": "sovrin_client.client.wallet.link.Link", "linkLastSynced": {"py/object": "datetime.datetime", "__reduce__": [{"py/type": "datetime.datetime"}, ["B+EIHRAHGw6Pbw=="]]}, "verifiedClaimProofs": [], "internalId": null, "remoteIdentifier": "CzkavE58zgX7rUMrzSinLr"}]}, {"py/tuple": ["Thrift Bank", {"trustAnchor": "Thrift Bank", "localIdentifier": "DbMzrC2CsumhvkRkjj3RMA", "linkStatus": "Accepted", "remoteEndPoint": {"py/tuple": ["127.0.0.1", 7777]}, "_remoteVerkey": "~3sphzTb2itL2mwSeJ1Ji28", "availableClaims": [], "remotePubkey": "AGBjYvyM3SFnoiDGAEzkSLHvqyzVkXeMZfKDvdpEsC2x", "linkLastSyncNo": null, "proofRequests": [{"nonce": 2551783452857349859593309361022286934668, "predicates": {}, "attributes": {"py/reduce": [{"py/type": "collections.OrderedDict"}, {"py/tuple": []}, null, null, {"py/tuple": [{"py/tuple": ["salary_bracket", "between $50,000 to $100,000"]}, {"py/tuple": ["employee_status", "Permanent"]}]}]}, "version": "0.1", "verifiableAttributes": {"ac32a218-3ad6-46f5-835b-a1e3a3523b46": {"py/object": "anoncreds.protocol.types.AttributeInfo", "py/newargs": {"py/tuple": ["salary_bracket", null, null]}, "py/seq": ["salary_bracket", null, null]}, "c02eb394-df6e-4aa4-991a-c8479a7ac726": {"py/object": "anoncreds.protocol.types.AttributeInfo", "py/newargs": {"py/tuple": ["employee_status", null, null]}, "py/seq": ["employee_status", null, null]}}, "seqNo": null, "name": "Loan-Application-Basic", "ts": null, "py/object": "anoncreds.protocol.types.ProofRequest", "fulfilledByClaims": [{"py/tuple": [{"py/id": 15}, {"py/id": 17}, {"py/reduce": [{"py/type": "collections.OrderedDict"}, {"py/tuple": []}, null, null, {"py/tuple": [{"py/tuple": ["first_name", 
"Alice"]}, {"py/tuple": ["last_name", "Garcia"]}, {"py/tuple": ["employee_status", "Permanent"]}, {"py/tuple": ["experience", "3 years"]}, {"py/tuple": ["salary_bracket", "between $50,000 to $100,000"]}]}]}]}], "selfAttestedAttrs": {}}, {"nonce": 2551783452857349859593309361022286934668, "predicates": {}, "attributes": {"py/reduce": [{"py/type": "collections.OrderedDict"}, {"py/tuple": []}, null, null, {"py/tuple": [{"py/tuple": ["first_name", "Alice"]}, {"py/tuple": ["last_name", "Garcia"]}, {"py/tuple": ["ssn", "123-45-6789"]}]}]}, "version": "0.1", "verifiableAttributes": {"0f25477f-88ed-4e93-a463-1ed2071d4ba3": {"py/object": "anoncreds.protocol.types.AttributeInfo", "py/newargs": {"py/tuple": ["first_name", null, null]}, "py/seq": ["first_name", null, null]}, "31bd9935-a886-4814-b437-849c80368506": {"py/object": "anoncreds.protocol.types.AttributeInfo", "py/newargs": {"py/tuple": ["last_name", null, null]}, "py/seq": ["last_name", null, null]}, "a45a8fed-1695-4e95-a46b-a1ab44a88ad2": {"py/object": "anoncreds.protocol.types.AttributeInfo", "py/newargs": {"py/tuple": ["ssn", null, null]}, "py/seq": ["ssn", null, null]}}, "seqNo": null, "name": "Loan-Application-KYC", "ts": null, "py/object": "anoncreds.protocol.types.ProofRequest", "fulfilledByClaims": [{"py/tuple": [{"py/id": 9}, {"py/id": 11}, {"py/reduce": [{"py/type": "collections.OrderedDict"}, {"py/tuple": []}, null, null, {"py/tuple": [{"py/tuple": ["student_name", "Alice Garcia"]}, {"py/tuple": ["ssn", "123-45-6789"]}, {"py/tuple": ["degree", "Bachelor of Science, Marketing"]}, {"py/tuple": ["year", "2015"]}, {"py/tuple": ["status", "graduated"]}]}]}]}, {"py/tuple": [{"py/id": 15}, {"py/id": 17}, {"py/reduce": [{"py/type": "collections.OrderedDict"}, {"py/tuple": []}, null, null, {"py/tuple": [{"py/tuple": ["first_name", "Alice"]}, {"py/tuple": ["last_name", "Garcia"]}, {"py/tuple": ["employee_status", "Permanent"]}, {"py/tuple": ["experience", "3 years"]}, {"py/tuple": ["salary_bracket", "between $50,000 
to $100,000"]}]}]}]}], "selfAttestedAttrs": {}}, {"nonce": 2551783452857349859593309361022286934668, "predicates": {}, "attributes": {"py/reduce": [{"py/type": "collections.OrderedDict"}, {"py/tuple": []}, null, null, {"py/tuple": [{"py/tuple": ["first_name", "string"]}, {"py/tuple": ["last_name", "string"]}]}]}, "version": "0.1", "verifiableAttributes": {"f58f77fa-894f-4a47-8aaa-7d586394e16c": {"py/object": "anoncreds.protocol.types.AttributeInfo", "py/newargs": {"py/tuple": ["last_name", null, null]}, "py/seq": ["last_name", null, null]}, "8327f28c-0194-45df-9472-196d173ee493": {"py/object": "anoncreds.protocol.types.AttributeInfo", "py/newargs": {"py/tuple": ["first_name", null, null]}, "py/seq": ["first_name", null, null]}}, "seqNo": null, "name": "Name-Proof", "ts": null, "py/object": "anoncreds.protocol.types.ProofRequest", "fulfilledByClaims": [], "selfAttestedAttrs": {}}], "invitationNonce": "77fbf9dc8c8e6acde33de98c6d747b28c", "name": "Thrift Bank", "localVerkey": "~91MMTTVbqNdRQJ9uv7Hijt", "py/object": "sovrin_client.client.wallet.link.Link", "linkLastSynced": {"py/object": "datetime.datetime", "__reduce__": [{"py/type": "datetime.datetime"}, ["B+EIHRAMIwh5WQ=="]]}, "verifiedClaimProofs": [], "internalId": null, "remoteIdentifier": "H2aKRiDeq8aLZSydQMDbtf"}]}]}]}, "_trustAnchored": {"CzkavE58zgX7rUMrzSinLr": {"trustAnchor": null, "_role": "101", "seqNo": 9, "py/object": "sovrin_common.identity.Identity", "identity": {"_identifier": "CzkavE58zgX7rUMrzSinLr", "py/object": "plenum.common.signer_did.DidIdentity", "abbreviated": true, "_verkey": "WjXEvZ9xj4Tz9sLtzf7HVP"}, "last_synced": null}, "H2aKRiDeq8aLZSydQMDbtf": {"trustAnchor": null, "_role": "101", "seqNo": 11, "py/object": "sovrin_common.identity.Identity", "identity": {"_identifier": "H2aKRiDeq8aLZSydQMDbtf", "py/object": "plenum.common.signer_did.DidIdentity", "abbreviated": true, "_verkey": "3sphzTb2itL2mwSeJ1Ji28"}, "last_synced": null}, "ULtgFQJe6bjiFbs7ke3NJD": {"trustAnchor": null, "_role": 
"101", "seqNo": 7, "py/object": "sovrin_common.identity.Identity", "identity": {"_identifier": "ULtgFQJe6bjiFbs7ke3NJD", "py/object": "plenum.common.signer_did.DidIdentity", "abbreviated": true, "_verkey": "5kh3FB4H3NKq7tUDqeqHc1"}, "last_synced": null}}, "_name": "Default"} \ No newline at end of file diff --git a/indy_client/test/cli/wallet_from_minimal_go_live b/indy_client/test/cli/wallet_from_minimal_go_live deleted file mode 100755 index 5b81ec212..000000000 --- a/indy_client/test/cli/wallet_from_minimal_go_live +++ /dev/null @@ -1 +0,0 @@ -{"_trustAnchored": {"H2aKRiDeq8aLZSydQMDbtf": {"last_synced": null, "trustAnchor": null, "_role": "101", "seqNo": 11, "py/object": "sovrin_common.identity.Identity", "identity": {"_verkey": "3sphzTb2itL2mwSeJ1Ji28", "_identifier": "H2aKRiDeq8aLZSydQMDbtf", "py/object": "plenum.common.signer_did.DidIdentity", "abbreviated": true}}, "CzkavE58zgX7rUMrzSinLr": {"last_synced": null, "trustAnchor": null, "_role": "101", "seqNo": 9, "py/object": "sovrin_common.identity.Identity", "identity": {"_verkey": "WjXEvZ9xj4Tz9sLtzf7HVP", "_identifier": "CzkavE58zgX7rUMrzSinLr", "py/object": "plenum.common.signer_did.DidIdentity", "abbreviated": true}}, "ULtgFQJe6bjiFbs7ke3NJD": {"last_synced": null, "trustAnchor": null, "_role": "101", "seqNo": 7, "py/object": "sovrin_common.identity.Identity", "identity": {"_verkey": "5kh3FB4H3NKq7tUDqeqHc1", "_identifier": "ULtgFQJe6bjiFbs7ke3NJD", "py/object": "plenum.common.signer_did.DidIdentity", "abbreviated": true}}}, "_upgrades": {}, "_pconfigs": {}, "defaultId": "H2aKRiDeq8aLZSydQMDbtf", "knownIds": {"H2aKRiDeq8aLZSydQMDbtf": {"last_synced": null, "trustAnchor": null, "_role": null, "seqNo": null, "py/object": "sovrin_common.identity.Identity", "identity": {"_verkey": null, "_identifier": "H2aKRiDeq8aLZSydQMDbtf", "py/object": "plenum.common.signer_did.DidIdentity", "abbreviated": null}}, "CzkavE58zgX7rUMrzSinLr": {"last_synced": null, "trustAnchor": null, "_role": null, "seqNo": null, 
"py/object": "sovrin_common.identity.Identity", "identity": {"_verkey": null, "_identifier": "CzkavE58zgX7rUMrzSinLr", "py/object": "plenum.common.signer_did.DidIdentity", "abbreviated": null}}, "K9fZBrS8TNdrozpzWWVTGQ": {"last_synced": null, "trustAnchor": null, "_role": null, "seqNo": null, "py/object": "sovrin_common.identity.Identity", "identity": {"_verkey": null, "_identifier": "K9fZBrS8TNdrozpzWWVTGQ", "py/object": "plenum.common.signer_did.DidIdentity", "abbreviated": null}}, "TL7GWwBiCVmzuTqUhSi4s6": {"last_synced": null, "trustAnchor": null, "_role": null, "seqNo": null, "py/object": "sovrin_common.identity.Identity", "identity": {"_verkey": null, "_identifier": "TL7GWwBiCVmzuTqUhSi4s6", "py/object": "plenum.common.signer_did.DidIdentity", "abbreviated": null}}, "LVD8rexf4KbDsp48X3wioA": {"last_synced": null, "trustAnchor": null, "_role": null, "seqNo": null, "py/object": "sovrin_common.identity.Identity", "identity": {"_verkey": null, "_identifier": "LVD8rexf4KbDsp48X3wioA", "py/object": "plenum.common.signer_did.DidIdentity", "abbreviated": null}}, "ULtgFQJe6bjiFbs7ke3NJD": {"last_synced": null, "trustAnchor": null, "_role": null, "seqNo": null, "py/object": "sovrin_common.identity.Identity", "identity": {"_verkey": null, "_identifier": "ULtgFQJe6bjiFbs7ke3NJD", "py/object": "plenum.common.signer_did.DidIdentity", "abbreviated": null}}}, "_name": "Default", "replyHandler": {}, "lastKnownSeqs": {}, "_connections": {"py/reduce": [{"py/type": "collections.OrderedDict"}, {"py/tuple": []}, null, null, {"py/tuple": [{"py/tuple": ["Faber College", {"invitationNonce": "b1134a647eb818069c089e7694f63e6d", "proofRequests": [], "internalId": null, "linkLastSyncNo": null, "remoteEndPoint": {"py/tuple": ["127.0.0.1", 5555]}, "connectionLastSynced": {"py/object": "datetime.datetime", "__reduce__": [{"py/type": "datetime.datetime"}, ["B+EIHxU1GgO8yA=="]]}, "remotePubkey": "5hmMA64DDQz5NzGJNVtRzNwpkZxktNQds21q3Wxxa62z", "linkStatus": "Accepted", "_remoteVerkey": 
"~5kh3FB4H3NKq7tUDqeqHc1", "trustAnchor": "Faber College", "availableClaims": [{"py/newargs": {"py/tuple": ["Transcript", "1.2", "ULtgFQJe6bjiFbs7ke3NJD"]}, "py/seq": ["Transcript", "1.2", "ULtgFQJe6bjiFbs7ke3NJD"], "py/object": "anoncreds.protocol.types.AvailableClaim"}], "localVerkey": "~X2rnDS1YENiwGNnT7UtTp9", "remoteIdentifier": "ULtgFQJe6bjiFbs7ke3NJD", "verifiedClaimProofs": [], "localIdentifier": "LVD8rexf4KbDsp48X3wioA", "py/object": "sovrin_client.client.wallet.link.Link", "name": "Faber College"}]}, {"py/tuple": ["Acme Corp", {"invitationNonce": "57fbf9dc8c8e6acde33de98c6d747b28c", "proofRequests": [{"ts": null, "seqNo": null, "nonce": 1871218719015472932666560146158750511756, "attributes": {"py/reduce": [{"py/type": "collections.OrderedDict"}, {"py/tuple": []}, null, null, {"py/tuple": [{"py/tuple": ["first_name", "Alice"]}, {"py/tuple": ["last_name", "Garcia"]}, {"py/tuple": ["phone_number", "123-45-6789"]}, {"py/tuple": ["degree", "Bachelor of Science, Marketing"]}, {"py/tuple": ["status", "graduated"]}, {"py/tuple": ["ssn", "123-45-6789"]}]}]}, "name": "Job-Application", "verifiableAttributes": {"03453c9d-40dc-4fcd-ad30-d02745b7709f": {"py/newargs": {"py/tuple": ["ssn", null, null]}, "py/seq": ["ssn", null, null], "py/object": "anoncreds.protocol.types.AttributeInfo"}, "299dafc4-2160-4f58-a40e-ab103e7d9d87": {"py/newargs": {"py/tuple": ["degree", null, null]}, "py/seq": ["degree", null, null], "py/object": "anoncreds.protocol.types.AttributeInfo"}, "56de048d-d291-4f7c-a320-012ed43188e3": {"py/newargs": {"py/tuple": ["status", null, null]}, "py/seq": ["status", null, null], "py/object": "anoncreds.protocol.types.AttributeInfo"}}, "version": "0.2", "fulfilledByClaims": [{"py/tuple": [{"py/id": 9}, {"py/id": 11}, {"py/reduce": [{"py/type": "collections.OrderedDict"}, {"py/tuple": []}, null, null, {"py/tuple": [{"py/tuple": ["student_name", "Alice Garcia"]}, {"py/tuple": ["ssn", "123-45-6789"]}, {"py/tuple": ["degree", "Bachelor of Science, Marketing"]}, 
{"py/tuple": ["year", "2015"]}, {"py/tuple": ["status", "graduated"]}]}]}]}], "predicates": {}, "py/object": "anoncreds.protocol.types.ProofRequest", "selfAttestedAttrs": {"phone_number": "123-45-6789", "last_name": "Garcia", "first_name": "Alice"}}], "internalId": null, "linkLastSyncNo": null, "remoteEndPoint": {"py/tuple": ["127.0.0.1", 6666]}, "connectionLastSynced": {"py/object": "datetime.datetime", "__reduce__": [{"py/type": "datetime.datetime"}, ["B+EIHxU5AAsdgg=="]]}, "remotePubkey": "C5eqjU7NMVMGGfGfx2ubvX5H9X346bQt5qeziVAo3naQ", "linkStatus": "Accepted", "_remoteVerkey": "~WjXEvZ9xj4Tz9sLtzf7HVP", "trustAnchor": "Acme Corp", "availableClaims": [{"py/newargs": {"py/tuple": ["Job-Certificate", "0.2", "CzkavE58zgX7rUMrzSinLr"]}, "py/seq": ["Job-Certificate", "0.2", "CzkavE58zgX7rUMrzSinLr"], "py/object": "anoncreds.protocol.types.AvailableClaim"}], "localVerkey": "~FH2y9v7PwCZDLyozCNbGWk", "remoteIdentifier": "CzkavE58zgX7rUMrzSinLr", "verifiedClaimProofs": [], "localIdentifier": "K9fZBrS8TNdrozpzWWVTGQ", "py/object": "sovrin_client.client.wallet.link.Link", "name": "Acme Corp"}]}, {"py/tuple": ["Thrift Bank", {"invitationNonce": "77fbf9dc8c8e6acde33de98c6d747b28c", "proofRequests": [{"ts": null, "seqNo": null, "nonce": 2551783452857349859593309361022286934668, "attributes": {"py/reduce": [{"py/type": "collections.OrderedDict"}, {"py/tuple": []}, null, null, {"py/tuple": [{"py/tuple": ["salary_bracket", "between $50,000 to $100,000"]}, {"py/tuple": ["employee_status", "Permanent"]}]}]}, "name": "Loan-Application-Basic", "verifiableAttributes": {"785954ef-d9c6-4c8c-b5f9-0627c4cce66f": {"py/newargs": {"py/tuple": ["salary_bracket", null, null]}, "py/seq": ["salary_bracket", null, null], "py/object": "anoncreds.protocol.types.AttributeInfo"}, "45ad085a-040b-46ac-bc73-be8f2cc92874": {"py/newargs": {"py/tuple": ["employee_status", null, null]}, "py/seq": ["employee_status", null, null], "py/object": "anoncreds.protocol.types.AttributeInfo"}}, "version": "0.1", 
"fulfilledByClaims": [{"py/tuple": [{"py/id": 15}, {"py/id": 17}, {"py/reduce": [{"py/type": "collections.OrderedDict"}, {"py/tuple": []}, null, null, {"py/tuple": [{"py/tuple": ["first_name", "Alice"]}, {"py/tuple": ["last_name", "Garcia"]}, {"py/tuple": ["employee_status", "Permanent"]}, {"py/tuple": ["experience", "3 years"]}, {"py/tuple": ["salary_bracket", "between $50,000 to $100,000"]}]}]}]}], "predicates": {}, "py/object": "anoncreds.protocol.types.ProofRequest", "selfAttestedAttrs": {}}, {"ts": null, "seqNo": null, "nonce": 2551783452857349859593309361022286934668, "attributes": {"py/reduce": [{"py/type": "collections.OrderedDict"}, {"py/tuple": []}, null, null, {"py/tuple": [{"py/tuple": ["first_name", "Alice"]}, {"py/tuple": ["last_name", "Garcia"]}, {"py/tuple": ["ssn", "123-45-6789"]}]}]}, "name": "Loan-Application-KYC", "verifiableAttributes": {"e0130061-5e51-4023-9a67-d66f4d169705": {"py/newargs": {"py/tuple": ["ssn", null, null]}, "py/seq": ["ssn", null, null], "py/object": "anoncreds.protocol.types.AttributeInfo"}, "37b8db9a-aa12-4c64-87fc-4a6990105624": {"py/newargs": {"py/tuple": ["last_name", null, null]}, "py/seq": ["last_name", null, null], "py/object": "anoncreds.protocol.types.AttributeInfo"}, "fc0c213d-c189-4423-8d1a-c45a232d5c0b": {"py/newargs": {"py/tuple": ["first_name", null, null]}, "py/seq": ["first_name", null, null], "py/object": "anoncreds.protocol.types.AttributeInfo"}}, "version": "0.1", "fulfilledByClaims": [{"py/tuple": [{"py/id": 9}, {"py/id": 11}, {"py/reduce": [{"py/type": "collections.OrderedDict"}, {"py/tuple": []}, null, null, {"py/tuple": [{"py/tuple": ["student_name", "Alice Garcia"]}, {"py/tuple": ["ssn", "123-45-6789"]}, {"py/tuple": ["degree", "Bachelor of Science, Marketing"]}, {"py/tuple": ["year", "2015"]}, {"py/tuple": ["status", "graduated"]}]}]}]}, {"py/tuple": [{"py/id": 15}, {"py/id": 17}, {"py/reduce": [{"py/type": "collections.OrderedDict"}, {"py/tuple": []}, null, null, {"py/tuple": [{"py/tuple": 
["first_name", "Alice"]}, {"py/tuple": ["last_name", "Garcia"]}, {"py/tuple": ["employee_status", "Permanent"]}, {"py/tuple": ["experience", "3 years"]}, {"py/tuple": ["salary_bracket", "between $50,000 to $100,000"]}]}]}]}], "predicates": {}, "py/object": "anoncreds.protocol.types.ProofRequest", "selfAttestedAttrs": {}}, {"ts": null, "seqNo": null, "nonce": 2551783452857349859593309361022286934668, "attributes": {"py/reduce": [{"py/type": "collections.OrderedDict"}, {"py/tuple": []}, null, null, {"py/tuple": [{"py/tuple": ["first_name", "string"]}, {"py/tuple": ["last_name", "string"]}]}]}, "name": "Name-Proof", "verifiableAttributes": {"0b6ed454-954f-45cb-826a-51da51161954": {"py/newargs": {"py/tuple": ["first_name", null, null]}, "py/seq": ["first_name", null, null], "py/object": "anoncreds.protocol.types.AttributeInfo"}, "ce5bac7a-276e-4c97-9062-9fd736976bca": {"py/newargs": {"py/tuple": ["last_name", null, null]}, "py/seq": ["last_name", null, null], "py/object": "anoncreds.protocol.types.AttributeInfo"}}, "version": "0.1", "fulfilledByClaims": [], "predicates": {}, "py/object": "anoncreds.protocol.types.ProofRequest", "selfAttestedAttrs": {}}], "internalId": null, "linkLastSyncNo": null, "remoteEndPoint": {"py/tuple": ["127.0.0.1", 7777]}, "connectionLastSynced": {"py/object": "datetime.datetime", "__reduce__": [{"py/type": "datetime.datetime"}, ["B+EIHxYFDAT7cw=="]]}, "remotePubkey": "AGBjYvyM3SFnoiDGAEzkSLHvqyzVkXeMZfKDvdpEsC2x", "linkStatus": "Accepted", "_remoteVerkey": "~3sphzTb2itL2mwSeJ1Ji28", "trustAnchor": "Thrift Bank", "availableClaims": [], "localVerkey": "~QFbWaQ4SSnPMp1KGdMMpaY", "remoteIdentifier": "H2aKRiDeq8aLZSydQMDbtf", "verifiedClaimProofs": [], "localIdentifier": "TL7GWwBiCVmzuTqUhSi4s6", "py/object": "sovrin_client.client.wallet.link.Link", "name": "Thrift Bank"}]}]}]}, "_attributes": {"json://{\"py/tuple\": [\"5028e\", \"H2aKRiDeq8aLZSydQMDbtf\", \"H2aKRiDeq8aLZSydQMDbtf\"]}": {"dest": "H2aKRiDeq8aLZSydQMDbtf", "origin": 
"H2aKRiDeq8aLZSydQMDbtf", "seqNo": 12, "value": "{\"endpoint\": {\"ha\": \"127.0.0.1:7777\", \"pubkey\": \"AGBjYvyM3SFnoiDGAEzkSLHvqyzVkXeMZfKDvdpEsC2x\"}}", "name": "5028e", "ledgerStore": {"py/enumvalue": 4, "py/object": "sovrin_client.client.wallet.attribute.LedgerStore"}, "encKey": null, "py/object": "sovrin_client.client.wallet.attribute.Attribute"}, "json://{\"py/tuple\": [\"5102a\", \"CzkavE58zgX7rUMrzSinLr\", \"CzkavE58zgX7rUMrzSinLr\"]}": {"dest": "CzkavE58zgX7rUMrzSinLr", "origin": "CzkavE58zgX7rUMrzSinLr", "seqNo": 10, "value": "{\"endpoint\": {\"ha\": \"127.0.0.1:6666\", \"pubkey\": \"C5eqjU7NMVMGGfGfx2ubvX5H9X346bQt5qeziVAo3naQ\"}}", "name": "5102a", "ledgerStore": {"py/id": 2}, "encKey": null, "py/object": "sovrin_client.client.wallet.attribute.Attribute"}, "json://{\"py/tuple\": [\"f7fd8\", \"ULtgFQJe6bjiFbs7ke3NJD\", \"ULtgFQJe6bjiFbs7ke3NJD\"]}": {"dest": "ULtgFQJe6bjiFbs7ke3NJD", "origin": "ULtgFQJe6bjiFbs7ke3NJD", "seqNo": 8, "value": "{\"endpoint\": {\"ha\": \"127.0.0.1:5555\", \"pubkey\": \"5hmMA64DDQz5NzGJNVtRzNwpkZxktNQds21q3Wxxa62z\"}}", "name": "f7fd8", "ledgerStore": {"py/id": 2}, "encKey": null, "py/object": "sovrin_client.client.wallet.attribute.Attribute"}, "json://{\"py/tuple\": [\"endpoint\", null, \"H2aKRiDeq8aLZSydQMDbtf\"]}": {"dest": "H2aKRiDeq8aLZSydQMDbtf", "origin": null, "seqNo": null, "value": null, "name": "endpoint", "ledgerStore": {"py/id": 2}, "encKey": null, "py/object": "sovrin_client.client.wallet.attribute.Attribute"}, "json://{\"py/tuple\": [\"endpoint\", null, \"CzkavE58zgX7rUMrzSinLr\"]}": {"dest": "CzkavE58zgX7rUMrzSinLr", "origin": null, "seqNo": null, "value": null, "name": "endpoint", "ledgerStore": {"py/id": 2}, "encKey": null, "py/object": "sovrin_client.client.wallet.attribute.Attribute"}, "json://{\"py/tuple\": [\"endpoint\", null, \"ULtgFQJe6bjiFbs7ke3NJD\"]}": {"dest": "ULtgFQJe6bjiFbs7ke3NJD", "origin": null, "seqNo": null, "value": null, "name": "endpoint", "ledgerStore": {"py/id": 2}, "encKey": null, 
"py/object": "sovrin_client.client.wallet.attribute.Attribute"}}, "aliasesToIds": {}, "_nodes": {}, "_prepared": {"json://{\"py/tuple\": [\"ULtgFQJe6bjiFbs7ke3NJD\", 1504204144694724]}": {"py/tuple": [{"py/state": {"operation": {"raw": "{\"endpoint\": {\"ha\": \"127.0.0.1:5555\", \"pubkey\": \"5hmMA64DDQz5NzGJNVtRzNwpkZxktNQds21q3Wxxa62z\"}}", "dest": "ULtgFQJe6bjiFbs7ke3NJD", "type": "100"}, "digest": "86e8998d83e5e0aec0f8b697bb9a04a83a210969a29b8a7f58b832c88c57b5a7", "signature": "aSfEqkM4GmrwkPQABzTG9YHqurzDAY8fjJnaKRww4BkZi8nQmMdQnJjcvRS6KSYxjrugatYyeNSua5nk6Tp8118", "identifier": "ULtgFQJe6bjiFbs7ke3NJD", "reqId": 1504204144694724}, "py/object": "sovrin_common.types.Request"}, {"py/tuple": ["f7fd8", "ULtgFQJe6bjiFbs7ke3NJD", "ULtgFQJe6bjiFbs7ke3NJD"]}]}, "json://{\"py/tuple\": [\"H2aKRiDeq8aLZSydQMDbtf\", 1504206147147866]}": {"py/tuple": [{"py/state": {"operation": {"signature_type": "CL", "origin": "CzkavE58zgX7rUMrzSinLr", "type": "108", "ref": 15}, "digest": "f7e9b2f051f01701661e47901ca3b6136eb0803624f84a99540dd23551741110", "signature": "qkWRM2UX3KU8vrPAb8iuTiHALLBbx222mgCVeWZoQpPdFd1qMNEE2QibswtcTgaFFyzDF8PNMQPf1sQBHtpiqh8", "identifier": "H2aKRiDeq8aLZSydQMDbtf", "reqId": 1504206147147866}, "py/object": "sovrin_common.types.Request"}, null]}, "json://{\"py/tuple\": [\"H2aKRiDeq8aLZSydQMDbtf\", 1504204420673654]}": {"py/tuple": [{"py/state": {"operation": {"raw": "{\"endpoint\": {\"ha\": \"127.0.0.1:7777\", \"pubkey\": \"AGBjYvyM3SFnoiDGAEzkSLHvqyzVkXeMZfKDvdpEsC2x\"}}", "dest": "H2aKRiDeq8aLZSydQMDbtf", "type": "100"}, "digest": "5fd600afaa1f5468d6762b8bb8f078f6c3e1f6331fff86b585fc4aa832ab73ca", "signature": "4rqhNBM2ijogYaZ9SnEAS86mdq5LmNZQFWH1Z3bQqcfDxLmiQ9vUaeaw7Gc8gctUZZ3Dia8YARK92rijUY7nrV3e", "identifier": "H2aKRiDeq8aLZSydQMDbtf", "reqId": 1504204420673654}, "py/object": "sovrin_common.types.Request"}, {"py/tuple": ["5028e", "H2aKRiDeq8aLZSydQMDbtf", "H2aKRiDeq8aLZSydQMDbtf"]}]}, "json://{\"py/tuple\": [\"H2aKRiDeq8aLZSydQMDbtf\", 
1504206312094399]}": {"py/tuple": [{"py/state": {"operation": {"raw": "endpoint", "dest": "H2aKRiDeq8aLZSydQMDbtf", "type": "104"}, "digest": "d7ecd9ea68b0fd6a6d7881da2ff2664ce964f8f6a715b17295ae4f4609de302b", "signature": "46wgGTVvquJSABQ1dCBTtuwr86S3XnMVzgxor96uaC35vArEmqWGLYdiB2LiuvwyCaaxURVQKDu1VX953TjtRRz9", "identifier": "H2aKRiDeq8aLZSydQMDbtf", "reqId": 1504206312094399}, "py/object": "sovrin_common.types.Request"}, {"py/tuple": ["endpoint", null, "H2aKRiDeq8aLZSydQMDbtf"]}]}, "json://{\"py/tuple\": [\"H2aKRiDeq8aLZSydQMDbtf\", 1504205670087568]}": {"py/tuple": [{"py/state": {"operation": {"dest": "ULtgFQJe6bjiFbs7ke3NJD", "type": "107", "data": {"version": "1.2", "name": "Transcript"}}, "digest": "35064aa1538a2944bf1767eb8ff37cbd2d9f7e4a3b4731e40cbb7329fbdd932f", "signature": "k1iM52JGBuCK3DRfP1Q51LnkW41ankpzeTZDcDoSg5SrotHbq84W1UwDVow3rGNyFMuTgDRtg1QAY9KCP47qz6e", "identifier": "H2aKRiDeq8aLZSydQMDbtf", "reqId": 1504205670087568}, "py/object": "sovrin_common.types.Request"}, null]}, "json://{\"py/tuple\": [\"H2aKRiDeq8aLZSydQMDbtf\", 1504205820505222]}": {"py/tuple": [{"py/state": {"operation": {"raw": "endpoint", "dest": "CzkavE58zgX7rUMrzSinLr", "type": "104"}, "digest": "936286544e3620d1a17440acfd81ed6f979ffc54337a86c434e1806d3011bc3b", "signature": "3fxtN6UGMcSnGRoPbzibF1mK8SpAEsxhMHqcjkWjfymbMseCGhTxfTzxvv5G92cz4weUZiex3qBp8nGCPghFyagj", "identifier": "H2aKRiDeq8aLZSydQMDbtf", "reqId": 1504205820505222}, "py/object": "sovrin_common.types.Request"}, {"py/tuple": ["endpoint", null, "CzkavE58zgX7rUMrzSinLr"]}]}, "json://{\"py/tuple\": [\"H2aKRiDeq8aLZSydQMDbtf\", 1504206313653873]}": {"py/tuple": [{"py/state": {"operation": {"dest": "TL7GWwBiCVmzuTqUhSi4s6", "type": "105"}, "digest": "dcc5eb08249c2a6481a49c3fec10884e3cca0195c357b0fb1cfadab16648a586", "signature": "2FKkpBWu7tbYrR2ZdYKeBKFHVKP8PaHCqH467Pj2ZYNsYPnUBrc7amHgJS939Kff7U1K5rSLKkxfrVNUwTkJpSMp", "identifier": "H2aKRiDeq8aLZSydQMDbtf", "reqId": 1504206313653873}, "py/object": 
"sovrin_common.types.Request"}, null]}, "json://{\"py/tuple\": [\"H2aKRiDeq8aLZSydQMDbtf\", 1504205820501815]}": {"py/tuple": [{"py/state": {"operation": {"dest": "CzkavE58zgX7rUMrzSinLr", "type": "105"}, "digest": "09d286f122dcaddae5862c129a17881bfc2433be0a85a29a1c2c2a7f49dc67f4", "signature": "3R1sHJ5Gr8DG5kexbFfQQGimvfdoVE2RV2Ahfv6s9LJyaJ9fAzkV2sa3FJaMH78rDtjfwzv2VqzSSxtHnVWGdTEz", "identifier": "H2aKRiDeq8aLZSydQMDbtf", "reqId": 1504205820501815}, "py/object": "sovrin_common.types.Request"}, null]}, "json://{\"py/tuple\": [\"H2aKRiDeq8aLZSydQMDbtf\", 1504205683914898]}": {"py/tuple": [{"py/state": {"operation": {"signature_type": "CL", "origin": "ULtgFQJe6bjiFbs7ke3NJD", "type": "108", "ref": 13}, "digest": "0a5505d0492328f48a3fac364046854ed5178f9ea245a0659d5fcdc4f272fd2c", "signature": "oCWY2avHtqRxZMDJk1nPwSq5NvmYc8LNEijC3ETzmBrt4s1ZdpyowKQ26NewmjL7P1NT5VfRLi5tMSaVcHeRbG3", "identifier": "H2aKRiDeq8aLZSydQMDbtf", "reqId": 1504205683914898}, "py/object": "sovrin_common.types.Request"}, null]}, "json://{\"py/tuple\": [\"Th7MpTaRZVRYnPiabds81Y\", 1504204203506593]}": {"py/tuple": [{"py/state": {"operation": {"dest": "CzkavE58zgX7rUMrzSinLr", "verkey": "~WjXEvZ9xj4Tz9sLtzf7HVP", "type": "1", "role": "101"}, "digest": "2c97e2bb259c852609b9a855f16183553e184c580b70ef0b4e2f00483b3a235c", "signature": "4DhdddPs3KvTf93TvZUK63AAP8ayQ1qWa3VbPvjPdJDSDqHz6k9dL4yFqNCzZ7bEWmvkLcKiWBA2k4QFYWjfYLmt", "identifier": "Th7MpTaRZVRYnPiabds81Y", "reqId": 1504204203506593}, "py/object": "sovrin_common.types.Request"}, "CzkavE58zgX7rUMrzSinLr"]}, "json://{\"py/tuple\": [\"H2aKRiDeq8aLZSydQMDbtf\", 1504205821775276]}": {"py/tuple": [{"py/state": {"operation": {"dest": "K9fZBrS8TNdrozpzWWVTGQ", "type": "105"}, "digest": "71801eb8741d9f669dc8df2e068b5f5a9b196209f921437c84c054405b75873a", "signature": "3mYt7ysiAq9z5BWtv5qPTir4NZkt6VqoK9qesWSvVef9zPpi1oLbx5Qdb4jZ35FGDgELqHFjf8Kok4JV1ocFsZR4", "identifier": "H2aKRiDeq8aLZSydQMDbtf", "reqId": 1504205821775276}, "py/object": 
"sovrin_common.types.Request"}, null]}, "json://{\"py/tuple\": [\"CzkavE58zgX7rUMrzSinLr\", 1504204254926855]}": {"py/tuple": [{"py/state": {"operation": {"raw": "{\"endpoint\": {\"ha\": \"127.0.0.1:6666\", \"pubkey\": \"C5eqjU7NMVMGGfGfx2ubvX5H9X346bQt5qeziVAo3naQ\"}}", "dest": "CzkavE58zgX7rUMrzSinLr", "type": "100"}, "digest": "81bdde59cf0498757690b49782ed0d420810479f82382e5134cd1fe871ccead2", "signature": "5vVQboYP9XjpT72GHJAceCDidSqy3C41fPPBZWiNErJfzPtgRjteyUMXQ8nMdrzWcNF63nPu8H9tXyvFwH2KC4U5", "identifier": "CzkavE58zgX7rUMrzSinLr", "reqId": 1504204254926855}, "py/object": "sovrin_common.types.Request"}, {"py/tuple": ["5102a", "CzkavE58zgX7rUMrzSinLr", "CzkavE58zgX7rUMrzSinLr"]}]}, "json://{\"py/tuple\": [\"H2aKRiDeq8aLZSydQMDbtf\", 1504205606025632]}": {"py/tuple": [{"py/state": {"operation": {"dest": "ULtgFQJe6bjiFbs7ke3NJD", "type": "105"}, "digest": "62b087450882081eaa6afbe42917e67056737f8bd107dbd6e8ad4897d72fc531", "signature": "64gR8DTadyDu5ESXsEHoE7nMJDkgFc5SfAuVdyVLPtEnjhDAmiacBAyZmEefpGKwPCwjypxN6d7u7CxiNK3CFmpH", "identifier": "H2aKRiDeq8aLZSydQMDbtf", "reqId": 1504205606025632}, "py/object": "sovrin_common.types.Request"}, null]}, "json://{\"py/tuple\": [\"H2aKRiDeq8aLZSydQMDbtf\", 1504206132046397]}": {"py/tuple": [{"py/state": {"operation": {"dest": "CzkavE58zgX7rUMrzSinLr", "type": "107", "data": {"version": "0.2", "name": "Job-Certificate"}}, "digest": "ffed9f552074273a8ec33f18409ca709c17969996fb1348c0471a92d63ce129d", "signature": "2t28PUByyLaycxWViDdzUnPMH31PJLaiwW9AAnAkpjwDDzQbXYDeqnGNDTSdU2yqQqqfQmLRM5qSa3Dw238nJkmd", "identifier": "H2aKRiDeq8aLZSydQMDbtf", "reqId": 1504206132046397}, "py/object": "sovrin_common.types.Request"}, null]}, "json://{\"py/tuple\": [\"Th7MpTaRZVRYnPiabds81Y\", 1504204079007140]}": {"py/tuple": [{"py/state": {"operation": {"dest": "ULtgFQJe6bjiFbs7ke3NJD", "verkey": "~5kh3FB4H3NKq7tUDqeqHc1", "type": "1", "role": "101"}, "digest": "05c85873df7a9e4b257d62c2f39b49632094d27d2fe824579b6f56c28a5a8ced", "signature": 
"z3XcCqRoBy1BFf5kdzL8WRFMJVPDaWKBFKoTWMQcouTtbUnPypXQJbnDKWbrFsUtJ6Q7AJNHjPGGcNsKCA2svUq", "identifier": "Th7MpTaRZVRYnPiabds81Y", "reqId": 1504204079007140}, "py/object": "sovrin_common.types.Request"}, "ULtgFQJe6bjiFbs7ke3NJD"]}, "json://{\"py/tuple\": [\"H2aKRiDeq8aLZSydQMDbtf\", 1504205606033428]}": {"py/tuple": [{"py/state": {"operation": {"raw": "endpoint", "dest": "ULtgFQJe6bjiFbs7ke3NJD", "type": "104"}, "digest": "38706e5602e7d65805029c3e131ecb7b5b4001cc347311ac9c2ec6dfc1a6b62e", "signature": "3TeHLY8Hqm23btXLzjHoW4neY7S6Wp3fnmMrmfHwMpChHg9V8j625utTWKtmDBo6F7ENBhnRCV7HxtSrU73Jjm5H", "identifier": "H2aKRiDeq8aLZSydQMDbtf", "reqId": 1504205606033428}, "py/object": "sovrin_common.types.Request"}, {"py/tuple": ["endpoint", null, "ULtgFQJe6bjiFbs7ke3NJD"]}]}, "json://{\"py/tuple\": [\"H2aKRiDeq8aLZSydQMDbtf\", 1504206312092849]}": {"py/tuple": [{"py/state": {"operation": {"dest": "H2aKRiDeq8aLZSydQMDbtf", "type": "105"}, "digest": "385114ce9f36ed517daf3ac3283b8a30b42796d9abb53d44d5738253f994c23e", "signature": "43DPWruK5YmRdchzoSusY6HvU4uF9qfJjZqWZPTtUKijUsQKhixRQ1Xnu5k8bkBRssnPpCykeskg4ZyX9nj6XFY", "identifier": "H2aKRiDeq8aLZSydQMDbtf", "reqId": 1504206312092849}, "py/object": "sovrin_common.types.Request"}, null]}, "json://{\"py/tuple\": [\"H2aKRiDeq8aLZSydQMDbtf\", 1504205607239931]}": {"py/tuple": [{"py/state": {"operation": {"dest": "LVD8rexf4KbDsp48X3wioA", "type": "105"}, "digest": "8145cb8c5d4e19f7501dd4f0a505156418086c920a9dac7699772842dd68f0b6", "signature": "5X4ch2CF1wwtQdDGLZmz6xuutoy3ZpnGNFf8HZr4LFLjSzRGyPoYktaESfY9PaMaJ33yTnscexRNwzraWgFkgRyt", "identifier": "H2aKRiDeq8aLZSydQMDbtf", "reqId": 1504205607239931}, "py/object": "sovrin_common.types.Request"}, null]}, "json://{\"py/tuple\": [\"Th7MpTaRZVRYnPiabds81Y\", 1504204318462474]}": {"py/tuple": [{"py/state": {"operation": {"dest": "H2aKRiDeq8aLZSydQMDbtf", "verkey": "~3sphzTb2itL2mwSeJ1Ji28", "type": "1", "role": "101"}, "digest": 
"511614c319cac2edacfa528a3cdc7ef59c229123240a0cf3b6ea69f126b6cded", "signature": "NibUjsEQNbUtkZwisyeBToNmr8J3nJQtPdSxM3JyH8rdvBn236wYjDQP4sA6asz7nHSbcS7tWvai6TKDfJiwsJ1", "identifier": "Th7MpTaRZVRYnPiabds81Y", "reqId": 1504204318462474}, "py/object": "sovrin_common.types.Request"}, "H2aKRiDeq8aLZSydQMDbtf"]}}, "idsToSigners": {"H2aKRiDeq8aLZSydQMDbtf": {"py/id": 87}, "Th7MpTaRZVRYnPiabds81Y": {"py/id": 92}, "CzkavE58zgX7rUMrzSinLr": {"py/id": 82}, "K9fZBrS8TNdrozpzWWVTGQ": {"naclSigner": {"verhex": {"py/b64": "OTJmYTM0ZDRjYzg0MDkxNWRmZGRkMzM0MzMxY2NlMzU3MzlkMGRmMmNlZDRlZjRkZTQxNDUwZTZi\nZDQwOTI5OQ==\n"}, "verraw": {"py/b64": "kvo01MyECRXf3dM0MxzONXOdDfLO1O9N5BRQ5r1Akpk=\n"}, "keyraw": {"py/b64": "tUySUDn4IZ4WGrPBshjLU1vsEp3n9lzo6Idiw5IRq/w=\n"}, "py/object": "stp_core.crypto.nacl_wrappers.Signer", "keyhex": {"py/b64": "YjU0YzkyNTAzOWY4MjE5ZTE2MWFiM2MxYjIxOGNiNTM1YmVjMTI5ZGU3ZjY1Y2U4ZTg4NzYyYzM5\nMjExYWJmYw==\n"}, "key": {"verify_key": {"py/object": "stp_core.crypto.nacl_wrappers.VerifyKey", "_key": {"py/b64": "kvo01MyECRXf3dM0MxzONXOdDfLO1O9N5BRQ5r1Akpk=\n"}}, "_seed": {"py/b64": "tUySUDn4IZ4WGrPBshjLU1vsEp3n9lzo6Idiw5IRq/w=\n"}, "py/object": "stp_core.crypto.nacl_wrappers.SigningKey", "_signing_key": {"py/b64": "tUySUDn4IZ4WGrPBshjLU1vsEp3n9lzo6Idiw5IRq/yS+jTUzIQJFd/d0zQzHM41c50N8s7U703k\nFFDmvUCSmQ==\n"}}}, "_verkey": "FH2y9v7PwCZDLyozCNbGWk", "_identifier": "K9fZBrS8TNdrozpzWWVTGQ", "abbreviated": true, "seed": {"py/b64": "tUySUDn4IZ4WGrPBshjLU1vsEp3n9lzo6Idiw5IRq/w=\n"}, "_alias": null, "sk": {"py/id": 103}, "py/object": "plenum.common.signer_did.DidSigner"}, "TL7GWwBiCVmzuTqUhSi4s6": {"naclSigner": {"verhex": {"py/b64": "ZDUzODk5NmFjN2QzYjkzZTViNjJlM2FlZjc1ZGYxN2RiYzRiZGU5NGQ0ZGExNGZkNTE1Mzg4NGZl\nMzlmMWNkNQ==\n"}, "verraw": {"py/b64": "1TiZasfTuT5bYuOu913xfbxL3pTU2hT9UVOIT+OfHNU=\n"}, "keyraw": {"py/b64": "/EHvhsgvZqEWvdG9W/AlmcNyPY+VAMYwrbTXJy1MOPA=\n"}, "py/object": "stp_core.crypto.nacl_wrappers.Signer", "keyhex": {"py/b64": 
"ZmM0MWVmODZjODJmNjZhMTE2YmRkMWJkNWJmMDI1OTljMzcyM2Q4Zjk1MDBjNjMwYWRiNGQ3Mjcy\nZDRjMzhmMA==\n"}, "key": {"verify_key": {"py/object": "stp_core.crypto.nacl_wrappers.VerifyKey", "_key": {"py/b64": "1TiZasfTuT5bYuOu913xfbxL3pTU2hT9UVOIT+OfHNU=\n"}}, "_seed": {"py/b64": "/EHvhsgvZqEWvdG9W/AlmcNyPY+VAMYwrbTXJy1MOPA=\n"}, "py/object": "stp_core.crypto.nacl_wrappers.SigningKey", "_signing_key": {"py/b64": "/EHvhsgvZqEWvdG9W/AlmcNyPY+VAMYwrbTXJy1MOPDVOJlqx9O5Plti4673XfF9vEvelNTaFP1R\nU4hP458c1Q==\n"}}}, "_verkey": "QFbWaQ4SSnPMp1KGdMMpaY", "_identifier": "TL7GWwBiCVmzuTqUhSi4s6", "abbreviated": true, "seed": {"py/b64": "/EHvhsgvZqEWvdG9W/AlmcNyPY+VAMYwrbTXJy1MOPA=\n"}, "_alias": null, "sk": {"py/id": 111}, "py/object": "plenum.common.signer_did.DidSigner"}, "LVD8rexf4KbDsp48X3wioA": {"naclSigner": {"verhex": {"py/b64": "OWRjZGYwNTc5YTFkOWU1YzgwYzJkMDFmODVkZGZhYzlmMzM0OTRkMGRjMDgyYmIyYjRlMmY1ZTRk\nMDA1MWViZQ==\n"}, "verraw": {"py/b64": "nc3wV5odnlyAwtAfhd36yfM0lNDcCCuytOL15NAFHr4=\n"}, "keyraw": {"py/b64": "6zu2E8dAq6Fwpf0CZ39ZRRrjVNlJNeZo2pVpnhArNNo=\n"}, "py/object": "stp_core.crypto.nacl_wrappers.Signer", "keyhex": {"py/b64": "ZWIzYmI2MTNjNzQwYWJhMTcwYTVmZDAyNjc3ZjU5NDUxYWUzNTRkOTQ5MzVlNjY4ZGE5NTY5OWUx\nMDJiMzRkYQ==\n"}, "key": {"verify_key": {"py/object": "stp_core.crypto.nacl_wrappers.VerifyKey", "_key": {"py/b64": "nc3wV5odnlyAwtAfhd36yfM0lNDcCCuytOL15NAFHr4=\n"}}, "_seed": {"py/b64": "6zu2E8dAq6Fwpf0CZ39ZRRrjVNlJNeZo2pVpnhArNNo=\n"}, "py/object": "stp_core.crypto.nacl_wrappers.SigningKey", "_signing_key": {"py/b64": "6zu2E8dAq6Fwpf0CZ39ZRRrjVNlJNeZo2pVpnhArNNqdzfBXmh2eXIDC0B+F3frJ8zSU0NwIK7K0\n4vXk0AUevg==\n"}}}, "_verkey": "X2rnDS1YENiwGNnT7UtTp9", "_identifier": "LVD8rexf4KbDsp48X3wioA", "abbreviated": true, "seed": {"py/b64": "6zu2E8dAq6Fwpf0CZ39ZRRrjVNlJNeZo2pVpnhArNNo=\n"}, "_alias": null, "sk": {"py/id": 107}, "py/object": "plenum.common.signer_did.DidSigner"}, "ULtgFQJe6bjiFbs7ke3NJD": {"py/id": 97}}, "_pending": {"py/reduce": [{"py/type": 
"collections.deque"}, {"py/tuple": [[]]}, null, null, null]}, "env": "no-env", "didMethods": {"default": {"py/id": 80}, "d": {"sovrin": {"pattern": "did:sovrin:", "signerConstructor": {"py/type": "plenum.common.signer_did.DidSigner"}, "py/object": "plenum.common.did_method.DidMethod", "name": "sovrin"}}, "py/object": "plenum.common.did_method.DidMethods"}, "py/object": "sovrin_client.client.wallet.wallet.Wallet", "ids": {"H2aKRiDeq8aLZSydQMDbtf": {"py/newargs": {"py/tuple": [{"naclSigner": {"verhex": {"py/b64": "ODFjYTdlYmRmNjhjNWE0YmU2MTk3NjEwNzc3Yjg0ZmMxNzRhZGRiYjVmZmQzNDdlZjMxYmFhODZm\nMzk4MGYwZA==\n"}, "verraw": {"py/b64": "gcp+vfaMWkvmGXYQd3uE/BdK3btf/TR+8xuqhvOYDw0=\n"}, "keyraw": {"py/b64": "VGhyaWZ0MDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDA=\n"}, "py/object": "stp_core.crypto.nacl_wrappers.Signer", "keyhex": {"py/b64": "NTQ2ODcyNjk2Njc0MzAzMDMwMzAzMDMwMzAzMDMwMzAzMDMwMzAzMDMwMzAzMDMwMzAzMDMwMzAz\nMDMwMzAzMA==\n"}, "key": {"verify_key": {"py/object": "stp_core.crypto.nacl_wrappers.VerifyKey", "_key": {"py/b64": "gcp+vfaMWkvmGXYQd3uE/BdK3btf/TR+8xuqhvOYDw0=\n"}}, "_seed": {"py/b64": "VGhyaWZ0MDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDA=\n"}, "py/object": "stp_core.crypto.nacl_wrappers.SigningKey", "_signing_key": {"py/b64": "VGhyaWZ0MDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDCByn699oxaS+YZdhB3e4T8F0rdu1/9NH7z\nG6qG85gPDQ==\n"}}}, "_verkey": "3sphzTb2itL2mwSeJ1Ji28", "_identifier": "H2aKRiDeq8aLZSydQMDbtf", "abbreviated": true, "seed": {"py/b64": "VGhyaWZ0MDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDA=\n"}, "_alias": null, "sk": {"py/id": 89}, "py/object": "plenum.common.signer_did.DidSigner"}, 1504206313653873]}, "py/seq": [{"py/id": 87}, 1504206313653873], "py/object": "plenum.client.wallet.IdData"}, "Th7MpTaRZVRYnPiabds81Y": {"py/newargs": {"py/tuple": [{"naclSigner": {"verhex": {"py/b64": "ZDgyNzQ2NThkMjNiYzJlNDE5NGQxMjMyZmZmNzBlMmIzNDRiYWY2MjEwNjdlYjZhYTkyYjJmY2Vm\nMGM5NGU4ZA==\n"}, "verraw": {"py/b64": "2CdGWNI7wuQZTRIy//cOKzRLr2IQZ+tqqSsvzvDJTo0=\n"}, "keyraw": {"py/b64": 
"MDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwU3Rld2FyZDE=\n"}, "py/object": "stp_core.crypto.nacl_wrappers.Signer", "keyhex": {"py/b64": "MzAzMDMwMzAzMDMwMzAzMDMwMzAzMDMwMzAzMDMwMzAzMDMwMzAzMDMwMzAzMDMwNTM3NDY1Nzc2\nMTcyNjQzMQ==\n"}, "key": {"verify_key": {"py/object": "stp_core.crypto.nacl_wrappers.VerifyKey", "_key": {"py/b64": "2CdGWNI7wuQZTRIy//cOKzRLr2IQZ+tqqSsvzvDJTo0=\n"}}, "_seed": {"py/b64": "MDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwU3Rld2FyZDE=\n"}, "py/object": "stp_core.crypto.nacl_wrappers.SigningKey", "_signing_key": {"py/b64": "MDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwU3Rld2FyZDHYJ0ZY0jvC5BlNEjL/9w4rNEuvYhBn62qp\nKy/O8MlOjQ==\n"}}}, "_verkey": "7TYfekw4GUagBnBVCqPjiC", "_identifier": "Th7MpTaRZVRYnPiabds81Y", "abbreviated": true, "seed": {"py/b64": "MDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwU3Rld2FyZDE=\n"}, "_alias": null, "sk": {"py/id": 94}, "py/object": "plenum.common.signer_did.DidSigner"}, 1504204318462474]}, "py/seq": [{"py/id": 92}, 1504204318462474], "py/object": "plenum.client.wallet.IdData"}, "CzkavE58zgX7rUMrzSinLr": {"py/newargs": {"py/tuple": [{"naclSigner": {"verhex": {"py/b64": "NjEyNGM3YmQxZmVjYzVkYmI4ZDYyODNkOTljYThjYWJmMGM4ZTU0NDkwMzE1NTM4OTI5NmJhNmE3\nMjYxYTJkZQ==\n"}, "verraw": {"py/b64": "YSTHvR/sxdu41ig9mcqMq/DI5USQMVU4kpa6anJhot4=\n"}, "keyraw": {"py/b64": "QWNtZTAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDA=\n"}, "py/object": "stp_core.crypto.nacl_wrappers.Signer", "keyhex": {"py/b64": "NDE2MzZkNjUzMDMwMzAzMDMwMzAzMDMwMzAzMDMwMzAzMDMwMzAzMDMwMzAzMDMwMzAzMDMwMzAz\nMDMwMzAzMA==\n"}, "key": {"verify_key": {"py/object": "stp_core.crypto.nacl_wrappers.VerifyKey", "_key": {"py/b64": "YSTHvR/sxdu41ig9mcqMq/DI5USQMVU4kpa6anJhot4=\n"}}, "_seed": {"py/b64": "QWNtZTAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDA=\n"}, "py/object": "stp_core.crypto.nacl_wrappers.SigningKey", "_signing_key": {"py/b64": "QWNtZTAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDBhJMe9H+zF27jWKD2Zyoyr8MjlRJAxVTiS\nlrpqcmGi3g==\n"}}}, "_verkey": "WjXEvZ9xj4Tz9sLtzf7HVP", "_identifier": "CzkavE58zgX7rUMrzSinLr", "abbreviated": 
true, "seed": {"py/b64": "QWNtZTAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDA=\n"}, "_alias": null, "sk": {"py/id": 84}, "py/object": "plenum.common.signer_did.DidSigner"}, 1504204254926855]}, "py/seq": [{"py/id": 82}, 1504204254926855], "py/object": "plenum.client.wallet.IdData"}, "ULtgFQJe6bjiFbs7ke3NJD": {"py/newargs": {"py/tuple": [{"naclSigner": {"verhex": {"py/b64": "ZGQ2ZGI4ZWI5MWNmZGNlNTBmMWE0ODhkOTUzMzI1ZGEyNjdlMzYyMzE5N2EwN2Q0MTQ4YjI1ZjM3\nZWZjMjg3ZQ==\n"}, "verraw": {"py/b64": "3W2465HP3OUPGkiNlTMl2iZ+NiMZegfUFIsl8378KH4=\n"}, "keyraw": {"py/b64": "RmFiZXIwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDA=\n"}, "py/object": "stp_core.crypto.nacl_wrappers.Signer", "keyhex": {"py/b64": "NDY2MTYyNjU3MjMwMzAzMDMwMzAzMDMwMzAzMDMwMzAzMDMwMzAzMDMwMzAzMDMwMzAzMDMwMzAz\nMDMwMzAzMA==\n"}, "key": {"verify_key": {"py/object": "stp_core.crypto.nacl_wrappers.VerifyKey", "_key": {"py/b64": "3W2465HP3OUPGkiNlTMl2iZ+NiMZegfUFIsl8378KH4=\n"}}, "_seed": {"py/b64": "RmFiZXIwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDA=\n"}, "py/object": "stp_core.crypto.nacl_wrappers.SigningKey", "_signing_key": {"py/b64": "RmFiZXIwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDDdbbjrkc/c5Q8aSI2VMyXaJn42Ixl6B9QU\niyXzfvwofg==\n"}}}, "_verkey": "5kh3FB4H3NKq7tUDqeqHc1", "_identifier": "ULtgFQJe6bjiFbs7ke3NJD", "abbreviated": true, "seed": {"py/b64": "RmFiZXIwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDA=\n"}, "_alias": null, "sk": {"py/id": 99}, "py/object": "plenum.common.signer_did.DidSigner"}, 1504204144694724]}, "py/seq": [{"py/id": 97}, 1504204144694724], "py/object": "plenum.client.wallet.IdData"}}} \ No newline at end of file diff --git a/indy_client/test/client/TestClient.py b/indy_client/test/client/TestClient.py deleted file mode 100644 index 3f71f893a..000000000 --- a/indy_client/test/client/TestClient.py +++ /dev/null @@ -1,35 +0,0 @@ -from plenum.test.test_stack import StackedTester, TestStack -from plenum.test.testable import spyable -from indy_client.client.client import Client - -from indy_common.test.helper import TempStorage - 
-from indy_common.config_util import getConfig - -from stp_core.common.log import getlogger -logger = getlogger() - - -class TestClientStorage(TempStorage): - def __init__(self, name, baseDir): - self.name = name - self.baseDir = baseDir - - def cleanupDataLocation(self): - self.cleanupDirectory(self.dataLocation) - - -@spyable(methods=[Client.handleOneNodeMsg]) -class TestClient(Client, StackedTester, TestClientStorage): - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - TestClientStorage.__init__(self, self.name, self.basedirpath) - - @staticmethod - def stackType(): - return TestStack - - def onStopping(self, *args, **kwargs): - # TODO: Why we needed following line? - # self.cleanupDataLocation() - super().onStopping(*args, **kwargs) diff --git a/indy_client/test/client/__init__.py b/indy_client/test/client/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/indy_client/test/client/wallet/test_link_obj.py b/indy_client/test/client/wallet/test_link_obj.py deleted file mode 100644 index 370c624f2..000000000 --- a/indy_client/test/client/wallet/test_link_obj.py +++ /dev/null @@ -1,9 +0,0 @@ -import pytest -from indy_client.client.wallet.connection import Connection - - -@pytest.mark.skip(reason='INDY-105') -def test_link_has_requested_proofs(): - testLink = Connection("Test") - - # testLink.requestedProofs diff --git a/indy_client/test/conftest.py b/indy_client/test/conftest.py deleted file mode 100644 index 069598fb1..000000000 --- a/indy_client/test/conftest.py +++ /dev/null @@ -1,329 +0,0 @@ -import os -import logging - -import base58 - -from anoncreds.protocol.utils import randomString - -from plenum.common.member.member import Member -from plenum.common.txn_util import get_payload_data -from plenum.test.pool_transactions.helper import sdk_add_new_nym -from plenum.common.keygen_utils import initLocalKeys -from plenum.common.signer_did import DidSigner -from plenum.common.util import friendlyToRaw -from 
stp_core.common.log import Logger - -from stp_core.loop.eventually import eventually -from indy_client.test.constants import primes -import warnings -from copy import deepcopy - -from indy_common import strict_types - -# typecheck during tests -from stp_core.network.port_dispenser import genHa - -strict_types.defaultShouldCheck = True - -import pytest - -from plenum.common.constants import VERKEY, ALIAS, STEWARD, TXN_ID, TRUSTEE, TYPE, NODE_IP, NODE_PORT, CLIENT_IP, \ - CLIENT_PORT, SERVICES, VALIDATOR - -from indy_client.client.wallet.wallet import Wallet -from indy_common.constants import NYM, TRUST_ANCHOR -from indy_common.constants import TXN_TYPE, TARGET_NYM, ROLE -from indy_client.test.cli.helper import newCLI, addTrusteeTxnsToGenesis, addTxnsToGenesisFile -from indy_node.test.helper import makePendingTxnsRequest, buildStewardClient, \ - TestNode -from indy_client.test.helper import addRole, genTestClient, TestClient, createNym, getClientAddedWithRole - -# noinspection PyUnresolvedReferences -from plenum.test.conftest import tdir, client_tdir, nodeReg, \ - whitelist, concerningLogLevels, logcapture, \ - tdirWithDomainTxns as PTdirWithDomainTxns, txnPoolNodeSet, poolTxnData, dirName, \ - poolTxnNodeNames, allPluginsPath, tdirWithNodeKeepInited, tdirWithPoolTxns, \ - poolTxnStewardData, poolTxnStewardNames, getValueFromModule, \ - txnPoolNodesLooper, patchPluginManager, tdirWithClientPoolTxns, \ - warncheck, warnfilters as plenum_warnfilters, setResourceLimits, do_post_node_creation - -# noinspection PyUnresolvedReferences -from indy_common.test.conftest import tconf, general_conf_tdir, poolTxnTrusteeNames, \ - domainTxnOrderedFields, looper, config_helper_class, node_config_helper_class - -from plenum.test.conftest import sdk_pool_handle as plenum_pool_handle, sdk_pool_data, sdk_wallet_steward, \ - sdk_wallet_handle, sdk_wallet_data, sdk_steward_seed, sdk_wallet_trustee, sdk_trustee_seed, trustee_data, \ - sdk_wallet_client, sdk_client_seed, poolTxnClientData, 
poolTxnClientNames, poolTxnData - -Logger.setLogLevel(logging.DEBUG) - - -@pytest.fixture(scope="module") -def sdk_wallet_trust_anchor(looper, sdk_pool_handle, sdk_wallet_trustee): - return sdk_add_new_nym(looper, sdk_pool_handle, sdk_wallet_trustee, - alias='TA-1', role='TRUST_ANCHOR') - - -@pytest.fixture(scope="session") -def warnfilters(plenum_warnfilters): - def _(): - plenum_warnfilters() - warnings.filterwarnings( - 'ignore', category=ResourceWarning, message='unclosed file') - - return _ - - -@pytest.fixture(scope="module") -def primes1(): - P_PRIME1, Q_PRIME1 = primes.get("prime1") - return dict(p_prime=P_PRIME1, q_prime=Q_PRIME1) - - -@pytest.fixture(scope="module") -def primes2(): - P_PRIME2, Q_PRIME2 = primes.get("prime2") - return dict(p_prime=P_PRIME2, q_prime=Q_PRIME2) - - -@pytest.fixture(scope="module") -def updatedPoolTxnData(poolTxnData): - data = deepcopy(poolTxnData) - trusteeSeed = 'thisistrusteeseednotsteward12345' - signer = DidSigner(seed=trusteeSeed.encode()) - t = Member.nym_txn(nym=signer.identifier, - name="Trustee1", - verkey=signer.verkey, - role=TRUSTEE, - txn_id="6b86b273ff34fce19d6b804eff5a3f5747ada4eaa22f1d49c01e52ddb7875b4a") - data["seeds"]["Trustee1"] = trusteeSeed - data["txns"].insert(0, t) - return data - - -@pytest.fixture(scope="module") -def trusteeData(poolTxnTrusteeNames, updatedPoolTxnData): - ret = [] - for name in poolTxnTrusteeNames: - seed = updatedPoolTxnData["seeds"][name] - txn = next( - (txn for txn in updatedPoolTxnData["txns"] if get_payload_data(txn)[ALIAS] == name), - None) - ret.append((name, seed.encode(), txn)) - return ret - - -@pytest.fixture(scope="module") -def trusteeWallet(trusteeData): - name, sigseed, txn = trusteeData[0] - wallet = Wallet('trustee') - signer = DidSigner(seed=sigseed) - wallet.addIdentifier(signer=signer) - return wallet - - -# TODO: This fixture is present in indy_node too, it should be -# indy_common's conftest. 
-@pytest.fixture(scope="module") -# TODO devin -def trustee(nodeSet, looper, tdirWithClientPoolTxns, trusteeWallet): - return buildStewardClient(looper, tdirWithClientPoolTxns, trusteeWallet) - - -@pytest.fixture(scope="module") -def stewardWallet(poolTxnStewardData): - name, sigseed = poolTxnStewardData - wallet = Wallet('steward') - signer = DidSigner(seed=sigseed) - wallet.addIdentifier(signer=signer) - return wallet - - -@pytest.fixture(scope="module") -def steward(nodeSet, looper, tdirWithClientPoolTxns, stewardWallet): - return buildStewardClient(looper, tdirWithClientPoolTxns, stewardWallet) - - -@pytest.fixture(scope="module") -def genesisTxns(stewardWallet: Wallet, trusteeWallet: Wallet): - return [Member.nym_txn( - nym = stewardWallet.defaultId, - verkey=stewardWallet.getVerkey(), - role=STEWARD, - txn_id="9c86b273ff34fce19d6b804eff5a3f5747ada4eaa22f1d49c01e52ddb7875b4b" - )] - - -@pytest.fixture(scope="module") -def testNodeClass(): - return TestNode - - -@pytest.fixture(scope="module") -def testClientClass(): - return TestClient - - -@pytest.fixture(scope="module") -def tdirWithDomainTxns(PTdirWithDomainTxns, poolTxnTrusteeNames, - trusteeData, genesisTxns, domainTxnOrderedFields, - tconf): - addTrusteeTxnsToGenesis(poolTxnTrusteeNames, trusteeData, - PTdirWithDomainTxns, tconf.domainTransactionsFile) - addTxnsToGenesisFile(PTdirWithDomainTxns, tconf.domainTransactionsFile, - genesisTxns, domainTxnOrderedFields) - return PTdirWithDomainTxns - - -@pytest.fixture(scope='module') -def sdk_pool_handle(plenum_pool_handle, nodeSet): - return plenum_pool_handle - - -@pytest.fixture(scope="module") -def nodeSet(txnPoolNodeSet): - return txnPoolNodeSet - - -@pytest.fixture(scope="module") -def client1Signer(): - seed = b'client1Signer secret key........' 
- signer = DidSigner(seed=seed) - testable_verkey = friendlyToRaw(signer.identifier) - testable_verkey += friendlyToRaw(signer.verkey[1:]) - testable_verkey = base58.b58encode(testable_verkey).decode("utf-8") - assert testable_verkey == '6JvpZp2haQgisbXEXE9NE6n3Tuv77MZb5HdF9jS5qY8m' - return signer - - -@pytest.fixture("module") -def trustAnchorCli(looper, tdir): - return newCLI(looper, tdir) - - -@pytest.fixture(scope="module") -def clientAndWallet1(client1Signer, looper, nodeSet, tdirWithClientPoolTxns): - client, wallet = genTestClient(nodeSet, tmpdir=tdirWithClientPoolTxns, usePoolLedger=True) - wallet = Wallet(client.name) - wallet.addIdentifier(signer=client1Signer) - return client, wallet - - -@pytest.fixture(scope="module") -def client1(clientAndWallet1, looper): - client, wallet = clientAndWallet1 - looper.add(client) - looper.run(client.ensureConnectedToNodes()) - return client - - -@pytest.fixture(scope="module") -def added_client_without_role(steward, stewardWallet, looper, - wallet1): - createNym(looper, - wallet1.defaultId, - steward, - stewardWallet, - role=None, - verkey=wallet1.getVerkey()) - return wallet1 - - -@pytest.fixture(scope="module") -def wallet1(clientAndWallet1): - return clientAndWallet1[1] - - -@pytest.fixture(scope="module") -def trustAnchorWallet(): - wallet = Wallet('trustAnchor') - seed = b'trust anchors are people too....' 
- wallet.addIdentifier(seed=seed) - return wallet - - -@pytest.fixture(scope="module") -def trustAnchor(nodeSet, addedTrustAnchor, trustAnchorWallet, looper, tdirWithClientPoolTxns): - s, _ = genTestClient(nodeSet, tmpdir=tdirWithClientPoolTxns, usePoolLedger=True) - s.registerObserver(trustAnchorWallet.handleIncomingReply) - looper.add(s) - looper.run(s.ensureConnectedToNodes()) - makePendingTxnsRequest(s, trustAnchorWallet) - return s - - -@pytest.fixture(scope="module") -def addedTrustAnchor(nodeSet, steward, stewardWallet, looper, - trustAnchorWallet): - createNym(looper, - trustAnchorWallet.defaultId, - steward, - stewardWallet, - role=TRUST_ANCHOR, - verkey=trustAnchorWallet.getVerkey()) - return trustAnchorWallet - - -@pytest.fixture(scope="module") -def userWalletA(nodeSet, addedTrustAnchor, - trustAnchorWallet, looper, trustAnchor): - return addRole(looper, trustAnchor, trustAnchorWallet, 'userA', - addVerkey=False) - - -@pytest.fixture(scope="module") -def sdk_user_wallet_a(nodeSet, sdk_wallet_trust_anchor, - sdk_pool_handle, looper, trustAnchor): - return sdk_add_new_nym(looper, sdk_pool_handle, - sdk_wallet_trust_anchor, alias='userA', - skipverkey=True) - - -@pytest.fixture(scope="module") -def userWalletB(nodeSet, addedTrustAnchor, - trustAnchorWallet, looper, trustAnchor): - return addRole(looper, trustAnchor, trustAnchorWallet, 'userB', - addVerkey=False) - - -@pytest.fixture(scope="module") -def userIdA(userWalletA): - return userWalletA.defaultId - - -@pytest.fixture(scope="module") -def userIdB(userWalletB): - return userWalletB.defaultId - - -@pytest.fixture(scope="module") -def userClientA(nodeSet, userWalletA, looper, tdirWithClientPoolTxns): - u, _ = genTestClient(nodeSet, tmpdir=tdirWithClientPoolTxns, usePoolLedger=True) - u.registerObserver(userWalletA.handleIncomingReply) - looper.add(u) - looper.run(u.ensureConnectedToNodes()) - makePendingTxnsRequest(u, userWalletA) - return u - - -@pytest.fixture(scope="module") -def 
userClientB(nodeSet, userWalletB, looper, tdirWithClientPoolTxns): - u, _ = genTestClient(nodeSet, tmpdir=tdirWithClientPoolTxns, usePoolLedger=True) - u.registerObserver(userWalletB.handleIncomingReply) - looper.add(u) - looper.run(u.ensureConnectedToNodes()) - makePendingTxnsRequest(u, userWalletB) - return u - - -@pytest.fixture(scope="module") -def client_ledger_dir(client_tdir, tconf): - return os.path.join(client_tdir, 'networks', tconf.NETWORK_NAME) - - -def pytest_assertrepr_compare(op, left, right): - if isinstance(left, str) and isinstance(right, str): - if op in ('in', 'not in'): - mod = 'not ' if 'not' in op else '' - lines = [' ' + s for s in right.split('\n')] - return ['"{}" should {}be in...'.format(left, mod)] + lines diff --git a/indy_client/test/constants.py b/indy_client/test/constants.py deleted file mode 100644 index b08368304..000000000 --- a/indy_client/test/constants.py +++ /dev/null @@ -1,49 +0,0 @@ -from config.config import cmod - -primes = { - "prime1": - ( - cmod.integer(int( - "".join( - """15732949138937579391219059496113493280403242640311079747 - 67301078043564845160610513453327631418060058384363049226124 - 95876180233509449197495032194146432047460167589034147716097 - 41788050395213980524159162235382862938333286942502908689845 - 22278954188297999456509738489839014597334262127359796688359 - 84691928193677469""" - .split() - ))), - cmod.integer(int( - "".join( - """1513238926483731965795157528265196838367648736076320720 - 5759183721669862272955753403513858727659415632080076852582 - 5023728398410073692081011811496168877166664537052088207068 - 0611725948793987738723529209123909831994169273886883192079 - 4649381044920370210055927143958675325672890071399009716848 - 4829574000438573295723""" - .split() - ))) - ), "prime2": - ( - cmod.integer(int( - "".join( - """1506196778844683532080581566329538914319752714166209556 - 1454803993724676961062201703338539465887948418685223146923 - 8992217246264205570458379437126692055331206248530723117202 - 
1317399667377603997554909355892234011237620518236023438105 - 5497880303280360690776193758710196919324192135101143075097 - 0746500680609001799529""" - .split() - ))), - cmod.integer(int( - "".join( - """1715908575684366449923593477197037640485010783986660619 - 2171906439582749697069687948174031114114827360739265732110 - 3691543916274965279072000206208571551864201305434022165176 - 5633639549211835762300728126357446293372902429546994271603 - 6258610206896228507621320082845183814295963700604843930727 - 3563604553818326766703""" - .split() - ))) - ) -} diff --git a/indy_client/test/helper.py b/indy_client/test/helper.py deleted file mode 100644 index 5c1ebb630..000000000 --- a/indy_client/test/helper.py +++ /dev/null @@ -1,382 +0,0 @@ -from typing import Union, Tuple -import inspect -import re - -from collections import namedtuple -from pathlib import Path - -from config.config import cmod -from plenum.common.util import randomString - -from plenum.test import waits -from indy_client.test.client.TestClient import TestClient - -from stp_core.common.log import getlogger -from plenum.common.signer_did import DidSigner -from plenum.common.constants import REQNACK, OP_FIELD_NAME, REJECT, REPLY -from plenum.common.types import f, HA -from stp_core.types import Identifier - -from stp_core.loop.eventually import eventually -from plenum.test.test_client import genTestClient as genPlenumTestClient, \ - genTestClientProvider as genPlenumTestClientProvider - -from indy_common.identity import Identity -from indy_common.constants import NULL - -from indy_client.client.wallet.upgrade import Upgrade -from indy_client.client.wallet.wallet import Wallet - -logger = getlogger() - - -def createNym(looper, nym, creatorClient, creatorWallet: Wallet, role=None, - verkey=None): - idy = Identity(identifier=nym, - verkey=verkey, - role=role) - creatorWallet.addTrustAnchoredIdentity(idy) - reqs = creatorWallet.preparePending() - creatorClient.submitReqs(*reqs) - - def check(): - assert 
creatorWallet._trustAnchored[nym].seqNo - - timeout = waits.expectedTransactionExecutionTime( - len(creatorClient.nodeReg) - ) - looper.run(eventually(check, retryWait=1, timeout=timeout)) - - -def makePendingTxnsRequest(client, wallet): - wallet.pendSyncRequests() - prepared = wallet.preparePending() - client.submitReqs(*prepared) - - -def buildStewardClient(looper, tdir, stewardWallet): - s, _ = genTestClient(tmpdir=tdir, usePoolLedger=True) - s.registerObserver(stewardWallet.handleIncomingReply) - looper.add(s) - looper.run(s.ensureConnectedToNodes()) - makePendingTxnsRequest(s, stewardWallet) - return s - - -def addRole(looper, creatorClient, creatorWallet, name, - addVerkey=True, role=None): - wallet = Wallet(name) - signer = DidSigner() - idr, _ = wallet.addIdentifier(signer=signer) - verkey = wallet.getVerkey(idr) if addVerkey else None - createNym(looper, idr, creatorClient, creatorWallet, verkey=verkey, - role=role) - return wallet - - -def submitPoolUpgrade( - looper, - senderClient, - senderWallet, - name, - action, - version, - schedule, - timeout, - sha256): - upgrade = Upgrade(name, action, schedule, version, sha256, timeout, - senderWallet.defaultId) - senderWallet.doPoolUpgrade(upgrade) - reqs = senderWallet.preparePending() - senderClient.submitReqs(*reqs) - - def check(): - assert senderWallet._upgrades[upgrade.key].seqNo - - timeout = waits.expectedTransactionExecutionTime( - len(senderClient.nodeReg) - ) - looper.run(eventually(check, timeout=timeout)) - - -def getClientAddedWithRole(nodeSet, tdir, looper, client, wallet, name, - role=None, addVerkey=True, - client_connects_to=None): - newWallet = addRole(looper, client, wallet, - name=name, addVerkey=addVerkey, role=role) - c, _ = genTestClient(nodeSet, tmpdir=tdir, usePoolLedger=True) - looper.add(c) - looper.run(c.ensureConnectedToNodes(count=client_connects_to)) - c.registerObserver(newWallet.handleIncomingReply) - return c, newWallet - - -def checkErrorMsg(typ, client, reqId, contains='', 
nodeCount=4): - reqs = [x for x, _ in client.inBox if x[OP_FIELD_NAME] == typ and - x[f.REQ_ID.nm] == reqId] - for r in reqs: - assert f.REASON.nm in r - assert contains in r[f.REASON.nm], '{} not in {}'.format( - contains, r[f.REASON.nm]) - assert len(reqs) == nodeCount - - -def checkNacks(client, reqId, contains='', nodeCount=4): - checkErrorMsg(REQNACK, client, reqId, - contains=contains, nodeCount=nodeCount) - - -def checkRejects(client, reqId, contains='', nodeCount=4): - checkErrorMsg(REJECT, client, reqId, contains=contains, - nodeCount=nodeCount) - - -def checkAccpets(client, reqId, nodeCount=4): - checkErrorMsg(REPLY, client, reqId, contains='', - nodeCount=nodeCount) - - -def submitAndCheckAccepts(looper, client, wallet, op, identifier): - reqId = submit(wallet, op, identifier, client) - timeout = waits.expectedReqNAckQuorumTime() - looper.run(eventually(checkAccpets, - client, - reqId, - retryWait=1, - timeout=timeout)) - - -def submit(wallet, op, identifier, client): - req = wallet.signOp(op, identifier=identifier) - wallet.pendRequest(req) - reqs = wallet.preparePending() - client.submitReqs(*reqs) - - return req.reqId - - -def genTestClient(nodes=None, - nodeReg=None, - tmpdir=None, - identifier: Identifier = None, - verkey: str = None, - peerHA: Union[HA, Tuple[str, int]] = None, - testClientClass=TestClient, - usePoolLedger=False, - name: str = None) -> (TestClient, Wallet): - testClient, wallet = genPlenumTestClient(nodes, - nodeReg, - tmpdir, - testClientClass, - verkey=verkey, - identifier=identifier, - bootstrapKeys=False, - usePoolLedger=usePoolLedger, - name=name) - testClient.peerHA = peerHA - return testClient, wallet - - -def genConnectedTestClient(looper, - nodes=None, - nodeReg=None, - tmpdir=None, - identifier: Identifier = None, - verkey: str = None - ) -> TestClient: - c, w = genTestClient(nodes, nodeReg=nodeReg, tmpdir=tmpdir, - identifier=identifier, verkey=verkey) - looper.add(c) - looper.run(c.ensureConnectedToNodes()) - return c, 
w - - -def genTestClientProvider(nodes=None, - nodeReg=None, - tmpdir=None, - clientGnr=genTestClient): - return genPlenumTestClientProvider(nodes, nodeReg, tmpdir, clientGnr) - - -def clientFromSigner(signer, looper, nodeSet, tdir): - wallet = Wallet(signer.identifier) - wallet.addIdentifier(signer) - s = genTestClient(nodeSet, tmpdir=tdir, identifier=signer.identifier) - looper.add(s) - looper.run(s.ensureConnectedToNodes()) - return s - - -def addUser(looper, creatorClient, creatorWallet, name, - addVerkey=True): - wallet = Wallet(name) - signer = DidSigner() - idr, _ = wallet.addIdentifier(signer=signer) - verkey = wallet.getVerkey(idr) if addVerkey else None - createNym(looper, idr, creatorClient, creatorWallet, verkey=verkey) - return wallet - - -def peer_path(filename): - s = inspect.stack() - caller = None - for i in range(1, len(s)): - # pycharm can wrap calls, so we want to ignore those in the stack - if 'pycharm' not in s[i].filename: - caller = s[i].filename - break - return Path(caller).parent.joinpath(filename) - - -def _within_hint(match, ctx): - w = match.group(1) - ctx.cmd_within = float(w) if w else None - - -def _ignore_extra_lines(match, ctx): - ctx.ignore_extra_lines = True - - -CommandHints = namedtuple('CommandHints', 'pattern, callback') -command_hints = [ - CommandHints(r'\s*within\s*:\s*(\d*\.?\d*)', _within_hint), - CommandHints(r'\s*ignore\s*extra\s*lines\s*', _ignore_extra_lines), -] - - -# marker class for regex pattern -class P(str): - def match(self, other): - return re.match('^{}$'.format(self), other) - - -class RunnerContext: - def __init__(self): - self.clis = {} - self.output = [] - self.cmd_within = None - self.line_no = 0 - - -class ScriptRunner: - def __init__(self, CliBuilder, looper, be, do, expect): - self._cli_builder = CliBuilder - self._looper = looper - self._be = be - self._do = do - self._expect = expect - - # contexts allows one ScriptRunner maintain state for multiple scripts - self._contexts = {} - 
self._cur_context_name = None - - Router = namedtuple('Router', 'pattern, ends_output, handler') - - self.routers = [ - Router( - re.compile(r'\s*#(.*)'), - False, - self._handleComment), - Router( - re.compile(r'\s*(\S*)?\s*>\s*(.*?)\s*(?:<--(.*?))?\s*'), - True, - self._handleCommand), - Router( - re.compile(r'\s*~\s*(be|start)\s+(.*)'), - True, - self._handleBe)] - - # noinspection PyAttributeOutsideInit - - def cur_ctx(self): - try: - return self._contexts[self._cur_context_name] - except KeyError: - self._contexts[self._cur_context_name] = RunnerContext() - return self._contexts[self._cur_context_name] - - def run(self, filename, context=None): - # by default, use a new context for each run - self._cur_context_name = context if context else randomString() - - contents = Path(filename).read_text() - - for line in contents.lstrip().splitlines(): - self.cur_ctx().line_no += 1 - for r in self.routers: - m = r.pattern.fullmatch(line) - if m: - if r.ends_output: - self._checkOutput() - r.handler(m) - break - else: - self.cur_ctx().output.append(line) - - self._checkOutput() - - def _be_str(self, cli_str, create_if_no_exist=False): - if cli_str not in self.cur_ctx().clis: - if not create_if_no_exist: - raise RuntimeError("{} does not exist; 'start' it first". 
- format(cli_str)) - self.cur_ctx().clis[cli_str] = next( - self._cli_builder(cli_str, - looper=self._looper, - unique_name=cli_str + '-' + - self._cur_context_name)) - self._be(self.cur_ctx().clis[cli_str]) - - def _handleBe(self, match): - self._be_str(match.group(2), True) - - def _handleComment(self, match): - c = match.group(1).strip() - if c == 'break': - pass - - def _handleCommand(self, match): - cli_str = match.group(1) - if cli_str: - self._be_str(cli_str) - - cmd = match.group(2) - - hint_str = match.group(3) - if hint_str: - hints = hint_str.strip().split(',') - for hint in hints: - hint = hint.strip() - for hint_handler in command_hints: - m = re.match(hint_handler.pattern, hint) - if m: - hint_handler.callback(m, self.cur_ctx()) - break - else: - raise RuntimeError("no handler found for hint '{}' at " - "line no {}". - format(hint, self.cur_ctx().line_no)) - - self._do(cmd) - - def _checkOutput(self): - if self.cur_ctx().output: - new = [] - reout = re.compile(r'(.*)<--\s*regex\s*') - for o in self.cur_ctx().output: - m = reout.fullmatch(o) - if m: - new.append(P(m.group(1).rstrip())) - else: - new.append(o) - - ignore_extra_lines = False - if hasattr(self.cur_ctx(), 'ignore_extra_lines'): - ignore_extra_lines = self.cur_ctx().ignore_extra_lines - self._expect(new, - within=self.cur_ctx().cmd_within, - line_no=self.cur_ctx().line_no, - ignore_extra_lines=ignore_extra_lines) - self.cur_ctx().output = [] - self.cur_ctx().cmd_within = None - self.cur_ctx().ignore_extra_lines = False diff --git a/indy_client/test/scripts/__init__.py b/indy_client/test/scripts/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/indy_client/test/scripts/test_reset_client.py b/indy_client/test/scripts/test_reset_client.py deleted file mode 100644 index c1ef13eb1..000000000 --- a/indy_client/test/scripts/test_reset_client.py +++ /dev/null @@ -1,59 +0,0 @@ -import os - -from plenum.common.util import randomString -from indy_client.script_helper import 
performIndyBaseDirCleanup, \ - keepFilesInClientReset -from indy_client.test.cli.conftest import aliceCLI, CliBuilder, cliTempLogger - - -def createRandomDirsAndFiles(baseDir): - dirsCreated = [] - filesCreated = [] - - def create(path, file=False, dir=False): - if not os.path.exists(path): - if dir: - os.mkdir(path) - dirsCreated.append(path) - elif file: - with open(path, 'w+') as f: - f.write(randomString(20)) - filesCreated.append(path) - - for n in range(1, 10): - path = os.path.join(baseDir, randomString(5)) - if n % 2 == 0: - create(path, file=True, dir=False) - else: - create(path, file=False, dir=True) - - return dirsCreated, filesCreated - - -def getCurrentDirAndFiles(baseDir): - dirs = [] - files = [] - for name in os.listdir(baseDir): - path = os.path.join(baseDir, name) - if os.path.isdir(name): - dirs.append(path) - else: - files.append(name) - return dirs, files - - -def testResetClient(tconf, aliceCLI): - newDirs, newFiels = createRandomDirsAndFiles(tconf.CLI_BASE_DIR) - beforeCleanupDirs, beforeCleanupFiles = getCurrentDirAndFiles( - tconf.CLI_BASE_DIR) - backupDir = performIndyBaseDirCleanup(tconf.CLI_BASE_DIR) - afterCleanupDirs, afterCleanupFiles = getCurrentDirAndFiles(tconf.CLI_BASE_DIR) - backedupDirs, backedupFiles = getCurrentDirAndFiles(backupDir) - for name in os.listdir(tconf.CLI_BASE_DIR): - assert name in keepFilesInClientReset - - assert newDirs not in afterCleanupDirs - assert newFiels not in afterCleanupFiles - - assert beforeCleanupDirs == backedupDirs - assert beforeCleanupFiles == backedupFiles diff --git a/indy_client/test/state_proof/__init__.py b/indy_client/test/state_proof/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/indy_client/test/state_proof/test_asking_one_node.py b/indy_client/test/state_proof/test_asking_one_node.py deleted file mode 100644 index 677100e2e..000000000 --- a/indy_client/test/state_proof/test_asking_one_node.py +++ /dev/null @@ -1,29 +0,0 @@ -from 
indy_client.test.state_proof.helper import sdk_submit_operation_and_get_replies -from plenum.common.constants import TARGET_NYM, TXN_TYPE, RAW -from indy_common.constants import GET_ATTR -from indy_client.test.test_nym_attrib import attributeData, \ - attributeName, attributeValue, sdk_added_raw_attribute - - -# for node in txnPoolNodeSet[1:]: node.clientstack.stop() - -def test_state_proof_returned_for_get_attr(looper, - sdk_added_raw_attribute, - attributeName, - sdk_pool_handle, - sdk_wallet_trustee): - """ - Tests that client could send get-requests to only one node instead of n - """ - # Prepare and send get-request - get_attr_operation = { - TARGET_NYM: sdk_added_raw_attribute['operation']['dest'], - TXN_TYPE: GET_ATTR, - RAW: attributeName - } - # Get reply and verify that the only one received - replies = sdk_submit_operation_and_get_replies(looper, sdk_pool_handle, - sdk_wallet_trustee, - get_attr_operation) - - assert len(replies) == 1 diff --git a/indy_client/test/test.log b/indy_client/test/test.log deleted file mode 100644 index e69de29bb..000000000 diff --git a/indy_client/test/test_nym_attrib.py b/indy_client/test/test_nym_attrib.py deleted file mode 100644 index 574c4824a..000000000 --- a/indy_client/test/test_nym_attrib.py +++ /dev/null @@ -1,568 +0,0 @@ -import json -from contextlib import contextmanager - -import base58 -import libnacl.public -import pytest - -from plenum.common.constants import ENC, REPLY, TXN_TIME, TXN_ID, \ - OP_FIELD_NAME, NYM, TARGET_NYM, \ - TXN_TYPE, ROLE, NONCE, VERKEY -from plenum.common.exceptions import RequestRejectedException, RequestNackedException -from plenum.common.signer_did import DidSigner -from plenum.common.txn_util import get_type, get_payload_data, get_req_id -from plenum.common.types import f -from plenum.common.util import adict -from plenum.test import waits -from indy_client.client.client import Client -from indy_client.client.wallet.attribute import Attribute, LedgerStore -from 
indy_client.client.wallet.wallet import Wallet -from indy_client.test.helper import checkNacks, \ - genTestClient, createNym, checkRejects, makePendingTxnsRequest -from indy_common.constants import SKEY, TRUST_ANCHOR_STRING, ATTRIB, TRUST_ANCHOR -from indy_common.identity import Identity -from indy_common.util import getSymmetricallyEncryptedVal -from indy_node.test.helper import submitAndCheck, \ - makeAttribRequest, makeGetNymRequest, addAttributeAndCheck, TestNode, \ - getAttribute, sdk_add_attribute_and_check -from plenum.test.helper import sdk_sign_and_submit_op, sdk_get_bad_response -from plenum.test.pool_transactions.helper import sdk_add_new_nym -from stp_core.common.log import getlogger -from stp_core.loop.eventually import eventually - -logger = getlogger() - -whitelistArray = [] - - -def whitelist(): - return whitelistArray - - -@pytest.fixture(scope="module") -def attributeName(): - return 'endpoint' - - -@pytest.fixture(scope="module") -def attributeValue(): - return { - "ha": "127.0.0.1:9700", - "verkey": "F46i9NmUN72QMbbm5qWetB6CmfT7hiU8BM1qrtTGLKsc" - } - - -@pytest.fixture(scope="module") -def attributeData(attributeName, attributeValue): - return json.dumps({attributeName: attributeValue}) - - -@pytest.fixture(scope="module") -def addedRawAttribute(userWalletA: Wallet, trustAnchor: Client, - trustAnchorWallet: Wallet, attributeData, looper): - attrib = Attribute(name='test attribute', - origin=trustAnchorWallet.defaultId, - value=attributeData, - dest=userWalletA.defaultId, - ledgerStore=LedgerStore.RAW) - addAttributeAndCheck(looper, trustAnchor, trustAnchorWallet, attrib) - return attrib - - -@pytest.fixture(scope="module") -def sdk_added_raw_attribute(sdk_pool_handle, sdk_user_wallet_a, - sdk_wallet_trust_anchor, attributeData, looper): - _, did_cl = sdk_user_wallet_a - req_couple = sdk_add_attribute_and_check(looper, sdk_pool_handle, sdk_wallet_trust_anchor, attributeData, did_cl) - return req_couple[0] - - -@pytest.fixture(scope="module") 
-def symEncData(attributeData): - encData, secretKey = getSymmetricallyEncryptedVal(attributeData) - return adict(data=attributeData, encData=encData, secretKey=secretKey) - - -@pytest.fixture(scope="module") -def addedEncryptedAttribute(userIdA, trustAnchor, trustAnchorWallet, looper, - symEncData): - op = { - TARGET_NYM: userIdA, - TXN_TYPE: ATTRIB, - ENC: symEncData.encData - } - - return submitAndCheck(looper, trustAnchor, trustAnchorWallet, op)[0] - - -@pytest.fixture(scope="module") -def nonTrustAnchor(looper, nodeSet, tdirWithClientPoolTxns): - sseed = b'a secret trust anchor seed......' - signer = DidSigner(seed=sseed) - c, _ = genTestClient(nodeSet, tmpdir=tdirWithClientPoolTxns, usePoolLedger=True) - w = Wallet(c.name) - w.addIdentifier(signer=signer) - c.registerObserver(w.handleIncomingReply) - looper.add(c) - looper.run(c.ensureConnectedToNodes()) - return c, w - - -@pytest.fixture(scope="module") -def anotherTrustAnchor(nodeSet, steward, stewardWallet, tdirWithClientPoolTxns, looper): - sseed = b'1 secret trust anchor seed......' 
- signer = DidSigner(seed=sseed) - c, _ = genTestClient(nodeSet, tmpdir=tdirWithClientPoolTxns, usePoolLedger=True) - w = Wallet(c.name) - w.addIdentifier(signer=signer) - c.registerObserver(w.handleIncomingReply) - looper.add(c) - looper.run(c.ensureConnectedToNodes()) - createNym(looper, signer.identifier, steward, stewardWallet, - role=TRUST_ANCHOR, verkey=signer.verkey) - return c, w - - -def testCreateStewardWallet(stewardWallet): - pass - - -@contextmanager -def whitelistextras(*msg): - global whitelistArray - ins = {m: (m in whitelistArray) for m in msg} - [whitelistArray.append(m) for m, _in in ins.items() if not _in] - yield - [whitelistArray.remove(m) for m, _in in ins.items() if not _in] - - -def add_nym_operation(signer=None, seed=None, role=None): - if signer is None: - signer = DidSigner(seed=seed) - - op = { - TARGET_NYM: signer.identifier, - VERKEY: signer.verkey, - TXN_TYPE: NYM, - } - - if role is not None: - op[ROLE] = role - - return op - - -def test_non_steward_cannot_create_trust_anchor( - nodeSet, looper, sdk_pool_handle, sdk_wallet_steward): - sdk_wallet_client = sdk_add_new_nym(looper, sdk_pool_handle, sdk_wallet_steward) - with pytest.raises(RequestRejectedException) as e: - sdk_add_new_nym(looper, sdk_pool_handle, sdk_wallet_client, role=TRUST_ANCHOR_STRING) - e.match('None role cannot') - - -def testStewardCreatesATrustAnchor(looper, sdk_pool_handle, sdk_wallet_steward): - sdk_add_new_nym(looper, sdk_pool_handle, sdk_wallet_steward, role=TRUST_ANCHOR_STRING) - - -def testStewardCreatesAnotherTrustAnchor(looper, sdk_pool_handle, sdk_wallet_steward): - sdk_add_new_nym(looper, sdk_pool_handle, sdk_wallet_steward, role=TRUST_ANCHOR_STRING) - - -def test_non_trust_anchor_cannot_create_user( - nodeSet, looper, sdk_pool_handle, sdk_wallet_steward): - sdk_wallet_client = sdk_add_new_nym(looper, sdk_pool_handle, sdk_wallet_steward) - with pytest.raises(RequestRejectedException) as e: - sdk_add_new_nym(looper, sdk_pool_handle, sdk_wallet_client) - 
e.match('None role cannot') - - -def testTrustAnchorCreatesAUser(sdk_user_wallet_a): - pass - - -def test_nym_addition_fails_with_empty_verkey(looper, sdk_pool_handle, - sdk_wallet_trustee): - op = add_nym_operation(seed=b'a secret trust anchor seed......') - op[VERKEY] = '' - req = sdk_sign_and_submit_op(looper, sdk_pool_handle, sdk_wallet_trustee, op) - sdk_get_bad_response(looper, [req], RequestNackedException, - 'Neither a full verkey nor an abbreviated one') - - -@pytest.fixture(scope="module") -def nymsAddedInQuickSuccession(looper, nodeSet, sdk_added_raw_attribute, - trustAnchor, trustAnchorWallet): - usigner = DidSigner() - nym = usigner.verkey - idy = Identity(identifier=nym) - trustAnchorWallet.addTrustAnchoredIdentity(idy) - # Creating a NYM request with same nym again - req = idy.ledgerRequest() - trustAnchorWallet._pending.appendleft((req, idy.identifier)) - reqs = trustAnchorWallet.preparePending() - trustAnchor.submitReqs(*reqs) - - def check(): - assert trustAnchorWallet._trustAnchored[nym].seqNo - - timeout = waits.expectedTransactionExecutionTime(len(nodeSet)) - looper.run(eventually(check, timeout=timeout)) - - timeout = waits.expectedReqNAckQuorumTime() - looper.run(eventually(checkNacks, - trustAnchor, - req.reqId, - "is already added", - retryWait=1, timeout=timeout)) - count = 0 - for node in nodeSet: - for seq, txn in node.domainLedger.getAllTxn(): - if get_type(txn) == NYM and get_payload_data(txn)[TARGET_NYM] == usigner.identifier: - count += 1 - - assert (count == len(nodeSet)) - - -def testTrustAnchorAddsAttributeForUser(sdk_added_raw_attribute): - pass - - -def testClientGetsResponseWithoutConsensusForUsedReqId( - nodeSet, - looper, - steward, - addedTrustAnchor, - trustAnchor, - userWalletA, - attributeName, - attributeData, - addedRawAttribute): - lastReqId = None - replies = {} - for msg, sender in reversed(trustAnchor.inBox): - if msg[OP_FIELD_NAME] == REPLY: - if not lastReqId: - lastReqId = get_req_id(msg[f.RESULT.nm]) - if 
get_req_id(msg[f.RESULT.nm]) == lastReqId: - replies[sender] = msg - if len(replies) == len(nodeSet): - break - - trustAnchorWallet = addedTrustAnchor - attrib = Attribute(name=attributeName, - origin=trustAnchorWallet.defaultId, - value=attributeData, - dest=userWalletA.defaultId, - ledgerStore=LedgerStore.RAW) - trustAnchorWallet.addAttribute(attrib) - req = trustAnchorWallet.preparePending()[0] - _, key = trustAnchorWallet._prepared.pop((req.identifier, req.reqId)) - req.reqId = lastReqId - - req.signature = trustAnchorWallet.signMsg( - msg=req.signingState(identifier=req.identifier), - identifier=req.identifier) - trustAnchorWallet._prepared[req.identifier, req.reqId] = req, key - trustAnchor.submitReqs(req) - - def chk(): - nonlocal trustAnchor, lastReqId, replies - for node in nodeSet: - last = node.spylog.getLast(TestNode.getReplyFromLedger.__name__) - assert last - result = last.result - assert result is not None - - replies[node.clientstack.name][f.RESULT.nm].pop(TXN_TIME, None) - result.result.pop(TXN_TIME, None) - - assert {k: v for k, v in result.result.items() if v is not None}.items() <= \ - replies[node.clientstack.name][f.RESULT.nm].items() - - timeout = waits.expectedTransactionExecutionTime(len(nodeSet)) - looper.run(eventually(chk, retryWait=1, timeout=timeout)) - - -@pytest.fixture(scope="module") -def checkAddAttribute( - userWalletA, - trustAnchor, - trustAnchorWallet, - attributeName, - attributeValue, - addedRawAttribute, - looper): - getAttribute(looper=looper, - trustAnchor=trustAnchor, - trustAnchorWallet=trustAnchorWallet, - userIdA=userWalletA.defaultId, - attributeName=attributeName, - attributeValue=attributeValue) - - -def testTrustAnchorGetAttrsForUser(checkAddAttribute): - pass - - -def test_non_trust_anchor_cannot_add_attribute_for_user( - nodeSet, - nonTrustAnchor, - trustAnchor, - addedTrustAnchor, - userIdA, - looper, - attributeData): - with whitelistextras('UnauthorizedClientRequest'): - client, wallet = nonTrustAnchor - - 
createNym(looper, - wallet.defaultId, - trustAnchor, - addedTrustAnchor, - role=None, - verkey=wallet.getVerkey()) - - attrib = Attribute(name='test1 attribute', - origin=wallet.defaultId, - value=attributeData, - dest=userIdA, - ledgerStore=LedgerStore.RAW) - reqs = makeAttribRequest(client, wallet, attrib) - timeout = waits.expectedTransactionExecutionTime(len(nodeSet)) - looper.run( - eventually( - checkRejects, - client, - reqs[0].reqId, - "UnauthorizedClientRequest('Only identity " - "owner/guardian can add attribute for that identity'", - retryWait=1, - timeout=timeout)) - - -def testOnlyUsersTrustAnchorCanAddAttribute( - nodeSet, - looper, - steward, - stewardWallet, - attributeData, - anotherTrustAnchor, - userIdA): - with whitelistextras("UnauthorizedClientRequest"): - client, wallet = anotherTrustAnchor - attrib = Attribute(name='test2 attribute', - origin=wallet.defaultId, - value=attributeData, - dest=userIdA, - ledgerStore=LedgerStore.RAW) - reqs = makeAttribRequest(client, wallet, attrib) - timeout = waits.expectedReqNAckQuorumTime() - looper.run( - eventually( - checkRejects, - client, - reqs[0].reqId, - "UnauthorizedClientRequest('Only identity " - "owner/guardian can add attribute for that identity'", - retryWait=1, - timeout=timeout)) - - -def testStewardCannotAddUsersAttribute(nodeSet, looper, steward, - stewardWallet, userIdA, attributeData): - with whitelistextras("UnauthorizedClientRequest"): - attrib = Attribute(name='test3 attribute', - origin=stewardWallet.defaultId, - value=attributeData, - dest=userIdA, - ledgerStore=LedgerStore.RAW) - reqs = makeAttribRequest(steward, stewardWallet, attrib) - timeout = waits.expectedReqNAckQuorumTime() - looper.run( - eventually( - checkRejects, - steward, - reqs[0].reqId, - "UnauthorizedClientRequest('Only identity owner/guardian can " - "add attribute for that identity'", - retryWait=1, - timeout=timeout)) - - -@pytest.mark.skip(reason="SOV-560. 
Attribute encryption is done in client") -def testTrustAnchorAddedAttributeIsEncrypted(addedEncryptedAttribute): - pass - - -@pytest.mark.skip(reason="SOV-560. Attribute Disclosure is not done for now") -def testTrustAnchorDisclosesEncryptedAttribute( - addedEncryptedAttribute, - symEncData, - looper, - userSignerA, - trustAnchorSigner, - trustAnchor): - box = libnacl.public.Box(trustAnchorSigner.naclSigner.keyraw, - userSignerA.naclSigner.verraw) - - data = json.dumps({SKEY: symEncData.secretKey, - TXN_ID: addedEncryptedAttribute[TXN_ID]}) - nonce, boxedMsg = box.encrypt(data.encode(), pack_nonce=False) - - op = { - TARGET_NYM: userSignerA.verstr, - TXN_TYPE: ATTRIB, - NONCE: base58.b58encode(nonce).decode("utf-8"), - ENC: base58.b58encode(boxedMsg).decode("utf-8") - } - submitAndCheck(looper, trustAnchor, op, - identifier=trustAnchorSigner.verstr) - - -@pytest.mark.skip(reason="SOV-561. Pending implementation") -def testTrustAnchorAddedAttributeCanBeChanged(addedRawAttribute): - # TODO but only by user(if user has taken control of his identity) and - # trustAnchor - raise NotImplementedError - - -def testGetAttribute( - nodeSet, - addedTrustAnchor, - trustAnchorWallet: Wallet, - trustAnchor, - userIdA, - addedRawAttribute, - attributeData): - assert attributeData in [ - a.value for a in trustAnchorWallet.getAttributesForNym(userIdA)] - - -# TODO: Ask Jason, if getting the latest attribute makes sense since in case -# of encrypted and hashed attributes, there is no name. 
-def testLatestAttrIsReceived( - nodeSet, - addedTrustAnchor, - trustAnchorWallet, - looper, - trustAnchor, - userIdA): - attr1 = json.dumps({'name': 'Mario'}) - attrib = Attribute(name='name', - origin=trustAnchorWallet.defaultId, - value=attr1, - dest=userIdA, - ledgerStore=LedgerStore.RAW) - addAttributeAndCheck(looper, trustAnchor, trustAnchorWallet, attrib) - assert attr1 in [ - a.value for a in trustAnchorWallet.getAttributesForNym(userIdA)] - - attr2 = json.dumps({'name': 'Luigi'}) - attrib = Attribute(name='name', - origin=trustAnchorWallet.defaultId, - value=attr2, - dest=userIdA, - ledgerStore=LedgerStore.RAW) - addAttributeAndCheck(looper, trustAnchor, trustAnchorWallet, attrib) - logger.debug( - [a.value for a in trustAnchorWallet.getAttributesForNym(userIdA)]) - assert attr2 in [a.value for a in - trustAnchorWallet.getAttributesForNym(userIdA)] - - -@pytest.mark.skip(reason="SOV-561. Test not implemented") -def testGetTxnsNoSeqNo(): - """ - Test GET_TXNS from client and do not provide any seqNo to fetch from - """ - raise NotImplementedError - - -@pytest.mark.skip(reason="SOV-560. Come back to it later since " - "requestPendingTxns move to wallet") -def testGetTxnsSeqNo(nodeSet, addedTrustAnchor, tdirWithClientPoolTxns, - trustAnchorWallet, looper): - """ - Test GET_TXNS from client and provide seqNo to fetch from - """ - trustAnchor = genTestClient(nodeSet, tmpdir=tdirWithClientPoolTxns, usePoolLedger=True) - - looper.add(trustAnchor) - looper.run(trustAnchor.ensureConnectedToNodes()) - - def chk(): - assert trustAnchor.spylog.count( - trustAnchor.requestPendingTxns.__name__) > 0 - - # TODO choose or create timeout in 'waits' on this case. 
- looper.run(eventually(chk, retryWait=1, timeout=3)) - - -def testNonTrustAnchoredNymCanDoGetNym(nodeSet, addedTrustAnchor, - trustAnchorWallet, tdirWithClientPoolTxns, looper): - signer = DidSigner() - someClient, _ = genTestClient(nodeSet, tmpdir=tdirWithClientPoolTxns, usePoolLedger=True) - wallet = Wallet(someClient.name) - wallet.addIdentifier(signer=signer) - someClient.registerObserver(wallet.handleIncomingReply) - looper.add(someClient) - looper.run(someClient.ensureConnectedToNodes()) - needle = trustAnchorWallet.defaultId - makeGetNymRequest(someClient, wallet, needle) - timeout = waits.expectedTransactionExecutionTime(len(nodeSet)) - looper.run(eventually(someClient.hasNym, needle, - retryWait=1, timeout=timeout)) - - -def test_user_add_attrs_for_herself( - nodeSet, - looper, - userClientA, - userWalletA, - userIdA, - trustAnchor, - addedTrustAnchor, - attributeData): - createNym(looper, - userWalletA.defaultId, - trustAnchor, - addedTrustAnchor, - role=None, - verkey=userWalletA.getVerkey()) - - attr1 = json.dumps({'age': "25"}) - attrib = Attribute(name='test4 attribute', - origin=userIdA, - value=attr1, - dest=userIdA, - ledgerStore=LedgerStore.RAW) - addAttributeAndCheck(looper, userClientA, userWalletA, attrib) - - -@pytest.mark.skip(reason="INDY-896 ATTR cannot be added without dest") -def test_attr_with_no_dest_added(nodeSet, tdirWithClientPoolTxns, looper, - trustAnchor, addedTrustAnchor, attributeData): - user_wallet = Wallet() - signer = DidSigner() - user_wallet.addIdentifier(signer=signer) - - client, _ = genTestClient(nodeSet, tmpdir=tdirWithClientPoolTxns, usePoolLedger=True) - client.registerObserver(user_wallet.handleIncomingReply) - looper.add(client) - looper.run(client.ensureConnectedToNodes()) - makePendingTxnsRequest(client, user_wallet) - - createNym(looper, - user_wallet.defaultId, - trustAnchor, - addedTrustAnchor, - role=None, - verkey=user_wallet.getVerkey()) - - attr1 = json.dumps({'age': "24"}) - attrib = 
Attribute(name='test4 attribute', - origin=user_wallet.defaultId, - value=attr1, - dest=None, - ledgerStore=LedgerStore.RAW) - addAttributeAndCheck(looper, client, user_wallet, attrib) diff --git a/indy_client/test/training/__init__.py b/indy_client/test/training/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/indy_client/test/training/getting_started.py b/indy_client/test/training/getting_started.py deleted file mode 100644 index 7c6384e85..000000000 --- a/indy_client/test/training/getting_started.py +++ /dev/null @@ -1,94 +0,0 @@ -import sys - -try: - from indy_client import * -except ImportError as e: - print("Indy Client is required for this guild, " - "see doc for installing Indy Client.", file=sys.stderr) - print(str(e), file=sys.stderr) - sys.exit(-1) - -try: - from indy_node import * -except ImportError as e: - print("Indy Node is required for this guild, " - "see doc for installing Indy Node.", file=sys.stderr) - print(str(e), file=sys.stderr) - sys.exit(-1) - -from indy_client.test.agent.acme import create_acme, bootstrap_acme -from indy_client.test.agent.faber import create_faber, bootstrap_faber -from indy_client.test.agent.thrift import create_thrift, bootstrap_thrift -from indy_common.constants import TRUST_ANCHOR -from indy_common.identity import Identity - -from indy_common.config_util import getConfig - -from plenum.common.plugin_helper import loadPlugins -from indy_client.cli.cli import IndyCli -from indy_node.pool.local_pool import create_local_pool - - -def demo_start_agents(pool, looper, b_dir): - demo_start_agent(b_dir, create_faber, bootstrap_faber, - pool.create_client(5500), looper, pool.steward_agent()) - - demo_start_agent(b_dir, create_acme, bootstrap_acme, - pool.create_client(5501), looper, pool.steward_agent()) - - demo_start_agent(b_dir, create_thrift, bootstrap_thrift, - pool.create_client(5502), looper, pool.steward_agent()) - - -def demo_start_agent(b_dir, create_func, bootstrap_func, - client, looper, 
steward): - looper.runFor(2) - agent = create_func(base_dir_path=b_dir, client=client) - - steward.publish_trust_anchor(Identity(identifier=agent.wallet.defaultId, - verkey=agent.wallet.getVerkey( - agent.wallet.defaultId), - role=TRUST_ANCHOR)) - looper.runFor(4) - - raw = '{"endpoint": {"ha": "127.0.0.1:' + str(agent.port) + '"}}' - endpointAttrib = agent.wallet.build_attrib(agent.wallet.defaultId, raw=raw) - agent.publish_trust_anchor_attribute(endpointAttrib) - - looper.runFor(4) - - looper.add(agent) - - looper.runFor(2) - - looper.run(bootstrap_func(agent)) - - -def main(): - config = getConfig() - base_dir = config.CLI_BASE_DIR - if not os.path.exists(base_dir): - os.makedirs(base_dir) - loadPlugins(base_dir) - - pool = create_local_pool(base_dir) - - demo_start_agents(pool, pool, pool.base_dir) - - curDir = os.getcwd() - logFilePath = os.path.join(curDir, config.logFilePath) - - cli = IndyCli(looper=pool, - basedirpath=pool.base_dir, - logFileName=logFilePath, - withNode=False) - - pool.run(cli.shell()) - - -def start_getting_started(): - main() - - -if __name__ == "__main__": - main() diff --git a/indy_client/test/training/getting_started_future.py b/indy_client/test/training/getting_started_future.py deleted file mode 100644 index c51bec8f1..000000000 --- a/indy_client/test/training/getting_started_future.py +++ /dev/null @@ -1,251 +0,0 @@ -# Test for required installed modules -import sys - -from stp_core.loop.eventually import eventually - -try: - from indy_client import * -except ImportError as e: - print("Indy Client is required for this guild, " - "see doc for installing Indy Client.", file=sys.stderr) - print(str(e), file=sys.stderr) - sys.exit(-1) - -try: - from indy_node import * -except ImportError as e: - print("Indy Node is required for this guild, " - "see doc for installing Indy Node.", file=sys.stderr) - print(str(e), file=sys.stderr) - sys.exit(-1) - -from indy_client.test.agent.acme import create_acme, bootstrap_acme, ACME_VERKEY, ACME_ID 
-from indy_client.test.agent.faber import create_faber, bootstrap_faber, FABER_VERKEY, FABER_ID -from indy_client.test.agent.thrift import create_thrift, bootstrap_thrift, THRIFT_VERKEY, THRIFT_ID -from indy_common.constants import TRUST_ANCHOR -from indy_common.identity import Identity - -# noinspection PyUnresolvedReferences -from indy_node.pool.local_pool import create_local_pool -# noinspection PyUnresolvedReferences -from indy_client.agent.walleted_agent import WalletedAgent -# noinspection PyUnresolvedReferences -from indy_client.client.wallet.wallet import Wallet -# noinspection PyUnresolvedReferences - -from logging import Formatter -from stp_core.common.log import Logger -from plenum.config import logFormat - -ignored_files = [ - 'node.py', - 'stacked.py', - 'zstack.py', - 'network_interface.py', - 'primary_elector.py', - 'replica.py', - 'propagator.py', - 'upgrader.py', - 'plugin_loader.py'] - -log_msg = [] - -LOG_FORMAT = Formatter(fmt=logFormat, style="{") - - -def out(record, extra_cli_value=None): - if record.filename not in ignored_files: - msg = LOG_FORMAT.format(record) - print(msg) - log_msg.append(msg) - - -def demo_setup_logging(base_dir): - Logger().enableCliLogging(out, override_tags={}) - - -def demo_start_agents(pool, looper, base_dir): - demo_start_agent(base_dir, create_faber, bootstrap_faber, - pool.create_client(5500), looper, pool.steward_agent()) - - demo_start_agent(base_dir, create_acme, bootstrap_acme, - pool.create_client(5501), looper, pool.steward_agent()) - - demo_start_agent(base_dir, create_thrift, bootstrap_thrift, - pool.create_client(5502), looper, pool.steward_agent()) - - -def demo_start_agent(base_dir, create_func, bootstrap_func, - client, looper, steward): - looper.runFor(2) - agent = create_func(base_dir_path=base_dir, client=client) - - steward.publish_trust_anchor(Identity(identifier=agent.wallet.defaultId, - verkey=agent.wallet.getVerkey( - agent.wallet.defaultId), - role=TRUST_ANCHOR)) - looper.runFor(4) - - raw = 
'{"endpoint": {"ha": "127.0.0.1:' + str(agent.port) + '"}}' - endpointAttrib = agent.wallet.build_attrib(agent.wallet.defaultId, raw=raw) - agent.publish_trust_anchor_attribute(endpointAttrib) - - looper.runFor(4) - - looper.add(agent) - - looper.runFor(2) - - looper.run(bootstrap_func(agent)) - - -def demo_wait_for_proof(looper, proof): - search_msg = "Proof \"{}\"".format(proof.name) - _wait_for(looper, _wait_for_log_msg, *[search_msg]) - - -def demo_wait_for_ping(looper): - search_msg = "_handlePong" - _wait_for(looper, _wait_for_log_msg, *[search_msg]) - - -def _wait_for_log_msg(search_msg): - for msg in log_msg: - if search_msg in msg: - return - - assert False - - -def demo_wait_for_claim_available(looper, link, claim_name): - def _(): - claim = link.find_available_claim(name=claim_name) - assert claim - return claim - - _wait_for(looper, _) - - -def demo_wait_for_claim_received(looper, agent, claim_name): - async def _(): - claims = await agent.prover.wallet.getAllClaimsSignatures() - assert len(claims) > 0 - for schema_key, claims in claims.items(): - if schema_key.name == claim_name: - return claims - - assert False - - _wait_for(looper, _) - - -def demo_wait_for_claim_attrs_received(looper, agent, claim_name): - async def _(): - claims = await agent.prover.wallet.getAllClaimsAttributes() - assert len(claims) > 0 - for schema_key, claims in claims.items(): - if schema_key.name == claim_name: - return claims - - assert False - - _wait_for(looper, _) - - -def demo_wait_for_sync(looper, link): - def _(): - last_sync = link.linkLastSynced - assert last_sync - return last_sync - - _wait_for(looper, _) - - -def demo_wait_for_accept(looper, link): - def _(): - assert link.isAccepted - return link.isAccepted - - _wait_for(looper, _) - - -def _wait_for(looper, func, *args, retry_wait=.1, timeout=20): - return looper.run(eventually( - func, *args, retryWait=retry_wait, timeout=timeout)) - - -FABER_INVITE = """ -{ - "connection-request": { - "name": "Faber College", 
- "identifier": "%s", - "verkey": "%s", - "nonce": "b1134a647eb818069c089e7694f63e6d", - "endpoint": "127.0.0.1:5555" - }, - "sig": "4QKqkwv9gXmc3Sw7YFkGm2vdF6ViZz9FKZcNJGh6pjnjgBXRqZ17Sk8bUDSb6hsXHoPxrzq2F51eDn1DKAaCzhqP" -}""" % (FABER_ID, FABER_VERKEY) - -THRIFT_INVITE = """ -{ - "connection-request": { - "name": "Thrift Bank", - "identifier": "%s", - "verkey": "%s", - "nonce": "77fbf9dc8c8e6acde33de98c6d747b28c", - "endpoint": "127.0.0.1:7777" - }, - "proof-requests": [{ - "name": "Loan-Application-Basic", - "version": "0.1", - "attributes": { - "salary_bracket": "string", - "employee_status": "string" - }, - "verifiableAttributes": ["salary_bracket", "employee_status"] - }, { - "name": "Loan-Application-KYC", - "version": "0.1", - "attributes": { - "first_name": "string", - "last_name": "string", - "ssn": "string" - }, - "verifiableAttributes": ["first_name", "last_name", "ssn"] - }, { - "name": "Name-Proof", - "version": "0.1", - "attributes": { - "first_name": "string", - "last_name": "string" - }, - "verifiableAttributes": ["first_name", "last_name"] - }], - "sig": "D1vU5fbtJbqWKdCoVJgqHBLLhh5CYspikuEXdnBVVyCnLHiYC9ZsZrDWpz3GkFFGvfC4RQ4kuB64vUFLo3F7Xk6" -} -""" % (THRIFT_ID, THRIFT_VERKEY) - -ACME_INVITE = """ -{ - "connection-request": { - "name": "Acme Corp", - "identifier": "%s", - "verkey": "%s", - "nonce": "57fbf9dc8c8e6acde33de98c6d747b28c", - "endpoint": "127.0.0.1:6666" - }, - "proof-requests": [{ - "name": "Job-Application", - "version": "0.2", - "attributes": { - "first_name": "string", - "last_name": "string", - "phone_number": "string", - "degree": "string", - "status": "string", - "ssn": "string" - }, - "verifiableAttributes": ["degree", "status", "ssn"] - }], - "sig": "sdf" -}""" % (ACME_ID, ACME_VERKEY) diff --git a/indy_client/test/training/test_getting_started_guide.py b/indy_client/test/training/test_getting_started_guide.py deleted file mode 100644 index 1914aa987..000000000 --- a/indy_client/test/training/test_getting_started_guide.py 
+++ /dev/null @@ -1,173 +0,0 @@ -import pytest - -from indy_client.test.training.getting_started_future import * - -# noinspection PyUnresolvedReferences -from indy_node.test.conftest import tconf - - -def getting_started(base_dir=None): - #################################### - # Setup - #################################### - - if base_dir is None: - base_dir = TemporaryDirectory().name - - demo_setup_logging(base_dir) - - pool = create_local_pool(base_dir) - demo_start_agents(pool, pool, base_dir) - # ################################### - # Alice's Wallet - # ################################### - - alice_agent = WalletedAgent(name="Alice", - basedirpath=base_dir, - client=pool.create_client(5403), - wallet=Wallet(), - port=8786) - alice_agent.new_identifier() - - pool.add(alice_agent) - - pool.runFor(1) - - #################################### - # Faber Invitation - #################################### - - print(FABER_INVITE) - - link_to_faber = alice_agent.load_request_str(FABER_INVITE) - - print(link_to_faber) - - alice_agent.sync(link_to_faber.name) - - demo_wait_for_sync(pool, link_to_faber) - - print(link_to_faber) - - alice_agent.accept_request(link_to_faber) - - demo_wait_for_accept(pool, link_to_faber) - - print(link_to_faber) - - alice_agent.sendPing("Faber College") - - demo_wait_for_ping(pool) - - #################################### - # Transcription Claim - #################################### - - demo_wait_for_claim_available(pool, link_to_faber, 'Transcript') - claim_to_request = link_to_faber.find_available_claim(name='Transcript') - - print(claim_to_request) - - pool.run(alice_agent.send_claim(link_to_faber, claim_to_request)) - - demo_wait_for_claim_attrs_received(pool, alice_agent, 'Transcript') - - claims = pool.run(alice_agent.prover.wallet.getAllClaimsAttributes()) - - print(claims) - - #################################### - # Acme Invitation - #################################### - - print(ACME_INVITE) - link_to_acme = 
alice_agent.load_request_str(ACME_INVITE) - - print(link_to_acme) - - alice_agent.sync(link_to_acme.name) - - demo_wait_for_sync(pool, link_to_acme) - - print(link_to_acme) - - alice_agent.accept_request(link_to_acme) - - demo_wait_for_accept(pool, link_to_acme) - - print(link_to_acme) - - job_application_request = link_to_acme.find_proof_request( - name='Job-Application') - - print(job_application_request) - - alice_agent.sendProof(link_to_acme, job_application_request) - - #################################### - # Job-Certificate Claim - #################################### - - demo_wait_for_claim_available(pool, link_to_acme, 'Job-Certificate') - - print(link_to_acme) - - job_certificate = link_to_acme.find_available_claim(name='Job-Certificate') - - print(job_certificate) - - pool.run(alice_agent.send_claim(link_to_acme, job_certificate)) - - demo_wait_for_claim_attrs_received(pool, alice_agent, 'Job-Certificate') - - claims = pool.run(alice_agent.prover.wallet.getAllClaimsAttributes()) - - print(claims) - - #################################### - # Thrift Invitation - #################################### - - link_to_thrift = alice_agent.load_request_str(THRIFT_INVITE) - - print(link_to_thrift) - - alice_agent.sync(link_to_thrift.name) - - demo_wait_for_sync(pool, link_to_thrift) - - print(link_to_thrift) - - alice_agent.accept_request(link_to_thrift) - - demo_wait_for_accept(pool, link_to_thrift) - - print(link_to_thrift) - - #################################### - # Proof to Thrift - #################################### - - load_basic_request = link_to_thrift.find_proof_request( - name='Loan-Application-Basic') - - print(load_basic_request) - - alice_agent.sendProof(link_to_thrift, load_basic_request) - - demo_wait_for_proof(pool, load_basic_request) - - ####### - - load_kyc_request = link_to_thrift.find_proof_request( - name='Loan-Application-KYC') - - print(load_kyc_request) - - alice_agent.sendProof(link_to_thrift, load_kyc_request) - - 
demo_wait_for_proof(pool, load_kyc_request) - - -if __name__ == "__main__": - getting_started() diff --git a/indy_client/test/waits.py b/indy_client/test/waits.py deleted file mode 100644 index d47d6d7f0..000000000 --- a/indy_client/test/waits.py +++ /dev/null @@ -1,24 +0,0 @@ - - -def expectedAgentConnected(): - return 10 - - -def expectedAgentPing(): - return 5 - - -def expectedClaimsReceived(): - return 20 - - -def expectedTranscriptWritten(): - return 10 - - -def expectedJobCertWritten(): - return 10 - - -def expected_accept_request(): - return 10 diff --git a/indy_client/utils/__init__.py b/indy_client/utils/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/indy_client/utils/migration/__init__.py b/indy_client/utils/migration/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/indy_client/utils/migration/ancient_migration.py b/indy_client/utils/migration/ancient_migration.py deleted file mode 100644 index 89b92307e..000000000 --- a/indy_client/utils/migration/ancient_migration.py +++ /dev/null @@ -1,27 +0,0 @@ -import os - -from ledger.genesis_txn.genesis_txn_file_util import genesis_txn_file - -_BASE_DIR = os.path.expanduser('~/.sovrin') -_NETWORKS = ['live', 'local', 'sandbox'] - - -def _update_wallets_dir_name_if_outdated(): - old_named_path = os.path.expanduser(os.path.join(_BASE_DIR, 'keyrings')) - new_named_path = os.path.expanduser(os.path.join(_BASE_DIR, 'wallets')) - if not os.path.exists(new_named_path) and os.path.isdir(old_named_path): - os.rename(old_named_path, new_named_path) - - -def _update_genesis_txn_file_name_if_outdated(transaction_file): - old_named_path = os.path.join(_BASE_DIR, transaction_file) - new_named_path = os.path.join(_BASE_DIR, genesis_txn_file(transaction_file)) - if not os.path.exists(new_named_path) and os.path.isfile(old_named_path): - os.rename(old_named_path, new_named_path) - - -def migrate(): - _update_wallets_dir_name_if_outdated() - for network in _NETWORKS: - 
_update_genesis_txn_file_name_if_outdated( - 'pool_transactions_{}'.format(network)) diff --git a/indy_client/utils/migration/combined_migration.py b/indy_client/utils/migration/combined_migration.py deleted file mode 100644 index d375d5364..000000000 --- a/indy_client/utils/migration/combined_migration.py +++ /dev/null @@ -1,82 +0,0 @@ -import os - -import shutil - -from indy_client.utils.migration import ancient_migration -from indy_client.utils.migration import multi_network_migration -from indy_client.utils.migration import rebranding_migration - -_HOME_DIR = os.path.expanduser('~') -_LEGACY_BASE_DIR = os.path.expanduser('~/.sovrin') -_LEGACY_BASE_BACKUP_DIR = os.path.expanduser('~/.sovrin.backup') -_TRANS_BASE_DIR = os.path.expanduser('~/.indy') -_CLI_BASE_DIR = os.path.expanduser('~/.indy-cli') -_CLI_BASE_BACKUP_DIR = os.path.expanduser('~/.indy-cli.backup') - - -def is_cli_base_dir_untouched(): - return not os.path.exists(os.path.join(_CLI_BASE_DIR, 'wallets')) - - -def legacy_base_dir_exists(): - return os.path.isdir(_LEGACY_BASE_DIR) - - -def _remove_path_if_exists(path): - if os.path.isdir(path): - shutil.rmtree(path) - elif os.path.isfile(path): - os.remove(path) - - -def _try_remove_path(path): - try: - if os.path.isdir(path): - shutil.rmtree(path, ignore_errors=True) - elif os.path.isfile(path): - os.remove(path) - except Exception as e: - print(e) - - -def migrate(): - _remove_path_if_exists(_LEGACY_BASE_BACKUP_DIR) - _remove_path_if_exists(_CLI_BASE_BACKUP_DIR) - _remove_path_if_exists(_TRANS_BASE_DIR) - _remove_path_if_exists(os.path.join(_HOME_DIR, '.indy-cli-history')) - - try: - os.rename(_LEGACY_BASE_DIR, _LEGACY_BASE_BACKUP_DIR) - shutil.copytree(_LEGACY_BASE_BACKUP_DIR, _LEGACY_BASE_DIR) - - if os.path.exists(_CLI_BASE_DIR): - os.rename(_CLI_BASE_DIR, _CLI_BASE_BACKUP_DIR) - - ancient_migration.migrate() - rebranding_migration.migrate() - multi_network_migration.migrate() - - except Exception as e: - if os.path.exists(_CLI_BASE_BACKUP_DIR): - 
_remove_path_if_exists(_CLI_BASE_DIR) - os.rename(_CLI_BASE_BACKUP_DIR, _CLI_BASE_DIR) - - if os.path.exists(os.path.join(_HOME_DIR, '.indy-cli-history')): - os.rename(os.path.join(_HOME_DIR, '.indy-cli-history'), - os.path.join(_HOME_DIR, '.sovrin-cli-history')) - - raise e - - finally: - # We restore .sovrin from backup anyway to preserve - # untouched pre-migration data because the ancient migration - # makes changes right in .sovrin - if os.path.exists(_LEGACY_BASE_BACKUP_DIR): - _remove_path_if_exists(_LEGACY_BASE_DIR) - os.rename(_LEGACY_BASE_BACKUP_DIR, _LEGACY_BASE_DIR) - - # We should remove the transitional base directory (.indy) anyway - _try_remove_path(_TRANS_BASE_DIR) - - # Since the migration has succeeded, we remove .indy-cli.backup - _try_remove_path(_CLI_BASE_BACKUP_DIR) diff --git a/indy_client/utils/migration/multi_network_migration.py b/indy_client/utils/migration/multi_network_migration.py deleted file mode 100644 index 83d94a076..000000000 --- a/indy_client/utils/migration/multi_network_migration.py +++ /dev/null @@ -1,122 +0,0 @@ -import os - -import shutil -from importlib.util import module_from_spec, spec_from_file_location - -_OLD_BASE_DIR = os.path.expanduser('~/.indy') -_CLI_BASE_DIR = os.path.expanduser('~/.indy-cli') -_CONFIG = 'indy_config.py' -_WALLETS = 'wallets' -_NETWORKS = 'networks' -_KEYS = 'keys' -_DATA = 'data' -_CLIENTS = 'clients' -_LEGACY_NETWORKS = ['live', 'local', 'sandbox'] - - -# def _get_used_network_name(): -# old_config_path = os.path.join(_OLD_BASE_DIR, _CONFIG) -# spec = spec_from_file_location('old_user_overrides', old_config_path) -# old_user_overrides = module_from_spec(spec) -# spec.loader.exec_module(old_user_overrides) -# -# if hasattr(old_user_overrides, 'poolTransactionsFile'): -# network_name = old_user_overrides.poolTransactionsFile.split('_')[-1] -# if network_name in _LEGACY_NETWORKS: -# return network_name -# -# if hasattr(old_user_overrides, 'current_env') \ -# and old_user_overrides.current_env != 
'test': -# network_name = old_user_overrides.current_env -# if network_name in _LEGACY_NETWORKS: -# return network_name -# -# return 'sandbox' - - -def _migrate_config(): - old_config_path = os.path.join(_OLD_BASE_DIR, _CONFIG) - new_config_path = os.path.join(_CLI_BASE_DIR, _CONFIG) - - if os.path.isfile(old_config_path): - with open(old_config_path, 'r') as old_config_file, \ - open(new_config_path, 'w') as new_config_file: - for line in old_config_file: - if not line.startswith('current_env') \ - and not line.startswith('poolTransactionsFile') \ - and not line.startswith('domainTransactionsFile'): - new_config_file.write(line) - - -def _migrate_genesis_txn_files(): - for network in _LEGACY_NETWORKS: - old_genesis_pool_txn_path = os.path.join( - _OLD_BASE_DIR, 'pool_transactions_{}_genesis'.format(network)) - new_genesis_pool_txn_path = os.path.join( - _CLI_BASE_DIR, _NETWORKS, network, 'pool_transactions_genesis') - - if os.path.exists(old_genesis_pool_txn_path): - os.makedirs(os.path.dirname(new_genesis_pool_txn_path), - exist_ok=True) - shutil.copyfile(old_genesis_pool_txn_path, - new_genesis_pool_txn_path) - - -def _migrate_wallets(): - old_wallets_dir = os.path.join(_OLD_BASE_DIR, _WALLETS) - new_wallets_dir = os.path.join(_CLI_BASE_DIR, _WALLETS) - - if os.path.isdir(old_wallets_dir): - shutil.copytree(old_wallets_dir, new_wallets_dir) - - if os.path.exists(os.path.join(new_wallets_dir, 'test')): - os.rename(os.path.join(new_wallets_dir, 'test'), - os.path.join(new_wallets_dir, 'sandbox')) - - -# def _migrate_keys(network): -# old_data_dir = os.path.join(_OLD_BASE_DIR, _DATA, _CLIENTS) -# -# for client in os.listdir(old_data_dir): -# old_client_keys_dir = os.path.join(_OLD_BASE_DIR, client) -# if os.path.isdir(old_client_keys_dir): -# new_client_keys_dir = os.path.join( -# _CLI_BASE_DIR, _NETWORKS, network, _KEYS, client) -# shutil.copytree(old_client_keys_dir, new_client_keys_dir) -# -# -# def _migrate_data(network): -# old_data_dir = 
os.path.join(_OLD_BASE_DIR, _DATA, _CLIENTS) -# -# for client in os.listdir(old_data_dir): -# old_client_data_dir = os.path.join(old_data_dir, client) -# if os.path.isdir(old_client_data_dir): -# new_client_data_dir = os.path.join( -# _CLI_BASE_DIR, _NETWORKS, network, _DATA, _CLIENTS, client) -# shutil.copytree(old_client_data_dir, new_client_data_dir) -# -# sole_pool_txn_dir = os.path.join(new_client_data_dir, -# 'pool_transactions') -# for specific_network in _LEGACY_NETWORKS: -# specific_pool_txn_dir = \ -# os.path.join(new_client_data_dir, -# 'pool_transactions_{}'.format(specific_network)) -# if os.path.isdir(specific_pool_txn_dir): -# if specific_network == network: -# os.rename(specific_pool_txn_dir, sole_pool_txn_dir) -# else: -# shutil.rmtree(specific_pool_txn_dir) - - -def migrate(): - os.makedirs(_CLI_BASE_DIR) - - # Used network cannot be determined for client in this way - # network = _get_used_network_name() - - _migrate_config() - _migrate_genesis_txn_files() - _migrate_wallets() - # Migration of keys and data is superfluous for client - # _migrate_keys(network) - # _migrate_data(network) diff --git a/indy_client/utils/migration/rebranding_migration.py b/indy_client/utils/migration/rebranding_migration.py deleted file mode 100644 index 98a029d3d..000000000 --- a/indy_client/utils/migration/rebranding_migration.py +++ /dev/null @@ -1,34 +0,0 @@ -import os - -import shutil - -_HOME_DIR = os.path.expanduser('~') -_LEGACY_BASE_DIR = os.path.expanduser('~/.sovrin') -_BASE_DIR = os.path.expanduser('~/.indy') - - -def _rename_if_exists(dir, old_name, new_name): - if os.path.exists(os.path.join(dir, old_name)): - os.rename(os.path.join(dir, old_name), - os.path.join(dir, new_name)) - - -def _rename_request_files(requests_dir): - for relative_name in os.listdir(requests_dir): - absolute_name = os.path.join(requests_dir, relative_name) - if os.path.isfile(absolute_name) \ - and absolute_name.endswith('.sovrin'): - os.rename(absolute_name, - 
absolute_name[:-len('.sovrin')] + '.indy') - - -def migrate(): - shutil.copytree(_LEGACY_BASE_DIR, _BASE_DIR) - - _rename_if_exists(_BASE_DIR, '.sovrin', '.indy') - _rename_if_exists(_BASE_DIR, 'sovrin_config.py', 'indy_config.py') - - if os.path.isdir(os.path.join(_BASE_DIR, 'sample')): - _rename_request_files(os.path.join(_BASE_DIR, 'sample')) - - _rename_if_exists(_HOME_DIR, '.sovrin-cli-history', '.indy-cli-history') diff --git a/indy_client/utils/user_scenarios.py b/indy_client/utils/user_scenarios.py deleted file mode 100644 index acb751803..000000000 --- a/indy_client/utils/user_scenarios.py +++ /dev/null @@ -1,252 +0,0 @@ -import json -import random -from abc import abstractmethod, ABCMeta -from collections import namedtuple -from functools import partial - -from stp_core.common.log import getlogger, Logger -from stp_core.crypto.util import randomSeed -from stp_core.loop.eventually import eventually -from stp_core.loop.looper import Looper -from stp_core.network.port_dispenser import genHa -from stp_core.types import HA - -from plenum.common.constants import TXN_TYPE, TARGET_NYM, VERKEY, DATA -from plenum.common.signer_simple import SimpleSigner -from indy_client.client.client import Client -from indy_client.client.wallet.wallet import Wallet -from indy_common.constants import NYM, GET_NYM -from indy_common.config_util import getConfig -from indy_common.util import get_reply_if_confirmed - -logger = getlogger() - - -class UserScenario(metaclass=ABCMeta): - def __init__(self, seed, logFileName=None): - if logFileName: - Logger().enableFileLogging(logFileName) - - self._seed = seed - - self._client = None - self._wallet = None - - self._looper = None - - @property - def identifier(self): - if self._wallet: - return self._wallet.defaultId - else: - return None - - @property - def verkey(self): - if self._wallet: - return self._wallet.getVerkey() - else: - return None - - @classmethod - def runInstance(cls, *args, **kwargs): - cls(*args, **kwargs).run() - - def 
run(self): - try: - self._createClientAndWallet() - - self._looper = Looper(debug=getConfig().LOOPER_DEBUG) - try: - self._startClient() - self.do() - finally: - self._looper.shutdownSync() - self._looper = None - - except BaseException as ex: - logger.exception( - "User scenario throws out exception: {}".format(ex), - exc_info=ex) - raise ex - - @abstractmethod - def do(self): - pass - - def performOperation(self, op): - req = self._wallet.signOp(op) - self._client.submitReqs(req) - - def getRequestResult(reqKey): - reply, error = get_reply_if_confirmed(self._client, *reqKey) - if reply is None and error is None: - raise Exception("Request has not been completed yet") - else: - return reply, error - - reply, error = self._looper.run(eventually(partial(getRequestResult, - (req.identifier, - req.reqId)), - retryWait=.5, - timeout=5)) - assert not error, error - - if reply[DATA]: - result = json.loads(reply[DATA]) - else: - result = None - - return result - - def generateNewSigner(self): - assert self.identifier - return SimpleSigner(identifier=self.identifier) - - def changeSigner(self, newSigner): - assert newSigner.identifier == self.identifier - self._wallet.updateSigner(self.identifier, newSigner) - logger.info("Changed signer. 
New verkey: {}".format(self.verkey)) - - def _createClientAndWallet(self): - signer = SimpleSigner(seed=self._seed) - - port = genHa()[1] - ha = HA('0.0.0.0', port) - self._client = Client(name=signer.identifier, ha=ha) - - self._wallet = Wallet(name=signer.identifier) - self._wallet.addIdentifier(signer=signer) - - logger.info("Identifier: {}".format(self.identifier)) - logger.info("Signer's verkey: {}".format(self.verkey)) - - def _startClient(self): - self._looper.add(self._client) - - def ensureConnectedToAll(): - connectedNodes = self._client.nodestack.connecteds - connectedNodesNum = len(connectedNodes) - totalNodes = len(self._client.nodeReg) - - logger.info( - "Connected {} / {} nodes".format(connectedNodesNum, totalNodes)) - for node in connectedNodes: - logger.info(" {}".format(node)) - - if connectedNodesNum == 0: - raise Exception("Not connected to any") - elif connectedNodesNum < totalNodes * 0.8: - raise Exception("Not connected fully") - else: - return True - - self._looper.run(eventually(ensureConnectedToAll, - retryWait=.5, - timeout=5)) - - -class NymsCreationScenario(UserScenario): - def __init__(self, seed, nymsIdsAndVerkeys, logFileName=None): - super().__init__(seed, logFileName) - self.nymsIdsAndVerkeys = nymsIdsAndVerkeys - - def do(self): - for id, verkey in self.nymsIdsAndVerkeys: - self.setNym(id, verkey) - - def setNym(self, dest, verkey): - logger.info("Setting nym: dest={}, verkey={}...".format(dest, verkey)) - self.performOperation({ - TXN_TYPE: NYM, - TARGET_NYM: dest, - VERKEY: verkey - }) - logger.info("Nym set") - - -class KeyRotationAndReadScenario(UserScenario): - def __init__(self, seed, iterations, logFileName=None): - super().__init__(seed, logFileName) - self.iterations = iterations - - def do(self): - for i in range(self.iterations): - newSigner = self.generateNewSigner() - self.setMyVerkey(newSigner.verkey) - newVerkey = self.getMyVerkey() - - assert newVerkey == newSigner.verkey, \ - "Got wrong verkey: expected was {}, 
actual was {}".format( - newSigner.verkey, newVerkey) - - self.changeSigner(newSigner) - - def setMyVerkey(self, verkey): - logger.info("Setting my verkey to {}...".format(verkey)) - self.performOperation({ - TXN_TYPE: NYM, - TARGET_NYM: self.identifier, - VERKEY: verkey - }) - logger.info("Verkey set") - - def getMyVerkey(self): - logger.info("Getting my verkey...") - result = self.performOperation({ - TXN_TYPE: GET_NYM, - TARGET_NYM: self.identifier - }) - logger.info("Verkey gotten: {}".format(result[VERKEY])) - return result[VERKEY] - - -class KeyRotationScenario(UserScenario): - def __init__(self, seed, iterations, logFileName=None): - super().__init__(seed, logFileName) - self.iterations = iterations - - def do(self): - for i in range(self.iterations): - newSigner = self.generateNewSigner() - self.setMyVerkey(newSigner.verkey) - self.changeSigner(newSigner) - - def setMyVerkey(self, verkey): - logger.info("Setting my verkey to {}...".format(verkey)) - self.performOperation({ - TXN_TYPE: NYM, - TARGET_NYM: self.identifier, - VERKEY: verkey - }) - logger.info("Verkey set") - - -class ForeignKeysReadScenario(UserScenario): - def __init__(self, seed, nymsIds, iterations, logFileName=None): - super().__init__(seed, logFileName) - self.nymsIds = nymsIds - self.iterations = iterations - - def do(self): - for i in range(self.iterations): - id = random.choice(self.nymsIds) - self.getVerkey(id) - # TODO: Add an assertion verifying that the gotten verkey is - # from the expected section of the nym's verkey values history - - def getVerkey(self, dest): - logger.info("Getting verkey of NYM {}...".format(dest)) - result = self.performOperation({ - TXN_TYPE: GET_NYM, - TARGET_NYM: dest - }) - logger.info("Verkey gotten: {}".format(result[VERKEY])) - return result[VERKEY] - - -def generateNymsData(count): - signers = [SimpleSigner(seed=randomSeed()) for i in range(count)] - Nym = namedtuple("Nym", ["seed", "identifier", "verkey"]) - return [Nym(signer.seed, 
signer.identifier, signer.verkey) - for signer in signers] diff --git a/indy_common/config.py b/indy_common/config.py index c591e0d1a..874c3c46b 100644 --- a/indy_common/config.py +++ b/indy_common/config.py @@ -2,11 +2,9 @@ import logging from collections import OrderedDict -from plenum.common.constants import ClientBootStrategy, HS_ROCKSDB, HS_LEVELDB, KeyValueStorageType +from plenum.common.constants import ClientBootStrategy, HS_ROCKSDB, KeyValueStorageType from plenum.config import rocksdb_default_config -from indy_common.constants import Environment - nodeReg = OrderedDict([ ('Alpha', ('127.0.0.1', 9701)), ('Beta', ('127.0.0.1', 9703)), @@ -23,9 +21,6 @@ GENERAL_CONFIG_DIR = '/etc/indy/' -CLI_BASE_DIR = '~/.indy-cli/' -CLI_NETWORK_DIR = os.path.join(CLI_BASE_DIR, 'networks') - GENERAL_CONFIG_FILE = 'indy_config.py' NETWORK_CONFIG_FILE = 'indy_config.py' USER_CONFIG_FILE = 'indy_config.py' @@ -61,27 +56,22 @@ db_attr_db_config = rocksdb_attr_db_config db_idr_cache_db_config = rocksdb_idr_cache_db_config - PluginsToLoad = [] - # TODO: This should be in indy_node's config # File that stores the version of the Node ran the last time it started. (It # might be incorrect sometimes if Node failed to update the file and crashed) lastRunVersionFile = 'last_version' - # File that stores the version of the code to which the update has to be made. # This is used to detect if there was an error while upgrading. Once it has # been found out that there was error while upgrading, then it can be upgraded. 
nextVersionFile = 'next_version' - # Minimum time difference (seconds) between the code update of 2 nodes MinSepBetweenNodeUpgrades = 300 - upgradeLogFile = "upgrade_log" restartLogFile = "restart_log" @@ -93,7 +83,6 @@ controlServiceHost = "127.0.0.1" controlServicePort = "30003" - ''' logging level for agents ''' @@ -109,3 +98,5 @@ UPGRADE_ENTRY = 'indy-node' ANYONE_CAN_WRITE = False + +PACKAGES_TO_HOLD = ['indy-plenum', 'indy-node', 'python3-indy-crypto', 'libindy-crypto'] diff --git a/indy_common/plugin_helper.py b/indy_common/plugin_helper.py deleted file mode 100644 index cdaa94fb7..000000000 --- a/indy_common/plugin_helper.py +++ /dev/null @@ -1,66 +0,0 @@ -import os - - -def writeAnonCredPlugin(plugins_path, reloadTestModules: bool=False): - if not os.path.exists(plugins_path): - os.makedirs(plugins_path) - - initFile = os.path.join(plugins_path, "__init__.py") - if not os.path.exists(initFile): - with open(initFile, "a"): - pass - - anonPluginFilePath = os.path.join(plugins_path, "anoncreds.py") - if not os.path.exists(initFile): - anonPluginContent = "" \ - "import importlib\n" \ - "\n" \ - "import anoncreds.protocol.issuer\n" \ - "import anoncreds.protocol.verifier\n" \ - "import anoncreds.protocol.prover\n" \ - "\n" \ - "import indy_client.anon_creds.issuer\n" \ - "import indy_client.anon_creds.verifier\n"\ - "import indy_client.anon_creds.prover\n" \ - "\n" \ - "Name = \"Anon creds\"\n" \ - "Version = 1.1\n" \ - "IndyVersion = 1.1\n" \ - "\n" \ - "indy_client.anon_creds.issuer.Credential = anoncreds.protocol.types.Credential\n" \ - "indy_client.anon_creds.issuer.AttribType = anoncreds.protocol.types.AttribType\n" \ - "indy_client.anon_creds.issuer.AttribDef = anoncreds.protocol.types.AttribDef\n" \ - "indy_client.anon_creds.issuer.Attribs = anoncreds.protocol.types.Attribs\n" \ - "indy_client.anon_creds.issuer.AttrRepo = anoncreds.protocol.attribute_repo.AttrRepo\n" \ - "indy_client.anon_creds.issuer.InMemoryAttrRepo = 
anoncreds.protocol.attribute_repo.InMemoryAttrRepo\n" \ - "indy_client.anon_creds.issuer.Issuer = anoncreds.protocol.issuer.Issuer\n" \ - "indy_client.anon_creds.prover.Prover = anoncreds.protocol.prover.Prover\n" \ - "indy_client.anon_creds.verifier.Verifier = anoncreds.protocol.verifier.Verifier\n" \ - "indy_client.anon_creds.proof_builder.ProofBuilder = anoncreds.protocol.proof_builder.ProofBuilder\n" \ - "indy_client.anon_creds.proof_builder.Proof = anoncreds.protocol.types.Proof\n" \ - "indy_client.anon_creds.cred_def.CredDef = anoncreds.protocol.credential_definition.CredentialDefinition\n" \ - - modules_to_reload = ["indy_client.cli.cli"] - test_modules_to_reload = [ - "indy_client.test.helper", "indy_client.test.cli.helper", - "indy_client.test.anon_creds.conftest", - "indy_client.test.anon_creds.test_anon_creds", - # "indy_client.test.anon_creds.anon_creds_demo" - ] - - if reloadTestModules: - modules_to_reload.extend(test_modules_to_reload) - - reload_module_code = \ - "reload_modules = " + str(modules_to_reload) + "\n" \ - "for m in reload_modules:\n" \ - " try:\n" \ - " module_obj = importlib.import_module(m)\n" \ - " importlib.reload(module_obj)\n" \ - " except AttributeError as ae:\n" \ - " print(\"Plugin loading failed: module {}, detail: {}\".format(m, str(ae)))\n" \ - "\n" - - anonPluginContent += reload_module_code - with open(anonPluginFilePath, "w") as f: - f.write(anonPluginContent) diff --git a/indy_common/test/conftest.py b/indy_common/test/conftest.py index 0824b05c1..1b608108b 100644 --- a/indy_common/test/conftest.py +++ b/indy_common/test/conftest.py @@ -18,7 +18,6 @@ from plenum.test.conftest import GENERAL_CONFIG_DIR, \ txnPoolNodesLooper, overriddenConfigValues # noqa - logger = getlogger() @@ -74,24 +73,22 @@ def general_conf_tdir_for_func(tdir_for_func): return general_config_dir -def _tconf(general_config, client_temp_dir): +def _tconf(general_config): config = getConfig(general_config_dir=general_config) for k, v in 
overriddenConfigValues.items(): setattr(config, k, v) config.MinSepBetweenNodeUpgrades = 5 - config.CLI_BASE_DIR = client_temp_dir - config.CLI_NETWORK_DIR = os.path.join(config.CLI_BASE_DIR, 'networks') return config @pytest.fixture(scope="module") -def tconf(general_conf_tdir, client_tdir): - return _tconf(general_conf_tdir, client_tdir) +def tconf(general_conf_tdir): + return _tconf(general_conf_tdir) @pytest.fixture() -def tconf_for_func(general_conf_tdir_for_func, client_tdir): - return _tconf(general_conf_tdir_for_func, client_tdir) +def tconf_for_func(general_conf_tdir_for_func): + return _tconf(general_conf_tdir_for_func) @pytest.fixture(scope="module") diff --git a/indy_common/types.py b/indy_common/types.py index 57ff99b99..82f154fed 100644 --- a/indy_common/types.py +++ b/indy_common/types.py @@ -222,7 +222,7 @@ class ClientClaimDefSubmitOperation(MessageValidator): (CLAIM_DEF_SCHEMA_REF, TxnSeqNoField()), (CLAIM_DEF_PUBLIC_KEYS, ClaimDefField()), (CLAIM_DEF_SIGNATURE_TYPE, LimitedLengthStringField(max_length=SIGNATURE_TYPE_FIELD_LIMIT)), - (CLAIM_DEF_TAG, LimitedLengthStringField(max_length=256, optional=True)), + (CLAIM_DEF_TAG, LimitedLengthStringField(max_length=256)), ) diff --git a/indy_node/pool/__init__.py b/indy_node/pool/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/indy_node/pool/local_pool.py b/indy_node/pool/local_pool.py deleted file mode 100644 index d84aac02d..000000000 --- a/indy_node/pool/local_pool.py +++ /dev/null @@ -1,150 +0,0 @@ -import os -import shutil -from collections import deque - -from plenum.common.constants import TYPE, NODE, NYM -from plenum.common.member.steward import Steward -from plenum.common.test_network_setup import TestNetworkSetup -from plenum.common.txn_util import get_type -from plenum.common.util import adict, randomString -from indy_client.agent.walleted_agent import WalletedAgent -from indy_client.client.client import Client -from indy_client.client.wallet.wallet import Wallet 
-from indy_common.config_util import getConfig -from indy_common.init_util import initialize_node_environment -from indy_common.pool.pool import Pool -from indy_common.txn_util import getTxnOrderedFields -from indy_common.config_helper import NodeConfigHelper -from indy_node.server.node import Node -from stp_core.crypto.util import randomSeed -from stp_core.loop.looper import Looper - - -def create_local_pool(node_base_dir, cli_base_dir, config=None, node_size=4): - conf = config or getConfig() - - stewards = [] - node_conf = [] - nodes = [] - genesis_txns = [] - for i in range(node_size): - w = Wallet("steward") - s = Steward(wallet=w) - s.wallet.addIdentifier() - - stewards.append(s) - - node_config_helper = NodeConfigHelper(conf.name, conf, chroot=node_base_dir) - n_config = adict(name='Node' + str(i + 1), - basedirpath=node_config_helper.ledger_dir, - ha=('127.0.0.1', 9700 + (i * 2)), - cliha=('127.0.0.1', 9700 + (i * 2) + 1)) - - n_verkey, n_bls_key, n_bls_key_proof = \ - initialize_node_environment(name=n_config.name, - node_config_helper=node_config_helper, - override_keep=True, - sigseed=randomSeed()) - - s.set_node(n_config, verkey=n_verkey, blskey=n_bls_key, - blsley_proof=n_bls_key_proof) - - node_conf.append(n_config) - - genesis_txns += s.generate_genesis_txns() - - pool = None # LocalPool(genesis_txns, pool_dir, steward=stewards[0]) - - for c in node_conf: - n = Node(**c) - pool.add(n) - nodes.append(n) - - pool.runFor(5) - - return pool - - -class LocalPool(Pool, Looper): - def __init__(self, genesis_txns, base_dir, config=None, - loop=None, steward: Steward = None): - super().__init__(loop=loop) - self.base_dir = base_dir - self.genesis_txns = genesis_txns - self.config = config or getConfig(self.base_dir) - self._generate_genesis_files() - self._steward = steward - - if steward is not None: - self._steward_agent = WalletedAgent(name="steward1", - basedirpath=self.base_dir, - client=self.create_client( - 5005), - wallet=steward.wallet, - port=8781) 
- self.add(self._steward_agent) - - @property - def genesis_transactions(self): - return self.genesis_txns - - def create_client(self, port: int): - return Client(name=randomString(6), - basedirpath=self.base_dir, - ha=('127.0.0.1', port)) - - def steward_agent(self): - return self._steward_agent - - # def setup_local_node(self, name, sigseed, override=True): - # _, verkey = initNodeKeysForBothStacks(name, self.base_dir, sigseed, override) - - def _generate_genesis_files(self): - - pool_txns, domain_txns = self._split_pool_and_domain() - - # TODO make Ledger a context manager - pl = TestNetworkSetup.init_pool_ledger(appendToLedgers=False, - baseDir=self.base_dir, - config=self.config, - envName='test') - pl_lines = self._add_and_stop(pool_txns, pl) - - dl = TestNetworkSetup.init_domain_ledger( - appendToLedgers=False, - baseDir=self.base_dir, - config=self.config, - envName='test', - domainTxnFieldOrder=getTxnOrderedFields()) - dl_lines = self._add_and_stop(domain_txns, dl) - - return adict(pool=adict(lines=pl_lines, - root=pl.root_hash, - size=pl.size), - domain=adict(lines=dl_lines, - root=dl.root_hash, - size=dl.size)) - - def _split_pool_and_domain(self): - pool_txns = deque() - domain_txns = deque() - for txn in self.genesis_txns: - if get_type(txn) in [NODE]: - pool_txns.appendleft(txn) - elif get_type(txn) in [NYM]: - domain_txns.appendleft(txn) - else: - raise NotImplementedError("txn type '{}' not supported") - return pool_txns, domain_txns - - @staticmethod - def _add_and_stop(txns, ledger): - try: - while True: - ledger.add(txns.pop()) - except IndexError: - pass - finally: - ledger.stop() - with open(ledger._transactionLog.db_path) as f: - return f.readlines() diff --git a/indy_node/server/action_req_handler.py b/indy_node/server/action_req_handler.py index d5aa7e128..93e8b6112 100644 --- a/indy_node/server/action_req_handler.py +++ b/indy_node/server/action_req_handler.py @@ -75,7 +75,7 @@ def apply(self, req: Request, cons_time: int = None): result = 
self._generate_action_result(req) result[DATA] = self.info_tool.info result[DATA].update(self.info_tool.memory_profiler) - result[DATA].update(self.info_tool.software_info) + result[DATA].update(self.info_tool._generate_software_info()) result[DATA].update(self.info_tool.extractions) result[DATA].update(self.info_tool.node_disk_size) else: diff --git a/indy_node/server/config_req_handler.py b/indy_node/server/config_req_handler.py index 33ab29062..0048a7150 100644 --- a/indy_node/server/config_req_handler.py +++ b/indy_node/server/config_req_handler.py @@ -63,15 +63,6 @@ def curr_pkt_info(self, pkg_name): return Upgrader.getVersion(), [APP_NAME] return NodeControlUtil.curr_pkt_info(pkg_name) - def get_dependencies(self, pkg_name, version): - base_deps = [APP_NAME, "indy-plenum"] - if pkg_name == APP_NAME: - return base_deps - deps = [] - NodeControlUtil.dep_tree_traverse( - NodeControlUtil.get_deps_tree("{}={}".format(pkg_name, version), base_deps), deps) - return deps - def validate(self, req: Request): status = None operation = req.operation diff --git a/indy_node/server/domain_req_handler.py b/indy_node/server/domain_req_handler.py index 2e3e4e3aa..d572a279c 100644 --- a/indy_node/server/domain_req_handler.py +++ b/indy_node/server/domain_req_handler.py @@ -10,7 +10,7 @@ REVOC_REG_DEF_ID, VALUE, ISSUANCE_BY_DEFAULT, ISSUANCE_ON_DEMAND, TAG, CRED_DEF_ID, \ GET_REVOC_REG_DEF, ID, GET_REVOC_REG, GET_REVOC_REG_DELTA, REVOC_TYPE, \ TIMESTAMP, FROM, TO, ISSUED, REVOKED, STATE_PROOF_FROM, ACCUM_FROM, ACCUM_TO, \ - CLAIM_DEF_SIGNATURE_TYPE, SCHEMA_NAME, SCHEMA_VERSION + CLAIM_DEF_SIGNATURE_TYPE, SCHEMA_NAME, SCHEMA_VERSION, REF from indy_common.req_utils import get_read_schema_name, get_read_schema_version, \ get_read_schema_from, get_write_schema_name, get_write_schema_version, get_read_claim_def_from, \ get_read_claim_def_signature_type, get_read_claim_def_schema_ref, get_read_claim_def_tag @@ -282,6 +282,17 @@ def _validate_schema(self, req: Request): def 
_validate_claim_def(self, req: Request): # we can not add a Claim Def with existent ISSUER_DID # sine a Claim Def needs to be identified by seqNo + ref = req.operation[REF] + try: + txn = self.ledger.getBySeqNo(ref) + except KeyError: + raise InvalidClientRequest(req.identifier, + req.reqId, + "Mentioned seqNo ({}) doesn't exist.".format(ref)) + if txn['txn']['type'] != SCHEMA: + raise InvalidClientRequest(req.identifier, + req.reqId, + "Mentioned seqNo ({}) isn't seqNo of the schema.".format(ref)) try: origin_role = self.idrCache.getRole( req.identifier, isCommitted=False) or None diff --git a/indy_node/server/validator_info_tool.py b/indy_node/server/validator_info_tool.py index a2ebd3082..52900b46a 100644 --- a/indy_node/server/validator_info_tool.py +++ b/indy_node/server/validator_info_tool.py @@ -1,8 +1,11 @@ import importlib +import time +import os from indy_node.__metadata__ import __version__ as node_pgk_version from plenum.server.validator_info_tool import none_on_fail, \ ValidatorNodeInfoTool as PlenumValidatorNodeInfoTool +from plenum.common.constants import POOL_LEDGER_ID, DOMAIN_LEDGER_ID, CONFIG_LEDGER_ID class ValidatorNodeInfoTool(PlenumValidatorNodeInfoTool): @@ -10,25 +13,34 @@ class ValidatorNodeInfoTool(PlenumValidatorNodeInfoTool): @property def info(self): info = super().info + ts_str = "{}".format(time.strftime( + "%A, %B %{0}d, %Y %{0}I:%M:%S %p %z".format('#' if os.name == 'nt' else '-'), + time.localtime(info["timestamp"]))) + info.update({"Update time": ts_str}) if 'Node_info' in info: if 'Metrics' in info['Node_info']: - info['Node_info']['Metrics']['transaction-count'].update( - config=self.__config_ledger_size - ) - return info + std_ledgers = [POOL_LEDGER_ID, DOMAIN_LEDGER_ID, CONFIG_LEDGER_ID] + other_ledgers = {} + for idx, linfo in self._node.ledgerManager.ledgerRegistry.items(): + if linfo.id in std_ledgers: + continue + other_ledgers[linfo.id] = linfo.ledger.size + 
info['Node_info']['Metrics']['transaction-count'].update(other_ledgers) - @property - @none_on_fail - def software_info(self): - info = super().software_info - if 'Software' in info: - info['Software'].update({'indy-node': self.__node_pkg_version}) return info - @property @none_on_fail - def __config_ledger_size(self): - return self._node.configLedger.size + def _generate_software_info(self): + sfv = super()._generate_software_info() + sfv['Software'].update({'indy-node': self.__node_pkg_version}) + sfv['Software'].update({'sovrin': "unknown"}) + try: + pkg = importlib.import_module(self._config.UPGRADE_ENTRY) + sfv['Software'].update({self._config.UPGRADE_ENTRY: pkg.__version__}) + except Exception: + pass + + return sfv @property @none_on_fail diff --git a/indy_node/test/anon_creds/conftest.py b/indy_node/test/anon_creds/conftest.py index 60350e361..1d9fa7ef5 100644 --- a/indy_node/test/anon_creds/conftest.py +++ b/indy_node/test/anon_creds/conftest.py @@ -18,6 +18,8 @@ from plenum.common.types import f, OPERATION from plenum.test.helper import create_new_test_node +from indy_node.test.schema.test_send_get_schema import send_schema_seq_no + @pytest.fixture(scope="module") def add_revoc_def_by_default(create_node_and_not_start, @@ -130,10 +132,10 @@ def build_txn_for_revoc_def_entry_by_demand(looper, return req @pytest.fixture(scope="module") -def claim_def(): +def claim_def(send_schema_seq_no): return { "type": CLAIM_DEF, - "ref": 1, + "ref": send_schema_seq_no, "signature_type": "CL", "tag": "some_tag", "data": { diff --git a/indy_node/test/api/helper.py b/indy_node/test/api/helper.py index 5a3985c96..2e6b08296 100644 --- a/indy_node/test/api/helper.py +++ b/indy_node/test/api/helper.py @@ -3,7 +3,7 @@ from indy.anoncreds import issuer_create_schema from indy.ledger import build_schema_request -from plenum.test.helper import sdk_get_reply, sdk_sign_and_submit_req +from plenum.test.helper import sdk_get_reply, sdk_sign_and_submit_req, sdk_get_and_check_replies # 
Utility predicates @@ -177,10 +177,35 @@ def validate_claim_def_txn(txn): # Misc utility -def sdk_write_schema(looper, sdk_pool_handle, sdk_wallet_steward): +def sdk_write_schema(looper, sdk_pool_handle, sdk_wallet_steward, multi_attribute=[], name="", version=""): _, identifier = sdk_wallet_steward - _, schema_json = looper.loop.run_until_complete( - issuer_create_schema(identifier, "name", "1.0", json.dumps(["first", "last"]))) + + if multi_attribute: + _, schema_json = looper.loop.run_until_complete( + issuer_create_schema(identifier, name, version, json.dumps(multi_attribute))) + else: + _, schema_json = looper.loop.run_until_complete( + issuer_create_schema(identifier, "name", "1.0", json.dumps(["first", "last"]))) + request = looper.loop.run_until_complete(build_schema_request(identifier, schema_json)) + return schema_json, \ sdk_get_reply(looper, sdk_sign_and_submit_req(sdk_pool_handle, sdk_wallet_steward, request))[1] + + +def sdk_write_schema_and_check(looper, sdk_pool_handle, sdk_wallet_steward, + attributes=[], name="", version=""): + _, identifier = sdk_wallet_steward + + _, schema_json = looper.loop.run_until_complete( + issuer_create_schema( + identifier, name, + version, json.dumps(attributes) + )) + + request = looper.loop.run_until_complete( + build_schema_request(identifier, schema_json) + ) + req = sdk_sign_and_submit_req(sdk_pool_handle, sdk_wallet_steward, request) + rep = sdk_get_and_check_replies(looper, [req]) + return rep diff --git a/indy_node/test/api/test_nym_reply.py b/indy_node/test/api/test_nym_reply.py index c41847351..b04063b97 100644 --- a/indy_node/test/api/test_nym_reply.py +++ b/indy_node/test/api/test_nym_reply.py @@ -1,7 +1,5 @@ -import json - from indy.ledger import build_nym_request -from indy_client.test.cli.helper import createHalfKeyIdentifierAndAbbrevVerkey +from indy_node.test.helper import createHalfKeyIdentifierAndAbbrevVerkey from indy_node.test.api.helper import validate_write_reply, validate_nym_txn from 
plenum.test.helper import sdk_get_reply, sdk_sign_and_submit_req diff --git a/indy_node/test/attrib_txn/test_nym_attrib.py b/indy_node/test/attrib_txn/test_nym_attrib.py new file mode 100644 index 000000000..90c70aa20 --- /dev/null +++ b/indy_node/test/attrib_txn/test_nym_attrib.py @@ -0,0 +1,253 @@ +import json +from contextlib import contextmanager +import pytest + +from plenum.common.constants import STEWARD_STRING +from plenum.common.exceptions import RequestRejectedException +from plenum.common.util import adict, randomString +from indy_common.constants import TRUST_ANCHOR_STRING +from indy_common.util import getSymmetricallyEncryptedVal +from indy_node.test.helper import sdk_add_attribute_and_check, sdk_get_attribute_and_check +from plenum.test.pool_transactions.helper import sdk_add_new_nym +from stp_core.common.log import getlogger + +logger = getlogger() + + +@pytest.fixture(scope="module") +def attributeName(): + return 'endpoint' + + +@pytest.fixture(scope="module") +def attributeValue(): + return { + "ha": "127.0.0.1:9700", + "verkey": "F46i9NmUN72QMbbm5qWetB6CmfT7hiU8BM1qrtTGLKsc" + } + + +@pytest.fixture(scope="module") +def attributeData(attributeName, attributeValue): + return json.dumps({attributeName: attributeValue}) + + +@pytest.fixture(scope="module") +def sdk_added_raw_attribute(sdk_pool_handle, sdk_user_wallet_a, + sdk_wallet_trust_anchor, attributeData, looper): + _, did_cl = sdk_user_wallet_a + req_couple = sdk_add_attribute_and_check(looper, sdk_pool_handle, sdk_wallet_trust_anchor, attributeData, did_cl)[0] + return req_couple[1] + + +@pytest.fixture(scope="module") +def symEncData(attributeData): + encData, secretKey = getSymmetricallyEncryptedVal(attributeData) + return adict(data=attributeData, encData=encData, secretKey=secretKey) + + +@contextmanager +def whitelistextras(*msg): + global whitelistArray + ins = {m: (m in whitelistArray) for m in msg} + [whitelistArray.append(m) for m, _in in ins.items() if not _in] + yield + 
[whitelistArray.remove(m) for m, _in in ins.items() if not _in] + + +def testTrustAnchorAddsAttributeForUser(sdk_added_raw_attribute): + pass + + +def testTrustAnchorGetAttrsForUser(looper, + sdk_user_wallet_a, + sdk_wallet_trust_anchor, + sdk_pool_handle, + attributeName, + sdk_added_raw_attribute): + _, dest = sdk_user_wallet_a + sdk_get_attribute_and_check(looper, sdk_pool_handle, + sdk_wallet_trust_anchor, dest, attributeName) + + +def test_non_trust_anchor_cannot_add_attribute_for_user( + looper, + nodeSet, + sdk_wallet_client, + sdk_pool_handle, + sdk_user_wallet_a, + attributeData): + _, dest = sdk_user_wallet_a + + with pytest.raises(RequestRejectedException) as e: + sdk_add_attribute_and_check(looper, sdk_pool_handle, + sdk_wallet_client, attributeData, dest) + e.match('Only identity owner/guardian can add attribute for that identity') + + +def testOnlyUsersTrustAnchorCanAddAttribute( + nodeSet, + looper, + attributeData, + sdk_pool_handle, + sdk_wallet_trustee, + sdk_user_wallet_a): + _, dest = sdk_user_wallet_a + wallet_another_ta = sdk_add_new_nym(looper, sdk_pool_handle, sdk_wallet_trustee, + alias='TA-' + randomString(5), role=TRUST_ANCHOR_STRING) + with pytest.raises(RequestRejectedException) as e: + sdk_add_attribute_and_check(looper, sdk_pool_handle, + wallet_another_ta, attributeData, dest) + e.match('Only identity owner/guardian can add attribute for that identity') + + +def testStewardCannotAddUsersAttribute( + nodeSet, + looper, + attributeData, + sdk_pool_handle, + sdk_wallet_trustee, + sdk_user_wallet_a): + _, dest = sdk_user_wallet_a + wallet_another_stewatd = sdk_add_new_nym(looper, sdk_pool_handle, sdk_wallet_trustee, + alias='TA-' + randomString(5), role=STEWARD_STRING) + with pytest.raises(RequestRejectedException) as e: + sdk_add_attribute_and_check(looper, sdk_pool_handle, + wallet_another_stewatd, attributeData, dest) + e.match('Only identity owner/guardian can add attribute for that identity') + + +# TODO: Ask Jason, if getting the 
latest attribute makes sense since in case +# of encrypted and hashed attributes, there is no name. +def testLatestAttrIsReceived( + looper, + nodeSet, + sdk_wallet_trust_anchor, + sdk_pool_handle, + sdk_user_wallet_a): + _, dest = sdk_user_wallet_a + + attr1 = json.dumps({'name': 'Mario'}) + sdk_add_attribute_and_check(looper, sdk_pool_handle, + sdk_wallet_trust_anchor, attr1, dest) + reply = sdk_get_attribute_and_check(looper, sdk_pool_handle, + sdk_wallet_trust_anchor, dest, 'name')[0] + reply_equality_of_get_attribute(reply, 'Mario') + + attr2 = json.dumps({'name': 'Luigi'}) + sdk_add_attribute_and_check(looper, sdk_pool_handle, + sdk_wallet_trust_anchor, attr2, dest) + reply = sdk_get_attribute_and_check(looper, sdk_pool_handle, + sdk_wallet_trust_anchor, dest, 'name')[0] + reply_equality_of_get_attribute(reply, 'Luigi') + + +def reply_equality_of_get_attribute(reply, value): + result = reply[1]['result'] + assert json.loads(result['data'])[result['raw']] == value + + +def test_user_add_attrs_for_herself_and_get_it( + looper, + nodeSet, + sdk_wallet_trustee, + sdk_pool_handle): + wallet_client = sdk_add_new_nym(looper, sdk_pool_handle, sdk_wallet_trustee, role=None) + _, dest = wallet_client + attr = json.dumps({'name': 'Albus'}) + sdk_add_attribute_and_check(looper, sdk_pool_handle, + wallet_client, attr, dest) + sdk_get_attribute_and_check(looper, sdk_pool_handle, + wallet_client, dest, 'name') + + +@pytest.mark.skip(reason="INDY-896 ATTR cannot be added without dest") +def test_attr_with_no_dest_added(nodeSet, looper, attributeData): + pass + # user_wallet = Wallet() + # signer = DidSigner() + # user_wallet.addIdentifier(signer=signer) + # + # client.registerObserver(user_wallet.handleIncomingReply) + # looper.add(client) + # looper.run(client.ensureConnectedToNodes()) + # makePendingTxnsRequest(client, user_wallet) + # + # createNym(looper, + # user_wallet.defaultId, + # trustAnchor, + # addedTrustAnchor, + # role=None, + # verkey=user_wallet.getVerkey()) 
+ # + # attr1 = json.dumps({'age': "24"}) + # attrib = Attribute(name='test4 attribute', + # origin=user_wallet.defaultId, + # value=attr1, + # dest=None, + # ledgerStore=LedgerStore.RAW) + # addAttributeAndCheck(looper, client, user_wallet, attrib) + + +@pytest.mark.skip(reason="SOV-561. Test not implemented") +def testGetTxnsNoSeqNo(): + """ + Test GET_TXNS from client and do not provide any seqNo to fetch from + """ + raise NotImplementedError + + +@pytest.mark.skip(reason="SOV-560. Come back to it later since " + "requestPendingTxns move to wallet") +def testGetTxnsSeqNo(nodeSet, trustAnchorWallet, looper): + pass + """ + Test GET_TXNS from client and provide seqNo to fetch from + """ + # looper.add(trustAnchor) + # looper.run(trustAnchor.ensureConnectedToNodes()) + # + # def chk(): + # assert trustAnchor.spylog.count( + # trustAnchor.requestPendingTxns.__name__) > 0 + # + # # TODO choose or create timeout in 'waits' on this case. + # looper.run(eventually(chk, retryWait=1, timeout=3)) + + +@pytest.mark.skip(reason="SOV-560. Attribute encryption is done in client") +def testTrustAnchorAddedAttributeIsEncrypted(addedEncryptedAttribute): + pass + + +@pytest.mark.skip(reason="SOV-560. Attribute Disclosure is not done for now") +def testTrustAnchorDisclosesEncryptedAttribute( + addedEncryptedAttribute, + symEncData, + looper, + userSignerA, + trustAnchorSigner, + trustAnchor): + pass + # box = libnacl.public.Box(trustAnchorSigner.naclSigner.keyraw, + # userSignerA.naclSigner.verraw) + # + # data = json.dumps({SKEY: symEncData.secretKey, + # TXN_ID: addedEncryptedAttribute[TXN_ID]}) + # nonce, boxedMsg = box.encrypt(data.encode(), pack_nonce=False) + # + # op = { + # TARGET_NYM: userSignerA.verstr, + # TXN_TYPE: ATTRIB, + # NONCE: base58.b58encode(nonce).decode("utf-8"), + # ENC: base58.b58encode(boxedMsg).decode("utf-8") + # } + # submitAndCheck(looper, trustAnchor, op, + # identifier=trustAnchorSigner.verstr) + + +@pytest.mark.skip(reason="SOV-561. 
Pending implementation") +def testTrustAnchorAddedAttributeCanBeChanged(sdk_added_raw_attribute): + # TODO but only by user(if user has taken control of his identity) and + # trustAnchor + raise NotImplementedError diff --git a/indy_node/test/attrib_txn/test_send_get_attr.py b/indy_node/test/attrib_txn/test_send_get_attr.py new file mode 100644 index 000000000..c6abd2c87 --- /dev/null +++ b/indy_node/test/attrib_txn/test_send_get_attr.py @@ -0,0 +1,126 @@ +import pytest +import json + +from indy.ledger import build_get_attrib_request +from libnacl.secret import SecretBox +from hashlib import sha256 + +from plenum.common.exceptions import RequestNackedException + +from plenum.test.helper import sdk_get_and_check_replies + +from indy_node.test.helper import createUuidIdentifier, sdk_add_attribute_and_check, \ + sdk_get_attribute_and_check, modify_field +from plenum.test.pool_transactions.helper import sdk_sign_and_send_prepared_request + +attrib_name = 'dateOfBirth' + +secretBox = SecretBox() +enc_data = secretBox.encrypt(json.dumps({'name': 'Alice'}).encode()).hex() +hash_data = sha256(json.dumps({'name': 'Alice'}).encode()).hexdigest() + + +@pytest.fixture(scope="module") +def send_raw_attrib(looper, sdk_pool_handle, sdk_wallet_trustee): + rep = sdk_add_attribute_and_check(looper, sdk_pool_handle, sdk_wallet_trustee, + json.dumps({ + attrib_name: { + 'dayOfMonth': 23, + 'year': 1984, + 'month': 5 + } + })) + + return rep + + +@pytest.fixture(scope="module") +def send_enc_attrib(looper, sdk_pool_handle, sdk_wallet_trustee): + rep = sdk_add_attribute_and_check(looper, sdk_pool_handle, sdk_wallet_trustee, None, + enc=json.dumps({attrib_name: enc_data})) + return rep + + +@pytest.fixture(scope="module") +def send_hash_attrib(looper, sdk_pool_handle, sdk_wallet_trustee): + rep = sdk_add_attribute_and_check(looper, sdk_pool_handle, sdk_wallet_trustee, None, + xhash=hash_data) + return rep + + +def test_send_get_attr_succeeds_for_existing_uuid_dest( + looper, 
sdk_pool_handle, sdk_wallet_trustee, send_raw_attrib): + wh, did = sdk_wallet_trustee + sdk_get_attribute_and_check(looper, sdk_pool_handle, sdk_wallet_trustee, did, attrib_name) + + +def test_send_get_attr_fails_for_nonexistent_uuid_dest( + looper, sdk_pool_handle, sdk_wallet_trustee, send_raw_attrib): + _, submitter_did = sdk_wallet_trustee + req = looper.loop.run_until_complete( + build_get_attrib_request(submitter_did, submitter_did, attrib_name, None, None)) + req = modify_field(req, submitter_did[:-10], 'operation', 'dest') + request_couple = sdk_sign_and_send_prepared_request(looper, sdk_wallet_trustee, + sdk_pool_handle, req) + with pytest.raises(RequestNackedException) as e: + sdk_get_and_check_replies(looper, [request_couple]) + e.match('should be one of \[16, 32\]') + + +def test_send_get_attr_fails_for_invalid_attrib( + looper, sdk_pool_handle, sdk_wallet_trustee, send_raw_attrib): + did = createUuidIdentifier() + _, submitter_did = sdk_wallet_trustee + req = looper.loop.run_until_complete( + build_get_attrib_request(submitter_did, did, attrib_name, None, None)) + request_couple = sdk_sign_and_send_prepared_request(looper, sdk_wallet_trustee, + sdk_pool_handle, req) + sdk_get_and_check_replies(looper, [request_couple]) + + +def test_send_get_attr_fails_with_missing_dest( + looper, sdk_pool_handle, sdk_wallet_trustee, send_raw_attrib): + _, submitter_did = sdk_wallet_trustee + req = looper.loop.run_until_complete( + build_get_attrib_request(submitter_did, submitter_did, attrib_name, None, None)) + req = modify_field(req, '', 'operation', 'dest') + request_couple = sdk_sign_and_send_prepared_request(looper, sdk_wallet_trustee, + sdk_pool_handle, req) + with pytest.raises(RequestNackedException) as e: + sdk_get_and_check_replies(looper, [request_couple]) + e.match('should be one of \[16, 32\]') + + +def test_send_get_attr_fails_with_missing_attrib( + looper, sdk_pool_handle, sdk_wallet_trustee, send_raw_attrib): + _, submitter_did = sdk_wallet_trustee + 
req = looper.loop.run_until_complete( + build_get_attrib_request(submitter_did, submitter_did, attrib_name, None, None)) + req = json.loads(req) + del req['operation']['raw'] + req = json.dumps(req) + request_couple = sdk_sign_and_send_prepared_request(looper, sdk_wallet_trustee, + sdk_pool_handle, req) + with pytest.raises(RequestNackedException) as e: + sdk_get_and_check_replies(looper, [request_couple]) + e.match('missed fields') + + +def test_send_get_attr_enc_succeeds_for_existing_uuid_dest( + looper, sdk_pool_handle, sdk_wallet_trustee, send_enc_attrib): + _, submitter_did = sdk_wallet_trustee + req = looper.loop.run_until_complete( + build_get_attrib_request(submitter_did, submitter_did, None, None, attrib_name)) + request_couple = sdk_sign_and_send_prepared_request(looper, sdk_wallet_trustee, + sdk_pool_handle, req) + sdk_get_and_check_replies(looper, [request_couple]) + + +def test_send_get_attr_hash_succeeds_for_existing_uuid_dest( + looper, sdk_pool_handle, sdk_wallet_trustee, send_hash_attrib): + _, submitter_did = sdk_wallet_trustee + req = looper.loop.run_until_complete( + build_get_attrib_request(submitter_did, submitter_did, None, hash_data, None)) + request_couple = sdk_sign_and_send_prepared_request(looper, sdk_wallet_trustee, + sdk_pool_handle, req) + sdk_get_and_check_replies(looper, [request_couple]) diff --git a/indy_client/__init__.py b/indy_node/test/claim_def/__init__.py similarity index 100% rename from indy_client/__init__.py rename to indy_node/test/claim_def/__init__.py diff --git a/indy_node/test/claim_def/test_send_claim_def.py b/indy_node/test/claim_def/test_send_claim_def.py new file mode 100644 index 000000000..8e7b56396 --- /dev/null +++ b/indy_node/test/claim_def/test_send_claim_def.py @@ -0,0 +1,93 @@ +import json +import pytest + +from indy.anoncreds import issuer_create_and_store_credential_def +from indy.ledger import build_cred_def_request, parse_get_schema_response, \ + build_get_schema_request +from 
plenum.common.exceptions import RequestRejectedException + +from indy_common.constants import REF + +from indy_node.test.api.helper import sdk_write_schema +from indy_node.test.helper import modify_field + +from plenum.common.types import OPERATION +from plenum.test.helper import sdk_sign_and_submit_req, sdk_get_and_check_replies, sdk_get_reply + + +@pytest.fixture(scope="module") +def schema_json(looper, sdk_pool_handle, sdk_wallet_trustee): + wallet_handle, identifier = sdk_wallet_trustee + schema_json, _ = sdk_write_schema(looper, sdk_pool_handle, sdk_wallet_trustee) + schema_id = json.loads(schema_json)['id'] + + request = looper.loop.run_until_complete(build_get_schema_request(identifier, schema_id)) + reply = sdk_get_reply(looper, sdk_sign_and_submit_req(sdk_pool_handle, sdk_wallet_trustee, request))[1] + _, schema_json = looper.loop.run_until_complete(parse_get_schema_response(json.dumps(reply))) + return schema_json + + +def test_send_claim_def_succeeds( + looper, sdk_pool_handle, nodeSet, sdk_wallet_trustee, schema_json): + wallet_handle, identifier = sdk_wallet_trustee + + _, definition_json = looper.loop.run_until_complete(issuer_create_and_store_credential_def( + wallet_handle, identifier, schema_json, "some_tag", "CL", json.dumps({"support_revocation": True}))) + request = looper.loop.run_until_complete(build_cred_def_request(identifier, definition_json)) + reply = sdk_get_and_check_replies(looper, [sdk_sign_and_submit_req(sdk_pool_handle, sdk_wallet_trustee, request)]) + + +def test_send_claim_def_fails_if_ref_is_seqno_of_non_schema_txn( + looper, sdk_pool_handle, nodeSet, sdk_wallet_trustee, schema_json): + wallet_handle, identifier = sdk_wallet_trustee + + _, definition_json = looper.loop.run_until_complete(issuer_create_and_store_credential_def( + wallet_handle, identifier, schema_json, "some_tag1", "CL", json.dumps({"support_revocation": True}))) + request = looper.loop.run_until_complete(build_cred_def_request(identifier, definition_json)) + 
request = modify_field(request, 1, OPERATION, REF) + with pytest.raises(RequestRejectedException) as e: + sdk_get_and_check_replies(looper, [sdk_sign_and_submit_req(sdk_pool_handle, sdk_wallet_trustee, request)]) + e.match('isn\'t seqNo of the schema.') + + +def test_send_claim_def_fails_if_ref_is_not_existing_seqno( + looper, sdk_pool_handle, nodeSet, sdk_wallet_trustee, schema_json): + wallet_handle, identifier = sdk_wallet_trustee + + _, definition_json = looper.loop.run_until_complete(issuer_create_and_store_credential_def( + wallet_handle, identifier, schema_json, "some_tag2", "CL", json.dumps({"support_revocation": True}))) + request = looper.loop.run_until_complete(build_cred_def_request(identifier, definition_json)) + request = modify_field(request, 999999, OPERATION, REF) + with pytest.raises(RequestRejectedException) as e: + sdk_get_and_check_replies(looper, [sdk_sign_and_submit_req(sdk_pool_handle, sdk_wallet_trustee, request)]) + e.match('doesn\'t exist') + + +def test_update_claim_def_for_same_schema_and_signature_type( + looper, sdk_pool_handle, nodeSet, sdk_wallet_trustee, schema_json): + wallet_handle, identifier = sdk_wallet_trustee + + _, definition_json = looper.loop.run_until_complete(issuer_create_and_store_credential_def( + wallet_handle, identifier, schema_json, "some_tag3", "CL", json.dumps({"support_revocation": True}))) + request = looper.loop.run_until_complete(build_cred_def_request(identifier, definition_json)) + sdk_get_and_check_replies(looper, [sdk_sign_and_submit_req(sdk_pool_handle, sdk_wallet_trustee, request)]) + + definition_json = modify_field(definition_json, '999', 'value', 'primary', 'n') + request = looper.loop.run_until_complete(build_cred_def_request(identifier, definition_json)) + sdk_get_and_check_replies(looper, [sdk_sign_and_submit_req(sdk_pool_handle, sdk_wallet_trustee, request)]) + + +def test_can_send_same_claim_def_by_different_issuers( + looper, sdk_pool_handle, nodeSet, sdk_wallet_trustee, sdk_wallet_steward, 
schema_json): + wallet_handle, identifier = sdk_wallet_trustee + + _, definition_json = looper.loop.run_until_complete(issuer_create_and_store_credential_def( + wallet_handle, identifier, schema_json, "some_tag4", "CL", json.dumps({"support_revocation": True}))) + request = looper.loop.run_until_complete(build_cred_def_request(identifier, definition_json)) + sdk_get_and_check_replies(looper, [sdk_sign_and_submit_req(sdk_pool_handle, sdk_wallet_trustee, request)]) + + wallet_handle, identifier = sdk_wallet_steward + _, definition_json = looper.loop.run_until_complete(issuer_create_and_store_credential_def( + wallet_handle, identifier, schema_json, "some_tag4", "CL", json.dumps({"support_revocation": True}))) + request = looper.loop.run_until_complete(build_cred_def_request(identifier, definition_json)) + sdk_get_and_check_replies(looper, [sdk_sign_and_submit_req(sdk_pool_handle, sdk_wallet_steward, request)]) diff --git a/indy_node/test/claim_def/test_send_get_claim_def.py b/indy_node/test/claim_def/test_send_get_claim_def.py new file mode 100644 index 000000000..b8ef6931f --- /dev/null +++ b/indy_node/test/claim_def/test_send_get_claim_def.py @@ -0,0 +1,61 @@ +import json +import pytest + +from indy.anoncreds import issuer_create_and_store_credential_def +from indy.ledger import build_get_cred_def_request, build_cred_def_request + +from plenum.common.exceptions import RequestNackedException +from plenum.common.constants import TXN_METADATA, TXN_METADATA_ID +from plenum.test.helper import sdk_sign_and_submit_req, sdk_get_and_check_replies + +from indy_node.test.helper import modify_field + +from indy_node.test.claim_def.test_send_claim_def import schema_json + + +@pytest.fixture(scope="module") +def added_claim_def_id(looper, sdk_pool_handle, nodeSet, + sdk_wallet_trustee, schema_json): + wallet_handle, identifier = sdk_wallet_trustee + _, definition_json = looper.loop.run_until_complete(issuer_create_and_store_credential_def( + wallet_handle, identifier, 
schema_json, "some_tag", "CL", json.dumps({"support_revocation": True}))) + request = looper.loop.run_until_complete(build_cred_def_request(identifier, definition_json)) + rep = sdk_get_and_check_replies(looper, [sdk_sign_and_submit_req(sdk_pool_handle, sdk_wallet_trustee, request)]) + return rep[0][1]['result'][TXN_METADATA][TXN_METADATA_ID] + + +def test_send_get_claim_def_succeeds(looper, sdk_pool_handle, nodeSet, + sdk_wallet_trustee, added_claim_def_id): + _, did = sdk_wallet_trustee + request = looper.loop.run_until_complete(build_get_cred_def_request(did, added_claim_def_id)) + sdk_get_and_check_replies(looper, [sdk_sign_and_submit_req(sdk_pool_handle, sdk_wallet_trustee, request)]) + + +def test_send_get_claim_def_as_client_succeeds( + looper, + sdk_pool_handle, + nodeSet, + added_claim_def_id, + sdk_wallet_client): + _, did = sdk_wallet_client + request = looper.loop.run_until_complete(build_get_cred_def_request(did, added_claim_def_id)) + sdk_get_and_check_replies(looper, [sdk_sign_and_submit_req(sdk_pool_handle, sdk_wallet_client, request)]) + + +def test_send_get_claim_def_with_invalid_ref_fails(looper, sdk_pool_handle, nodeSet, + sdk_wallet_trustee, added_claim_def_id): + _, did = sdk_wallet_trustee + request = looper.loop.run_until_complete(build_get_cred_def_request(did, added_claim_def_id)) + request = modify_field(request, '!@#', 'operation', 'ref') + with pytest.raises(RequestNackedException) as e: + sdk_get_and_check_replies(looper, [sdk_sign_and_submit_req(sdk_pool_handle, sdk_wallet_trustee, request)]) + e.match('expected types \'int\', got \'str\'') + + +def test_send_get_claim_def_with_invalid_signature_not_get_claim( + looper, sdk_pool_handle, nodeSet, sdk_wallet_trustee, added_claim_def_id): + _, did = sdk_wallet_trustee + request = looper.loop.run_until_complete(build_get_cred_def_request(did, added_claim_def_id)) + request = modify_field(request, 'ABC', 'operation', 'signature_type') + rep = sdk_get_and_check_replies(looper, 
[sdk_sign_and_submit_req(sdk_pool_handle, sdk_wallet_trustee, request)]) + assert rep[0][1]['result']['data'] is None diff --git a/indy_node/test/conftest.py b/indy_node/test/conftest.py index 6f360d4ad..33bde3a0e 100644 --- a/indy_node/test/conftest.py +++ b/indy_node/test/conftest.py @@ -2,6 +2,8 @@ import warnings import pytest +from indy_common.config_helper import NodeConfigHelper +from indy_node.test.helper import TestNode from plenum.test.pool_transactions.helper import sdk_add_new_nym, sdk_pool_refresh, prepare_new_node_data, \ create_and_start_new_node, prepare_node_request, sdk_sign_and_send_prepared_request from stp_core.common.log import Logger @@ -16,17 +18,10 @@ strict_types.defaultShouldCheck = True # noinspection PyUnresolvedReferences -from indy_client.test.conftest import trustAnchorWallet, \ - trustAnchor, tdirWithDomainTxns, \ - stewardWallet, steward, genesisTxns, testClientClass, client_ledger_dir, \ - addedTrustAnchor, userWalletB, nodeSet, testNodeClass, updatedPoolTxnData, \ - trusteeData, trusteeWallet, trustee, warnfilters as client_warnfilters - -# noinspection PyUnresolvedReferences -from plenum.test.conftest import tdir, client_tdir, nodeReg, \ +from plenum.test.conftest import tdir, nodeReg, \ whitelist, concerningLogLevels, logcapture, \ - tdirWithPoolTxns, tdirWithDomainTxns as PTdirWithDomainTxns, \ - tdirWithClientPoolTxns, txnPoolNodeSet, \ + tdirWithPoolTxns, tdirWithDomainTxns, \ + txnPoolNodeSet, \ poolTxnData, dirName, poolTxnNodeNames, allPluginsPath, tdirWithNodeKeepInited, \ poolTxnStewardData, poolTxnStewardNames, getValueFromModule, \ patchPluginManager, txnPoolNodesLooper, warncheck, \ @@ -51,9 +46,8 @@ def sdk_pool_handle(plenum_pool_handle, nodeSet): @pytest.fixture(scope="session") -def warnfilters(client_warnfilters): +def warnfilters(): def _(): - client_warnfilters() warnings.filterwarnings( 'ignore', category=DeprecationWarning, @@ -126,3 +120,42 @@ def sdk_node_theta_added(looper, def 
sdk_wallet_trust_anchor(looper, sdk_pool_handle, sdk_wallet_trustee): return sdk_add_new_nym(looper, sdk_pool_handle, sdk_wallet_trustee, alias='TA-1', role='TRUST_ANCHOR') + + +@pytest.fixture(scope="module") +def sdk_user_wallet_a(nodeSet, sdk_wallet_trust_anchor, + sdk_pool_handle, looper): + return sdk_add_new_nym(looper, sdk_pool_handle, + sdk_wallet_trust_anchor, alias='userA', + skipverkey=True) + + +@pytest.fixture(scope="module") +def nodeSet(txnPoolNodeSet): + return txnPoolNodeSet + + +@pytest.fixture(scope="module") +def testNodeClass(): + return TestNode + + +@pytest.fixture(scope="module") +def newNodeAdded(looper, nodeSet, tdir, tconf, sdk_pool_handle, + sdk_wallet_trustee, allPluginsPath): + new_steward_wallet, new_node = sdk_node_theta_added(looper, + nodeSet, + tdir, + tconf, + sdk_pool_handle, + sdk_wallet_trustee, + allPluginsPath, + node_config_helper_class=NodeConfigHelper, + testNodeClass=TestNode, + name='') + return new_steward_wallet, new_node + + +@pytest.fixture(scope='module') +def nodeIds(nodeSet): + return next(iter(nodeSet)).poolManager.nodeIds diff --git a/indy_node/test/did/conftest.py b/indy_node/test/did/conftest.py index cd44d7085..4658e50f6 100644 --- a/indy_node/test/did/conftest.py +++ b/indy_node/test/did/conftest.py @@ -1,46 +1,58 @@ -import base58 import pytest - -from indy_client.client.wallet.wallet import Wallet -from indy_node.test.helper import makePendingTxnsRequest -from indy_client.test.helper import genTestClient - -pf = pytest.fixture(scope='module') - - -@pf -def wallet(): - return Wallet('my wallet') - - -@pf -def client(wallet, looper, tdirWithClientPoolTxns): - s, _ = genTestClient(tmpdir=tdirWithClientPoolTxns, usePoolLedger=True) - s.registerObserver(wallet.handleIncomingReply) - looper.add(s) - looper.run(s.ensureConnectedToNodes()) - makePendingTxnsRequest(s, wallet) - return s - - -@pf -def abbrevIdr(wallet): - idr, _ = wallet.addIdentifier() - return idr - - -@pf -def abbrevVerkey(wallet, abbrevIdr): - 
return wallet.getVerkey(abbrevIdr) - - -@pf -def noKeyIdr(wallet): - idr = base58.b58encode(b'1' * 16).decode("utf-8") - return wallet.addIdentifier(identifier=idr)[0] - - -@pf -def fullKeyIdr(wallet): - idr = base58.b58encode(b'2' * 16).decode("utf-8") - return wallet.addIdentifier(identifier=idr)[0] +import json + +from indy.did import abbreviate_verkey, create_and_store_my_did +from indy.ledger import build_get_nym_request, build_nym_request + +from plenum.common.util import randomString +from plenum.test.helper import sdk_get_and_check_replies +from plenum.test.pool_transactions.helper import sdk_sign_and_send_prepared_request + + +def add_new_nym(looper, sdk_pool_handle, creators_wallet, dest, verkey=None): + wh, submitter_did = creators_wallet + nym_request = looper.loop.run_until_complete(build_nym_request(submitter_did, dest, verkey, None, None)) + req = sdk_sign_and_send_prepared_request(looper, creators_wallet, sdk_pool_handle, nym_request) + sdk_get_and_check_replies(looper, [req]) + return wh, dest + + +@pytest.fixture(scope='function') +def nym_full_vk(looper, tconf, nodeSet, sdk_pool_handle, sdk_wallet_trustee): + wh, tr_did = sdk_wallet_trustee + (new_did, new_verkey) = looper.loop.run_until_complete( + create_and_store_my_did(wh, json.dumps({'seed': randomString(32)}))) + assert not new_verkey.startswith("~") + nwh, nd = add_new_nym(looper, sdk_pool_handle, sdk_wallet_trustee, dest=new_did, verkey=new_verkey) + return nwh, nd, new_verkey + + +@pytest.fixture(scope='function') +def nym_abbrv_vk(looper, tconf, nodeSet, sdk_pool_handle, sdk_wallet_trustee): + wh, tr_did = sdk_wallet_trustee + (new_did, new_verkey) = looper.loop.run_until_complete( + create_and_store_my_did(wh, json.dumps({'seed': randomString(32)}))) + abbrv_vk = looper.loop.run_until_complete(abbreviate_verkey(new_did, new_verkey)) + assert abbrv_vk.startswith("~") + nwh, nd = add_new_nym(looper, sdk_pool_handle, sdk_wallet_trustee, dest=new_did, verkey=abbrv_vk) + return nwh, nd, 
abbrv_vk + + +@pytest.fixture(scope='function') +def nym_empty_vk(looper, tconf, nodeSet, sdk_pool_handle, sdk_wallet_trustee): + wh, tr_did = sdk_wallet_trustee + (new_did, new_verkey) = looper.loop.run_until_complete( + create_and_store_my_did(wh, json.dumps({'seed': randomString(32)}))) + nwh, nd = add_new_nym(looper, sdk_pool_handle, sdk_wallet_trustee, dest=new_did) + return nwh, nd + + +def nym_get(looper, sdk_pool_handle, sdk_wallet_sign, did): + sign_w, sign_did = sdk_wallet_sign + get_nym_req = looper.loop.run_until_complete(build_get_nym_request(sign_did, did)) + req = sdk_sign_and_send_prepared_request(looper, sdk_wallet_sign, sdk_pool_handle, get_nym_req) + repl_data = sdk_get_and_check_replies(looper, [req])[0][1].get("result", {}).get("data", "") + dt = json.loads(repl_data) + nym = dt.get("dest", None) + vk = dt.get("verkey", None) + return nym, vk diff --git a/indy_node/test/did/helper.py b/indy_node/test/did/helper.py deleted file mode 100644 index 9aad11e1f..000000000 --- a/indy_node/test/did/helper.py +++ /dev/null @@ -1,90 +0,0 @@ -import base58 -from plenum.common.signer_did import DidSigner -from plenum.common.verifier import DidVerifier -from stp_core.loop.eventually import eventually -from plenum.test.helper import assertEquality -from plenum.test import waits as plenumWaits - -from indy_common.identity import Identity - -MsgForSigning = {'sender': 'Mario', 'msg': 'Lorem ipsum'} - - -def signMsg(wallet, idr): - return wallet.signMsg(MsgForSigning, identifier=idr) - - -def verifyMsg(verifier, sig): - sig = base58.b58decode(sig) - return verifier.verifyMsg(sig, MsgForSigning) - - -def chkVerifyForRetrievedIdentity(signerWallet, verifierWallet, idr): - sig = signMsg(signerWallet, idr) - verkey = verifierWallet.getIdentity(idr).verkey - assert verifyMsg(DidVerifier(verkey, idr), sig) - - -def updateWalletIdrWithFullKeySigner(wallet, idr): - newSigner = DidSigner(identifier=idr) - wallet.updateSigner(idr, newSigner) - 
assertEquality(newSigner.verkey, wallet.getVerkey(idr)) - checkFullVerkeySize(wallet.getVerkey(idr)) - return newSigner.verkey - - -def updateWalletIdrWithFullVerkeySigner(wallet, idr, signer): - wallet.updateSigner(idr, signer) - assertEquality(signer.verkey, wallet.getVerkey(idr)) - checkFullVerkeySize(wallet.getVerkey(idr)) - - -def updateIndyIdrWithVerkey( - looper, senderWallet, senderClient, idr, fullKey): - idy = Identity(identifier=idr, verkey=fullKey) - senderWallet.updateTrustAnchoredIdentity(idy) - # TODO: What if the request fails, there must be some rollback mechanism - assert senderWallet.getTrustAnchoredIdentity(idr).seqNo is None - reqs = senderWallet.preparePending() - senderClient.submitReqs(*reqs) - - def chk(): - assert senderWallet.getTrustAnchoredIdentity(idr).seqNo is not None - - timeout = plenumWaits.expectedReqAckQuorumTime() - looper.run(eventually(chk, retryWait=1, timeout=timeout)) - - -def fetchFullVerkeyFromIndy(looper, senderWallet, senderClient, - ownerWallet, idr): - identity = Identity(identifier=idr) - req = senderWallet.requestIdentity(identity, sender=senderWallet.defaultId) - senderClient.submitReqs(req) - - def chk(): - retrievedVerkey = senderWallet.getIdentity(idr).verkey - assertEquality(retrievedVerkey, ownerWallet.getVerkey(idr)) - checkFullVerkeySize(retrievedVerkey) - - timeout = plenumWaits.expectedReqAckQuorumTime() - looper.run(eventually(chk, retryWait=1, timeout=timeout)) - - -def checkDidSize(did): - # A base58 encoding of 32 bytes string can be either 44 bytes or 43 bytes, - # since the did takes first 16 bytes, base58 of did is either - # 21 or 22 characters - assert len(did) == 21 or len(did) == 22 - - -def checkAbbrVerkeySize(verkey): - # A base58 encoding of 32 bytes string can be either 44 bytes or 43 bytes, - # since the abbreviated verkey takes last 16 bytes, base58 of abbreviated - # verkey is either 21 or 22 characters and since its prefixed by a `~` its - # length will be either 23 or 22 - assert 
len(verkey) == 23 or len(verkey) == 22 - - -def checkFullVerkeySize(verkey): - # A base58 encoding of 32 bytes string can be either 44 bytes or 43 bytes. - assert len(verkey) == 44 or len(verkey) == 43 diff --git a/indy_node/test/did/test_did_with_abbreviated_verkey.py b/indy_node/test/did/test_did_with_abbreviated_verkey.py index b7e3bbcb3..692d95060 100644 --- a/indy_node/test/did/test_did_with_abbreviated_verkey.py +++ b/indy_node/test/did/test_did_with_abbreviated_verkey.py @@ -1,125 +1,38 @@ -""" -Abbreviated verkey tests - Add a nym (16 byte, base58) with an abbreviated verkey (‘~’ with 16 bytes, base58) (Form 3) - { type: NYM, dest: , verkey: ~<16byte abbreviated key> } - Retrieve the verkey. - { type: GET_NYM, dest: } - Verify a signature from this identifier - Change a verkey for a nym with a full verkey. - { type: NYM, dest: , verkey: <32byte ED25519 key> } - Retrieve new verkey - { type: GET_NYM, dest: } - Verify a signature from this identifier with the new verkey -""" -from plenum.common.signer_did import DidSigner -from stp_core.loop.eventually import eventually -from plenum.test.helper import assertEquality -from plenum.test import waits as plenumWaits +from indy_node.test.did.conftest import nym_get +from indy_node.test.helper import sdk_rotate_verkey -from indy_common.identity import Identity -from indy_node.test.did.conftest import pf -from indy_node.test.did.helper import chkVerifyForRetrievedIdentity, \ - updateIndyIdrWithVerkey, fetchFullVerkeyFromIndy, checkAbbrVerkeySize, \ - checkDidSize, updateWalletIdrWithFullVerkeySigner -from indy_client.test.helper import createNym - -@pf -def didAddedWithAbbrvVerkey( - addedTrustAnchor, - looper, - trustAnchor, - trustAnchorWallet, - wallet, - abbrevIdr): - """{ type: NYM, dest: }""" - createNym(looper, abbrevIdr, trustAnchor, trustAnchorWallet, - verkey=wallet.getVerkey(abbrevIdr)) - return wallet - - -@pf -def newAbbrvKey(wallet, abbrevIdr): - newSigner = DidSigner(identifier=abbrevIdr) - 
wallet.updateSigner(abbrevIdr, newSigner) - assertEquality(newSigner.verkey, wallet.getVerkey(abbrevIdr)) - return newSigner.verkey - - -@pf -def newFullKeySigner(wallet, abbrevIdr): - return DidSigner(identifier=abbrevIdr) - - -@pf -def newFullKey(newFullKeySigner): - return newFullKeySigner.verkey - - -@pf -def didUpdatedWithFullVerkey(didAddedWithAbbrvVerkey, looper, trustAnchor, - trustAnchorWallet, abbrevIdr, newFullKey, - newFullKeySigner, wallet, client): - """{ type: NYM, dest: , verkey: }""" - updateIndyIdrWithVerkey(looper, wallet, client, abbrevIdr, newFullKey) - updateWalletIdrWithFullVerkeySigner(wallet, abbrevIdr, newFullKeySigner) - - -@pf -def newVerkeyFetched( - didAddedWithAbbrvVerkey, - looper, - trustAnchor, - trustAnchorWallet, - abbrevIdr, - wallet): - """{ type: GET_NYM, dest: }""" - fetchFullVerkeyFromIndy(looper, trustAnchorWallet, trustAnchor, wallet, - abbrevIdr) - - -def testNewIdentifierInWalletIsDid(abbrevIdr): - checkDidSize(abbrevIdr) - - -def testDefaultVerkeyIsAbbreviated(abbrevVerkey): - # A base58 encoding of 32 bytes string can be either 44 bytes or 43 bytes, - # since the did takes first 22 bytes, abbreviated verkey will take - # remaining 22 or 21 characters - checkAbbrVerkeySize(abbrevVerkey) - assert abbrevVerkey[0] == '~' - - -def testAddDidWithVerkey(didAddedWithAbbrvVerkey): +def testAddDidWithVerkey(nym_abbrv_vk): pass -def testRetrieveAbbrvVerkey(didAddedWithAbbrvVerkey, looper, trustAnchor, - trustAnchorWallet, wallet, abbrevIdr): - """{ type: GET_NYM, dest: }""" - identity = Identity(identifier=abbrevIdr) - req = trustAnchorWallet.requestIdentity(identity, - sender=trustAnchorWallet.defaultId) - trustAnchor.submitReqs(req) - - def chk(): - retrievedVerkey = trustAnchorWallet.getIdentity(abbrevIdr).verkey - assertEquality(retrievedVerkey, wallet.getVerkey(abbrevIdr)) - checkAbbrVerkeySize(retrievedVerkey) +def testRetrieveAbbrvVerkey(looper, tconf, nodeSet, sdk_pool_handle, sdk_wallet_trustee, nym_abbrv_vk): + nwh, 
ndid, nvk = nym_abbrv_vk + resp_data = nym_get(looper, sdk_pool_handle, sdk_wallet_trustee, ndid) + assert ndid == resp_data[0] + assert nvk == resp_data[1] - timeout = plenumWaits.expectedReqAckQuorumTime() - looper.run(eventually(chk, retryWait=1, timeout=timeout)) - chkVerifyForRetrievedIdentity(wallet, trustAnchorWallet, abbrevIdr) +def testChangeVerkeyToNewVerkey(looper, tconf, nodeSet, sdk_pool_handle, nym_abbrv_vk): + wh, did, nvk = nym_abbrv_vk + new_verkey = sdk_rotate_verkey(looper, sdk_pool_handle, wh, did, did) + assert nvk != new_verkey -def testChangeVerkeyToNewVerkey(didUpdatedWithFullVerkey): - pass - -def testRetrieveChangedVerkey(didUpdatedWithFullVerkey, newVerkeyFetched): - pass +def testRetrieveChangedVerkey(looper, tconf, nodeSet, sdk_pool_handle, sdk_wallet_trustee, nym_abbrv_vk): + wh, did, vk = nym_abbrv_vk + new_vk = sdk_rotate_verkey(looper, sdk_pool_handle, wh, did, did) + resp_data = nym_get(looper, sdk_pool_handle, sdk_wallet_trustee, did) + assert did == resp_data[0] + assert vk != resp_data[1] + assert new_vk == resp_data[1] -def testVerifySigWithChangedVerkey(didUpdatedWithFullVerkey, newVerkeyFetched, - trustAnchorWallet, abbrevIdr, wallet): - chkVerifyForRetrievedIdentity(wallet, trustAnchorWallet, abbrevIdr) +def testVerifySigWithChangedVerkey(looper, tconf, nodeSet, sdk_pool_handle, nym_abbrv_vk): + wh, did, vk = nym_abbrv_vk + new_vk = sdk_rotate_verkey(looper, sdk_pool_handle, wh, did, did) + # check sign by getting nym from ledger - if succ then sign is ok + resp_data = nym_get(looper, sdk_pool_handle, (wh, did), did) + assert did == resp_data[0] + assert vk != resp_data[1] + assert new_vk == resp_data[1] diff --git a/indy_node/test/did/test_did_with_full_verkey.py b/indy_node/test/did/test_did_with_full_verkey.py index 9fd277981..19b6da426 100644 --- a/indy_node/test/did/test_did_with_full_verkey.py +++ b/indy_node/test/did/test_did_with_full_verkey.py @@ -1,100 +1,38 @@ -""" -Full verkey tests - Add a nym (16 byte, base58) 
with a full verkey (32 byte, base58) (Form 1) - { type: NYM, dest: , verkey: <32byte key> } - Retrieve the verkey. - { type: GET_NYM, dest: } - Verify a signature from this identifier - Change a verkey for a nym with a full verkey. - { type: NYM, dest: , verkey: <32byte ED25519 key> } - Retrieve new verkey - { type: GET_NYM, dest: } - Verify a signature from this identifier with the new verkey -""" -from stp_core.loop.eventually import eventually -from plenum.common.signer_did import DidSigner -from plenum.test import waits as plenumWaits +from indy_node.test.did.conftest import nym_get +from indy_node.test.helper import sdk_rotate_verkey -from indy_common.identity import Identity -from indy_node.test.did.conftest import pf -from indy_node.test.did.helper import chkVerifyForRetrievedIdentity, \ - updateIndyIdrWithVerkey, \ - fetchFullVerkeyFromIndy, checkFullVerkeySize, \ - updateWalletIdrWithFullVerkeySigner -from indy_client.test.helper import createNym - -@pf -def didAddedWithFullVerkey( - addedTrustAnchor, - looper, - trustAnchor, - trustAnchorWallet, - wallet, - fullKeyIdr): - """{ type: NYM, dest: }""" - createNym(looper, fullKeyIdr, trustAnchor, trustAnchorWallet, - verkey=wallet.getVerkey(fullKeyIdr)) - return wallet - - -@pf -def newFullKeySigner(wallet, fullKeyIdr): - return DidSigner(identifier=fullKeyIdr) - - -@pf -def newFullKey(newFullKeySigner): - return newFullKeySigner.verkey - - -@pf -def didUpdatedWithFullVerkey(didAddedWithFullVerkey, looper, trustAnchor, - trustAnchorWallet, fullKeyIdr, newFullKey, - newFullKeySigner, wallet, client): - """{ type: NYM, dest: , verkey: }""" - updateIndyIdrWithVerkey(looper, wallet, client, fullKeyIdr, newFullKey) - updateWalletIdrWithFullVerkeySigner(wallet, fullKeyIdr, newFullKeySigner) - - -@pf -def newVerkeyFetched(didAddedWithFullVerkey, looper, trustAnchor, - trustAnchorWallet, fullKeyIdr, wallet): - """{ type: GET_NYM, dest: }""" - fetchFullVerkeyFromIndy(looper, trustAnchorWallet, trustAnchor, wallet, - 
fullKeyIdr) - - -def testAddDidWithVerkey(didAddedWithFullVerkey): +def testAddDidWithVerkey(nym_full_vk): pass -def testRetrieveFullVerkey(didAddedWithFullVerkey, looper, trustAnchor, - trustAnchorWallet, wallet, fullKeyIdr): - """{ type: GET_NYM, dest: }""" - identity = Identity(identifier=fullKeyIdr) - req = trustAnchorWallet.requestIdentity(identity, - sender=trustAnchorWallet.defaultId) - trustAnchor.submitReqs(req) +def testRetrieveFullVerkey(looper, tconf, nodeSet, sdk_pool_handle, sdk_wallet_trustee, nym_full_vk): + nwh, ndid, nvk = nym_full_vk + resp_data = nym_get(looper, sdk_pool_handle, sdk_wallet_trustee, ndid) + assert ndid == resp_data[0] + assert nvk == resp_data[1] - def chk(): - retrievedVerkey = trustAnchorWallet.getIdentity(fullKeyIdr).verkey - assert retrievedVerkey == wallet.getVerkey(fullKeyIdr) - checkFullVerkeySize(retrievedVerkey) - timeout = plenumWaits.expectedReqAckQuorumTime() - looper.run(eventually(chk, retryWait=1, timeout=timeout)) - chkVerifyForRetrievedIdentity(wallet, trustAnchorWallet, fullKeyIdr) +def testChangeVerkeyToNewVerkey(looper, tconf, nodeSet, sdk_pool_handle, nym_full_vk): + wh, did, nvk = nym_full_vk + new_verkey = sdk_rotate_verkey(looper, sdk_pool_handle, wh, did, did) + assert nvk != new_verkey -def testChangeVerkeyToNewVerkey(didUpdatedWithFullVerkey): - pass - - -def testRetrieveChangedVerkey(didUpdatedWithFullVerkey, newVerkeyFetched): - pass +def testRetrieveChangedVerkey(looper, tconf, nodeSet, sdk_pool_handle, sdk_wallet_trustee, nym_full_vk): + wh, did, vk = nym_full_vk + new_vk = sdk_rotate_verkey(looper, sdk_pool_handle, wh, did, did) + resp_data = nym_get(looper, sdk_pool_handle, sdk_wallet_trustee, did) + assert did == resp_data[0] + assert vk != resp_data[1] + assert new_vk == resp_data[1] -def testVerifySigWithChangedVerkey(didUpdatedWithFullVerkey, newVerkeyFetched, - trustAnchorWallet, fullKeyIdr, wallet): - chkVerifyForRetrievedIdentity(wallet, trustAnchorWallet, fullKeyIdr) +def 
testVerifySigWithChangedVerkey(looper, tconf, nodeSet, sdk_pool_handle, nym_full_vk): + wh, did, vk = nym_full_vk + new_vk = sdk_rotate_verkey(looper, sdk_pool_handle, wh, did, did) + # check sign by getting nym from ledger - if succ then sign is ok + resp_data = nym_get(looper, sdk_pool_handle, (wh, did), did) + assert did == resp_data[0] + assert vk != resp_data[1] + assert new_vk == resp_data[1] diff --git a/indy_node/test/did/test_did_with_no_verkey.py b/indy_node/test/did/test_did_with_no_verkey.py index 1357e9324..3735aa563 100644 --- a/indy_node/test/did/test_did_with_no_verkey.py +++ b/indy_node/test/did/test_did_with_no_verkey.py @@ -1,112 +1,39 @@ -""" -Empty verkey tests - Add a nym (16 byte, base58) without a verkey (Form 2). - { type: NYM, dest: } - Retrieve the verkey. - { type: GET_NYM, dest: } - Change verkey to new verkey (32 byte) - { type: NYM, dest: , verkey: } - Retrieve new verkey - { type: GET_NYM, dest: } - Verify a signature from this identifier with the new verkey +from indy_node.test.did.conftest import nym_get +from indy_node.test.helper import sdk_rotate_verkey -DID Objects tests - Store a DID object - Retrieve a DID object - Change a whole DID object - Update just a portion of a DID object -DID forms tests - Allow for identifiers that have the ‘did:indy:’ prefix - did:indy:<16 byte, base58> - Don’t store the prefix - Allow for identifiers that omit the ‘did:indy:’ prefix - <16 byte, base58> - Allow for legacy cryptonyms - Test that a 32-byte identifier is assumed to be a cryptonym, and the first 16 bytes are the identifier, and the last 16 bytes are the abbreviated verkey, and it is stored that way - Any other forms are rejected. 
-""" - -from stp_core.loop.eventually import eventually - -from indy_common.identity import Identity -from indy_node.test.did.conftest import pf -from indy_node.test.did.helper import chkVerifyForRetrievedIdentity, \ - updateIndyIdrWithVerkey -from indy_client.test.helper import createNym -from plenum.test import waits as plenumWaits - - -@pf -def didAddedWithoutVerkey( - addedTrustAnchor, - looper, - trustAnchor, - trustAnchorWallet, - wallet, - noKeyIdr): - """{ type: NYM, dest: }""" - createNym(looper, noKeyIdr, trustAnchor, trustAnchorWallet) - return wallet - - -@pf -def didUpdatedWithVerkey(didAddedWithoutVerkey, looper, trustAnchor, - trustAnchorWallet, noKeyIdr, wallet): - """{ type: NYM, dest: , verkey: }""" - updateIndyIdrWithVerkey(looper, trustAnchorWallet, trustAnchor, - noKeyIdr, wallet.getVerkey(noKeyIdr)) - - -@pf -def verkeyFetched(didUpdatedWithVerkey, looper, trustAnchor, trustAnchorWallet, - noKeyIdr, wallet): - """{ type: GET_NYM, dest: }""" - identity = Identity(identifier=noKeyIdr) - req = trustAnchorWallet.requestIdentity(identity, - sender=trustAnchorWallet.defaultId) - trustAnchor.submitReqs(req) - - def chk(): - assert trustAnchorWallet.getIdentity( - noKeyIdr).verkey == wallet.getVerkey(noKeyIdr) - - timeout = plenumWaits.expectedReqAckQuorumTime() - looper.run(eventually(chk, retryWait=1, timeout=timeout)) - - -def testWalletCanProvideAnIdentifierWithoutAKey(wallet, noKeyIdr): - # TODO, Question: Why would `getVerkey` return `None` for a DID?. 
- assert wallet.getVerkey(noKeyIdr) - - -def testAddDidWithoutAVerkey(didAddedWithoutVerkey): +def testAddDidWithoutAVerkey(nym_empty_vk): pass -def testRetrieveEmptyVerkey(didAddedWithoutVerkey, looper, trustAnchor, - trustAnchorWallet, noKeyIdr): - """{ type: GET_NYM, dest: }""" - identity = Identity(identifier=noKeyIdr) - req = trustAnchorWallet.requestIdentity(identity, - sender=trustAnchorWallet.defaultId) - trustAnchor.submitReqs(req) +def testRetrieveEmptyVerkey(looper, tconf, nodeSet, sdk_pool_handle, sdk_wallet_trustee, nym_empty_vk): + nwh, ndid = nym_empty_vk + resp_data = nym_get(looper, sdk_pool_handle, sdk_wallet_trustee, ndid) + assert ndid == resp_data[0] + assert not resp_data[1] - def chk(): - assert trustAnchorWallet.getIdentity(noKeyIdr).verkey is None - timeout = plenumWaits.expectedReqAckQuorumTime() - looper.run(eventually(chk, retryWait=1, timeout=timeout)) +def testChangeEmptyVerkeyToNewVerkey(looper, tconf, nodeSet, sdk_pool_handle, sdk_wallet_trustee, nym_empty_vk): + _, did = nym_empty_vk + trw, trd = sdk_wallet_trustee + new_vk = sdk_rotate_verkey(looper, sdk_pool_handle, trw, trd, did) + assert new_vk -def testChangeEmptyVerkeyToNewVerkey(didUpdatedWithVerkey): - pass - - -def testRetrieveChangedVerkey(didUpdatedWithVerkey, verkeyFetched): - pass +def testRetrieveChangedVerkey(looper, tconf, nodeSet, sdk_pool_handle, sdk_wallet_trustee, nym_empty_vk): + _, did = nym_empty_vk + trw, trd = sdk_wallet_trustee + new_vk = sdk_rotate_verkey(looper, sdk_pool_handle, trw, trd, did) + resp_data = nym_get(looper, sdk_pool_handle, sdk_wallet_trustee, did) + assert did == resp_data[0] + assert new_vk == resp_data[1] -def testVerifySigWithChangedVerkey(didUpdatedWithVerkey, verkeyFetched, - trustAnchorWallet, noKeyIdr, wallet): - chkVerifyForRetrievedIdentity(wallet, trustAnchorWallet, noKeyIdr) +def testVerifySigWithChangedVerkey(looper, tconf, nodeSet, sdk_pool_handle, sdk_wallet_trustee, nym_empty_vk): + wh, did = nym_empty_vk + trw, trd = 
sdk_wallet_trustee + new_vk = sdk_rotate_verkey(looper, sdk_pool_handle, trw, trd, did) + # check sign by getting nym from ledger - if succ then sign is ok + resp_data = nym_get(looper, sdk_pool_handle, (wh, did), did) + assert did == resp_data[0] + assert new_vk == resp_data[1] diff --git a/indy_node/test/helper.py b/indy_node/test/helper.py index 4a6c439d1..3c0cc2781 100644 --- a/indy_node/test/helper.py +++ b/indy_node/test/helper.py @@ -1,52 +1,27 @@ -import inspect import json -from contextlib import ExitStack -from typing import Iterable import base58 from indy.did import replace_keys_start, replace_keys_apply -from indy.ledger import build_attrib_request +from indy.ledger import build_attrib_request, build_get_attrib_request +from libnacl import randombytes from indy_common.config_helper import NodeConfigHelper -from plenum.common.constants import REQACK, TXN_ID, DATA +from plenum.common.signer_did import DidSigner +from plenum.common.signer_simple import SimpleSigner +from plenum.common.util import rawToFriendly from plenum.test.pool_transactions.helper import sdk_sign_and_send_prepared_request, sdk_add_new_nym -from plenum.common.txn_util import get_type, get_txn_id from stp_core.common.log import getlogger -from plenum.common.signer_simple import SimpleSigner -from plenum.common.util import getMaxFailures, runall, randomString -from plenum.test.helper import waitForSufficientRepliesForRequests, \ - checkLastClientReqForNode, buildCompletedTxnFromReply, sdk_get_and_check_replies -from plenum.test.test_node import checkNodesAreReady, TestNodeCore -from plenum.test.test_node import checkNodesConnected +from plenum.test.helper import sdk_get_and_check_replies +from plenum.test.test_node import TestNodeCore from plenum.test.testable import spyable -from plenum.test import waits as plenumWaits, waits -from indy_client.client.wallet.attribute import LedgerStore, Attribute -from indy_client.client.wallet.wallet import Wallet -from indy_client.test.helper import 
genTestClient, genTestClientProvider -from indy_common.constants import ATTRIB, TARGET_NYM, TXN_TYPE, GET_NYM from indy_common.test.helper import TempStorage from indy_node.server.node import Node from indy_node.server.upgrader import Upgrader -from stp_core.loop.eventually import eventually -from stp_core.loop.looper import Looper from stp_core.types import HA logger = getlogger() -class Organization: - def __init__(self, client=None): - self.client = client - self.wallet = Wallet(self.client) # created only once per organization - self.userWallets = {} # type: Dict[str, Wallet] - - def removeUserWallet(self, userId: str): - if userId in self.userWallets: - del self.userWallets[userId] - else: - raise ValueError("No wallet exists for this user id") - - @spyable(methods=[Upgrader.processLedger]) class TestUpgrader(Upgrader): pass @@ -104,92 +79,26 @@ def clientStackClass(self): return self.ClientStackClass -def checkSubmitted(looper, client, optype, txnsBefore): - txnsAfter = [] - - def checkTxnCountAdvanced(): - nonlocal txnsAfter - txnsAfter = client.getTxnsByType(optype) - logger.debug("old and new txns {} {}".format(txnsBefore, txnsAfter)) - assert len(txnsAfter) > len(txnsBefore) - - timeout = plenumWaits.expectedReqAckQuorumTime() - looper.run(eventually(checkTxnCountAdvanced, retryWait=1, - timeout=timeout)) - txnIdsBefore = [get_txn_id(txn) for txn in txnsBefore] - txnIdsAfter = [get_txn_id(txn) for txn in txnsAfter] - logger.debug("old and new txnids {} {}".format(txnIdsBefore, txnIdsAfter)) - return list(set(txnIdsAfter) - set(txnIdsBefore)) - - -def submitAndCheck(looper, client, wallet, op, identifier=None): - # TODO: This assumes every transaction will have an edge in graph, why? 
- # Fix this - optype = op[TXN_TYPE] - txnsBefore = client.getTxnsByType(optype) - req = wallet.signOp(op, identifier=identifier) - wallet.pendRequest(req) - reqs = wallet.preparePending() - client.submitReqs(*reqs) - return checkSubmitted(looper, client, optype, txnsBefore) - - -def makePendingTxnsRequest(client, wallet): - wallet.pendSyncRequests() - prepared = wallet.preparePending() - client.submitReqs(*prepared) - - -def makeGetNymRequest(client, wallet, nym): - op = { - TARGET_NYM: nym, - TXN_TYPE: GET_NYM, - } - req = wallet.signOp(op) - # TODO: This looks boilerplate - wallet.pendRequest(req) - reqs = wallet.preparePending() - return client.submitReqs(*reqs)[0] - - -def makeAttribRequest(client, wallet, attrib): - wallet.addAttribute(attrib) - # TODO: This looks boilerplate - reqs = wallet.preparePending() - return client.submitReqs(*reqs)[0] - - -def _newWallet(name=None): - signer = SimpleSigner() - w = Wallet(name or signer.identifier) - w.addIdentifier(signer=signer) - return w - - -def addAttributeAndCheck(looper, client, wallet, attrib): - old = wallet.pendingCount - pending = wallet.addAttribute(attrib) - assert pending == old + 1 - reqs = wallet.preparePending() - client.submitReqs(*reqs) - - def chk(): - assert wallet.getAttribute(attrib).seqNo is not None - - timeout = plenumWaits.expectedTransactionExecutionTime(client.totalNodes) - looper.run(eventually(chk, retryWait=1, timeout=timeout)) - return wallet.getAttribute(attrib).seqNo - - -def sdk_add_attribute_and_check(looper, sdk_pool_handle, sdk_wallet_handle, attrib, dest=None): +def sdk_add_attribute_and_check(looper, sdk_pool_handle, sdk_wallet_handle, attrib, + dest=None, xhash=None, enc=None): _, s_did = sdk_wallet_handle t_did = dest or s_did attrib_req = looper.loop.run_until_complete( - build_attrib_request(s_did, t_did, None, attrib, None)) + build_attrib_request(s_did, t_did, xhash, attrib, enc)) request_couple = sdk_sign_and_send_prepared_request(looper, sdk_wallet_handle, 
sdk_pool_handle, attrib_req) - sdk_get_and_check_replies(looper, [request_couple]) - return request_couple + rep = sdk_get_and_check_replies(looper, [request_couple]) + return rep + + +def sdk_get_attribute_and_check(looper, sdk_pool_handle, submitter_wallet, target_did, attrib_name): + _, submitter_did = submitter_wallet + req = looper.loop.run_until_complete( + build_get_attrib_request(submitter_did, target_did, attrib_name, None, None)) + request_couple = sdk_sign_and_send_prepared_request(looper, submitter_wallet, + sdk_pool_handle, req) + rep = sdk_get_and_check_replies(looper, [request_couple]) + return rep def sdk_add_raw_attribute(looper, sdk_pool_handle, sdk_wallet_handle, name, value): @@ -198,49 +107,6 @@ def sdk_add_raw_attribute(looper, sdk_pool_handle, sdk_wallet_handle, name, valu sdk_add_attribute_and_check(looper, sdk_pool_handle, sdk_wallet_handle, attrData) -def checkGetAttr(reqKey, trustAnchor, attrName, attrValue): - reply, status = trustAnchor.getReply(*reqKey) - assert reply - data = json.loads(reply.get(DATA)) - assert status == "CONFIRMED" and \ - (data is not None and data.get(attrName) == attrValue) - return reply - - -def getAttribute( - looper, - trustAnchor, - trustAnchorWallet, - userIdA, - attributeName, - attributeValue): - # Should be renamed to get_attribute_and_check - attrib = Attribute(name=attributeName, - value=None, - dest=userIdA, - ledgerStore=LedgerStore.RAW) - req = trustAnchorWallet.requestAttribute( - attrib, sender=trustAnchorWallet.defaultId) - trustAnchor.submitReqs(req) - timeout = waits.expectedTransactionExecutionTime(len(trustAnchor.nodeReg)) - return looper.run(eventually(checkGetAttr, (req.identifier, req.reqId), - trustAnchor, attributeName, attributeValue, - retryWait=1, timeout=timeout)) - - -def sdk_get_attribute(): - pass - - -def buildStewardClient(looper, tdir, stewardWallet): - s, _ = genTestClient(tmpdir=tdir, usePoolLedger=True) - s.registerObserver(stewardWallet.handleIncomingReply) - looper.add(s) 
- looper.run(s.ensureConnectedToNodes()) - makePendingTxnsRequest(s, stewardWallet) - return s - - base58_alphabet = set(base58.alphabet.decode("utf-8")) @@ -274,3 +140,36 @@ def start_stopped_node(stopped_node, looper, tconf, tdir, allPluginsPath): pluginPaths=allPluginsPath) looper.add(restarted_node) return restarted_node + + +def modify_field(string, value, *field_path): + d = json.loads(string) + prev = None + for i in range(0, len(field_path) - 1): + if prev is None: + prev = d[field_path[i]] + continue + prev = prev[field_path[i]] + if prev: + prev[field_path[-1]] = value + else: + d[field_path[-1]] = value + return json.dumps(d) + + +def createUuidIdentifier(): + return rawToFriendly(randombytes(16)) + + +def createHalfKeyIdentifierAndAbbrevVerkey(seed=None): + didSigner = DidSigner(seed=seed) + return didSigner.identifier, didSigner.verkey + + +def createCryptonym(seed=None): + return SimpleSigner(seed=seed).identifier + + +def createUuidIdentifierAndFullVerkey(seed=None): + didSigner = DidSigner(identifier=createUuidIdentifier(), seed=seed) + return didSigner.identifier, didSigner.verkey diff --git a/indy_client/agent/__init__.py b/indy_node/test/memory_debugging/__init__.py similarity index 100% rename from indy_client/agent/__init__.py rename to indy_node/test/memory_debugging/__init__.py diff --git a/indy_node/test/memory_debugging/test_memory_debugging.py b/indy_node/test/memory_debugging/test_memory_debugging.py new file mode 100644 index 000000000..8158b75d0 --- /dev/null +++ b/indy_node/test/memory_debugging/test_memory_debugging.py @@ -0,0 +1,270 @@ +import json +import logging +import types +from collections import OrderedDict +from typing import Any + +import sys + +import pytest +from stp_core.common.log import getlogger + +from plenum.common.constants import STEWARD_STRING +from plenum.common.util import randomString +from plenum.common.messages.node_messages import Commit + +from plenum.server.node import Node +from 
plenum.test.pool_transactions.helper import sdk_add_new_nym, prepare_nym_request, \ + sdk_sign_and_send_prepared_request +from plenum.test.helper import sdk_json_to_request_object +from pympler import asizeof + +max_depth = 10 + + +# Self made memory function. We can use it if we want to explore +# something specific. +def get_max(obj, seen=None, now_depth=0, path=str()): + if now_depth > max_depth: + return {} + dictionary = {(path, type(obj)): sys.getsizeof(obj)} + path += str(type(obj)) + ' ---> ' + if seen is None: + seen = set() + obj_id = id(obj) + if obj_id in seen: + return {} + seen.add(obj_id) + if isinstance(obj, dict): + vpath = path + 'value ---> ' + for d in [get_max(v, seen, now_depth + 1, vpath) for v in obj.values()]: + updater(dictionary, d) + kpath = path + 'key ---> ' + for d in [get_max(k, seen, now_depth + 1, kpath) for k in obj.keys()]: + updater(dictionary, d) + elif hasattr(obj, '__dict__'): + dpath = path + '__dict__ ---> ' + d = get_max(obj.__dict__, seen, now_depth + 1, dpath) + updater(dictionary, d) + elif hasattr(obj, '__iter__') and not isinstance(obj, (str, bytes, bytearray)): + ipath = path + '__iter__ ---> ' + for d in [get_max(i, seen, now_depth + 1, ipath) for i in obj]: + updater(dictionary, d) + return dictionary + + +def updater(store_d, new_d): + for k in new_d.keys(): + if k in store_d: + store_d[k] += int(new_d[k]) + else: + store_d[k] = new_d[k] + + +def dont_send_commit(self, msg: Any, *rids, signer=None, message_splitter=None): + if isinstance(msg, (Commit)): + if rids: + rids = [rid for rid in rids if rid not in self.nodestack.getRemote(self.ignore_node_name).uid] + else: + rids = [self.nodestack.getRemote(name).uid for name + in self.nodestack.remotes.keys() if name not in self.ignore_node_name] + self.old_send(msg, *rids, signer=signer, message_splitter=message_splitter) + + +def dont_send_commit_to(nodes, ignore_node_name): + for node in nodes: + if not hasattr(node, 'ignore_node_name'): + node.ignore_node_name = [] 
+ node.ignore_node_name.append(ignore_node_name) + node.old_send = types.MethodType(Node.send, node) + node.send = types.MethodType(dont_send_commit, node) + + +def reset_sending(nodes): + for node in nodes: + node.send = types.MethodType(Node.send, node) + + +def sdk_add_new_nym_without_waiting(looper, sdk_pool_handle, creators_wallet, + alias=None, role=None, seed=None, + dest=None, verkey=None, skipverkey=False): + seed = seed or randomString(32) + alias = alias or randomString(5) + wh, _ = creators_wallet + + nym_request, new_did = looper.loop.run_until_complete( + prepare_nym_request(creators_wallet, seed, + alias, role, dest, verkey, skipverkey)) + sdk_sign_and_send_prepared_request(looper, creators_wallet, + sdk_pool_handle, nym_request) + + +# Pytest logger is heavy, so we exclude it +@pytest.fixture +def logger(): + logger = getlogger() + old_value = logger.getEffectiveLevel() + logger.root.setLevel(logging.CRITICAL) + yield logger + logger.root.setLevel(old_value) + + +@pytest.mark.skip('Unskip if you need to debug') +def test_memory_debugging(looper, + nodeSet, + sdk_wallet_trust_anchor, + sdk_pool_handle, + logger): + # Settings + requests_count = 500 + file_name = '.memory_data.txt' + + # Sets for emulating commits problems + set1 = list(nodeSet) + set1.remove(nodeSet[0]) + set2 = list(nodeSet) + set2.remove(nodeSet[1]) + set3 = list(nodeSet) + set3.remove(nodeSet[2]) + primary = nodeSet[0] + + memory_dicts = OrderedDict() + + memory_dicts['After starting'] = asizeof.asized(primary, detail=15) + + while primary.master_replica.lastPrePrepareSeqNo < requests_count: + sdk_add_new_nym(looper, sdk_pool_handle, sdk_wallet_trust_anchor) + + memory_dicts['After ordering'] = asizeof.asized(primary, detail=15) + + # Emulate commit sending problems + dont_send_commit_to(set1, nodeSet[0].name) + dont_send_commit_to(set2, nodeSet[1].name) + dont_send_commit_to(set3, nodeSet[2].name) + + # Sending requests until nodes generate `unordered_requests_count` 3pc batches 
+ while primary.master_replica.lastPrePrepareSeqNo < requests_count * 2: + sdk_add_new_nym_without_waiting(looper, sdk_pool_handle, sdk_wallet_trust_anchor) + + memory_dicts['After {} unordered'.format(requests_count)] = asizeof.asized(primary, detail=15) + + # Remove commit problems + reset_sending(set1) + reset_sending(set2) + reset_sending(set3) + + # primary ask for commits + for i in range(primary.master_replica.last_ordered_3pc[1], primary.master_replica.lastPrePrepareSeqNo): + primary.replicas._replicas.values()[0]._request_commit((0, i)) + for i in range(primary.replicas._replicas.values()[1].last_ordered_3pc[1], + primary.replicas._replicas.values()[1].lastPrePrepareSeqNo): + primary.replicas._replicas.values()[1]._request_commit((0, i)) + looper.runFor(5) + + memory_dicts['After {} ordered'.format(requests_count)] = asizeof.asized(primary, detail=15) + + # primary clear queues + primary.replicas._replicas.values()[0]._gc(primary.replicas._replicas.values()[0].last_ordered_3pc) + primary.replicas._replicas.values()[1]._gc(primary.replicas._replicas.values()[1].last_ordered_3pc) + + memory_dicts['After _gc called'] = asizeof.asized(primary, detail=15) + + # Emulate problems again + dont_send_commit_to(set1, nodeSet[0].name) + dont_send_commit_to(set2, nodeSet[1].name) + dont_send_commit_to(set3, nodeSet[2].name) + + while primary.master_replica.lastPrePrepareSeqNo < requests_count * 3: + sdk_add_new_nym_without_waiting(looper, sdk_pool_handle, sdk_wallet_trust_anchor) + + memory_dicts['After {} unordered again'.format(requests_count)] = asizeof.asized(primary, detail=15) + + # Remove commit problems + reset_sending(set1) + reset_sending(set2) + reset_sending(set3) + + for i in range(primary.master_replica.last_ordered_3pc[1], primary.master_replica.lastPrePrepareSeqNo): + primary.replicas._replicas.values()[0]._request_commit((0, i)) + for i in range(primary.replicas._replicas.values()[1].last_ordered_3pc[1], + 
primary.replicas._replicas.values()[1].lastPrePrepareSeqNo): + primary.replicas._replicas.values()[1]._request_commit((0, i)) + looper.runFor(5) + + memory_dicts['After {} ordered again'.format(requests_count)] = asizeof.asized(primary, detail=15) + + primary.replicas._replicas.values()[0]._gc(primary.replicas._replicas.values()[0].last_ordered_3pc) + primary.replicas._replicas.values()[1]._gc(primary.replicas._replicas.values()[1].last_ordered_3pc) + + memory_dicts['After _gc called again'] = asizeof.asized(primary, detail=15) + + file = open(file_name, 'w') + indent = 75 + for k, size_obj in memory_dicts.items(): + # Formatting + header = str(k) + ': {}'.format(size_obj.size) + ' bytes. Detailed size:' + if len(header) < indent: + header += ' ' * (indent - len(header)) + file.write(header) + + size_obj = next(r for r in size_obj.refs if r.name == '__dict__') + # Sort in descending order to select most 'heavy' collections + for num, sub_obj in enumerate(sorted(size_obj.refs, key=lambda v: v.size, reverse=True)): + if num > 10: + break + file.write('[{} : {}], '.format(sub_obj.name, sub_obj.size)) + file.write('\n') + file.close() + + +@pytest.mark.skip('Unskip if you need to debug') +def test_requests_collection_debugging(looper, + nodeSet, + sdk_wallet_trustee): + primary = nodeSet[0] + + seed = randomString(32) + alias = randomString(5) + wh, _ = sdk_wallet_trustee + nym_request, new_did = looper.loop.run_until_complete( + prepare_nym_request(sdk_wallet_trustee, seed, + alias, STEWARD_STRING)) + + nym_request = json.loads(nym_request) + a = sys.getsizeof(primary.requests) + + mas = [] + for _ in range(50000): + req = sdk_json_to_request_object(nym_request) + req.reqId = randomString(32) + mas.append(req) + primary.requests.add_propagate(req, 'asd') + primary.requests.mark_as_forwarded(req, 2) + primary.requests.set_finalised(req) + + b = sys.getsizeof(primary.requests) + lb = len(primary.requests) + + for req in mas: + primary.requests.mark_as_executed(req) + 
primary.requests.free(req.key) + primary.requests.free(req.key) + + c = sys.getsizeof(primary.requests) + lc = len(primary.requests) + + for _ in range(100000): + req = sdk_json_to_request_object(nym_request) + req.reqId = randomString(32) + mas.append(req) + primary.requests.add_propagate(req, 'asd') + primary.requests.mark_as_forwarded(req, 2) + primary.requests.set_finalised(req) + + d = sys.getsizeof(primary.requests) + ld = len(primary.requests) + + print(a) + print(b, lb) + print(c, lc) + print(d, ld) diff --git a/indy_client/anon_creds/__init__.py b/indy_node/test/node_txn/__init__.py similarity index 100% rename from indy_client/anon_creds/__init__.py rename to indy_node/test/node_txn/__init__.py diff --git a/indy_node/test/node_txn/test_send_node_validation.py b/indy_node/test/node_txn/test_send_node_validation.py new file mode 100644 index 000000000..32d4e19f8 --- /dev/null +++ b/indy_node/test/node_txn/test_send_node_validation.py @@ -0,0 +1,559 @@ +import json +import pytest + +from plenum.common.constants import NODE_IP, NODE_PORT, CLIENT_IP, CLIENT_PORT, ALIAS, VALIDATOR, SERVICES +from plenum.common.util import cryptonymToHex, hexToFriendly +from plenum.common.exceptions import RequestNackedException, RequestRejectedException + +from plenum.test.helper import sdk_get_and_check_replies, sdk_get_bad_response, sdk_sign_request_strings, \ + sdk_send_signed_requests +from plenum.test.pool_transactions.helper import sdk_add_new_nym, prepare_node_request, \ + sdk_sign_and_send_prepared_request + + +@pytest.fixture(scope='function') +def node_request(looper, sdk_node_theta_added): + sdk_steward_wallet, node = sdk_node_theta_added + node_dest = hexToFriendly(node.nodestack.verhex) + wh, did = sdk_steward_wallet + node_request = looper.loop.run_until_complete( + prepare_node_request(did, node.name, destination=node_dest, + nodeIp=node.nodestack.ha[0], + nodePort=node.nodestack.ha[1], + clientIp=node.clientstack.ha[0], + clientPort=node.clientstack.ha[1])) + 
return json.loads(node_request) + + +def ensurePoolIsOperable(looper, sdk_pool_handle, sdk_wallet_creator): + sdk_add_new_nym(looper, sdk_pool_handle, sdk_wallet_creator) + + +def testSendNodeFailsIfDestIsShortReadableName( + looper, sdk_pool_handle, nodeSet, sdk_node_theta_added, node_request): + node_request['operation']['dest'] = 'TheNewNode' + steward_wallet, node = sdk_node_theta_added + request_couple = sdk_sign_and_send_prepared_request(looper, steward_wallet, + sdk_pool_handle, + json.dumps(node_request)) + sdk_get_bad_response(looper, [request_couple], RequestNackedException, + 'b58 decoded value length 8 should be one of [16, 32]') + ensurePoolIsOperable(looper, sdk_pool_handle, steward_wallet) + + +def testSendNodeFailsIfDestIsHexKey( + looper, sdk_pool_handle, nodeSet, sdk_node_theta_added, node_request): + node_request['operation']['dest'] = cryptonymToHex( + node_request['operation']['dest']).decode() + "0" + steward_wallet, node = sdk_node_theta_added + request_couple = sdk_sign_and_send_prepared_request(looper, steward_wallet, + sdk_pool_handle, + json.dumps(node_request)) + sdk_get_bad_response(looper, [request_couple], RequestNackedException, + 'should not contain the following chars') + ensurePoolIsOperable(looper, sdk_pool_handle, steward_wallet) + + +def testSendNodeHasInvalidSyntaxIfDestIsEmpty( + looper, sdk_pool_handle, nodeSet, sdk_node_theta_added, node_request): + node_request['operation']['dest'] = '' + steward_wallet, node = sdk_node_theta_added + request_couple = sdk_sign_and_send_prepared_request(looper, steward_wallet, + sdk_pool_handle, + json.dumps(node_request)) + sdk_get_bad_response(looper, [request_couple], RequestNackedException, + 'b58 decoded value length 0 should be one of [16, 32]') + ensurePoolIsOperable(looper, sdk_pool_handle, steward_wallet) + + +def testSendNodeHasInvalidSyntaxIfDestIsMissed( + looper, sdk_pool_handle, nodeSet, sdk_node_theta_added, node_request): + del node_request['operation']['dest'] + 
steward_wallet, node = sdk_node_theta_added + request_couple = sdk_sign_and_send_prepared_request(looper, steward_wallet, + sdk_pool_handle, + json.dumps(node_request)) + sdk_get_bad_response(looper, [request_couple], RequestNackedException, + 'missed fields - dest') + ensurePoolIsOperable(looper, sdk_pool_handle, steward_wallet) + + +def testSendNodeFailsIfNodeIpContainsLeadingSpace( + looper, sdk_pool_handle, nodeSet, sdk_node_theta_added, node_request): + node_request['operation']['data'][NODE_IP] = ' 122.62.52.13' + steward_wallet, node = sdk_node_theta_added + request_couple = sdk_sign_and_send_prepared_request(looper, steward_wallet, + sdk_pool_handle, + json.dumps(node_request)) + sdk_get_bad_response(looper, [request_couple], RequestNackedException, + 'invalid network ip address') + ensurePoolIsOperable(looper, sdk_pool_handle, steward_wallet) + + +def testSendNodeFailsIfNodeIpContainsTrailingSpace( + looper, sdk_pool_handle, nodeSet, sdk_node_theta_added, node_request): + node_request['operation']['data'][NODE_IP] = '122.62.52.13 ' + steward_wallet, node = sdk_node_theta_added + request_couple = sdk_sign_and_send_prepared_request(looper, steward_wallet, + sdk_pool_handle, + json.dumps(node_request)) + + sdk_get_bad_response(looper, [request_couple], RequestNackedException, + 'invalid network ip address') + ensurePoolIsOperable(looper, sdk_pool_handle, steward_wallet) + + +def testSendNodeFailsIfNodeIpHasWrongFormat( + looper, sdk_pool_handle, nodeSet, sdk_node_theta_added, node_request): + node_request['operation']['data'][NODE_IP] = '122.62.52' + steward_wallet, node = sdk_node_theta_added + request_couple = sdk_sign_and_send_prepared_request(looper, steward_wallet, + sdk_pool_handle, + json.dumps(node_request)) + sdk_get_bad_response(looper, [request_couple], RequestNackedException, + 'invalid network ip address') + ensurePoolIsOperable(looper, sdk_pool_handle, steward_wallet) + + +def testSendNodeFailsIfSomeNodeIpComponentsAreNegative( + looper, 
sdk_pool_handle, nodeSet, sdk_node_theta_added, node_request): + node_request['operation']['data'][NODE_IP] = '122.-1.52.13' + steward_wallet, node = sdk_node_theta_added + request_couple = sdk_sign_and_send_prepared_request(looper, steward_wallet, + sdk_pool_handle, + json.dumps(node_request)) + sdk_get_bad_response(looper, [request_couple], RequestNackedException, + 'invalid network ip address') + ensurePoolIsOperable(looper, sdk_pool_handle, steward_wallet) + + +def testSendNodeFailsIfSomeNodeIpComponentsAreHigherThanUpperBound( + looper, sdk_pool_handle, nodeSet, sdk_node_theta_added, node_request): + node_request['operation']['data'][NODE_IP] = '122.62.256.13' + steward_wallet, node = sdk_node_theta_added + request_couple = sdk_sign_and_send_prepared_request(looper, steward_wallet, + sdk_pool_handle, + json.dumps(node_request)) + sdk_get_bad_response(looper, [request_couple], RequestNackedException, + 'invalid network ip address') + ensurePoolIsOperable(looper, sdk_pool_handle, steward_wallet) + + +def testSendNodeFailsIfNodeIpIsEmpty( + looper, sdk_pool_handle, nodeSet, sdk_node_theta_added, node_request): + node_request['operation']['data'][NODE_IP] = '' + steward_wallet, node = sdk_node_theta_added + request_couple = sdk_sign_and_send_prepared_request(looper, steward_wallet, + sdk_pool_handle, + json.dumps(node_request)) + sdk_get_bad_response(looper, [request_couple], RequestNackedException, + 'invalid network ip address') + ensurePoolIsOperable(looper, sdk_pool_handle, steward_wallet) + + +def testSendNodeFailsIfNodeIpIsMissed( + looper, sdk_pool_handle, nodeSet, sdk_node_theta_added, node_request): + del node_request['operation']['data'][NODE_IP] + steward_wallet, node = sdk_node_theta_added + request_couple = sdk_sign_and_send_prepared_request(looper, steward_wallet, + sdk_pool_handle, + json.dumps(node_request)) + sdk_get_bad_response(looper, [request_couple], RequestNackedException, + 'missed fields - node_ip') + ensurePoolIsOperable(looper, 
sdk_pool_handle, steward_wallet) + + +def testSendNodeFailsIfNodePortIsNegative( + looper, sdk_pool_handle, nodeSet, sdk_node_theta_added, node_request): + node_request['operation']['data'][NODE_PORT] = -1 + steward_wallet, node = sdk_node_theta_added + request_couple = sdk_sign_and_send_prepared_request(looper, steward_wallet, + sdk_pool_handle, + json.dumps(node_request)) + sdk_get_bad_response(looper, [request_couple], RequestNackedException, + 'network port out of the range 0-65535') + ensurePoolIsOperable(looper, sdk_pool_handle, steward_wallet) + + +def testSendNodeFailsIfNodePortIsHigherThanUpperBound( + looper, sdk_pool_handle, nodeSet, sdk_node_theta_added, node_request): + node_request['operation']['data'][NODE_PORT] = 65536 + steward_wallet, node = sdk_node_theta_added + request_couple = sdk_sign_and_send_prepared_request(looper, steward_wallet, + sdk_pool_handle, + json.dumps(node_request)) + sdk_get_bad_response(looper, [request_couple], RequestNackedException, + 'network port out of the range 0-65535') + ensurePoolIsOperable(looper, sdk_pool_handle, steward_wallet) + + +def testSendNodeFailsIfNodePortIsFloat( + looper, sdk_pool_handle, nodeSet, sdk_node_theta_added, node_request): + node_request['operation']['data'][NODE_PORT] = 5555.5 + steward_wallet, node = sdk_node_theta_added + request_couple = sdk_sign_and_send_prepared_request(looper, steward_wallet, + sdk_pool_handle, + json.dumps(node_request)) + sdk_get_bad_response(looper, [request_couple], RequestNackedException, + 'expected types') + ensurePoolIsOperable(looper, sdk_pool_handle, steward_wallet) + + +def testSendNodeFailsIfNodePortHasWrongFormat( + looper, sdk_pool_handle, nodeSet, sdk_node_theta_added, node_request): + node_request['operation']['data'][NODE_PORT] = 'ninety' + steward_wallet, node = sdk_node_theta_added + request_couple = sdk_sign_and_send_prepared_request(looper, steward_wallet, + sdk_pool_handle, + json.dumps(node_request)) + sdk_get_bad_response(looper, 
[request_couple], RequestNackedException, + 'expected types') + ensurePoolIsOperable(looper, sdk_pool_handle, steward_wallet) + + +def testSendNodeFailsIfNodePortIsEmpty( + looper, sdk_pool_handle, nodeSet, sdk_node_theta_added, node_request): + node_request['operation']['data'][NODE_PORT] = '' + steward_wallet, node = sdk_node_theta_added + request_couple = sdk_sign_and_send_prepared_request(looper, steward_wallet, + sdk_pool_handle, + json.dumps(node_request)) + sdk_get_bad_response(looper, [request_couple], RequestNackedException, + 'expected types ') + ensurePoolIsOperable(looper, sdk_pool_handle, steward_wallet) + + +def testSendNodeFailsIfNodePortIsMissed( + looper, sdk_pool_handle, nodeSet, sdk_node_theta_added, node_request): + del node_request['operation']['data'][NODE_PORT] + steward_wallet, node = sdk_node_theta_added + request_couple = sdk_sign_and_send_prepared_request(looper, steward_wallet, + sdk_pool_handle, + json.dumps(node_request)) + sdk_get_bad_response(looper, [request_couple], RequestNackedException, + 'missed fields - node_port') + ensurePoolIsOperable(looper, sdk_pool_handle, steward_wallet) + + +def testSendNodeFailsIfClientIpContainsLeadingSpace( + looper, sdk_pool_handle, nodeSet, sdk_node_theta_added, node_request): + node_request['operation']['data'][CLIENT_IP] = ' 122.62.52.13' + steward_wallet, node = sdk_node_theta_added + request_couple = sdk_sign_and_send_prepared_request(looper, steward_wallet, + sdk_pool_handle, + json.dumps(node_request)) + sdk_get_bad_response(looper, [request_couple], RequestNackedException, + 'invalid network ip address') + ensurePoolIsOperable(looper, sdk_pool_handle, steward_wallet) + + +def testSendNodeFailsIfClientIpContainsTrailingSpace( + looper, sdk_pool_handle, nodeSet, sdk_node_theta_added, node_request): + node_request['operation']['data'][CLIENT_IP] = '122.62.52.13 ' + steward_wallet, node = sdk_node_theta_added + request_couple = sdk_sign_and_send_prepared_request(looper, steward_wallet, + 
sdk_pool_handle, + json.dumps(node_request)) + sdk_get_bad_response(looper, [request_couple], RequestNackedException, + 'invalid network ip address') + ensurePoolIsOperable(looper, sdk_pool_handle, steward_wallet) + + +def testSendNodeFailsIfClientIpHasWrongFormat( + looper, sdk_pool_handle, nodeSet, sdk_node_theta_added, node_request): + node_request['operation']['data'][CLIENT_IP] = '122.62.52' + steward_wallet, node = sdk_node_theta_added + request_couple = sdk_sign_and_send_prepared_request(looper, steward_wallet, + sdk_pool_handle, + json.dumps(node_request)) + sdk_get_bad_response(looper, [request_couple], RequestNackedException, + 'invalid network ip address') + ensurePoolIsOperable(looper, sdk_pool_handle, steward_wallet) + + +def testSendNodeFailsIfSomeClientIpComponentsAreNegative( + looper, sdk_pool_handle, nodeSet, sdk_node_theta_added, node_request): + node_request['operation']['data'][CLIENT_IP] = '122.-1.52.13' + steward_wallet, node = sdk_node_theta_added + request_couple = sdk_sign_and_send_prepared_request(looper, steward_wallet, + sdk_pool_handle, + json.dumps(node_request)) + sdk_get_bad_response(looper, [request_couple], RequestNackedException, + 'invalid network ip address') + ensurePoolIsOperable(looper, sdk_pool_handle, steward_wallet) + + +def testSendNodeFailsIfSomeClientIpComponentsAreHigherThanUpperBound( + looper, sdk_pool_handle, nodeSet, sdk_node_theta_added, node_request): + node_request['operation']['data'][CLIENT_IP] = '122.62.256.13' + steward_wallet, node = sdk_node_theta_added + request_couple = sdk_sign_and_send_prepared_request(looper, steward_wallet, + sdk_pool_handle, + json.dumps(node_request)) + sdk_get_bad_response(looper, [request_couple], RequestNackedException, + 'invalid network ip address') + ensurePoolIsOperable(looper, sdk_pool_handle, steward_wallet) + + +def testSendNodeFailsIfClientIpIsEmpty( + looper, sdk_pool_handle, nodeSet, sdk_node_theta_added, node_request): + node_request['operation']['data'][CLIENT_IP] = 
'' + steward_wallet, node = sdk_node_theta_added + request_couple = sdk_sign_and_send_prepared_request(looper, steward_wallet, + sdk_pool_handle, + json.dumps(node_request)) + sdk_get_bad_response(looper, [request_couple], RequestNackedException, + 'invalid network ip address') + ensurePoolIsOperable(looper, sdk_pool_handle, steward_wallet) + + +def testSendNodeFailsIfClientIpIsMissed( + looper, sdk_pool_handle, nodeSet, sdk_node_theta_added, node_request): + del node_request['operation']['data'][CLIENT_IP] + steward_wallet, node = sdk_node_theta_added + request_couple = sdk_sign_and_send_prepared_request(looper, steward_wallet, + sdk_pool_handle, + json.dumps(node_request)) + sdk_get_bad_response(looper, [request_couple], RequestNackedException, + 'missed fields - client_ip') + ensurePoolIsOperable(looper, sdk_pool_handle, steward_wallet) + + +def testSendNodeFailsIfClientPortIsNegative( + looper, sdk_pool_handle, nodeSet, sdk_node_theta_added, node_request): + node_request['operation']['data'][CLIENT_PORT] = -1 + steward_wallet, node = sdk_node_theta_added + request_couple = sdk_sign_and_send_prepared_request(looper, steward_wallet, + sdk_pool_handle, + json.dumps(node_request)) + sdk_get_bad_response(looper, [request_couple], RequestNackedException, + 'network port out of the range 0-65535') + ensurePoolIsOperable(looper, sdk_pool_handle, steward_wallet) + + +def testSendNodeFailsIfClientPortIsHigherThanUpperBound( + looper, sdk_pool_handle, nodeSet, sdk_node_theta_added, node_request): + node_request['operation']['data'][CLIENT_PORT] = 65536 + steward_wallet, node = sdk_node_theta_added + request_couple = sdk_sign_and_send_prepared_request(looper, steward_wallet, + sdk_pool_handle, + json.dumps(node_request)) + sdk_get_bad_response(looper, [request_couple], RequestNackedException, + 'network port out of the range 0-65535') + ensurePoolIsOperable(looper, sdk_pool_handle, steward_wallet) + + +def testSendNodeFailsIfClientPortIsFloat( + looper, sdk_pool_handle, 
nodeSet, sdk_node_theta_added, node_request): + node_request['operation']['data'][CLIENT_PORT] = 5555.5 + steward_wallet, node = sdk_node_theta_added + request_couple = sdk_sign_and_send_prepared_request(looper, steward_wallet, + sdk_pool_handle, + json.dumps(node_request)) + sdk_get_bad_response(looper, [request_couple], RequestNackedException, + 'expected types') + ensurePoolIsOperable(looper, sdk_pool_handle, steward_wallet) + + +def testSendNodeFailsIfClientPortHasWrongFormat( + looper, sdk_pool_handle, nodeSet, sdk_node_theta_added, node_request): + node_request['operation']['data'][CLIENT_PORT] = 'ninety' + steward_wallet, node = sdk_node_theta_added + request_couple = sdk_sign_and_send_prepared_request(looper, steward_wallet, + sdk_pool_handle, + json.dumps(node_request)) + sdk_get_bad_response(looper, [request_couple], RequestNackedException, + 'expected types') + ensurePoolIsOperable(looper, sdk_pool_handle, steward_wallet) + + +def testSendNodeFailsIfClientPortIsEmpty( + looper, sdk_pool_handle, nodeSet, sdk_node_theta_added, node_request): + node_request['operation']['data'][CLIENT_PORT] = '' + steward_wallet, node = sdk_node_theta_added + request_couple = sdk_sign_and_send_prepared_request(looper, steward_wallet, + sdk_pool_handle, + json.dumps(node_request)) + sdk_get_bad_response(looper, [request_couple], RequestNackedException, + 'expected types') + ensurePoolIsOperable(looper, sdk_pool_handle, steward_wallet) + + +def testSendNodeFailsIfClientPortIsMissed( + looper, sdk_pool_handle, nodeSet, sdk_node_theta_added, node_request): + del node_request['operation']['data'][CLIENT_PORT] + steward_wallet, node = sdk_node_theta_added + request_couple = sdk_sign_and_send_prepared_request(looper, steward_wallet, + sdk_pool_handle, + json.dumps(node_request)) + sdk_get_bad_response(looper, [request_couple], RequestNackedException, + 'missed fields - client_port') + ensurePoolIsOperable(looper, sdk_pool_handle, steward_wallet) + + +def 
testSendNodeFailsIfAliasIsEmpty( + looper, sdk_pool_handle, nodeSet, sdk_node_theta_added, node_request): + node_request['operation']['data'][ALIAS] = '' + steward_wallet, node = sdk_node_theta_added + request_couple = sdk_sign_and_send_prepared_request(looper, steward_wallet, + sdk_pool_handle, + json.dumps(node_request)) + sdk_get_bad_response(looper, [request_couple], RequestNackedException, + 'empty string') + ensurePoolIsOperable(looper, sdk_pool_handle, steward_wallet) + + +def testSendNodeFailsIfAliasIsMissed( + looper, sdk_pool_handle, nodeSet, sdk_node_theta_added, node_request): + del node_request['operation']['data'][ALIAS] + steward_wallet, node = sdk_node_theta_added + request_couple = sdk_sign_and_send_prepared_request(looper, steward_wallet, + sdk_pool_handle, + json.dumps(node_request)) + sdk_get_bad_response(looper, [request_couple], RequestNackedException, + 'missed fields ') + ensurePoolIsOperable(looper, sdk_pool_handle, steward_wallet) + + +def testSendNodeFailsIfServicesContainsUnknownValue( + looper, sdk_pool_handle, nodeSet, sdk_node_theta_added, node_request): + node_request['operation']['data'][SERVICES] = [VALIDATOR, 'DECIDER'] + steward_wallet, node = sdk_node_theta_added + request_couple = sdk_sign_and_send_prepared_request(looper, steward_wallet, + sdk_pool_handle, + json.dumps(node_request)) + sdk_get_bad_response(looper, [request_couple], RequestNackedException, + 'unknown value') + ensurePoolIsOperable(looper, sdk_pool_handle, steward_wallet) + + +def testSendNodeFailsIfServicesIsValidatorValue( + looper, sdk_pool_handle, nodeSet, sdk_node_theta_added, node_request): + node_request['operation']['data'][SERVICES] = VALIDATOR # just string, not array + steward_wallet, node = sdk_node_theta_added + request_couple = sdk_sign_and_send_prepared_request(looper, steward_wallet, + sdk_pool_handle, + json.dumps(node_request)) + sdk_get_bad_response(looper, [request_couple], RequestNackedException, + 'expected types') + 
ensurePoolIsOperable(looper, sdk_pool_handle, steward_wallet) + + +def testSendNodeFailsIfServicesIsEmptyString( + looper, sdk_pool_handle, nodeSet, sdk_node_theta_added, node_request): + node_request['operation']['data'][SERVICES] = '' + steward_wallet, node = sdk_node_theta_added + request_couple = sdk_sign_and_send_prepared_request(looper, steward_wallet, + sdk_pool_handle, + json.dumps(node_request)) + sdk_get_bad_response(looper, [request_couple], RequestNackedException, + 'expected types') + ensurePoolIsOperable(looper, sdk_pool_handle, steward_wallet) + + +def testSendNodeSuccessIfDataContainsUnknownField( + looper, sdk_pool_handle, nodeSet, sdk_node_theta_added, node_request): + node_request['operation']['data'][SERVICES] = [] + node_request['operation']['data']['extra'] = 42 + steward_wallet, node = sdk_node_theta_added + request_couple = sdk_sign_and_send_prepared_request(looper, steward_wallet, + sdk_pool_handle, + json.dumps(node_request)) + sdk_get_bad_response(looper, [request_couple], RequestRejectedException, + 'not found in authorized map') + ensurePoolIsOperable(looper, sdk_pool_handle, steward_wallet) + + +def testSendNodeFailsIfDataIsEmptyJson( + looper, sdk_pool_handle, nodeSet, sdk_node_theta_added, node_request): + node_request['operation']['data'] = {} + steward_wallet, node = sdk_node_theta_added + request_couple = sdk_sign_and_send_prepared_request(looper, steward_wallet, + sdk_pool_handle, + json.dumps(node_request)) + sdk_get_bad_response(looper, [request_couple], RequestNackedException, + 'missed fields ') + ensurePoolIsOperable(looper, sdk_pool_handle, steward_wallet) + + +def testSendNodeFailsIfDataIsBrokenJson( + looper, sdk_pool_handle, nodeSet, sdk_node_theta_added, node_request): + node_request['operation']['data'] = "{'node_ip': '10.0.0.105', 'node_port': 9701" + steward_wallet, node = sdk_node_theta_added + request_couple = sdk_sign_and_send_prepared_request(looper, steward_wallet, + sdk_pool_handle, + json.dumps(node_request)) 
+ sdk_get_bad_response(looper, [request_couple], RequestNackedException, + 'invalid type') + ensurePoolIsOperable(looper, sdk_pool_handle, steward_wallet) + + +def testSendNodeFailsIfDataIsNotJson( + looper, sdk_pool_handle, nodeSet, sdk_node_theta_added, node_request): + node_request['operation']['data'] = 'not_json' + steward_wallet, node = sdk_node_theta_added + request_couple = sdk_sign_and_send_prepared_request(looper, steward_wallet, + sdk_pool_handle, + json.dumps(node_request)) + sdk_get_bad_response(looper, [request_couple], RequestNackedException, + 'invalid type') + ensurePoolIsOperable(looper, sdk_pool_handle, steward_wallet) + + +def testSendNodeHasInvalidSyntaxIfDataIsEmptyString( + looper, sdk_pool_handle, nodeSet, sdk_node_theta_added, node_request): + node_request['operation']['data'] = '' + steward_wallet, node = sdk_node_theta_added + request_couple = sdk_sign_and_send_prepared_request(looper, steward_wallet, + sdk_pool_handle, + json.dumps(node_request)) + sdk_get_bad_response(looper, [request_couple], RequestNackedException, + 'invalid type') + ensurePoolIsOperable(looper, sdk_pool_handle, steward_wallet) + + +def testSendNodeHasInvalidSyntaxIfDataIsMissed( + looper, sdk_pool_handle, nodeSet, sdk_node_theta_added, node_request): + del node_request['operation']['data'] + steward_wallet, node = sdk_node_theta_added + request_couple = sdk_sign_and_send_prepared_request(looper, steward_wallet, + sdk_pool_handle, + json.dumps(node_request)) + sdk_get_bad_response(looper, [request_couple], RequestNackedException, + 'missed fields') + ensurePoolIsOperable(looper, sdk_pool_handle, steward_wallet) + + +@pytest.mark.skip(reason='INDY-1864') +def testSendNodeHasInvalidSyntaxIfUnknownParameterIsPassed( + looper, sdk_pool_handle, nodeSet, sdk_node_theta_added, node_request): + node_request['operation']['albus'] = 'severus' + steward_wallet, node = sdk_node_theta_added + signed_reqs = sdk_sign_request_strings(looper, steward_wallet, [node_request]) + 
request_couple = sdk_send_signed_requests(sdk_pool_handle, signed_reqs)[0] + sdk_get_and_check_replies(looper, [request_couple]) + ensurePoolIsOperable(looper, sdk_pool_handle, steward_wallet) + + +def testSendNodeHasInvalidSyntaxIfAllParametersAreMissed( + looper, sdk_pool_handle, nodeSet, sdk_node_theta_added, node_request): + for f in node_request['operation'].keys(): + node_request['operation'][f] = '' + steward_wallet, node = sdk_node_theta_added + request_couple = sdk_sign_and_send_prepared_request(looper, steward_wallet, + sdk_pool_handle, + json.dumps(node_request)) + sdk_get_bad_response(looper, [request_couple], RequestNackedException, + 'invalid type') + ensurePoolIsOperable(looper, sdk_pool_handle, steward_wallet) + + +def testSendNodeSucceedsIfServicesIsMissed( + looper, sdk_pool_handle, nodeSet, sdk_node_theta_added, node_request): + del node_request['operation']['data'][SERVICES] + steward_wallet, node = sdk_node_theta_added + request_couple = sdk_sign_and_send_prepared_request(looper, steward_wallet, + sdk_pool_handle, + json.dumps(node_request)) + sdk_get_and_check_replies(looper, [request_couple]) + ensurePoolIsOperable(looper, sdk_pool_handle, steward_wallet) diff --git a/indy_node/test/nym_txn/test_nym.py b/indy_node/test/nym_txn/test_nym.py new file mode 100644 index 000000000..f961a590d --- /dev/null +++ b/indy_node/test/nym_txn/test_nym.py @@ -0,0 +1,34 @@ +import pytest +from indy_common.constants import TRUST_ANCHOR_STRING + +from plenum.common.exceptions import RequestRejectedException + +from plenum.test.pool_transactions.helper import sdk_add_new_nym + + +def test_non_steward_cannot_create_trust_anchor( + nodeSet, looper, sdk_pool_handle, sdk_wallet_steward): + sdk_wallet_client = sdk_add_new_nym(looper, sdk_pool_handle, sdk_wallet_steward) + with pytest.raises(RequestRejectedException) as e: + sdk_add_new_nym(looper, sdk_pool_handle, sdk_wallet_client, role=TRUST_ANCHOR_STRING) + e.match('None role cannot') + + +def 
testStewardCreatesATrustAnchor(looper, sdk_pool_handle, sdk_wallet_steward): + sdk_add_new_nym(looper, sdk_pool_handle, sdk_wallet_steward, role=TRUST_ANCHOR_STRING) + + +def testStewardCreatesAnotherTrustAnchor(looper, sdk_pool_handle, sdk_wallet_steward): + sdk_add_new_nym(looper, sdk_pool_handle, sdk_wallet_steward, role=TRUST_ANCHOR_STRING) + + +def test_non_trust_anchor_cannot_create_user( + nodeSet, looper, sdk_pool_handle, sdk_wallet_steward): + sdk_wallet_client = sdk_add_new_nym(looper, sdk_pool_handle, sdk_wallet_steward) + with pytest.raises(RequestRejectedException) as e: + sdk_add_new_nym(looper, sdk_pool_handle, sdk_wallet_client) + e.match('None role cannot') + + +def testTrustAnchorCreatesAUser(sdk_user_wallet_a): + pass diff --git a/indy_client/test/cli/test_nym.py b/indy_node/test/nym_txn/test_nym_additional.py similarity index 98% rename from indy_client/test/cli/test_nym.py rename to indy_node/test/nym_txn/test_nym_additional.py index 5cb17a9ac..b9a887ab2 100644 --- a/indy_client/test/cli/test_nym.py +++ b/indy_node/test/nym_txn/test_nym_additional.py @@ -38,7 +38,7 @@ def test_pool_nodes_started(nodeSet): def test_send_same_nyms_only_first_gets_written( - looper, do, sdk_pool_handle, sdk_wallet_steward): + looper, sdk_pool_handle, sdk_wallet_steward): wh, _ = sdk_wallet_steward seed = randomString(32) did, verkey = looper.loop.run_until_complete( diff --git a/indy_node/test/nym_txn/test_nym_resend.py b/indy_node/test/nym_txn/test_nym_resend.py index 88e962f1a..89eb4586b 100644 --- a/indy_node/test/nym_txn/test_nym_resend.py +++ b/indy_node/test/nym_txn/test_nym_resend.py @@ -1,6 +1,6 @@ import json -from indy_client.test.cli.helper import createHalfKeyIdentifierAndAbbrevVerkey +from indy_node.test.helper import createHalfKeyIdentifierAndAbbrevVerkey from indy.ledger import sign_request, submit_request, build_nym_request from plenum.common.constants import REPLY, REJECT diff --git a/indy_node/test/nym_txn/test_send_nym_validation.py 
b/indy_node/test/nym_txn/test_send_nym_validation.py new file mode 100644 index 000000000..1bcade423 --- /dev/null +++ b/indy_node/test/nym_txn/test_send_nym_validation.py @@ -0,0 +1,515 @@ +import json + +import pytest + +from libnacl import randombytes +from plenum.common.exceptions import RequestNackedException + +from plenum.common.constants import TRUSTEE, STEWARD, ROLE +from indy_common.constants import TRUST_ANCHOR +from plenum.common.types import OPERATION +from plenum.common.util import randomString, hexToFriendly, friendlyToHex, rawToFriendly, friendlyToHexStr + +from plenum.test.helper import sdk_get_and_check_replies, sdk_get_bad_response +from indy_node.test.helper import createUuidIdentifier, createHalfKeyIdentifierAndAbbrevVerkey, createCryptonym, \ + createUuidIdentifierAndFullVerkey +from plenum.test.pool_transactions.helper import prepare_nym_request, sdk_sign_and_send_prepared_request + + +@pytest.fixture(scope='module') +def nym_request(looper, sdk_wallet_trustee): + seed = randomString(32) + alias = randomString(5) + dest = None + role = None + verkey = None + nym_request, _ = looper.loop.run_until_complete( + prepare_nym_request(sdk_wallet_trustee, seed, + alias, role, dest, verkey, True)) + return json.loads(nym_request) + + +def testSendNymSucceedsForUuidIdentifierAndOmittedVerkey( + looper, sdk_pool_handle, txnPoolNodeSet, nym_request, sdk_wallet_trustee): + parameters = { + 'dest': createUuidIdentifier(), + 'role': TRUST_ANCHOR + } + nym_request[OPERATION].update(parameters) + request_couple = sdk_sign_and_send_prepared_request(looper, sdk_wallet_trustee, + sdk_pool_handle, json.dumps(nym_request)) + sdk_get_and_check_replies(looper, [request_couple]) + + +def testSendNymSucceedsForUuidIdentifierAndFullVerkey( + looper, sdk_pool_handle, txnPoolNodeSet, nym_request, sdk_wallet_trustee): + uuidIdentifier, fullVerkey = createUuidIdentifierAndFullVerkey() + parameters = { + 'dest': uuidIdentifier, + 'verkey': fullVerkey, + 'role': 
TRUST_ANCHOR + } + nym_request[OPERATION].update(parameters) + request_couple = sdk_sign_and_send_prepared_request(looper, sdk_wallet_trustee, + sdk_pool_handle, json.dumps(nym_request)) + sdk_get_and_check_replies(looper, [request_couple]) + + +def testSendNymSucceedsForHalfKeyIdentifierAndAbbrevVerkey( + looper, sdk_pool_handle, txnPoolNodeSet, nym_request, sdk_wallet_trustee): + halfKeyIdentifier, abbrevVerkey = createHalfKeyIdentifierAndAbbrevVerkey() + parameters = { + 'dest': halfKeyIdentifier, + 'verkey': abbrevVerkey, + 'role': TRUST_ANCHOR + } + nym_request[OPERATION].update(parameters) + request_couple = sdk_sign_and_send_prepared_request(looper, sdk_wallet_trustee, + sdk_pool_handle, json.dumps(nym_request)) + sdk_get_and_check_replies(looper, [request_couple]) + + +def testSendNymSucceedsForTrusteeRole( + looper, sdk_pool_handle, txnPoolNodeSet, nym_request, sdk_wallet_trustee): + halfKeyIdentifier, abbrevVerkey = createHalfKeyIdentifierAndAbbrevVerkey() + parameters = { + 'dest': halfKeyIdentifier, + 'verkey': abbrevVerkey, + 'role': TRUSTEE + } + + nym_request[OPERATION].update(parameters) + request_couple = sdk_sign_and_send_prepared_request(looper, sdk_wallet_trustee, + sdk_pool_handle, json.dumps(nym_request)) + sdk_get_and_check_replies(looper, [request_couple]) + + +def testSendNymSucceedsForStewardRole( + looper, sdk_pool_handle, txnPoolNodeSet, nym_request, sdk_wallet_trustee): + halfKeyIdentifier, abbrevVerkey = createHalfKeyIdentifierAndAbbrevVerkey() + parameters = { + 'dest': halfKeyIdentifier, + 'verkey': abbrevVerkey, + 'role': STEWARD + } + nym_request[OPERATION].update(parameters) + request_couple = sdk_sign_and_send_prepared_request(looper, sdk_wallet_trustee, + sdk_pool_handle, json.dumps(nym_request)) + sdk_get_and_check_replies(looper, [request_couple]) + + +def testSendNymSucceedsForTrustAnchorRole( + looper, sdk_pool_handle, txnPoolNodeSet, nym_request, sdk_wallet_trustee): + halfKeyIdentifier, abbrevVerkey = 
createHalfKeyIdentifierAndAbbrevVerkey() + parameters = { + 'dest': halfKeyIdentifier, + 'verkey': abbrevVerkey, + 'role': TRUST_ANCHOR + } + nym_request[OPERATION].update(parameters) + request_couple = sdk_sign_and_send_prepared_request(looper, sdk_wallet_trustee, + sdk_pool_handle, json.dumps(nym_request)) + sdk_get_and_check_replies(looper, [request_couple]) + + +def testSendNymSucceedsForOmittedRole( + looper, sdk_pool_handle, txnPoolNodeSet, nym_request, sdk_wallet_trustee): + halfKeyIdentifier, abbrevVerkey = createHalfKeyIdentifierAndAbbrevVerkey() + parameters = { + 'dest': halfKeyIdentifier, + 'verkey': abbrevVerkey + } + del nym_request[OPERATION][ROLE] + nym_request[OPERATION].update(parameters) + request_couple = sdk_sign_and_send_prepared_request(looper, sdk_wallet_trustee, + sdk_pool_handle, json.dumps(nym_request)) + sdk_get_and_check_replies(looper, [request_couple]) + + +def testSendNymSucceedsForNoneRole( + looper, sdk_pool_handle, txnPoolNodeSet, nym_request, sdk_wallet_trustee): + halfKeyIdentifier, abbrevVerkey = createHalfKeyIdentifierAndAbbrevVerkey() + parameters = { + 'dest': halfKeyIdentifier, + 'verkey': abbrevVerkey, + 'role': None + } + nym_request[OPERATION].update(parameters) + request_couple = sdk_sign_and_send_prepared_request(looper, sdk_wallet_trustee, + sdk_pool_handle, json.dumps(nym_request)) + sdk_get_and_check_replies(looper, [request_couple]) + + +@pytest.mark.skip(reason='INDY-210') +def testSendNymFailsForCryptonymIdentifierAndOmittedVerkey( + looper, sdk_pool_handle, txnPoolNodeSet, nym_request, sdk_wallet_trustee): + parameters = { + 'dest': createCryptonym(), + 'role': TRUST_ANCHOR + } + nym_request[OPERATION].update(parameters) + request_couple = sdk_sign_and_send_prepared_request(looper, sdk_wallet_trustee, + sdk_pool_handle, json.dumps(nym_request)) + sdk_get_and_check_replies(looper, [request_couple]) + + +@pytest.mark.skip(reason='INDY-210') +def testSendNymFailsForCryptonymIdentifierAndFullVerkey( + 
looper, sdk_pool_handle, txnPoolNodeSet, nym_request, sdk_wallet_trustee): + cryptonym = createCryptonym() + + _, fullVerkey = createUuidIdentifierAndFullVerkey() + parameters = { + 'dest': cryptonym, + 'verkey': fullVerkey, + 'role': TRUST_ANCHOR + } + + nym_request[OPERATION].update(parameters) + request_couple = sdk_sign_and_send_prepared_request(looper, sdk_wallet_trustee, + sdk_pool_handle, json.dumps(nym_request)) + sdk_get_and_check_replies(looper, [request_couple]) + + +def testSendNymFailsForCryptonymIdentifierAndMatchedAbbrevVerkey( + looper, sdk_pool_handle, txnPoolNodeSet, nym_request, sdk_wallet_trustee): + cryptonym = createCryptonym() + + hexCryptonym = friendlyToHex(cryptonym) + abbrevVerkey = '~' + hexToFriendly(hexCryptonym[16:]) + parameters = { + 'dest': cryptonym, + 'verkey': abbrevVerkey, + 'role': TRUST_ANCHOR + } + nym_request[OPERATION].update(parameters) + request_couple = sdk_sign_and_send_prepared_request(looper, sdk_wallet_trustee, + sdk_pool_handle, json.dumps(nym_request)) + sdk_get_bad_response(looper, [request_couple], RequestNackedException, + 'Neither a full verkey nor an abbreviated one') + + +@pytest.mark.skip(reason='SOV-1108') +def testSendNymFailsIfIdentifierSizeIs15Bytes( + looper, sdk_pool_handle, txnPoolNodeSet, nym_request, sdk_wallet_trustee): + parameters = { + 'dest': rawToFriendly(randombytes(15)), + 'role': TRUST_ANCHOR + } + nym_request[OPERATION].update(parameters) + request_couple = sdk_sign_and_send_prepared_request(looper, sdk_wallet_trustee, + sdk_pool_handle, json.dumps(nym_request)) + sdk_get_bad_response(looper, [request_couple], RequestNackedException, '') + + +@pytest.mark.skip(reason='SOV-1108') +def testSendNymFailsIfIdentifierSizeIs17Bytes( + looper, sdk_pool_handle, txnPoolNodeSet, nym_request, sdk_wallet_trustee): + parameters = { + 'dest': rawToFriendly(randombytes(17)), + 'role': TRUST_ANCHOR + } + nym_request[OPERATION].update(parameters) + request_couple = 
sdk_sign_and_send_prepared_request(looper, sdk_wallet_trustee, + sdk_pool_handle, json.dumps(nym_request)) + sdk_get_and_check_replies(looper, [request_couple]) + + +@pytest.mark.skip(reason='SOV-1108') +def testSendNymFailsIfFullVerkeySizeIs31Bytes( + looper, sdk_pool_handle, txnPoolNodeSet, nym_request, sdk_wallet_trustee): + parameters = { + 'dest': rawToFriendly(randombytes(16)), + 'verkey': rawToFriendly(randombytes(31)), + 'role': TRUST_ANCHOR + } + nym_request[OPERATION].update(parameters) + request_couple = sdk_sign_and_send_prepared_request(looper, sdk_wallet_trustee, + sdk_pool_handle, json.dumps(nym_request)) + sdk_get_and_check_replies(looper, [request_couple]) + + +@pytest.mark.skip(reason='SOV-1108') +def testSendNymFailsIfFullVerkeySizeIs33Bytes( + looper, sdk_pool_handle, txnPoolNodeSet, nym_request, sdk_wallet_trustee): + parameters = { + 'dest': rawToFriendly(randombytes(16)), + 'verkey': rawToFriendly(randombytes(33)), + 'role': TRUST_ANCHOR + } + nym_request[OPERATION].update(parameters) + request_couple = sdk_sign_and_send_prepared_request(looper, sdk_wallet_trustee, + sdk_pool_handle, json.dumps(nym_request)) + sdk_get_and_check_replies(looper, [request_couple]) + + +@pytest.mark.skip(reason='SOV-1108') +def testSendNymFailsIfAbbrevVerkeySizeIs15Bytes( + looper, sdk_pool_handle, txnPoolNodeSet, nym_request, sdk_wallet_trustee): + parameters = { + 'dest': rawToFriendly(randombytes(16)), + 'verkey': '~' + rawToFriendly(randombytes(15)), + 'role': TRUST_ANCHOR + } + nym_request[OPERATION].update(parameters) + request_couple = sdk_sign_and_send_prepared_request(looper, sdk_wallet_trustee, + sdk_pool_handle, json.dumps(nym_request)) + sdk_get_and_check_replies(looper, [request_couple]) + + +@pytest.mark.skip(reason='SOV-1108') +def testSendNymFailsIfAbbrevVerkeySizeIs17Bytes( + looper, sdk_pool_handle, txnPoolNodeSet, nym_request, sdk_wallet_trustee): + parameters = { + 'dest': rawToFriendly(randombytes(16)), + 'verkey': '~' + 
rawToFriendly(randombytes(17)), + 'role': TRUST_ANCHOR + } + nym_request[OPERATION].update(parameters) + request_couple = sdk_sign_and_send_prepared_request(looper, sdk_wallet_trustee, + sdk_pool_handle, json.dumps(nym_request)) + sdk_get_and_check_replies(looper, [request_couple]) + + +@pytest.mark.skip(reason='SOV-1109') +def testSendNymFailsIfUuidIdentifierIsHexEncoded( + looper, sdk_pool_handle, txnPoolNodeSet, nym_request, sdk_wallet_trustee): + parameters = { + 'dest': friendlyToHexStr(createUuidIdentifier()), + 'role': TRUST_ANCHOR + } + nym_request[OPERATION].update(parameters) + request_couple = sdk_sign_and_send_prepared_request(looper, sdk_wallet_trustee, + sdk_pool_handle, json.dumps(nym_request)) + sdk_get_and_check_replies(looper, [request_couple]) + + +@pytest.mark.skip(reason='SOV-1109') +def testSendNymFailsIfFullVerkeyIsHexEncoded( + looper, sdk_pool_handle, txnPoolNodeSet, nym_request, sdk_wallet_trustee): + uuidIdentifier, fullVerkey = createUuidIdentifierAndFullVerkey() + parameters = { + 'dest': uuidIdentifier, + 'verkey': friendlyToHexStr(fullVerkey), + 'role': TRUST_ANCHOR + } + nym_request[OPERATION].update(parameters) + request_couple = sdk_sign_and_send_prepared_request(looper, sdk_wallet_trustee, + sdk_pool_handle, json.dumps(nym_request)) + sdk_get_and_check_replies(looper, [request_couple]) + + +@pytest.mark.skip(reason='SOV-1109') +def testSendNymFailsIfAbbrevVerkeyIsHexEncoded( + looper, sdk_pool_handle, txnPoolNodeSet, nym_request, sdk_wallet_trustee): + halfKeyIdentifier, abbrevVerkey = createHalfKeyIdentifierAndAbbrevVerkey() + parameters = { + 'dest': halfKeyIdentifier, + 'verkey': '~' + friendlyToHexStr(abbrevVerkey.replace('~', '')), + 'role': TRUST_ANCHOR + } + nym_request[OPERATION].update(parameters) + request_couple = sdk_sign_and_send_prepared_request(looper, sdk_wallet_trustee, + sdk_pool_handle, json.dumps(nym_request)) + sdk_get_and_check_replies(looper, [request_couple]) + + +@pytest.mark.skip(reason='SOV-1109') +def 
testSendNymFailsIfIdentifierContainsNonBase58Characters( + looper, sdk_pool_handle, txnPoolNodeSet, nym_request, sdk_wallet_trustee): + uuidIdentifier = createUuidIdentifier() + parameters = { + 'dest': uuidIdentifier[:5] + '/' + uuidIdentifier[6:], + 'role': TRUST_ANCHOR + } + nym_request[OPERATION].update(parameters) + request_couple = sdk_sign_and_send_prepared_request(looper, sdk_wallet_trustee, + sdk_pool_handle, json.dumps(nym_request)) + sdk_get_and_check_replies(looper, [request_couple]) + + +@pytest.mark.skip(reason='SOV-1109') +def testSendNymFailsIfFullVerkeyContainsNonBase58Characters( + looper, sdk_pool_handle, txnPoolNodeSet, nym_request, sdk_wallet_trustee): + uuidIdentifier, fullVerkey = createUuidIdentifierAndFullVerkey() + parameters = { + 'dest': uuidIdentifier, + 'verkey': fullVerkey[:5] + '/' + fullVerkey[6:], + 'role': TRUST_ANCHOR + } + nym_request[OPERATION].update(parameters) + request_couple = sdk_sign_and_send_prepared_request(looper, sdk_wallet_trustee, + sdk_pool_handle, json.dumps(nym_request)) + sdk_get_and_check_replies(looper, [request_couple]) + + +@pytest.mark.skip(reason='SOV-1109') +def testSendNymFailsIfAbbrevVerkeyContainsNonBase58Characters( + looper, sdk_pool_handle, txnPoolNodeSet, nym_request, sdk_wallet_trustee): + halfKeyIdentifier, abbrevVerkey = createHalfKeyIdentifierAndAbbrevVerkey() + parameters = { + 'dest': halfKeyIdentifier, + 'verkey': abbrevVerkey[:6] + '/' + abbrevVerkey[7:], + 'role': TRUST_ANCHOR + } + nym_request[OPERATION].update(parameters) + request_couple = sdk_sign_and_send_prepared_request(looper, sdk_wallet_trustee, + sdk_pool_handle, json.dumps(nym_request)) + sdk_get_and_check_replies(looper, [request_couple]) + + +@pytest.mark.skip(reason='SOV-1109') +def testSendNymFailsIfFullVerkeyContainsTilde( + looper, sdk_pool_handle, txnPoolNodeSet, nym_request, sdk_wallet_trustee): + uuidIdentifier, fullVerkey = createUuidIdentifierAndFullVerkey() + parameters = { + 'dest': uuidIdentifier, + 'verkey': '~' 
+ fullVerkey, + 'role': TRUST_ANCHOR + } + nym_request[OPERATION].update(parameters) + request_couple = sdk_sign_and_send_prepared_request(looper, sdk_wallet_trustee, + sdk_pool_handle, json.dumps(nym_request)) + sdk_get_and_check_replies(looper, [request_couple]) + + +@pytest.mark.skip(reason='SOV-1109') +def testSendNymFailsIfAbbrevVerkeyDoesNotContainTilde( + looper, sdk_pool_handle, txnPoolNodeSet, nym_request, sdk_wallet_trustee): + halfKeyIdentifier, abbrevVerkey = createHalfKeyIdentifierAndAbbrevVerkey() + parameters = { + 'dest': halfKeyIdentifier, + 'verkey': abbrevVerkey.replace('~', ''), + 'role': TRUST_ANCHOR + } + nym_request[OPERATION].update(parameters) + request_couple = sdk_sign_and_send_prepared_request(looper, sdk_wallet_trustee, + sdk_pool_handle, json.dumps(nym_request)) + sdk_get_and_check_replies(looper, [request_couple]) + + +@pytest.mark.skip(reason='SOV-1110') +def testSendNymFailsIfRoleIsUnknown( + looper, sdk_pool_handle, txnPoolNodeSet, nym_request, sdk_wallet_trustee): + halfKeyIdentifier, abbrevVerkey = createHalfKeyIdentifierAndAbbrevVerkey() + parameters = { + 'dest': halfKeyIdentifier, + 'verkey': abbrevVerkey, + 'role': 'SUPERVISOR' + } + nym_request[OPERATION].update(parameters) + request_couple = sdk_sign_and_send_prepared_request(looper, sdk_wallet_trustee, + sdk_pool_handle, json.dumps(nym_request)) + sdk_get_and_check_replies(looper, [request_couple]) + + +@pytest.mark.skip(reason='SOV-1110') +def testSendNymFailsIfRoleIsSpecifiedUsingNumericCode( + looper, sdk_pool_handle, txnPoolNodeSet, nym_request, sdk_wallet_trustee): + halfKeyIdentifier, abbrevVerkey = createHalfKeyIdentifierAndAbbrevVerkey() + parameters = { + 'dest': halfKeyIdentifier, + 'verkey': abbrevVerkey, + 'role': TRUST_ANCHOR.value + } + nym_request[OPERATION].update(parameters) + request_couple = sdk_sign_and_send_prepared_request(looper, sdk_wallet_trustee, + sdk_pool_handle, json.dumps(nym_request)) + sdk_get_and_check_replies(looper, 
[request_couple]) + + +@pytest.mark.skip(reason='SOV-1111') +def testSendNymHasInvalidSyntaxIfParametersOrderIsWrong( + looper, sdk_pool_handle, txnPoolNodeSet, nym_request, sdk_wallet_trustee): + halfKeyIdentifier, abbrevVerkey = createHalfKeyIdentifierAndAbbrevVerkey() + parameters = { + 'dest': halfKeyIdentifier, + 'verkey': abbrevVerkey, + 'role': TRUST_ANCHOR + } + nym_request[OPERATION].update(parameters) + request_couple = sdk_sign_and_send_prepared_request(looper, sdk_wallet_trustee, + sdk_pool_handle, json.dumps(nym_request)) + sdk_get_and_check_replies(looper, [request_couple]) + + +@pytest.mark.skip(reason='SOV-1111') +def testSendNymHasInvalidSyntaxIfIdentifierIsEmpty( + looper, sdk_pool_handle, txnPoolNodeSet, nym_request, sdk_wallet_trustee): + _, fullVerkey = createUuidIdentifierAndFullVerkey() + parameters = { + 'dest': '', + 'verkey': fullVerkey, + 'role': TRUST_ANCHOR + } + nym_request[OPERATION].update(parameters) + request_couple = sdk_sign_and_send_prepared_request(looper, sdk_wallet_trustee, + sdk_pool_handle, json.dumps(nym_request)) + sdk_get_and_check_replies(looper, [request_couple]) + + +@pytest.mark.skip(reason='SOV-1111') +def testSendNymHasInvalidSyntaxIfIdentifierIsOmitted( + looper, sdk_pool_handle, txnPoolNodeSet, nym_request, sdk_wallet_trustee): + _, fullVerkey = createUuidIdentifierAndFullVerkey() + parameters = { + 'verkey': fullVerkey, + 'role': TRUST_ANCHOR + } + nym_request[OPERATION].update(parameters) + request_couple = sdk_sign_and_send_prepared_request(looper, sdk_wallet_trustee, + sdk_pool_handle, json.dumps(nym_request)) + sdk_get_and_check_replies(looper, [request_couple]) + + +def testSendNymHasInvalidSyntaxForUuidIdentifierAndEmptyVerkey( + looper, sdk_pool_handle, txnPoolNodeSet, nym_request, sdk_wallet_trustee): + parameters = { + 'dest': createUuidIdentifier(), + 'verkey': '', + 'role': TRUST_ANCHOR + } + nym_request[OPERATION].update(parameters) + request_couple = sdk_sign_and_send_prepared_request(looper, 
sdk_wallet_trustee, + sdk_pool_handle, json.dumps(nym_request)) + sdk_get_bad_response(looper, [request_couple], RequestNackedException, + 'Neither a full verkey nor an abbreviated one') + + +@pytest.mark.skip(reason='SOV-1111') +def testSendNymHasInvalidSyntaxIfIdentifierAndVerkeyAreOmitted( + looper, sdk_pool_handle, txnPoolNodeSet, nym_request, sdk_wallet_trustee): + parameters = { + 'role': TRUST_ANCHOR + } + nym_request[OPERATION].update(parameters) + request_couple = sdk_sign_and_send_prepared_request(looper, sdk_wallet_trustee, + sdk_pool_handle, json.dumps(nym_request)) + sdk_get_and_check_replies(looper, [request_couple]) + + +@pytest.mark.skip(reason='SOV-1111') +def testSendNymHasInvalidSyntaxIfUnknownParameterIsPassed( + looper, sdk_pool_handle, txnPoolNodeSet, nym_request, sdk_wallet_trustee): + uuidIdentifier, fullVerkey = createUuidIdentifierAndFullVerkey() + parameters = { + 'dest': uuidIdentifier, + 'verkey': fullVerkey, + 'role': TRUST_ANCHOR, + 'extra': 42 + } + nym_request[OPERATION].update(parameters) + request_couple = sdk_sign_and_send_prepared_request(looper, sdk_wallet_trustee, + sdk_pool_handle, json.dumps(nym_request)) + sdk_get_and_check_replies(looper, [request_couple]) + + +def testSendNymHasInvalidSyntaxIfAllParametersAreOmitted( + looper, sdk_pool_handle, txnPoolNodeSet, nym_request, sdk_wallet_trustee): + for f in nym_request[OPERATION].keys(): + nym_request[OPERATION][f] = '' + + request_couple = sdk_sign_and_send_prepared_request(looper, sdk_wallet_trustee, + sdk_pool_handle, json.dumps(nym_request)) + sdk_get_bad_response(looper, [request_couple], RequestNackedException, + 'Reason: client request invalid') diff --git a/indy_node/test/persistence/test_idr_cache_update_after_catchup.py b/indy_node/test/persistence/test_idr_cache_update_after_catchup.py index d19541666..298b38455 100644 --- a/indy_node/test/persistence/test_idr_cache_update_after_catchup.py +++ b/indy_node/test/persistence/test_idr_cache_update_after_catchup.py @@ 
-2,9 +2,8 @@ from indy.ledger import build_nym_request, sign_request, submit_request -from indy_client.test.cli.helper import createHalfKeyIdentifierAndAbbrevVerkey from indy_common.state import domain -from indy_node.test.helper import start_stopped_node +from indy_node.test.helper import start_stopped_node, createHalfKeyIdentifierAndAbbrevVerkey from plenum.common.txn_util import get_txn_time from plenum.test.node_catchup.helper import waitNodeDataEquality from plenum.test.pool_transactions.helper import disconnect_node_and_ensure_disconnected diff --git a/indy_node/test/pool_config/helper.py b/indy_node/test/pool_config/helper.py index ee812ca6e..55dcce50e 100644 --- a/indy_node/test/pool_config/helper.py +++ b/indy_node/test/pool_config/helper.py @@ -1,23 +1,24 @@ -from plenum.test.helper import sdk_sign_and_submit_req_obj, \ - sdk_get_and_check_replies -from indy_client.client.wallet.pool_config import PoolConfig as WPoolConfig +from indy.ledger import build_pool_config_request + +from plenum.test.helper import sdk_get_and_check_replies, sdk_sign_and_submit_req def sdk_pool_config_sent(looper, sdk_pool_handle, sdk_wallet_trustee, pool_config_data): _, did = sdk_wallet_trustee - pool_cfg = WPoolConfig(trustee=did, **pool_config_data) - req = pool_cfg.ledgerRequest() - req = sdk_sign_and_submit_req_obj(looper, sdk_pool_handle, sdk_wallet_trustee, req) - return pool_cfg, req + req = looper.loop.run_until_complete(build_pool_config_request( + did, pool_config_data['writes'], pool_config_data['force'])) + req = sdk_sign_and_submit_req(sdk_pool_handle, sdk_wallet_trustee, req) + return req + def sdk_ensure_pool_config_sent(looper, sdk_pool_handle, sdk_wallet_trustee, pool_config_data): _, did = sdk_wallet_trustee - pool_cfg = WPoolConfig(trustee=did, **pool_config_data) - req = pool_cfg.ledgerRequest() - req = sdk_sign_and_submit_req_obj(looper, sdk_pool_handle, sdk_wallet_trustee, req) - sdk_get_and_check_replies(looper, [req]) - return pool_cfg + req = 
looper.loop.run_until_complete(build_pool_config_request( + did, pool_config_data['writes'], pool_config_data['force'])) + req = sdk_sign_and_submit_req(sdk_pool_handle, sdk_wallet_trustee, req) + rep = sdk_get_and_check_replies(looper, [req]) + return rep def check_pool_config_writable_set(nodes, writable): diff --git a/indy_node/test/pool_config/test_pool_config.py b/indy_node/test/pool_config/test_pool_config.py new file mode 100644 index 000000000..ecd66b285 --- /dev/null +++ b/indy_node/test/pool_config/test_pool_config.py @@ -0,0 +1,84 @@ +import json +import pytest + +from indy.ledger import build_pool_config_request + +from indy_node.test.nym_txn.test_nym_additional import get_nym +from indy_node.test.upgrade.helper import sdk_ensure_upgrade_sent +from indy_node.test.pool_config.helper import sdk_ensure_pool_config_sent + +from plenum.common.exceptions import RequestNackedException +from plenum.common.types import OPERATION +from plenum.test.helper import sdk_get_bad_response, sdk_sign_and_submit_req +from plenum.common.constants import VERSION +from plenum.test.pool_transactions.helper import sdk_add_new_nym + +from indy_node.test.upgrade.conftest import validUpgrade, nodeIds + + +def sdk_pool_bad_config_sent(looper, sdk_pool_handle, sdk_wallet_trustee, change_writes, + change_force, change_writes_value=None, change_force_value=None): + _, did = sdk_wallet_trustee + req = looper.loop.run_until_complete(build_pool_config_request( + did, True, True)) + req = json.loads(req) + del req[OPERATION]['writes'] + req[OPERATION][change_writes] = change_writes_value if change_writes_value else True + del req[OPERATION]['force'] + req[OPERATION][change_force] = change_force_value if change_force_value else True + req = json.dumps(req) + req = sdk_sign_and_submit_req(sdk_pool_handle, sdk_wallet_trustee, req) + return req + + +def testPoolConfigInvalidSyntax(looper, sdk_pool_handle, sdk_wallet_trustee, poolConfigWTFF): + req = sdk_pool_bad_config_sent(looper, 
sdk_pool_handle, sdk_wallet_trustee, + 'wites', 'force', True, False) + sdk_get_bad_response(looper, [req], RequestNackedException, 'missed fields - writes') + req = sdk_pool_bad_config_sent(looper, sdk_pool_handle, sdk_wallet_trustee, + 'writes', 'force', 'Tue', False) + sdk_get_bad_response(looper, [req], RequestNackedException, 'expected types \'bool\', got \'str\'') + req = sdk_pool_bad_config_sent(looper, sdk_pool_handle, sdk_wallet_trustee, + 'writes', 'force', True, 1) + sdk_get_bad_response(looper, [req], RequestNackedException, 'expected types \'bool\', got \'int\'') + + +def testPoolConfigWritableFalse(looper, sdk_pool_handle, sdk_wallet_trustee, poolConfigWFFF): + sdk_ensure_pool_config_sent(looper, sdk_pool_handle, sdk_wallet_trustee, + poolConfigWFFF) + with pytest.raises(RequestNackedException) as e: + sdk_add_new_nym(looper, sdk_pool_handle, sdk_wallet_trustee) + e.match('Pool is in readonly mode') + + +def testPoolConfigWritableTrue(looper, sdk_pool_handle, sdk_wallet_trustee, poolConfigWTFF): + with pytest.raises(RequestNackedException) as e: + sdk_add_new_nym(looper, sdk_pool_handle, sdk_wallet_trustee) + e.match('Pool is in readonly mode') + sdk_ensure_pool_config_sent(looper, sdk_pool_handle, sdk_wallet_trustee, + poolConfigWTFF) + sdk_add_new_nym(looper, sdk_pool_handle, sdk_wallet_trustee) + + +def testPoolConfigWritableFalseCanRead(looper, sdk_pool_handle, sdk_wallet_trustee, poolConfigWFFF): + _, did = sdk_add_new_nym(looper, sdk_pool_handle, sdk_wallet_trustee) + get_nym(looper, sdk_pool_handle, sdk_wallet_trustee, did) + sdk_ensure_pool_config_sent(looper, sdk_pool_handle, sdk_wallet_trustee, + poolConfigWFFF) + with pytest.raises(RequestNackedException) as e: + sdk_add_new_nym(looper, sdk_pool_handle, sdk_wallet_trustee) + e.match('Pool is in readonly mode') + get_nym(looper, sdk_pool_handle, sdk_wallet_trustee, did) + + +def testPoolUpgradeOnReadonlyPool( + looper, nodeSet, sdk_pool_handle, sdk_wallet_trustee, validUpgrade, 
poolConfigWFFF): + sdk_ensure_pool_config_sent(looper, sdk_pool_handle, sdk_wallet_trustee, + poolConfigWFFF) + sdk_ensure_upgrade_sent(looper, sdk_pool_handle, sdk_wallet_trustee, + validUpgrade) + + for node in nodeSet: + assert len(node.upgrader.aqStash) > 0 + assert node.upgrader.scheduledAction + assert node.upgrader.scheduledAction[0] == validUpgrade[VERSION] diff --git a/indy_node/test/pool_config/test_send_pool_config_only_trustee.py b/indy_node/test/pool_config/test_send_pool_config_only_trustee.py index 55ba4a172..087b5835e 100644 --- a/indy_node/test/pool_config/test_send_pool_config_only_trustee.py +++ b/indy_node/test/pool_config/test_send_pool_config_only_trustee.py @@ -9,7 +9,7 @@ def test_only_trustee_send_pool_config_writes_true_force_false( nodeSet, looper, sdk_pool_handle, sdk_wallet_trustee, poolConfigWTFF): sdk_wallet_steward = sdk_add_new_nym(looper, sdk_pool_handle, sdk_wallet_trustee, 'tmpname', STEWARD_STRING) - _, req = sdk_pool_config_sent(looper, sdk_pool_handle, sdk_wallet_steward, poolConfigWTFF) + req = sdk_pool_config_sent(looper, sdk_pool_handle, sdk_wallet_steward, poolConfigWTFF) sdk_get_bad_response(looper, [req], RequestRejectedException, 'cannot do') @@ -17,7 +17,7 @@ def test_only_trustee_send_pool_config_writes_false_force_false( nodeSet, looper, sdk_pool_handle, sdk_wallet_trustee, poolConfigWFFF): sdk_wallet_steward = sdk_add_new_nym(looper, sdk_pool_handle, sdk_wallet_trustee, 'tmpname', STEWARD_STRING) - _, req = sdk_pool_config_sent(looper, sdk_pool_handle, sdk_wallet_steward, poolConfigWFFF) + req = sdk_pool_config_sent(looper, sdk_pool_handle, sdk_wallet_steward, poolConfigWFFF) sdk_get_bad_response(looper, [req], RequestRejectedException, 'cannot do') @@ -25,7 +25,7 @@ def test_only_trustee_send_pool_config_writes_true_force_true( nodeSet, looper, sdk_pool_handle, sdk_wallet_trustee, poolConfigWTFT): sdk_wallet_steward = sdk_add_new_nym(looper, sdk_pool_handle, sdk_wallet_trustee, 'tmpname', STEWARD_STRING) - _, req = 
sdk_pool_config_sent(looper, sdk_pool_handle, sdk_wallet_steward, poolConfigWTFT) + req = sdk_pool_config_sent(looper, sdk_pool_handle, sdk_wallet_steward, poolConfigWTFT) sdk_get_bad_response(looper, [req], RequestNackedException, 'cannot do') @@ -33,5 +33,5 @@ def test_only_trustee_send_pool_config_writes_false_force_true( nodeSet, looper, sdk_pool_handle, sdk_wallet_trustee, poolConfigWFFT): sdk_wallet_steward = sdk_add_new_nym(looper, sdk_pool_handle, sdk_wallet_trustee, 'tmpname', STEWARD_STRING) - _, req = sdk_pool_config_sent(looper, sdk_pool_handle, sdk_wallet_steward, poolConfigWFFT) + req = sdk_pool_config_sent(looper, sdk_pool_handle, sdk_wallet_steward, poolConfigWFFT) sdk_get_bad_response(looper, [req], RequestNackedException, 'cannot do') diff --git a/indy_node/test/request_propagates/test_request_propagates.py b/indy_node/test/request_propagates/test_request_propagates.py index c7c681cad..09f562842 100644 --- a/indy_node/test/request_propagates/test_request_propagates.py +++ b/indy_node/test/request_propagates/test_request_propagates.py @@ -5,7 +5,7 @@ from indy.ledger import build_attrib_request, sign_request, build_schema_request, build_cred_def_request, \ build_nym_request, build_get_schema_request, parse_get_schema_response -from indy_client.test.cli.helper import createHalfKeyIdentifierAndAbbrevVerkey +from indy_node.test.helper import createHalfKeyIdentifierAndAbbrevVerkey from indy_common.types import Request from indy_node.test.api.helper import sdk_write_schema from plenum.common.messages.node_messages import Propagate diff --git a/indy_client/cli/__init__.py b/indy_node/test/schema/__init__.py similarity index 100% rename from indy_client/cli/__init__.py rename to indy_node/test/schema/__init__.py diff --git a/indy_node/test/schema/test_send_get_schema.py b/indy_node/test/schema/test_send_get_schema.py new file mode 100644 index 000000000..49e157679 --- /dev/null +++ b/indy_node/test/schema/test_send_get_schema.py @@ -0,0 +1,125 @@ +import 
json + +import pytest +from indy.ledger import build_get_schema_request +from plenum.common.exceptions import RequestNackedException + +from plenum.common.constants import DATA, NAME, VERSION, TXN_METADATA, TXN_METADATA_SEQ_NO + +from plenum.common.types import OPERATION + +from plenum.test.helper import sdk_sign_and_submit_req, sdk_get_and_check_replies + +from indy_node.test.api.helper import sdk_write_schema +from indy_node.test.helper import createUuidIdentifier, modify_field + + +@pytest.fixture(scope="module") +def send_schema(looper, sdk_pool_handle, nodeSet, sdk_wallet_trustee): + schema_json, _ = sdk_write_schema(looper, sdk_pool_handle, sdk_wallet_trustee) + return json.loads(schema_json)['id'] + + +@pytest.fixture(scope="module") +def send_schema_seq_no(looper, sdk_pool_handle, nodeSet, sdk_wallet_trustee): + _, reply = sdk_write_schema(looper, sdk_pool_handle, sdk_wallet_trustee) + return reply['result'][TXN_METADATA][TXN_METADATA_SEQ_NO] + + +def test_send_get_schema_succeeds( + looper, sdk_pool_handle, nodeSet, sdk_wallet_trustee, send_schema): + _, did = sdk_wallet_trustee + + request = looper.loop.run_until_complete(build_get_schema_request(did, send_schema)) + rep = sdk_get_and_check_replies(looper, [sdk_sign_and_submit_req(sdk_pool_handle, sdk_wallet_trustee, request)]) + assert rep[0][1]['result']['seqNo'] + + +def test_send_get_schema_as_client( + looper, sdk_pool_handle, nodeSet, sdk_wallet_client, send_schema): + _, did = sdk_wallet_client + + request = looper.loop.run_until_complete(build_get_schema_request(did, send_schema)) + rep = sdk_get_and_check_replies(looper, [sdk_sign_and_submit_req(sdk_pool_handle, sdk_wallet_client, request)]) + assert rep[0][1]['result']['seqNo'] + + +def test_send_get_schema_fails_with_invalid_name( + looper, sdk_pool_handle, nodeSet, sdk_wallet_trustee, send_schema): + _, did = sdk_wallet_trustee + + request = looper.loop.run_until_complete(build_get_schema_request(did, send_schema)) + request = 
modify_field(request, 'name111', OPERATION, DATA, NAME) + rep = sdk_get_and_check_replies(looper, [sdk_sign_and_submit_req(sdk_pool_handle, sdk_wallet_trustee, request)]) + assert rep[0][1]['result']['seqNo'] is None + + +def test_send_get_schema_fails_with_invalid_dest( + looper, sdk_pool_handle, nodeSet, sdk_wallet_trustee, send_schema): + uuid_identifier = createUuidIdentifier() + _, did = sdk_wallet_trustee + + request = looper.loop.run_until_complete(build_get_schema_request(did, send_schema)) + request = modify_field(request, uuid_identifier, OPERATION, 'dest') + rep = sdk_get_and_check_replies(looper, [sdk_sign_and_submit_req(sdk_pool_handle, sdk_wallet_trustee, request)]) + assert rep[0][1]['result']['seqNo'] is None + + +def test_send_get_schema_fails_with_invalid_version( + looper, sdk_pool_handle, nodeSet, sdk_wallet_trustee, send_schema): + _, did = sdk_wallet_trustee + + request = looper.loop.run_until_complete(build_get_schema_request(did, send_schema)) + request = modify_field(request, '2.0', OPERATION, DATA, VERSION) + rep = sdk_get_and_check_replies(looper, [sdk_sign_and_submit_req(sdk_pool_handle, sdk_wallet_trustee, request)]) + assert rep[0][1]['result']['seqNo'] is None + + +def test_send_get_schema_fails_with_invalid_version_syntax( + looper, sdk_pool_handle, nodeSet, sdk_wallet_trustee, send_schema): + _, did = sdk_wallet_trustee + + request = looper.loop.run_until_complete(build_get_schema_request(did, send_schema)) + request = modify_field(request, 'asd', OPERATION, DATA, VERSION) + with pytest.raises(RequestNackedException) as e: + sdk_get_and_check_replies(looper, [sdk_sign_and_submit_req(sdk_pool_handle, sdk_wallet_trustee, request)]) + e.match('version consists of 1 components, but it should contain \(2, 3\)') + + +def test_send_get_schema_fails_without_version( + looper, sdk_pool_handle, nodeSet, sdk_wallet_trustee, send_schema): + _, did = sdk_wallet_trustee + + request = looper.loop.run_until_complete(build_get_schema_request(did, 
send_schema)) + request = json.loads(request) + del request[OPERATION][DATA][VERSION] + request = json.dumps(request) + with pytest.raises(RequestNackedException) as e: + sdk_get_and_check_replies(looper, [sdk_sign_and_submit_req(sdk_pool_handle, sdk_wallet_trustee, request)]) + e.match('missed fields - version') + + +def test_send_get_schema_fails_without_name( + looper, sdk_pool_handle, nodeSet, sdk_wallet_trustee, send_schema): + _, did = sdk_wallet_trustee + + request = looper.loop.run_until_complete(build_get_schema_request(did, send_schema)) + request = json.loads(request) + del request[OPERATION][DATA][NAME] + request = json.dumps(request) + with pytest.raises(RequestNackedException) as e: + sdk_get_and_check_replies(looper, [sdk_sign_and_submit_req(sdk_pool_handle, sdk_wallet_trustee, request)]) + e.match('missed fields - name') + + +def test_send_get_schema_fails_without_dest( + looper, sdk_pool_handle, nodeSet, sdk_wallet_trustee, send_schema): + _, did = sdk_wallet_trustee + + request = looper.loop.run_until_complete(build_get_schema_request(did, send_schema)) + request = json.loads(request) + del request[OPERATION]['dest'] + request = json.dumps(request) + with pytest.raises(RequestNackedException) as e: + sdk_get_and_check_replies(looper, [sdk_sign_and_submit_req(sdk_pool_handle, sdk_wallet_trustee, request)]) + e.match('missed fields - dest') diff --git a/indy_node/test/schema/test_send_schema.py b/indy_node/test/schema/test_send_schema.py new file mode 100644 index 000000000..caf65c81e --- /dev/null +++ b/indy_node/test/schema/test_send_schema.py @@ -0,0 +1,50 @@ +import pytest + +from indy_node.test.api.helper import validate_write_reply, sdk_write_schema_and_check +from plenum.common.exceptions import RequestRejectedException + + +def test_send_schema_multiple_attrib(looper, sdk_pool_handle, + sdk_wallet_trust_anchor): + sdk_write_schema_and_check( + looper, sdk_pool_handle, + sdk_wallet_trust_anchor, + ["attrib1", "attrib2", "attrib3"], + "faber", 
+ "1.4" + ) + + +def test_send_schema_one_attrib(looper, sdk_pool_handle, + sdk_wallet_trust_anchor): + sdk_write_schema_and_check( + looper, sdk_pool_handle, + sdk_wallet_trust_anchor, + ["attrib1"], + "University of Saber", + "1.0" + ) + + +def test_can_not_send_same_schema(looper, sdk_pool_handle, + sdk_wallet_trust_anchor): + sdk_write_schema_and_check( + looper, sdk_pool_handle, + sdk_wallet_trust_anchor, + ["attrib1", "attrib2", "attrib3"], + "business", + "1.8" + ) + + with pytest.raises(RequestRejectedException) as ex_info: + resp = sdk_write_schema_and_check( + looper, sdk_pool_handle, + sdk_wallet_trust_anchor, + ["attrib1", "attrib2", "attrib3"], + "business", + "1.8" + ) + validate_write_reply(resp) + ex_info.match( + "can have one and only one SCHEMA with name business and version 1.8" + ) diff --git a/indy_node/test/state_proof/conftest.py b/indy_node/test/state_proof/conftest.py new file mode 100644 index 000000000..f8e0bcf41 --- /dev/null +++ b/indy_node/test/state_proof/conftest.py @@ -0,0 +1,14 @@ +import pytest + +from plenum.test.delayers import req_delay + + +@pytest.fixture(scope="module") +def nodeSetWithOneNodeResponding(nodeSet): + # the order of nodes the client sends requests to is [Alpha, Beta, Gamma, Delta] + # delay all requests to Beta, Gamma and Delta + # we expect that it's sufficient for the client to get Reply from Alpha only + # as for write requests, we can send it to 1 node only, and it will be propagated to others + for node in nodeSet[1:]: + node.clientIbStasher.delay(req_delay()) + return nodeSet diff --git a/indy_client/test/state_proof/helper.py b/indy_node/test/state_proof/helper.py similarity index 87% rename from indy_client/test/state_proof/helper.py rename to indy_node/test/state_proof/helper.py index 593f8d41e..17d62a259 100644 --- a/indy_client/test/state_proof/helper.py +++ b/indy_node/test/state_proof/helper.py @@ -5,13 +5,14 @@ from plenum.test.helper import sdk_sign_and_submit_op, sdk_get_and_check_replies -def 
sdk_submit_operation_and_get_replies(looper, sdk_pool_handle, sdk_wallet_sender, operation): +def sdk_submit_operation_and_get_result(looper, sdk_pool_handle, sdk_wallet_sender, operation): req = sdk_sign_and_submit_op(looper, sdk_pool_handle, sdk_wallet_sender, operation) - return sdk_get_and_check_replies(looper, [req]) + replies = sdk_get_and_check_replies(looper, [req]) + assert len(replies) == 1 + return replies[0][1]['result'] -def check_valid_proof(reply): - result = reply['result'] +def check_valid_proof(result): assert STATE_PROOF in result state_proof = result[STATE_PROOF] diff --git a/indy_node/test/state_proof/test_asking_one_node.py b/indy_node/test/state_proof/test_asking_one_node.py new file mode 100644 index 000000000..12d97946a --- /dev/null +++ b/indy_node/test/state_proof/test_asking_one_node.py @@ -0,0 +1,30 @@ +import pytest + +from indy_node.test.state_proof.helper import sdk_submit_operation_and_get_result +from plenum.common.constants import TARGET_NYM, TXN_TYPE, RAW +from indy_common.constants import GET_ATTR + +# fixtures +from indy_node.test.attrib_txn.test_nym_attrib import attributeData, \ + attributeName, attributeValue, sdk_added_raw_attribute + + +def test_client_gets_read_reply_from_1_node_only(looper, + nodeSetWithOneNodeResponding, + sdk_added_raw_attribute, + attributeName, + sdk_pool_handle, + sdk_wallet_trustee): + """ + Tests that client could send get-requests to only one node instead of n + """ + # Prepare and send get-request + get_attr_operation = { + TARGET_NYM: sdk_added_raw_attribute['result']['txn']['data']['dest'], + TXN_TYPE: GET_ATTR, + RAW: attributeName + } + + sdk_submit_operation_and_get_result(looper, sdk_pool_handle, + sdk_wallet_trustee, + get_attr_operation) diff --git a/indy_node/test/state_proof/test_state_proofs_for_get_requests.py b/indy_node/test/state_proof/test_state_multi_proofs_for_get_requests.py similarity index 100% rename from indy_node/test/state_proof/test_state_proofs_for_get_requests.py 
rename to indy_node/test/state_proof/test_state_multi_proofs_for_get_requests.py diff --git a/indy_client/test/state_proof/test_state_proof_for_get_requests.py b/indy_node/test/state_proof/test_state_proof_for_get_requests.py similarity index 50% rename from indy_client/test/state_proof/test_state_proof_for_get_requests.py rename to indy_node/test/state_proof/test_state_proof_for_get_requests.py index 3982bd3b4..4e1a6728c 100644 --- a/indy_client/test/state_proof/test_state_proof_for_get_requests.py +++ b/indy_node/test/state_proof/test_state_proof_for_get_requests.py @@ -1,21 +1,25 @@ +import pytest + from common.serializers.serialization import domain_state_serializer from plenum.common.constants import TARGET_NYM, TXN_TYPE, RAW, DATA, \ - ROLE, VERKEY, TXN_TIME, NYM, NAME, VERSION, ORIGIN + ROLE, VERKEY, TXN_TIME, NYM, NAME, VERSION from plenum.common.types import f -from indy_client.test.state_proof.helper import check_valid_proof, \ - sdk_submit_operation_and_get_replies +from indy_node.test.state_proof.helper import check_valid_proof, \ + sdk_submit_operation_and_get_result from indy_common.constants import GET_ATTR, GET_NYM, SCHEMA, GET_SCHEMA, \ CLAIM_DEF, REVOCATION, GET_CLAIM_DEF, CLAIM_DEF_SIGNATURE_TYPE, CLAIM_DEF_SCHEMA_REF, CLAIM_DEF_FROM, \ - SCHEMA_ATTR_NAMES, SCHEMA_NAME, SCHEMA_VERSION + SCHEMA_ATTR_NAMES, SCHEMA_NAME, SCHEMA_VERSION, CLAIM_DEF_TAG from indy_common.serialization import attrib_raw_data_serializer # Fixtures, do not remove -from indy_client.test.test_nym_attrib import \ +from indy_node.test.attrib_txn.test_nym_attrib import \ sdk_added_raw_attribute, attributeName, attributeValue, attributeData +from indy_node.test.schema.test_send_get_schema import send_schema_seq_no def test_state_proof_returned_for_get_attr(looper, + nodeSetWithOneNodeResponding, sdk_added_raw_attribute, attributeName, attributeData, @@ -26,24 +30,24 @@ def test_state_proof_returned_for_get_attr(looper, Use different submitter and reader! 
""" get_attr_operation = { - TARGET_NYM: sdk_added_raw_attribute['operation']['dest'], + TARGET_NYM: sdk_added_raw_attribute['result']['txn']['data']['dest'], TXN_TYPE: GET_ATTR, RAW: attributeName } - replies = sdk_submit_operation_and_get_replies(looper, sdk_pool_handle, - sdk_wallet_client, - get_attr_operation) + + result = sdk_submit_operation_and_get_result(looper, sdk_pool_handle, + sdk_wallet_client, + get_attr_operation) expected_data = attrib_raw_data_serializer.deserialize(attributeData) - for reply in replies: - result = reply[1]['result'] - assert DATA in result - data = attrib_raw_data_serializer.deserialize(result[DATA]) - assert data == expected_data - assert result[TXN_TIME] - check_valid_proof(reply[1]) + assert DATA in result + data = attrib_raw_data_serializer.deserialize(result[DATA]) + assert data == expected_data + assert result[TXN_TIME] + check_valid_proof(result) def test_state_proof_returned_for_get_nym(looper, + nodeSetWithOneNodeResponding, sdk_user_wallet_a, sdk_pool_handle, sdk_wallet_client, @@ -59,31 +63,30 @@ def test_state_proof_returned_for_get_nym(looper, TXN_TYPE: NYM } - sdk_submit_operation_and_get_replies(looper, sdk_pool_handle, - sdk_wallet_trust_anchor, - nym_operation) + sdk_submit_operation_and_get_result(looper, sdk_pool_handle, + sdk_wallet_trust_anchor, + nym_operation) get_nym_operation = { TARGET_NYM: dest, TXN_TYPE: GET_NYM } - replies = sdk_submit_operation_and_get_replies(looper, sdk_pool_handle, - sdk_wallet_client, - get_nym_operation) - for reply in replies: - result = reply[1]['result'] - assert DATA in result - assert result[DATA] - data = domain_state_serializer.deserialize(result[DATA]) - assert ROLE in data - assert VERKEY in data - assert f.IDENTIFIER.nm in data - assert result[TXN_TIME] - check_valid_proof(reply[1]) + result = sdk_submit_operation_and_get_result(looper, sdk_pool_handle, + sdk_wallet_client, + get_nym_operation) + assert DATA in result + assert result[DATA] + data = 
domain_state_serializer.deserialize(result[DATA]) + assert ROLE in data + assert VERKEY in data + assert f.IDENTIFIER.nm in data + assert result[TXN_TIME] + check_valid_proof(result) def test_state_proof_returned_for_get_schema(looper, + nodeSetWithOneNodeResponding, sdk_wallet_trust_anchor, sdk_pool_handle, sdk_wallet_client): @@ -104,10 +107,10 @@ def test_state_proof_returned_for_get_schema(looper, TXN_TYPE: SCHEMA, DATA: data } - sdk_submit_operation_and_get_replies(looper, - sdk_pool_handle, - sdk_wallet_trust_anchor, - schema_operation) + sdk_submit_operation_and_get_result(looper, + sdk_pool_handle, + sdk_wallet_trust_anchor, + schema_operation) get_schema_operation = { TARGET_NYM: dest, @@ -117,26 +120,26 @@ def test_state_proof_returned_for_get_schema(looper, VERSION: schema_version, } } - replies = sdk_submit_operation_and_get_replies(looper, sdk_pool_handle, - sdk_wallet_client, - get_schema_operation) - for reply in replies: - result = reply[1]['result'] - assert DATA in result - data = result.get(DATA) - assert data - assert SCHEMA_ATTR_NAMES in data - assert data[SCHEMA_ATTR_NAMES] == schema_attr_names - assert NAME in data - assert VERSION in data - assert result[TXN_TIME] - check_valid_proof(reply[1]) + result = sdk_submit_operation_and_get_result(looper, sdk_pool_handle, + sdk_wallet_client, + get_schema_operation) + assert DATA in result + data = result.get(DATA) + assert data + assert SCHEMA_ATTR_NAMES in data + assert data[SCHEMA_ATTR_NAMES] == schema_attr_names + assert NAME in data + assert VERSION in data + assert result[TXN_TIME] + check_valid_proof(result) def test_state_proof_returned_for_get_claim_def(looper, + nodeSetWithOneNodeResponding, sdk_wallet_trust_anchor, sdk_pool_handle, - sdk_wallet_client): + sdk_wallet_client, + send_schema_seq_no): """ Tests that state proof is returned in the reply for GET_CLAIM_DEF transactions. 
@@ -146,30 +149,30 @@ def test_state_proof_returned_for_get_claim_def(looper, data = {"primary": {'N': '123'}, REVOCATION: {'h0': '456'}} claim_def_operation = { TXN_TYPE: CLAIM_DEF, - CLAIM_DEF_SCHEMA_REF: 12, + CLAIM_DEF_SCHEMA_REF: send_schema_seq_no, DATA: data, - CLAIM_DEF_SIGNATURE_TYPE: 'CL' + CLAIM_DEF_SIGNATURE_TYPE: 'CL', + CLAIM_DEF_TAG: "tag1" } - sdk_submit_operation_and_get_replies(looper, - sdk_pool_handle, - sdk_wallet_trust_anchor, - claim_def_operation) + sdk_submit_operation_and_get_result(looper, + sdk_pool_handle, + sdk_wallet_trust_anchor, + claim_def_operation) get_claim_def_operation = { CLAIM_DEF_FROM: dest, TXN_TYPE: GET_CLAIM_DEF, - CLAIM_DEF_SCHEMA_REF: 12, - CLAIM_DEF_SIGNATURE_TYPE: 'CL' + CLAIM_DEF_SCHEMA_REF: send_schema_seq_no, + CLAIM_DEF_SIGNATURE_TYPE: 'CL', + CLAIM_DEF_TAG: "tag1" } - replies = sdk_submit_operation_and_get_replies(looper, - sdk_pool_handle, - sdk_wallet_client, - get_claim_def_operation) + result = sdk_submit_operation_and_get_result(looper, + sdk_pool_handle, + sdk_wallet_client, + get_claim_def_operation) expected_data = data - for reply in replies: - result = reply[1]['result'] - assert DATA in result - data = result.get(DATA) - assert data - assert data == expected_data - assert result[TXN_TIME] - check_valid_proof(reply[1]) + assert DATA in result + data = result.get(DATA) + assert data + assert data == expected_data + assert result[TXN_TIME] + check_valid_proof(result) diff --git a/indy_client/test/state_proof/test_state_proof_for_missing_data.py b/indy_node/test/state_proof/test_state_proof_for_missing_data.py similarity index 57% rename from indy_client/test/state_proof/test_state_proof_for_missing_data.py rename to indy_node/test/state_proof/test_state_proof_for_missing_data.py index e7d887a66..b49e02058 100644 --- a/indy_client/test/state_proof/test_state_proof_for_missing_data.py +++ b/indy_node/test/state_proof/test_state_proof_for_missing_data.py @@ -1,25 +1,23 @@ -from plenum.common.types import f 
-from plenum.common.constants import TARGET_NYM, TXN_TYPE, RAW, DATA, NAME, \ - VERSION, ORIGIN +import pytest -from indy_client.test.state_proof.helper import check_valid_proof, \ - sdk_submit_operation_and_get_replies +from plenum.common.constants import TARGET_NYM, TXN_TYPE, RAW, DATA + +from indy_node.test.state_proof.helper import check_valid_proof, \ + sdk_submit_operation_and_get_result from indy_common.constants import GET_ATTR, GET_NYM, GET_SCHEMA, GET_CLAIM_DEF, CLAIM_DEF_FROM, CLAIM_DEF_SCHEMA_REF, \ CLAIM_DEF_SIGNATURE_TYPE, SCHEMA_NAME, SCHEMA_VERSION, SCHEMA_ATTR_NAMES # fixtures, do not remove -from indy_client.test.test_nym_attrib import \ +from indy_node.test.attrib_txn.test_nym_attrib import \ sdk_added_raw_attribute, attributeName, attributeValue, attributeData -def check_no_data_and_valid_proof(replies): - for reply in replies: - result = reply[1][f.RESULT.nm] - assert result.get(DATA) is None - check_valid_proof(reply[1]) +def check_no_data_and_valid_proof(result): + assert result.get(DATA) is None + check_valid_proof(result) -def test_state_proof_returned_for_missing_attr(looper, +def test_state_proof_returned_for_missing_attr(looper, nodeSetWithOneNodeResponding, attributeName, sdk_pool_handle, sdk_wallet_trust_anchor): @@ -33,12 +31,12 @@ def test_state_proof_returned_for_missing_attr(looper, TXN_TYPE: GET_ATTR, RAW: attributeName } - replies = sdk_submit_operation_and_get_replies(looper, sdk_pool_handle, - sdk_wallet_trust_anchor, get_attr_operation) - check_no_data_and_valid_proof(replies) + result = sdk_submit_operation_and_get_result(looper, sdk_pool_handle, + sdk_wallet_trust_anchor, get_attr_operation) + check_no_data_and_valid_proof(result) -def test_state_proof_returned_for_missing_nym(looper, +def test_state_proof_returned_for_missing_nym(looper, nodeSetWithOneNodeResponding, sdk_pool_handle, sdk_wallet_trust_anchor, sdk_user_wallet_a): @@ -55,12 +53,12 @@ def test_state_proof_returned_for_missing_nym(looper, TXN_TYPE: GET_NYM } - 
replies = sdk_submit_operation_and_get_replies(looper, sdk_pool_handle, - sdk_wallet_trust_anchor, get_nym_operation) - check_no_data_and_valid_proof(replies) + result = sdk_submit_operation_and_get_result(looper, sdk_pool_handle, + sdk_wallet_trust_anchor, get_nym_operation) + check_no_data_and_valid_proof(result) -def test_state_proof_returned_for_missing_schema(looper, +def test_state_proof_returned_for_missing_schema(looper, nodeSetWithOneNodeResponding, sdk_pool_handle, sdk_wallet_trust_anchor): """ @@ -77,16 +75,14 @@ def test_state_proof_returned_for_missing_schema(looper, SCHEMA_VERSION: schema_version, } } - replies = sdk_submit_operation_and_get_replies(looper, sdk_pool_handle, - sdk_wallet_trust_anchor, - get_schema_operation) - for reply in replies: - result = reply[1][f.RESULT.nm] - assert SCHEMA_ATTR_NAMES not in result[DATA] - check_valid_proof(reply[1]) + result = sdk_submit_operation_and_get_result(looper, sdk_pool_handle, + sdk_wallet_trust_anchor, + get_schema_operation) + assert SCHEMA_ATTR_NAMES not in result[DATA] + check_valid_proof(result) -def test_state_proof_returned_for_missing_claim_def(looper, +def test_state_proof_returned_for_missing_claim_def(looper, nodeSetWithOneNodeResponding, sdk_pool_handle, sdk_wallet_trust_anchor): """ @@ -100,7 +96,7 @@ def test_state_proof_returned_for_missing_claim_def(looper, CLAIM_DEF_SCHEMA_REF: 12, CLAIM_DEF_SIGNATURE_TYPE: 'CL' } - replies = sdk_submit_operation_and_get_replies(looper, sdk_pool_handle, - sdk_wallet_trust_anchor, - get_claim_def_operation) - check_no_data_and_valid_proof(replies) + result = sdk_submit_operation_and_get_result(looper, sdk_pool_handle, + sdk_wallet_trust_anchor, + get_claim_def_operation) + check_no_data_and_valid_proof(result) diff --git a/indy_node/test/suspension/helper.py b/indy_node/test/suspension/helper.py index 6e7dfcaba..8ffcda9eb 100644 --- a/indy_node/test/suspension/helper.py +++ b/indy_node/test/suspension/helper.py @@ -1,32 +1,4 @@ -from plenum.test import 
waits -from indy_client.test.helper import checkRejects, checkNacks from plenum.test.helper import sdk_sign_and_submit_op, sdk_get_and_check_replies -from stp_core.loop.eventually import eventually - - -def checkIdentityRequestFailed(looper, client, req, cause): - timeout = waits.expectedReqRejectQuorumTime() - # TODO: Just for now, better to have a generic negative response checker - try: - looper.run(eventually(checkRejects, - client, - req.reqId, - cause, retryWait=1, timeout=timeout)) - except AssertionError: - looper.run(eventually(checkNacks, - client, - req.reqId, - cause, retryWait=1, timeout=timeout)) - - -def checkIdentityRequestSucceed(looper, actingClient, actingWallet, idr): - def chk(): - assert actingWallet.getTrustAnchoredIdentity(idr).seqNo is not None - - timeout = waits.expectedTransactionExecutionTime( - len(actingClient.nodeReg) - ) - looper.run(eventually(chk, retryWait=1, timeout=timeout)) def sdk_suspend_role(looper, sdk_pool_handle, sdk_wallet_sender, susp_did): diff --git a/indy_node/test/suspension/test_node_suspension.py b/indy_node/test/suspension/test_node_suspension.py new file mode 100644 index 000000000..11b964e52 --- /dev/null +++ b/indy_node/test/suspension/test_node_suspension.py @@ -0,0 +1,58 @@ +from plenum.common.util import randomString, hexToFriendly +from plenum.common.constants import SERVICES, TARGET_NYM, DATA +from plenum.common.txn_util import get_payload_data + +from plenum.test.pool_transactions.helper import sdk_add_new_nym, sdk_add_new_node, demote_node, promote_node + + +def testSuspendNode(looper, sdk_pool_handle, sdk_wallet_trustee, newNodeAdded): + """ + Suspend a node and then cancel suspension. 
Suspend while suspended + to test that there is no error + """ + new_steward_wallet, new_node = newNodeAdded + + demote_node(looper, sdk_wallet_trustee, sdk_pool_handle, new_node) + demote_node(looper, sdk_wallet_trustee, sdk_pool_handle, new_node) + + promote_node(looper, sdk_wallet_trustee, sdk_pool_handle, new_node) + promote_node(looper, sdk_wallet_trustee, sdk_pool_handle, new_node) + + +def testDemoteNodeWhichWasNeverActive(looper, nodeSet, sdk_pool_handle, + sdk_wallet_trustee, tdir, tconf, + allPluginsPath): + """ + Add a node without services field and check that the ledger does not + contain the `services` field and check that it can be demoted and + the ledger has `services` as empty list + """ + alias = randomString(5) + new_node_name = "Node-" + alias + sdk_wallet_steward = sdk_add_new_nym(looper, + sdk_pool_handle, + sdk_wallet_trustee, + alias="Steward-" + alias, + role='STEWARD') + new_node = sdk_add_new_node(looper, + sdk_pool_handle, + sdk_wallet_steward, + new_node_name, + tdir, + tconf, + allPluginsPath, + services=None) + + for node in nodeSet: + txn = [t for _, t in node.poolLedger.getAllTxn()][-1] + txn_data = get_payload_data(txn) + assert txn_data[TARGET_NYM] == hexToFriendly(new_node.nodestack.verhex) + assert SERVICES not in txn_data[DATA] + + demote_node(looper, sdk_wallet_steward, sdk_pool_handle, new_node) + + for node in nodeSet: + txn = [t for _, t in node.poolLedger.getAllTxn()][-1] + txn_data = get_payload_data(txn) + assert txn_data[TARGET_NYM] == hexToFriendly(new_node.nodestack.verhex) + assert SERVICES in txn_data[DATA] and txn_data[DATA][SERVICES] == [] diff --git a/indy_client/test/cli/test_nym_suspension.py b/indy_node/test/suspension/test_nym_suspension.py similarity index 100% rename from indy_client/test/cli/test_nym_suspension.py rename to indy_node/test/suspension/test_nym_suspension.py diff --git a/indy_node/test/suspension/test_suspension.py b/indy_node/test/suspension/test_suspension.py index cd60bd9f9..3e1c37565 
100644 --- a/indy_node/test/suspension/test_suspension.py +++ b/indy_node/test/suspension/test_suspension.py @@ -9,8 +9,6 @@ from plenum.common.constants import STEWARD_STRING, TRUSTEE_STRING from plenum.test.pool_transactions.test_suspend_node import \ checkNodeNotInNodeReg -from indy_client.test.helper import addRole, \ - getClientAddedWithRole from indy_common.constants import TRUST_ANCHOR_STRING logger = getlogger() diff --git a/indy_node/test/tools/test_nsreplay.py b/indy_node/test/tools/test_nsreplay.py index 9bb11d57d..514cecdf6 100644 --- a/indy_node/test/tools/test_nsreplay.py +++ b/indy_node/test/tools/test_nsreplay.py @@ -75,8 +75,6 @@ def tconf(tconf): import indy_node.test.helper _reload_module(indy_node.server.node) _reload_module(indy_node.test.helper) - import indy_client.test.conftest - importlib.reload(indy_client.test.conftest) return tconf diff --git a/indy_client/client/wallet/__init__.py b/indy_node/test/txn_validation/__init__.py similarity index 100% rename from indy_client/client/wallet/__init__.py rename to indy_node/test/txn_validation/__init__.py diff --git a/indy_node/test/txn_validation/test_pool_upgrade_validation.py b/indy_node/test/txn_validation/test_pool_upgrade_validation.py new file mode 100644 index 000000000..a4e091377 --- /dev/null +++ b/indy_node/test/txn_validation/test_pool_upgrade_validation.py @@ -0,0 +1,40 @@ +from copy import deepcopy +import pytest + +from plenum.common.exceptions import RequestNackedException, RequestRejectedException +from plenum.common.constants import VERSION +from plenum.common.util import randomString +from indy_node.test.upgrade.helper import loweredVersion, sdk_ensure_upgrade_sent +from indy_common.constants import JUSTIFICATION, JUSTIFICATION_MAX_SIZE + +from indy_node.test.upgrade.conftest import validUpgrade, nodeIds + + +def testPoolUpgradeFailsIfVersionIsLowerThanCurrent( + looper, sdk_pool_handle, validUpgrade, sdk_wallet_trustee): + upgrade = deepcopy(validUpgrade) + upgrade[VERSION] = 
loweredVersion() + + with pytest.raises(RequestRejectedException) as e: + sdk_ensure_upgrade_sent(looper, sdk_pool_handle, sdk_wallet_trustee, upgrade) + e.match('Version is not upgradable') + + +def testPoolUpgradeHasInvalidSyntaxIfJustificationIsEmpty( + looper, sdk_pool_handle, validUpgrade, sdk_wallet_trustee): + upgrade = deepcopy(validUpgrade) + upgrade[JUSTIFICATION] = '' + + with pytest.raises(RequestNackedException) as e: + sdk_ensure_upgrade_sent(looper, sdk_pool_handle, sdk_wallet_trustee, upgrade) + e.match('empty string') + + +def testPoolUpgradeHasInvalidSyntaxIfJustificationIsVeryLong( + looper, sdk_pool_handle, validUpgrade, sdk_wallet_trustee): + upgrade = deepcopy(validUpgrade) + upgrade[JUSTIFICATION] = randomString(JUSTIFICATION_MAX_SIZE + 1) + + with pytest.raises(RequestNackedException) as e: + sdk_ensure_upgrade_sent(looper, sdk_pool_handle, sdk_wallet_trustee, upgrade) + e.match('is longer than {} symbols'.format(JUSTIFICATION_MAX_SIZE)) diff --git a/indy_node/test/txn_validation/test_send_attrib_validation.py b/indy_node/test/txn_validation/test_send_attrib_validation.py new file mode 100644 index 000000000..730e2a6ff --- /dev/null +++ b/indy_node/test/txn_validation/test_send_attrib_validation.py @@ -0,0 +1,527 @@ +import json +from base64 import b64encode +from binascii import hexlify +from hashlib import sha256 + +import pytest +from indy.did import create_and_store_my_did +from libnacl import randombytes +from libnacl.secret import SecretBox + +from indy_node.test.helper import sdk_add_attribute_and_check +from plenum.common.exceptions import RequestRejectedException, RequestNackedException +from plenum.common.util import rawToFriendly, randomString +from plenum.test.pool_transactions.helper import sdk_add_new_nym + + +def testSendAttribSucceedsForExistingDest( + looper, sdk_pool_handle, sdk_wallet_trustee): + new_wallet = sdk_add_new_nym(looper, sdk_pool_handle, sdk_wallet_trustee) + parameters = json.dumps({'name': 'Alice'}) + 
sdk_add_attribute_and_check(looper, sdk_pool_handle, new_wallet, parameters) + + +def testSendAttribFailsForNotExistingDest( + looper, sdk_pool_handle, sdk_wallet_trustee): + wh, _ = sdk_wallet_trustee + seed = randomString(32) + did, _ = looper.loop.run_until_complete(create_and_store_my_did( + wh, json.dumps({'seed': seed}))) + + parameters = json.dumps({'name': 'Alice'}) + with pytest.raises(RequestRejectedException) as e: + sdk_add_attribute_and_check(looper, sdk_pool_handle, sdk_wallet_trustee, + parameters, dest=did) + e.match('dest should be added before adding attribute for it') + + +def testSendAttribSucceedsForRawWithCompoundAttr( + looper, sdk_pool_handle, sdk_wallet_trustee): + new_wallet = sdk_add_new_nym(looper, sdk_pool_handle, sdk_wallet_trustee) + parameters = json.dumps({ + 'dateOfBirth': { + 'year': 1984, + 'month': 5, + 'dayOfMonth': 23 + } + }) + sdk_add_attribute_and_check(looper, sdk_pool_handle, new_wallet, parameters) + + +def testSendAttribSucceedsForRawWithNullifiedAttr( + looper, sdk_pool_handle, sdk_wallet_trustee): + new_wallet = sdk_add_new_nym(looper, sdk_pool_handle, sdk_wallet_trustee) + parameters = json.dumps({ + 'name': None + }) + sdk_add_attribute_and_check(looper, sdk_pool_handle, new_wallet, parameters) + + +def testSendAttribSucceedsForRawWithEndpointWithHaContainingIpAddrAndPort( + looper, sdk_pool_handle, sdk_wallet_trustee): + new_wallet = sdk_add_new_nym(looper, sdk_pool_handle, sdk_wallet_trustee) + parameters = json.dumps({ + 'endpoint': { + 'ha': '52.11.117.186:6321' + } + }) + sdk_add_attribute_and_check(looper, sdk_pool_handle, new_wallet, parameters) + + +def testSendAttribSucceedsForRawWithEndpointWithHaBeingNull( + looper, sdk_pool_handle, sdk_wallet_trustee): + new_wallet = sdk_add_new_nym(looper, sdk_pool_handle, sdk_wallet_trustee) + parameters = json.dumps({ + 'endpoint': { + 'ha': None + } + }) + sdk_add_attribute_and_check(looper, sdk_pool_handle, new_wallet, parameters) + + +def 
testSendAttribSucceedsForRawWithEndpointWithValidHaAndOtherProperties( + looper, sdk_pool_handle, sdk_wallet_trustee): + new_wallet = sdk_add_new_nym(looper, sdk_pool_handle, sdk_wallet_trustee) + parameters = json.dumps({ + 'endpoint': { + 'ha': '52.11.117.186:6321', + 'name': 'SOV Agent', + 'description': 'The SOV agent.' + } + }) + sdk_add_attribute_and_check(looper, sdk_pool_handle, new_wallet, parameters) + + +def testSendAttribSucceedsForRawWithEndpointWithoutHaButWithOtherProperties( + looper, sdk_pool_handle, sdk_wallet_trustee): + new_wallet = sdk_add_new_nym(looper, sdk_pool_handle, sdk_wallet_trustee) + parameters = json.dumps({ + 'endpoint': { + 'name': 'SOV Agent', + 'description': 'The SOV agent.' + } + }) + sdk_add_attribute_and_check(looper, sdk_pool_handle, new_wallet, parameters) + + +def testSendAttribSucceedsForRawWithEndpointWithoutProperties( + looper, sdk_pool_handle, sdk_wallet_trustee): + new_wallet = sdk_add_new_nym(looper, sdk_pool_handle, sdk_wallet_trustee) + parameters = json.dumps({ + 'endpoint': {} + }) + sdk_add_attribute_and_check(looper, sdk_pool_handle, new_wallet, parameters) + + +def testSendAttribSucceedsForRawWithEndpointBeingNull( + looper, sdk_pool_handle, sdk_wallet_trustee): + new_wallet = sdk_add_new_nym(looper, sdk_pool_handle, sdk_wallet_trustee) + parameters = json.dumps({ + 'endpoint': None + }) + sdk_add_attribute_and_check(looper, sdk_pool_handle, new_wallet, parameters) + + +def testSendAttribFailsForRawWithEndpointWithHaIfIpAddrHasWrongFormat( + looper, sdk_pool_handle, sdk_wallet_trustee): + new_wallet = sdk_add_new_nym(looper, sdk_pool_handle, sdk_wallet_trustee) + parameters = json.dumps({ + 'endpoint': { + 'ha': '52.11.117:6321' + } + }) + with pytest.raises(RequestNackedException) as e: + sdk_add_attribute_and_check(looper, sdk_pool_handle, new_wallet, parameters) + e.match('invalid endpoint address') + + +def testSendAttribFailsForRawWithEndpointWithHaIfSomeIpComponentsAreNegative( + looper, 
sdk_pool_handle, sdk_wallet_trustee): + new_wallet = sdk_add_new_nym(looper, sdk_pool_handle, sdk_wallet_trustee) + parameters = json.dumps({ + 'endpoint': { + 'ha': '52.-1.117.186:6321' + } + }) + with pytest.raises(RequestNackedException) as e: + sdk_add_attribute_and_check(looper, sdk_pool_handle, new_wallet, parameters) + e.match('invalid endpoint address') + + +def testSendAttribFailsForRawWithEndpointWithHaIfSomeIpCompHigherThanUpperBound( + looper, sdk_pool_handle, sdk_wallet_trustee): + new_wallet = sdk_add_new_nym(looper, sdk_pool_handle, sdk_wallet_trustee) + parameters = json.dumps({ + 'endpoint': { + 'ha': '52.11.256.186:6321' + } + }) + with pytest.raises(RequestNackedException) as e: + sdk_add_attribute_and_check(looper, sdk_pool_handle, new_wallet, parameters) + e.match('invalid endpoint address') + + +def testSendAttribFailsForRawWithEndpointWithHaIfIpAddrIsEmpty( + looper, sdk_pool_handle, sdk_wallet_trustee): + new_wallet = sdk_add_new_nym(looper, sdk_pool_handle, sdk_wallet_trustee) + parameters = json.dumps({ + 'endpoint': { + 'ha': ':6321' + } + }) + with pytest.raises(RequestNackedException) as e: + sdk_add_attribute_and_check(looper, sdk_pool_handle, new_wallet, parameters) + e.match('invalid endpoint address') + + +def testSendAttribFailsForRawWithEndpointWithHaIfPortIsNegative( + looper, sdk_pool_handle, sdk_wallet_trustee): + new_wallet = sdk_add_new_nym(looper, sdk_pool_handle, sdk_wallet_trustee) + parameters = json.dumps({ + 'endpoint': { + 'ha': '52.11.117.186:-1' + } + }) + with pytest.raises(RequestNackedException) as e: + sdk_add_attribute_and_check(looper, sdk_pool_handle, new_wallet, parameters) + e.match('invalid endpoint port') + + +def testSendAttribFailsForRawWithEndpointWithHaIfPortIsHigherThanUpperBound( + looper, sdk_pool_handle, sdk_wallet_trustee): + new_wallet = sdk_add_new_nym(looper, sdk_pool_handle, sdk_wallet_trustee) + parameters = json.dumps({ + 'endpoint': { + 'ha': '52.11.117.186:65536' + } + }) + with 
pytest.raises(RequestNackedException) as e: + sdk_add_attribute_and_check(looper, sdk_pool_handle, new_wallet, parameters) + e.match('invalid endpoint port') + + +def testSendAttribFailsForRawWithEndpointWithHaIfPortIsFloat( + looper, sdk_pool_handle, sdk_wallet_trustee): + new_wallet = sdk_add_new_nym(looper, sdk_pool_handle, sdk_wallet_trustee) + parameters = json.dumps({ + 'endpoint': { + 'ha': '52.11.117.186:6321.5' + } + }) + with pytest.raises(RequestNackedException) as e: + sdk_add_attribute_and_check(looper, sdk_pool_handle, new_wallet, parameters) + e.match('invalid endpoint port') + + +def testSendAttribFailsForRawWithEndpointWithHaIfPortHasWrongFormat( + looper, sdk_pool_handle, sdk_wallet_trustee): + new_wallet = sdk_add_new_nym(looper, sdk_pool_handle, sdk_wallet_trustee) + parameters = json.dumps({ + 'endpoint': { + 'ha': '52.11.117.186:ninety' + } + }) + with pytest.raises(RequestNackedException) as e: + sdk_add_attribute_and_check(looper, sdk_pool_handle, new_wallet, parameters) + e.match('invalid endpoint port') + + +def testSendAttribFailsForRawWithEndpointWithHaIfPortIsEmpty( + looper, sdk_pool_handle, sdk_wallet_trustee): + new_wallet = sdk_add_new_nym(looper, sdk_pool_handle, sdk_wallet_trustee) + parameters = json.dumps({ + 'endpoint': { + 'ha': '52.11.117.186:' + } + }) + with pytest.raises(RequestNackedException) as e: + sdk_add_attribute_and_check(looper, sdk_pool_handle, new_wallet, parameters) + e.match('invalid endpoint port') + + +def testSendAttribFailsForRawWithEndpointWithHaContainingIpAddrOnly( + looper, sdk_pool_handle, sdk_wallet_trustee): + new_wallet = sdk_add_new_nym(looper, sdk_pool_handle, sdk_wallet_trustee) + parameters = json.dumps({ + 'endpoint': { + 'ha': '52.11.117.186' + } + }) + with pytest.raises(RequestNackedException) as e: + sdk_add_attribute_and_check(looper, sdk_pool_handle, new_wallet, parameters) + e.match('invalid endpoint format') + + +def testSendAttribFailsForRawWithEndpointWithHaContainingPortOnly( + 
looper, sdk_pool_handle, sdk_wallet_trustee): + new_wallet = sdk_add_new_nym(looper, sdk_pool_handle, sdk_wallet_trustee) + parameters = json.dumps({ + 'endpoint': { + 'ha': '6321' + } + }) + with pytest.raises(RequestNackedException) as e: + sdk_add_attribute_and_check(looper, sdk_pool_handle, new_wallet, parameters) + e.match('invalid endpoint format') + + +def testSendAttribFailsForRawWithEndpointWithHaContainingDomainNameAndPort( + looper, sdk_pool_handle, sdk_wallet_trustee): + new_wallet = sdk_add_new_nym(looper, sdk_pool_handle, sdk_wallet_trustee) + parameters = json.dumps({ + 'endpoint': { + 'ha': 'sovrin.org:6321' + } + }) + with pytest.raises(RequestNackedException) as e: + sdk_add_attribute_and_check(looper, sdk_pool_handle, new_wallet, parameters) + e.match('invalid endpoint address') + + +def testSendAttribFailsForRawWithEndpointWithHaContainingDomainNameOnly( + looper, sdk_pool_handle, sdk_wallet_trustee): + new_wallet = sdk_add_new_nym(looper, sdk_pool_handle, sdk_wallet_trustee) + parameters = json.dumps({ + 'endpoint': { + 'ha': 'sovrin.org' + } + }) + with pytest.raises(RequestNackedException) as e: + sdk_add_attribute_and_check(looper, sdk_pool_handle, new_wallet, parameters) + e.match('invalid endpoint format') + + +def testSendAttribFailsForRawWithEndpointWithHaBeingHumanReadableText( + looper, sdk_pool_handle, sdk_wallet_trustee): + new_wallet = sdk_add_new_nym(looper, sdk_pool_handle, sdk_wallet_trustee) + parameters = json.dumps({ + 'endpoint': { + 'ha': 'This is not a host address.' 
+ } + }) + with pytest.raises(RequestNackedException) as e: + sdk_add_attribute_and_check(looper, sdk_pool_handle, new_wallet, parameters) + e.match('invalid endpoint format') + + +def testSendAttribFailsForRawWithEndpointWithHaBeingDecimalNumber( + looper, sdk_pool_handle, sdk_wallet_trustee): + new_wallet = sdk_add_new_nym(looper, sdk_pool_handle, sdk_wallet_trustee) + parameters = json.dumps({ + 'endpoint': { + 'ha': 42 + } + }) + with pytest.raises(RequestNackedException) as e: + sdk_add_attribute_and_check(looper, sdk_pool_handle, new_wallet, parameters) + e.match('is not iterable') + + +def testSendAttribFailsForRawWithEndpointWithEmptyHa( + looper, sdk_pool_handle, sdk_wallet_trustee): + new_wallet = sdk_add_new_nym(looper, sdk_pool_handle, sdk_wallet_trustee) + parameters = json.dumps({ + 'endpoint': { + 'ha': '' + } + }) + with pytest.raises(RequestNackedException) as e: + sdk_add_attribute_and_check(looper, sdk_pool_handle, new_wallet, parameters) + e.match('invalid endpoint format') + + +def testSendAttribFailsForRawWithEndpointBeingEmptyString( + looper, sdk_pool_handle, sdk_wallet_trustee): + new_wallet = sdk_add_new_nym(looper, sdk_pool_handle, sdk_wallet_trustee) + parameters = json.dumps({ + 'endpoint': '' + }) + with pytest.raises(RequestNackedException) as e: + sdk_add_attribute_and_check(looper, sdk_pool_handle, new_wallet, parameters) + e.match('object has no attribute') + + +def testSendAttribFailsIfRawContainsMulipleAttrs( + looper, sdk_pool_handle, sdk_wallet_trustee): + new_wallet = sdk_add_new_nym(looper, sdk_pool_handle, sdk_wallet_trustee) + parameters = json.dumps({ + 'name': 'Alice', + 'dateOfBirth': '05/23/2017' + }) + with pytest.raises(RequestNackedException) as e: + sdk_add_attribute_and_check(looper, sdk_pool_handle, new_wallet, parameters) + e.match(' should contain one attribute') + + +def testSendAttribFailsIfRawContainsNoAttrs( + looper, sdk_pool_handle, sdk_wallet_trustee): + new_wallet = sdk_add_new_nym(looper, 
sdk_pool_handle, sdk_wallet_trustee) + parameters = json.dumps({}) + with pytest.raises(RequestNackedException) as e: + sdk_add_attribute_and_check(looper, sdk_pool_handle, new_wallet, parameters) + e.match(' should contain one attribute') + + +def testSendAttribSucceedsForHexSha256Hash( + looper, sdk_pool_handle, sdk_wallet_trustee): + raw = json.dumps({ + 'name': 'Alice' + }) + + new_wallet = sdk_add_new_nym(looper, sdk_pool_handle, sdk_wallet_trustee) + parameters = None + sdk_add_attribute_and_check(looper, sdk_pool_handle, new_wallet, parameters, + xhash=sha256(raw.encode()).hexdigest()) + + +def testSendAttribSucceedsForHexHashWithLettersInBothCases( + looper, sdk_pool_handle, sdk_wallet_trustee): + new_wallet = sdk_add_new_nym(looper, sdk_pool_handle, sdk_wallet_trustee) + parameters = None + sdk_add_attribute_and_check(looper, sdk_pool_handle, new_wallet, parameters, + xhash='6d4a333838d0ef96756cccC680AF2531075C512502Fb68c5503c63d93de859b3') + + +def testSendAttribFailsForHashShorterThanSha256( + looper, sdk_pool_handle, sdk_wallet_trustee): + new_wallet = sdk_add_new_nym(looper, sdk_pool_handle, sdk_wallet_trustee) + parameters = None + with pytest.raises(RequestNackedException) as e: + sdk_add_attribute_and_check(looper, sdk_pool_handle, new_wallet, parameters, + xhash=hexlify(randombytes(31)).decode()) + e.match('not a valid hash') + + +def testSendAttribFailsForHashLongerThanSha256( + looper, sdk_pool_handle, sdk_wallet_trustee): + new_wallet = sdk_add_new_nym(looper, sdk_pool_handle, sdk_wallet_trustee) + parameters = None + with pytest.raises(RequestNackedException) as e: + sdk_add_attribute_and_check(looper, sdk_pool_handle, new_wallet, parameters, + xhash=hexlify(randombytes(33)).decode()) + e.match('not a valid hash') + + +def testSendAttribFailsForBase58Hash( + looper, sdk_pool_handle, sdk_wallet_trustee): + raw = json.dumps({ + 'name': 'Alice' + }) + hash = sha256(raw.encode()).digest() + + new_wallet = sdk_add_new_nym(looper, sdk_pool_handle, 
sdk_wallet_trustee) + parameters = None + with pytest.raises(RequestNackedException) as e: + sdk_add_attribute_and_check(looper, sdk_pool_handle, new_wallet, parameters, + xhash=rawToFriendly(hash)) + e.match('not a valid hash') + + +def testSendAttribFailsForBase64Hash( + looper, sdk_pool_handle, sdk_wallet_trustee): + raw = json.dumps({ + 'name': 'Alice' + }) + + hash = sha256(raw.encode()).digest() + new_wallet = sdk_add_new_nym(looper, sdk_pool_handle, sdk_wallet_trustee) + parameters = None + with pytest.raises(RequestNackedException) as e: + sdk_add_attribute_and_check(looper, sdk_pool_handle, new_wallet, parameters, + xhash=b64encode(hash).decode()) + e.match('not a valid hash') + + +def testSendAttribHasInvalidSyntaxIfHashIsEmpty( + looper, sdk_pool_handle, sdk_wallet_trustee): + new_wallet = sdk_add_new_nym(looper, sdk_pool_handle, sdk_wallet_trustee) + parameters = None + with pytest.raises(RequestNackedException) as e: + sdk_add_attribute_and_check(looper, sdk_pool_handle, new_wallet, parameters, + xhash='') + e.match('not a valid hash') + + +def testSendAttribSucceedsForNonEmptyEnc( + looper, sdk_pool_handle, sdk_wallet_trustee): + raw = json.dumps({ + 'name': 'Alice' + }) + secretBox = SecretBox() + + new_wallet = sdk_add_new_nym(looper, sdk_pool_handle, sdk_wallet_trustee) + parameters = None + sdk_add_attribute_and_check(looper, sdk_pool_handle, new_wallet, parameters, + enc=secretBox.encrypt(raw.encode()).hex()) + + +def testSendAttribHasInvalidSyntaxIfEncIsEmpty( + looper, sdk_pool_handle, sdk_wallet_trustee): + new_wallet = sdk_add_new_nym(looper, sdk_pool_handle, sdk_wallet_trustee) + parameters = None + with pytest.raises(RequestNackedException) as e: + sdk_add_attribute_and_check(looper, sdk_pool_handle, new_wallet, parameters, + enc='') + e.match('empty string') + + +def testSendAttribHasInvalidSyntaxIfRawAndHashPassedAtSameTime( + looper, sdk_pool_handle, sdk_wallet_trustee): + raw = json.dumps({ + 'name': 'Alice' + }) + + new_wallet = 
sdk_add_new_nym(looper, sdk_pool_handle, sdk_wallet_trustee) + parameters = None + with pytest.raises(RequestNackedException) as e: + sdk_add_attribute_and_check(looper, sdk_pool_handle, new_wallet, parameters, + xhash=sha256(raw.encode()).hexdigest(), enc=raw) + e.match('only one field from raw, enc, hash is expected') + + +def testSendAttribHasInvalidSyntaxIfRawAndEncPassedAtSameTime( + looper, sdk_pool_handle, sdk_wallet_trustee): + raw = json.dumps({ + 'name': 'Alice' + }) + secretBox = SecretBox() + + new_wallet = sdk_add_new_nym(looper, sdk_pool_handle, sdk_wallet_trustee) + parameters = None + with pytest.raises(RequestNackedException) as e: + sdk_add_attribute_and_check(looper, sdk_pool_handle, new_wallet, parameters, + xhash=secretBox.encrypt(raw.encode()).hex(), enc=raw) + e.match('not a valid hash') + + +def testSendAttribHasInvalidSyntaxIfHashAndEncPassedAtSameTime( + looper, sdk_pool_handle, sdk_wallet_trustee): + raw = json.dumps({ + 'name': 'Alice' + }) + + secretBox = SecretBox() + encryptedRaw = secretBox.encrypt(raw.encode()) + + new_wallet = sdk_add_new_nym(looper, sdk_pool_handle, sdk_wallet_trustee) + parameters = None + with pytest.raises(RequestNackedException) as e: + sdk_add_attribute_and_check(looper, sdk_pool_handle, new_wallet, parameters, + xhash=sha256(encryptedRaw).hexdigest(), enc=encryptedRaw.hex()) + e.match('only one field from raw, enc, hash is expected') + + +def testSendAttribHasInvalidSyntaxIfRawHashAndEncPassedAtSameTime( + looper, sdk_pool_handle, sdk_wallet_trustee): + raw = json.dumps({ + 'name': 'Alice' + }) + + secretBox = SecretBox() + encryptedRaw = secretBox.encrypt(raw.encode()) + new_wallet = sdk_add_new_nym(looper, sdk_pool_handle, sdk_wallet_trustee) + parameters = raw + with pytest.raises(RequestNackedException) as e: + sdk_add_attribute_and_check(looper, sdk_pool_handle, new_wallet, parameters, + xhash=sha256(encryptedRaw).hexdigest(), enc=encryptedRaw.hex()) + e.match('only one field from raw, enc, hash is 
expected') diff --git a/indy_node/test/txn_validation/test_send_get_nym_validation.py b/indy_node/test/txn_validation/test_send_get_nym_validation.py new file mode 100644 index 000000000..6d38bbfa5 --- /dev/null +++ b/indy_node/test/txn_validation/test_send_get_nym_validation.py @@ -0,0 +1,92 @@ +from binascii import hexlify +import pytest + +from indy.ledger import build_get_nym_request +from indy_common.constants import TRUST_ANCHOR_STRING +from indy_node.test.helper import check_str_is_base58_compatible, modify_field, \ + createUuidIdentifier, createHalfKeyIdentifierAndAbbrevVerkey, createCryptonym +from indy_node.test.nym_txn.test_nym_additional import get_nym + +from plenum.test.helper import sdk_get_and_check_replies +from plenum.common.util import friendlyToRaw +from plenum.common.exceptions import RequestNackedException +from plenum.common.constants import IDENTIFIER +from plenum.test.pool_transactions.helper import sdk_add_new_nym, sdk_sign_and_send_prepared_request + + +def testSendGetNymSucceedsForExistingUuidDest( + looper, sdk_pool_handle, sdk_wallet_trustee): + new_wallet = sdk_add_new_nym(looper, sdk_pool_handle, sdk_wallet_trustee) + get_nym(looper, sdk_pool_handle, sdk_wallet_trustee, new_wallet[1]) + + +def testSendGetNymFailsForNotExistingUuidDest( + looper, sdk_pool_handle, sdk_wallet_trustee): + get_nym(looper, sdk_pool_handle, sdk_wallet_trustee, createUuidIdentifier()) + + +def test_get_nym_returns_role( + looper, sdk_pool_handle, sdk_wallet_trustee): + current_role = TRUST_ANCHOR_STRING + uuidIdentifier, abbrevVerkey = createHalfKeyIdentifierAndAbbrevVerkey() + sdk_add_new_nym(looper, sdk_pool_handle, sdk_wallet_trustee, + dest=uuidIdentifier, verkey=abbrevVerkey, role=current_role) + get_nym(looper, sdk_pool_handle, sdk_wallet_trustee, createUuidIdentifier()) + + new_role = '' + sdk_add_new_nym(looper, sdk_pool_handle, sdk_wallet_trustee, + dest=uuidIdentifier, verkey=abbrevVerkey, role=new_role) + get_nym(looper, sdk_pool_handle, 
sdk_wallet_trustee, createUuidIdentifier()) + + +def testSendGetNymFailsIfCryptonymIsPassedAsDest( + looper, sdk_pool_handle, sdk_wallet_trustee): + get_nym(looper, sdk_pool_handle, sdk_wallet_trustee, createCryptonym()) + + +def testSendGetNymFailsIfDestIsPassedInHexFormat( + looper, sdk_pool_handle, sdk_wallet_trustee): + # Sometimes hex representation can use only base58 compatible characters + while True: + uuidIdentifier, abbrevVerkey = createHalfKeyIdentifierAndAbbrevVerkey() + hexEncodedUuidIdentifier = hexlify( + friendlyToRaw(uuidIdentifier)).decode() + if not check_str_is_base58_compatible(hexEncodedUuidIdentifier): + break + sdk_add_new_nym(looper, sdk_pool_handle, sdk_wallet_trustee, dest=uuidIdentifier, verkey=abbrevVerkey) + + _, s_did = sdk_wallet_trustee + get_nym_req = looper.loop.run_until_complete(build_get_nym_request(s_did, uuidIdentifier)) + get_nym_req = modify_field(get_nym_req, hexEncodedUuidIdentifier, IDENTIFIER) + req = sdk_sign_and_send_prepared_request(looper, sdk_wallet_trustee, + sdk_pool_handle, get_nym_req) + with pytest.raises(RequestNackedException) as e: + sdk_get_and_check_replies(looper, [req]) + e.match('should not contain the following chars') + + +def testSendGetNymFailsIfDestIsInvalid( + looper, sdk_pool_handle, sdk_wallet_trustee): + uuidIdentifier = createUuidIdentifier() + invalidIdentifier = uuidIdentifier[:-4] + _, s_did = sdk_wallet_trustee + get_nym_req = looper.loop.run_until_complete(build_get_nym_request(s_did, uuidIdentifier)) + get_nym_req = modify_field(get_nym_req, invalidIdentifier, IDENTIFIER) + req = sdk_sign_and_send_prepared_request(looper, sdk_wallet_trustee, + sdk_pool_handle, get_nym_req) + with pytest.raises(RequestNackedException) as e: + sdk_get_and_check_replies(looper, [req]) + e.match('should be one of \[16, 32\]') + + +def testSendGetNymHasInvalidSyntaxIfDestIsEmpty( + looper, sdk_pool_handle, sdk_wallet_trustee): + uuidIdentifier = createUuidIdentifier() + _, s_did = sdk_wallet_trustee + 
get_nym_req = looper.loop.run_until_complete(build_get_nym_request(s_did, uuidIdentifier)) + get_nym_req = modify_field(get_nym_req, '', IDENTIFIER) + req = sdk_sign_and_send_prepared_request(looper, sdk_wallet_trustee, + sdk_pool_handle, get_nym_req) + with pytest.raises(RequestNackedException) as e: + sdk_get_and_check_replies(looper, [req]) + e.match('client request invalid') diff --git a/indy_node/test/upgrade/conftest.py b/indy_node/test/upgrade/conftest.py index c5a9c1f24..a0f9c903d 100644 --- a/indy_node/test/upgrade/conftest.py +++ b/indy_node/test/upgrade/conftest.py @@ -33,21 +33,18 @@ def patch_packet_mgr_output(monkeypatch, pkg_name, pkg_version): "License: EXT_PKT_DEPS-lic\nVendor: none\n". \ format(pkg_name, pkg_version, APP_NAME, *EXT_TOP_PKT_DEPS[0], *EXT_TOP_PKT_DEPS[1]) top_level_package = (pkg_name, pkg_version) - anoncreds_package = ('indy-anoncreds', '0.0.2') plenum_package = ('indy-plenum', '0.0.3') top_level_package_with_version = '{}={}'.format(*top_level_package) top_level_package_dep1_with_version = '{}={}'.format(*EXT_TOP_PKT_DEPS[0]) top_level_package_dep2_with_version = '{}={}'.format(*EXT_TOP_PKT_DEPS[1]) node_package_with_version = '{}={}'.format(*node_package) plenum_package_with_version = '{}={}'.format(*plenum_package) - anoncreds_package_with_version = '{}={}'.format(*anoncreds_package) mock_info = { top_level_package_with_version: "{}{} (= {}) {} (= {}), {} (= {})".format( randomText(100), *node_package, *EXT_TOP_PKT_DEPS[0], *EXT_TOP_PKT_DEPS[1]), - node_package_with_version: '{}{} (= {}){}{} (= {}){}'.format( - randomText(100), *plenum_package, randomText(100), *anoncreds_package, randomText(100)), + node_package_with_version: '{}{} (= {}){}{}'.format( + randomText(100), *plenum_package, randomText(100), randomText(100)), plenum_package_with_version: '{}'.format(randomText(100)), - anoncreds_package_with_version: '{}'.format(randomText(100)), top_level_package_dep1_with_version: '{}{} (= {})'.format(randomText(100), 
*plenum_package), top_level_package_dep2_with_version: '{}{} (= {})'.format(randomText(100), *node_package) } diff --git a/indy_node/test/upgrade/helper.py b/indy_node/test/upgrade/helper.py index e41668d46..1b5bdbfb2 100644 --- a/indy_node/test/upgrade/helper.py +++ b/indy_node/test/upgrade/helper.py @@ -160,7 +160,7 @@ def sendUpgradeMessage(version, pkg_name: str = APP_NAME): def nodeControlGeneralMonkeypatching(tool, monkeypatch, tdir, stdout): ret = type("", (), {})() ret.returncode = 0 - ret.stdout = stdout + ret.stdout = stdout if isinstance(stdout, bytes) else stdout.encode() tool.base_dir = tdir tool.indy_dir = os.path.join(tool.base_dir, '.indy') tool.tmp_dir = os.path.join(tool.base_dir, '.indy_tmp') diff --git a/indy_node/test/upgrade/test_get_deps_with_filter.py b/indy_node/test/upgrade/test_get_deps_with_filter.py new file mode 100644 index 000000000..fbbbf0b53 --- /dev/null +++ b/indy_node/test/upgrade/test_get_deps_with_filter.py @@ -0,0 +1,60 @@ +import pytest +from indy_node.utils.node_control_utils import NodeControlUtil + + +# a -> bb, cc +# bb -> ddd +# cc -> eee +# ddd -> ffff, jjjj +# eee -> hhhh, iiii +pkg_a = ('a', '0.0.0') +pkg_bb = ('bb', '1.0.0') +pkg_cc = ('cc', '1.0.0') +pkg_ddd = ('ddd', '1.1.0') +pkg_eee = ('eee', '1.1.0') +pkg_ffff = ('ffff', '1.1.1') +pkg_jjjj = ('jjjj', '1.1.1') +pkg_hhhh = ('hhhh', '1.1.1') +pkg_iiii = ('iiii', '1.1.1') + +mock_info = { + "{}={}".format(*pkg_a): 'Version: {}\nDepends:{} (= {}), {} (= {})\n'.format(pkg_a[1], *pkg_bb, *pkg_cc), + "{}={}".format(*pkg_bb): 'Version: {}\nDepends:{} (= {})\n'.format(pkg_bb[1], *pkg_ddd), + "{}={}".format(*pkg_cc): 'Version: {}\nDepends:{} (= {})\n'.format(pkg_cc[1], *pkg_eee), + "{}={}".format(*pkg_ddd): 'Version: {}\nDepends:{} (= {}), {} (= {})\n'.format(pkg_ddd[1], *pkg_ffff, *pkg_jjjj), + "{}={}".format(*pkg_eee): 'Version: {}\nDepends:{} (= {}), {} (= {})\n'.format(pkg_eee[1], *pkg_hhhh, *pkg_iiii), + "{}={}".format(*pkg_ffff): 'Version: {}\nDepends: 
\n'.format(pkg_ffff[1]), + "{}={}".format(*pkg_jjjj): 'Version: {}\nDepends: \n'.format(pkg_jjjj[1]), + "{}={}".format(*pkg_hhhh): 'Version: {}\nDepends: \n'.format(pkg_hhhh[1]), + "{}={}".format(*pkg_iiii): 'Version: {}\nDepends: \n'.format(pkg_iiii[1]) +} + + +def mock_get_info_from_package_manager(*package): + ret = "" + for p in package: + ret += mock_info.get(p, "") + return ret + + +@pytest.fixture() +def patch_pkg_mgr(monkeypatch): + monkeypatch.setattr(NodeControlUtil, '_get_info_from_package_manager', + lambda *x: mock_get_info_from_package_manager(*x)) + + +@pytest.mark.parametrize("fltr_hld,res_dep", + [([], [pkg_a]), + ([pkg_a[0]], [pkg_a]), + ([pkg_a[0], pkg_cc[0]], [pkg_a, pkg_bb, pkg_cc]), + ([pkg_cc[0]], [pkg_a, pkg_bb, pkg_cc]), + ([pkg_a[0], pkg_cc[0], pkg_ddd[0]], [pkg_a, pkg_bb, pkg_cc, pkg_ddd, pkg_eee]), + (["out_scope"], [pkg_a, pkg_bb, pkg_cc, pkg_ddd, pkg_eee, pkg_iiii, pkg_hhhh, pkg_jjjj, pkg_ffff]), + ]) +def test_deps_levels(patch_pkg_mgr, fltr_hld, res_dep): + deps_list = NodeControlUtil.get_deps_tree_filtered('{}={}'.format(*pkg_a), filter_list=fltr_hld) + flat_deps = [] + NodeControlUtil.dep_tree_traverse(deps_list, flat_deps) + assert len(flat_deps) == len(res_dep) + for d in res_dep: + assert "{}={}".format(*d) in flat_deps diff --git a/indy_node/test/upgrade/test_node_control_tool_resolves_dep_top_level.py b/indy_node/test/upgrade/test_node_control_tool_resolves_dep_top_level.py index 2195b7f13..d9d025ffc 100644 --- a/indy_node/test/upgrade/test_node_control_tool_resolves_dep_top_level.py +++ b/indy_node/test/upgrade/test_node_control_tool_resolves_dep_top_level.py @@ -5,7 +5,6 @@ from plenum.test.helper import randomText from indy_node.utils.node_control_utils import NodeControlUtil - EXT_PKT_VERSION = '7.88.999' EXT_PKT_NAME = 'SomeTopLevelPkt' node_package = (APP_NAME, '0.0.1') @@ -13,7 +12,7 @@ PACKAGE_MNG_EXT_PTK_OUTPUT = "Package: {}\nStatus: install ok installed\nPriority: extra\nSection: default\n" \ "Installed-Size: 
21\nMaintainer: EXT_PKT_NAME-fond\nArchitecture: amd64\nVersion: {}\n" \ "Depends: {}, {} (= {}), {} (= {})\nDescription: EXT_PKT_DEPS-desc\n" \ - "License: EXT_PKT_DEPS-lic\nVendor: none\n".\ + "License: EXT_PKT_DEPS-lic\nVendor: none\n". \ format(EXT_PKT_NAME, EXT_PKT_VERSION, APP_NAME, *EXT_TOP_PKT_DEPS[0], *EXT_TOP_PKT_DEPS[1]) @@ -28,35 +27,50 @@ def tconf(tconf): def test_node_as_depend(monkeypatch, tconf): nct = NodeControlTool(config=tconf) top_level_package = (EXT_PKT_NAME, EXT_PKT_VERSION) - anoncreds_package = ('indy-anoncreds', '0.0.2') plenum_package = ('indy-plenum', '0.0.3') + python_crypto = ('python3-indy-crypto', '0.4.5') + libindy_crypto = ('libindy-crypto', '0.4.5') top_level_package_with_version = '{}={}'.format(*top_level_package) top_level_package_dep1_with_version = '{}={}'.format(*EXT_TOP_PKT_DEPS[0]) top_level_package_dep2_with_version = '{}={}'.format(*EXT_TOP_PKT_DEPS[1]) node_package_with_version = '{}={}'.format(*node_package) plenum_package_with_version = '{}={}'.format(*plenum_package) - anoncreds_package_with_version = '{}={}'.format(*anoncreds_package) + python_crypto_with_version = '{}={}'.format(*python_crypto) + libindy_crypto_with_version = '{}={}'.format(*libindy_crypto) mock_info = { - top_level_package_with_version: "{}{} (= {}) {} (= {}), {} (= {})".format( - randomText(100), *node_package, *EXT_TOP_PKT_DEPS[0], *EXT_TOP_PKT_DEPS[1]), - node_package_with_version: '{}{} (= {}){}{} (= {}){}'.format( - randomText(100), *plenum_package, randomText(100), *anoncreds_package, randomText(100)), - plenum_package_with_version: '{}'.format(randomText(100)), - anoncreds_package_with_version: '{}'.format(randomText(100)), - top_level_package_dep1_with_version: '{}{} (= {})'.format(randomText(100), *plenum_package), - top_level_package_dep2_with_version: '{}{} (= {})'.format(randomText(100), *node_package) + top_level_package_with_version: "{}\nVersion:{}\nDepends:{} (= {}), {} (= {}), {} (= {})\n".format( + randomText(100), 
top_level_package[1], *node_package, *EXT_TOP_PKT_DEPS[0], *EXT_TOP_PKT_DEPS[1]), + node_package_with_version: '{}\nVersion:{}\nDepends:{} (= {})\n'.format( + randomText(100), node_package[1], *plenum_package), + plenum_package_with_version: '{}\nVersion:{}\nDepends:{} (= {})\n'.format( + randomText(100), plenum_package[1], *python_crypto), + top_level_package_dep1_with_version: '{}\nVersion:{}\nDepends:{} (= {})\n'.format( + randomText(100), EXT_TOP_PKT_DEPS[0][1], *plenum_package), + top_level_package_dep2_with_version: '{}\nVersion:{}\nDepends:{} (= {})\n'.format( + randomText(100), EXT_TOP_PKT_DEPS[1][1], *node_package), + python_crypto_with_version: '{}\nVersion:{}\nDepends:{} (= {})\n'.format( + randomText(100), python_crypto[1], *libindy_crypto), + libindy_crypto_with_version: '{}\nVersion:{}\nDepends: \n{}'.format( + randomText(100), libindy_crypto[1], randomText(100)), } - def mock_get_info_from_package_manager(package): - return mock_info.get(package, None) + def mock_get_info_from_package_manager(*package): + ret = "" + for p in package: + ret += mock_info.get(p, "") + return ret monkeypatch.setattr(NodeControlUtil, 'update_package_cache', lambda *x: None) monkeypatch.setattr(NodeControlUtil, '_get_info_from_package_manager', - lambda x: mock_get_info_from_package_manager(x)) + lambda *x: mock_get_info_from_package_manager(*x)) + monkeypatch.setattr(NodeControlUtil, 'get_sys_holds', + lambda *x: [top_level_package[0], plenum_package[0], node_package[0], + EXT_TOP_PKT_DEPS[0][0], EXT_TOP_PKT_DEPS[1][0], python_crypto[0], + libindy_crypto[0]]) monkeypatch.setattr(NodeControlUtil, '_get_curr_info', lambda *x: PACKAGE_MNG_EXT_PTK_OUTPUT) - nct._ext_init() ret = nct._get_deps_list(top_level_package_with_version) nct.server.close() - assert ret.split() == [anoncreds_package_with_version, plenum_package_with_version, - node_package_with_version, top_level_package_dep2_with_version, - top_level_package_dep1_with_version, top_level_package_with_version] \ No 
newline at end of file + assert sorted(ret.split()) == sorted([libindy_crypto_with_version, + python_crypto_with_version, plenum_package_with_version, + node_package_with_version, top_level_package_dep2_with_version, + top_level_package_dep1_with_version, top_level_package_with_version]) diff --git a/indy_node/test/upgrade/test_node_control_tool_resolves_dependencies.py b/indy_node/test/upgrade/test_node_control_tool_resolves_dependencies.py index 5157b17a1..7cb97aa46 100644 --- a/indy_node/test/upgrade/test_node_control_tool_resolves_dependencies.py +++ b/indy_node/test/upgrade/test_node_control_tool_resolves_dependencies.py @@ -6,32 +6,35 @@ def testNodeControlResolvesDependencies(monkeypatch, tconf): nct = NodeControlTool(config=tconf) node_package = ('indy-node', '0.0.1') - anoncreds_package = ('indy-anoncreds', '0.0.2') plenum_package = ('indy-plenum', '0.0.3') node_package_with_version = '{}={}'.format(*node_package) plenum_package_with_version = '{}={}'.format(*plenum_package) - anoncreds_package_with_version = '{}={}'.format(*anoncreds_package) - mock_info = {node_package_with_version: '{}{} (= {}){}{} (= {}){}'.format( - randomText(100), *plenum_package, randomText(100), *anoncreds_package, randomText(100)), - plenum_package_with_version: '{}'.format(randomText(100)), - anoncreds_package_with_version: '{}'.format(randomText(100)) + mock_info = {node_package_with_version: '{}\nVersion: {}\nDepends:{} (= {})\n'.format( + randomText(100), node_package[1], *plenum_package), + plenum_package_with_version: '{}'.format(randomText(100)) } - def mock_get_info_from_package_manager(package): - return mock_info.get(package, None) + def mock_get_info_from_package_manager(*package): + ret = "" + for p in package: + ret += mock_info.get(p, "") + return ret monkeypatch.setattr(NodeControlUtil, 'update_package_cache', lambda *x: None) + monkeypatch.setattr(NodeControlUtil, 'get_sys_holds', + lambda *x: [node_package[0], plenum_package[0]]) 
monkeypatch.setattr(NodeControlUtil, '_get_info_from_package_manager', - lambda x: mock_get_info_from_package_manager(x)) + lambda *x: mock_get_info_from_package_manager(*x)) ret = nct._get_deps_list(node_package_with_version) nct.server.close() - assert ret.split() == [anoncreds_package_with_version, plenum_package_with_version, node_package_with_version] + assert sorted(ret.split()) == sorted([plenum_package_with_version, + node_package_with_version]) def test_create_deps_for_exotic_version_style(): depends = ['package1', 'package2'] versions = ['1.6.74', '0.9.4~+.-AbCd1.2.3.4.EiF'] - def mock_info_from_package_manager(package): + def mock_info_from_package_manager(*package): pkg_info = """Package: {package} Version: 1.1.26 Priority: extra @@ -45,7 +48,7 @@ def mock_info_from_package_manager(package): Download-Size: 10.4 kB APT-Sources: https://some.org/deb xenial/rc amd64 Packages Description: Some package -""".format(**{'package': package, +""".format(**{'package': package[0], 'dep1': depends[0], 'dep2': depends[1], 'ver1': versions[0], @@ -53,27 +56,27 @@ def mock_info_from_package_manager(package): return pkg_info ncu = NodeControlUtil ncu._get_info_from_package_manager = mock_info_from_package_manager - ret = ncu.get_deps_tree('package', include=depends, depth=MAX_DEPS_DEPTH-1) + ret = ncu.get_deps_tree('package', depth=MAX_DEPS_DEPTH-1) """ Expected return value is: 0 item is package, 1 deps with version for previous package, """ - assert len(ret[1]) == len(depends) - assert depends[0] in ret[1][0] - assert depends[1] in ret[1][1] - assert "{}={}".format(depends[0], versions[0]) in ret[1] - assert "{}={}".format(depends[1], versions[1]) in ret[1] + assert len(ret[1][0]) == len(depends) + assert any([depends[0] in l for l in ret[1][0]]) + assert any([depends[0] in l for l in ret[1][0]]) + assert any(["{}={}".format(depends[0], versions[0]) in l for l in ret[1][0]]) + assert any(["{}={}".format(depends[1], versions[1]) in l for l in ret[1][0]]) def 
test_max_depth_for_deps_tree(): depends = ['package1', 'package2'] - def mock_info_from_package_manager(package): + def mock_info_from_package_manager(*package): pkg_info = """Depends: {} (= 1.1.1), {} (= 2.2.2)""".format(depends[0], depends[1]) return pkg_info ncu = NodeControlUtil ncu._get_info_from_package_manager = mock_info_from_package_manager - ret = ncu.get_deps_tree('package', include=depends) + ret = ncu.get_deps_tree('package') assert len(ret) <= MAX_DEPS_DEPTH \ No newline at end of file diff --git a/indy_node/test/upgrade/test_upgrade_top_level_config_not_set.py b/indy_node/test/upgrade/test_upgrade_top_level_config_not_set.py deleted file mode 100644 index 8af13154e..000000000 --- a/indy_node/test/upgrade/test_upgrade_top_level_config_not_set.py +++ /dev/null @@ -1,20 +0,0 @@ -import pytest -from indy_node.test.upgrade.helper import NodeControlToolExecutor as NCT, \ - nodeControlGeneralMonkeypatching - - -@pytest.fixture(scope="module") -def tconf(tconf): - oldv = tconf.UPGRADE_ENTRY - tconf.UPGRADE_ENTRY = None - yield tconf - tconf.UPGRADE_ENTRY = oldv - - -def test_upg_invalid_cfg(tdir, monkeypatch, tconf): - def transform(tool): - nodeControlGeneralMonkeypatching(tool, monkeypatch, tdir, "") - - with pytest.raises(AssertionError) as ex: - NCT(backup_dir=tdir, backup_target=tdir, transform=transform) - assert "UPGRADE_ENTRY config parameter must be set" in str(ex.value) diff --git a/indy_node/test/upgrade/test_upgrade_top_level_def_cfg.py b/indy_node/test/upgrade/test_upgrade_top_level_def_cfg.py deleted file mode 100644 index 8769128eb..000000000 --- a/indy_node/test/upgrade/test_upgrade_top_level_def_cfg.py +++ /dev/null @@ -1,16 +0,0 @@ -from indy_node.test.upgrade.helper import NodeControlToolExecutor as NCT, \ - nodeControlGeneralMonkeypatching -from indy_node.utils.node_control_tool import DEPS, PACKAGES_TO_HOLD - - -def test_upg_default_cfg(tdir, monkeypatch, tconf): - def transform(tool): - nodeControlGeneralMonkeypatching(tool, 
monkeypatch, tdir, "") - - nct = NCT(backup_dir=tdir, backup_target=tdir, transform=transform) - try: - assert nct.tool.ext_ver is None - assert nct.tool.deps == DEPS - assert nct.tool.packages_to_hold.strip(" ") == PACKAGES_TO_HOLD.strip(" ") - finally: - nct.stop() diff --git a/indy_node/test/upgrade/test_upgrade_top_level_ext_pkg.py b/indy_node/test/upgrade/test_upgrade_top_level_ext_pkg.py deleted file mode 100644 index 0a0a4ab6d..000000000 --- a/indy_node/test/upgrade/test_upgrade_top_level_ext_pkg.py +++ /dev/null @@ -1,41 +0,0 @@ -import pytest -from indy_node.test.upgrade.helper import NodeControlToolExecutor as NCT, \ - nodeControlGeneralMonkeypatching -from indy_node.utils.node_control_tool import DEPS, PACKAGES_TO_HOLD - - -EXT_PKT_VERSION = '7.88.999' -EXT_PKT_NAME = 'SomeTopLevelPkt' -EXT_PKT_DEPS = ['aa0', 'bb1', 'cc2', 'dd3', 'ee4', 'ff5', 'gg6', 'hh7'] -PACKAGE_MNG_EXT_PTK_OUTPUT = "Package: {}\nStatus: install ok installed\nPriority: extra\nSection: default\n" \ - "Installed-Size: 21\nMaintainer: EXT_PKT_NAME-fond\nArchitecture: amd64\nVersion: {}\n" \ - "Depends: {}, {} (= 1.1.1), {} (< 1.1.1), {} (<= 1.1.1), {} (> 1.1.1), {} (>= 1.1.1)," \ - " {} (<< 1.1.1), {} (>> 1.1.1)\nDescription: EXT_PKT_DEPS-desc\n" \ - "License: EXT_PKT_DEPS-lic\nVendor: none\n".\ - format(EXT_PKT_NAME, EXT_PKT_VERSION, EXT_PKT_DEPS[0], EXT_PKT_DEPS[1], EXT_PKT_DEPS[2], - EXT_PKT_DEPS[3], EXT_PKT_DEPS[4], EXT_PKT_DEPS[5], EXT_PKT_DEPS[6], EXT_PKT_DEPS[7]) - - -@pytest.fixture(scope="module") -def tconf(tconf): - oldv = tconf.UPGRADE_ENTRY - tconf.UPGRADE_ENTRY = EXT_PKT_NAME - yield tconf - tconf.UPGRADE_ENTRY = oldv - - -def test_upg_ext_info(tdir, monkeypatch, tconf): - def transform(tool): - nodeControlGeneralMonkeypatching(tool, monkeypatch, tdir, PACKAGE_MNG_EXT_PTK_OUTPUT) - - nct = NCT(backup_dir=tdir, backup_target=tdir, transform=transform) - try: - assert EXT_PKT_VERSION, EXT_PKT_DEPS == nct.tool._ext_info() - nct.tool._ext_init() - assert nct.tool.ext_ver == 
EXT_PKT_VERSION - assert nct.tool.deps == EXT_PKT_DEPS + DEPS - hlds = list(set([EXT_PKT_NAME] + EXT_PKT_DEPS + PACKAGES_TO_HOLD.strip(" ").split(" "))).sort() - cmp_with = list(set(nct.tool.packages_to_hold.strip(" ").split(" "))).sort() - assert cmp_with == hlds - finally: - nct.stop() diff --git a/indy_node/test/upgrade/test_version_parsing.py b/indy_node/test/upgrade/test_version_parsing.py new file mode 100644 index 000000000..0b8f3197d --- /dev/null +++ b/indy_node/test/upgrade/test_version_parsing.py @@ -0,0 +1,21 @@ +import pytest +from indy_node.utils.node_control_utils import NodeControlUtil + + +@pytest.mark.parametrize("vers_str,vers_parsed", + [("aa (= 1)", ["aa=1"]), ("aa (= 1), bb", ["aa=1", "bb"]), + ("aa (= 1), bb (= 2) | cc (= 3)", ["aa=1", "bb=2", "cc=3"]), + ("aa (< 1), bb (> 2) | cc (<= 3), dd (>= 4)", ["aa=1", "bb=2", "cc=3", "dd=4"]), + ("aa (<< 1), bb (>> 2) | cc (<= 3) | dd", ["aa=1", "bb=2", "cc=3", "dd"])]) +def test_version_parse(vers_str, vers_parsed): + vers = NodeControlUtil._parse_deps(vers_str) + assert vers == vers_parsed + + +@pytest.mark.parametrize("pkcts,pkcts_dd", + [([], []), (["aa=1"], ["aa=1"]), (["aa=1", "aa=1", "aa=2"], ["aa=1"]), + (["aa=1", "bb=2", "cc=3"], ["aa=1", "bb=2", "cc=3"]), + (["aa=1", "bb=2", "cc=3", "aa=2", "bb=3", "cc=4"], ["aa=1", "bb=2", "cc=3"])]) +def test_pkts_dedup(pkcts, pkcts_dd): + processed_pckts = NodeControlUtil._pkts_dedup(pkcts) + assert processed_pckts == pkcts_dd diff --git a/indy_node/test/validator_info/test_validator_info.py b/indy_node/test/validator_info/test_validator_info.py index ae7851052..fcf128715 100644 --- a/indy_node/test/validator_info/test_validator_info.py +++ b/indy_node/test/validator_info/test_validator_info.py @@ -1,32 +1,22 @@ import pytest -import os -import importlib -import json - -from stp_core.loop.eventually import eventually +from indy_node.test.state_proof.helper import sdk_submit_operation_and_get_result from plenum.common.constants import TARGET_NYM, RAW, 
NAME, VERSION, ORIGIN -from plenum.test import waits -from plenum.test.helper import check_sufficient_replies_received - # noinspection PyUnresolvedReferences -from plenum.test.validator_info.conftest import \ - info, node # qa +from plenum.common.ledger import Ledger +from plenum.test.primary_selection.test_primary_selector import FakeLedger +from plenum.test.validator_info.conftest import info, node # qa from indy_common.constants import TXN_TYPE, DATA, GET_NYM, GET_ATTR, GET_SCHEMA, GET_CLAIM_DEF, REF, SIGNATURE_TYPE - -from indy_client.client.wallet.attribute import Attribute from indy_node.__metadata__ import __version__ as node_pgk_version - PERIOD_SEC = 1 TEST_NODE_NAME = 'Alpha' STATUS_FILENAME = '{}_info.json'.format(TEST_NODE_NAME.lower()) INFO_FILENAME = '{}_info.json'.format(TEST_NODE_NAME.lower()) - def test_validator_info_file_schema_is_valid(info): assert isinstance(info, dict) assert 'config' in info['Node_info']['Metrics']['transaction-count'] @@ -36,19 +26,19 @@ def test_validator_info_file_metrics_count_ledger_field_valid(info): assert info['Node_info']['Metrics']['transaction-count']['config'] == 0 -@pytest.mark.skip(reason="info will not be included by default") -def test_validator_info_file_software_indy_node_valid(info): - assert info['Software']['indy-node'] == node_pgk_version +def test_validator_info_bls_key_field_valid(node, info): + assert info['Node_info']['BLS_key'] -@pytest.mark.skip(reason="info will not be included by default") -def test_validator_info_file_software_sovrin_valid(info): - try: - pkg = importlib.import_module('sovrin') - except ImportError: - assert info['Software']['sovrin'] is None - else: - assert info['Software']['sovrin'] == pkg.__version__ +def test_validator_info_ha_fields_valid(node, info): + assert info['Node_info']['Node_ip'] + assert info['Node_info']['Client_ip'] + assert info['Node_info']['Node_port'] + assert info['Node_info']['Client_port'] + + +def 
test_validator_info_file_software_indy_node_valid(info): + assert info['Software']['indy-node'] == node_pgk_version @pytest.fixture() @@ -102,28 +92,15 @@ def test_validator_info_file_get_claim_def(read_txn_and_get_latest_info, assert latest_info['Node_info']['Metrics']['average-per-second']['read-transactions'] > 0 -@pytest.fixture() -def client_and_wallet(steward, stewardWallet): - return steward, stewardWallet - - -def submitRequests(client, wallet, op): - req = wallet.signOp(op) - # TODO: This looks boilerplate - wallet.pendRequest(req) - reqs = wallet.preparePending() - return client.submitReqs(*reqs)[0] - - -def makeGetNymRequest(client, wallet, nym): +def makeGetNymRequest(looper, sdk_pool_handle, sdk_wallet, nym): op = { TARGET_NYM: nym, TXN_TYPE: GET_NYM, } - return submitRequests(client, wallet, op) + return sdk_submit_operation_and_get_result(looper, sdk_pool_handle, sdk_wallet, op) -def makeGetSchemaRequest(client, wallet, nym): +def makeGetSchemaRequest(looper, sdk_pool_handle, sdk_wallet, nym): op = { TARGET_NYM: nym, TXN_TYPE: GET_SCHEMA, @@ -132,55 +109,89 @@ def makeGetSchemaRequest(client, wallet, nym): VERSION: '1.0', } } - return submitRequests(client, wallet, op) + return sdk_submit_operation_and_get_result(looper, sdk_pool_handle, sdk_wallet, op) -def makeGetAttrRequest(client, wallet, nym, raw): +def makeGetAttrRequest(looper, sdk_pool_handle, sdk_wallet, nym, raw): op = { TARGET_NYM: nym, TXN_TYPE: GET_ATTR, RAW: raw } - return submitRequests(client, wallet, op) + return sdk_submit_operation_and_get_result(looper, sdk_pool_handle, sdk_wallet, op) -def makeGetClaimDefRequest(client, wallet): +def makeGetClaimDefRequest(looper, sdk_pool_handle, sdk_wallet): op = { TXN_TYPE: GET_CLAIM_DEF, - ORIGIN: '1' * 16, # must be a valid DID + ORIGIN: '1' * 16, # must be a valid DID REF: 1, SIGNATURE_TYPE: 'any' } - return submitRequests(client, wallet, op) + return sdk_submit_operation_and_get_result(looper, sdk_pool_handle, sdk_wallet, op) 
@pytest.fixture -def read_txn_and_get_latest_info(txnPoolNodesLooper, - client_and_wallet, node): - client, wallet = client_and_wallet +def read_txn_and_get_latest_info(looper, sdk_pool_handle, + sdk_wallet_client, node): + _, did = sdk_wallet_client def read_wrapped(txn_type): - if txn_type == GET_NYM: - reqs = makeGetNymRequest(client, wallet, wallet.defaultId) + makeGetNymRequest(looper, sdk_pool_handle, sdk_wallet_client, did) elif txn_type == GET_SCHEMA: - reqs = makeGetSchemaRequest(client, wallet, wallet.defaultId) + makeGetSchemaRequest(looper, sdk_pool_handle, sdk_wallet_client, did) elif txn_type == GET_ATTR: - reqs = makeGetAttrRequest(client, wallet, wallet.defaultId, "attrName") + makeGetAttrRequest(looper, sdk_pool_handle, sdk_wallet_client, did, "attrName") elif txn_type == GET_CLAIM_DEF: - reqs = makeGetClaimDefRequest(client, wallet) + makeGetClaimDefRequest(looper, sdk_pool_handle, sdk_wallet_client) else: assert False, "unexpected txn type {}".format(txn_type) - - timeout = waits.expectedTransactionExecutionTime( - len(client.inBox)) - txnPoolNodesLooper.run( - eventually(check_sufficient_replies_received, - client, reqs[0].identifier, reqs[0].reqId, - retryWait=1, timeout=timeout)) return node._info_tool.info + return read_wrapped def reset_node_total_read_request_number(node): node.total_read_request_number = 0 + + +class FakeTree: + @property + def root_hash(self): + return '222222222222222222222222222' + +class FakeLedgerEx(FakeLedger): + @property + def uncommittedRootHash(self): + return '111111111111111111111111111111111' + + @property + def uncommittedTxns(self): + return [] + + @property + def tree(self): + return FakeTree() + + @property + def size(self): + return 100 + + +def test_validator_info_file_metrics_count_all_ledgers_field_valid(node): + new_ids = [444, 555, 666, 777] + for newid in new_ids: + node.ledgerManager.addLedger(newid, FakeLedgerEx(newid, newid)) + info = node._info_tool.info + has_cnt = 
len(info['Node_info']['Metrics']['transaction-count']) + assert has_cnt == len(new_ids) + 3 + + +def test_validator_info_update_date_field_valid(info): + assert "Update time" in info + import time + import datetime + from_str = time.mktime(datetime.datetime.strptime(info["Update time"], + "%A, %B %d, %Y %I:%M:%S %p %z").timetuple()) + assert int(from_str) == info["timestamp"] diff --git a/indy_node/test/write_permission/test_send_write_permission_anyone.py b/indy_node/test/write_permission/test_send_write_permission_anyone.py index 6156a259b..56e31dc8b 100644 --- a/indy_node/test/write_permission/test_send_write_permission_anyone.py +++ b/indy_node/test/write_permission/test_send_write_permission_anyone.py @@ -15,9 +15,11 @@ from indy.ledger import build_schema_request from indy_node.test.anon_creds.conftest import claim_def, build_revoc_reg_entry_for_given_revoc_reg_def -from indy_client.test.test_nym_attrib import attributeData, attributeName, attributeValue +from indy_node.test.attrib_txn.test_nym_attrib import attributeData, attributeName, attributeValue from indy_node.test.anon_creds.conftest import build_revoc_def_by_default +from indy_node.test.schema.test_send_get_schema import send_schema_seq_no + @pytest.fixture(scope="module") def tconf(tconf): diff --git a/indy_node/test/write_permission/test_send_write_permission_no_client.py b/indy_node/test/write_permission/test_send_write_permission_no_client.py index 188480ab4..e71b4f90f 100644 --- a/indy_node/test/write_permission/test_send_write_permission_no_client.py +++ b/indy_node/test/write_permission/test_send_write_permission_no_client.py @@ -15,9 +15,11 @@ from indy.ledger import build_schema_request from indy_node.test.anon_creds.conftest import claim_def, build_revoc_reg_entry_for_given_revoc_reg_def -from indy_client.test.test_nym_attrib import attributeData, attributeName, attributeValue +from indy_node.test.attrib_txn.test_nym_attrib import attributeData, attributeName, attributeValue from 
indy_node.test.anon_creds.conftest import build_revoc_def_by_default +from indy_node.test.schema.test_send_get_schema import send_schema_seq_no + @pytest.fixture(scope="module") def tconf(tconf): @@ -119,7 +121,6 @@ def client_send_revoc_reg_def(looper, tconf.ANYONE_CAN_WRITE = OLD_ANYONE_CAN_WRITE Authoriser.auth_map = None - _, author_did = sdk_wallet_client revoc_reg = build_revoc_def_by_default revoc_reg['operation'][CRED_DEF_ID] = \ diff --git a/indy_node/utils/node_control_tool.py b/indy_node/utils/node_control_tool.py index df8cae91c..abf604029 100644 --- a/indy_node/utils/node_control_tool.py +++ b/indy_node/utils/node_control_tool.py @@ -17,9 +17,7 @@ TIMEOUT = 300 BACKUP_FORMAT = 'zip' -DEPS = ['indy-plenum', 'indy-anoncreds', 'python3-indy-crypto'] BACKUP_NUM = 10 -PACKAGES_TO_HOLD = 'indy-anoncreds indy-plenum indy-node python3-indy-crypto libindy-crypto' TMP_DIR = '/tmp/.indy_tmp' @@ -31,7 +29,6 @@ def __init__( timeout: int = TIMEOUT, backup_format: str = BACKUP_FORMAT, test_mode: bool = False, - deps: List[str] = DEPS, backup_target: str = None, files_to_preserve: List[str] = None, backup_dir: str = None, @@ -41,20 +38,17 @@ def __init__( config=None): self.config = config or getConfig() - assert self.config.UPGRADE_ENTRY, "UPGRADE_ENTRY config parameter must be set" - self.upgrade_entry = self.config.UPGRADE_ENTRY - self.test_mode = test_mode self.timeout = timeout or TIMEOUT + self.hold_ext = hold_ext.split(" ") + config_helper = ConfigHelper(self.config) self.backup_dir = backup_dir or config_helper.backup_dir self.backup_target = backup_target or config_helper.genesis_dir self.tmp_dir = TMP_DIR self.backup_format = backup_format - self.ext_ver = None - self.deps = deps _files_to_preserve = [self.config.lastRunVersionFile, self.config.nextVersionFile, self.config.upgradeLogFile, self.config.lastVersionFilePath, @@ -66,7 +60,6 @@ def __init__( _backup_name_prefix = '{}_backup_'.format(self.config.NETWORK_NAME) self.backup_name_prefix = 
backup_name_prefix or _backup_name_prefix - self.packages_to_hold = ' '.join([PACKAGES_TO_HOLD, hold_ext]) # Create a TCP/IP socket self.server = socket.socket(socket.AF_INET, socket.SOCK_STREAM) @@ -82,41 +75,32 @@ def __init__( # Listen for incoming connections self.server.listen(1) - def _ext_init(self): - NodeControlUtil.update_package_cache() - self.ext_ver, ext_deps = self._ext_info() - self.deps = ext_deps + self.deps - holds = set([self.upgrade_entry] + ext_deps + self.packages_to_hold.strip(" ").split(" ")) - self.packages_to_hold = ' '.join(list(holds)) - - def _ext_info(self, pkg=None): - pkg_name = pkg or self.upgrade_entry - return NodeControlUtil.curr_pkt_info(pkg_name) - - def _hold_packages(self): - if shutil.which("apt-mark"): - cmd = compose_cmd(['apt-mark', 'hold', self.packages_to_hold]) - ret = NodeControlUtil.run_shell_command(cmd, TIMEOUT) - if ret.returncode != 0: - raise Exception('cannot mark {} packages for hold ' - 'since {} returned {}' - .format(self.packages_to_hold, cmd, ret.returncode)) - logger.info('Successfully put {} packages on hold'.format(self.packages_to_hold)) - else: - logger.info('Skipping packages holding') - def _get_deps_list(self, package): logger.info('Getting dependencies for {}'.format(package)) NodeControlUtil.update_package_cache() - dep_tree = NodeControlUtil.get_deps_tree(package, self.deps) + app_holded = self.config.PACKAGES_TO_HOLD + self.hold_ext + dep_tree = NodeControlUtil.get_deps_tree_filtered(package, filter_list=app_holded) ret = [] NodeControlUtil.dep_tree_traverse(dep_tree, ret) - return " ".join(ret) - - def _call_upgrade_script(self, version): + # Filter deps according to system hold list + # in case of hold empty return only package + holded = NodeControlUtil.get_sys_holds() + if not holded: + return package + else: + ret_list = [] + for rl in ret: + name = rl.split("=", maxsplit=1)[0] + if name in holded: + ret_list.append(rl) + if package not in ret_list: + ret_list.append(package) + return " 
".join(ret_list) + + def _call_upgrade_script(self, pkg_name, version): logger.info('Upgrading indy node to version {}, test_mode {}'.format(version, int(self.test_mode))) - deps = self._get_deps_list('{}={}'.format(self.upgrade_entry, version)) + deps = self._get_deps_list('{}={}'.format(pkg_name, version)) deps = '"{}"'.format(deps) cmd_file = 'upgrade_indy_node' @@ -124,16 +108,12 @@ def _call_upgrade_script(self, version): cmd_file = 'upgrade_indy_node_test' cmd = compose_cmd([cmd_file, deps]) - ret = NodeControlUtil.run_shell_script(cmd, self.timeout) - if ret.returncode != 0: - raise Exception('upgrade script failed, exit code is {}'.format(ret.returncode)) + NodeControlUtil.run_shell_script(cmd, timeout=self.timeout) def _call_restart_node_script(self): logger.info('Restarting indy') cmd = compose_cmd(['restart_indy_node']) - ret = NodeControlUtil.run_shell_script(cmd, self.timeout) - if ret.returncode != 0: - raise Exception('restart failed: script returned {}'.format(ret.returncode)) + NodeControlUtil.run_shell_script(cmd, timeout=self.timeout) def _backup_name(self, version): return os.path.join(self.backup_dir, '{}{}'.format( @@ -195,13 +175,12 @@ def _do_migration(self, current_version, new_version): self._remove_old_backups() def _upgrade(self, new_version, pkg_name, migrate=True, rollback=True): - self.upgrade_entry = pkg_name - current_version, _ = self._ext_info() + current_version, _ = NodeControlUtil.curr_pkt_info(pkg_name) try: from indy_node.server.upgrader import Upgrader node_cur_version = Upgrader.getVersion() logger.info('Trying to upgrade from {}={} to {}'.format(pkg_name, current_version, new_version)) - self._call_upgrade_script(new_version) + self._call_upgrade_script(pkg_name, new_version) if migrate: node_new_version = Upgrader.getVersion() self._do_migration(node_cur_version, node_new_version) @@ -247,8 +226,16 @@ def _declare_upgrade_failed(self, *, reason=reason) logger.error(msg) + def _hold_packages(self): + if 
shutil.which("apt-mark"): + packages_to_hold = ' '.join(self.config.PACKAGES_TO_HOLD + self.hold_ext) + cmd = compose_cmd(['apt-mark', 'hold', packages_to_hold]) + NodeControlUtil.run_shell_script(cmd) + logger.info('Successfully put {} packages on hold'.format(packages_to_hold)) + else: + logger.info('Skipping packages holding') + def start(self): - self._ext_init() self._hold_packages() # Sockets from which we expect to read diff --git a/indy_node/utils/node_control_utils.py b/indy_node/utils/node_control_utils.py index bbf5ee5ec..b546a652f 100644 --- a/indy_node/utils/node_control_utils.py +++ b/indy_node/utils/node_control_utils.py @@ -1,55 +1,98 @@ -import re import subprocess +import shutil +import codecs +import locale from stp_core.common.log import getlogger from indy_common.util import compose_cmd + +# Package manager command output could contain some utf-8 symbols +# to handle such a case automatic stream parsing is prohibited, +# decode error handler is added, proper decoder is selected + +# copied from validator-info from plenum +def decode_err_handler(error): + length = error.end - error.start + return length * ' ', error.end + + +# copied from validator-info from plenum +codecs.register_error('decode_errors', decode_err_handler) + + logger = getlogger() TIMEOUT = 300 MAX_DEPS_DEPTH = 6 class NodeControlUtil: + # Method is used in case we are interested in command output + # errors are ignored + # only critical errors are logged to journalctl @classmethod - def run_shell_command(cls, command, timeout): - return subprocess.run(command, shell=True, check=True, universal_newlines=True, - stdout=subprocess.PIPE, timeout=timeout) + def run_shell_command(cls, command, timeout=TIMEOUT): + try: + ret = subprocess.run(command, shell=True, check=True, stdout=subprocess.PIPE, timeout=timeout) + ret_bytes = ret.stdout + except subprocess.CalledProcessError as ex: + ret_bytes = ex.output + except Exception as ex: + raise Exception("command {} failed with 
{}".format(command, ex)) + ret_msg = ret_bytes.decode(locale.getpreferredencoding(), 'decode_errors').strip() if ret_bytes else "" + return ret_msg + # Method is used in case we are NOT interested in command output + # everything: command, errors, output etc are logged to journalctl @classmethod - def run_shell_script(cls, command, timeout): - return subprocess.run(command, shell=True, timeout=timeout) + def run_shell_script(cls, command, timeout=TIMEOUT): + subprocess.run(command, shell=True, timeout=timeout, check=True) @classmethod def _get_curr_info(cls, package): cmd = compose_cmd(['dpkg', '-s', package]) - try: - ret = cls.run_shell_command(cmd, TIMEOUT) - except Exception as ex: - return "" - if ret.returncode != 0: - return "" - return ret.stdout.strip() + return cls.run_shell_command(cmd) @classmethod - def _parse_version_deps_from_pkt_mgr_output(cls, output): - def _parse_deps(deps: str): - ret = [] - pkgs = deps.split(",") - for pkg in pkgs: - name_ver = pkg.strip(" ").split(" ", maxsplit=1) - name = name_ver[0].strip(" \n") + def _parse_deps(cls, deps: str): + ret = [] + deps = deps.replace("|", ",") + pkgs = deps.split(",") + for pkg in pkgs: + if not pkg: + continue + name_ver = pkg.strip(" ").split(" ", maxsplit=1) + name = name_ver[0].strip(" \n") + if len(name_ver) == 1: ret.append(name) - return ret + else: + ver = name_ver[1].strip("()<>= \n") + ret.append("{}={}".format(name, ver)) + return ret + + @classmethod + def _pkts_dedup(cls, deps): + ret = [] + processed = set() + for d in deps: + name_ver = d.split("=", maxsplit=1) + if name_ver[0] not in processed: + ret.append(d) + processed.add(name_ver[0]) + return ret + @classmethod + def _parse_version_deps_from_pkt_mgr_output(cls, output): out_lines = output.split("\n") ver = None ext_deps = [] - for l in out_lines: - if l.startswith("Version:"): - ver = l.split(":", maxsplit=1)[1].strip(" \n") - if l.startswith("Depends:"): - ext_deps = _parse_deps(l.split(":", maxsplit=1)[1].strip(" \n")) - 
return ver, ext_deps + for ln in out_lines: + act_line = ln.strip(" \n") + if act_line.startswith("Version:"): + ver = ver or act_line.split(":", maxsplit=1)[1].strip(" \n") + if act_line.startswith("Depends:"): + ext_deps += cls._parse_deps(act_line.split(":", maxsplit=1)[1].strip(" \n")) + return ver, cls._pkts_dedup(ext_deps) @classmethod def curr_pkt_info(cls, pkg_name): @@ -57,44 +100,42 @@ def curr_pkt_info(cls, pkg_name): return cls._parse_version_deps_from_pkt_mgr_output(package_info) @classmethod - def _get_info_from_package_manager(cls, package): - cmd = compose_cmd(['apt-cache', 'show', package]) - try: - ret = cls.run_shell_command(cmd, TIMEOUT) - except Exception as ex: - return "" - if ret.returncode != 0: - return "" - return ret.stdout.strip() + def _get_info_from_package_manager(cls, *package): + cmd_arg = " ".join(list(package)) + cmd = compose_cmd(['apt-cache', 'show', cmd_arg]) + return cls.run_shell_command(cmd) @classmethod def update_package_cache(cls): cmd = compose_cmd(['apt', 'update']) - ret = cls.run_shell_command(cmd, TIMEOUT) - if ret.returncode != 0: - raise Exception('cannot update package cache since {} returned {}'.format(cmd, ret.returncode)) - return ret.stdout.strip() + cls.run_shell_script(cmd) @classmethod - def get_deps_tree(cls, package, include, depth=0): + def get_deps_tree(cls, *package, depth=0): + ret = list(set(package)) if depth < MAX_DEPS_DEPTH: - package_info = cls._get_info_from_package_manager(package) - ret = [package] - deps = [] + package_info = cls._get_info_from_package_manager(*ret) + _, deps = cls._parse_version_deps_from_pkt_mgr_output(package_info) deps_deps = [] - for dep in include: - if dep in package_info: - match = re.search('.*{} \(= ([0-9]+\.[0-9]+\.[0-9]+[\-\.\+\~0-9A-Za-z]*)\).*'.format(dep), package_info) - if match: - dep_version = match.group(1) - dep_package = '{}={}'.format(dep, dep_version) - deps.append(dep_package) - next_deps = cls.get_deps_tree(dep_package, include, depth=depth + 1) - 
if next_deps: - deps_deps.append(next_deps) - ret.append(deps) + deps = list(set(deps) - set(ret)) + deps_deps.append(cls.get_deps_tree(*deps, depth=depth + 1)) + ret.append(deps_deps) - return ret + return ret + + @classmethod + def get_deps_tree_filtered(cls, *package, filter_list=[], depth=0): + ret = list(set(package)) + filter_list = [f for f in filter_list if not list(filter(lambda x: f in x, ret))] + if depth < MAX_DEPS_DEPTH and filter_list: + package_info = cls._get_info_from_package_manager(*ret) + _, deps = cls._parse_version_deps_from_pkt_mgr_output(package_info) + deps_deps = [] + deps = list(set(deps) - set(ret)) + deps_deps.append(cls.get_deps_tree_filtered(*deps, filter_list=filter_list, depth=depth + 1)) + + ret.append(deps_deps) + return ret @classmethod def dep_tree_traverse(cls, dep_tree, deps_so_far): @@ -103,3 +144,15 @@ def dep_tree_traverse(cls, dep_tree, deps_so_far): elif isinstance(dep_tree, list) and dep_tree: for d in reversed(dep_tree): cls.dep_tree_traverse(d, deps_so_far) + + @classmethod + def get_sys_holds(cls): + if shutil.which("apt-mark"): + cmd = compose_cmd(['apt-mark', 'showhold']) + ret = cls.run_shell_command(cmd) + + hlds = ret.strip().split("\n") + return [h for h in hlds if h] + else: + logger.info('apt-mark not found. Assume holds is empty.') + return [] diff --git a/pool_automation/README.md b/pool_automation/README.md index 4c74fce0d..7be009a9f 100644 --- a/pool_automation/README.md +++ b/pool_automation/README.md @@ -2,17 +2,18 @@ ## Quickstart -- Make sure you have `AWS_ACCESS_KEY` and `AWS_SECRET_KEY` in your environment - with corresponding AWS access keys. +- Make sure you have [AWS CLI][f681b33b] installed and configured. - Run `ansible-playbook pool_create.yml` - this will create 4 EC2 instances and `test_nodes` directory with inventory and SSH keys. 
-- Run `ansible-playbook -i test_nodes/hosts pool_install.yml` - this will +- Run `ansible-playbook -i test_nodes/hosts pool.yml` - this will install and configure Indy Node pool on previously created EC2 instances. - Run `ssh -F test_nodes/ssh_config test_node_1` to login to first node and take a look around. -- Run `ansible-playbook pool_destroy.yml` - this will terminate previously +- Run `ansible-playbook destroy.nodes.yml` - this will terminate previously created AWS EC2 instances and clear `config_pool` and `inventory_pool` files. + [f681b33b]: https://aws.amazon.com/cli/ "aws cli" + ## Roles @@ -22,29 +23,29 @@ Used to manage number of AWS instances. Parameters: - _instance_count_: number of instances in pool (provide 0 to destroy) -- _aws_type_ (t2.micro): type of instances +- _aws_ec2_type_ (t2.micro): type of instances - _aws_region_ (eu-central-1): region of instances -- _tag_namespace_ (test): namespace of created instances -- _tag_role_ (default): role of created instances +- _aws_tag_project_ (PoolAutomation): project name for created instances +- _aws_tag_namespace_ (test): namespace of created instances +- _aws_tag_group_ (default): ansible inventory group of created instances Todos: -- allow created instances span all regions - extract key generation and inventory export to separate role - make inventory span separate roles in namespace - more tests -### common +### ansible_bootstrap -Installs python and sovrin GPG keys. +Installs python and sudo. -### node_install +### indy_node -Adds sovrin repository and installs Indy Node. +Adds sovrin repository and installs and configures Indy Node. Parameters: -- _channel_ (master): which release channel to use (master/rc/stable) +- _indy_node_channel_ (master): which release channel to use (master/rc/stable) - _indy_node_ver_ - _indy_plenum_ver_ - _python_indy_crypto_ver_ @@ -55,10 +56,9 @@ Todos: - remove unused repositories when switching channels -### pool_install - -Configures Indy Node pool. 
+## Scripts +The directory [scripts](scripts) includes helper scripts. Please refer to [scripts/README.md](scripts/README.md) for more details. ## Development @@ -72,21 +72,37 @@ Install virtualenv packages: - python-vagrant - boto - boto3 +- docker + +Default development workflow would be: +- `molecule lint` +- `molecule converge` +- `molecule verify` +- `molecule destroy` + +When you are ready you can run aggregative command `molecule test`. + +### Scenarios + +By default scenarios based on `docker` are used. Also `vagrant` scenarios are available +and might be run like `molecule -s vagrant`. + +In order to test all scenarios for some role go to a directory with the role (for example +`roles/indy_node`) and run `molecule test --all`. -In order to test role go to directory with role (for example -`roles/pool_install`) and run `molecule test --all`. +#### Vagrant scenarios specific -To speed up development following workflow is recommended: +To speed up development and testing on vagrant VMs following workflow is recommended: - After each change run `molecule lint`. This will quickly show some style recommendations and probably highlight some stupid mistakes. - When lint is satisfied run `molecule converge`. This will spin up virtual machines (if neccessary) and run default playbook. This - could be slow operation when running for the first time, but + could be slow operation when running for the first time, but subsequent runs are much faster. - When converge finishes without errors run `molecule verify`. This will start tests. - Do more changes, running this lint-converge-verify sequence. -- When done (or there is suspicion that VM state gone very bad) run +- When done (or there is suspicion that VM state gone very bad) run `molecule destroy`, this will destroy virtual machines. 
- When virtual machines are running it's possible to login to them using `molecule login --host name` diff --git a/pool_automation/configure.yml b/pool_automation/configure.yml new file mode 100644 index 000000000..a8e58a863 --- /dev/null +++ b/pool_automation/configure.yml @@ -0,0 +1,23 @@ +--- +- name: Check inventory directory + gather_facts: false + hosts: localhost + tasks: + - include_tasks: tasks/check_inventory.yml + when: not (skip_inventory_check|default(false)|bool) + +- name: Configure pool + gather_facts: false + hosts: nodes + roles: + - indy_node + tags: [ 'pool', 'nodes' ] + +- name: Configure clients + gather_facts: false + hosts: clients + roles: + - indy_cli + - perf_scripts + tags: [ 'clients' ] +... diff --git a/pool_automation/destroy.yml b/pool_automation/destroy.yml new file mode 100644 index 000000000..dbe4835c4 --- /dev/null +++ b/pool_automation/destroy.yml @@ -0,0 +1,28 @@ +--- +- name: Check inventory directory + gather_facts: false + hosts: localhost + tasks: + - include_tasks: tasks/check_inventory.yml + when: not (skip_inventory_check|default(false)|bool) + +- name: Destroy clients + gather_facts: false + hosts: localhost + roles: + - role: aws_manage + vars: + aws_group_name: clients + aws_instance_count: 0 + tags: [ 'clients' ] + +- name: Destroy Indy nodes + gather_facts: false + hosts: localhost + roles: + - role: aws_manage + vars: + aws_group_name: nodes + aws_instance_count: 0 + tags: [ 'nodes' ] +... 
diff --git a/pool_automation/pool_create.yml b/pool_automation/pool_create.yml deleted file mode 100644 index 6c34a1428..000000000 --- a/pool_automation/pool_create.yml +++ /dev/null @@ -1,6 +0,0 @@ ---- -- name: Create pool - gather_facts: false - hosts: localhost - roles: - - { role: aws_manage, tag_namespace: test, tag_role: node, instance_count: 4 } diff --git a/pool_automation/pool_destroy.yml b/pool_automation/pool_destroy.yml deleted file mode 100644 index 53b58e1bd..000000000 --- a/pool_automation/pool_destroy.yml +++ /dev/null @@ -1,6 +0,0 @@ ---- -- name: Destroy pool - gather_facts: false - hosts: localhost - roles: - - { role: aws_manage, tag_namespace: test, tag_role: node, instance_count: 0 } diff --git a/pool_automation/pool_install.yml b/pool_automation/pool_install.yml deleted file mode 100644 index dfad90622..000000000 --- a/pool_automation/pool_install.yml +++ /dev/null @@ -1,6 +0,0 @@ ---- -- name: Install pool - gather_facts: false - hosts: nodes - roles: - - role: pool_install diff --git a/pool_automation/provision.yml b/pool_automation/provision.yml new file mode 100644 index 000000000..06baacf17 --- /dev/null +++ b/pool_automation/provision.yml @@ -0,0 +1,39 @@ +--- +- name: Check inventory directory + gather_facts: false + hosts: localhost + tasks: + - include_tasks: tasks/check_inventory.yml + when: not (skip_inventory_check|default(false)|bool) + +- name: Ensure provisioner hosts exist + gather_facts: false + hosts: localhost + tasks: + - name: Add provisioner hosts to inventory + add_host: + name: "{{ item }}" + ansible_connection: local + ansible_python_interpreter: "{{ ansible_playbook_python }}" + when: not hostvars[item] + changed_when: false + loop: ['aws_nodes_provisioner', 'aws_clients_provisioner'] + +- name: Provision hosts for nodes + gather_facts: false + hosts: aws_nodes_provisioner + roles: + - role: aws_manage + vars: + aws_group_name: nodes + tags: [ 'nodes' ] + +- name: Provision hosts for clients + gather_facts: false + 
hosts: aws_clients_provisioner + roles: + - role: aws_manage + vars: + aws_group_name: clients + tags: [ 'clients' ] +... diff --git a/pool_automation/roles/ansible_bootstrap/molecule/default/molecule.yml b/pool_automation/roles/ansible_bootstrap/molecule/default/molecule.yml new file mode 100644 index 000000000..2d64e325f --- /dev/null +++ b/pool_automation/roles/ansible_bootstrap/molecule/default/molecule.yml @@ -0,0 +1,26 @@ +--- +dependency: + name: galaxy +driver: + name: docker +lint: + name: yamllint + options: + config-file: ../../yamllint +platforms: + - name: instance + image: ubuntu:16.04 # TODO parametrize + pre_build_image: true +provisioner: + name: ansible + lint: + name: ansible-lint + playbooks: + converge: ../resources/playbooks/playbook.yml +scenario: + name: default +verifier: + name: testinfra + directory: ../resources/tests + lint: + name: flake8 diff --git a/pool_automation/roles/node_install/molecule/default/playbook.yml b/pool_automation/roles/ansible_bootstrap/molecule/resources/playbooks/playbook.yml similarity index 68% rename from pool_automation/roles/node_install/molecule/default/playbook.yml rename to pool_automation/roles/ansible_bootstrap/molecule/resources/playbooks/playbook.yml index 97fd9effd..f72154b28 100644 --- a/pool_automation/roles/node_install/molecule/default/playbook.yml +++ b/pool_automation/roles/ansible_bootstrap/molecule/resources/playbooks/playbook.yml @@ -3,4 +3,4 @@ gather_facts: false hosts: all roles: - - role: node_install + - role: ansible_bootstrap diff --git a/pool_automation/roles/common/molecule/default/tests/test_default.py b/pool_automation/roles/ansible_bootstrap/molecule/resources/tests/test_default.py similarity index 50% rename from pool_automation/roles/common/molecule/default/tests/test_default.py rename to pool_automation/roles/ansible_bootstrap/molecule/resources/tests/test_default.py index 3a5948b70..4fb207d06 100644 --- a/pool_automation/roles/common/molecule/default/tests/test_default.py +++ 
b/pool_automation/roles/ansible_bootstrap/molecule/resources/tests/test_default.py @@ -1,3 +1,7 @@ def test_python_is_installed(host): assert host.run('python --version').rc == 0 + + +def test_sudo_is_installed(host): + assert host.run('sudo --version').rc == 0 diff --git a/pool_automation/roles/common/molecule/default/molecule.yml b/pool_automation/roles/ansible_bootstrap/molecule/vagrant/molecule.yml similarity index 66% rename from pool_automation/roles/common/molecule/default/molecule.yml rename to pool_automation/roles/ansible_bootstrap/molecule/vagrant/molecule.yml index 1c124dbf6..ad94a6929 100644 --- a/pool_automation/roles/common/molecule/default/molecule.yml +++ b/pool_automation/roles/ansible_bootstrap/molecule/vagrant/molecule.yml @@ -1,11 +1,13 @@ --- scenario: - name: default + name: vagrant provisioner: name: ansible lint: name: ansible-lint + playbooks: + converge: ../resources/playbooks/playbook.yml platforms: - name: instance @@ -19,9 +21,10 @@ driver: lint: name: yamllint options: - config-file: yamllint + config-file: ../../yamllint verifier: name: testinfra + directory: ../resources/tests lint: name: flake8 diff --git a/pool_automation/roles/ansible_bootstrap/tasks/main.yml b/pool_automation/roles/ansible_bootstrap/tasks/main.yml new file mode 100644 index 000000000..9e4e88e21 --- /dev/null +++ b/pool_automation/roles/ansible_bootstrap/tasks/main.yml @@ -0,0 +1,20 @@ +--- +- name: Check if python is installed + raw: test -e /usr/bin/python + register: test_python + failed_when: false + changed_when: false + +- name: Check if sudo is installed + raw: sudo --version + register: test_sudo + failed_when: false + changed_when: false + +# assumption: +# either sudo installed and remote user is in sudoers +# or sudo is not installed but remote user has root priviledges +- name: Install python for Ansible and sudo + raw: apt -y update && apt install -y python-minimal sudo + when: test_python.rc != 0 or test_sudo.rc != 0 + become: "{{ test_sudo.rc == 0 
}}" diff --git a/pool_automation/roles/aws_manage/defaults/main.yml b/pool_automation/roles/aws_manage/defaults/main.yml index 40dd60eee..38739b35b 100644 --- a/pool_automation/roles/aws_manage/defaults/main.yml +++ b/pool_automation/roles/aws_manage/defaults/main.yml @@ -1,20 +1,22 @@ --- -# AWS primary parameters -aws_type: t2.micro -aws_image: ami-027583e616ca104df +# General +aws_project_name: Indy-PA +aws_group_name: defaults -# Tags -tag_namespace: test -tag_role: default + # Namespace parameters +aws_namespace_name: "{{ namespace_name | default(inventory_dir, true) | default('test', true) | basename }}" +aws_build_dir: "{{ namespace_dir | default(inventory_dir, true) | default(aws_namespace_name, true) }}" -# Derivative parameters -group_name: "{{ tag_role }}s" -inventory_dir: "{{ tag_namespace }}_{{ group_name }}" -aws_keyname: "{{ tag_namespace }}-{{ group_name }}" -aws_group: "{{ tag_namespace }}-{{ group_name }}" + # Inventory parameters +aws_inventory_dir: "{{ inventory_dir | default(aws_build_dir ~ '/inventory', true) }}" +aws_inventory_file: "{{ aws_inventory_dir }}/{{ aws_group_name }}.yml" +aws_base_group: "{{ aws_namespace_name }}" -# List of all potential regions to use -aws_regions: +# Provision parameters + + # List of all potential regions to use + # (for reference mostly) +aws_regions_all: - ap-northeast-1 - ap-northeast-2 - ap-south-1 @@ -25,8 +27,30 @@ aws_regions: - eu-west-1 - eu-west-2 - eu-west-3 - - sa-east-1 + - sa-east-1 # !!! 
be aware: expensive - us-east-1 - us-east-2 - us-west-1 - us-west-2 + + # List of regions to use + # (the cheapest ones) +aws_regions: + - us-east-1 + - us-east-2 + - us-west-2 + +aws_instance_count: 4 + +aws_ec2_type: t2.micro + + # Resource tags and names +aws_tag_project: "{{ aws_project_name }}" +aws_tag_namespace: "{{ aws_namespace_name }}" +aws_tag_group: "{{ aws_group_name }}" +aws_add_tags: {} # additional tags + +aws_resource_common_name: "{{ [aws_project_name, aws_tag_namespace, aws_group_name]|join('-')|lower }}" +aws_keyname: "{{ aws_resource_common_name }}" +aws_sgroup: "{{ aws_resource_common_name }}" +aws_tag_sgroup_name: "{{ aws_sgroup }}" diff --git a/pool_automation/roles/aws_manage/handlers/main.yml b/pool_automation/roles/aws_manage/handlers/main.yml deleted file mode 100644 index ed97d539c..000000000 --- a/pool_automation/roles/aws_manage/handlers/main.yml +++ /dev/null @@ -1 +0,0 @@ ---- diff --git a/pool_automation/roles/aws_manage/library/stateful_set.py b/pool_automation/roles/aws_manage/library/stateful_set.py index 8b4926e61..f972e0cd8 100644 --- a/pool_automation/roles/aws_manage/library/stateful_set.py +++ b/pool_automation/roles/aws_manage/library/stateful_set.py @@ -5,6 +5,15 @@ from itertools import cycle import boto3 +# import logging +# boto3.set_stream_logger('', logging.DEBUG) + +HostInfo = namedtuple('HostInfo', 'tag_id public_ip user') + +InstanceParams = namedtuple( + 'InstanceParams', 'project namespace group add_tags key_name security_group type_name') + +ManageResults = namedtuple('ManageResults', 'changed active terminated') AWS_REGIONS = [ 'ap-northeast-1', @@ -24,6 +33,7 @@ 'us-west-2'] +# TODO think about moving these module level funcitons into classes def find_ubuntu_ami(ec2): images = ec2.images.filter( Owners=['099720109477'], @@ -38,44 +48,13 @@ def find_ubuntu_ami(ec2): return images[-1].image_id if len(images) > 0 else None -InstanceParams = namedtuple( - 'InstanceParams', 'namespace role key_name group 
type_name') - - -def create_instances(ec2, params, count): - instances = ec2.create_instances( - ImageId=find_ubuntu_ami(ec2), - KeyName=params.key_name, - SecurityGroups=[params.group], - InstanceType=params.type_name, - MinCount=count, - MaxCount=count, - TagSpecifications=[ - { - 'ResourceType': 'instance', - 'Tags': [ - { - 'Key': 'namespace', - 'Value': params.namespace - }, - { - 'Key': 'role', - 'Value': params.role - } - ] - } - ] - ) - - return instances - - -def find_instances(ec2, namespace, role=None): +def find_instances(ec2, project, namespace, group=None): filters = [ - {'Name': 'tag:namespace', 'Values': [namespace]} + {'Name': 'tag:Project', 'Values': [project]}, + {'Name': 'tag:Namespace', 'Values': [namespace]} ] - if role is not None: - filters.append({'Name': 'tag:role', 'Values': [role]}) + if group is not None: + filters.append({'Name': 'tag:Group', 'Values': [group]}) return [instance for instance in ec2.instances.filter(Filters=filters) if instance.state['Name'] not in ['terminated', 'shutting-down']] @@ -95,78 +74,184 @@ def get_tag(inst, name): return None -HostInfo = namedtuple('HostInfo', 'tag_id public_ip user') +class AwsEC2Waiter(object): + """ Base class for EC2 actors which calls long running async actions. 
""" + + def __init__(self, ev_name): + self._awaited = defaultdict(list) + self._ev_name = ev_name + + @property + def awaited(self): + return dict(self._awaited) + + def add_instance(self, instance, region=None): + # fallback - autodetect placement region, + # might lead to additional AWS API calls + if not region: + # TODO more mature would be to use + # ec2.client.describe_availability_zones + # and create a map av.zone -> region + region = instance.placement['AvailabilityZone'][:-1] + self._awaited[region].append(instance) + + def wait(self, update=True): + for region, instances in dict(self._awaited).iteritems(): + ec2cl = boto3.client('ec2', region_name=region) + ec2cl.get_waiter(self._ev_name).wait( + InstanceIds=[inst.id for inst in instances]) + if update: + for inst in instances: + inst.reload() + del self._awaited[region] + + +class AwsEC2Terminator(AwsEC2Waiter): + """ Helper class to terminate EC2 instances. """ + + def __init__(self): + super(AwsEC2Terminator, self).__init__('instance_terminated') + + def terminate(self, instance, region=None): + instance.terminate() + self.add_instance(instance, region) + + +class AwsEC2Launcher(AwsEC2Waiter): + """ Helper class to launch EC2 instances. 
""" + + def __init__(self): + # TODO consider to use waiter for 'instance_status_ok' + # if 'instance_running' is not enough in any circumstances + super(AwsEC2Launcher, self).__init__('instance_running') + + def launch(self, params, count, region=None, ec2=None): + if not ec2: + ec2 = boto3.resource('ec2', region_name=region) + + instances = ec2.create_instances( + ImageId=find_ubuntu_ami(ec2), + KeyName=params.key_name, + SecurityGroups=[params.security_group], + InstanceType=params.type_name, + MinCount=count, + MaxCount=count, + TagSpecifications=[ + { + 'ResourceType': 'instance', + 'Tags': [ + { + 'Key': 'Project', + 'Value': params.project + }, + { + 'Key': 'Namespace', + 'Value': params.namespace + }, + { + 'Key': 'Group', + 'Value': params.group + } + ] + } + ] + ) + + for i in instances: + self.add_instance(i, region) + + return instances def manage_instances(regions, params, count): - valid_region_ids = valid_instances(regions, count) hosts = [] + terminated = [] changed = False + def _host_info(inst): + return HostInfo( + tag_id=get_tag(inst, 'ID'), + public_ip=inst.public_ip_address, + user='ubuntu') + + aws_launcher = AwsEC2Launcher() + aws_terminator = AwsEC2Terminator() + + valid_region_ids = valid_instances(regions, count) + for region in AWS_REGIONS: ec2 = boto3.resource('ec2', region_name=region) valid_ids = valid_region_ids[region] - instances = find_instances(ec2, params.namespace, params.role) + instances = find_instances(ec2, params.project, params.namespace, params.group) for inst in instances: - tag_id = get_tag(inst, 'id') + tag_id = get_tag(inst, 'ID') if tag_id in valid_ids: valid_ids.remove(tag_id) hosts.append(inst) + aws_launcher.add_instance(inst, region) else: - inst.terminate() + terminated.append(_host_info(inst)) + aws_terminator.terminate(inst, region) changed = True - if len(valid_ids) == 0: - continue - - instances = create_instances(ec2, params, len(valid_ids)) - for inst, tag_id in zip(instances, valid_ids): - 
inst.create_tags(Tags=[{'Key': 'id', 'Value': tag_id}]) - hosts.append(inst) - changed = True - - wait_for_ip = True - while wait_for_ip: - wait_for_ip = False - for inst in hosts: - if inst.public_ip_address is not None: - continue - wait_for_ip = True - inst.reload() + if valid_ids: + instances = aws_launcher.launch( + params, len(valid_ids), region=region, ec2=ec2) + for inst, tag_id in zip(instances, valid_ids): + inst.create_tags(Tags=[ + {'Key': 'Name', 'Value': "{}-{}-{}-{}" + .format(params.project, + params.namespace, + params.group, + tag_id.zfill(3)).lower()}, + {'Key': 'ID', 'Value': tag_id}] + + [{'Key': k, 'Value': v} for k, v in params.add_tags.iteritems()]) + hosts.append(inst) + changed = True - hosts = [HostInfo(tag_id=get_tag(inst, 'id'), - public_ip=inst.public_ip_address, - user='ubuntu') for inst in hosts] + aws_launcher.wait() + aws_terminator.wait() - return changed, hosts + return ManageResults( + changed, + [_host_info(inst) for inst in hosts], + terminated + ) def run(module): params = module.params inst_params = InstanceParams( + project=params['project'], namespace=params['namespace'], - role=params['role'], - key_name=params['key_name'], group=params['group'], + add_tags=params['add_tags'], + key_name=params['key_name'], + security_group=params['security_group'], type_name=params['instance_type'] ) - changed, results = manage_instances( + res = manage_instances( params['regions'], inst_params, params['instance_count']) - module.exit_json(changed=changed, - results=[r.__dict__ for r in results]) + module.exit_json( + changed=res.changed, + active=[r.__dict__ for r in res.active], + terminated=[r.__dict__ for r in res.terminated] + ) if __name__ == '__main__': module_args = dict( regions=dict(type='list', required=True), + project=dict(type='str', required=True), namespace=dict(type='str', required=True), - role=dict(type='str', required=True), - key_name=dict(type='str', required=True), group=dict(type='str', required=True), + 
add_tags=dict(type='dict', required=False, default=dict()), + key_name=dict(type='str', required=True), + security_group=dict(type='str', required=True), instance_type=dict(type='str', required=True), instance_count=dict(type='int', required=True) ) diff --git a/pool_automation/roles/aws_manage/library/test_stateful_set.py b/pool_automation/roles/aws_manage/library/test_stateful_set.py index 2903306db..845da99d4 100644 --- a/pool_automation/roles/aws_manage/library/test_stateful_set.py +++ b/pool_automation/roles/aws_manage/library/test_stateful_set.py @@ -2,15 +2,20 @@ import boto3 import pytest -from stateful_set import AWS_REGIONS, InstanceParams, find_ubuntu_ami, \ - create_instances, find_instances, valid_instances, get_tag, manage_instances +from stateful_set import ( + AWS_REGIONS, InstanceParams, find_ubuntu_ami, + AwsEC2Launcher, AwsEC2Terminator, find_instances, + valid_instances, get_tag, manage_instances +) PARAMS = InstanceParams( + project='Indy-PA', + add_tags={'Purpose': 'Test Pool Automation'}, namespace='test_stateful_set', - role=None, + group=None, key_name='test_stateful_set_key', - group='test_stateful_set_group', + security_group='test_stateful_set_security_group', type_name='t2.micro' ) @@ -30,31 +35,33 @@ def manage_key_pair(ec2, present): def manage_security_group(ec2, present): count = 0 - for group in ec2.security_groups.all(): - if group.group_name != PARAMS.group: + for sgroup in ec2.security_groups.all(): + if sgroup.group_name != PARAMS.security_group: continue if present and count == 0: count = 1 else: - group.delete() + sgroup.delete() if present and count == 0: - ec2.create_security_group(GroupName=PARAMS.group, + ec2.create_security_group(GroupName=PARAMS.security_group, Description='Test security group') def terminate_instances(ec2): - instances = find_instances(ec2, PARAMS.namespace) + instances = find_instances(ec2, PARAMS.project, PARAMS.namespace) for inst in instances: inst.terminate() def check_params(inst, params): - 
assert {'Key': 'namespace', 'Value': params.namespace} in inst.tags - assert {'Key': 'role', 'Value': params.role} in inst.tags + assert {'Key': 'Project', 'Value': params.project} in inst.tags + assert {'Key': 'Namespace', 'Value': params.namespace} in inst.tags + assert {'Key': 'Group', 'Value': params.group} in inst.tags assert inst.key_name == params.key_name assert len(inst.security_groups) == 1 - assert inst.security_groups[0]['GroupName'] == params.group + assert inst.security_groups[0]['GroupName'] == params.security_group assert inst.instance_type == params.type_name + assert inst.state['Name'] == 'running' @pytest.fixture(scope="session") @@ -68,13 +75,19 @@ def ec2_environment(ec2_all): manage_key_pair(ec2, True) manage_security_group(ec2, True) yield + + terminator = AwsEC2Terminator() + for region, ec2 in ec2_all.iteritems(): + for inst in find_instances(ec2, PARAMS.project, PARAMS.namespace): + terminator.terminate(inst, region) + terminator.wait(False) + for ec2 in ec2_all.values(): - terminate_instances(ec2) manage_key_pair(ec2, False) - # manage_security_group(ec2, False) + manage_security_group(ec2, False) -@pytest.fixture(params=AWS_REGIONS) +@pytest.fixture(params=sorted(AWS_REGIONS)) def ec2(request, ec2_all): return ec2_all[request.param] @@ -93,25 +106,56 @@ def test_find_ubuntu_image(ec2): assert 'UNSUPPORTED' not in image.description -def test_create_instances(ec2): - params = PARAMS._replace(role='test_create') - instances = create_instances(ec2, params, 2) +def test_AwsEC2Launcher(ec2): + launcher = AwsEC2Launcher() + params = PARAMS._replace(group='test_create') + instances = launcher.launch(params, 2, ec2=ec2) assert len(instances) == 2 + + assert len(launcher.awaited) > 0 + launcher.wait() + assert len(launcher.awaited) == 0 + for instance in instances: check_params(instance, params) +def test_AwsEC2Terminator(ec2): + launcher = AwsEC2Launcher() + terminator = AwsEC2Terminator() + + params = PARAMS._replace(group='test_terminate') + 
instances = launcher.launch(params, 2, ec2=ec2) + launcher.wait() + + for instance in instances: + terminator.terminate(instance) + + assert len(terminator.awaited) > 0 + terminator.wait() + assert len(terminator.awaited) == 0 + + for instance in instances: + assert instance.state['Name'] == 'terminated' + + def test_find_instances(ec2_all): - ec2 = ec2_all['eu-central-1'] - terminate_instances(ec2) + region = 'eu-central-1' + launcher = AwsEC2Launcher() + terminator = AwsEC2Terminator() + ec2 = ec2_all[region] + + for inst in find_instances(ec2, PARAMS.project, PARAMS.namespace): + terminator.terminate(inst, region) + terminator.wait(False) - create_instances(ec2, PARAMS._replace(role='aaa'), 2) - create_instances(ec2, PARAMS._replace(role='bbb'), 3) + launcher.launch(PARAMS._replace(group='aaa'), 2, ec2=ec2) + launcher.launch(PARAMS._replace(group='bbb'), 3, ec2=ec2) - aaa = find_instances(ec2, PARAMS.namespace, 'aaa') - bbb = find_instances(ec2, PARAMS.namespace, 'bbb') - aaa_and_bbb = find_instances(ec2, PARAMS.namespace) + aaa = find_instances(ec2, PARAMS.project, PARAMS.namespace, 'aaa') + bbb = find_instances(ec2, PARAMS.project, PARAMS.namespace, 'bbb') + aaa_and_bbb = find_instances(ec2, PARAMS.project, PARAMS.namespace) assert len(aaa) == 2 assert len(bbb) == 3 @@ -145,9 +189,9 @@ def test_valid_instances(): def test_manage_instances(ec2_all): - regions = ['eu-central-1', 'us-west-1', 'us-west-2'] + regions = ['us-east-1', 'us-east-2', 'us-west-2'] connections = [ec2_all[r] for r in regions] - params = PARAMS._replace(role='test_manage') + params = PARAMS._replace(group='test_manage') def check_hosts(hosts): assert len(set(host.tag_id for host in hosts)) == len(hosts) @@ -157,69 +201,83 @@ def check_tags(instances): for group in instances: for inst in group: check_params(inst, params) - assert get_tag(inst, 'id') is not None - - changed, hosts = manage_instances(regions, params, 4) - instances = [find_instances(c, PARAMS.namespace, 'test_manage') + 
inst_tag_id = get_tag(inst, 'ID') + assert inst_tag_id is not None + inst_tag_name = get_tag(inst, 'Name') + assert inst_tag_name == "{}-{}-{}-{}".format( + params.project, + params.namespace, + params.group, + inst_tag_id.zfill(3)).lower() + for tag_key, tag_value in params.add_tags.iteritems(): + assert tag_value == get_tag(inst, tag_key) + + res = manage_instances(regions, params, 4) + instances = [find_instances(c, PARAMS.project, PARAMS.namespace, 'test_manage') for c in connections] - assert changed - check_hosts(hosts) + assert res.changed + assert len(res.active) == 4 + assert len(res.terminated) == 0 + check_hosts(res.active + res.terminated) check_tags(instances) - assert len(hosts) == 4 assert len(instances[0]) == 2 assert len(instances[1]) == 1 assert len(instances[2]) == 1 - assert set([get_tag(instances[0][0], 'id'), - get_tag(instances[0][1], 'id')]) == set(['1', '4']) - assert get_tag(instances[1][0], 'id') == '2' - assert get_tag(instances[2][0], 'id') == '3' + assert set([get_tag(instances[0][0], 'ID'), + get_tag(instances[0][1], 'ID')]) == set(['1', '4']) + assert get_tag(instances[1][0], 'ID') == '2' + assert get_tag(instances[2][0], 'ID') == '3' - changed, hosts = manage_instances(regions, params, 4) - instances = [find_instances(c, PARAMS.namespace, 'test_manage') + res = manage_instances(regions, params, 4) + instances = [find_instances(c, PARAMS.project, PARAMS.namespace, 'test_manage') for c in connections] - assert not changed - check_hosts(hosts) + assert not res.changed + assert len(res.active) == 4 + assert len(res.terminated) == 0 + check_hosts(res.active + res.terminated) check_tags(instances) - assert len(hosts) == 4 assert len(instances[0]) == 2 assert len(instances[1]) == 1 assert len(instances[2]) == 1 - assert set([get_tag(instances[0][0], 'id'), - get_tag(instances[0][1], 'id')]) == set(['1', '4']) - assert get_tag(instances[1][0], 'id') == '2' - assert get_tag(instances[2][0], 'id') == '3' + assert set([get_tag(instances[0][0], 
'ID'), + get_tag(instances[0][1], 'ID')]) == set(['1', '4']) + assert get_tag(instances[1][0], 'ID') == '2' + assert get_tag(instances[2][0], 'ID') == '3' - changed, hosts = manage_instances(regions, params, 2) - instances = [find_instances(c, PARAMS.namespace, 'test_manage') + res = manage_instances(regions, params, 2) + instances = [find_instances(c, PARAMS.project, PARAMS.namespace, 'test_manage') for c in connections] - assert changed - check_hosts(hosts) + assert res.changed + assert len(res.active) == 2 + assert len(res.terminated) == 2 + check_hosts(res.active + res.terminated) check_tags(instances) - assert len(hosts) == 2 assert len(instances[0]) == 1 assert len(instances[1]) == 1 assert len(instances[2]) == 0 - assert get_tag(instances[0][0], 'id') == '1' - assert get_tag(instances[1][0], 'id') == '2' + assert get_tag(instances[0][0], 'ID') == '1' + assert get_tag(instances[1][0], 'ID') == '2' - changed, hosts = manage_instances(regions, params, 0) - instances = [find_instances(c, PARAMS.namespace, 'test_manage') + res = manage_instances(regions, params, 0) + instances = [find_instances(c, PARAMS.project, PARAMS.namespace, 'test_manage') for c in connections] - assert changed - check_hosts(hosts) + assert res.changed + assert len(res.active) == 0 + assert len(res.terminated) == 2 + check_hosts(res.active + res.terminated) check_tags(instances) - assert len(hosts) == 0 assert len(instances[0]) == 0 assert len(instances[1]) == 0 assert len(instances[2]) == 0 - changed, hosts = manage_instances(regions, params, 0) - instances = [find_instances(c, PARAMS.namespace, 'test_manage') + res = manage_instances(regions, params, 0) + instances = [find_instances(c, PARAMS.project, PARAMS.namespace, 'test_manage') for c in connections] - assert not changed - check_hosts(hosts) + assert not res.changed + assert len(res.active) == 0 + assert len(res.terminated) == 0 + check_hosts(res.active + res.terminated) check_tags(instances) - assert len(hosts) == 0 assert 
len(instances[0]) == 0 assert len(instances[1]) == 0 assert len(instances[2]) == 0 diff --git a/pool_automation/roles/aws_manage/molecule/cleanup/create.yml b/pool_automation/roles/aws_manage/molecule/cleanup/create.yml index 61f5f7b03..cddaeb34b 100644 --- a/pool_automation/roles/aws_manage/molecule/cleanup/create.yml +++ b/pool_automation/roles/aws_manage/molecule/cleanup/create.yml @@ -3,4 +3,4 @@ hosts: localhost gather_facts: false roles: - - { role: aws_manage, instance_count: 4 } + - aws_manage diff --git a/pool_automation/roles/aws_manage/molecule/cleanup/destroy.yml b/pool_automation/roles/aws_manage/molecule/cleanup/destroy.yml index 0f9251ba5..68ad7e3b3 100644 --- a/pool_automation/roles/aws_manage/molecule/cleanup/destroy.yml +++ b/pool_automation/roles/aws_manage/molecule/cleanup/destroy.yml @@ -3,4 +3,5 @@ hosts: localhost gather_facts: false roles: - - { role: aws_manage, instance_count: 0 } + - role: aws_manage + aws_instance_count: 0 diff --git a/pool_automation/roles/aws_manage/molecule/cleanup/molecule.yml b/pool_automation/roles/aws_manage/molecule/cleanup/molecule.yml index bff8b5ff6..a7c038b3a 100644 --- a/pool_automation/roles/aws_manage/molecule/cleanup/molecule.yml +++ b/pool_automation/roles/aws_manage/molecule/cleanup/molecule.yml @@ -6,6 +6,9 @@ provisioner: name: ansible lint: name: ansible-lint + inventory: + links: + group_vars: ${MOLECULE_INVENTORY_GROUP_VARS:-../resources/group_vars} platforms: - name: instance @@ -16,7 +19,7 @@ driver: lint: name: yamllint options: - config-file: ../common/.yamllint + config-file: ../../yamllint verifier: name: testinfra diff --git a/pool_automation/roles/aws_manage/molecule/cleanup/playbook.yml b/pool_automation/roles/aws_manage/molecule/cleanup/playbook.yml index 260fe9186..dc1963f19 100644 --- a/pool_automation/roles/aws_manage/molecule/cleanup/playbook.yml +++ b/pool_automation/roles/aws_manage/molecule/cleanup/playbook.yml @@ -3,4 +3,5 @@ hosts: localhost gather_facts: false roles: - - { role: 
aws_manage, instance_count: 0 } + - role: aws_manage + aws_instance_count: 0 diff --git a/pool_automation/roles/aws_manage/molecule/cleanup/tests/test_default.py b/pool_automation/roles/aws_manage/molecule/cleanup/tests/test_default.py index 3aba5560b..b761c45e3 100644 --- a/pool_automation/roles/aws_manage/molecule/cleanup/tests/test_default.py +++ b/pool_automation/roles/aws_manage/molecule/cleanup/tests/test_default.py @@ -5,9 +5,11 @@ def is_valid_instance(inst): if inst.state != "running": return False - if inst.tags.get('namespace') != 'test': + if inst.tags.get('Project') != 'PoolAutomation': return False - if inst.tags.get('role') != 'default': + if inst.tags.get('Namespace') != 'test': + return False + if inst.tags.get('Group') != 'default': return False return True diff --git a/pool_automation/roles/aws_manage/molecule/default/destroy.yml b/pool_automation/roles/aws_manage/molecule/default/destroy.yml index 0f9251ba5..68ad7e3b3 100644 --- a/pool_automation/roles/aws_manage/molecule/default/destroy.yml +++ b/pool_automation/roles/aws_manage/molecule/default/destroy.yml @@ -3,4 +3,5 @@ hosts: localhost gather_facts: false roles: - - { role: aws_manage, instance_count: 0 } + - role: aws_manage + aws_instance_count: 0 diff --git a/pool_automation/roles/aws_manage/molecule/default/molecule.yml b/pool_automation/roles/aws_manage/molecule/default/molecule.yml index 62351351b..8b15c899c 100644 --- a/pool_automation/roles/aws_manage/molecule/default/molecule.yml +++ b/pool_automation/roles/aws_manage/molecule/default/molecule.yml @@ -6,6 +6,9 @@ provisioner: name: ansible lint: name: ansible-lint + inventory: + links: + group_vars: ${MOLECULE_INVENTORY_GROUP_VARS:-../resources/group_vars} platforms: - name: instance @@ -16,7 +19,7 @@ driver: lint: name: yamllint options: - config-file: ../common/yamllint + config-file: ../../yamllint verifier: name: testinfra diff --git a/pool_automation/roles/aws_manage/molecule/default/playbook.yml 
b/pool_automation/roles/aws_manage/molecule/default/playbook.yml index 67ea2d831..cc5cd1ac7 100644 --- a/pool_automation/roles/aws_manage/molecule/default/playbook.yml +++ b/pool_automation/roles/aws_manage/molecule/default/playbook.yml @@ -3,4 +3,4 @@ hosts: localhost gather_facts: false roles: - - { role: aws_manage, instance_count: 4 } + - aws_manage diff --git a/pool_automation/roles/aws_manage/molecule/default/tests/test_default.py b/pool_automation/roles/aws_manage/molecule/default/tests/test_default.py index af1ff38eb..b516c8f34 100644 --- a/pool_automation/roles/aws_manage/molecule/default/tests/test_default.py +++ b/pool_automation/roles/aws_manage/molecule/default/tests/test_default.py @@ -4,9 +4,11 @@ def is_valid_instance(inst): if inst.state != "running": return False - if inst.tags.get('namespace') != 'test': + if inst.tags.get('Project') != 'PoolAutomation': return False - if inst.tags.get('role') != 'default': + if inst.tags.get('Namespace') != 'test': + return False + if inst.tags.get('Group') != 'default': return False return True diff --git a/indy_client/persistence/__init__.py b/pool_automation/roles/aws_manage/molecule/resources/group_vars/all.yml similarity index 100% rename from indy_client/persistence/__init__.py rename to pool_automation/roles/aws_manage/molecule/resources/group_vars/all.yml diff --git a/pool_automation/roles/aws_manage/tasks/main.yml b/pool_automation/roles/aws_manage/tasks/main.yml index a6a63491d..569a1d731 100644 --- a/pool_automation/roles/aws_manage/tasks/main.yml +++ b/pool_automation/roles/aws_manage/tasks/main.yml @@ -1,23 +1,47 @@ --- +- name: Pre-Checks - check variables + import_tasks: "pre_checks.yml" + +# TODO combine all 'set_fact's to one task when +# it would be possible (https://github.com/ansible/ansible/issues/40239) +- name: Set ssh_dir_name, ssh_key_name and ssh_known_hosts_name variables + set_fact: + ssh_dir_name: "{{ '.ssh' if aws_build_dir == aws_inventory_dir else 'ssh'}}" + ssh_key_name: "key.{{ 
aws_group_name }}" + ssh_known_hosts_name: "known_hosts.{{ aws_group_name }}" + ssh_config_name: "config.{{ aws_group_name }}" + +- name: Set ssh_dir variable + set_fact: + ssh_dir: "{{ aws_build_dir }}/{{ ssh_dir_name }}" + +- name: Set ssh_key and ssh_known_hosts variable + set_fact: + ssh_key: "{{ ssh_dir }}/{{ ssh_key_name }}" + ssh_known_hosts: "{{ ssh_dir }}/{{ ssh_known_hosts_name }}" + ssh_config: "{{ ssh_dir }}/{{ ssh_config_name }}" + - name: Pre-Up - include_tasks: "{{ role_path }}/tasks/pre_up.yml" - when: instance_count > 0 + import_tasks: "pre_up.yml" + when: aws_instance_count > 0 - name: Manage instances stateful_set: regions: "{{ aws_regions }}" - namespace: "{{ tag_namespace }}" - role: "{{ tag_role }}" + project: "{{ aws_tag_project }}" + namespace: "{{ aws_tag_namespace }}" + group: "{{ aws_tag_group }}" + add_tags: "{{ aws_add_tags }}" key_name: "{{ aws_keyname }}" - group: "{{ aws_group }}" - instance_type: "{{ aws_type }}" - instance_count: "{{ instance_count }}" - register: ec2 + security_group: "{{ aws_sgroup }}" + instance_type: "{{ aws_ec2_type }}" + instance_count: "{{ aws_instance_count }}" + register: aws_ec2hosts - name: Post-Up - include_tasks: "{{ role_path }}/tasks/post_up.yml" - when: instance_count > 0 + import_tasks: "post_up.yml" + when: aws_instance_count > 0 - name: Post-Down - include_tasks: "{{ role_path }}/tasks/post_down.yml" - when: instance_count == 0 + import_tasks: "post_down.yml" + when: aws_instance_count == 0 diff --git a/pool_automation/roles/aws_manage/tasks/post_down.yml b/pool_automation/roles/aws_manage/tasks/post_down.yml index 5bd95ba1d..7b88acd0b 100644 --- a/pool_automation/roles/aws_manage/tasks/post_down.yml +++ b/pool_automation/roles/aws_manage/tasks/post_down.yml @@ -1,9 +1,4 @@ --- -- name: Ensure inventory doesn't exists - file: - path: "{{ inventory_dir }}" - state: absent - - name: Destroy public key ec2_key: name: "{{ aws_keyname }}" @@ -13,7 +8,15 @@ - name: Destroy AWS security group ec2_group: 
- name: "{{ aws_group }}" + name: "{{ aws_sgroup }}" region: "{{ item }}" state: absent with_items: "{{ aws_regions }}" + +- name: Ensure inventory file and ssh settings for '{{ aws_group_name }}' doesn't exist + file: + path: "{{ item }}" + state: absent + loop: "{{ [aws_inventory_file, ssh_config] + + q('fileglob', ssh_known_hosts ~ '*') + + q('fileglob', ssh_key ~ '*') }}" diff --git a/pool_automation/roles/aws_manage/tasks/post_up.yml b/pool_automation/roles/aws_manage/tasks/post_up.yml index 48c14aae0..c9ed4ab3e 100644 --- a/pool_automation/roles/aws_manage/tasks/post_up.yml +++ b/pool_automation/roles/aws_manage/tasks/post_up.yml @@ -1,10 +1,20 @@ --- -- name: Create ssh config +- name: Create/Update ssh config template: src: ssh_config.j2 - dest: "{{ inventory_dir }}/ssh_config" + dest: "{{ ssh_config }}" -- name: Create inventory +- name: Create/Update known_hosts file + import_tasks: "set_known_hosts.yml" + vars: + active: "{{ aws_ec2hosts.active|map(attribute='public_ip')|list }}" + terminated: "{{ aws_ec2hosts.terminated|map(attribute='public_ip')|list }}" + +- name: Create/Update inventory file for AWS hosts + vars: + hosts: "{{ aws_ec2hosts.active }}" + ssh_private_key: "{% raw %}{{ inventory_dir }}{% endraw %}/{{ namespace_dir_relative|default('.', true) }}/{{ ssh_dir_name }}/{{ ssh_key_name }}" + known_hosts_file: "{% raw %}{{ inventory_dir }}{% endraw %}/{{ namespace_dir_relative|default('.', true) }}/{{ ssh_dir_name }}/{{ ssh_known_hosts_name }}" template: - src: hosts.j2 - dest: "{{ inventory_dir }}/hosts" + src: hosts.yml.j2 + dest: "{{ aws_inventory_file }}" diff --git a/pool_automation/roles/aws_manage/tasks/pre_checks.yml b/pool_automation/roles/aws_manage/tasks/pre_checks.yml new file mode 100644 index 000000000..1e763c26a --- /dev/null +++ b/pool_automation/roles/aws_manage/tasks/pre_checks.yml @@ -0,0 +1,27 @@ +--- +- name: Check that required variables are specified + assert: + that: + - lookup('vars', item, default='') + msg: "{{ 
lookup('vars', item, default='undefined')|string }}" + loop: + - aws_project_name + - aws_group_name + - aws_namespace_name + - aws_build_dir + - aws_inventory_dir + - aws_inventory_file + - aws_regions + - aws_ec2_type + - aws_tag_project + - aws_tag_namespace + - aws_tag_group + - aws_keyname + - aws_sgroup + - aws_tag_sgroup_name + +- name: Check that instance_count has acceptable values + assert: + that: + - aws_instance_count >= 0 + msg: "{{ aws_instance_count|string }}" diff --git a/pool_automation/roles/aws_manage/tasks/pre_up.yml b/pool_automation/roles/aws_manage/tasks/pre_up.yml index 6b72e4661..6f093b1d0 100644 --- a/pool_automation/roles/aws_manage/tasks/pre_up.yml +++ b/pool_automation/roles/aws_manage/tasks/pre_up.yml @@ -1,13 +1,13 @@ --- -- name: Ensure inventory dir exists +- name: Ensure directory for ssh settings exists file: - path: "{{ inventory_dir }}" + path: "{{ ssh_dir }}" state: directory - name: Create key-pair expect: - command: "ssh-keygen -f {{ inventory_dir }}/key" - creates: "{{ inventory_dir }}/key" + command: "ssh-keygen -f {{ ssh_key }}" + creates: "{{ ssh_key }}" responses: passphrase: "" @@ -15,14 +15,19 @@ ec2_key: name: "{{ aws_keyname }}" region: "{{ item }}" - key_material: "{{ lookup('file', '{{ inventory_dir }}/key.pub') }}" + key_material: "{{ lookup('file', '{{ ssh_key }}.pub') }}" with_items: "{{ aws_regions }}" - name: Create AWS security group ec2_group: - name: "{{ aws_group }}" + name: "{{ aws_sgroup }}" region: "{{ item }}" - description: "Security group for {{ tag_role }} in {{ tag_namespace }}" + description: "[{{ aws_project_name }}] Security group for {{ aws_group_name }} in {{ aws_tag_namespace }}" + tags: + Name: "{{ aws_tag_sgroup_name }}" + Project: "{{ aws_tag_project }}" + Namespace: "{{ aws_tag_namespace }}" + Group: "{{ aws_tag_group }}" rules: - proto: all cidr_ip: 0.0.0.0/0 diff --git a/pool_automation/roles/aws_manage/tasks/set_known_hosts.yml 
b/pool_automation/roles/aws_manage/tasks/set_known_hosts.yml new file mode 100644 index 000000000..e146db319 --- /dev/null +++ b/pool_automation/roles/aws_manage/tasks/set_known_hosts.yml @@ -0,0 +1,41 @@ +--- +- name: Ensure '{{ ssh_known_hosts }}' exists + copy: + content: "" + dest: "{{ ssh_known_hosts }}" + force: no + when: active or terminated + +- name: Ensure active hosts' keys are in '{{ ssh_known_hosts }}' + block: + - name: Check existent records in '{{ ssh_known_hosts }}' + command: "ssh-keygen -F {{ item }} -f {{ ssh_known_hosts }}" + register: known_hosts + loop: "{{ active }}" + ignore_errors: true + failed_when: false + changed_when: false + + - name: Gather host keys for unknown hosts + command: "ssh-keyscan -H {{ item.item }}" + when: item.rc != 0 + register: host_keys + loop: "{{ known_hosts.results }}" + + - name: Add host keys into '{{ ssh_known_hosts }}' + known_hosts: + name: "{{ item.item.item }}" + key: "{{ item.stdout }}" + state: "present" + path: "{{ ssh_known_hosts }}" + when: item.stdout is defined + loop: "{{ host_keys.results }}" + when: active + +- name: Remove terminated hosts' keys from '{{ ssh_known_hosts }}' + known_hosts: + name: "{{ item }}" + state: "absent" + path: "{{ ssh_known_hosts }}" + loop: "{{ terminated }}" + when: terminated diff --git a/pool_automation/roles/aws_manage/templates/hosts.j2 b/pool_automation/roles/aws_manage/templates/hosts.j2 deleted file mode 100644 index 767e8b886..000000000 --- a/pool_automation/roles/aws_manage/templates/hosts.j2 +++ /dev/null @@ -1,8 +0,0 @@ -[{{ group_name }}] -{% for item in ec2.results %} -{{ tag_namespace }}_{{ tag_role }}{{ item.tag_id }} tag_id={{ item.tag_id }} ansible_host={{ item.public_ip }} -{% endfor %} - -[{{ group_name }}:vars] -ansible_user=ubuntu -ansible_ssh_private_key_file={% raw %}{{ inventory_dir }}/key{% endraw %} diff --git a/pool_automation/roles/aws_manage/templates/hosts.yml.j2 b/pool_automation/roles/aws_manage/templates/hosts.yml.j2 new file mode 
100644 index 000000000..f28f16a67 --- /dev/null +++ b/pool_automation/roles/aws_manage/templates/hosts.yml.j2 @@ -0,0 +1,19 @@ +#jinja2: lstrip_blocks: True +--- +{{ ansible_managed | comment }} +all: + children: + {{ aws_base_group }}: + children: + {{ aws_tag_group }}: + vars: + ansible_ssh_private_key_file: "{{ ssh_private_key }}" + ansible_ssh_extra_args: "-o UserKnownHostsFile={{ known_hosts_file }}" + hosts: + {% for host in hosts|sort(attribute='tag_id') %} + {{ aws_tag_namespace }}_{{ aws_tag_group }}{{ host.tag_id }}: + tag_id: {{ host.tag_id }} + ansible_user: {{ host.user }} + ansible_host: {{ host.public_ip }} + {% endfor %} +... diff --git a/pool_automation/roles/aws_manage/templates/ssh_config.j2 b/pool_automation/roles/aws_manage/templates/ssh_config.j2 index 206481302..4d27b81f9 100644 --- a/pool_automation/roles/aws_manage/templates/ssh_config.j2 +++ b/pool_automation/roles/aws_manage/templates/ssh_config.j2 @@ -1,6 +1,7 @@ -{% for item in ec2.results %} -Host {{ tag_namespace }}_{{ tag_role }}{{ item.tag_id }} +{% for item in aws_ec2hosts.active %} +Host {{ aws_tag_namespace }}_{{ aws_tag_group }}{{ item.tag_id }} HostName {{ item.public_ip }} User {{ item.user }} - IdentityFile ./key + IdentityFile ./{{ ssh_key_name }} + UserKnownHostsFile ./{{ ssh_known_hosts_name }} {% endfor %} diff --git a/pool_automation/roles/common/handlers/main.yml b/pool_automation/roles/common/handlers/main.yml deleted file mode 100644 index ed97d539c..000000000 --- a/pool_automation/roles/common/handlers/main.yml +++ /dev/null @@ -1 +0,0 @@ ---- diff --git a/pool_automation/roles/common/molecule/default/playbook.yml b/pool_automation/roles/common/molecule/default/playbook.yml deleted file mode 100644 index b73db92e0..000000000 --- a/pool_automation/roles/common/molecule/default/playbook.yml +++ /dev/null @@ -1,6 +0,0 @@ ---- -- name: Converge - gather_facts: false - hosts: all - roles: - - role: common diff --git a/pool_automation/roles/common/tasks/main.yml 
b/pool_automation/roles/common/tasks/main.yml deleted file mode 100644 index ad29eb4c7..000000000 --- a/pool_automation/roles/common/tasks/main.yml +++ /dev/null @@ -1,17 +0,0 @@ ---- -- name: Check if python is installed - raw: test -e /usr/bin/python - register: test_python - failed_when: false - changed_when: false - -- name: Install python for Ansible - raw: apt -y update && apt install -y python-minimal - when: test_python.rc != 0 - become: true - -- name: Add sovrin key - apt_key: - keyserver: keyserver.ubuntu.com - id: 3B75B82CF342D9FB - become: true diff --git a/pool_automation/roles/indy_cli/defaults/main.yml b/pool_automation/roles/indy_cli/defaults/main.yml new file mode 100644 index 000000000..66c0163d4 --- /dev/null +++ b/pool_automation/roles/indy_cli/defaults/main.yml @@ -0,0 +1,18 @@ +--- +indy_cli_build_dir: "{{ namespace_dir | default(inventory_dir, true) | default('.build', true) }}" + +indy_cli_channel: master +# TODO remove default values for versions +indy_cli_ver: 1.6.7~829 +indy_cli_libindy_ver: null + +# configuration parameters +indy_cli_configuration: true + +indy_cli_pool_name: indy-pool + +indy_cli_pool_dir_name: "{{ '.pool' if indy_cli_build_dir == inventory_dir else 'pool' }}" +indy_cli_pool_genesis_txns_name: pool_transactions_genesis +indy_cli_pool_genesis_txns_path_local: "{{ [indy_cli_build_dir, indy_cli_pool_dir_name, indy_cli_pool_genesis_txns_name]|join('/') }}" +# TODO as an option we may use ansible_env.HOME but it needs facts gathering +indy_cli_pool_genesis_txns_path_remote: '$HOME/{{ indy_cli_pool_genesis_txns_name }}' diff --git a/pool_automation/roles/indy_cli/meta/main.yml b/pool_automation/roles/indy_cli/meta/main.yml new file mode 100644 index 000000000..5376abc34 --- /dev/null +++ b/pool_automation/roles/indy_cli/meta/main.yml @@ -0,0 +1,3 @@ +--- +dependencies: + - role: ansible_bootstrap diff --git a/pool_automation/roles/indy_cli/molecule/base/molecule.yml 
b/pool_automation/roles/indy_cli/molecule/base/molecule.yml new file mode 100644 index 000000000..f1e312e65 --- /dev/null +++ b/pool_automation/roles/indy_cli/molecule/base/molecule.yml @@ -0,0 +1,32 @@ +--- +dependency: + name: galaxy +driver: + name: docker +lint: + name: yamllint + options: + config-file: ../../yamllint +platforms: + - name: indy_cli_base + image: ubuntu:16.04 # TODO parametrize + pre_build_image: true + network_mode: ${MOLECULE_DOCKER_NETWORK_MODE:-bridge} + groups: + - clients +provisioner: + name: ansible + lint: + name: ansible-lint + inventory: + links: + group_vars: ${MOLECULE_INVENTORY_GROUP_VARS:-../resources/group_vars} + playbooks: + converge: ../resources/playbooks/playbook_base.yml +scenario: + name: base +verifier: + name: testinfra + directory: ../resources/tests/base + lint: + name: flake8 diff --git a/pool_automation/roles/indy_cli/molecule/default/host_vars/client1 b/pool_automation/roles/indy_cli/molecule/default/host_vars/client1 new file mode 100644 index 000000000..5cc32260e --- /dev/null +++ b/pool_automation/roles/indy_cli/molecule/default/host_vars/client1 @@ -0,0 +1,2 @@ +--- +ansible_host: client1 diff --git a/pool_automation/roles/indy_cli/molecule/default/host_vars/client2 b/pool_automation/roles/indy_cli/molecule/default/host_vars/client2 new file mode 100644 index 000000000..e9a796669 --- /dev/null +++ b/pool_automation/roles/indy_cli/molecule/default/host_vars/client2 @@ -0,0 +1,2 @@ +--- +ansible_host: client2 diff --git a/pool_automation/roles/indy_cli/molecule/default/host_vars/node1 b/pool_automation/roles/indy_cli/molecule/default/host_vars/node1 new file mode 100644 index 000000000..83511dd10 --- /dev/null +++ b/pool_automation/roles/indy_cli/molecule/default/host_vars/node1 @@ -0,0 +1,3 @@ +--- +tag_id: 1 +ansible_host: node1 diff --git a/pool_automation/roles/indy_cli/molecule/default/host_vars/node2 b/pool_automation/roles/indy_cli/molecule/default/host_vars/node2 new file mode 100644 index 
000000000..83bb42fe6 --- /dev/null +++ b/pool_automation/roles/indy_cli/molecule/default/host_vars/node2 @@ -0,0 +1,3 @@ +--- +tag_id: 2 +ansible_host: node2 diff --git a/pool_automation/roles/indy_cli/molecule/default/host_vars/node3 b/pool_automation/roles/indy_cli/molecule/default/host_vars/node3 new file mode 100644 index 000000000..89bbea3c0 --- /dev/null +++ b/pool_automation/roles/indy_cli/molecule/default/host_vars/node3 @@ -0,0 +1,3 @@ +--- +tag_id: 3 +ansible_host: node3 diff --git a/pool_automation/roles/indy_cli/molecule/default/host_vars/node4 b/pool_automation/roles/indy_cli/molecule/default/host_vars/node4 new file mode 100644 index 000000000..9b66a2614 --- /dev/null +++ b/pool_automation/roles/indy_cli/molecule/default/host_vars/node4 @@ -0,0 +1,3 @@ +--- +tag_id: 4 +ansible_host: node4 diff --git a/pool_automation/roles/indy_cli/molecule/default/molecule.yml b/pool_automation/roles/indy_cli/molecule/default/molecule.yml new file mode 100644 index 000000000..5c170b664 --- /dev/null +++ b/pool_automation/roles/indy_cli/molecule/default/molecule.yml @@ -0,0 +1,68 @@ +--- +scenario: + name: default + +driver: + name: docker + +parameters: + node_config: &node_config + image: solita/ubuntu-systemd:16.04 # TODO parametrize + command: ${MOLECULE_DOCKER_COMMAND:-""} + volumes: + - /sys/fs/cgroup:/sys/fs/cgroup:ro + privileged: true + pre_build_image: true + networks: + - name: private_network + groups: + - nodes + + client_config: &client_config + image: ubuntu:16.04 # TODO parametrize + pre_build_image: true + networks: + - name: private_network + groups: + - clients + +provisioner: + name: ansible + lint: + name: ansible-lint + inventory: + links: + group_vars: ${MOLECULE_INVENTORY_GROUP_VARS:-../resources/group_vars} + host_vars: ./host_vars + playbooks: + converge: ../resources/playbooks/playbook.yml + +platforms: + - <<: *node_config + name: node1 + - <<: *node_config + name: node2 + - <<: *node_config + name: node3 + - <<: *node_config + name: node4 
+ - <<: *client_config + name: client1 + - <<: *client_config + name: client2 + +dependency: + name: galaxy + +lint: + name: yamllint + options: + config-file: ../../yamllint + +verifier: + name: testinfra + directory: ../resources/tests/base + additional_files_or_dirs: + - ../test_configured.py + lint: + name: flake8 diff --git a/pool_automation/roles/indy_cli/molecule/resources/group_vars/all.yml b/pool_automation/roles/indy_cli/molecule/resources/group_vars/all.yml new file mode 100644 index 000000000..0c49d6237 --- /dev/null +++ b/pool_automation/roles/indy_cli/molecule/resources/group_vars/all.yml @@ -0,0 +1,3 @@ +--- +indy_cli_ver: 1.6.7~829 +indy_cli_libindy_ver: 1.6.7~829 diff --git a/pool_automation/roles/indy_cli/molecule/resources/playbooks/playbook.yml b/pool_automation/roles/indy_cli/molecule/resources/playbooks/playbook.yml new file mode 100644 index 000000000..8813f3b5c --- /dev/null +++ b/pool_automation/roles/indy_cli/molecule/resources/playbooks/playbook.yml @@ -0,0 +1,12 @@ +--- +- name: Converge pool + gather_facts: false + hosts: nodes + roles: + - role: indy_node + +- name: Converge clients + gather_facts: false + hosts: clients + roles: + - role: indy_cli diff --git a/pool_automation/roles/indy_cli/molecule/resources/playbooks/playbook_base.yml b/pool_automation/roles/indy_cli/molecule/resources/playbooks/playbook_base.yml new file mode 100644 index 000000000..85b128813 --- /dev/null +++ b/pool_automation/roles/indy_cli/molecule/resources/playbooks/playbook_base.yml @@ -0,0 +1,8 @@ +--- +- name: Converge + gather_facts: false + hosts: clients + roles: + - role: indy_cli + vars: + indy_cli_configuration: false diff --git a/pool_automation/roles/indy_cli/molecule/resources/tests/base/test_installed.py b/pool_automation/roles/indy_cli/molecule/resources/tests/base/test_installed.py new file mode 100644 index 000000000..1f4e09c03 --- /dev/null +++ b/pool_automation/roles/indy_cli/molecule/resources/tests/base/test_installed.py @@ -0,0 +1,21 @@ 
+testinfra_hosts = ['clients'] + + +def test_correct_package_versions_are_installed(host): + v = host.ansible.get_variables() + + indy_cli = host.package('indy-cli') + + assert indy_cli.is_installed + assert indy_cli.version == v['indy_cli_ver'] + + libindy = host.package('libindy') + + assert libindy.is_installed + + if v['indy_cli_libindy_ver'] is not None: + assert libindy.version == v['indy_cli_libindy_ver'] + + +def test_indy_cli_is_available_in_path(host): + assert host.exists('indy-cli') diff --git a/pool_automation/roles/indy_cli/molecule/resources/tests/test_configured.py b/pool_automation/roles/indy_cli/molecule/resources/tests/test_configured.py new file mode 100644 index 000000000..fabb9893d --- /dev/null +++ b/pool_automation/roles/indy_cli/molecule/resources/tests/test_configured.py @@ -0,0 +1,16 @@ +# TODO more tests + +testinfra_hosts = ['clients'] + +pool_name = "indy-pool" + + +def test_pool_txns_genesis_file_exists(host): + txns_file = host.file("{}/pool_transactions_genesis".format(host.user().home)) + assert txns_file.exists + + +def test_cli_is_configured(host): + # XXX indy-cli won't return non-zero if can't connect + res = host.run("echo 'pool connect %s' | indy-cli", pool_name) + assert 'Pool "{}" has been connected'.format(pool_name) in res.stdout diff --git a/pool_automation/roles/indy_cli/tasks/configure.yml b/pool_automation/roles/indy_cli/tasks/configure.yml new file mode 100644 index 000000000..da4faeedb --- /dev/null +++ b/pool_automation/roles/indy_cli/tasks/configure.yml @@ -0,0 +1,18 @@ +--- +- name: Push pool genesis txns file to client + copy: + src: "{{ indy_cli_pool_genesis_txns_path_local }}" + dest: "{{ indy_cli_pool_genesis_txns_path_remote }}" + +- name: Check list of configured pools + shell: "echo 'pool list' | indy-cli" + register: test_cli_res + failed_when: false + changed_when: false + +- name: Configure CLI to work with the pool + shell: "echo \"pool create {{ indy_cli_pool_name }} gen_txn_file={{ 
indy_cli_pool_genesis_txns_path_remote }}\" | indy-cli" + register: pool_create_res + failed_when: ('Pool config \"' ~ indy_cli_pool_name ~ '\" has been created') not in pool_create_res.stdout + when: indy_cli_pool_name not in test_cli_res.stdout +... diff --git a/pool_automation/roles/indy_cli/tasks/main.yml b/pool_automation/roles/indy_cli/tasks/main.yml new file mode 100644 index 000000000..fae2ffc98 --- /dev/null +++ b/pool_automation/roles/indy_cli/tasks/main.yml @@ -0,0 +1,49 @@ +--- +- name: Check that required variables are specified + assert: + that: + - lookup('vars', item, default='') + msg: "{{ lookup('vars', item, default='undefined')|string }}" + loop: + - indy_cli_build_dir + - indy_cli_channel + - indy_cli_ver + - indy_cli_pool_name + - indy_cli_pool_dir_name + - indy_cli_pool_genesis_txns_name + - indy_cli_pool_genesis_txns_path_local + - indy_cli_pool_genesis_txns_path_remote + +# TODO move the following three tasks (HTTPS, sovrin key and sovrin repos) +# into separate role +- name: Install HTTPS support for apt + apt: + name: + - apt-transport-https + - ca-certificates + update_cache: true + become: true + +- name: Add sovrin key + apt_key: + keyserver: keyserver.ubuntu.com + id: 3B75B82CF342D9FB + become: true + +- name: Add sovrin repositories for indy-sdk + apt_repository: + repo: "deb https://repo.sovrin.org/sdk/deb xenial {{ indy_cli_channel }}" + state: present + become: true + +- name: Install indy cli + apt: + name: + - "libindy{{ ('=' ~ indy_cli_libindy_ver) if indy_cli_libindy_ver else '' }}" + - "indy-cli={{ indy_cli_ver }}" + update_cache: true + become: true + +- name: Configuration tasks + include_tasks: configure.yml + when: indy_cli_configuration diff --git a/pool_automation/roles/indy_node/defaults/main.yml b/pool_automation/roles/indy_node/defaults/main.yml new file mode 100644 index 000000000..43e02a476 --- /dev/null +++ b/pool_automation/roles/indy_node/defaults/main.yml @@ -0,0 +1,17 @@ +--- +indy_node_build_dir: "{{ 
namespace_dir | default(inventory_dir, true) | default('.build', true) }}" + +indy_node_channel: master +# TODO remove default values for versions +indy_node_ver: 1.6.563 +indy_plenum_ver: 1.6.501 +python_indy_crypto_ver: 0.4.1 +libindy_crypto_ver: 0.4.0 + +# configuration parameters +indy_node_configuration: true +network_name: sandbox + +indy_node_pool_dir_name: "{{ '.pool' if indy_node_build_dir == inventory_dir else 'pool' }}" +indy_node_pool_genesis_txns_name: pool_transactions_genesis +indy_node_pool_genesis_txns_path: "{{ [indy_node_build_dir, indy_node_pool_dir_name, indy_node_pool_genesis_txns_name]|join('/') }}" diff --git a/pool_automation/roles/indy_node/meta/main.yml b/pool_automation/roles/indy_node/meta/main.yml new file mode 100644 index 000000000..5376abc34 --- /dev/null +++ b/pool_automation/roles/indy_node/meta/main.yml @@ -0,0 +1,3 @@ +--- +dependencies: + - role: ansible_bootstrap diff --git a/pool_automation/roles/indy_node/molecule/base/molecule.yml b/pool_automation/roles/indy_node/molecule/base/molecule.yml new file mode 100644 index 000000000..0f1bc3f2c --- /dev/null +++ b/pool_automation/roles/indy_node/molecule/base/molecule.yml @@ -0,0 +1,34 @@ +--- +dependency: + name: galaxy +driver: + name: docker +lint: + name: yamllint + options: + config-file: ../../yamllint +platforms: + - name: indy_node_base + image: solita/ubuntu-systemd:16.04 # TODO parametrize + command: ${MOLECULE_DOCKER_COMMAND:-""} + volumes: + - /sys/fs/cgroup:/sys/fs/cgroup:ro + privileged: true + pre_build_image: true + network_mode: ${MOLECULE_DOCKER_NETWORK_MODE:-bridge} +provisioner: + name: ansible + lint: + name: ansible-lint + inventory: + links: + group_vars: ${MOLECULE_INVENTORY_GROUP_VARS:-../resources/group_vars} + playbooks: + converge: ../resources/playbooks/playbook_base.yml +scenario: + name: base +verifier: + name: testinfra + directory: ../resources/tests/base + lint: + name: flake8 diff --git 
a/pool_automation/roles/indy_node/molecule/default/host_vars/node1 b/pool_automation/roles/indy_node/molecule/default/host_vars/node1 new file mode 100644 index 000000000..83511dd10 --- /dev/null +++ b/pool_automation/roles/indy_node/molecule/default/host_vars/node1 @@ -0,0 +1,3 @@ +--- +tag_id: 1 +ansible_host: node1 diff --git a/pool_automation/roles/indy_node/molecule/default/host_vars/node2 b/pool_automation/roles/indy_node/molecule/default/host_vars/node2 new file mode 100644 index 000000000..83bb42fe6 --- /dev/null +++ b/pool_automation/roles/indy_node/molecule/default/host_vars/node2 @@ -0,0 +1,3 @@ +--- +tag_id: 2 +ansible_host: node2 diff --git a/pool_automation/roles/indy_node/molecule/default/host_vars/node3 b/pool_automation/roles/indy_node/molecule/default/host_vars/node3 new file mode 100644 index 000000000..89bbea3c0 --- /dev/null +++ b/pool_automation/roles/indy_node/molecule/default/host_vars/node3 @@ -0,0 +1,3 @@ +--- +tag_id: 3 +ansible_host: node3 diff --git a/pool_automation/roles/indy_node/molecule/default/host_vars/node4 b/pool_automation/roles/indy_node/molecule/default/host_vars/node4 new file mode 100644 index 000000000..9b66a2614 --- /dev/null +++ b/pool_automation/roles/indy_node/molecule/default/host_vars/node4 @@ -0,0 +1,3 @@ +--- +tag_id: 4 +ansible_host: node4 diff --git a/pool_automation/roles/indy_node/molecule/default/molecule.yml b/pool_automation/roles/indy_node/molecule/default/molecule.yml new file mode 100644 index 000000000..ed3ac1caa --- /dev/null +++ b/pool_automation/roles/indy_node/molecule/default/molecule.yml @@ -0,0 +1,56 @@ +--- +scenario: + name: default + +driver: + name: docker + +parameters: + node_config: &node_config + image: solita/ubuntu-systemd:16.04 # TODO parametrize + command: ${MOLECULE_DOCKER_COMMAND:-""} # necessary to start /sbin/init from solita's dockerfile + volumes: + - /sys/fs/cgroup:/sys/fs/cgroup:ro + privileged: true + pre_build_image: true + networks: + - name: private_network + groups: + - 
nodes + +provisioner: + name: ansible + lint: + name: ansible-lint + inventory: + links: + group_vars: ${MOLECULE_INVENTORY_GROUP_VARS:-../resources/group_vars} + host_vars: ./host_vars + playbooks: + converge: ../resources/playbooks/playbook.yml + +platforms: + - <<: *node_config + name: node1 + - <<: *node_config + name: node2 + - <<: *node_config + name: node3 + - <<: *node_config + name: node4 + +dependency: + name: galaxy + +lint: + name: yamllint + options: + config-file: ../../yamllint + +verifier: + name: testinfra + directory: ../resources/tests/base + additional_files_or_dirs: + - ../test_configured.py + lint: + name: flake8 diff --git a/pool_automation/roles/node_install/defaults/main.yml b/pool_automation/roles/indy_node/molecule/resources/group_vars/all.yml similarity index 87% rename from pool_automation/roles/node_install/defaults/main.yml rename to pool_automation/roles/indy_node/molecule/resources/group_vars/all.yml index 9508be34b..daa3439e9 100644 --- a/pool_automation/roles/node_install/defaults/main.yml +++ b/pool_automation/roles/indy_node/molecule/resources/group_vars/all.yml @@ -1,5 +1,4 @@ --- -channel: master indy_node_ver: 1.6.563 indy_plenum_ver: 1.6.501 python_indy_crypto_ver: 0.4.1 diff --git a/pool_automation/roles/pool_install/molecule/default/playbook.yml b/pool_automation/roles/indy_node/molecule/resources/playbooks/playbook.yml similarity index 72% rename from pool_automation/roles/pool_install/molecule/default/playbook.yml rename to pool_automation/roles/indy_node/molecule/resources/playbooks/playbook.yml index ab9af6ef9..60ed5b032 100644 --- a/pool_automation/roles/pool_install/molecule/default/playbook.yml +++ b/pool_automation/roles/indy_node/molecule/resources/playbooks/playbook.yml @@ -3,4 +3,4 @@ gather_facts: false hosts: nodes roles: - - role: pool_install + - role: indy_node diff --git a/pool_automation/roles/indy_node/molecule/resources/playbooks/playbook_base.yml 
b/pool_automation/roles/indy_node/molecule/resources/playbooks/playbook_base.yml new file mode 100644 index 000000000..0e9cccb2a --- /dev/null +++ b/pool_automation/roles/indy_node/molecule/resources/playbooks/playbook_base.yml @@ -0,0 +1,8 @@ +--- +- name: Converge + gather_facts: false + hosts: all + roles: + - role: indy_node + vars: + indy_node_configuration: false diff --git a/pool_automation/roles/node_install/molecule/default/tests/test_default.py b/pool_automation/roles/indy_node/molecule/resources/tests/base/test_installed.py similarity index 100% rename from pool_automation/roles/node_install/molecule/default/tests/test_default.py rename to pool_automation/roles/indy_node/molecule/resources/tests/base/test_installed.py diff --git a/pool_automation/roles/pool_install/molecule/default/tests/test_default.py b/pool_automation/roles/indy_node/molecule/resources/tests/test_configured.py similarity index 100% rename from pool_automation/roles/pool_install/molecule/default/tests/test_default.py rename to pool_automation/roles/indy_node/molecule/resources/tests/test_configured.py diff --git a/pool_automation/roles/indy_node/molecule/vagrant/host_vars/node1 b/pool_automation/roles/indy_node/molecule/vagrant/host_vars/node1 new file mode 100644 index 000000000..1c1194820 --- /dev/null +++ b/pool_automation/roles/indy_node/molecule/vagrant/host_vars/node1 @@ -0,0 +1,4 @@ +--- +tag_id: 1 +ansible_host: 192.168.33.11 +ansible_port: 22 diff --git a/pool_automation/roles/indy_node/molecule/vagrant/host_vars/node2 b/pool_automation/roles/indy_node/molecule/vagrant/host_vars/node2 new file mode 100644 index 000000000..31534d81b --- /dev/null +++ b/pool_automation/roles/indy_node/molecule/vagrant/host_vars/node2 @@ -0,0 +1,4 @@ +--- +tag_id: 2 +ansible_host: 192.168.33.12 +ansible_port: 22 diff --git a/pool_automation/roles/indy_node/molecule/vagrant/host_vars/node3 b/pool_automation/roles/indy_node/molecule/vagrant/host_vars/node3 new file mode 100644 index 
000000000..512395c17 --- /dev/null +++ b/pool_automation/roles/indy_node/molecule/vagrant/host_vars/node3 @@ -0,0 +1,4 @@ +--- +tag_id: 3 +ansible_host: 192.168.33.13 +ansible_port: 22 diff --git a/pool_automation/roles/indy_node/molecule/vagrant/host_vars/node4 b/pool_automation/roles/indy_node/molecule/vagrant/host_vars/node4 new file mode 100644 index 000000000..384a50eeb --- /dev/null +++ b/pool_automation/roles/indy_node/molecule/vagrant/host_vars/node4 @@ -0,0 +1,4 @@ +--- +tag_id: 4 +ansible_host: 192.168.33.14 +ansible_port: 22 diff --git a/pool_automation/roles/pool_install/molecule/default/molecule.yml b/pool_automation/roles/indy_node/molecule/vagrant/molecule.yml similarity index 55% rename from pool_automation/roles/pool_install/molecule/default/molecule.yml rename to pool_automation/roles/indy_node/molecule/vagrant/molecule.yml index 6f27b0082..a60ac4fe6 100644 --- a/pool_automation/roles/pool_install/molecule/default/molecule.yml +++ b/pool_automation/roles/indy_node/molecule/vagrant/molecule.yml @@ -1,50 +1,45 @@ --- scenario: - name: default + name: vagrant parameters: node_config: &node_config box: ubuntu/xenial64 groups: - nodes - node_ips: - - &node1_ip 192.168.33.11 - - &node2_ip 192.168.33.12 - - &node3_ip 192.168.33.13 - - &node4_ip 192.168.33.14 provisioner: name: ansible lint: name: ansible-lint inventory: - host_vars: - node1: {tag_id: 1, ansible_host: *node1_ip, ansible_port: 22} - node2: {tag_id: 2, ansible_host: *node2_ip, ansible_port: 22} - node3: {tag_id: 3, ansible_host: *node3_ip, ansible_port: 22} - node4: {tag_id: 4, ansible_host: *node4_ip, ansible_port: 22} + links: + group_vars: ${MOLECULE_INVENTORY_GROUP_VARS:-../resources/group_vars} + host_vars: ./host_vars + playbooks: + converge: ../resources/playbooks/playbook.yml platforms: - <<: *node_config name: node1 interfaces: - network_name: private_network - ip: *node1_ip + ip: 192.168.33.11 - <<: *node_config name: node2 interfaces: - network_name: private_network - ip: 
*node2_ip + ip: 192.168.33.12 - <<: *node_config name: node3 interfaces: - network_name: private_network - ip: *node3_ip + ip: 192.168.33.13 - <<: *node_config name: node4 interfaces: - network_name: private_network - ip: *node4_ip + ip: 192.168.33.14 driver: name: vagrant @@ -54,9 +49,12 @@ driver: lint: name: yamllint options: - config-file: ../common/yamllint + config-file: ../../yamllint verifier: name: testinfra + directory: ../resources/tests/base + additional_files_or_dirs: + - ../test_configured.py lint: name: flake8 diff --git a/pool_automation/roles/node_install/molecule/default/molecule.yml b/pool_automation/roles/indy_node/molecule/vagrant_base/molecule.yml similarity index 54% rename from pool_automation/roles/node_install/molecule/default/molecule.yml rename to pool_automation/roles/indy_node/molecule/vagrant_base/molecule.yml index 1ef8957e8..194905377 100644 --- a/pool_automation/roles/node_install/molecule/default/molecule.yml +++ b/pool_automation/roles/indy_node/molecule/vagrant_base/molecule.yml @@ -1,18 +1,16 @@ --- scenario: - name: default + name: vagrant_base provisioner: name: ansible lint: name: ansible-lint inventory: - group_vars: - all: - indy_node_ver: 1.6.563 - indy_plenum_ver: 1.6.501 - python_indy_crypto_ver: 0.4.1 - libindy_crypto_ver: 0.4.0 + links: + group_vars: ${MOLECULE_INVENTORY_GROUP_VARS:-../resources/group_vars} + playbooks: + converge: ../resources/playbooks/playbook_base.yml platforms: - name: instance @@ -26,9 +24,10 @@ driver: lint: name: yamllint options: - config-file: ../common/yamllint + config-file: ../../yamllint verifier: name: testinfra + directory: ../resources/tests/base lint: name: flake8 diff --git a/pool_automation/roles/pool_install/tasks/main.yml b/pool_automation/roles/indy_node/tasks/configure.yml similarity index 67% rename from pool_automation/roles/pool_install/tasks/main.yml rename to pool_automation/roles/indy_node/tasks/configure.yml index 583ea9096..274b8b2a4 100644 --- 
a/pool_automation/roles/pool_install/tasks/main.yml +++ b/pool_automation/roles/indy_node/tasks/configure.yml @@ -1,4 +1,12 @@ --- +- name: Check that required variables are specified + assert: + that: + - lookup('vars', item, default='') + msg: "{{ lookup('vars', item, default='undefined')|string }}" + loop: + - network_name + - name: Gather sorted node data set_fact: sorted_nodes: "{{ groups['nodes'] | map('extract', hostvars) | sort(attribute='tag_id') }}" @@ -35,13 +43,23 @@ - "/var/lib/indy/{{ network_name }}" - "/var/log/indy/{{ network_name }}" +- set_fact: + pool_genesis_txns_path_remote: "/var/lib/indy/{{ network_name }}/pool_transactions_genesis" + - name: Generate indy pool transactions command: "generate_indy_pool_transactions --nodes {{ pool_size }} --clients 1 --nodeNum {{ tag_id }} --ips '{{ node_ips }}'" args: - creates: "/var/lib/indy/{{ network_name }}/pool_transactions_genesis" + creates: "{{ pool_genesis_txns_path_remote }}" become: true become_user: indy +- name: Fetch pool genesis txns file to Ansible controller + fetch: + src: "{{ pool_genesis_txns_path_remote }}" + dest: "{{ indy_node_pool_genesis_txns_path }}" + flat: true + run_once: true + - name: Start indy service service: name: indy-node diff --git a/pool_automation/roles/indy_node/tasks/main.yml b/pool_automation/roles/indy_node/tasks/main.yml new file mode 100644 index 000000000..97684041c --- /dev/null +++ b/pool_automation/roles/indy_node/tasks/main.yml @@ -0,0 +1,59 @@ +--- +- name: Check that required variables are specified + assert: + that: + - lookup('vars', item, default='') + msg: "{{ lookup('vars', item, default='undefined')|string }}" + loop: + - indy_node_build_dir + - indy_node_channel + - indy_node_ver + - indy_plenum_ver + - python_indy_crypto_ver + - libindy_crypto_ver + - network_name + - indy_node_pool_dir_name + - indy_node_pool_genesis_txns_name + - indy_node_pool_genesis_txns_path + +# TODO move the following three tasks (HTTPS, sovrin key and sovrin repos) +# 
into separate role +- name: Install HTTPS support for apt + apt: + name: + - apt-transport-https + - ca-certificates + update_cache: true + become: true + +- name: Add sovrin key + apt_key: + keyserver: keyserver.ubuntu.com + id: 3B75B82CF342D9FB + become: true + +- name: Add sovrin repositories + apt_repository: + repo: "deb https://repo.sovrin.org/deb xenial {{ indy_node_channel }}" + state: present + become: true + +- name: Install indy node + apt: + name: + - "indy-node={{ indy_node_ver }}" + - "indy-plenum={{ indy_plenum_ver }}" + - "libindy-crypto={{ libindy_crypto_ver }}" + - "python3-indy-crypto={{ python_indy_crypto_ver }}" + update_cache: true + become: true + +- name: Enable node service + service: + name: indy-node + enabled: true + become: true + +- name: Configuration tasks + include_tasks: configure.yml + when: indy_node_configuration diff --git a/pool_automation/roles/pool_install/templates/indy_config.j2 b/pool_automation/roles/indy_node/templates/indy_config.j2 similarity index 100% rename from pool_automation/roles/pool_install/templates/indy_config.j2 rename to pool_automation/roles/indy_node/templates/indy_config.j2 diff --git a/pool_automation/roles/pool_install/templates/indy_env.j2 b/pool_automation/roles/indy_node/templates/indy_env.j2 similarity index 76% rename from pool_automation/roles/pool_install/templates/indy_env.j2 rename to pool_automation/roles/indy_node/templates/indy_env.j2 index 437f141ea..3e2adb32f 100644 --- a/pool_automation/roles/pool_install/templates/indy_env.j2 +++ b/pool_automation/roles/indy_node/templates/indy_env.j2 @@ -3,4 +3,4 @@ NODE_IP=0.0.0.0 NODE_PORT=9707 NODE_CLIENT_IP=0.0.0.0 NODE_CLIENT_PORT=9708 -CLIENT_CONNECTIONS_LIMIT=15360 +CLIENT_CONNECTIONS_LIMIT=500 diff --git a/pool_automation/roles/node_install/handlers/main.yml b/pool_automation/roles/node_install/handlers/main.yml deleted file mode 100644 index 8338e70c7..000000000 --- a/pool_automation/roles/node_install/handlers/main.yml +++ /dev/null @@ -1,2 
+0,0 @@ ---- -# handlers file for indy_node diff --git a/pool_automation/roles/node_install/meta/main.yml b/pool_automation/roles/node_install/meta/main.yml deleted file mode 100644 index fdda41bb3..000000000 --- a/pool_automation/roles/node_install/meta/main.yml +++ /dev/null @@ -1,3 +0,0 @@ ---- -dependencies: - - role: common diff --git a/pool_automation/roles/node_install/tasks/main.yml b/pool_automation/roles/node_install/tasks/main.yml deleted file mode 100644 index 6394098a0..000000000 --- a/pool_automation/roles/node_install/tasks/main.yml +++ /dev/null @@ -1,24 +0,0 @@ ---- -- name: Add sovrin repositories - apt_repository: - repo: "deb https://repo.sovrin.org/deb xenial {{ channel }}" - state: present - become: true - -- name: Install indy node - apt: - name: "{{ item }}" - update_cache: true - force: true - become: true - with_items: - - "indy-node={{ indy_node_ver }}" - - "indy-plenum={{ indy_plenum_ver }}" - - "libindy-crypto={{ libindy_crypto_ver }}" - - "python3-indy-crypto={{ python_indy_crypto_ver }}" - -- name: Enable node service - service: - name: indy-node - enabled: true - become: true diff --git a/pool_automation/roles/perf_scripts/defaults/main.yml b/pool_automation/roles/perf_scripts/defaults/main.yml new file mode 100644 index 000000000..d551e52ee --- /dev/null +++ b/pool_automation/roles/perf_scripts/defaults/main.yml @@ -0,0 +1,18 @@ +--- +perf_scripts_build_dir: "{{ namespace_dir | default(inventory_dir, true) | default('.build', true) }}" + +perf_scripts_indy_sdk_channel: master +perf_scripts_libindy_ver: null + +perf_scripts_venv_name: perf_venv # created in user's home +perf_scripts_ver: master # branch / tag / sha1 +perf_scripts_python3_indy_ver: null + +# configuration parameters +perf_scripts_configuration: true + +perf_scripts_pool_dir_name: "{{ '.pool' if perf_scripts_build_dir == inventory_dir else 'pool' }}" +perf_scripts_pool_genesis_txns_name: pool_transactions_genesis +perf_scripts_pool_genesis_txns_path_local: "{{ 
[perf_scripts_build_dir, perf_scripts_pool_dir_name, perf_scripts_pool_genesis_txns_name]|join('/') }}" +# TODO as an option we may use ansible_env.HOME but it needs facts gathering +perf_scripts_pool_genesis_txns_path_remote: '$HOME/{{ perf_scripts_pool_genesis_txns_name }}' diff --git a/pool_automation/roles/perf_scripts/meta/main.yml b/pool_automation/roles/perf_scripts/meta/main.yml new file mode 100644 index 000000000..5376abc34 --- /dev/null +++ b/pool_automation/roles/perf_scripts/meta/main.yml @@ -0,0 +1,3 @@ +--- +dependencies: + - role: ansible_bootstrap diff --git a/pool_automation/roles/perf_scripts/molecule/base/molecule.yml b/pool_automation/roles/perf_scripts/molecule/base/molecule.yml new file mode 100644 index 000000000..2e25a89c5 --- /dev/null +++ b/pool_automation/roles/perf_scripts/molecule/base/molecule.yml @@ -0,0 +1,31 @@ +--- +dependency: + name: galaxy +driver: + name: docker +lint: + name: yamllint + options: + config-file: ../../yamllint +platforms: + - name: perf_scripts + image: ubuntu:16.04 # TODO parametrize + pre_build_image: true + groups: + - clients +provisioner: + name: ansible + lint: + name: ansible-lint + inventory: + links: + group_vars: ${MOLECULE_INVENTORY_GROUP_VARS:-../resources/group_vars} + playbooks: + converge: ../resources/playbooks/playbook_base.yml +scenario: + name: base +verifier: + name: testinfra + directory: ../resources/tests/base + lint: + name: flake8 diff --git a/pool_automation/roles/perf_scripts/molecule/default/host_vars/client1 b/pool_automation/roles/perf_scripts/molecule/default/host_vars/client1 new file mode 100644 index 000000000..5cc32260e --- /dev/null +++ b/pool_automation/roles/perf_scripts/molecule/default/host_vars/client1 @@ -0,0 +1,2 @@ +--- +ansible_host: client1 diff --git a/pool_automation/roles/perf_scripts/molecule/default/host_vars/client2 b/pool_automation/roles/perf_scripts/molecule/default/host_vars/client2 new file mode 100644 index 000000000..e9a796669 --- /dev/null +++ 
b/pool_automation/roles/perf_scripts/molecule/default/host_vars/client2 @@ -0,0 +1,2 @@ +--- +ansible_host: client2 diff --git a/pool_automation/roles/perf_scripts/molecule/default/host_vars/node1 b/pool_automation/roles/perf_scripts/molecule/default/host_vars/node1 new file mode 100644 index 000000000..83511dd10 --- /dev/null +++ b/pool_automation/roles/perf_scripts/molecule/default/host_vars/node1 @@ -0,0 +1,3 @@ +--- +tag_id: 1 +ansible_host: node1 diff --git a/pool_automation/roles/perf_scripts/molecule/default/host_vars/node2 b/pool_automation/roles/perf_scripts/molecule/default/host_vars/node2 new file mode 100644 index 000000000..83bb42fe6 --- /dev/null +++ b/pool_automation/roles/perf_scripts/molecule/default/host_vars/node2 @@ -0,0 +1,3 @@ +--- +tag_id: 2 +ansible_host: node2 diff --git a/pool_automation/roles/perf_scripts/molecule/default/host_vars/node3 b/pool_automation/roles/perf_scripts/molecule/default/host_vars/node3 new file mode 100644 index 000000000..89bbea3c0 --- /dev/null +++ b/pool_automation/roles/perf_scripts/molecule/default/host_vars/node3 @@ -0,0 +1,3 @@ +--- +tag_id: 3 +ansible_host: node3 diff --git a/pool_automation/roles/perf_scripts/molecule/default/host_vars/node4 b/pool_automation/roles/perf_scripts/molecule/default/host_vars/node4 new file mode 100644 index 000000000..9b66a2614 --- /dev/null +++ b/pool_automation/roles/perf_scripts/molecule/default/host_vars/node4 @@ -0,0 +1,3 @@ +--- +tag_id: 4 +ansible_host: node4 diff --git a/pool_automation/roles/perf_scripts/molecule/default/molecule.yml b/pool_automation/roles/perf_scripts/molecule/default/molecule.yml new file mode 100644 index 000000000..5c170b664 --- /dev/null +++ b/pool_automation/roles/perf_scripts/molecule/default/molecule.yml @@ -0,0 +1,68 @@ +--- +scenario: + name: default + +driver: + name: docker + +parameters: + node_config: &node_config + image: solita/ubuntu-systemd:16.04 # TODO parametrize + command: ${MOLECULE_DOCKER_COMMAND:-""} + volumes: + - 
/sys/fs/cgroup:/sys/fs/cgroup:ro + privileged: true + pre_build_image: true + networks: + - name: private_network + groups: + - nodes + + client_config: &client_config + image: ubuntu:16.04 # TODO parametrize + pre_build_image: true + networks: + - name: private_network + groups: + - clients + +provisioner: + name: ansible + lint: + name: ansible-lint + inventory: + links: + group_vars: ${MOLECULE_INVENTORY_GROUP_VARS:-../resources/group_vars} + host_vars: ./host_vars + playbooks: + converge: ../resources/playbooks/playbook.yml + +platforms: + - <<: *node_config + name: node1 + - <<: *node_config + name: node2 + - <<: *node_config + name: node3 + - <<: *node_config + name: node4 + - <<: *client_config + name: client1 + - <<: *client_config + name: client2 + +dependency: + name: galaxy + +lint: + name: yamllint + options: + config-file: ../../yamllint + +verifier: + name: testinfra + directory: ../resources/tests/base + additional_files_or_dirs: + - ../test_configured.py + lint: + name: flake8 diff --git a/pool_automation/roles/perf_scripts/molecule/resources/group_vars/all.yml b/pool_automation/roles/perf_scripts/molecule/resources/group_vars/all.yml new file mode 100644 index 000000000..21ab8b325 --- /dev/null +++ b/pool_automation/roles/perf_scripts/molecule/resources/group_vars/all.yml @@ -0,0 +1,7 @@ +--- +perf_scripts_ver: master +perf_scripts_indy_sdk_channel: master +perf_scripts_python3_indy_ver: 1.6.8.dev858 +perf_scripts_libindy_ver: 1.6.8~858 +perf_scripts_venv_name: perf_test_venv +perf_scripts_pool_genesis_txns_name: pool_transactions_genesis diff --git a/pool_automation/roles/perf_scripts/molecule/resources/playbooks/playbook.yml b/pool_automation/roles/perf_scripts/molecule/resources/playbooks/playbook.yml new file mode 100644 index 000000000..56d06658d --- /dev/null +++ b/pool_automation/roles/perf_scripts/molecule/resources/playbooks/playbook.yml @@ -0,0 +1,12 @@ +--- +- name: Converge pool + gather_facts: false + hosts: nodes + roles: + - role: 
indy_node + +- name: Converge clients + gather_facts: false + hosts: clients + roles: + - role: perf_scripts diff --git a/pool_automation/roles/perf_scripts/molecule/resources/playbooks/playbook_base.yml b/pool_automation/roles/perf_scripts/molecule/resources/playbooks/playbook_base.yml new file mode 100644 index 000000000..703ce38cc --- /dev/null +++ b/pool_automation/roles/perf_scripts/molecule/resources/playbooks/playbook_base.yml @@ -0,0 +1,8 @@ +--- +- name: Converge + gather_facts: false + hosts: clients + roles: + - role: perf_scripts + vars: + perf_scripts_configuration: false diff --git a/pool_automation/roles/perf_scripts/molecule/resources/tests/base/test_installed.py b/pool_automation/roles/perf_scripts/molecule/resources/tests/base/test_installed.py new file mode 100644 index 000000000..cf029d6fa --- /dev/null +++ b/pool_automation/roles/perf_scripts/molecule/resources/tests/base/test_installed.py @@ -0,0 +1,40 @@ +import pytest + +testinfra_hosts = ['clients'] + + +def test_venv_exists(host, venv_path): + assert host.file(venv_path).exists + assert host.file(venv_path).is_directory + + +def test_correct_packages_are_installed(host, ansible_vars): + libindy = host.package('libindy') + assert libindy.is_installed + + if ansible_vars['perf_scripts_libindy_ver'] is not None: + assert libindy.version == ansible_vars['perf_scripts_libindy_ver'] + + +def test_correct_python_packages_are_installed_inside_venv(host, ansible_vars, venv_path): + pip_path = "{}/bin/pip".format(venv_path) + + pip_packages = host.pip_package.get_packages(pip_path=pip_path) + + # TODO version check for VCS as package source + assert 'indy-perf-load' in pip_packages + assert 'python3-indy' in pip_packages + + # TODO python3-indy's package metadata doesn't match package version + # for non-stable packages, thus the check will fail for them + if (ansible_vars['perf_scripts_python3_indy_ver'] is not None and + 'dev' not in ansible_vars['perf_scripts_python3_indy_ver']): + assert 
pip_packages['python3-indy']['version'] == ansible_vars['perf_scripts_python3_indy_ver'] + + +def test_perf_processes_is_runable(host, venv_path): + assert host.run("{}/bin/perf_processes.py --help".format(venv_path)).rc == 0 + + +def test_perf_spike_load_is_runable(host, venv_path): + assert host.run("{}/bin/perf_spike_load.py --help".format(venv_path)).rc == 0 diff --git a/pool_automation/roles/perf_scripts/molecule/resources/tests/conftest.py b/pool_automation/roles/perf_scripts/molecule/resources/tests/conftest.py new file mode 100644 index 000000000..16f70842e --- /dev/null +++ b/pool_automation/roles/perf_scripts/molecule/resources/tests/conftest.py @@ -0,0 +1,20 @@ +import pytest + + +@pytest.fixture(scope="module") +def ansible_vars(host): + return host.ansible.get_variables() + + +@pytest.fixture(scope="module") +def venv_path(host, ansible_vars): + return "{}/{}".format( + host.user().home, + ansible_vars['perf_scripts_venv_name']) + + +@pytest.fixture(scope="module") +def pool_txns_path(host, ansible_vars): + return "{}/{}".format( + host.user().home, + ansible_vars['perf_scripts_pool_genesis_txns_name']) diff --git a/pool_automation/roles/perf_scripts/molecule/resources/tests/test_configured.py b/pool_automation/roles/perf_scripts/molecule/resources/tests/test_configured.py new file mode 100644 index 000000000..41eed8067 --- /dev/null +++ b/pool_automation/roles/perf_scripts/molecule/resources/tests/test_configured.py @@ -0,0 +1,14 @@ +import pytest + +testinfra_hosts = ['clients'] + + +def test_pool_txns_genesis_file_exists(host, pool_txns_path): + txns_file = host.file(pool_txns_path) + assert txns_file.exists + + +def test_perf_processes_can_connect(host, venv_path, pool_txns_path): + assert host.run( + "{}/bin/perf_processes.py --test_conn -g {}" + .format(venv_path, pool_txns_path)).rc == 0 diff --git a/pool_automation/roles/perf_scripts/tasks/configure.yml b/pool_automation/roles/perf_scripts/tasks/configure.yml new file mode 100644 index 
000000000..d3d54fc3b --- /dev/null +++ b/pool_automation/roles/perf_scripts/tasks/configure.yml @@ -0,0 +1,6 @@ +--- +- name: Push pool genesis txns file to client + copy: + src: "{{ perf_scripts_pool_genesis_txns_path_local }}" + dest: "{{ perf_scripts_pool_genesis_txns_path_remote }}" +... diff --git a/pool_automation/roles/perf_scripts/tasks/main.yml b/pool_automation/roles/perf_scripts/tasks/main.yml new file mode 100644 index 000000000..8058c6c59 --- /dev/null +++ b/pool_automation/roles/perf_scripts/tasks/main.yml @@ -0,0 +1,77 @@ +--- +- name: Check that required variables are specified + assert: + that: + - lookup('vars', item, default='') + msg: "{{ lookup('vars', item, default='undefined')|string }}" + loop: + - perf_scripts_build_dir + - perf_scripts_ver + - perf_scripts_venv_name + - perf_scripts_indy_sdk_channel + - perf_scripts_pool_dir_name + - perf_scripts_pool_genesis_txns_name + - perf_scripts_pool_genesis_txns_path_local + - perf_scripts_pool_genesis_txns_path_remote + + +# TODO move the following three tasks (HTTPS, sovrin key and sovrin repos) +# into separate role +- name: Install HTTPS support for apt + apt: + name: + - apt-transport-https + - ca-certificates + update_cache: true + become: true + +- name: Add sovrin key + apt_key: + keyserver: keyserver.ubuntu.com + id: 3B75B82CF342D9FB + become: true + +- name: Add sovrin repositories for indy-sdk + apt_repository: + repo: "deb https://repo.sovrin.org/sdk/deb xenial {{ perf_scripts_indy_sdk_channel }}" + state: present + become: true + +# TODO rename or split +- name: Install pip and other required packages + apt: + name: + - libsodium18 # TODO might depend on target platform package distributions + - "libindy{{ ('=' ~ perf_scripts_libindy_ver) if perf_scripts_libindy_ver else '' }}" + - python3-pip + - python-setuptools # needed for ansible's pip (TODO actually depends on python used by ansible) + - virtualenv + - git + - vim # TODO think about other useful tools and move to 
client_install + update_cache: true + become: true + +- name: Install virtualenv globally + pip: + name: + - virtualenv + executable: pip3 + become: true + +- name: Install python3-indy + pip: + name: "python3-indy=={{ perf_scripts_python3_indy_ver }}" + virtualenv: "$HOME/{{ perf_scripts_venv_name }}" # $HOME as a workaround + virtualenv_python: python3 + state: present + when: not not perf_scripts_python3_indy_ver + +- name: Install performance scripts from the VCS + pip: + name: "git+https://github.com/hyperledger/indy-node.git@{{ perf_scripts_ver }}#egg=subdir&subdirectory=scripts/performance" + virtualenv: "$HOME/{{ perf_scripts_venv_name }}" # $HOME as a workaround + virtualenv_python: python3 + +- name: Configuration tasks + include_tasks: configure.yml + when: perf_scripts_configuration diff --git a/pool_automation/roles/pool_install/defaults/main.yml b/pool_automation/roles/pool_install/defaults/main.yml deleted file mode 100644 index 791667c20..000000000 --- a/pool_automation/roles/pool_install/defaults/main.yml +++ /dev/null @@ -1,2 +0,0 @@ ---- -network_name: sandbox diff --git a/pool_automation/roles/pool_install/handlers/main.yml b/pool_automation/roles/pool_install/handlers/main.yml deleted file mode 100644 index ed97d539c..000000000 --- a/pool_automation/roles/pool_install/handlers/main.yml +++ /dev/null @@ -1 +0,0 @@ ---- diff --git a/pool_automation/roles/pool_install/meta/main.yml b/pool_automation/roles/pool_install/meta/main.yml deleted file mode 100644 index c0cda4959..000000000 --- a/pool_automation/roles/pool_install/meta/main.yml +++ /dev/null @@ -1,3 +0,0 @@ ---- -dependencies: - - role: node_install diff --git a/pool_automation/scripts/README.md b/pool_automation/scripts/README.md new file mode 100644 index 000000000..34adbeaa8 --- /dev/null +++ b/pool_automation/scripts/README.md @@ -0,0 +1,55 @@ +# Helper scripts for Pool Automation Ansible roles + +## Quickstart + +- `namespace-config.py`: helps to create inventory directory with group 
variables + that override Ansible Roles' defaults. The inventory directory then might be + passed either to [Ansible command line tools][2aceed7f] or + [molecule][1d2f4724]. + + [2aceed7f]: https://docs.ansible.com/ansible/latest/user_guide/command_line_tools.html "ansible tools" + [1d2f4724]: https://molecule.readthedocs.io/en/latest/index.html "molecule" + +## Scripts + +### namespace-config.py + +Used to create inventory directory with user specified values for group +variables to use in Ansible roles. + +The tool explores default values for each role it found and provides +command line API to override them. + +The tool creates directory with the structure acceptable for Ansible inventory +directories as declared in [Working With Inventory](https://docs.ansible.com/ansible/latest/user_guide/intro_inventory.html#splitting-out-host-and-group-specific-data). Also it adds an inventory file for +`localhost` to make its specification explicit. + +```shell +inventory-dir/ +└── group_vars +| └── all +| ├── _config.yml +| ... +└── localhost.yml +``` + +So it is possible to place your inventory file(s) here (e.g. `inventory-dir/hosts`) +and pass either the whole directory or an inventory file to [Ansible command line tools][2aceed7f]. + +Also you may pass the `inventory-dir/group_vars` to molecule's provisioner +as a link as described [here](https://molecule.readthedocs.io/en/latest/configuration.html#provisioner). + +### Requirements + +- Python 2 + +### Environment variables + +- `ANSIBLE_PROJECT_DIR` if defined used by the tool as a directory to search + for roles. Otherwise (by default) the tool searches for roles in its parent + directory. + +### Command line API + +Please refer to `namespace-config.py --help` for the detailed information +regarding available arguments. 
diff --git a/pool_automation/scripts/namespace-config.py b/pool_automation/scripts/namespace-config.py new file mode 100644 index 000000000..ddc8e4b5f --- /dev/null +++ b/pool_automation/scripts/namespace-config.py @@ -0,0 +1,339 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +import os +import sys +import re +from inspect import getsourcefile +import json +import glob +import argparse +import yaml +from collections import OrderedDict +import logging +import logging.config + +logger = logging.getLogger(__name__) + +DEF_LOGGING_FORMAT = ("%(asctime)s - %(name)s - %(levelname)s - " + "[%(filename)s:%(lineno)d]: %(message)s") +DEF_LOGLEVEL = logging.INFO + +DEFAULT_INV_SCHEME = """ +localhosts.yml: + all: + children: + localhosts: + vars: + ansible_connection: local + ansible_python_interpreter: '{{ ansible_playbook_python }}' + hosts: + localhost: + aws_clients_provisioner: + aws_nodes_provisioner: + +host_vars: + aws_nodes_provisioner: + - aws_manage + aws_clients_provisioner: + - aws_manage + +group_vars: + all: + - ansible_bootstrap + localhosts: + - aws_manage + nodes: + - indy_node + clients: + - indy_cli + - perf_scripts +""" + + +class Role(object): + + def __init__(self, path): + self.path = path + self.name = os.path.basename(path) + + self.defaults = {} + self.defaults_path = "{}/defaults/main.yml".format(self.path) + + self._load_defaults() + + def _load_defaults(self): + try: + with open(self.defaults_path, 'r') as _f: + self.defaults = yaml.safe_load(_f) + except IOError: + logger.debug("Ignoring absense of the file {}".format(self.defaults_path)) + + +class RoleRef(object): + def __init__(self, role): + self.role = role + self.vars = {} + + def set_vars(self, role_vars): + # treat role's defaults as a collection of only possible vars + if self.defaults: + self.vars.update( + {k: v for k, v in role_vars.iteritems() if k in self.defaults}) + + @property + def name(self): + return self.role.name + + @property + def defaults(self): + return 
self.role.defaults + + +class InvBase(object): + def __init__(self, inv_dir, name, *rel_dirs): + if not re.search(r'\.(yml|yaml)$', name): + name = "{}.yml".format(name) + self.name = name + self.path = os.path.join(os.path.join(inv_dir, *rel_dirs), name) + + def _dump(self, stream): + raise NotImplemented + + def dump(self): + with open(self.path, "w") as _f: + _f.write('---\n') + self._dump(_f) + _f.write('...\n') + + +class Inv(InvBase): + def __init__(self, inv_dir, name, content): + self.content = content + super(Inv, self).__init__(inv_dir, name) + + def _dump(self, stream): + stream.write(yaml.safe_dump(self.content, default_flow_style=False)) + + +class InvVars(InvBase): + + def __init__(self, inv_dir, vars_name, name, roles_refs): + self.roles_refs = roles_refs + super(InvVars, self).__init__(inv_dir, name, vars_name) + + def _dump(self, stream): + for role_ref in self.roles_refs: + stream.write("\n# {0} {1} {0}\n".format('=' * 20, role_ref.name)) + + if role_ref.vars: + stream.write(yaml.safe_dump(role_ref.vars, default_flow_style=False)) + + if role_ref.defaults: + _s = yaml.safe_dump(role_ref.defaults, default_flow_style=False) + stream.write(''.join(["\n# defaults\n\n"] + + ["#{}".format(_l) for _l in _s.splitlines(True)])) + + +def _load_roles(): + proj_dir = os.getenv('ANSIBLE_PROJECT_DIR') + if not proj_dir: + script_path = os.path.abspath(getsourcefile(lambda: 0)) + proj_dir = os.path.abspath(os.path.join(os.path.dirname(script_path), '..')) + else: + proj_dir = os.path.abspath(proj_dir) + + roles = [Role(r) for r in glob.iglob("{}/roles/*".format(proj_dir))] + + if not roles: + logger.error("No roles are found in {}".format(proj_dir)) + raise RuntimeError("No roles are found in {}".format(proj_dir)) + + return {r.name: r for r in roles} + + +def _load_inv_scheme(): + inv_scheme_path = os.getenv('ANSIBLE_INV_SCHEME') + if inv_scheme_path: + with open(inv_scheme_path, 'r') as _f: + return yaml.safe_load(_f) + else: + return 
yaml.safe_load(DEFAULT_INV_SCHEME) + + +def _reset_logging(): + for handler in logging.root.handlers[:]: + handler.flush() + logging.root.removeHandler(handler) + handler.close() + + +def _set_logging(logconfig_path=None): + _reset_logging() + if logconfig_path: + with open(logconfig_path, "rb") as f: + logging.config.dictConfig( + json.load(f, object_pairs_hook=OrderedDict) + ) + else: + logging.basicConfig(level=DEF_LOGLEVEL, format=DEF_LOGGING_FORMAT) + + +def _arg_name(var_name, arg_prefix=None): + return "{}.{}".format(arg_prefix, var_name) if arg_prefix else var_name + + +def _parse_args(roles, inv_mode, inv_scheme=None): + parser = argparse.ArgumentParser( + description="Namespace Configuration Tool", + formatter_class=argparse.ArgumentDefaultsHelpFormatter) + + parser.add_argument('namespace-dir', + help='path to namespace directory') + + parser.add_argument("--namespace-name", metavar="STR", default=None, + help=("Name of the namespace. " + "Default: basename of the 'namespace-dir'")) + + parser.add_argument("--inventory-name", metavar="STR", default='inventory', + help=("Name of the inventory directory inside " + "the 'namespace-dir'")) + + parser.add_argument("--logconfig", metavar="PATH", default=None, + help=("Path to json-formatted logging configuration" + " file, if not defined the basic" + " one will be used")) + + parser.add_argument("--show-defaults", action="store_true", + help="Show defaults and exit") + + def add_role_group(parent, role, arg_prefix=None, title=None, descr=None): + + if role.defaults is None: + return + + _group = parent.add_argument_group(title, descr) + for p, d in role.defaults.iteritems(): + _help_kwargs = {'metavar': "{}".format(type(d).__name__).upper()} + if type(d) is list: + _help_kwargs['nargs'] = '+' + _help_kwargs['metavar'] = 'ITEM' + elif type(d) is dict: + _help_kwargs['metavar'] = 'JSON' + _help_kwargs['type'] = json.loads + elif type(d) in (int, float): + _help_kwargs['type'] = type(d) + + 
_group.add_argument("--{}".format(_arg_name(p, arg_prefix)), **_help_kwargs) + + if inv_mode == 'plays': + for inv, inv_spec in inv_scheme.iteritems(): + if inv not in ('host_vars', 'group_vars'): + continue + + for inv_obj, role_names in inv_spec.iteritems(): + for role_name in role_names: + add_role_group(parser, roles[role_name], arg_prefix=inv_obj, + title="'{}' {} vars for '{}' role" + .format(inv_obj, inv.split('_')[0], roles[role_name].name)) + else: + for role in roles.itervalues(): + add_role_group(parser, role, title="'{}' role vars".format(role.name)) + + return vars(parser.parse_args()) + + +def _dump_defaults(roles): + for role in roles.itervalues(): + if role.defaults is not None: + print(role.name.upper()) + print(yaml.safe_dump(role.defaults, default_flow_style=False)) + + +def main(): + + _set_logging() + + roles = _load_roles() + inv_mode = os.getenv('ANSIBLE_INVENTORY_MODE', 'plays') + inv_scheme = _load_inv_scheme() if inv_mode == 'plays' else None + + args = _parse_args(roles, inv_mode, inv_scheme) + + # config logging + if args['logconfig'] is not None: + _set_logging(args['logconfig']) + + logger.debug("Cmd line arguments: {}".format(args)) + + if args["show_defaults"]: + _dump_defaults(roles) + exit(0) + + inv_dir = os.path.join(args['namespace-dir'], args['inventory_name']) + namespace_name = (args['namespace_name'] if args['namespace_name'] + else os.path.basename(args['namespace-dir'])) + + # create inventory dir hierarchy + for d in ('host_vars', 'group_vars'): + _path = os.path.join(inv_dir, d) + if not os.path.isdir(_path): + os.makedirs(_path) + + def get_user_vars(role, arg_prefix=None): + # construct user specified vars + user_vars = {} + for v_name in role.defaults.keys(): + arg_name = _arg_name(v_name, arg_prefix) + if args.get(arg_name): + user_vars[v_name] = args[arg_name] + return user_vars + + namespace_spec = { + 'all': { + 'vars': { + 'namespace_dir': os.path.join('{{ inventory_dir }}', '..'), + 'namespace_name': 
namespace_name, + 'namespace_dir_relative': '..' + } + } + } + + inventories = [Inv(inv_dir, 'namespace.yml', namespace_spec)] + if inv_mode == 'plays': + for inv, inv_spec in inv_scheme.iteritems(): + if inv in ('host_vars', 'group_vars'): + for inv_obj, role_names in inv_spec.iteritems(): + _roles = [] + for role_name in role_names: + role_ref = RoleRef(roles[role_name]) + role_ref.set_vars(get_user_vars(role_ref, inv_obj)) + _roles.append(role_ref) + inventories.append(InvVars(inv_dir, inv, inv_obj, _roles)) + else: + inventories.append(Inv(inv_dir, inv, inv_spec)) + else: # role oriented logic + localhost_spec = { + 'all': { + 'hosts': { + 'localhost': { + 'ansible_connection': 'local', + 'ansible_python_interpreter': '{{ ansible_playbook_python }}' + } + } + } + } + inventories.append(Inv(inv_dir, 'localhost.yml', localhost_spec)) + _roles = [] + for role in roles.itervalues(): + role_ref = RoleRef(role) + role_ref.set_vars(get_user_vars(role_ref)) + _roles.append(role_ref) + inventories.append(InvVars(inv_dir, 'group_vars', 'all', _roles)) + + for inv in inventories: + inv.dump() + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/pool_automation/tasks/check_inventory.yml b/pool_automation/tasks/check_inventory.yml new file mode 100644 index 000000000..89e95daad --- /dev/null +++ b/pool_automation/tasks/check_inventory.yml @@ -0,0 +1,11 @@ +--- +- name: Check that 'inventory_dir' is in 'ansible_inventory_sources' + fail: + msg: >- + Unexpected inventory directory {{ inventory_dir }}: + not one of specified {{ ansible_inventory_sources }}. + Seems as wrongly autodetected. + when: + - inventory_dir|default(false) + - (inventory_dir|realpath) not in ansible_inventory_sources +... 
diff --git a/pool_automation/roles/common/yamllint b/pool_automation/yamllint similarity index 100% rename from pool_automation/roles/common/yamllint rename to pool_automation/yamllint diff --git a/requirement.txt b/requirement.txt deleted file mode 100644 index 461b2063d..000000000 --- a/requirement.txt +++ /dev/null @@ -1 +0,0 @@ -indy-client \ No newline at end of file diff --git a/scripts/add_keys.py b/scripts/add_keys.py deleted file mode 100644 index 3437a6290..000000000 --- a/scripts/add_keys.py +++ /dev/null @@ -1,148 +0,0 @@ -#! /usr/bin/env python3 - -""" -This script registers new trust anchors (client keys) -To add new key you need to use existing Steward and it's seed -""" - -import os -import sys -from itertools import groupby - -from stp_core.loop.looper import Looper -from plenum.common.signer_did import DidSigner -from plenum.common.types import HA -from stp_core.common.log import getlogger -from plenum.test.helper import eventually, eventuallyAll - -from indy_common.config_util import getConfig -from indy_common.constants import TRUST_ANCHOR -from indy_client.client.client import Client -from indy_client.client.wallet.wallet import Wallet - - -logger = getlogger() - -# loading cluster configuration -config = getConfig() - -requestTTL = 10 # seconds - -# load test configuration -assert len(sys.argv) >= 3 - -stewardName = sys.argv[1] -stewardSeed = str.encode(sys.argv[2]) -trustAnchorSeeds = sys.argv[3:] - -if not trustAnchorSeeds: - seed_file_path = "{}/load_test_clients.list".format(os.getcwd()) - trustAnchorSeeds = [] - with open(seed_file_path, "r") as file: - trustAnchorSeeds = [line.strip().split(":")[1] for line in file] - - -def spawnClient(clientName, port, signerSeed, host='0.0.0.0'): - clientAddress = HA(host, port) - # from plenum.client.request_id_store import FileRequestIdStore - # walletFilePath = os.path.join(config.baseDir, "wallet") - # print("Storing request ids in {}".format(walletFilePath)) - # store = 
FileRequestIdStore(walletFilePath) - # wallet = Wallet(clientName, store) - wallet = Wallet(clientName) - wallet.addIdentifier(signer=DidSigner(seed=signerSeed)) - client = Client(clientName, ha=clientAddress) - return client, wallet - - -async def checkReply(client, request_key): - _, status = client.getReply(*request_key) - logger.info("Number of received messages {}".format(len(client.inBox))) - groups = groupby(client.inBox, key=lambda x: x[0]) - for key, group in groups: - logger.info("Group {}".format(key['op'])) - for msg in list(group): - logger.info(" {}".format(msg)) - succeeded = status == "CONFIRMED" - return succeeded - - -async def doRequesting(client, wallet, op): - signedOp = wallet.signOp(op) - logger.info("Client {} sending request {}".format(client, op)) - request = client.submitReqs(signedOp)[0][0] - args = (request.identifier, request.reqId) - await eventually(checkReply, *args, timeout=requestTTL) - - -def checkIfConnectedToAll(client): - connectedNodes = client.nodestack.connecteds - connectedNodesNum = len(connectedNodes) - totalNodes = len(client.nodeReg) - logger.info("Connected {} / {} nodes".format(connectedNodesNum, totalNodes)) - for node in connectedNodes: - logger.info(" {}".format(node)) - - if connectedNodesNum == 0: - raise Exception("Not connected to any") - elif connectedNodesNum < totalNodes * 0.8: - raise Exception("Not connected fully") - else: - return True - - -async def ensureConnectedToNodes(client): - wait = 5 - logger.info( - "waiting for {} seconds to check client connections to nodes...".format(wait)) - await eventuallyAll(lambda: checkIfConnectedToAll(client), retryWait=.5, totalTimeout=wait) - - -def addNyms(): - with Looper(debug=getConfig().LOOPER_DEBUG) as looper: - - from indy_client.test.helper import createNym - - # Starting clients - print("Spawning client") - client, wallet = spawnClient(stewardName, 5678, stewardSeed) - client.registerObserver(wallet.handleIncomingReply) - print("Adding it to looper") - 
looper.add(client) - print("Running it") - looper.run(ensureConnectedToNodes(client)) - - # Creating request - print("Creating request") - bad = [] - for seed in trustAnchorSeeds: - signer = DidSigner(seed=seed.encode()) - nym = signer.identifier - verkey = signer.verkey - # Sending requests - print("Creating nym for seed {}".format(seed)) - try: - createNym( - looper=looper, - nym=nym, - creatorClient=client, - creatorWallet=wallet, - verkey=verkey, - role=TRUST_ANCHOR) - print("Successfully created nym for {}".format(seed)) - except Exception as ex: - bad.append(seed) - print("Failed to create nym for {}".format(seed)) - - print("=======================") - if not bad: - print("All nyms created successfully") - else: - print("Failed to created nyms for:") - for nym in bad: - print("-", nym) - print("=======================") - - -if __name__ == '__main__': - addNyms() diff --git a/scripts/add_new_node b/scripts/add_new_node deleted file mode 100644 index bda2bd8af..000000000 --- a/scripts/add_new_node +++ /dev/null @@ -1,88 +0,0 @@ -#! /usr/bin/env python3 - -from stp_core.loop.looper import Looper -from plenum.common.signer_simple import SimpleSigner - -from indy_client.client.client import Client -from indy_common.config_util import getConfig - - -# TODO: This code is obsolete, moreover we have a CLI command for this. - - -looper = Looper(debug=getConfig().LOOPER_DEBUG) - -config = getConfig() -basedirpath = config.baseDir - -# Steward that will be used to create a new steward by submitting a -# NYM transaction -# TODO: Make this name configurable -clientName = 'Steward1' - -# This is because i know the seed. 
-# TODO: Make this configurable, maybe read the private key/seed from -# command line -seed = ('0' * (32 - len(clientName)) + clientName).encode() - -signer = SimpleSigner(seed=seed) -client_address = ('0.0.0.0', 9760) - -client = Client(clientName, - nodeReg=None, - ha=client_address, - signer=signer, - basedirpath=basedirpath) - -looper.add(client) - -# give the client time to connect -# TODO: Use looper and `eventually` to check whether request succeeded -looper.runFor(3) - -# Steward that will be used to create a new node by submitting a -# NODE transaction. This steward is like the owner of this node. -# TODO: Make this name configurable -name = "Steward5" -# This is the seed i want to use for creating keys for the new steward. -# TODO: Make this configurable, maybe read the private key/seed from -# command line -sseed = ('0' * (32 - len(name)) + name).encode() -verkey = SimpleSigner(seed=sseed).verkey -client.submitNewSteward("Steward5", verkey) -# give the client time to connect -# TODO: Use looper and `eventually` to check whether request succeeded -looper.runFor(3) - -# This is the name of the newly created node. This would be received -# from the administrator of the node. -# TODO: Make this name configurable -name = "Node5" -# This is the seed i used creating keys for the new node. This would be -# received from the administrator of the node. -# TODO: Make this configurable, maybe read the private key/seed from -# command line - -nseed = ('0' * (32 - len(name)) + name).encode() -nodeverkey = SimpleSigner(seed=nseed).verkey - -# Here we would add the ip and port of the node. This would be received -# from the administrator of the node. 
-nodeStackHa = ("127.0.0.1", 9709) -clientStackHa = ("127.0.0.1", 9710) - -signer = SimpleSigner(seed=sseed) -client_address = ('0.0.0.0', 9761) -client = Client("Steward5", - None, - ha=client_address, - signer=signer, - basedirpath=basedirpath) - -looper.add(client) -# TODO: Use looper and `eventually` to check whether request succeeded -looper.runFor(3) - -client.submitNewNode(name, nodeverkey, nodeStackHa, clientStackHa) -# TODO: Use looper and `eventually` to check whether request succeeded -looper.runFor(3) diff --git a/scripts/build_graph_from_csv b/scripts/build_graph_from_csv index 362455b17..8bc2f676e 100755 --- a/scripts/build_graph_from_csv +++ b/scripts/build_graph_from_csv @@ -20,6 +20,9 @@ Graph: stats_metric - min, lo, avg, hi, max combined metric_per_sec - sum of metric values averaged over frame time metric_count_per_sec - number of metric events averaged over frame time + +An option --output allows user to define a filepath for an output image (e.g. --output /home/me/Documents/out.png). +If this option is provided, the script will not display the figure on a screen. """ from typing import List @@ -29,10 +32,11 @@ from collections import namedtuple from datetime import datetime import pandas as pd import argparse +import os def add_subplot(ax, name, items, data, log_scale=False): - ax.set_title(name) + ax.set_title(name, verticalalignment='center') ax.grid(True) ax.set_yscale("log" if log_scale else "linear") @@ -40,9 +44,7 @@ def add_subplot(ax, name, items, data, log_scale=False): for item in items: ax.plot(timestamps, data[item], label=item, ls='-', lw=2) - - ax.legend(bbox_to_anchor=(1, 1), loc=2, prop={'size': 8}, borderaxespad=0.) - + ax.legend(bbox_to_anchor=(1, 1), loc=2, borderaxespad=0.) 
PlotInfo = namedtuple('GraphInfo', 'title log_scale items') @@ -80,7 +82,11 @@ def parse_plot_list(text: str) -> List[PlotInfo]: def build_graph(): + plt.rcParams.update({'font.size': 10}) + plt.rcParams["figure.figsize"] = [23,12] + parser = argparse.ArgumentParser(description='Gets file path and graph name to build a graph') + parser.add_argument('--output', required=False, help='output picture file path', dest="output") parser.add_argument('filepath', type=str, help='the csv file absolute path') parser.add_argument('--plots', required=False, help='plot list') args = parser.parse_args() @@ -131,8 +137,12 @@ def build_graph(): mng.resize(*mng.window.maxsize()) plt.subplots_adjust(left=0.05, right=0.85, bottom=0.07, top=0.93) plt.suptitle(file_path) - plt.show() + if not args.output: + plt.show() + else: + output = os.path.expanduser(args.output) + plt.savefig(output, bbox_inches='tight') if __name__ == '__main__': build_graph() diff --git a/scripts/create_dirs.sh b/scripts/create_dirs.sh index 24b4e6568..d7567d8a2 100755 --- a/scripts/create_dirs.sh +++ b/scripts/create_dirs.sh @@ -1,7 +1,7 @@ #!/usr/bin/env bash # dirs to be created -node_dirs="/etc/indy /var/lib/indy /var/log/indy /home/${USER}/.indy-cli" +node_dirs="/etc/indy /var/lib/indy /var/log/indy" # create dirs for dr in $node_dirs @@ -20,9 +20,6 @@ if [ ! 
-f /etc/indy/indy_config.py ]; then echo "BACKUP_DIR = '/var/lib/indy/backup'" | sudo tee -a /etc/indy/indy_config.py echo "PLUGINS_DIR = '/var/lib/indy/plugins'" | sudo tee -a /etc/indy/indy_config.py echo "NODE_INFO_DIR = '/var/lib/indy'" | sudo tee -a /etc/indy/indy_config.py - - echo "CLI_BASE_DIR = '~/.indy-cli/'" | sudo tee -a /etc/indy/indy_config.py - echo "CLI_NETWORK_DIR = '~/.indy-cli/networks'" | sudo tee -a /etc/indy/indy_config.py fi # grant permissions diff --git a/scripts/current_validators b/scripts/current_validators index c01be3c87..46e9db8e0 100755 --- a/scripts/current_validators +++ b/scripts/current_validators @@ -1,4 +1,4 @@ -# !/usr/bin/python3 +#! /usr/bin/python3 import string import json @@ -156,19 +156,19 @@ def parse_inputs(): parser.add_argument('--writeJson', help='Boolean flag. If set, the output is json. (Default: the output is csv.)', - action='set_true') + action='store_true') parser.add_argument('--protocolVersion', help='Ledger protocol version. legacy = 1, current = 2, the default is set to 2' - 'EXAMPLE: --protocolVersion 1') + '\nEXAMPLE: --protocolVersion 1') args = parser.parse_args() if not args.protocolVersion: - args.protocolVersion = '2' # Mike said leave this on protocol version 2 + args.protocolVersion = '2' # default to 2 elif args.protocolVersion == '1' or args.protocolVersion == '2': - args.protocolVersion + pass else: - log.error("INVALID PARAMS \nPlease enter a correct parameter" + log.error("Invalid protocolVersion" "\n EXAMPLE: protocolVersion 1") return args diff --git a/scripts/enable_bls b/scripts/enable_bls deleted file mode 100755 index b3a1c9964..000000000 --- a/scripts/enable_bls +++ /dev/null @@ -1,114 +0,0 @@ -#! 
/usr/bin/env python3 - -import argparse -import logging - -from plenum.common.config_util import getConfig -from plenum.common.constants import BLS_KEY, ALIAS, BLS_KEY_PROOF -from plenum.common.exceptions import OperationError, NoConsensusYet -from plenum.common.keygen_utils import init_bls_keys -from plenum.common.signer_did import DidSigner -from stp_core.common.log import getlogger, Logger -from stp_core.loop.eventually import eventually -from stp_core.loop.looper import Looper -from stp_core.network.port_dispenser import genHa -from stp_core.types import HA - -from indy_client.client.client import Client -from indy_client.client.wallet.node import Node -from indy_client.client.wallet.wallet import Wallet -from indy_common.util import get_reply_if_confirmed -from indy_common.config_helper import NodeConfigHelper - -config = getConfig() -config.enableStdOutLogging = False -Logger.setLogLevel(logging.INFO) -logger = getlogger() - - -def parse_args(): - parser = argparse.ArgumentParser( - description="Generate BLS keys for a node " - "by taking the node's name and seeds " - "and send NODE txn with the BLS key specified") - - parser.add_argument('--name', required=True, help='node name') - parser.add_argument('--node_dest', required=True, type=str, - help="Node's dest as specified in NODE txn") - parser.add_argument('--steward_seed', required=True, type=str, - help="Steward's seed that was used to generate Steward's DID") - parser.add_argument('--bls_seed', required=True, type=str, - help="Seed for a new BLS key") - - args = parser.parse_args() - - return args.name, args.node_dest, args.steward_seed, args.bls_seed - - -def send_node_txn(node_name, bls_key, steward_seed, node_dest): - port = genHa()[1] - ha = HA('0.0.0.0', port) - name = "steward_wallet" - client = Client(name, ha=ha) - - wallet = Wallet(name) - wallet.addIdentifier(signer=DidSigner(seed=steward_seed)) - - added = False - with Looper() as looper: - looper.add(client) - print('>>>>>>>>>>> Updating NYM 
with BLS keys...') - data = __prepare_node_data(node_name, *bls_key) - req = __send_node_request(wallet, client, - data, - steward_seed, node_dest) - print('>>>>>>>>>>>> Sent {}'.format(req)) - try: - looper.run( - eventually(_ensureReqCompleted, - (req.identifier, req.reqId), client, - timeout=20, retryWait=2)) - added = True - except NoConsensusYet: - raise TimeoutError('Request timed out') - - if added: - print('>>>>>>>>>>>> Successfully updated NYM with BLS keys') - else: - print('>>>>>>>>>>>> Generated BLS key {} but failed to add it to the pool'.format(bls_key)) - - -def __prepare_node_data(node_name, bls_key, bls_key_proof): - data = {} - data[ALIAS] = node_name - data[BLS_KEY] = bls_key - data[BLS_KEY_PROOF] = bls_key_proof - return data - - -def __send_node_request(wallet, client, - data, steward_seed, node_dest): - steward_nym = DidSigner(seed=steward_seed).identifier - - node = Node(node_dest, data, steward_nym) - wallet.addNode(node) - reqs = wallet.preparePending() - return client.submitReqs(*reqs)[0][0] - - -def _ensureReqCompleted(reqKey, client): - reply, err = get_reply_if_confirmed(client, *reqKey) - if err: - raise OperationError(err) - - if reply is None: - raise NoConsensusYet('not completed') - - -if __name__ == "__main__": - node_name, node_dest, steward_seed, bls_seed = parse_args() - steward_seed = steward_seed.encode() - bls_seed = bls_seed.encode() - config_helper = NodeConfigHelper(node_name, config) - bls_key = init_bls_keys(config_helper.keys_dir, node_name, bls_seed) - send_node_txn(node_name, bls_key, steward_seed, node_dest) diff --git a/scripts/get_metrics b/scripts/get_metrics index f666239a4..2d1f55297 100755 --- a/scripts/get_metrics +++ b/scripts/get_metrics @@ -201,11 +201,12 @@ def process_storage(storage, args): node_traffic = node_in.sum + node_out.sum client_traffic = (client_in.sum + client_out.sum) print(" Client incoming/outgoing: {:.2f} messages, {:.2f} traffic" - .format(client_in.count / client_out.count, 
client_in.sum / client_out.sum)) - print(" Node incoming/outgoing traffic: {:.2f}".format(node_in.sum / node_out.sum)) + .format(client_in.count / client_out.count, client_in.sum / client_out.sum if client_out.sum > 0 else 0)) + print(" Node incoming/outgoing traffic: {:.2f}".format(node_in.sum / node_out.sum if node_out.sum > 0 else 0)) print(" Node/client traffic: {:.2f}".format(node_traffic / client_traffic)) - print(" Node traffic per batch: {:.2f}".format(node_traffic / three_pc.count)) - print(" Node traffic per request: {:.2f}".format(node_traffic / three_pc.sum)) + if three_pc.count > 0: + print(" Node traffic per batch: {:.2f}".format(node_traffic / three_pc.count)) + print(" Node traffic per request: {:.2f}".format(node_traffic / three_pc.sum)) print("") print("Profiling info:") @@ -214,7 +215,10 @@ def process_storage(storage, args): break if m < MetricsName.NODE_PROD_TIME and \ m not in {MetricsName.REQUEST_PROCESSING_TIME, - MetricsName.BACKUP_REQUEST_PROCESSING_TIME}: + MetricsName.BACKUP_REQUEST_PROCESSING_TIME, + MetricsName.GC_GEN0_TIME, + MetricsName.GC_GEN1_TIME, + MetricsName.GC_GEN2_TIME}: continue acc = total.get(m) print(" {} : {}".format(str(m).split('.')[-1], @@ -242,14 +246,21 @@ def process_storage(storage, args): f.write("\n") +def detect_storage_type(path): + if os.path.isfile('{}.bin'.format(path)): + return KeyValueStorageType.BinaryFile + if any(f.endswith('.ldb') for f in os.listdir(path)): + return KeyValueStorageType.Leveldb + return KeyValueStorageType.Rocksdb + + if __name__ == '__main__': args = read_args() if args.data_dir is not None: - is_leveldb = any(f.endswith('.ldb') for f in os.listdir(args.data_dir)) - storage_type = KeyValueStorageType.Leveldb if is_leveldb \ - else KeyValueStorageType.Rocksdb - storage = initKeyValueStorage(storage_type, args.data_dir, "") + storage_type = detect_storage_type(args.data_dir) + location, name = os.path.split(args.data_dir) + storage = initKeyValueStorage(storage_type, location, name) 
process_storage(storage, args) exit() diff --git a/scripts/git b/scripts/git index d651dacd9..5640d6cab 100644 --- a/scripts/git +++ b/scripts/git @@ -1,7 +1,7 @@ #!/usr/bin/env bash # set -e -dirs=( "indy-common" "indy-client" "indy-node" "plenum" "anoncreds" "ledger" ) +dirs=( "indy-common" "indy-node" "plenum" "anoncreds" "ledger" ) # quiet pushd pushd() { diff --git a/scripts/indy b/scripts/indy deleted file mode 100755 index d4176133a..000000000 --- a/scripts/indy +++ /dev/null @@ -1,84 +0,0 @@ -#! /usr/bin/env python3 -""" -Convenience script for calling the indy command line interface (CLI). For now, -the CLI is designed for experimenting with the Indy Identity platform, and not -for creating a live consensus pool. For that, it's as simple as defining a node -registry, creating a looper, creating a node, and running it. - -$ indy - -or supply a command to be executed first - -$ indy "new nodes all" - -""" - -import logging -import os -import sys - -# NOTE: Loading of plugin should happen as early as possible -# So put all other required imports after loadPlugins function call below -from plenum.common.plugin_helper import loadPlugins -from indy_common.config_util import getConfig - -logging.root.handlers = [] -logger = logging.getLogger() -logger.propagate = False -logger.disabled = True - -config = getConfig() -baseDir = os.path.expanduser(config.CLI_BASE_DIR) -network_dir = os.path.expanduser(config.CLI_NETWORK_DIR) -if not os.path.exists(baseDir): - os.makedirs(baseDir) -if not os.path.exists(network_dir): - os.makedirs(network_dir) -loadPlugins(baseDir) - -# NOTE: Put all regular imports below (not related to loadplugin) -from indy_client.cli.cli import IndyCli -from stp_core.loop.looper import Looper - - -def run_cli(): - print("This client is deprecated! 
" - "Please, use the new libindy-based CLI: " - "https://github.com/hyperledger/indy-sdk/tree/master/cli") - - commands = sys.argv[1:] - - withNode = True if '--with-node' in commands else False - - with Looper(debug=config.LOOPER_DEBUG) as looper: - logFilePath = os.path.expanduser(os.path.join(config.CLI_BASE_DIR, config.logFilePath)) - cli = IndyCli(looper=looper, - basedirpath=baseDir, - ledger_base_dir=network_dir, - logFileName=logFilePath, - withNode=withNode - ) - - looper.run(cli.shell(*commands)) - - -default_config = """ -[node_reg] -Alpha = 127.0.0.1 8001 -Beta = 127.0.0.1 8003 -Gamma = 127.0.0.1 8005 -Delta = 127.0.0.1 8007 - -[client_node_reg] -AlphaC = 127.0.0.1 8002 -BetaC = 127.0.0.1 8004 -GammaC = 127.0.0.1 8006 -DeltaC = 127.0.0.1 8008 - -[storage_locations] -basePath = ~ -""" - - -if __name__ == '__main__': - run_cli() diff --git a/scripts/indy_old_cli_export_dids b/scripts/indy_old_cli_export_dids deleted file mode 100755 index 644bf7ca9..000000000 --- a/scripts/indy_old_cli_export_dids +++ /dev/null @@ -1,52 +0,0 @@ -#! /usr/bin/env python3 -""" -Script for export DIDs from client wallet. 
- -$indy_export_dids [-e ] -w -""" - -import argparse -import base64 -import json -import os -from pathlib import Path - -from indy_common.config_util import getConfig -from plenum.cli.constants import NO_ENV, WALLET_FILE_EXTENSION -from plenum.client.wallet import WalletStorageHelper - -ap = argparse.ArgumentParser() -ap.add_argument("-e", "--env_name", default=NO_ENV) -ap.add_argument("-w", "--wallet_name", required=True) -ap.add_argument("-f", "--output_file", required=False) -args = ap.parse_args() -env_name = args.env_name -wallet_name = args.wallet_name -output_file = args.output_file - -config = getConfig() -base_dir = os.path.expanduser(config.CLI_BASE_DIR) -wallets_dir = os.path.join(base_dir, config.walletsDir) - -wallet_dir = os.path.join(wallets_dir, env_name) -storage_helper = WalletStorageHelper(wallet_dir) -wallet_path = os.path.join(wallet_dir, "{}.{}".format(wallet_name, WALLET_FILE_EXTENSION)) -wallet = storage_helper.loadWallet(wallet_path) - -dids = [] -for did, did_signer in wallet.idsToSigners.items(): - seed_base64 = base64.b64encode(did_signer.seed).decode("ascii") - dids.append({"did": did, "seed": seed_base64}) - -dto = { - "version": 1, - "dids": dids -} - -out_file = output_file if output_file else \ - os.path.join(os.path.curdir, "{}_{}.exp_wallet".format(env_name, wallet_name)) - -path = Path(out_file) -path.write_text(json.dumps(dto)) - -print("Wallet successfully exported to {}".format(out_file)) diff --git a/scripts/load.py b/scripts/load.py deleted file mode 100644 index 3b14e2dc8..000000000 --- a/scripts/load.py +++ /dev/null @@ -1,60 +0,0 @@ -import logging -from time import perf_counter - -from plenum.common.signer_did import DidSigner -from indy_client.client.client import Client -from indy_client.client.wallet.wallet import Wallet -from indy_common.identity import Identity -from stp_core.common.log import getlogger, Logger -from stp_core.network.port_dispenser import genHa, HA -from stp_core.loop.looper import Looper -from 
plenum.test.helper import waitForSufficientRepliesForRequests -from indy_common.config_util import getConfig - -numReqs = 100 -splits = 1 - -Logger.setLogLevel(logging.WARNING) -logger = getlogger() - - -def sendRandomRequests(wallet: Wallet, client: Client, count: int): - print('{} random requests will be sent'.format(count)) - for i in range(count): - idr, signer = wallet.addIdentifier() - idy = Identity(identifier=idr, - verkey=signer.verkey) - wallet.addTrustAnchoredIdentity(idy) - reqs = wallet.preparePending() - return client.submitReqs(*reqs)[0] - - -def put_load(): - port = genHa()[1] - ha = HA('0.0.0.0', port) - name = "hello" - wallet = Wallet(name) - wallet.addIdentifier( - signer=DidSigner(seed=b'000000000000000000000000Steward1')) - client = Client(name, ha=ha) - with Looper(debug=getConfig().LOOPER_DEBUG) as looper: - looper.add(client) - print('Will send {} reqs in all'.format(numReqs)) - requests = sendRandomRequests(wallet, client, numReqs) - start = perf_counter() - for i in range(0, numReqs, numReqs // splits): - print('Will wait for {} now'.format(numReqs // splits)) - s = perf_counter() - reqs = requests[i:i + numReqs // splits + 1] - waitForSufficientRepliesForRequests(looper, client, requests=reqs, - customTimeoutPerReq=100, - override_timeout_limit=True) - print('>>> Got replies for {} requests << in {}'. 
- format(numReqs // splits, perf_counter() - s)) - end = perf_counter() - print('>>>Total {} in {}<<<'.format(numReqs, end - start)) - exit(0) - - -if __name__ == "__main__": - put_load() diff --git a/scripts/load_multi.py b/scripts/load_multi.py deleted file mode 100644 index 49d4354ac..000000000 --- a/scripts/load_multi.py +++ /dev/null @@ -1,58 +0,0 @@ -import concurrent -from concurrent.futures import ProcessPoolExecutor -from concurrent.futures import ThreadPoolExecutor - -import math - -import os - -from scripts.load import put_load - - -# Each task is a call to `put_load` - -def soft_blow(num_tasks=100, use_processes=False): - executor = ProcessPoolExecutor if use_processes else ThreadPoolExecutor - - with executor(max_workers=10) as e: - for _ in range(num_tasks): - e.submit(put_load) - - -# Defining at module level so it can be pickled -def _task_for_proc(num_threads, num_tasks): - print('Executing {} tasks for a process with {} threads'.format( - num_tasks, num_threads)) - futrs = [] - with ThreadPoolExecutor(max_workers=num_threads) as te: - for _ in range(num_tasks): - fut = te.submit(put_load) - futrs.append(fut) - print('Waiting for futures: {}'.format(os.getpid())) - concurrent.futures.wait(futrs) - - -def hard_blow(): - # Note: This method might perform more tasks then `num_tasks`, - # if the values of `num_tasks`, `num_threads` are chosen such that they - # are not multiples of `num_procs` - - # TODO: WIP - - num_tasks = 10000 - num_procs = 4 - threads_per_proc = 10 - - tasks_per_proc = int(math.ceil(num_tasks / num_procs)) - - futrs = [] - with ProcessPoolExecutor(max_workers=num_procs) as pe: - for _ in range(num_procs): - fut = pe.submit(_task_for_proc, (threads_per_proc, tasks_per_proc)) - futrs.append(fut) - print('Waiting for futures: main') - concurrent.futures.wait(futrs) - - -if __name__ == '__main__': - soft_blow() diff --git a/scripts/load_test.py b/scripts/load_test.py deleted file mode 100644 index eb2d16eb1..000000000 --- 
a/scripts/load_test.py +++ /dev/null @@ -1,444 +0,0 @@ -#! /usr/bin/env python3 - -import argparse -import asyncio -import os -import time -import csv -import functools -from collections import namedtuple -from random import randint -from jsonpickle import json - -from stp_core.loop.looper import Looper - -from stp_core.common.log import getlogger -from plenum.common.types import HA -from plenum.common.util import randomString -from stp_core.network.port_dispenser import genHa -from plenum.common.signer_did import DidSigner - -from plenum.common.constants import \ - TARGET_NYM, TXN_TYPE, NYM, \ - ROLE, RAW, NODE,\ - DATA, ALIAS, CLIENT_IP, \ - CLIENT_PORT - -from plenum.test.helper import eventually -from plenum.test.test_client import \ - getAcksFromInbox, getNacksFromInbox, getRepliesFromInbox - -from indy_common.constants import ATTRIB, GET_ATTR -from indy_client.client.wallet.attribute import Attribute, LedgerStore -from indy_client.client.wallet.wallet import Wallet -from indy_client.client.client import Client -from indy_common.identity import Identity -from indy_common.constants import GET_NYM - - -logger = getlogger() - -TTL = 120.0 # 60.0 -CONNECTION_TTL = 30.0 -RETRY_WAIT = 0.25 - - -def parseArgs(): - - parser = argparse.ArgumentParser() - - parser.add_argument("-c", "--num_clients", - action="store", - type=int, - default=1, - dest="numberOfClients", - help="number of clients to use (set to -1 for all)") - - parser.add_argument("-r", "--num_requests", - action="store", - type=int, - default=1, - dest="numberOfRequests", - help="number of clients to use") - - parser.add_argument( - "-t", - "--request_type", - action="store", - type=str, - default="NYM", - dest="requestType", - help="type of requests to send, supported = NYM, GET_NYM, ATTRIB") - - parser.add_argument("--at-once", - action='store_true', - dest="atOnce", - help="if set client send all request at once") - - parser.add_argument("--timeout", - action="store", - type=int, - default=1, - 
dest="timeoutBetweenRequests", - help="number of seconds to sleep after each request") - - parser.add_argument("--clients-list", - action="store", - default="{}/load_test_clients.list".format( - os.getcwd()), - dest="clientsListFilePath", - help="path to file with list of client names and keys") - - parser.add_argument("--results-path", - action="store", - default=os.getcwd(), - dest="resultsPath", - help="output directory") - - parser.add_argument("--skip-clients", - action="store", - type=int, - default=0, - dest="numberOfClientsToSkip", - help="number of clients to skip from clients list") - - return parser.parse_args() - - -def createClientAndWalletWithSeed(name, seed, ha=None): - if isinstance(seed, str): - seed = seed.encode() - if not ha: - port = genHa()[1] - ha = HA('0.0.0.0', port) - wallet = Wallet(name) - wallet.addIdentifier(signer=DidSigner(seed=seed)) - client = Client(name, ha=ha) - return client, wallet - - -class Rotator: - - def __init__(self, collection): - self._collection = collection - self._index = 0 - - def __iter__(self): - return self - - def __next__(self): - if len(self._collection) == 0: - raise StopIteration() - if self._index >= len(self._collection): - self._index = 0 - x = self._collection[self._index] - self._index += 1 - return x - - -class ClientPoll: - - def __init__(self, filePath, limit=-1, skip=0): - self.__startPort = 5679 - self.__filePath = filePath - self.__limit = limit - self.__skip = skip - self._clientsWallets = [self._spawnClient(name, seed) - for name, seed in self._readCredentials()] - - @property - def clients(self): - for cli, _ in self._clientsWallets: - yield cli - - @staticmethod - def randomRawAttr(): - d = {"{}_{}".format(randomString(20), randint(100, 1000000)): "{}_{}". 
- format(randint(1000000, 1000000000000), randomString(50))} - return json.dumps(d) - - def submitNym(self, reqsPerClient=1): - - usedIdentifiers = set() - - def newSigner(): - while True: - signer = DidSigner() - idr = signer.identifier - if idr not in usedIdentifiers: - usedIdentifiers.add(idr) - return signer - - def makeRequest(cli, wallet): - signer = newSigner() - idy = Identity(identifier=signer.identifier, - verkey=signer.verkey) - - wallet.addTrustAnchoredIdentity(idy) - - return self.submitGeneric(makeRequest, reqsPerClient) - - def submitGetNym(self, reqsPerClient=1): - - ids = Rotator([wallet.defaultId - for _, wallet in self._clientsWallets]) - - def makeRequest(cli, wallet): - op = { - TARGET_NYM: next(ids), - TXN_TYPE: GET_NYM, - } - req = wallet.signOp(op) - wallet.pendRequest(req) - - return self.submitGeneric(makeRequest, reqsPerClient) - - def submitSetAttr(self, reqsPerClient=1): - - def makeRequest(cli, wallet): - attrib = Attribute(name=cli.name, - origin=wallet.defaultId, - value=self.randomRawAttr(), - ledgerStore=LedgerStore.RAW) - wallet.addAttribute(attrib) - - return self.submitGeneric(makeRequest, reqsPerClient) - - def submitGeneric(self, makeRequest, reqsPerClient): - corosArgs = [] - for cli, wallet in self._clientsWallets: - for _ in range(reqsPerClient): - makeRequest(cli, wallet) - reqs = wallet.preparePending() - sentAt = time.time() - cli.submitReqs(*reqs) - for req in reqs: - corosArgs.append([cli, wallet, req, sentAt]) - return corosArgs - - def _readCredentials(self): - with open(self.__filePath, "r") as file: - creds = [line.strip().split(":") for i, line in enumerate(file)] - return map(lambda x: (x[0], str.encode(x[1])), - creds[self.__skip:self.__skip + self.__limit]) - - def _spawnClient(self, name, seed, host='0.0.0.0'): - self.__startPort += randint(100, 1000) - address = HA(host, self.__startPort) - logger.info("Seed for client {} is {}, " - "its len is {}".format(name, seed, len(seed))) - return 
createClientAndWalletWithSeed(name, seed, address) - - -resultsRowFieldNames = [ - 'signerName', - 'signerId', - 'dest', - 'reqId', - 'transactionType', - 'sentAt', - 'quorumAt', - 'latency', - 'ackNodes', - 'nackNodes', - 'replyNodes'] -ResultRow = namedtuple('ResultRow', resultsRowFieldNames) - - -async def eventuallyAny(coroFunc, *args, retryWait: float = 0.01, - timeout: float = 5): - start = time.perf_counter() - - def remaining(): - return start + timeout - time.perf_counter() - - remain = remaining() - data = None - while remain >= 0: - res = await coroFunc(*args) - (complete, data) = res - if complete: - return data - remain = remaining() - if remain > 0: - await asyncio.sleep(retryWait) - remain = remaining() - return data - - -async def checkReply(client, requestId, identifier): - hasConsensus = False - acks, nacks, replies = [], [], [] - try: - # acks = client.reqRepStore.getAcks(requestId) - # nacks = client.reqRepStore.getNacks(requestId) - # replies = client.reqRepStore.getReplies(requestId) - acks = getAcksFromInbox(client, requestId) - nacks = getNacksFromInbox(client, requestId) - replies = getRepliesFromInbox(client, requestId) - hasConsensus = client.hasConsensus(identifier, requestId) - except KeyError: - logger.info("No replies for {}:{} yet".format(identifier, requestId)) - except Exception as e: - logger.warn( - "Error occured during checking replies: {}".format( - repr(e))) - finally: - return hasConsensus, (hasConsensus, acks, nacks, replies) - - -async def checkReplyAndLogStat(client, wallet, request, sentAt, writeResultsRow, stats): - hasConsensus, ackNodes, nackNodes, replyNodes = \ - await eventuallyAny(checkReply, client, - request.reqId, wallet.defaultId, - retryWait=RETRY_WAIT, timeout=TTL - ) - - endTime = time.time() - # TODO: only first hasConsensus=True make sense - quorumAt = endTime if hasConsensus else "" - latency = endTime - sentAt - - row = ResultRow(signerName=wallet.name, - signerId=wallet.defaultId, - 
dest=request.operation.get('dest'), - reqId=request.reqId, - transactionType=request.operation['type'], - sentAt=sentAt, - quorumAt=quorumAt, - latency=latency, - ackNodes=",".join(ackNodes), - nackNodes=",".join(nackNodes.keys()), - replyNodes=",".join(replyNodes.keys())) - stats.append((latency, hasConsensus)) - writeResultsRow(row._asdict()) - - -def checkIfConnectedToAll(client): - connectedNodes = client.nodestack.connecteds - connectedNodesNum = len(connectedNodes) - totalNodes = len(client.nodeReg) - logger.info("Connected {} / {} nodes". - format(connectedNodesNum, totalNodes)) - - if connectedNodesNum == 0: - raise Exception("Not connected to any") - elif connectedNodesNum < totalNodes * 0.8: - raise Exception("Not connected fully") - else: - return True - - -def printCurrentTestResults(stats, testStartedAt): - totalNum = len(stats) - totalLatency = 0 - successNum = 0 - for lat, hasConsensus in stats: - totalLatency += lat - successNum += int(bool(hasConsensus)) - avgLatency = totalLatency / totalNum if totalNum else 0.0 - secSinceTestStart = time.time() - testStartedAt - failNum = totalNum - successNum - throughput = successNum / secSinceTestStart - errRate = failNum / secSinceTestStart - logger.info( - """ - ================================ - Test time: {} - Average latency: {} - Throughput: {} - Error rate: {} - Succeeded: {} - Failed: {} - ================================ - """.format(secSinceTestStart, avgLatency, throughput, - errRate, successNum, failNum) - ) - - -def main(args): - - resultsFileName = \ - "perf_results_{x.numberOfClients}_" \ - "{x.numberOfRequests}_{0}.csv".format(int(time.time()), x=args) - resultFilePath = os.path.join(args.resultsPath, resultsFileName) - logger.info("Results file: {}".format(resultFilePath)) - - def writeResultsRow(row): - if not os.path.exists(resultFilePath): - resultsFd = open(resultFilePath, "w") - resultsWriter = csv.DictWriter( - resultsFd, fieldnames=resultsRowFieldNames) - resultsWriter.writeheader() - 
resultsFd.close() - resultsFd = open(resultFilePath, "a") - resultsWriter = csv.DictWriter( - resultsFd, fieldnames=resultsRowFieldNames) - resultsWriter.writerow(row) - resultsFd.close() - - stats = [] - - def buildCoros(coroFunc, corosArgs): - coros = [] - for args in corosArgs: - argsExt = args + [writeResultsRow, stats] - coros.append(functools.partial(coroFunc, *argsExt)) - return coros - - clientPoll = ClientPoll(args.clientsListFilePath, - args.numberOfClients, args.numberOfClientsToSkip) - - with Looper() as looper: - - # connect - - connectionCoros = [] - for cli in clientPoll.clients: - looper.add(cli) - connectionCoros.append( - functools.partial(checkIfConnectedToAll, cli)) - for coro in connectionCoros: - looper.run(eventually(coro, - timeout=CONNECTION_TTL, - retryWait=RETRY_WAIT, - verbose=False)) - - testStartedAt = time.time() - stats.clear() - - requestType = args.requestType - sendRequests = { - "NYM": clientPoll.submitNym, - "GET_NYM": clientPoll.submitGetNym, - "ATTRIB": clientPoll.submitSetAttr, - "ATTR": clientPoll.submitSetAttr - }.get(requestType) - - if sendRequests is None: - raise ValueError("Unsupported request type, " - "only NYM and ATTRIB/ATTR are supported") - - def sendAndWaitReplies(numRequests): - corosArgs = sendRequests(numRequests) - coros = buildCoros(checkReplyAndLogStat, corosArgs) - for coro in coros: - task = eventually(coro, - retryWait=RETRY_WAIT, - timeout=TTL, - verbose=False) - looper.run(task) - printCurrentTestResults(stats, testStartedAt) - logger.info("Sent and waited for {} {} requests" - .format(len(coros), requestType)) - - if args.atOnce: - sendAndWaitReplies(numRequests=args.numberOfRequests) - else: - for i in range(args.numberOfRequests): - sendAndWaitReplies(numRequests=1) - - -if __name__ == '__main__': - commandLineArgs = parseArgs() - main(commandLineArgs) diff --git a/scripts/performance/README.md b/scripts/performance/README.md index cc00f9505..d68d9d2f2 100644 --- a/scripts/performance/README.md +++ 
b/scripts/performance/README.md @@ -23,7 +23,7 @@ Default value is 0. '-g', '--genesis' : Path to file with genesis txns for the pool to connect to. Default value is "~/.indy-cli/networks/sandbox/pool_transactions_genesis". -'-s', '--seed' : Seed that will be used to generate submitter did. Default value is Trustee1. +'-s', '--seed' : Seed that will be used to generate submitter did. Default value is Trustee1. If test requires several did to be generated, several seed parameters could be provided. '-w', '--wallet_key' : Key to access encrypted wallet. Default value is "key". @@ -37,6 +37,9 @@ Files to be stored: * successful - request and response for successful txns * failed - request and error provided by libindy * nack_reject - request and error provided by pool +* \.log - log file + +'--short_stat' : If mentioned only total statistics is stored. Other files will be empty. '--sep' : Separator that will be used in output csv file. Do not use "," - it will be in conflict with JSON values. Default value is "|". @@ -63,6 +66,9 @@ Supported txns: * get_payment_sources - Get payment sources * payment - Perform payment * verify_payment - Verify performed payment +* cfg_writes - Config ledger txn - set writable property of pool to True +* demoted_node - Pool ledger txn - add new demoted node +* get_txn - Multi ledger txn - request txn by ledger id and seq_no Note: At the moment revoc_reg_entry requests could be used only with batch size equal to 1. After each entry write request revoc registery is recreated. So each entry req generates 3 request to ledger in total. @@ -71,7 +77,7 @@ Default value is 'p''. '-p', '--pool_config' : Pool config in form of JSON. The value will be passed to open_pool_ledger call. Default value is empty. Parameters description depends on libindy version and could be found in official sdk documentation. -'-l', '--load_rate' : Batch send rate in txns per sec. Batches will be evenly distributed within second. Default 10. 
+'-l', '--load_rate' : Batch send rate in batches per sec. Batches will be evenly distributed within second. Default 10. Note: batches are evenly distributed, but txs inside one batch are sent as fast as possible. @@ -83,11 +89,24 @@ Note: batches are evenly distributed, but txs inside one batch are sent as fast '--load_time' : Work no longer then load_time sec. Zero value means work always. Default value is 0. +'--log_lvl' : Log level as int value. Default is info level. + +'--ext_set' : Parameter allow to configure plugins settings. Should be provided in form of JSON. For now only payment plugins supported. Default is empty. + +'--test_conn' : Check connection to pool with provided genesis file only. Do not send reqs, do not create test dirs or files. Just do pool open and make initial catchup and exit. + +One or more parameters could be stored in config file. Format of the config is simple YAML. Long parameter name should be used. Config name should be passed to script as an additional unnamed argument. +``` +python3 perf_processes.py -n 1000 -k nym config_file_name +``` + ## Transaction data -Each txn can read predefined data from file or generate random data. + +All txns generate random data. +Some txns (nym, schema, attrib, cred_def, revoc_reg_def, revoc_reg_entry, get_nym, get_attrib, get_schema, get_cred_def, get_revoc_reg_def, get_revoc_reg, get_revoc_reg_delta) can read predefined data from file. Default mode for each txn is to generate random data. -Script is designed to use data from csv files which +In case of predefined data script is designed to use csv files which could be obtained from previous script run with writing txns of random data - result file named "successful". 
For example one can run script to add 1000 nym txns @@ -100,6 +119,7 @@ This file could be used to run script to read all those 1000 nyms ``` python3 perf_processes.py -n 1000 -k "{\"get_nym\": {\"file_name\": \"./load_test_20180620_150354/successful\"}}" ``` + #### Parameters for data file processing 'file_name' - name of the file. @@ -114,19 +134,37 @@ python3 perf_processes.py -n 1000 -k "{\"get_nym\": {\"file_name\": \"./load_tes 'file_field' - split number to be used to run test with. Default is 2. -#### Parameters for specific request types -'payment_addrs_count' - count of payment addresses. Default is 100. The count of payment addresses actually also determines the count of initial -payment sources (one payment source per payment address). Please note, the count of initial payment sources serves -as a buffer for payment request generator because new payments use receipts of previous payments as sources. -In case there is no available sources in the buffer, payment request generator prints a message to stdout that -a next request cannot be generated since no req data are available. _Applicable for: payment, verify_payment, get_payment_sources_ +#### Parameters for 'get_txn' txn type + +'ledger' - ledger id to request from, default is DOMAIN. + +'min_seq_no' - min seq_no to request, default is 0. + +'max_seq_no' - max seq_no to request, default is 10000000. -'payment_method' - payment method. _Applicable for: payment, verify_payment, get_payment_sources_ +'rand_seq' - the way to get next seq_no from interval from min to max, true - next val is random, false - sequentially from min to max, default is true. -'plugin_lib' - name of payment library file. _Applicable for: payment, verify_payment, get_payment_sources_ -'plugin_init_func' - name of payment library initialization function. 
_Applicable for: payment, verify_payment, get_payment_sources_ +#### Parameters for payment plugins + +If ext_set contains settings for payment plugins then load script will try to send txn with fees. + +'payment_addrs_count' - count of payment addresses. Default is 100. + +'addr_mint_limit' - will be minted to each address. Default is 1000. + +'payment_method' - payment method. Default is empty. + +'plugin_lib' - name of payment library file. Default is empty. + +'plugin_init' - name of payment library initialization function. Default is empty. + +'trustees_num' - number of trustees required for multisig. Default is 4. + +'set_fees' - fees that will be appended to existing pool fees before test starts. Default is empty. + +'mint_by' - mint limit will be minted several times by mint_by amount. Total minted amount will be equal to mint limit. Default is mint limit. ## Examples @@ -206,8 +244,3 @@ python3 perf_processes.py -k "{\"TXN_TYPE\": {\"file_name\": \"/path/to/file\", ``` python3 perf_processes.py -k "{\"TXN_TYPE\": {\"file_name\": \"/path/to/file\", \"file_max_split\": 1, \"file_field\": 1, \"ignore_first_line\": false, \"file_sep\": \" \"}}" ``` - -* To send payment txns using 1000 payment addresses / initial payment sources (NOTE: payment method specific arguments should be substituted): ``` -python3 perf_processes.py -k "{\"payment\": {\"payment_addrs_count\": 1000, \"payment_method\": \"\", \"payment_lib\": \"\", \"payment_init_func\": \"\"}}"" ``` diff --git a/scripts/performance/perf_load/__init__.py b/scripts/performance/perf_load/__init__.py index e69de29bb..48ba559ef 100644 --- a/scripts/performance/perf_load/__init__.py +++ b/scripts/performance/perf_load/__init__.py @@ -0,0 +1,5 @@ +from .__version__ import ( + __title__, __version_info__, __version__, __description__, + __long_description__, __keywords__, __url__, __author__, + __author_email__, __maintainer__, __license__ +) diff --git a/scripts/performance/perf_load/__version__.py
b/scripts/performance/perf_load/__version__.py new file mode 100644 index 000000000..f97673fae --- /dev/null +++ b/scripts/performance/perf_load/__version__.py @@ -0,0 +1,15 @@ +""" +perf_load package metadata +""" + +__title__ = 'indy-perf-load' +__version_info__ = (1, 1, 2) +__version__ = '.'.join(map(str, __version_info__)) +__description__ = 'Indy node performance load' +__long_description__ = __description__ +__keywords__ = 'indy node performance load testing' +__url__ = 'https://github.com/hyperledger/indy-node/tree/master/scripts/performance' +__author__ = "Hyperledger" +__author_email__ = 'hyperledger-indy@lists.hyperledger.org' +__maintainer__ = "Hyperledger" +__license__ = "Apache 2.0" diff --git a/scripts/performance/perf_load/perf_client.py b/scripts/performance/perf_load/perf_client.py index ce663fdeb..5ba9524f7 100644 --- a/scripts/performance/perf_load/perf_client.py +++ b/scripts/performance/perf_load/perf_client.py @@ -3,12 +3,13 @@ import os import asyncio import signal +import logging from datetime import datetime from indy import pool, wallet, did, ledger -from perf_load.perf_client_msgs import ClientReady, ClientRun, ClientStop, ClientGetStat, ClientSend, ClientMsg +from perf_load.perf_client_msgs import ClientReady, ClientRun, ClientStop, ClientGetStat, ClientSend from perf_load.perf_clientstaistic import ClientStatistic -from perf_load.perf_utils import random_string +from perf_load.perf_utils import random_string, logger_init from perf_load.perf_req_gen import NoReqDataAvailableException from perf_load.perf_gen_req_parser import ReqTypeParser @@ -18,9 +19,10 @@ class LoadClient: SendTime = 1 SendSync = 2 - def __init__(self, name, pipe_conn, batch_size, batch_rate, req_kind, buff_req, pool_config, send_mode, **kwargs): + def __init__(self, name, pipe_conn, batch_size, batch_rate, req_kind, buff_req, pool_config, send_mode, short_stat, + **kwargs): self._name = name - self._stat = ClientStatistic() + self._stat = ClientStatistic(short_stat) 
self._send_mode = send_mode self._buff_reqs = buff_req self._pipe_conn = pipe_conn @@ -47,12 +49,7 @@ def __init__(self, name, pipe_conn, batch_size, batch_rate, req_kind, buff_req, raise RuntimeError("Batch size cannot be greater than 1 in response waiting mode") if self._send_mode == LoadClient.SendResp and buff_req != 0: raise RuntimeError("Cannot pregenerate reqs in response waiting mode") - - def msg(self, fmt: str, *args): - try: - self._pipe_conn.send(ClientMsg(fmt, *args)) - except Exception as e: - print("{} Ready send error {}".format(self._name, e)) + self._logger = logging.getLogger(self._name) async def pool_open_pool(self, name, config): return await pool.open_pool_ledger(name, config) @@ -85,21 +82,27 @@ async def wallet_close(self, wallet_h): await wallet.close_wallet(wallet_h) async def _init_pool(self, genesis_path): + self._logger.info("_init_pool {}".format(genesis_path)) await self.pool_protocol_version() pool_cfg = json.dumps({"genesis_txn": genesis_path}) await self.pool_create_config(self._pool_name, pool_cfg) self._pool_handle = await self.pool_open_pool(self._pool_name, self._pool_config) + self._logger.info("_init_pool done") async def _wallet_init(self, w_key): + self._logger.info("_wallet_init {}".format(w_key)) self._wallet_name = "{}_wallet".format(self._pool_name) wallet_credential = json.dumps({"key": w_key}) wallet_config = json.dumps({"id": self._wallet_name}) await self.wallet_create_wallet(wallet_config, wallet_credential) self._wallet_handle = await self.wallet_open_wallet(wallet_config, wallet_credential) + self._logger.info("_wallet_init done") async def _did_init(self, seed): + self._logger.info("_did_init {}".format(seed)) self._test_did, self._test_verk = await self.did_create_my_did( self._wallet_handle, json.dumps({'seed': seed[0]})) + self._logger.info("_did_init done") async def _pre_init(self): pass @@ -111,6 +114,7 @@ def _on_pool_create_ext_params(self): return {"max_cred_num": self._batch_size} async def 
run_test(self, genesis_path, seed, w_key): + self._logger.info("run_test genesis_path {}, seed {}, w_key {}".format(genesis_path, seed, w_key)) try: await self._pre_init() @@ -120,27 +124,32 @@ async def run_test(self, genesis_path, seed, w_key): await self._post_init() + self._logger.info("call _req_generator.on_pool_create") await self._req_generator.on_pool_create(self._pool_handle, self._wallet_handle, self._test_did, self.ledger_sign_req, self.ledger_submit, **self._on_pool_create_ext_params()) except Exception as ex: - self.msg("{} run_test error {}", self._name, ex) + self._logger.exception("run_test error {} stopping...".format(ex)) self._loop.stop() return + self._logger.info("call pregen_reqs") await self.pregen_reqs() + self._logger.info("send ClientReady") try: self._pipe_conn.send(ClientReady()) except Exception as e: - print("{} Ready send error {}".format(self._name, e)) + self._logger.exception("Ready send error {}".format(e)) raise e def _on_ClientRun(self, cln_run): + self._logger.debug("_on_ClientRun _send_mode {}".format(self._send_mode)) if self._send_mode == LoadClient.SendTime: self._loop.call_soon(self.req_send) def _on_ClientSend(self, cln_snd): + self._logger.debug("_on_ClientSend _send_mode {}".format(self._send_mode)) if self._send_mode == LoadClient.SendSync: self.req_send(cln_snd.cnt) @@ -148,6 +157,7 @@ def read_cb(self): force_close = False try: flag = self._pipe_conn.recv() + self._logger.debug("read_cb {}".format(flag)) if isinstance(flag, ClientStop): if self._closing is False: force_close = True @@ -159,25 +169,27 @@ def read_cb(self): elif isinstance(flag, ClientSend): self._on_ClientSend(flag) except Exception as e: - self.msg("{} Error {}", self._name, e) + self._logger.exception("Error {}".format(e)) force_close = True if force_close: self._loop.create_task(self.stop_test()) async def gen_signed_req(self): + self._logger.debug("gen_signed_req") if self._closing is True: return try: req_data, req = await 
self._req_generator.generate_request(self._test_did) except NoReqDataAvailableException: - self.msg("{} | Cannot generate request since no req data are available.", datetime.now()) + self._logger.warning("Cannot generate request since no req data are available.") return except Exception as e: - self.msg("{} generate req error {}", self._name, e) + self._logger.exception("generate req error {}".format(e)) self._loop.stop() raise e try: - sig_req = await self.ledger_sign_req(self._wallet_handle, self._test_did, req) + req_did = self._req_generator.req_did() or self._test_did + sig_req = await self.ledger_sign_req(self._wallet_handle, req_did, req) self._stat.signed(req_data) self._load_client_reqs.append((req_data, sig_req)) except Exception as e: @@ -206,7 +218,7 @@ async def pregen_reqs(self): try: await self.gen_signed_req() except NoReqDataAvailableException: - self.msg("{} cannot prepare more reqs. Done {}/{}", self._name, i, self._buff_reqs) + self._logger.warning("cannot prepare more reqs. 
Done {}/{}".format(i, self._buff_reqs)) return def gen_reqs(self): @@ -241,7 +253,7 @@ def send_stat(self): try: self._pipe_conn.send(st) except Exception as e: - print("{} stat send error {}".format(self._name, e)) + self._logger.exception("stat send error {}".format(e)) raise e def req_send(self, cnt: int = None): @@ -254,7 +266,7 @@ def req_send(self, cnt: int = None): to_snd = cnt or self._batch_size if len(self._load_client_reqs) < to_snd: - self.msg("WARNING need to send {}, but have {}", to_snd, len(self._load_client_reqs)) + self._logger.warning("Need to send {}, but have {}".format(to_snd, len(self._load_client_reqs))) for i in range(min(len(self._load_client_reqs), to_snd)): req_data, req = self._load_client_reqs.pop() @@ -263,25 +275,29 @@ def req_send(self, cnt: int = None): self._send_q.append(sender) async def stop_test(self): + self._logger.info("stop_test...") self._closing = True if len(self._send_q) > 0: await asyncio.gather(*self._send_q, return_exceptions=True) if len(self._gen_q) > 0: await asyncio.gather(*self._gen_q, return_exceptions=True) - + self._logger.info("stopping queues done") try: if self._wallet_handle is not None: await self.wallet_close(self._wallet_handle) + self._logger.info("wallet closed") except Exception as e: - self.msg("{} close_wallet exception: {}", self._name, e) + self._logger.exception("close_wallet exception: {}".format(e)) try: if self._pool_handle is not None: await self.pool_close_pool(self._pool_handle) + self._logger.info("pool closed") except Exception as e: - self.msg("{} close_pool_ledger exception: {}", self._name, e) + self._logger.exception("close_pool_ledger exception: {}".format(e)) self._loop.call_soon_threadsafe(self._loop.stop) + self._logger.info("looper stopped") dirs_to_dlt = [] if self._wallet_name is not None and self._wallet_name != "": dirs_to_dlt.append(os.path.join(os.path.expanduser("~/.indy_client/wallet"), self._wallet_name)) @@ -291,26 +307,34 @@ async def stop_test(self): for d in 
dirs_to_dlt: if os.path.isdir(d): shutil.rmtree(d, ignore_errors=True) + self._logger.info("dirs {} deleted".format(dirs_to_dlt)) @classmethod def run(cls, name, genesis_path, pipe_conn, seed, batch_size, batch_rate, - req_kind, buff_req, wallet_key, pool_config, send_mode, mask_sign, ext_set): + req_kind, buff_req, wallet_key, pool_config, send_mode, mask_sign, ext_set, + log_dir, log_lvl, short_stat): if mask_sign: + logger_init(log_dir, "{}.log".format(name), log_lvl) signal.signal(signal.SIGINT, signal.SIG_IGN) + logging.getLogger(name).info("starting") + exts = {} if ext_set and isinstance(ext_set, str): try: exts = json.loads(ext_set) except Exception as e: - print("{} parse ext settings error {}".format(name, e)) + logging.getLogger(name).warning("{} parse ext settings error {}".format(name, e)) exts = {} - cln = cls(name, pipe_conn, batch_size, batch_rate, req_kind, buff_req, pool_config, send_mode, **exts) + cln = cls(name, pipe_conn, batch_size, batch_rate, req_kind, buff_req, + pool_config, send_mode, short_stat, **exts) try: asyncio.run_coroutine_threadsafe(cln.run_test(genesis_path, seed, wallet_key), loop=cln._loop) cln._loop.run_forever() except Exception as e: - print("{} running error {}".format(cln._name, e)) + logging.getLogger(name).exception("running error {}".format(e)) stat = cln._stat.dump_stat(dump_all=True) + + logging.getLogger(name).info("stopped") return stat diff --git a/scripts/performance/perf_load/perf_client_fees.py b/scripts/performance/perf_load/perf_client_fees.py index ca6b2764b..1c7e2db68 100644 --- a/scripts/performance/perf_load/perf_client_fees.py +++ b/scripts/performance/perf_load/perf_client_fees.py @@ -6,7 +6,7 @@ from perf_load.perf_client import LoadClient from perf_load.perf_utils import ensure_is_reply, divide_sequence_into_chunks,\ - request_get_type, gen_input_output, PUB_XFER_TXN_ID + request_get_type, gen_input_output, PUB_XFER_TXN_ID, response_get_type TRUSTEE_ROLE_CODE = "0" @@ -30,8 +30,10 @@ def 
__init_plugin_once(cls, plugin_lib_name, init_func_name): print("Payment plugin initialization failed: {}".format(repr(ex))) raise ex - def __init__(self, name, pipe_conn, batch_size, batch_rate, req_kind, buff_req, pool_config, send_mode, **kwargs): - super().__init__(name, pipe_conn, batch_size, batch_rate, req_kind, buff_req, pool_config, send_mode, **kwargs) + def __init__(self, name, pipe_conn, batch_size, batch_rate, req_kind, buff_req, pool_config, send_mode, short_stat, + **kwargs): + super().__init__(name, pipe_conn, batch_size, batch_rate, req_kind, buff_req, pool_config, send_mode, + short_stat, **kwargs) self._trustee_dids = [] self._pool_fees = {} self._ignore_fees_txns = [PUB_XFER_TXN_ID] @@ -44,6 +46,9 @@ def __init__(self, name, pipe_conn, batch_size, batch_rate, req_kind, buff_req, self._req_num_of_trustees = kwargs.get("trustees_num", 4) self._set_fees = kwargs.get("set_fees", {}) self._req_addrs = {} + self._mint_by = kwargs.get("mint_by", self._addr_mint_limit) + if self._mint_by < 1 or self._mint_by > self._addr_mint_limit: + self._mint_by = self._addr_mint_limit if not self._payment_method or not self._plugin_lib or not self._plugin_init: raise RuntimeError("Plugin cannot be initialized. 
Some required param missed") @@ -87,7 +92,8 @@ async def _parse_fees_resp(self, req, resp_or_exp): try: resp_obj = json.loads(resp) op_f = resp_obj.get("op", "") - if op_f == "REPLY": + resp_type = response_get_type(resp_obj) + if op_f == "REPLY" and resp_type not in self._ignore_fees_txns: receipt_infos_json = await payment.parse_response_with_fees(self._payment_method, resp) receipt_infos = json.loads(receipt_infos_json) if receipt_infos_json else [] for ri in receipt_infos: @@ -95,7 +101,7 @@ async def _parse_fees_resp(self, req, resp_or_exp): else: self._restore_fees_from_req(req) except Exception as e: - print("Error on payment txn postprocessing: {}".format(e)) + self._logger.exception("Error on payment txn postprocessing: {}".format(e)) self._req_addrs.pop(req, {}) async def ledger_submit(self, pool_h, req): @@ -132,15 +138,19 @@ async def multisig_req(self, req): ret_req = await ledger.multi_sign_request(self._wallet_handle, d, ret_req) return ret_req - async def __mint_sources(self, payment_addresses, amount): - outputs = [] - for payment_address in payment_addresses: - outputs.append({"recipient": payment_address, "amount": amount}) - - mint_req, _ = await payment.build_mint_req(self._wallet_handle, self._test_did, json.dumps(outputs), None) - mint_req = await self.multisig_req(mint_req) - mint_resp = await ledger.submit_request(self._pool_handle, mint_req) - ensure_is_reply(mint_resp) + async def __mint_sources(self, payment_addresses, amount, by_val): + iters = (amount // by_val) + (1 if (amount % by_val) > 0 else 0) + mint_val = by_val + for i in range(iters): + outputs = [] + if (i + 1) * by_val > amount: + mint_val = amount % by_val + for payment_address in payment_addresses: + outputs.append({"recipient": payment_address, "amount": mint_val}) + mint_req, _ = await payment.build_mint_req(self._wallet_handle, self._test_did, json.dumps(outputs), None) + mint_req = await self.multisig_req(mint_req) + mint_resp = await 
ledger.submit_request(self._pool_handle, mint_req) + ensure_is_reply(mint_resp) async def _get_payment_sources(self, pmnt_addr): get_ps_req, _ = await payment.build_get_payment_sources_request(self._wallet_handle, self._test_did, pmnt_addr) @@ -175,6 +185,7 @@ async def _did_init(self, seed): self._trustee_dids[0], nym_req) ensure_is_reply(nym_resp) self._trustee_dids.append(self._test_did) + self._logger.info("_did_init done") async def _pool_fees_init(self): if self._set_fees: @@ -188,24 +199,29 @@ async def _pool_fees_init(self): get_fees_resp = await ledger.sign_and_submit_request(self._pool_handle, self._wallet_handle, self._test_did, get_fees_req) self._pool_fees = json.loads(await payment.parse_get_txn_fees_response(self._payment_method, get_fees_resp)) + self._logger.info("_pool_fees_init done") async def _payment_address_init(self): pmt_addrs = await self.__create_payment_addresses(self._payment_addrs_count) for payment_addrs_chunk in divide_sequence_into_chunks(pmt_addrs, 500): - await self.__mint_sources(payment_addrs_chunk, self._addr_mint_limit) + await self.__mint_sources(payment_addrs_chunk, self._addr_mint_limit, self._mint_by) for pa in pmt_addrs: self._addr_txos.update(await self._get_payment_sources(pa)) + self._logger.info("_payment_address_init done") async def _pre_init(self): self.__init_plugin_once(self._plugin_lib, self._plugin_init) + self._logger.info("_pre_init done") async def _post_init(self): await self._pool_fees_init() await self._payment_address_init() + self._logger.info("_post_init done") def _on_pool_create_ext_params(self): params = super()._on_pool_create_ext_params() params.update({"addr_txos": self._addr_txos, "payment_method": self._payment_method, "pool_fees": self._pool_fees}) + self._logger.info("_on_pool_create_ext_params done {}".format(params)) return params diff --git a/scripts/performance/perf_load/perf_client_msgs.py b/scripts/performance/perf_load/perf_client_msgs.py index 91372c102..f1a37f743 100644 --- 
a/scripts/performance/perf_load/perf_client_msgs.py +++ b/scripts/performance/perf_load/perf_client_msgs.py @@ -19,8 +19,3 @@ class ClientGetStat: class ClientSend: def __init__(self, cnt: int = 10): self.cnt = cnt - - -class ClientMsg: - def __init__(self, msg: str, *args): - self.msg = msg.format(*args) diff --git a/scripts/performance/perf_load/perf_client_runner.py b/scripts/performance/perf_load/perf_client_runner.py index 55adafc4d..f6bc3e9ee 100644 --- a/scripts/performance/perf_load/perf_client_runner.py +++ b/scripts/performance/perf_load/perf_client_runner.py @@ -1,3 +1,4 @@ +import logging from perf_load.perf_client_msgs import ClientRun, ClientGetStat @@ -18,8 +19,10 @@ def __init__(self, name, conn, out_file): self.total_nack = 0 self.total_reject = 0 self._out_file = out_file + self._logger = logging.getLogger(name) def stop_client(self): + self._logger.debug("stop_client") self.status = ClientRunner.ClientStopped def is_finished(self): @@ -35,18 +38,20 @@ def refresh_stat(self, stat): self.total_reject = stat.get("total_rejected", self.total_reject) def run_client(self): + self._logger.debug("run_client {}".format(self)) try: if self.conn and self.status == ClientRunner.ClientReady: self.conn.send(ClientRun()) self.status = ClientRunner.ClientRun except Exception as e: - print("Sent Run to client {} error {}".format(self.name, e), file=self._out_file) + self._logger.exception("Sent Run to client {} error {}".format(self.name, e)) self.status = ClientRunner.ClientError def req_stats(self): + self._logger.debug("req_stats {}".format(self)) try: if self.conn and self.status == ClientRunner.ClientRun: self.conn.send(ClientGetStat()) except Exception as e: - print("Sent ClientGetStat to client {} error {}".format(self.name, e), file=self._out_file) + self._logger.exception("Sent ClientGetStat to client {} error {}".format(self.name, e), file=self._out_file) self.status = ClientRunner.ClientError diff --git 
a/scripts/performance/perf_load/perf_clientstaistic.py b/scripts/performance/perf_load/perf_clientstaistic.py index b4304b327..d9092bc38 100644 --- a/scripts/performance/perf_load/perf_clientstaistic.py +++ b/scripts/performance/perf_load/perf_clientstaistic.py @@ -3,7 +3,8 @@ class ClientStatistic: - def __init__(self): + def __init__(self, short_stat=False): + self._short_stat = short_stat self._req_prep = 0 self._req_sent = 0 self._req_succ = 0 @@ -31,8 +32,9 @@ def signed(self, req_data): def sent(self, req_data, req): req_data_repr = repr(req_data) self._client_stat_reqs.setdefault(req_data_repr, dict())["client_sent"] = time.time() - self._client_stat_reqs[req_data_repr]["req"] = req self._req_sent += 1 + if not self._short_stat: + self._client_stat_reqs[req_data_repr]["req"] = req def reply(self, req_data, reply_or_exception): req_data_repr = repr(req_data) @@ -69,7 +71,8 @@ def reply(self, req_data, reply_or_exception): self._req_fail += 1 status = "fail" self._client_stat_reqs[req_data_repr]["status"] = status - self._client_stat_reqs[req_data_repr]["resp"] = resp + if not self._short_stat: + self._client_stat_reqs[req_data_repr]["resp"] = resp def dump_stat(self, dump_all: bool = False): ret_val = {} diff --git a/scripts/performance/perf_load/perf_gen_req_parser.py b/scripts/performance/perf_load/perf_gen_req_parser.py index 175aeabf0..9bb70456b 100644 --- a/scripts/performance/perf_load/perf_gen_req_parser.py +++ b/scripts/performance/perf_load/perf_gen_req_parser.py @@ -6,6 +6,9 @@ from perf_load.perf_req_gen_definition import RGGetDefinition, RGDefinition from perf_load.perf_req_gen_revoc import RGDefRevoc, RGGetDefRevoc, RGEntryRevoc, RGGetEntryRevoc, RGGetRevocRegDelta from perf_load.perf_req_gen_payment import RGGetPaymentSources, RGPayment, RGVerifyPayment +from perf_load.perf_req_gen_cfg_writes import RGConfigChangeState +from perf_load.perf_req_gen_demoted_node import RGPoolNewDemotedNode +from perf_load.perf_req_gen_get_txn import RGGetTxn class 
ReqTypeParser: @@ -14,7 +17,8 @@ class ReqTypeParser: "revoc_reg_entry": RGEntryRevoc, "get_nym": RGGetNym, "get_attrib": RGGetAttrib, "get_schema": RGGetSchema, "get_cred_def": RGGetDefinition, "get_revoc_reg_def": RGGetDefRevoc, "get_revoc_reg": RGGetEntryRevoc, "get_revoc_reg_delta": RGGetRevocRegDelta, "get_payment_sources": RGGetPaymentSources, "payment": RGPayment, - "verify_payment": RGVerifyPayment} + "verify_payment": RGVerifyPayment, "cfg_writes": RGConfigChangeState, "demoted_node": RGPoolNewDemotedNode, + "get_txn": RGGetTxn} @classmethod def supported_requests(cls): diff --git a/scripts/performance/perf_load/perf_processes.py b/scripts/performance/perf_load/perf_processes.py index d6aa8b81a..5e967a894 100755 --- a/scripts/performance/perf_load/perf_processes.py +++ b/scripts/performance/perf_load/perf_processes.py @@ -12,9 +12,14 @@ import functools from datetime import datetime import yaml +import logging +import shutil -from perf_load.perf_client_msgs import ClientReady, ClientStop, ClientSend, ClientMsg -from perf_load.perf_utils import check_fs, check_seed +from indy import pool + +import perf_load +from perf_load.perf_client_msgs import ClientReady, ClientStop, ClientSend +from perf_load.perf_utils import check_fs, check_seed, logger_init, random_string from perf_load.perf_gen_req_parser import ReqTypeParser from perf_load.perf_client import LoadClient from perf_load.perf_client_runner import ClientRunner @@ -82,13 +87,21 @@ parser.add_argument('--ext_set', default=None, type=str, required=False, dest='ext_set', help='Ext settings to use') +parser.add_argument('--log_lvl', default=logging.INFO, type=int, required=False, dest='log_lvl', + help='Logging level') + +parser.add_argument('--short_stat', action='store_true', dest='short_stat', help='Store only total statistics') + +parser.add_argument('--test_conn', action='store_true', dest='test_conn', + help='Check pool connection with provided genesis file') + class LoadRunner: def __init__(self, 
clients=0, genesis_path="~/.indy-cli/networks/sandbox/pool_transactions_genesis", seed=["000000000000000000000000Trustee1"], req_kind="nym", batch_size=10, refresh_rate=10, buff_req=30, out_dir=".", val_sep="|", wallet_key="key", mode="p", pool_config='', sync_mode="freeflow", load_rate=10, out_file="", load_time=0, ext_set=None, - client_runner=LoadClient.run): + client_runner=LoadClient.run, log_lvl=logging.INFO, short_stat=False): self._client_runner = client_runner self._clients = dict() # key process future; value ClientRunner self._loop = asyncio.get_event_loop() @@ -121,14 +134,20 @@ def __init__(self, clients=0, genesis_path="~/.indy-cli/networks/sandbox/pool_tr except Exception as ex: raise RuntimeError("pool_config param is ill-formed JSON: {}".format(ex)) - self._out_file = self.prepare_fs(out_dir, "load_test_{}". - format(datetime.now().strftime("%Y%m%d_%H%M%S")), out_file) + test_name = "load_test_{}".format(datetime.now().strftime("%Y%m%d_%H%M%S")) + self._log_dir = os.path.join(out_dir, test_name) + self._log_lvl = log_lvl + logger_init(self._log_dir, "{}.log".format(test_name), self._log_lvl) + self._logger = logging.getLogger(__name__) + self._out_file = self.prepare_fs(out_dir, test_name, out_file) + self._short_stat = short_stat def process_reqs(self, stat, name: str = ""): assert self._failed_f assert self._total_f assert self._succ_f assert self._nacked_f + self._logger.debug("process_reqs stat {}, name {}".format(stat, name)) if not isinstance(stat, dict): return reqs = stat.get("reqs", []) @@ -138,6 +157,7 @@ def process_reqs(self, stat, name: str = ""): fails = [] for (r_id, r_data) in reqs: # ["client", "label", "id", "status", "client_preparing", "client_prepared", "client_sent", "client_reply", "server_reply"] + self._logger.debug("process_reqs r_id {}, r_data {}".format(r_id, r_data)) status = r_data.get("status", "") tot.append(self._value_separator.join( [name, r_data.get("label", ""), str(r_id), status, @@ -156,6 +176,10 @@ def 
process_reqs(self, stat, name: str = ""): if tot: self._total_f.write("\n".join(tot + [""])) + + if self._short_stat: + return + if suc: self._succ_f.write("\n".join(suc + [""])) if nack: @@ -164,37 +188,45 @@ def process_reqs(self, stat, name: str = ""): self._failed_f.write("\n".join(fails + [""])) def sig_handler(self, sig): + self._logger.debug("sig_handler sig {}".format(sig)) for prc, cln in self._clients.items(): + self._logger.debug("sig_handler prc {} cln {}".format(prc, cln)) try: if not cln.is_finished() and cln.conn: cln.conn.send(ClientStop()) if sig == signal.SIGTERM: prc.cancel() except Exception as e: - print("Sent stop to client {} error {}".format(cln.name, e), file=self._out_file) + self._logger.exception("Sent stop to client {} error {}".format(cln.name, e)) def _tick_all(self): + self._logger.debug("_tick_all") self._loop.call_later(self._batch_rate, self._tick_all) for cln in self._clients.values(): + self._logger.debug("_tick_all cln {}".format(cln)) try: if cln.status == ClientRunner.ClientRun and cln.conn: cln.conn.send(ClientSend(self._batch_size)) except Exception as e: - print("Sent stop to client {} error {}".format(cln.name, e), file=self._out_file) + self._logger.exception("Sent stop to client {} error {}".format(cln.name, e)) def _tick_one(self, idx: int = 0): i = idx % len(self._clients) + self._logger.debug("_tick_one idx {} i {}".format(idx, i)) self._loop.call_later(self._batch_rate, self._tick_one, i + 1) key = list(self._clients)[i] cln = self._clients[key] + self._logger.debug("_tick_one cln {}".format(cln)) try: if cln.status == ClientRunner.ClientRun and cln.conn: cln.conn.send(ClientSend(self._batch_size)) except Exception as e: - print("Sent stop to client {} error {}".format(cln.name, e), file=self._out_file) + self._logger.exception("Sent stop to client {} error {}".format(cln.name, e)) def start_clients(self): to_start = [c for c in self._clients.values() if c.status == ClientRunner.ClientReady] + 
self._logger.debug("start_clients to_start {} _start_sync {} _sync_mode {}". + format(to_start, self._start_sync, self._sync_mode)) if self._start_sync and len(to_start) != len(self._clients): return for cln in to_start: @@ -209,33 +241,35 @@ def start_clients(self): self.schedule_stop() def request_stat(self): + self._logger.debug("request_stat") for cln in self._clients.values(): cln.req_stats() def read_client_cb(self, prc): + self._logger.debug("read_client_cb prc {}".format(prc)) try: r_data = self._clients[prc].conn.recv() + self._logger.debug("read_client_cb r_data {}".format(r_data)) if isinstance(r_data, dict): self._clients[prc].refresh_stat(r_data) self.process_reqs(r_data, self._clients[prc].name) elif isinstance(r_data, ClientReady): self._clients[prc].status = ClientRunner.ClientReady self.start_clients() - elif isinstance(r_data, ClientMsg): - print("{} : {}".format(self._clients[prc].name, r_data.msg), file=self._out_file) else: - print("Recv garbage {} from {}".format(r_data, self._clients[prc].name), file=self._out_file) + self._logger.warning("Recv garbage {} from {}".format(r_data, self._clients[prc].name)) except Exception as e: - print("{} read_client_cb error {}".format(self._clients[prc].name, e), file=self._out_file) - # self._clients[prc].conn = None + self._logger.exception("{} read_client_cb error {}".format(self._clients[prc].name, e)) def client_done(self, client): + self._logger.debug("client_done client {}".format(client)) try: last_stat = client.result() + self._logger.debug("client_done last_stat {}".format(last_stat)) self._clients[client].refresh_stat(last_stat) self.process_reqs(last_stat, self._clients[client].name) except Exception as e: - print("Client Error", e, file=self._out_file) + self._logger.exception("Client Error {}".format(e)) self._clients[client].stop_client() self._loop.remove_reader(self._clients[client].conn) @@ -246,6 +280,7 @@ def client_done(self, client): self._loop.call_soon_threadsafe(self._loop.stop) def 
get_refresh_str(self): + self._logger.debug("get_refresh_str") clients = 0 total_sent = 0 total_succ = 0 @@ -267,7 +302,9 @@ def get_refresh_str(self): return print_str def prepare_fs(self, out_dir, test_dir_name, out_file): - self._out_dir = os.path.join(out_dir, test_dir_name) + self._logger.debug("prepare_fs out_dir {}, test_dir_name {}, out_file {}". + format(out_dir, test_dir_name, out_file)) + self._out_dir = os.path.expanduser(os.path.join(out_dir, test_dir_name)) if not os.path.exists(self._out_dir): os.makedirs(self._out_dir) @@ -296,6 +333,7 @@ def prepare_fs(self, out_dir, test_dir_name, out_file): return ret_out def close_fs(self): + self._logger.debug("close_fs") assert self._failed_f assert self._total_f assert self._succ_f @@ -308,6 +346,7 @@ def close_fs(self): self._out_file.close() def screen_stat(self): + self._logger.debug("close_fs") ends = "\n" if self._out_file != sys.stdout else "\r" print(self.get_refresh_str(), end=ends, file=self._out_file) self.request_stat() @@ -320,10 +359,12 @@ def screen_stat(self): self._loop.call_later(self._refresh_rate, self.screen_stat) def schedule_stop(self): + self._logger.debug("schedule_stop _stop_sec".format(self._stop_sec)) if self._stop_sec > 0: self._loop.call_later(self._stop_sec, self.sig_handler, signal.SIGINT) def load_run(self): + print("Version ", perf_load.__version__, file=self._out_file) print("Number of client ", self._proc_count, file=self._out_file) print("Path to genesis txns file", self._genesis_path, file=self._out_file) print("Seed ", self._seed, file=self._out_file) @@ -340,6 +381,7 @@ def load_run(self): print("Pool config ", self._pool_config, file=self._out_file) print("Load rate batches per sec", 1 / self._batch_rate, file=self._out_file) print("Ext settings ", self._ext_set, file=self._out_file) + print("Save short statistics ", self._short_stat, file=self._out_file) load_client_mode = LoadClient.SendTime if self._sync_mode in ['all', 'one']: @@ -349,7 +391,7 @@ def load_run(self): 
self._batch_size = 1 self._buff_req = 0 - print("load_client_mode", load_client_mode, file=self._out_file) + self._logger.info("load_run version {} params {}".format(perf_load.__version__, self.__dict__)) self._loop.add_signal_handler(signal.SIGTERM, functools.partial(self.sig_handler, signal.SIGTERM)) self._loop.add_signal_handler(signal.SIGINT, functools.partial(self.sig_handler, signal.SIGINT)) @@ -363,15 +405,19 @@ def load_run(self): prc_name = "LoadClient_{}".format(i) prc = executor.submit(self._client_runner, prc_name, self._genesis_path, wr, self._seed, self._batch_size, self._batch_rate, self._req_kind, self._buff_req, self._wallet_key, self._pool_config, - load_client_mode, self._mode == 'p', self._ext_set) + load_client_mode, self._mode == 'p', self._ext_set, self._log_dir, self._log_lvl, + self._short_stat) prc.add_done_callback(self.client_done) self._loop.add_reader(rd, self.read_client_cb, prc) self._clients[prc] = ClientRunner(prc_name, rd, self._out_file) + self._logger.info("load_run client {} created".format(prc_name)) self.screen_stat() + self._logger.info("load_run all clients created") self._loop.run_forever() + self._logger.info("load_run stopping...") self._loop.remove_signal_handler(signal.SIGTERM) self._loop.remove_signal_handler(signal.SIGINT) @@ -379,6 +425,24 @@ def load_run(self): print("DONE At", datetime.now(), file=self._out_file) print(self.get_refresh_str(), file=self._out_file) self.close_fs() + self._logger.info("load_run stopped") + + +def check_genesis(gen_path): + loop = asyncio.get_event_loop() + pool_cfg = json.dumps({"genesis_txn": gen_path}) + pool_name = "pool_{}".format(random_string(24)) + loop.run_until_complete(pool.set_protocol_version(2)) + try: + loop.run_until_complete(pool.create_pool_ledger_config(pool_name, pool_cfg)) + pool_handle = loop.run_until_complete(pool.open_pool_ledger(pool_name, None)) + except Exception as ex: + raise argparse.ArgumentTypeError(ex) + + 
loop.run_until_complete(pool.close_pool_ledger(pool_handle)) + dir_to_dlt = os.path.join(os.path.expanduser("~/.indy_client/pool"), pool_name) + if os.path.isdir(dir_to_dlt): + shutil.rmtree(dir_to_dlt, ignore_errors=True) if __name__ == '__main__': @@ -406,10 +470,16 @@ def load_run(self): dict_args["genesis_path"] = check_fs(False, dict_args["genesis_path"]) dict_args["out_dir"] = check_fs(True, dict_args["out_dir"]) + check_genesis(dict_args["genesis_path"]) + + if dict_args["test_conn"]: + exit(0) + tr = LoadRunner(dict_args["clients"], dict_args["genesis_path"], dict_args["seed"], dict_args["req_kind"], dict_args["batch_size"], dict_args["refresh_rate"], dict_args["buff_req"], dict_args["out_dir"], dict_args["val_sep"], dict_args["wallet_key"], dict_args["mode"], dict_args["pool_config"], dict_args["sync_mode"], dict_args["load_rate"], dict_args["out_file"], dict_args["load_time"], dict_args["ext_set"], - client_runner=LoadClient.run if not dict_args["ext_set"] else LoadClientFees.run) + client_runner=LoadClient.run if not dict_args["ext_set"] else LoadClientFees.run, + log_lvl=dict_args["log_lvl"], short_stat=dict_args["short_stat"]) tr.load_run() diff --git a/scripts/performance/perf_load/perf_req_gen.py b/scripts/performance/perf_load/perf_req_gen.py index a37446d89..aa061c589 100644 --- a/scripts/performance/perf_load/perf_req_gen.py +++ b/scripts/performance/perf_load/perf_req_gen.py @@ -87,3 +87,6 @@ async def on_request_generated(self, req_data, gen_req): async def on_request_replied(self, req_data, gen_req, resp_or_exp): pass + + def req_did(self): + return None diff --git a/scripts/performance/perf_load/perf_req_gen_cfg_writes.py b/scripts/performance/perf_load/perf_req_gen_cfg_writes.py new file mode 100644 index 000000000..965019ea5 --- /dev/null +++ b/scripts/performance/perf_load/perf_req_gen_cfg_writes.py @@ -0,0 +1,14 @@ +import random +from indy import ledger + +from perf_load.perf_req_gen import RequestGenerator + + +class 
RGConfigChangeState(RequestGenerator): + _req_types = ["111"] + + def _rand_data(self): + return str(random.randint(0, 99999999)) + + async def _gen_req(self, submit_did, req_data): + return await ledger.build_pool_config_request(submit_did, True, False) diff --git a/scripts/performance/perf_load/perf_req_gen_demoted_node.py b/scripts/performance/perf_load/perf_req_gen_demoted_node.py new file mode 100644 index 000000000..08357ad34 --- /dev/null +++ b/scripts/performance/perf_load/perf_req_gen_demoted_node.py @@ -0,0 +1,38 @@ +import random +from indy import ledger, did +import json + +from perf_load.perf_req_gen import RequestGenerator +from perf_load.perf_utils import random_string + + +class RGPoolNewDemotedNode(RequestGenerator): + _req_types = ["0"] + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self._steward_did = None + self._node_alias = None + self._node_did = None + + def _rand_data(self): + ret = "0.{}.{}.{}".format(random.randint(0, 255), random.randint(0, 255), random.randint(0, 255)) + return ret + + async def on_pool_create(self, pool_handle, wallet_handle, submitter_did, sign_req_f, send_req_f, *args, **kwargs): + self._node_alias = random_string(7) + self._node_did, node_ver = await did.create_and_store_my_did(wallet_handle, + json.dumps({'seed': random_string(32)})) + self._steward_did, verk = await did.create_and_store_my_did(wallet_handle, + json.dumps({'seed': random_string(32)})) + + nym_req = await ledger.build_nym_request(submitter_did, self._steward_did, verk, None, "STEWARD") + await ledger.sign_and_submit_request(pool_handle, wallet_handle, submitter_did, nym_req) + + async def _gen_req(self, submitter_did, req_data): + data = {'alias': self._node_alias, 'client_port': 50001, 'node_port': 50002, 'node_ip': req_data, + 'client_ip': req_data, 'services': []} + return await ledger.build_node_request(self._steward_did, self._node_did, json.dumps(data)) + + def req_did(self): + return self._steward_did diff 
--git a/scripts/performance/perf_load/perf_req_gen_get_txn.py b/scripts/performance/perf_load/perf_req_gen_get_txn.py new file mode 100644 index 000000000..a8848189b --- /dev/null +++ b/scripts/performance/perf_load/perf_req_gen_get_txn.py @@ -0,0 +1,27 @@ +import random +from indy import ledger + +from perf_load.perf_req_gen import RequestGenerator + + +class RGGetTxn(RequestGenerator): + _req_types = ["3"] + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self._ledger = kwargs.get("ledger", "DOMAIN") + self._min_seq = kwargs.get("min_seq_no", 0) + self._max_seq = kwargs.get("max_seq_no", 10000000) + self._rand_seq = kwargs.get("rand_seq", True) + self._cur_seq = -1 + self._width = self._max_seq - self._min_seq + + def _rand_data(self): + if self._rand_seq: + self._cur_seq = random.randint(self._min_seq, self._max_seq) + else: + self._cur_seq = self._min_seq + ((self._cur_seq + 1) % self._width) + return (self._ledger, int(self._cur_seq)) + + async def _gen_req(self, submitter_did, req_data): + return await ledger.build_get_txn_request(submitter_did, req_data[0], req_data[1]) diff --git a/scripts/performance/perf_load/perf_req_gen_payment.py b/scripts/performance/perf_load/perf_req_gen_payment.py index 26706bc89..0dd8ce84f 100644 --- a/scripts/performance/perf_load/perf_req_gen_payment.py +++ b/scripts/performance/perf_load/perf_req_gen_payment.py @@ -53,6 +53,10 @@ async def _gen_req(self, submit_did, req_data): class RGPayment(RGBasePayment): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self._sent_reqs = set() + def _gen_req_data(self): return self._gen_input_output(1, self._payment_fees) @@ -62,6 +66,24 @@ async def _gen_req(self, submit_did, req_data): self._wallet_handle, self._submitter_did, json.dumps(inputs), json.dumps(outputs), None) return req + async def on_request_generated(self, req_data, gen_req): + self._sent_reqs.add(gen_req) + + async def on_request_replied(self, req_data, gen_req, 
resp_or_exp): + if gen_req in self._sent_reqs: + self._sent_reqs.remove(gen_req) + + if isinstance(resp_or_exp, Exception): + return + + try: + receipt_infos_json = await payment.parse_payment_response(self._payment_method, resp_or_exp) + receipt_infos = json.loads(receipt_infos_json) if receipt_infos_json else [] + for ri in receipt_infos: + self._addr_txos[ri["recipient"]].append((ri["receipt"], ri["amount"])) + except Exception: + pass + class RGVerifyPayment(RGBasePayment): def __init__(self, *args, **kwargs): diff --git a/scripts/performance/perf_load/perf_req_gen_seq.py b/scripts/performance/perf_load/perf_req_gen_seq.py index 029982aea..1a50a3e9b 100644 --- a/scripts/performance/perf_load/perf_req_gen_seq.py +++ b/scripts/performance/perf_load/perf_req_gen_seq.py @@ -56,3 +56,6 @@ async def on_request_generated(self, req_data, gen_req): async def on_request_replied(self, req_data, req, resp_or_exp): for r in self._reqs_collection: await r.on_request_replied(req_data, req, resp_or_exp) + + def req_did(self): + return self._reqs_collection[self._req_idx].req_did() diff --git a/scripts/performance/perf_load/perf_utils.py b/scripts/performance/perf_load/perf_utils.py index 97f6045f2..8b54a55ed 100644 --- a/scripts/performance/perf_load/perf_utils.py +++ b/scripts/performance/perf_load/perf_utils.py @@ -4,11 +4,20 @@ from collections import Sequence import base58 import libnacl - +import logging +import time PUB_XFER_TXN_ID = "10001" +def logger_init(out_dir, f_name, log_lvl): + od = os.path.expanduser(out_dir) + os.makedirs(od, exist_ok=True) + logging.basicConfig(filename=os.path.join(od, f_name), level=log_lvl, style="{", + format='{asctime:s}|{levelname:s}|{filename:s}|{name:s}|{message:s}') + logging.Formatter.converter = time.gmtime + + def check_fs(is_dir: bool, fs_name: str): pp = os.path.expanduser(fs_name) rights = os.W_OK if is_dir else os.R_OK @@ -73,6 +82,17 @@ def request_get_type(req): return txn_type +def response_get_type(req): + if 
isinstance(req, dict): + dict_resp = req + elif isinstance(req, str): + dict_resp = json.loads(req) + else: + raise RuntimeError("Response of unsupported type") + txn_type = dict_resp.get("result", {}).get("txn", {}).get("type", "") + return txn_type + + def gen_input_output(addr_txos, val): for address in addr_txos: inputs = [] diff --git a/scripts/performance/setup.py b/scripts/performance/setup.py index dd2f5ef9e..d6cbc1266 100644 --- a/scripts/performance/setup.py +++ b/scripts/performance/setup.py @@ -1,5 +1,6 @@ #!/usr/bin/env python +import os import sys from setuptools import setup, find_packages @@ -13,17 +14,29 @@ print("NOTE: Installation failed. Run setup.py using python3") sys.exit(1) +# resolve metadata +metadata = {} +here = os.path.abspath(os.path.dirname(__file__)) +with open(os.path.join(here, 'perf_load', '__version__.py'), 'r') as f: + exec(f.read(), metadata) + tests_require = ['pytest==3.3.1', 'pytest-xdist==1.22.1', 'python3-indy>=1.6.1.dev683'] setup( - name='indy-perf-load', - version="1.0.13", - description='Indy node performance load', - keywords='Indy Node performance load testing', + name=metadata['__title__'], + version=metadata['__version__'], + description=metadata['__description__'], + long_description=metadata['__long_description__'], + keywords=metadata['__keywords__'], + url=metadata['__url__'], + author=metadata['__author__'], + author_email=metadata['__author_email__'], + maintainer=metadata['__maintainer__'], + license=metadata['__license__'], packages=find_packages(), package_data={'': ['*.md']}, include_package_data=True, - install_requires=['python3-indy>=1.6.1.dev683', 'PyYAML>=3.12'], + install_requires=['python3-indy>=1.6.1.dev683', 'PyYAML>=3.12', 'libnacl==1.6.1', 'base58'], setup_requires=['pytest-runner'], extras_require={'tests': tests_require}, tests_require=tests_require, diff --git a/scripts/read_ledger b/scripts/read_ledger index a818b3314..ea9267692 100755 --- a/scripts/read_ledger +++ b/scripts/read_ledger @@ 
-46,7 +46,6 @@ def read_args(): parser.add_argument('--count', required=False, action='store_true', help="returns the number of txns in the given ledger") parser.add_argument('--node_name', required=False, help="Node's name") - parser.add_argument('--client_name', required=False, help="Client's name") parser.add_argument('--serializer', required=False, default='json', help="How to represent the data (json by default)") parser.add_argument('--network', required=False, type=str, @@ -55,20 +54,10 @@ def read_args(): return parser.parse_args() -def get_ledger_dir(node_name, client_name, network): - if node_name and client_name: - print("Either 'node_name' or 'client_name' can be specified") - exit() - +def get_ledger_dir(node_name, network): config = getConfig() _network = network if network else config.NETWORK_NAME ledger_base_dir = config.LEDGER_DIR - if client_name: - # Build path to data if --client_name was specified - ledger_data_dir = os.path.join(config.CLI_BASE_DIR, _network, - config.clientDataDir, client_name) - return ledger_data_dir - if node_name: # Build path to data if --node_name was specified ledger_data_dir = os.path.join(ledger_base_dir, _network, _DATA, node_name) @@ -178,7 +167,7 @@ if __name__ == '__main__': args = read_args() config = getConfig() - ledger_data_dir = get_ledger_dir(args.node_name, args.client_name, args.network) + ledger_data_dir = get_ledger_dir(args.node_name, args.network) read_copy_ledger_data_dir = None try: # RocksDB supports real read-only mode and does not need to have a ledger copy. diff --git a/scripts/reset_client b/scripts/reset_client deleted file mode 100644 index 560b5ac39..000000000 --- a/scripts/reset_client +++ /dev/null @@ -1,24 +0,0 @@ -#! 
/usr/bin/env python3 - -import os - -from indy_client.script_helper import performIndyBaseDirCleanup -from indy_common.config_util import getConfig - -config = getConfig() -baseDir = os.path.expanduser(config.CLI_BASE_DIR) - -print("\nIMPORTANT: This will clean up indy base directory: {}".format(baseDir)) -userAnswer = "no" -validAns = "YeS" -try: - userAnswer = input( - "\nAre you sure, type {} to continue: ".format(validAns)) -except KeyboardInterrupt: - pass - -if userAnswer == validAns: - performIndyBaseDirCleanup(baseDir) - print("\nIndy base directory cleaned up.\n") -else: - print("\nOk, no cleanup performed.\n") diff --git a/scripts/test_some_write_keys_others_read_them b/scripts/test_some_write_keys_others_read_them deleted file mode 100644 index c6c1fac20..000000000 --- a/scripts/test_some_write_keys_others_read_them +++ /dev/null @@ -1,138 +0,0 @@ -#!/usr/bin/env python3 - -""" -Test performing the following scenarios on behalf of multiple users in parallel: -- some users cyclically update their own verkeys, -- other users cyclically read verkeys of the former users. 
- -To run the test execute this python script providing the following parameters: --w or --writers --r or --readers --i or --iterations --t or --timeout (optional parameter) - -Examples: - -test_some_write_keys_others_read_them -w 2 -r 8 -i 10 -t 30 - -test_some_write_keys_others_read_them --writers 4 --readers 20 --iterations 50 -""" - -import argparse -import os -from concurrent import futures -from concurrent.futures import ProcessPoolExecutor -from datetime import datetime - -from stp_core.common.log import getlogger - -from indy_client.utils.user_scenarios import generateNymsData, \ - NymsCreationScenario, KeyRotationScenario, ForeignKeysReadScenario - -STEWARD1_SEED = b"000000000000000000000000Steward1" - -logger = getlogger() - - -def parseArgs(): - parser = argparse.ArgumentParser() - - parser.add_argument("-w", "--writers", - type=int, - required=True, - dest="writers", - help="number of writers") - - parser.add_argument("-r", "--readers", - type=int, - required=True, - dest="readers", - help="number of readers") - - parser.add_argument("-i", "--iterations", - type=int, - required=True, - dest="iterations", - help="number of iterations") - - parser.add_argument("-t", "--timeout", - type=int, - dest="timeout", - help="timeout in seconds") - - return parser.parse_args() - - -def main(args): - numOfWriters = args.writers - numOfReaders = args.readers - numOfIterations = args.iterations - timeout = args.timeout - - writers = generateNymsData(numOfWriters) - readers = generateNymsData(numOfReaders) - - logDir = os.path.join(os.getcwd(), "test-logs-{}".format( - datetime.now().strftime("%Y-%m-%dT%H-%M-%S"))) - - with ProcessPoolExecutor(numOfWriters + numOfReaders) as executor: - usersIdsAndVerkeys = [(user.identifier, user.verkey) - for user in writers + readers] - - nymsCreationScenarioFuture = \ - executor.submit(NymsCreationScenario.runInstance, - seed=STEWARD1_SEED, - nymsIdsAndVerkeys=usersIdsAndVerkeys, - logFileName=os.path.join( - logDir, - 
"nyms-creator-{}.log".format( - STEWARD1_SEED.decode()))) - - nymsCreationScenarioFuture.result(timeout=timeout) - logger.info("Created {} nyms".format(numOfWriters + numOfReaders)) - - keyRotationScenariosFutures = \ - [executor.submit(KeyRotationScenario.runInstance, - seed=writer.seed, - iterations=numOfIterations, - logFileName=os.path.join( - logDir, - "writer-{}.log".format(writer.seed.decode()))) - for writer in writers] - - writersIds = [writer.identifier for writer in writers] - - foreignKeysReadScenariosFutures = \ - [executor.submit(ForeignKeysReadScenario.runInstance, - seed=reader.seed, - nymsIds=writersIds, - iterations=numOfIterations, - logFileName=os.path.join( - logDir, - "reader-{}.log".format(reader.seed.decode()))) - for reader in readers] - - futures.wait(keyRotationScenariosFutures + - foreignKeysReadScenariosFutures, - timeout=timeout) - - failed = False - for future in keyRotationScenariosFutures + \ - foreignKeysReadScenariosFutures: - ex = future.exception(timeout=0) - if ex: - failed = True - logger.exception(ex, exc_info=ex) - - if failed: - logger.error("Scenarios of some writers or readers failed") - else: - logger.info("Scenarios of all writers and readers " - "finished successfully") - - logger.info("Logs of worker processes were also written to {}" - .format(logDir)) - - -if __name__ == "__main__": - main(parseArgs()) diff --git a/scripts/test_users_write_and_read_own_keys b/scripts/test_users_write_and_read_own_keys deleted file mode 100644 index 11fd28904..000000000 --- a/scripts/test_users_write_and_read_own_keys +++ /dev/null @@ -1,113 +0,0 @@ -#!/usr/bin/env python3 - -""" -Test performing the following scenario on behalf of multiple users in parallel: -- each user cyclically updates and reads his/her own verkey. 
- -To run the test execute this python script providing the following parameters: --u or --users --i or --iterations --t or --timeout (optional parameter) - -Examples: - -test_users_write_and_read_own_keys -u 8 -i 10 -t 60 - -test_users_write_and_read_own_keys --users 20 --iterations 50 -""" - -import argparse -import os -from concurrent import futures -from concurrent.futures import ProcessPoolExecutor -from datetime import datetime - -from stp_core.common.log import getlogger - -from indy_client.utils.user_scenarios import generateNymsData, \ - NymsCreationScenario, KeyRotationAndReadScenario - -STEWARD1_SEED = b"000000000000000000000000Steward1" - -logger = getlogger() - - -def parseArgs(): - parser = argparse.ArgumentParser() - - parser.add_argument("-u", "--users", - type=int, - required=True, - dest="users", - help="number of users") - - parser.add_argument("-i", "--iterations", - type=int, - required=True, - dest="iterations", - help="number of iterations") - - parser.add_argument("-t", "--timeout", - type=int, - dest="timeout", - help="timeout in seconds") - - return parser.parse_args() - - -def main(args): - numOfUsers = args.users - numOfIterations = args.iterations - timeout = args.timeout - - users = generateNymsData(numOfUsers) - - logDir = os.path.join(os.getcwd(), "test-logs-{}".format( - datetime.now().strftime("%Y-%m-%dT%H-%M-%S"))) - - with ProcessPoolExecutor(numOfUsers) as executor: - usersIdsAndVerkeys = [(user.identifier, user.verkey) - for user in users] - - nymsCreationScenarioFuture = \ - executor.submit(NymsCreationScenario.runInstance, - seed=STEWARD1_SEED, - nymsIdsAndVerkeys=usersIdsAndVerkeys, - logFileName=os.path.join( - logDir, - "nyms-creator-{}.log".format( - STEWARD1_SEED.decode()))) - - nymsCreationScenarioFuture.result(timeout=timeout) - logger.info("Created {} nyms".format(numOfUsers)) - - keyRotationAndReadScenariosFutures = \ - [executor.submit(KeyRotationAndReadScenario.runInstance, - seed=user.seed, - 
iterations=numOfIterations, - logFileName=os.path.join( - logDir, - "user-{}.log".format(user.seed.decode()))) - for user in users] - - futures.wait(keyRotationAndReadScenariosFutures, - timeout=timeout) - - failed = False - for future in keyRotationAndReadScenariosFutures: - ex = future.exception(timeout=0) - if ex: - failed = True - logger.exception(ex, exc_info=ex) - - if failed: - logger.error("Scenarios of some users failed") - else: - logger.info("Scenarios of all users finished successfully") - - logger.info("Logs of worker processes were also written to {}" - .format(logDir)) - - -if __name__ == "__main__": - main(parseArgs()) diff --git a/scripts/validator-info b/scripts/validator-info index eeed67342..7eade6dd4 100755 --- a/scripts/validator-info +++ b/scripts/validator-info @@ -70,10 +70,8 @@ class FloatUnknown(BaseUnknown): class TimestampUnknown(BaseUnknown): def _str(self): return "{}".format( - datetime.datetime.fromtimestamp(self.val).strftime( - "%A, %B %{0}d, %Y %{0}I:%M:%S %p".format( - '#' if os.name == 'nt' else '-')) - ) + time.strftime("%A, %B %{0}d, %Y %{0}I:%M:%S %p %z".format('#' if os.name == 'nt' else '-'), + time.localtime(self.val))) class UptimeUnknown(BaseUnknown): @@ -98,10 +96,14 @@ class StateUnknown(BaseUnknown): class NodesListUnknown(BaseUnknown): def __init__(self, val): - super().__init__([] if val is None else val) + super().__init__({} if val is None else {rn[0]: rn[1] for rn in val}) def _str(self): - return "\n".join("# {}".format(alias) for alias in self.val) + if self.val: + return "\n".join([" {}\t{}".format(pr_n, "({})".format(r_idx) if r_idx is not None else "") + for pr_n, r_idx in self.val.items()]) + else: + return "" def __iter__(self): return iter(self.val) @@ -257,12 +259,18 @@ class BindingStats(BaseUnknown): ], False) -class TransactionsStats(BaseStats): - shema = [ - ("config", BaseUnknown), - ("ledger", BaseUnknown), - ("pool", BaseUnknown) - ] +class TransactionsStats(BaseUnknown): + def __init__(self, 
val): + super().__init__({} if val is None else val) + + def _str(self): + if self.val: + return "\n".join([" Total {} Transactions: {}".format(ledger, cnt) for ledger, cnt in self.val.items()]) + else: + return "" + + def __iter__(self): + return iter(self.val) class AverageStats(BaseStats): @@ -285,8 +293,11 @@ class NodeStats(BaseStats): ("Name", BaseUnknown), ("did", BaseUnknown), ("verkey", BaseUnknown), - ("Node_port", BindingStats), - ("Client_port", BindingStats), + ("BLS_key", BaseUnknown), + ("Node_port", BaseUnknown), + ("Client_port", BaseUnknown), + ("Node_ip", BaseUnknown), + ("Client_ip", BaseUnknown), ("Metrics", MetricsStats) ] @@ -452,19 +463,17 @@ class ValidatorStats(BaseStats): # will drop visibility of output lines = [ "Validator {} is {}".format(self['Node_info']['Name'], self['state']), - "#Current time: {}".format(self['timestamp']), + "Update time: {}".format(self['timestamp']), "Validator DID: {}".format(self['Node_info']['did']), "Verification Key: {}".format(self['Node_info']['verkey']), - "Node Port: {}".format(self['Node_info']['Node_port']), - "Client Port: {}".format(self['Node_info']['Client_port']), + "BLS Key: {}".format(self['Node_info']['BLS_key']), + "Node HA: {}:{}".format(self['Node_info']['Node_ip'], self['Node_info']['Node_port']), + "Client HA: {}:{}".format(self['Node_info']['Client_ip'], self['Node_info']['Client_port']), "Metrics:", - " Uptime: {}".format(self['Node_info']['Metrics']['uptime']), - "# Total Config Transactions: {}".format( - self['Node_info']['Metrics']['transaction-count']['config']), - " Total Ledger Transactions: {}".format( - self['Node_info']['Metrics']['transaction-count']['ledger']), - " Total Pool Transactions: {}".format( - self['Node_info']['Metrics']['transaction-count']['pool']), + " Uptime: {}".format(self['Node_info']['Metrics']['uptime']) + ] + [ + str(self['Node_info']['Metrics']['transaction-count']) + ] + [ " Read Transactions/Seconds: {}".format( 
self['Node_info']['Metrics']['average-per-second']['read-transactions']), " Write Transactions/Seconds: {}".format( @@ -473,28 +482,27 @@ class ValidatorStats(BaseStats): self['Pool_info']['Reachable_nodes_count'], self['Pool_info']['Total_nodes_count']) ] + [ - "# {}".format(alias) - for alias in self['Pool_info']['Reachable_nodes'] + str(self['Pool_info']['Reachable_nodes']) ] + [ "Unreachable Hosts: {}/{}".format( self['Pool_info']['Unreachable_nodes_count'], self['Pool_info']['Total_nodes_count'] ) ] + [ - "# {}".format(alias) - for alias in self['Pool_info']['Unreachable_nodes'] + str(self['Pool_info']['Unreachable_nodes']) ] + [ - "#Software Versions:" + "Software Versions:" ] + [ - "# {}: {}".format(pkgName, self['software'][pkgName]) + " {}: {}".format(pkgName, self['software'][pkgName]) for pkgName in self['software'].keys() ] # skip lines with started with '#' if not verbose # or remove '#' otherwise + # return "\n".join(lines) return ("\n".join( [l[(1 if l[0] == '#' else 0):] - for l in lines if self._verbose or l[0] != '#']) + for l in lines if self._verbose or (l and l[0] != '#')]) ) @@ -519,7 +527,7 @@ async def handle_client(client_reader, client_writer): else: logger.debug("Received data: {}".format(data)) stats = json.loads(data.decode()) - print(json.dumps(stats, indent=2, cls=NewEncoder)) + print(json.dumps(stats, indent=2, cls=NewEncoder, sort_keys=True)) def accept_client(client_reader, client_writer): @@ -567,7 +575,7 @@ def nagios(vstats): "{} {}_Unreachable_Validators unreachable_validators={} {} Unreachable Validators".format( state,vstats['Node_info']['Name'],vstats['Pool_info']['Unreachable_nodes_count'],vstats['Node_info']['Name']) ] - return "\n".join(lines); + return "\n".join(lines) def get_stats_from_file(stats, verbose, _json, _nagios): @@ -576,7 +584,7 @@ def get_stats_from_file(stats, verbose, _json, _nagios): vstats = ValidatorStats(stats, verbose) if _json: - return json.dumps(vstats, indent=2, cls=NewEncoder) + return 
json.dumps(vstats, indent=2, cls=NewEncoder, sort_keys=True) if _nagios: return nagios(vstats) @@ -617,7 +625,7 @@ def format_value(value): def create_print_tree(stats: dict, indent=0, lines=[]): - for key, value in stats.items(): + for key, value in sorted(stats.items(), key=lambda x: x[0]): if isinstance(value, dict): lines.append(make_indent(indent) + format_key(key)) create_print_tree(value, indent + 1, lines) @@ -805,25 +813,36 @@ def main(): # loop.close() # else: all_paths = glob(os.path.join(args.basedir, "*_info.json")) - info_paths = [] - additional_paths = [] + + files_by_node = dict() + for path in all_paths: - if path.find("additional") != -1: - additional_paths.append(path) + bn = os.path.basename(path) + if not bn: + continue + node_name = bn.split("_", maxsplit=1)[0] + if "additional" in bn: + files_by_node.setdefault(node_name, {}).update({"additional": path}) + elif "version" in bn: + files_by_node.setdefault(node_name, {}).update({"version": path}) else: - info_paths.append(path) - if not info_paths: + files_by_node.setdefault(node_name, {}).update({"info": path}) + if not files_by_node: print('There are no info files in {}'.format(args.basedir)) return if args.json: - out_json = compile_json_ouput(info_paths + additional_paths) + allf = [] + for n, ff in files_by_node.items(): + allf.extend([v for k, v in ff.items()]) + out_json = compile_json_ouput(allf) if out_json: - print(json.dumps(out_json)) + print(json.dumps(out_json, sort_keys=True)) sys.exit(0) - for file_path in info_paths: - json_data = read_json(file_path) + for node in files_by_node: + inf_ver = [v for k, v in files_by_node[node].items() if k in ["info", "version"]] + json_data = compile_json_ouput(inf_ver) if json_data: if args.verbose: print("{}".format(os.linesep).join(create_print_tree(json_data, lines=[]))) @@ -832,7 +851,10 @@ def main(): print('\n') if args.verbose: - for file_path in additional_paths: + for node in files_by_node: + file_path = 
files_by_node[node].get("additional", "") + if not file_path: + continue json_data = read_json(file_path) if json_data: print("{}".format(os.linesep).join(create_print_tree(json_data, lines=[]))) diff --git a/setup.py b/setup.py index 924a30bf4..4b58a4aa3 100644 --- a/setup.py +++ b/setup.py @@ -56,8 +56,7 @@ data_files=[( (BASE_DIR, ['data/nssm_original.exe']) )], - install_requires=['indy-plenum-dev==1.6.565', - 'indy-anoncreds-dev==1.0.32', + install_requires=['indy-plenum-dev==1.6.619', 'python-dateutil', 'timeout-decorator==0.4.0'], setup_requires=['pytest-runner'], @@ -65,10 +64,7 @@ 'tests': tests_require }, tests_require=tests_require, - scripts=['scripts/indy', - 'scripts/add_new_node', - 'scripts/reset_client', - 'scripts/start_indy_node', + scripts=['scripts/start_indy_node', 'scripts/start_node_control_tool', 'scripts/clear_node.py', 'scripts/get_keys', @@ -88,14 +84,10 @@ 'scripts/restart_upgrade_agent.bat', 'scripts/install_nssm.bat', 'scripts/read_ledger', - 'scripts/test_some_write_keys_others_read_them', - 'scripts/test_users_write_and_read_own_keys', 'scripts/validator-info', 'scripts/validator-info-history', 'scripts/init_bls_keys', - 'scripts/enable_bls', 'scripts/create_dirs.sh', - 'scripts/indy_old_cli_export_dids', 'scripts/setup_iptables', 'scripts/setup_indy_node_iptables', 'scripts/current_validators', diff --git a/tools/diagnostics/nsreplay b/tools/diagnostics/nsreplay index defb0ddb5..96ea21748 100755 --- a/tools/diagnostics/nsreplay +++ b/tools/diagnostics/nsreplay @@ -27,9 +27,6 @@ # > ln -s ../opt/openssl/include/openssl . # The above fix was taken from: # https://www.anintegratedworld.com/mac-osx-fatal-error-opensslsha-h-file-not-found/ -# Prehaps adding the above two lines between `brew install openssl` and the -# "# PBC" section of https://github.com/hyperledger/indy-anoncreds/blob/master/setup-charm-homebrew.sh -# would be sufficient? 
from indy_common.config_helper import NodeConfigHelper from indy_common.config_util import getConfig @@ -277,7 +274,7 @@ class NodeStateReplayer: node_name) return Recorder(node_rec_kv_store, skip_metadata_write=True), \ - Recorder(client_rec_kv_store, skip_metadata_write=True) + Recorder(client_rec_kv_store, skip_metadata_write=True) def update_loaded_config(self, config): config.STACK_COMPANION = 2