Add test for query spo-stake-distribution
palas committed Aug 1, 2024
1 parent daf6a7a commit 5f407d5
Showing 3 changed files with 50 additions and 0 deletions.
14 changes: 14 additions & 0 deletions cabal.project
@@ -32,6 +32,20 @@ packages:
trace-resources
trace-forward

source-repository-package
type: git
location: https://github.com/IntersectMBO/cardano-api.git
tag: d06c818ac67216ab7a60159827e6b8fccd7ef0ea
--sha256: sha256-AK0UEpOJeyTuTsI3vwAZxyg5xfLJFx7sRP4iti1tj1s=
subdir: cardano-api

source-repository-package
type: git
location: https://github.com/IntersectMBO/cardano-cli.git
tag: 1810f26b6e1b022adcb9327df3f297af9b5d9247
--sha256: sha256-4wz1Z11BuAds70rGFHEqZ1OMlJuuB/v8zDMx0+O9g5U=
subdir: cardano-cli

extra-packages: Cabal

program-options
1 change: 1 addition & 0 deletions cardano-testnet/cardano-testnet.cabal
@@ -235,6 +235,7 @@ test-suite cardano-testnet-test
, hedgehog
, hedgehog-extras
, http-conduit
, lens
, lens-aeson
, microlens
, mtl
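The new lens dependency provides the (^?) preview operator that the test below uses to index into JSON safely. A minimal, hypothetical sketch of (^?), not part of the commit:

import Control.Lens ((^?), ix)

main :: IO ()
main = do
  -- (^?) returns the first target of a traversal, or Nothing if there is none
  print ([10, 20, 30 :: Int] ^? ix 1)  -- Just 20
  print ([10, 20, 30 :: Int] ^? ix 5)  -- Nothing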
@@ -30,11 +30,13 @@ import Cardano.Testnet

import Prelude

import Control.Lens ((^?))
import Control.Monad (forM_)
import Control.Monad.Catch (MonadCatch)
import Data.Aeson (eitherDecodeStrictText)
import qualified Data.Aeson as Aeson
import qualified Data.Aeson.KeyMap as Aeson
import qualified Data.Aeson.Lens as Aeson
import Data.Bifunctor (bimap)
import Data.Data (type (:~:) (Refl))
import Data.Either.Extra (mapLeft)
@@ -209,6 +211,39 @@ hprop_cli_queries = integrationWorkspace "cli-queries" $ \tempAbsBasePath' -> H.
H.noteM_ $ execCli' execConfig [ eraName, "query", "stake-distribution"
, "--out-file", stakePoolsOutFile ]

TestQuerySPOStakeDistributionCmd ->
-- spo-stake-distribution
do
-- Query all SPOs
aesonSpoDist :: Aeson.Value <- execCliStdoutToJson execConfig [ eraName, "query", "spo-stake-distribution", "--all-spos" ]
secondHash <- H.evalMaybe $ T.unpack <$> aesonSpoDist ^? Aeson.nth 1 . Aeson.nth 0 . Aeson._String
secondAmount <- H.evalMaybe $ aesonSpoDist ^? Aeson.nth 1 . Aeson.nth 1 . Aeson._Number

-- Query the individual SPO using the hash from the previous result and ensure the values match
secondSpoInfo :: Aeson.Value <- execCliStdoutToJson execConfig [ eraName, "query", "spo-stake-distribution", "--spo-key-hash", secondHash ]
individualHash <- H.evalMaybe $ T.unpack <$> secondSpoInfo ^? Aeson.nth 0 . Aeson.nth 0 . Aeson._String
individualAmount <- H.evalMaybe $ secondSpoInfo ^? Aeson.nth 0 . Aeson.nth 1 . Aeson._Number
secondHash === individualHash
secondAmount === individualAmount

-- Query the individual SPO using the SPO's verification key file
let spoKey = verificationKey . poolNodeKeysCold $ Defaults.defaultSpoKeys 1
fileQueryResult :: Aeson.Value <- execCliStdoutToJson execConfig [ eraName, "query", "spo-stake-distribution"
, "--spo-verification-key-file", unFile spoKey
]
fileQueryHash <- H.evalMaybe $ T.unpack <$> fileQueryResult ^? Aeson.nth 0 . Aeson.nth 0 . Aeson._String
fileQueryAmount <- H.evalMaybe $ fileQueryResult ^? Aeson.nth 0 . Aeson.nth 1 . Aeson._Number

-- Query the individual SPO using the SPO's bech32-encoded verification key and compare to the previous result
delegatorVKey :: VerificationKey StakePoolKey <- readVerificationKeyFromFile AsStakePoolKey work spoKey
keyQueryResult :: Aeson.Value <- execCliStdoutToJson execConfig [ eraName, "query", "spo-stake-distribution"
, "--spo-verification-key", T.unpack $ serialiseToBech32 delegatorVKey
]
keyQueryHash <- H.evalMaybe $ T.unpack <$> keyQueryResult ^? Aeson.nth 0 . Aeson.nth 0 . Aeson._String
keyQueryAmount <- H.evalMaybe $ keyQueryResult ^? Aeson.nth 0 . Aeson.nth 1 . Aeson._Number
fileQueryHash === keyQueryHash
fileQueryAmount === keyQueryAmount

TestQueryStakeAddressInfoCmd ->
-- stake-address-info
do
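For context, here is a small, self-contained sketch (not from the commit) of the Data.Aeson.Lens traversals the new test relies on. It assumes the spo-stake-distribution output is a JSON array of [poolKeyHash, stake] pairs, which is the shape the nth-based indexing above implies; the sample values are invented.

{-# LANGUAGE OverloadedStrings #-}
import Control.Lens ((^?))
import qualified Data.Aeson.Lens as Aeson
import qualified Data.Text as T

main :: IO ()
main = do
  -- Invented sample mirroring the assumed [[hash, stake], ...] shape
  let sample :: T.Text
      sample = "[[\"pool1aaa\",3000000],[\"pool2bbb\",2000000]]"
  -- nth 1 selects the second SPO entry; nth 0 / nth 1 then pick its hash and stake
  print $ sample ^? Aeson.nth 1 . Aeson.nth 0 . Aeson._String  -- Just "pool2bbb"
  print $ sample ^? Aeson.nth 1 . Aeson.nth 1 . Aeson._Number  -- Just 2000000.0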
