From bb66cc2920b1b65fbddbf39a8bf4e59976954597 Mon Sep 17 00:00:00 2001 From: Makram Date: Wed, 11 Dec 2024 21:31:57 +0200 Subject: [PATCH 01/15] deployment/ccip/changeset: optional MCMS for promote candidate (#15641) * deployment/ccip/changeset: optional MCMS for promote candidate Add an optional MCMS config that, when nil, will simply execute the transactions to promote the candidate config using the deployer key. When non-nil, it only generates the MCMS proposals. Updated some of the validation as well and removed previous validation that was incorrect. * fix DonIDForChain * pr feedback --- .../ccip/changeset/cs_add_chain_test.go | 15 +- ...cs_active_candidate.go => cs_ccip_home.go} | 146 ++++++++++++------ ...candidate_test.go => cs_ccip_home_test.go} | 133 +++++++++++++++- .../ccip/changeset/cs_initial_add_chain.go | 10 +- .../changeset/internal/deploy_home_chain.go | 24 ++- 5 files changed, 261 insertions(+), 67 deletions(-) rename deployment/ccip/changeset/{cs_active_candidate.go => cs_ccip_home.go} (68%) rename deployment/ccip/changeset/{cs_active_candidate_test.go => cs_ccip_home_test.go} (70%) diff --git a/deployment/ccip/changeset/cs_add_chain_test.go b/deployment/ccip/changeset/cs_add_chain_test.go index 84a8ad817e1..b21d7411ce7 100644 --- a/deployment/ccip/changeset/cs_add_chain_test.go +++ b/deployment/ccip/changeset/cs_add_chain_test.go @@ -153,15 +153,15 @@ func TestAddChainInbound(t *testing.T) { // transfer ownership to timelock _, err = commonchangeset.ApplyChangesets(t, e.Env, map[uint64]*commonchangeset.TimelockExecutionContracts{ - initialDeploy[0]: &commonchangeset.TimelockExecutionContracts{ + initialDeploy[0]: { Timelock: state.Chains[initialDeploy[0]].Timelock, CallProxy: state.Chains[initialDeploy[0]].CallProxy, }, - initialDeploy[1]: &commonchangeset.TimelockExecutionContracts{ + initialDeploy[1]: { Timelock: state.Chains[initialDeploy[1]].Timelock, CallProxy: state.Chains[initialDeploy[1]].CallProxy, }, - initialDeploy[2]: &commonchangeset.TimelockExecutionContracts{ + initialDeploy[2]: { Timelock: state.Chains[initialDeploy[2]].Timelock, CallProxy: state.Chains[initialDeploy[2]].CallProxy, }, @@ -195,11 +195,11 @@ func TestAddChainInbound(t *testing.T) { } _, err = commonchangeset.ApplyChangesets(t, e.Env, map[uint64]*commonchangeset.TimelockExecutionContracts{ - e.HomeChainSel: &commonchangeset.TimelockExecutionContracts{ + e.HomeChainSel: { Timelock: state.Chains[e.HomeChainSel].Timelock, CallProxy: state.Chains[e.HomeChainSel].CallProxy, }, - newChain: &commonchangeset.TimelockExecutionContracts{ + newChain: { Timelock: state.Chains[newChain].Timelock, CallProxy: state.Chains[newChain].CallProxy, }, @@ -238,8 +238,11 @@ func TestAddChainInbound(t *testing.T) { Changeset: commonchangeset.WrapChangeSet(PromoteAllCandidatesChangeset), Config: PromoteAllCandidatesChangesetConfig{ HomeChainSelector: e.HomeChainSel, - NewChainSelector: newChain, + DONChainSelector: newChain, NodeIDs: nodeIDs, + MCMS: &MCMSConfig{ + MinDelay: 0, + }, }, }, }) diff --git a/deployment/ccip/changeset/cs_active_candidate.go b/deployment/ccip/changeset/cs_ccip_home.go similarity index 68% rename from deployment/ccip/changeset/cs_active_candidate.go rename to deployment/ccip/changeset/cs_ccip_home.go index 572a4a75f8e..202d4216b60 100644 --- a/deployment/ccip/changeset/cs_active_candidate.go +++ b/deployment/ccip/changeset/cs_ccip_home.go @@ -1,9 +1,11 @@ package changeset import ( + "context" "fmt" "math/big" + "github.com/ethereum/go-ethereum/accounts/abi/bind" 
"github.com/ethereum/go-ethereum/common" "github.com/smartcontractkit/ccip-owner-contracts/pkg/gethwrappers" "github.com/smartcontractkit/ccip-owner-contracts/pkg/proposal/mcms" @@ -24,46 +26,70 @@ var ( type PromoteAllCandidatesChangesetConfig struct { HomeChainSelector uint64 - NewChainSelector uint64 - NodeIDs []string + // DONChainSelector is the chain selector of the DON that we want to promote the candidate config of. + // Note that each (chain, ccip capability version) pair has a unique DON ID. + DONChainSelector uint64 + NodeIDs []string + MCMS *MCMSConfig } func (p PromoteAllCandidatesChangesetConfig) Validate(e deployment.Environment, state CCIPOnChainState) (deployment.Nodes, error) { - if p.HomeChainSelector == 0 { - return nil, fmt.Errorf("HomeChainSelector must be set") + if err := deployment.IsValidChainSelector(p.HomeChainSelector); err != nil { + return nil, fmt.Errorf("home chain selector invalid: %w", err) } - if p.NewChainSelector == 0 { - return nil, fmt.Errorf("NewChainSelector must be set") + if err := deployment.IsValidChainSelector(p.DONChainSelector); err != nil { + return nil, fmt.Errorf("don chain selector invalid: %w", err) } if len(p.NodeIDs) == 0 { return nil, fmt.Errorf("NodeIDs must be set") } + if state.Chains[p.HomeChainSelector].CCIPHome == nil { + return nil, fmt.Errorf("CCIPHome contract does not exist") + } + if state.Chains[p.HomeChainSelector].CapabilityRegistry == nil { + return nil, fmt.Errorf("CapabilityRegistry contract does not exist") + } nodes, err := deployment.NodeInfo(p.NodeIDs, e.Offchain) if err != nil { return nil, fmt.Errorf("fetch node info: %w", err) } - donID, exists, err := internal.DonIDForChain( + donID, err := internal.DonIDForChain( state.Chains[p.HomeChainSelector].CapabilityRegistry, state.Chains[p.HomeChainSelector].CCIPHome, - p.NewChainSelector, + p.DONChainSelector, ) if err != nil { return nil, fmt.Errorf("fetch don id for chain: %w", err) } - if !exists { - return nil, fmt.Errorf("don id for chain(%d) does not exist", p.NewChainSelector) + if donID == 0 { + return nil, fmt.Errorf("don doesn't exist in CR for chain %d", p.DONChainSelector) } - // check if the DON ID has a candidate digest set that we can promote - for _, pluginType := range []cctypes.PluginType{cctypes.PluginTypeCCIPCommit, cctypes.PluginTypeCCIPExec} { - candidateDigest, err := state.Chains[p.HomeChainSelector].CCIPHome.GetCandidateDigest(nil, donID, uint8(pluginType)) - if err != nil { - return nil, fmt.Errorf("error fetching candidate digest for pluginType(%s): %w", pluginType.String(), err) - } - if candidateDigest == [32]byte{} { - return nil, fmt.Errorf("candidate digest is zero, must be non-zero to promote") - } + + // Check that candidate digest and active digest are not both zero - this is enforced onchain. 
+ commitConfigs, err := state.Chains[p.HomeChainSelector].CCIPHome.GetAllConfigs(&bind.CallOpts{ + Context: context.Background(), + }, donID, uint8(cctypes.PluginTypeCCIPCommit)) + if err != nil { + return nil, fmt.Errorf("fetching commit configs from cciphome: %w", err) + } + + execConfigs, err := state.Chains[p.HomeChainSelector].CCIPHome.GetAllConfigs(&bind.CallOpts{ + Context: context.Background(), + }, donID, uint8(cctypes.PluginTypeCCIPExec)) + if err != nil { + return nil, fmt.Errorf("fetching exec configs from cciphome: %w", err) + } + + if commitConfigs.ActiveConfig.ConfigDigest == [32]byte{} && + commitConfigs.CandidateConfig.ConfigDigest == [32]byte{} { + return nil, fmt.Errorf("commit active and candidate config digests are both zero") + } + + if execConfigs.ActiveConfig.ConfigDigest == [32]byte{} && + execConfigs.CandidateConfig.ConfigDigest == [32]byte{} { + return nil, fmt.Errorf("exec active and candidate config digests are both zero") } return nodes, nil @@ -85,33 +111,44 @@ func PromoteAllCandidatesChangeset( return deployment.ChangesetOutput{}, fmt.Errorf("%w: %w", deployment.ErrInvalidConfig, err) } + txOpts := e.Chains[cfg.HomeChainSelector].DeployerKey + if cfg.MCMS != nil { + txOpts = deployment.SimTransactOpts() + } + + homeChain := e.Chains[cfg.HomeChainSelector] + promoteCandidateOps, err := promoteAllCandidatesForChainOps( + homeChain, + txOpts, state.Chains[cfg.HomeChainSelector].CapabilityRegistry, state.Chains[cfg.HomeChainSelector].CCIPHome, - cfg.NewChainSelector, + cfg.DONChainSelector, nodes.NonBootstraps(), + cfg.MCMS != nil, ) if err != nil { - return deployment.ChangesetOutput{}, err + return deployment.ChangesetOutput{}, fmt.Errorf("generating promote candidate ops: %w", err) } - var ( - timelocksPerChain = map[uint64]common.Address{ + // Disabled MCMS means that we already executed the txes, so just return early w/out the proposals. 
+ if cfg.MCMS == nil { + return deployment.ChangesetOutput{}, nil + } + + prop, err := proposalutils.BuildProposalFromBatches( + map[uint64]common.Address{ cfg.HomeChainSelector: state.Chains[cfg.HomeChainSelector].Timelock.Address(), - } - proposerMCMSes = map[uint64]*gethwrappers.ManyChainMultiSig{ + }, + map[uint64]*gethwrappers.ManyChainMultiSig{ cfg.HomeChainSelector: state.Chains[cfg.HomeChainSelector].ProposerMcm, - } - ) - prop, err := proposalutils.BuildProposalFromBatches( - timelocksPerChain, - proposerMCMSes, + }, []timelock.BatchChainOperation{{ ChainIdentifier: mcms.ChainIdentifier(cfg.HomeChainSelector), Batch: promoteCandidateOps, }}, "promoteCandidate for commit and execution", - 0, // minDelay + cfg.MCMS.MinDelay, ) if err != nil { return deployment.ChangesetOutput{}, err @@ -206,13 +243,14 @@ func setCandidateOnExistingDon( nodes deployment.Nodes, ) ([]mcms.Operation, error) { // fetch DON ID for the chain - donID, exists, err := internal.DonIDForChain(capReg, ccipHome, chainSelector) + donID, err := internal.DonIDForChain(capReg, ccipHome, chainSelector) if err != nil { return nil, fmt.Errorf("fetch don id for chain: %w", err) } - if !exists { - return nil, fmt.Errorf("don id for chain(%d) does not exist", chainSelector) + if donID == 0 { + return nil, fmt.Errorf("don doesn't exist in CR for chain %d", chainSelector) } + fmt.Printf("donID: %d", donID) encodedSetCandidateCall, err := internal.CCIPHomeABI.Pack( "setCandidate", @@ -251,19 +289,21 @@ func setCandidateOnExistingDon( } // promoteCandidateOp will create the MCMS Operation for `promoteCandidateAndRevokeActive` directed towards the capabilityRegistry -func promoteCandidateOp(donID uint32, pluginType uint8, capReg *capabilities_registry.CapabilitiesRegistry, - ccipHome *ccip_home.CCIPHome, nodes deployment.Nodes) (mcms.Operation, error) { - +func promoteCandidateOp( + homeChain deployment.Chain, + txOpts *bind.TransactOpts, + donID uint32, + pluginType uint8, + capReg *capabilities_registry.CapabilitiesRegistry, + ccipHome *ccip_home.CCIPHome, + nodes deployment.Nodes, + mcmsEnabled bool, +) (mcms.Operation, error) { allConfigs, err := ccipHome.GetAllConfigs(nil, donID, pluginType) if err != nil { return mcms.Operation{}, err } - if allConfigs.CandidateConfig.ConfigDigest == [32]byte{} { - return mcms.Operation{}, fmt.Errorf("candidate digest is empty, expected nonempty") - } - fmt.Printf("commit candidate digest after setCandidate: %x\n", allConfigs.CandidateConfig.ConfigDigest) - encodedPromotionCall, err := internal.CCIPHomeABI.Pack( "promoteCandidateAndRevokeActive", donID, @@ -276,7 +316,7 @@ func promoteCandidateOp(donID uint32, pluginType uint8, capReg *capabilities_reg } updateDonTx, err := capReg.UpdateDON( - deployment.SimTransactOpts(), + txOpts, donID, nodes.PeerIDs(), []capabilities_registry.CapabilitiesRegistryCapabilityConfiguration{ @@ -291,6 +331,13 @@ func promoteCandidateOp(donID uint32, pluginType uint8, capReg *capabilities_reg if err != nil { return mcms.Operation{}, fmt.Errorf("error creating updateDon op for donID(%d) and plugin type (%d): %w", donID, pluginType, err) } + if !mcmsEnabled { + _, err = deployment.ConfirmIfNoError(homeChain, updateDonTx, err) + if err != nil { + return mcms.Operation{}, fmt.Errorf("error confirming updateDon call for donID(%d) and plugin type (%d): %w", donID, pluginType, err) + } + } + return mcms.Operation{ To: capReg.Address(), Data: updateDonTx.Data(), @@ -300,28 +347,31 @@ func promoteCandidateOp(donID uint32, pluginType uint8, capReg *capabilities_reg 
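// An illustrative sketch of the optional-MCMS behaviour described in the commit message,
// assuming the standard changeset calling convention and caller-supplied homeSel, donSel and
// nodeIDs; Test_PromoteCandidate further below shows the real usage via
// commonchangeset.ApplyChangesets.
func examplePromoteAllCandidates(e deployment.Environment, homeSel, donSel uint64, nodeIDs []string) error {
	// MCMS disabled (nil config): the changeset signs and confirms the UpdateDON calls with
	// the deployer key, so the returned ChangesetOutput carries no proposals.
	if _, err := PromoteAllCandidatesChangeset(e, PromoteAllCandidatesChangesetConfig{
		HomeChainSelector: homeSel,
		DONChainSelector:  donSel,
		NodeIDs:           nodeIDs,
		MCMS:              nil,
	}); err != nil {
		return err
	}

	// MCMS enabled (non-nil config): the calls are only simulated and packaged into a single
	// MCMS-with-timelock proposal that still has to be signed and executed.
	out, err := PromoteAllCandidatesChangeset(e, PromoteAllCandidatesChangesetConfig{
		HomeChainSelector: homeSel,
		DONChainSelector:  donSel,
		NodeIDs:           nodeIDs,
		MCMS:              &MCMSConfig{MinDelay: 0},
	})
	if err != nil {
		return err
	}
	_ = out.Proposals // one proposal targeting the CapabilitiesRegistry on the home chain
	return nil
}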
// promoteAllCandidatesForChainOps promotes the candidate commit and exec configs to active by calling promoteCandidateAndRevokeActive on CCIPHome through the UpdateDON call on CapReg contract func promoteAllCandidatesForChainOps( + homeChain deployment.Chain, + txOpts *bind.TransactOpts, capReg *capabilities_registry.CapabilitiesRegistry, ccipHome *ccip_home.CCIPHome, chainSelector uint64, nodes deployment.Nodes, + mcmsEnabled bool, ) ([]mcms.Operation, error) { // fetch DON ID for the chain - donID, exists, err := internal.DonIDForChain(capReg, ccipHome, chainSelector) + donID, err := internal.DonIDForChain(capReg, ccipHome, chainSelector) if err != nil { return nil, fmt.Errorf("fetch don id for chain: %w", err) } - if !exists { - return nil, fmt.Errorf("don id for chain(%d) does not exist", chainSelector) + if donID == 0 { + return nil, fmt.Errorf("don doesn't exist in CR for chain %d", chainSelector) } var mcmsOps []mcms.Operation - updateCommitOp, err := promoteCandidateOp(donID, uint8(cctypes.PluginTypeCCIPCommit), capReg, ccipHome, nodes) + updateCommitOp, err := promoteCandidateOp(homeChain, txOpts, donID, uint8(cctypes.PluginTypeCCIPCommit), capReg, ccipHome, nodes, mcmsEnabled) if err != nil { return nil, fmt.Errorf("promote candidate op: %w", err) } mcmsOps = append(mcmsOps, updateCommitOp) - updateExecOp, err := promoteCandidateOp(donID, uint8(cctypes.PluginTypeCCIPExec), capReg, ccipHome, nodes) + updateExecOp, err := promoteCandidateOp(homeChain, txOpts, donID, uint8(cctypes.PluginTypeCCIPExec), capReg, ccipHome, nodes, mcmsEnabled) if err != nil { return nil, fmt.Errorf("promote candidate op: %w", err) } diff --git a/deployment/ccip/changeset/cs_active_candidate_test.go b/deployment/ccip/changeset/cs_ccip_home_test.go similarity index 70% rename from deployment/ccip/changeset/cs_active_candidate_test.go rename to deployment/ccip/changeset/cs_ccip_home_test.go index 0efa6b62589..92784551957 100644 --- a/deployment/ccip/changeset/cs_active_candidate_test.go +++ b/deployment/ccip/changeset/cs_ccip_home_test.go @@ -3,6 +3,7 @@ package changeset import ( "testing" + "github.com/ethereum/go-ethereum/accounts/abi/bind" "github.com/ethereum/go-ethereum/common" "github.com/smartcontractkit/ccip-owner-contracts/pkg/gethwrappers" "github.com/smartcontractkit/ccip-owner-contracts/pkg/proposal/mcms" @@ -12,6 +13,7 @@ import ( "github.com/smartcontractkit/chainlink-testing-framework/lib/utils/testcontext" "github.com/smartcontractkit/chainlink/deployment/ccip/changeset/internal" + "github.com/smartcontractkit/chainlink/v2/core/capabilities/ccip/types" cctypes "github.com/smartcontractkit/chainlink/v2/core/capabilities/ccip/types" "github.com/smartcontractkit/chainlink/v2/core/gethwrappers/ccip/generated/router" @@ -107,9 +109,9 @@ func TestActiveCandidate(t *testing.T) { // [ACTIVE, CANDIDATE] setup by setting candidate through cap reg capReg, ccipHome := state.Chains[tenv.HomeChainSel].CapabilityRegistry, state.Chains[tenv.HomeChainSel].CCIPHome - donID, exists, err := internal.DonIDForChain(capReg, ccipHome, tenv.FeedChainSel) + donID, err := internal.DonIDForChain(capReg, ccipHome, tenv.FeedChainSel) require.NoError(t, err) - require.True(t, exists) + require.NotEqual(t, uint32(0), donID) donInfo, err := state.Chains[tenv.HomeChainSel].CapabilityRegistry.GetDON(nil, donID) require.NoError(t, err) require.Equal(t, 5, len(donInfo.NodeP2PIds)) @@ -218,7 +220,14 @@ func TestActiveCandidate(t *testing.T) { oldCandidateDigest, err := 
state.Chains[tenv.HomeChainSel].CCIPHome.GetCandidateDigest(nil, donID, uint8(cctypes.PluginTypeCCIPExec)) require.NoError(t, err) - promoteOps, err := promoteAllCandidatesForChainOps(state.Chains[tenv.HomeChainSel].CapabilityRegistry, state.Chains[tenv.HomeChainSel].CCIPHome, tenv.FeedChainSel, nodes.NonBootstraps()) + promoteOps, err := promoteAllCandidatesForChainOps( + tenv.Env.Chains[tenv.HomeChainSel], + deployment.SimTransactOpts(), + state.Chains[tenv.HomeChainSel].CapabilityRegistry, + state.Chains[tenv.HomeChainSel].CCIPHome, + tenv.FeedChainSel, + nodes.NonBootstraps(), + true) require.NoError(t, err) promoteProposal, err := proposalutils.BuildProposalFromBatches(timelocksPerChain, proposerMCMSes, []timelock.BatchChainOperation{{ ChainIdentifier: mcms.ChainIdentifier(tenv.HomeChainSel), @@ -251,3 +260,121 @@ func TestActiveCandidate(t *testing.T) { require.NoError(t, err) // [NEW ACTIVE, NO CANDIDATE] done sending successful request } + +func Test_PromoteCandidate(t *testing.T) { + for _, tc := range []struct { + name string + mcmsEnabled bool + }{ + { + name: "MCMS enabled", + mcmsEnabled: true, + }, + { + name: "MCMS disabled", + mcmsEnabled: false, + }, + } { + t.Run(tc.name, func(t *testing.T) { + ctx := testcontext.Get(t) + tenv := NewMemoryEnvironment(t, + WithChains(2), + WithNodes(4)) + state, err := LoadOnchainState(tenv.Env) + require.NoError(t, err) + + // Deploy to all chains. + allChains := maps.Keys(tenv.Env.Chains) + source := allChains[0] + dest := allChains[1] + + nodes, err := deployment.NodeInfo(tenv.Env.NodeIDs, tenv.Env.Offchain) + require.NoError(t, err) + + var nodeIDs []string + for _, node := range nodes { + nodeIDs = append(nodeIDs, node.NodeID) + } + + if tc.mcmsEnabled { + // Transfer ownership to timelock so that we can promote the zero digest later down the line. 
+ _, err = commonchangeset.ApplyChangesets(t, tenv.Env, map[uint64]*commonchangeset.TimelockExecutionContracts{ + source: { + Timelock: state.Chains[source].Timelock, + CallProxy: state.Chains[source].CallProxy, + }, + dest: { + Timelock: state.Chains[dest].Timelock, + CallProxy: state.Chains[dest].CallProxy, + }, + tenv.HomeChainSel: { + Timelock: state.Chains[tenv.HomeChainSel].Timelock, + CallProxy: state.Chains[tenv.HomeChainSel].CallProxy, + }, + }, []commonchangeset.ChangesetApplication{ + { + Changeset: commonchangeset.WrapChangeSet(commonchangeset.TransferToMCMSWithTimelock), + Config: genTestTransferOwnershipConfig(tenv, allChains, state), + }, + }) + require.NoError(t, err) + assertTimelockOwnership(t, tenv, allChains, state) + } + + var ( + capReg = state.Chains[tenv.HomeChainSel].CapabilityRegistry + ccipHome = state.Chains[tenv.HomeChainSel].CCIPHome + ) + donID, err := internal.DonIDForChain(capReg, ccipHome, dest) + require.NoError(t, err) + require.NotEqual(t, uint32(0), donID) + candidateDigestCommitBefore, err := ccipHome.GetCandidateDigest(&bind.CallOpts{ + Context: ctx, + }, donID, uint8(types.PluginTypeCCIPCommit)) + require.NoError(t, err) + require.Equal(t, [32]byte{}, candidateDigestCommitBefore) + candidateDigestExecBefore, err := ccipHome.GetCandidateDigest(&bind.CallOpts{ + Context: ctx, + }, donID, uint8(types.PluginTypeCCIPExec)) + require.NoError(t, err) + require.Equal(t, [32]byte{}, candidateDigestExecBefore) + + var mcmsConfig *MCMSConfig + if tc.mcmsEnabled { + mcmsConfig = &MCMSConfig{ + MinDelay: 0, + } + } + _, err = commonchangeset.ApplyChangesets(t, tenv.Env, map[uint64]*commonchangeset.TimelockExecutionContracts{ + tenv.HomeChainSel: { + Timelock: state.Chains[tenv.HomeChainSel].Timelock, + CallProxy: state.Chains[tenv.HomeChainSel].CallProxy, + }, + }, []commonchangeset.ChangesetApplication{ + { + Changeset: commonchangeset.WrapChangeSet(PromoteAllCandidatesChangeset), + Config: PromoteAllCandidatesChangesetConfig{ + HomeChainSelector: tenv.HomeChainSel, + DONChainSelector: dest, + NodeIDs: nodeIDs, + MCMS: mcmsConfig, + }, + }, + }) + require.NoError(t, err) + + // after promoting the zero digest, active digest should also be zero + activeDigestCommit, err := ccipHome.GetActiveDigest(&bind.CallOpts{ + Context: ctx, + }, donID, uint8(types.PluginTypeCCIPCommit)) + require.NoError(t, err) + require.Equal(t, [32]byte{}, activeDigestCommit) + + activeDigestExec, err := ccipHome.GetActiveDigest(&bind.CallOpts{ + Context: ctx, + }, donID, uint8(types.PluginTypeCCIPExec)) + require.NoError(t, err) + require.Equal(t, [32]byte{}, activeDigestExec) + }) + } +} diff --git a/deployment/ccip/changeset/cs_initial_add_chain.go b/deployment/ccip/changeset/cs_initial_add_chain.go index 52b07aae6b4..5ba648d74b5 100644 --- a/deployment/ccip/changeset/cs_initial_add_chain.go +++ b/deployment/ccip/changeset/cs_initial_add_chain.go @@ -308,14 +308,15 @@ func createDON( newChainSel uint64, nodes deployment.Nodes, ) error { - donID, exists, err := internal.DonIDForChain(capReg, ccipHome, newChainSel) + donID, err := internal.DonIDForChain(capReg, ccipHome, newChainSel) if err != nil { return fmt.Errorf("fetch don id for chain: %w", err) } - if exists { + if donID != 0 { lggr.Infow("DON already exists not adding it again", "donID", donID, "chain", newChainSel) return ValidateCCIPHomeConfigSetUp(lggr, capReg, ccipHome, newChainSel) } + commitConfig, ok := ocr3Configs[cctypes.PluginTypeCCIPCommit] if !ok { return fmt.Errorf("missing commit plugin in ocr3Configs") @@ -477,13 
+478,14 @@ func ValidateCCIPHomeConfigSetUp( chainSel uint64, ) error { // fetch DONID - donID, exists, err := internal.DonIDForChain(capReg, ccipHome, chainSel) + donID, err := internal.DonIDForChain(capReg, ccipHome, chainSel) if err != nil { return fmt.Errorf("fetch don id for chain: %w", err) } - if !exists { + if donID == 0 { return fmt.Errorf("don id for chain(%d) does not exist", chainSel) } + // final sanity checks on configs. commitConfigs, err := ccipHome.GetAllConfigs(&bind.CallOpts{ //Pending: true, diff --git a/deployment/ccip/changeset/internal/deploy_home_chain.go b/deployment/ccip/changeset/internal/deploy_home_chain.go index df53d752e75..aa029fd4bec 100644 --- a/deployment/ccip/changeset/internal/deploy_home_chain.go +++ b/deployment/ccip/changeset/internal/deploy_home_chain.go @@ -110,25 +110,37 @@ func LatestCCIPDON(registry *capabilities_registry.CapabilitiesRegistry) (*capab // DonIDForChain returns the DON ID for the chain with the given selector // It looks up with the CCIPHome contract to find the OCR3 configs for the DONs, and returns the DON ID for the chain matching with the given selector from the OCR3 configs -func DonIDForChain(registry *capabilities_registry.CapabilitiesRegistry, ccipHome *ccip_home.CCIPHome, chainSelector uint64) (uint32, bool, error) { +func DonIDForChain(registry *capabilities_registry.CapabilitiesRegistry, ccipHome *ccip_home.CCIPHome, chainSelector uint64) (uint32, error) { dons, err := registry.GetDONs(nil) if err != nil { - return 0, false, err + return 0, fmt.Errorf("get Dons from capability registry: %w", err) } - // TODO: what happens if there are multiple dons for one chain (accidentally?) + var donIDs []uint32 for _, don := range dons { if len(don.CapabilityConfigurations) == 1 && don.CapabilityConfigurations[0].CapabilityId == CCIPCapabilityID { configs, err := ccipHome.GetAllConfigs(nil, don.Id, uint8(types.PluginTypeCCIPCommit)) if err != nil { - return 0, false, err + return 0, fmt.Errorf("get all commit configs from cciphome: %w", err) } if configs.ActiveConfig.Config.ChainSelector == chainSelector || configs.CandidateConfig.Config.ChainSelector == chainSelector { - return don.Id, true, nil + donIDs = append(donIDs, don.Id) } } } - return 0, false, nil + + // more than one DON is an error + if len(donIDs) > 1 { + return 0, fmt.Errorf("more than one DON found for (chain selector %d, ccip capability id %x) pair", chainSelector, CCIPCapabilityID[:]) + } + + // no DON found - don ID of 0 indicates that (this is the case in the CR as well). + if len(donIDs) == 0 { + return 0, nil + } + + // DON found - return it. 
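// With the boolean return value gone, callers branch on the returned ID instead; a sketch of
// the pattern used throughout this change set (names assumed from the surrounding call sites):
//
//	donID, err := internal.DonIDForChain(capReg, ccipHome, chainSelector)
//	if err != nil {
//		return fmt.Errorf("fetch don id for chain: %w", err)
//	}
//	if donID == 0 {
//		// no DON registered in the CapabilitiesRegistry for this chain yet
//	}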
+ return donIDs[0], nil } func BuildSetOCR3ConfigArgs( From d1caaa33e496f540a411207be0809120a894c600 Mon Sep 17 00:00:00 2001 From: krehermann <16602512+krehermann@users.noreply.github.com> Date: Wed, 11 Dec 2024 12:37:22 -0700 Subject: [PATCH 02/15] refactor update don capability (#15623) * refactor update & append capabilities * update don changeset mcms * update don with test * cleanup dupes * configurable MCMS; infered MCMS usage from it --- .../changeset/append_node_capabilities.go | 6 +- .../append_node_capabilities_test.go | 2 +- .../keystone/changeset/deploy_forwarder.go | 53 +++++- .../changeset/deploy_forwarder_test.go | 2 +- deployment/keystone/changeset/deploy_ocr3.go | 51 +++++- .../keystone/changeset/deploy_ocr3_test.go | 3 +- .../keystone/changeset/internal/update_don.go | 41 +++-- .../changeset/internal/update_don_test.go | 29 ++- deployment/keystone/changeset/update_don.go | 94 +++++++++- .../keystone/changeset/update_don_test.go | 165 ++++++++++++++++++ .../changeset/update_node_capabilities.go | 17 +- .../update_node_capabilities_test.go | 2 +- deployment/keystone/changeset/update_nodes.go | 26 ++- .../keystone/changeset/update_nodes_test.go | 4 +- deployment/keystone/deploy.go | 34 +--- deployment/keystone/forwarder_deployer.go | 12 +- deployment/keystone/ocr3config.go | 27 +-- 17 files changed, 456 insertions(+), 112 deletions(-) create mode 100644 deployment/keystone/changeset/update_don_test.go diff --git a/deployment/keystone/changeset/append_node_capabilities.go b/deployment/keystone/changeset/append_node_capabilities.go index f0bad959551..688d4fd8d2f 100644 --- a/deployment/keystone/changeset/append_node_capabilities.go +++ b/deployment/keystone/changeset/append_node_capabilities.go @@ -29,7 +29,7 @@ func AppendNodeCapabilities(env deployment.Environment, req *AppendNodeCapabilit return deployment.ChangesetOutput{}, err } out := deployment.ChangesetOutput{} - if req.UseMCMS { + if req.UseMCMS() { if r.Ops == nil { return out, fmt.Errorf("expected MCMS operation to be non-nil") } @@ -45,7 +45,7 @@ func AppendNodeCapabilities(env deployment.Environment, req *AppendNodeCapabilit proposerMCMSes, []timelock.BatchChainOperation{*r.Ops}, "proposal to set update node capabilities", - 0, + req.MCMSConfig.MinDuration, ) if err != nil { return out, fmt.Errorf("failed to build proposal: %w", err) @@ -76,6 +76,6 @@ func (req *AppendNodeCapabilitiesRequest) convert(e deployment.Environment) (*in Chain: registryChain, ContractSet: &contracts, P2pToCapabilities: req.P2pToCapabilities, - UseMCMS: req.UseMCMS, + UseMCMS: req.UseMCMS(), }, nil } diff --git a/deployment/keystone/changeset/append_node_capabilities_test.go b/deployment/keystone/changeset/append_node_capabilities_test.go index 7fbbbfc8a83..159500ab5a7 100644 --- a/deployment/keystone/changeset/append_node_capabilities_test.go +++ b/deployment/keystone/changeset/append_node_capabilities_test.go @@ -75,7 +75,7 @@ func TestAppendNodeCapabilities(t *testing.T) { cfg := changeset.AppendNodeCapabilitiesRequest{ RegistryChainSel: te.RegistrySelector, P2pToCapabilities: newCapabilities, - UseMCMS: true, + MCMSConfig: &changeset.MCMSConfig{MinDuration: 0}, } csOut, err := changeset.AppendNodeCapabilities(te.Env, &cfg) diff --git a/deployment/keystone/changeset/deploy_forwarder.go b/deployment/keystone/changeset/deploy_forwarder.go index cf116decd54..1e4066770bd 100644 --- a/deployment/keystone/changeset/deploy_forwarder.go +++ b/deployment/keystone/changeset/deploy_forwarder.go @@ -3,7 +3,11 @@ package changeset import ( "fmt" + 
"github.com/ethereum/go-ethereum/common" + "github.com/smartcontractkit/ccip-owner-contracts/pkg/gethwrappers" + "github.com/smartcontractkit/ccip-owner-contracts/pkg/proposal/timelock" "github.com/smartcontractkit/chainlink/deployment" + "github.com/smartcontractkit/chainlink/deployment/common/proposalutils" kslib "github.com/smartcontractkit/chainlink/deployment/keystone" ) @@ -35,7 +39,8 @@ type ConfigureForwardContractsRequest struct { WFNodeIDs []string RegistryChainSel uint64 - UseMCMS bool + // MCMSConfig is optional. If non-nil, the changes will be proposed using MCMS. + MCMSConfig *MCMSConfig } func (r ConfigureForwardContractsRequest) Validate() error { @@ -45,6 +50,10 @@ func (r ConfigureForwardContractsRequest) Validate() error { return nil } +func (r ConfigureForwardContractsRequest) UseMCMS() bool { + return r.MCMSConfig != nil +} + func ConfigureForwardContracts(env deployment.Environment, req ConfigureForwardContractsRequest) (deployment.ChangesetOutput, error) { wfDon, err := kslib.NewRegisteredDon(env, kslib.RegisteredDonConfig{ NodeIDs: req.WFNodeIDs, @@ -56,12 +65,46 @@ func ConfigureForwardContracts(env deployment.Environment, req ConfigureForwardC } r, err := kslib.ConfigureForwardContracts(&env, kslib.ConfigureForwarderContractsRequest{ Dons: []kslib.RegisteredDon{*wfDon}, - UseMCMS: req.UseMCMS, + UseMCMS: req.UseMCMS(), }) if err != nil { return deployment.ChangesetOutput{}, fmt.Errorf("failed to configure forward contracts: %w", err) } - return deployment.ChangesetOutput{ - Proposals: r.Proposals, - }, nil + + cresp, err := kslib.GetContractSets(env.Logger, &kslib.GetContractSetsRequest{ + Chains: env.Chains, + AddressBook: env.ExistingAddresses, + }) + if err != nil { + return deployment.ChangesetOutput{}, fmt.Errorf("failed to get contract sets: %w", err) + } + + var out deployment.ChangesetOutput + if req.UseMCMS() { + if len(r.OpsPerChain) == 0 { + return out, fmt.Errorf("expected MCMS operation to be non-nil") + } + for chainSelector, op := range r.OpsPerChain { + contracts := cresp.ContractSets[chainSelector] + timelocksPerChain := map[uint64]common.Address{ + chainSelector: contracts.Timelock.Address(), + } + proposerMCMSes := map[uint64]*gethwrappers.ManyChainMultiSig{ + chainSelector: contracts.ProposerMcm, + } + + proposal, err := proposalutils.BuildProposalFromBatches( + timelocksPerChain, + proposerMCMSes, + []timelock.BatchChainOperation{op}, + "proposal to set update nodes", + req.MCMSConfig.MinDuration, + ) + if err != nil { + return out, fmt.Errorf("failed to build proposal: %w", err) + } + out.Proposals = append(out.Proposals, *proposal) + } + } + return out, nil } diff --git a/deployment/keystone/changeset/deploy_forwarder_test.go b/deployment/keystone/changeset/deploy_forwarder_test.go index 82454599226..dd894fde9d9 100644 --- a/deployment/keystone/changeset/deploy_forwarder_test.go +++ b/deployment/keystone/changeset/deploy_forwarder_test.go @@ -109,7 +109,7 @@ func TestConfigureForwarders(t *testing.T) { WFDonName: "test-wf-don", WFNodeIDs: wfNodes, RegistryChainSel: te.RegistrySelector, - UseMCMS: true, + MCMSConfig: &changeset.MCMSConfig{MinDuration: 0}, } csOut, err := changeset.ConfigureForwardContracts(te.Env, cfg) require.NoError(t, err) diff --git a/deployment/keystone/changeset/deploy_ocr3.go b/deployment/keystone/changeset/deploy_ocr3.go index 0ce3d02844b..4dfed1e292c 100644 --- a/deployment/keystone/changeset/deploy_ocr3.go +++ b/deployment/keystone/changeset/deploy_ocr3.go @@ -5,9 +5,12 @@ import ( "fmt" "io" + 
"github.com/ethereum/go-ethereum/common" + "github.com/smartcontractkit/ccip-owner-contracts/pkg/gethwrappers" "github.com/smartcontractkit/ccip-owner-contracts/pkg/proposal/timelock" "github.com/smartcontractkit/chainlink/deployment" + "github.com/smartcontractkit/chainlink/deployment/common/proposalutils" kslib "github.com/smartcontractkit/chainlink/deployment/keystone" ) @@ -38,7 +41,12 @@ type ConfigureOCR3Config struct { DryRun bool WriteGeneratedConfig io.Writer // if not nil, write the generated config to this writer as JSON [OCR2OracleConfig] - UseMCMS bool + // MCMSConfig is optional. If non-nil, the changes will be proposed using MCMS. + MCMSConfig *MCMSConfig +} + +func (cfg ConfigureOCR3Config) UseMCMS() bool { + return cfg.MCMSConfig != nil } func ConfigureOCR3Contract(env deployment.Environment, cfg ConfigureOCR3Config) (deployment.ChangesetOutput, error) { @@ -47,7 +55,7 @@ func ConfigureOCR3Contract(env deployment.Environment, cfg ConfigureOCR3Config) NodeIDs: cfg.NodeIDs, OCR3Config: cfg.OCR3Config, DryRun: cfg.DryRun, - UseMCMS: cfg.UseMCMS, + UseMCMS: cfg.UseMCMS(), }) if err != nil { return deployment.ChangesetOutput{}, fmt.Errorf("failed to configure OCR3Capability: %w", err) @@ -67,11 +75,38 @@ func ConfigureOCR3Contract(env deployment.Environment, cfg ConfigureOCR3Config) } } // does not create any new addresses - var proposals []timelock.MCMSWithTimelockProposal - if cfg.UseMCMS { - proposals = append(proposals, *resp.Proposal) + var out deployment.ChangesetOutput + if cfg.UseMCMS() { + if resp.Ops == nil { + return out, fmt.Errorf("expected MCMS operation to be non-nil") + } + r, err := kslib.GetContractSets(env.Logger, &kslib.GetContractSetsRequest{ + Chains: env.Chains, + AddressBook: env.ExistingAddresses, + }) + if err != nil { + return out, fmt.Errorf("failed to get contract sets: %w", err) + } + contracts := r.ContractSets[cfg.ChainSel] + timelocksPerChain := map[uint64]common.Address{ + cfg.ChainSel: contracts.Timelock.Address(), + } + proposerMCMSes := map[uint64]*gethwrappers.ManyChainMultiSig{ + cfg.ChainSel: contracts.ProposerMcm, + } + + proposal, err := proposalutils.BuildProposalFromBatches( + timelocksPerChain, + proposerMCMSes, + []timelock.BatchChainOperation{*resp.Ops}, + "proposal to set update nodes", + cfg.MCMSConfig.MinDuration, + ) + if err != nil { + return out, fmt.Errorf("failed to build proposal: %w", err) + } + out.Proposals = []timelock.MCMSWithTimelockProposal{*proposal} + } - return deployment.ChangesetOutput{ - Proposals: proposals, - }, nil + return out, nil } diff --git a/deployment/keystone/changeset/deploy_ocr3_test.go b/deployment/keystone/changeset/deploy_ocr3_test.go index 60abd702929..5d02f83500d 100644 --- a/deployment/keystone/changeset/deploy_ocr3_test.go +++ b/deployment/keystone/changeset/deploy_ocr3_test.go @@ -71,7 +71,6 @@ func TestConfigureOCR3(t *testing.T) { NodeIDs: wfNodes, OCR3Config: &c, WriteGeneratedConfig: w, - UseMCMS: false, } csOut, err := changeset.ConfigureOCR3Contract(te.Env, cfg) @@ -104,7 +103,7 @@ func TestConfigureOCR3(t *testing.T) { NodeIDs: wfNodes, OCR3Config: &c, WriteGeneratedConfig: w, - UseMCMS: true, + MCMSConfig: &changeset.MCMSConfig{MinDuration: 0}, } csOut, err := changeset.ConfigureOCR3Contract(te.Env, cfg) diff --git a/deployment/keystone/changeset/internal/update_don.go b/deployment/keystone/changeset/internal/update_don.go index dae0e46eca7..fc7e410e540 100644 --- a/deployment/keystone/changeset/internal/update_don.go +++ b/deployment/keystone/changeset/internal/update_don.go @@ 
-6,9 +6,11 @@ import ( "encoding/hex" "encoding/json" "fmt" + "math/big" "sort" "github.com/ethereum/go-ethereum/accounts/abi/bind" + "github.com/smartcontractkit/ccip-owner-contracts/pkg/proposal/mcms" "github.com/smartcontractkit/ccip-owner-contracts/pkg/proposal/timelock" "github.com/smartcontractkit/chainlink-common/pkg/logger" "github.com/smartcontractkit/chainlink/deployment" @@ -36,7 +38,7 @@ type UpdateDonRequest struct { UseMCMS bool } -func (r *UpdateDonRequest) appendNodeCapabilitiesRequest() *AppendNodeCapabilitiesRequest { +func (r *UpdateDonRequest) AppendNodeCapabilitiesRequest() *AppendNodeCapabilitiesRequest { out := &AppendNodeCapabilitiesRequest{ Chain: r.Chain, ContractSet: r.ContractSet, @@ -65,8 +67,8 @@ func (r *UpdateDonRequest) Validate() error { } type UpdateDonResponse struct { - DonInfo kcr.CapabilitiesRegistryDONInfo - Proposals []timelock.MCMSWithTimelockProposal + DonInfo kcr.CapabilitiesRegistryDONInfo + Ops *timelock.BatchChainOperation } func UpdateDon(lggr logger.Logger, req *UpdateDonRequest) (*UpdateDonResponse, error) { @@ -89,24 +91,37 @@ func UpdateDon(lggr logger.Logger, req *UpdateDonRequest) (*UpdateDonResponse, e return nil, fmt.Errorf("failed to compute configs: %w", err) } - _, err = AppendNodeCapabilitiesImpl(lggr, req.appendNodeCapabilitiesRequest()) - if err != nil { - return nil, fmt.Errorf("failed to append node capabilities: %w", err) + txOpts := req.Chain.DeployerKey + if req.UseMCMS { + txOpts = deployment.SimTransactOpts() } - - tx, err := registry.UpdateDON(req.Chain.DeployerKey, don.Id, don.NodeP2PIds, cfgs, don.IsPublic, don.F) + tx, err := registry.UpdateDON(txOpts, don.Id, don.NodeP2PIds, cfgs, don.IsPublic, don.F) if err != nil { err = kslib.DecodeErr(kcr.CapabilitiesRegistryABI, err) return nil, fmt.Errorf("failed to call UpdateDON: %w", err) } - - _, err = req.Chain.Confirm(tx) - if err != nil { - return nil, fmt.Errorf("failed to confirm UpdateDON transaction %s: %w", tx.Hash().String(), err) + var ops *timelock.BatchChainOperation + if !req.UseMCMS { + _, err = req.Chain.Confirm(tx) + if err != nil { + return nil, fmt.Errorf("failed to confirm UpdateDON transaction %s: %w", tx.Hash().String(), err) + } + } else { + ops = &timelock.BatchChainOperation{ + ChainIdentifier: mcms.ChainIdentifier(req.Chain.Selector), + Batch: []mcms.Operation{ + { + To: registry.Address(), + Data: tx.Data(), + Value: big.NewInt(0), + }, + }, + } } + out := don out.CapabilityConfigurations = cfgs - return &UpdateDonResponse{DonInfo: out}, nil + return &UpdateDonResponse{DonInfo: out, Ops: ops}, nil } func PeerIDsToBytes(p2pIDs []p2pkey.PeerID) [][32]byte { diff --git a/deployment/keystone/changeset/internal/update_don_test.go b/deployment/keystone/changeset/internal/update_don_test.go index 49ddee538bf..93857b26f78 100644 --- a/deployment/keystone/changeset/internal/update_don_test.go +++ b/deployment/keystone/changeset/internal/update_don_test.go @@ -83,13 +83,13 @@ func TestUpdateDon(t *testing.T) { admin: admin_4, }) // capabilities - cap_A = kcr.CapabilitiesRegistryCapability{ + initialCap = kcr.CapabilitiesRegistryCapability{ LabelledName: "test", Version: "1.0.0", CapabilityType: 0, } - cap_B = kcr.CapabilitiesRegistryCapability{ + capToAdd = kcr.CapabilitiesRegistryCapability{ LabelledName: "cap b", Version: "1.0.0", CapabilityType: 1, @@ -104,7 +104,7 @@ func TestUpdateDon(t *testing.T) { { Name: "don 1", Nodes: []deployment.Node{node_1, node_2, node_3, node_4}, - Capabilities: []kcr.CapabilitiesRegistryCapability{cap_A}, + Capabilities: 
[]kcr.CapabilitiesRegistryCapability{initialCap}, }, }, nops: []keystone.NOP{ @@ -115,14 +115,26 @@ func TestUpdateDon(t *testing.T) { }, } - testCfg := setupUpdateDonTest(t, lggr, cfg) + testCfg := registerTestDon(t, lggr, cfg) + // add the new capabilities to registry + m := make(map[p2pkey.PeerID][]kcr.CapabilitiesRegistryCapability) + for _, node := range cfg.dons[0].Nodes { + m[node.PeerID] = append(m[node.PeerID], capToAdd) + } + + _, err := internal.AppendNodeCapabilitiesImpl(lggr, &internal.AppendNodeCapabilitiesRequest{ + Chain: testCfg.Chain, + ContractSet: testCfg.ContractSet, + P2pToCapabilities: m, + }) + require.NoError(t, err) req := &internal.UpdateDonRequest{ ContractSet: testCfg.ContractSet, Chain: testCfg.Chain, P2PIDs: []p2pkey.PeerID{p2p_1.PeerID(), p2p_2.PeerID(), p2p_3.PeerID(), p2p_4.PeerID()}, CapabilityConfigs: []internal.CapabilityConfig{ - {Capability: cap_A}, {Capability: cap_B}, + {Capability: initialCap}, {Capability: capToAdd}, }, } want := &internal.UpdateDonResponse{ @@ -131,8 +143,8 @@ func TestUpdateDon(t *testing.T) { ConfigCount: 1, NodeP2PIds: internal.PeerIDsToBytes([]p2pkey.PeerID{p2p_1.PeerID(), p2p_2.PeerID(), p2p_3.PeerID(), p2p_4.PeerID()}), CapabilityConfigurations: []kcr.CapabilitiesRegistryCapabilityConfiguration{ - {CapabilityId: kstest.MustCapabilityId(t, testCfg.Registry, cap_A)}, - {CapabilityId: kstest.MustCapabilityId(t, testCfg.Registry, cap_B)}, + {CapabilityId: kstest.MustCapabilityId(t, testCfg.Registry, initialCap)}, + {CapabilityId: kstest.MustCapabilityId(t, testCfg.Registry, capToAdd)}, }, }, } @@ -220,10 +232,11 @@ type setupUpdateDonTestResult struct { chain deployment.Chain } -func setupUpdateDonTest(t *testing.T, lggr logger.Logger, cfg setupUpdateDonTestConfig) *kstest.SetupTestRegistryResponse { +func registerTestDon(t *testing.T, lggr logger.Logger, cfg setupUpdateDonTestConfig) *kstest.SetupTestRegistryResponse { t.Helper() req := newSetupTestRegistryRequest(t, cfg.dons, cfg.nops) return kstest.SetupTestRegistry(t, lggr, req) + } func newSetupTestRegistryRequest(t *testing.T, dons []kslib.DonInfo, nops []keystone.NOP) *kstest.SetupTestRegistryRequest { diff --git a/deployment/keystone/changeset/update_don.go b/deployment/keystone/changeset/update_don.go index 1ab40d5a935..3f43ea513be 100644 --- a/deployment/keystone/changeset/update_don.go +++ b/deployment/keystone/changeset/update_don.go @@ -4,8 +4,11 @@ import ( "fmt" "github.com/smartcontractkit/chainlink/deployment" + kslib "github.com/smartcontractkit/chainlink/deployment/keystone" + "github.com/smartcontractkit/chainlink/deployment/keystone/changeset/internal" kcr "github.com/smartcontractkit/chainlink/v2/core/gethwrappers/keystone/generated/capabilities_registry" + "github.com/smartcontractkit/chainlink/v2/core/services/keystore/keys/p2pkey" ) var _ deployment.ChangeSet[*UpdateDonRequest] = UpdateDon @@ -13,7 +16,28 @@ var _ deployment.ChangeSet[*UpdateDonRequest] = UpdateDon // CapabilityConfig is a struct that holds a capability and its configuration type CapabilityConfig = internal.CapabilityConfig -type UpdateDonRequest = internal.UpdateDonRequest +type UpdateDonRequest struct { + RegistryChainSel uint64 + P2PIDs []p2pkey.PeerID // this is the unique identifier for the don + CapabilityConfigs []CapabilityConfig // if Config subfield is nil, a default config is used + + // MCMSConfig is optional. If non-nil, the changes will be proposed using MCMS. 
+ MCMSConfig *MCMSConfig +} + +func (r *UpdateDonRequest) Validate() error { + if len(r.P2PIDs) == 0 { + return fmt.Errorf("p2pIDs is required") + } + if len(r.CapabilityConfigs) == 0 { + return fmt.Errorf("capabilityConfigs is required") + } + return nil +} + +func (r UpdateDonRequest) UseMCMS() bool { + return r.MCMSConfig != nil +} type UpdateDonResponse struct { DonInfo kcr.CapabilitiesRegistryDONInfo @@ -23,9 +47,73 @@ type UpdateDonResponse struct { // This a complex action in practice that involves registering missing capabilities, adding the nodes, and updating // the capabilities of the DON func UpdateDon(env deployment.Environment, req *UpdateDonRequest) (deployment.ChangesetOutput, error) { - _, err := internal.UpdateDon(env.Logger, req) + appendResult, err := AppendNodeCapabilities(env, appendRequest(req)) + if err != nil { + return deployment.ChangesetOutput{}, fmt.Errorf("failed to append node capabilities: %w", err) + } + + ur, err := updateDonRequest(env, req) + if err != nil { + return deployment.ChangesetOutput{}, fmt.Errorf("failed to create update don request: %w", err) + } + updateResult, err := internal.UpdateDon(env.Logger, ur) if err != nil { return deployment.ChangesetOutput{}, fmt.Errorf("failed to update don: %w", err) } - return deployment.ChangesetOutput{}, nil + + out := deployment.ChangesetOutput{} + if req.UseMCMS() { + if updateResult.Ops == nil { + return out, fmt.Errorf("expected MCMS operation to be non-nil") + } + if len(appendResult.Proposals) == 0 { + return out, fmt.Errorf("expected append node capabilities to return proposals") + } + + out.Proposals = appendResult.Proposals + + // add the update don to the existing batch + // this makes the proposal all-or-nothing because all the operations are in the same batch, there is only one tr + // transaction and only one proposal + out.Proposals[0].Transactions[0].Batch = append(out.Proposals[0].Transactions[0].Batch, updateResult.Ops.Batch...) 
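	// The merged output then has the shape asserted in update_don_test.go below: a single
	// proposal containing a single timelock transaction whose batch holds all three operations
	// (add capabilities, update nodes, update DON), e.g.
	//
	//	require.Len(t, csOut.Proposals, 1)
	//	require.Len(t, csOut.Proposals[0].Transactions, 1)
	//	require.Len(t, csOut.Proposals[0].Transactions[0].Batch, 3)
	//
	// so the whole change is applied all-or-nothing when the proposal is executed.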
+ + } + return out, nil + +} + +func appendRequest(r *UpdateDonRequest) *AppendNodeCapabilitiesRequest { + out := &AppendNodeCapabilitiesRequest{ + RegistryChainSel: r.RegistryChainSel, + P2pToCapabilities: make(map[p2pkey.PeerID][]kcr.CapabilitiesRegistryCapability), + MCMSConfig: r.MCMSConfig, + } + for _, p2pid := range r.P2PIDs { + if _, exists := out.P2pToCapabilities[p2pid]; !exists { + out.P2pToCapabilities[p2pid] = make([]kcr.CapabilitiesRegistryCapability, 0) + } + for _, cc := range r.CapabilityConfigs { + out.P2pToCapabilities[p2pid] = append(out.P2pToCapabilities[p2pid], cc.Capability) + } + } + return out +} + +func updateDonRequest(env deployment.Environment, r *UpdateDonRequest) (*internal.UpdateDonRequest, error) { + resp, err := kslib.GetContractSets(env.Logger, &kslib.GetContractSetsRequest{ + Chains: env.Chains, + AddressBook: env.ExistingAddresses, + }) + if err != nil { + return nil, fmt.Errorf("failed to get contract sets: %w", err) + } + contractSet := resp.ContractSets[r.RegistryChainSel] + + return &internal.UpdateDonRequest{ + Chain: env.Chains[r.RegistryChainSel], + ContractSet: &contractSet, + P2PIDs: r.P2PIDs, + CapabilityConfigs: r.CapabilityConfigs, + UseMCMS: r.UseMCMS(), + }, nil } diff --git a/deployment/keystone/changeset/update_don_test.go b/deployment/keystone/changeset/update_don_test.go new file mode 100644 index 00000000000..18287da6887 --- /dev/null +++ b/deployment/keystone/changeset/update_don_test.go @@ -0,0 +1,165 @@ +package changeset_test + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + commonchangeset "github.com/smartcontractkit/chainlink/deployment/common/changeset" + "github.com/smartcontractkit/chainlink/deployment/keystone/changeset" + "github.com/smartcontractkit/chainlink/deployment/keystone/changeset/internal" + kcr "github.com/smartcontractkit/chainlink/v2/core/gethwrappers/keystone/generated/capabilities_registry" + "github.com/smartcontractkit/chainlink/v2/core/services/keystore/keys/p2pkey" +) + +func TestUpdateDon(t *testing.T) { + t.Parallel() + + var ( + capA = kcr.CapabilitiesRegistryCapability{ + LabelledName: "capA", + Version: "0.4.2", + } + capB = kcr.CapabilitiesRegistryCapability{ + LabelledName: "capB", + Version: "3.16.0", + } + caps = []kcr.CapabilitiesRegistryCapability{capA, capB} + ) + t.Run("no mcms", func(t *testing.T) { + te := SetupTestEnv(t, TestConfig{ + WFDonConfig: DonConfig{N: 4}, + AssetDonConfig: DonConfig{N: 4}, + WriterDonConfig: DonConfig{N: 4}, + NumChains: 1, + }) + + // contract set is already deployed with capabilities + // we have to keep track of the existing capabilities to add to the new ones + var p2pIDs []p2pkey.PeerID + newCapabilities := make(map[p2pkey.PeerID][]kcr.CapabilitiesRegistryCapability) + for id, _ := range te.WFNodes { + k, err := p2pkey.MakePeerID(id) + require.NoError(t, err) + p2pIDs = append(p2pIDs, k) + newCapabilities[k] = caps + } + + t.Run("succeeds if update sets new and existing capabilities", func(t *testing.T) { + cfg := changeset.UpdateDonRequest{ + RegistryChainSel: te.RegistrySelector, + P2PIDs: p2pIDs, + CapabilityConfigs: []changeset.CapabilityConfig{ + { + Capability: capA, + }, + { + Capability: capB, + }, + }, + } + + csOut, err := changeset.UpdateDon(te.Env, &cfg) + require.NoError(t, err) + require.Len(t, csOut.Proposals, 0) + require.Nil(t, csOut.AddressBook) + + assertDonContainsCapabilities(t, te.ContractSets()[te.RegistrySelector].CapabilitiesRegistry, caps, p2pIDs) + }) + }) + t.Run("with 
mcms", func(t *testing.T) { + te := SetupTestEnv(t, TestConfig{ + WFDonConfig: DonConfig{N: 4}, + AssetDonConfig: DonConfig{N: 4}, + WriterDonConfig: DonConfig{N: 4}, + NumChains: 1, + UseMCMS: true, + }) + + // contract set is already deployed with capabilities + // we have to keep track of the existing capabilities to add to the new ones + var p2pIDs []p2pkey.PeerID + for id, _ := range te.WFNodes { + k, err := p2pkey.MakePeerID(id) + require.NoError(t, err) + p2pIDs = append(p2pIDs, k) + } + + cfg := changeset.UpdateDonRequest{ + RegistryChainSel: te.RegistrySelector, + P2PIDs: p2pIDs, + CapabilityConfigs: []changeset.CapabilityConfig{ + { + Capability: capA, + }, + { + Capability: capB, + }, + }, + MCMSConfig: &changeset.MCMSConfig{MinDuration: 0}, + } + + csOut, err := changeset.UpdateDon(te.Env, &cfg) + require.NoError(t, err) + + if true { + require.Len(t, csOut.Proposals, 1) + require.Len(t, csOut.Proposals[0].Transactions, 1) // append node capabilties cs, update don + require.Len(t, csOut.Proposals[0].Transactions[0].Batch, 3) // add capabilities, update nodes, update don + require.Nil(t, csOut.AddressBook) + } else { + require.Len(t, csOut.Proposals, 1) + require.Len(t, csOut.Proposals[0].Transactions, 2) // append node capabilties cs, update don + require.Len(t, csOut.Proposals[0].Transactions[0].Batch, 2) // add capabilities, update nodes + require.Len(t, csOut.Proposals[0].Transactions[1].Batch, 1) // update don + require.Nil(t, csOut.AddressBook) + } + + // now apply the changeset such that the proposal is signed and execed + contracts := te.ContractSets()[te.RegistrySelector] + timelockContracts := map[uint64]*commonchangeset.TimelockExecutionContracts{ + te.RegistrySelector: { + Timelock: contracts.Timelock, + CallProxy: contracts.CallProxy, + }, + } + _, err = commonchangeset.ApplyChangesets(t, te.Env, timelockContracts, []commonchangeset.ChangesetApplication{ + { + Changeset: commonchangeset.WrapChangeSet(changeset.UpdateDon), + Config: &cfg, + }, + }) + require.NoError(t, err) + assertDonContainsCapabilities(t, te.ContractSets()[te.RegistrySelector].CapabilitiesRegistry, caps, p2pIDs) + }) +} + +func assertDonContainsCapabilities(t *testing.T, registry *kcr.CapabilitiesRegistry, want []kcr.CapabilitiesRegistryCapability, p2pIDs []p2pkey.PeerID) { + dons, err := registry.GetDONs(nil) + require.NoError(t, err) + var got *kcr.CapabilitiesRegistryDONInfo + for i, don := range dons { + if internal.SortedHash(internal.PeerIDsToBytes(p2pIDs)) == internal.SortedHash(don.NodeP2PIds) { + got = &dons[i] + break + } + } + require.NotNil(t, got, "missing don with p2pIDs %v", p2pIDs) + wantHashes := make([][32]byte, len(want)) + for i, c := range want { + h, err := registry.GetHashedCapabilityId(nil, c.LabelledName, c.Version) + require.NoError(t, err) + wantHashes[i] = h + assert.Contains(t, capIDsFromCapCfgs(got.CapabilityConfigurations), h, "missing capability %v", c) + } + assert.LessOrEqual(t, len(want), len(got.CapabilityConfigurations), "too many capabilities") +} + +func capIDsFromCapCfgs(cfgs []kcr.CapabilitiesRegistryCapabilityConfiguration) [][32]byte { + out := make([][32]byte, len(cfgs)) + for i, c := range cfgs { + out[i] = c.CapabilityId + } + return out +} diff --git a/deployment/keystone/changeset/update_node_capabilities.go b/deployment/keystone/changeset/update_node_capabilities.go index d50c07c9f06..9c9d5585fc2 100644 --- a/deployment/keystone/changeset/update_node_capabilities.go +++ b/deployment/keystone/changeset/update_node_capabilities.go @@ -53,10 +53,11 @@ 
type UpdateNodeCapabilitiesRequest = MutateNodeCapabilitiesRequest // MutateNodeCapabilitiesRequest is a request to change the capabilities of nodes in the registry type MutateNodeCapabilitiesRequest struct { - RegistryChainSel uint64 - + RegistryChainSel uint64 P2pToCapabilities map[p2pkey.PeerID][]kcr.CapabilitiesRegistryCapability - UseMCMS bool + + // MCMSConfig is optional. If non-nil, the changes will be proposed using MCMS. + MCMSConfig *MCMSConfig } func (req *MutateNodeCapabilitiesRequest) Validate() error { @@ -71,6 +72,10 @@ func (req *MutateNodeCapabilitiesRequest) Validate() error { return nil } +func (req *MutateNodeCapabilitiesRequest) UseMCMS() bool { + return req.MCMSConfig != nil +} + func (req *MutateNodeCapabilitiesRequest) updateNodeCapabilitiesImplRequest(e deployment.Environment) (*internal.UpdateNodeCapabilitiesImplRequest, error) { if err := req.Validate(); err != nil { return nil, fmt.Errorf("failed to validate UpdateNodeCapabilitiesRequest: %w", err) @@ -95,7 +100,7 @@ func (req *MutateNodeCapabilitiesRequest) updateNodeCapabilitiesImplRequest(e de Chain: registryChain, ContractSet: &contractSet, P2pToCapabilities: req.P2pToCapabilities, - UseMCMS: req.UseMCMS, + UseMCMS: req.UseMCMS(), }, nil } @@ -112,7 +117,7 @@ func UpdateNodeCapabilities(env deployment.Environment, req *UpdateNodeCapabilit } out := deployment.ChangesetOutput{} - if req.UseMCMS { + if req.UseMCMS() { if r.Ops == nil { return out, fmt.Errorf("expected MCMS operation to be non-nil") } @@ -128,7 +133,7 @@ func UpdateNodeCapabilities(env deployment.Environment, req *UpdateNodeCapabilit proposerMCMSes, []timelock.BatchChainOperation{*r.Ops}, "proposal to set update node capabilities", - 0, + req.MCMSConfig.MinDuration, ) if err != nil { return out, fmt.Errorf("failed to build proposal: %w", err) diff --git a/deployment/keystone/changeset/update_node_capabilities_test.go b/deployment/keystone/changeset/update_node_capabilities_test.go index 8c6378d809f..cb5588ff3d1 100644 --- a/deployment/keystone/changeset/update_node_capabilities_test.go +++ b/deployment/keystone/changeset/update_node_capabilities_test.go @@ -106,7 +106,7 @@ func TestUpdateNodeCapabilities(t *testing.T) { cfg := changeset.UpdateNodeCapabilitiesRequest{ RegistryChainSel: te.RegistrySelector, P2pToCapabilities: capabiltiesToSet, - UseMCMS: true, + MCMSConfig: &changeset.MCMSConfig{MinDuration: 0}, } csOut, err := changeset.UpdateNodeCapabilities(te.Env, &cfg) diff --git a/deployment/keystone/changeset/update_nodes.go b/deployment/keystone/changeset/update_nodes.go index 4e2a4f7f4c6..bb12f32cb94 100644 --- a/deployment/keystone/changeset/update_nodes.go +++ b/deployment/keystone/changeset/update_nodes.go @@ -2,6 +2,7 @@ package changeset import ( "fmt" + "time" "github.com/ethereum/go-ethereum/common" "github.com/smartcontractkit/ccip-owner-contracts/pkg/gethwrappers" @@ -14,14 +15,31 @@ import ( "github.com/smartcontractkit/chainlink/v2/core/services/keystore/keys/p2pkey" ) +type MCMSConfig struct { + MinDuration time.Duration +} + var _ deployment.ChangeSet[*UpdateNodesRequest] = UpdateNodes type UpdateNodesRequest struct { RegistryChainSel uint64 P2pToUpdates map[p2pkey.PeerID]NodeUpdate - UseMCMS bool + // MCMSConfig is optional. If non-nil, the changes will be proposed using MCMS. 
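// Sketch of the resulting caller-side pattern, mirroring update_nodes_test.go below; a nil
// MCMSConfig keeps direct execution with the deployer key, while a non-nil value only generates
// a timelock proposal using the configured MinDuration (te and updates are assumed test fixtures):
//
//	cfg := changeset.UpdateNodesRequest{
//		RegistryChainSel: te.RegistrySelector,
//		P2pToUpdates:     updates,
//		MCMSConfig:       &changeset.MCMSConfig{MinDuration: 0},
//	}
//	csOut, err := changeset.UpdateNodes(te.Env, &cfg)
//	// csOut.Proposals -> one MCMS-with-timelock proposal to sign and execute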
+ MCMSConfig *MCMSConfig +} + +func (r *UpdateNodesRequest) Validate() error { + if r.P2pToUpdates == nil { + return fmt.Errorf("P2pToUpdates must be non-nil") + } + return nil +} + +func (r UpdateNodesRequest) UseMCMS() bool { + return r.MCMSConfig != nil } + type NodeUpdate = internal.NodeUpdate // UpdateNodes updates the a set of nodes. @@ -48,14 +66,14 @@ func UpdateNodes(env deployment.Environment, req *UpdateNodesRequest) (deploymen Chain: registryChain, ContractSet: &contracts, P2pToUpdates: req.P2pToUpdates, - UseMCMS: req.UseMCMS, + UseMCMS: req.UseMCMS(), }) if err != nil { return deployment.ChangesetOutput{}, fmt.Errorf("failed to update don: %w", err) } out := deployment.ChangesetOutput{} - if req.UseMCMS { + if req.UseMCMS() { if resp.Ops == nil { return out, fmt.Errorf("expected MCMS operation to be non-nil") } @@ -71,7 +89,7 @@ func UpdateNodes(env deployment.Environment, req *UpdateNodesRequest) (deploymen proposerMCMSes, []timelock.BatchChainOperation{*resp.Ops}, "proposal to set update nodes", - 0, + req.MCMSConfig.MinDuration, ) if err != nil { return out, fmt.Errorf("failed to build proposal: %w", err) diff --git a/deployment/keystone/changeset/update_nodes_test.go b/deployment/keystone/changeset/update_nodes_test.go index aebe10aa3d5..be3bfb12ee6 100644 --- a/deployment/keystone/changeset/update_nodes_test.go +++ b/deployment/keystone/changeset/update_nodes_test.go @@ -79,7 +79,7 @@ func TestUpdateNodes(t *testing.T) { cfg := changeset.UpdateNodesRequest{ RegistryChainSel: te.RegistrySelector, P2pToUpdates: updates, - UseMCMS: true, + MCMSConfig: &changeset.MCMSConfig{MinDuration: 0}, } csOut, err := changeset.UpdateNodes(te.Env, &cfg) @@ -101,7 +101,7 @@ func TestUpdateNodes(t *testing.T) { Config: &changeset.UpdateNodesRequest{ RegistryChainSel: te.RegistrySelector, P2pToUpdates: updates, - UseMCMS: true, + MCMSConfig: &changeset.MCMSConfig{MinDuration: 0}, }, }, }) diff --git a/deployment/keystone/deploy.go b/deployment/keystone/deploy.go index b83b5114391..7d3e5391219 100644 --- a/deployment/keystone/deploy.go +++ b/deployment/keystone/deploy.go @@ -14,15 +14,12 @@ import ( "time" "github.com/ethereum/go-ethereum/accounts/abi/bind" - "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/rpc" "golang.org/x/exp/maps" - "github.com/smartcontractkit/ccip-owner-contracts/pkg/gethwrappers" "github.com/smartcontractkit/ccip-owner-contracts/pkg/proposal/mcms" "github.com/smartcontractkit/ccip-owner-contracts/pkg/proposal/timelock" "github.com/smartcontractkit/chainlink/deployment" - "github.com/smartcontractkit/chainlink/deployment/common/proposalutils" "google.golang.org/protobuf/proto" "google.golang.org/protobuf/types/known/durationpb" @@ -348,7 +345,7 @@ func ConfigureOCR3Contract(env *deployment.Environment, chainSel uint64, dons [] type ConfigureOCR3Resp struct { OCR2OracleConfig - Proposal *timelock.MCMSWithTimelockProposal + Ops *timelock.BatchChainOperation } type ConfigureOCR3Config struct { @@ -405,7 +402,7 @@ func ConfigureOCR3ContractFromJD(env *deployment.Environment, cfg ConfigureOCR3C } return &ConfigureOCR3Resp{ OCR2OracleConfig: r.ocrConfig, - Proposal: r.proposal, + Ops: r.ops, }, nil } @@ -941,13 +938,13 @@ func containsAllDONs(donInfos []kcr.CapabilitiesRegistryDONInfo, p2pIdsToDon map // configureForwarder sets the config for the forwarder contract on the chain for all Dons that accept workflows // dons that don't accept workflows are not registered with the forwarder -func configureForwarder(lggr logger.Logger, chain 
deployment.Chain, contractSet ContractSet, dons []RegisteredDon, useMCMS bool) ([]timelock.MCMSWithTimelockProposal, error) { +func configureForwarder(lggr logger.Logger, chain deployment.Chain, contractSet ContractSet, dons []RegisteredDon, useMCMS bool) (map[uint64]timelock.BatchChainOperation, error) { if contractSet.Forwarder == nil { return nil, errors.New("nil forwarder contract") } var ( - fwdr = contractSet.Forwarder - proposals []timelock.MCMSWithTimelockProposal + fwdr = contractSet.Forwarder + opMap = make(map[uint64]timelock.BatchChainOperation) ) for _, dn := range dons { if !dn.Info.AcceptsWorkflows { @@ -982,26 +979,9 @@ func configureForwarder(lggr logger.Logger, chain deployment.Chain, contractSet }, }, } - timelocksPerChain := map[uint64]common.Address{ - chain.Selector: contractSet.Timelock.Address(), - } - proposerMCMSes := map[uint64]*gethwrappers.ManyChainMultiSig{ - chain.Selector: contractSet.ProposerMcm, - } - - proposal, err := proposalutils.BuildProposalFromBatches( - timelocksPerChain, - proposerMCMSes, - []timelock.BatchChainOperation{ops}, - "proposal to set forward config", - 0, - ) - if err != nil { - return nil, fmt.Errorf("failed to build proposal: %w", err) - } - proposals = append(proposals, *proposal) + opMap[chain.Selector] = ops } lggr.Debugw("configured forwarder", "forwarder", fwdr.Address().String(), "donId", dn.Info.Id, "version", ver, "f", dn.Info.F, "signers", signers) } - return proposals, nil + return opMap, nil } diff --git a/deployment/keystone/forwarder_deployer.go b/deployment/keystone/forwarder_deployer.go index d7cfa7991f4..7c7b3a1ed93 100644 --- a/deployment/keystone/forwarder_deployer.go +++ b/deployment/keystone/forwarder_deployer.go @@ -64,7 +64,7 @@ type ConfigureForwarderContractsRequest struct { UseMCMS bool } type ConfigureForwarderContractsResponse struct { - Proposals []timelock.MCMSWithTimelockProposal + OpsPerChain map[uint64]timelock.BatchChainOperation } // Depreciated: use [changeset.ConfigureForwarders] instead @@ -79,7 +79,7 @@ func ConfigureForwardContracts(env *deployment.Environment, req ConfigureForward return nil, fmt.Errorf("failed to get contract sets: %w", err) } - var allProposals []timelock.MCMSWithTimelockProposal + opPerChain := make(map[uint64]timelock.BatchChainOperation) // configure forwarders on all chains for _, chain := range env.Chains { // get the forwarder contract for the chain @@ -87,13 +87,15 @@ func ConfigureForwardContracts(env *deployment.Environment, req ConfigureForward if !ok { return nil, fmt.Errorf("failed to get contract set for chain %d", chain.Selector) } - proposals, err := configureForwarder(env.Logger, chain, contracts, req.Dons, req.UseMCMS) + ops, err := configureForwarder(env.Logger, chain, contracts, req.Dons, req.UseMCMS) if err != nil { return nil, fmt.Errorf("failed to configure forwarder for chain selector %d: %w", chain.Selector, err) } - allProposals = append(allProposals, proposals...) 
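// Note: building MCMS proposals out of these per-chain operations now happens one layer up, in
// changeset.ConfigureForwardContracts (deploy_forwarder.go above), which wraps each chain's
// BatchChainOperation into its own proposal using the caller-supplied MCMSConfig.MinDuration
// instead of a hard-coded zero delay.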
+ for k, op := range ops { + opPerChain[k] = op + } } return &ConfigureForwarderContractsResponse{ - Proposals: allProposals, + OpsPerChain: opPerChain, }, nil } diff --git a/deployment/keystone/ocr3config.go b/deployment/keystone/ocr3config.go index ccd738636ed..aed142ea116 100644 --- a/deployment/keystone/ocr3config.go +++ b/deployment/keystone/ocr3config.go @@ -18,12 +18,10 @@ import ( "github.com/smartcontractkit/libocr/offchainreporting2plus/ocr3confighelper" "github.com/smartcontractkit/libocr/offchainreporting2plus/types" - "github.com/smartcontractkit/ccip-owner-contracts/pkg/gethwrappers" "github.com/smartcontractkit/ccip-owner-contracts/pkg/proposal/mcms" "github.com/smartcontractkit/ccip-owner-contracts/pkg/proposal/timelock" "github.com/smartcontractkit/chainlink/deployment" - "github.com/smartcontractkit/chainlink/deployment/common/proposalutils" kocr3 "github.com/smartcontractkit/chainlink/v2/core/gethwrappers/keystone/generated/ocr3_capability" "github.com/smartcontractkit/chainlink/v2/core/services/keystore/chaintype" "github.com/smartcontractkit/chainlink/v2/core/services/ocrcommon" @@ -300,7 +298,7 @@ func (r configureOCR3Request) generateOCR3Config() (OCR2OracleConfig, error) { type configureOCR3Response struct { ocrConfig OCR2OracleConfig - proposal *timelock.MCMSWithTimelockProposal + ops *timelock.BatchChainOperation } func configureOCR3contract(req configureOCR3Request) (*configureOCR3Response, error) { @@ -333,7 +331,7 @@ func configureOCR3contract(req configureOCR3Request) (*configureOCR3Response, er return nil, fmt.Errorf("failed to call SetConfig for OCR3 contract %s using mcms: %T: %w", req.contract.Address().String(), req.useMCMS, err) } - var proposal *timelock.MCMSWithTimelockProposal + var ops *timelock.BatchChainOperation if !req.useMCMS { _, err = req.chain.Confirm(tx) if err != nil { @@ -341,7 +339,7 @@ func configureOCR3contract(req configureOCR3Request) (*configureOCR3Response, er return nil, fmt.Errorf("failed to confirm SetConfig for OCR3 contract %s: %w", req.contract.Address().String(), err) } } else { - ops := timelock.BatchChainOperation{ + ops = &timelock.BatchChainOperation{ ChainIdentifier: mcms.ChainIdentifier(req.chain.Selector), Batch: []mcms.Operation{ { @@ -351,24 +349,7 @@ func configureOCR3contract(req configureOCR3Request) (*configureOCR3Response, er }, }, } - timelocksPerChain := map[uint64]common.Address{ - req.chain.Selector: req.contractSet.Timelock.Address(), - } - proposerMCMSes := map[uint64]*gethwrappers.ManyChainMultiSig{ - req.chain.Selector: req.contractSet.ProposerMcm, - } - - proposal, err = proposalutils.BuildProposalFromBatches( - timelocksPerChain, - proposerMCMSes, - []timelock.BatchChainOperation{ops}, - "proposal to set ocr3 config", - 0, - ) - if err != nil { - return nil, fmt.Errorf("failed to build proposal: %w", err) - } } - return &configureOCR3Response{ocrConfig, proposal}, nil + return &configureOCR3Response{ocrConfig, ops}, nil } From 73572073e9f1b8d4b4f7e03269caf06fb920f158 Mon Sep 17 00:00:00 2001 From: Margaret Ma Date: Wed, 11 Dec 2024 15:09:26 -0500 Subject: [PATCH 03/15] [DEVSVCS-963] Ensure that workflow id is unique (#15582) * ensure that workflow id is unique * Update gethwrappers --------- Co-authored-by: app-token-issuer-infra-releng[bot] <120227048+app-token-issuer-infra-releng[bot]@users.noreply.github.com> --- contracts/gas-snapshots/workflow.gas-snapshot | 28 ++++++------- .../v0.8/workflow/dev/WorkflowRegistry.sol | 39 +++++++++++++++---- .../WorkflowRegistry.registerWorkflow.t.sol | 29 
++++++++++++++ .../WorkflowRegistry.registerWorkflow.tree | 2 + .../WorkflowRegistry.updateWorkflow.t.sol | 26 +++++++++++++ .../WorkflowRegistry.updateWorkflow.tree | 2 + .../workflow_registry_wrapper.go | 2 +- ...rapper-dependency-versions-do-not-edit.txt | 2 +- 8 files changed, 106 insertions(+), 24 deletions(-) diff --git a/contracts/gas-snapshots/workflow.gas-snapshot b/contracts/gas-snapshots/workflow.gas-snapshot index 9195c401ef3..bdfd2b24aec 100644 --- a/contracts/gas-snapshots/workflow.gas-snapshot +++ b/contracts/gas-snapshots/workflow.gas-snapshot @@ -17,9 +17,9 @@ WorkflowRegistryManager_getVersion:test_WhenVersionNumberIsRegistered() (gas: 28 WorkflowRegistryManager_getVersionNumberByContractAddressAndChainID:test_WhenAVersionIsRegisteredForTheContractAddressAndChainIDCombination() (gas: 285022) WorkflowRegistryManager_getVersionNumberByContractAddressAndChainID:test_WhenNoVersionIsRegisteredForTheContractAddressAndChainIDCombination() (gas: 286634) WorkflowRegistryManager_getVersionNumberByContractAddressAndChainID:test_WhenTheContractAddressIsInvalid() (gas: 284604) -WorkflowRegistry_activateWorkflow:test_WhenTheCallerIsAnAuthorizedAddress() (gas: 495029) -WorkflowRegistry_deleteWorkflow:test_WhenTheCallerIsAnAuthorizedAddress_AndTheDonIDIsAllowed() (gas: 403945) -WorkflowRegistry_deleteWorkflow:test_WhenTheCallerIsAnAuthorizedAddress_AndTheDonIDIsNotAllowed() (gas: 421748) +WorkflowRegistry_activateWorkflow:test_WhenTheCallerIsAnAuthorizedAddress() (gas: 517416) +WorkflowRegistry_deleteWorkflow:test_WhenTheCallerIsAnAuthorizedAddress_AndTheDonIDIsAllowed() (gas: 422157) +WorkflowRegistry_deleteWorkflow:test_WhenTheCallerIsAnAuthorizedAddress_AndTheDonIDIsNotAllowed() (gas: 439960) WorkflowRegistry_getAllAllowedDONs:test_WhenTheRegistryIsLocked() (gas: 47473) WorkflowRegistry_getAllAllowedDONs:test_WhenTheSetOfAllowedDONsIsEmpty() (gas: 25780) WorkflowRegistry_getAllAllowedDONs:test_WhenThereAreMultipleAllowedDONs() (gas: 75437) @@ -28,9 +28,9 @@ WorkflowRegistry_getAllAuthorizedAddresses:test_WhenTheRegistryIsLocked() (gas: WorkflowRegistry_getAllAuthorizedAddresses:test_WhenTheSetOfAuthorizedAddressesIsEmpty() (gas: 26152) WorkflowRegistry_getAllAuthorizedAddresses:test_WhenThereAreMultipleAuthorizedAddresses() (gas: 78270) WorkflowRegistry_getAllAuthorizedAddresses:test_WhenThereIsASingleAuthorizedAddress() (gas: 16832) -WorkflowRegistry_getWorkflowMetadata:test_WhenTheRegistryIsLocked() (gas: 519145) +WorkflowRegistry_getWorkflowMetadata:test_WhenTheRegistryIsLocked() (gas: 541532) WorkflowRegistry_getWorkflowMetadata:test_WhenTheWorkflowDoesNotExist() (gas: 17543) -WorkflowRegistry_getWorkflowMetadata:test_WhenTheWorkflowExistsWithTheOwnerAndName() (gas: 490001) +WorkflowRegistry_getWorkflowMetadata:test_WhenTheWorkflowExistsWithTheOwnerAndName() (gas: 512388) WorkflowRegistry_getWorkflowMetadataListByDON:test_WhenLimitExceedsTotalWorkflows() (gas: 128146) WorkflowRegistry_getWorkflowMetadataListByDON:test_WhenLimitIsEqualToTotalWorkflows() (gas: 128035) WorkflowRegistry_getWorkflowMetadataListByDON:test_WhenLimitIsLessThanTotalWorkflows() (gas: 90141) @@ -48,17 +48,17 @@ WorkflowRegistry_getWorkflowMetadataListByOwner:test_WhenStartIsGreaterThanOrEqu WorkflowRegistry_getWorkflowMetadataListByOwner:test_WhenTheOwnerHasNoWorkflows() (gas: 14006) WorkflowRegistry_getWorkflowMetadataListByOwner:test_WhenTheRegistryIsLocked() (gas: 165968) WorkflowRegistry_lockRegistry:test_WhenTheCallerIsTheContractOwner() (gas: 38758) 
-WorkflowRegistry_pauseWorkflow:test_WhenTheDonIDIsAllowed_AndTheCallerIsAnAuthorizedAddress() (gas: 494993) -WorkflowRegistry_pauseWorkflow:test_WhenTheDonIDIsAllowed_AndTheCallerIsAnUnauthorizedAddress() (gas: 502796) -WorkflowRegistry_pauseWorkflow:test_WhenTheDonIDIsNotAllowed_AndTheCallerIsAnAuthorizedAddress() (gas: 502555) -WorkflowRegistry_pauseWorkflow:test_WhenTheDonIDIsNotAllowed_AndTheCallerIsAnUnauthorizedAddress() (gas: 506966) -WorkflowRegistry_registerWorkflow:test_WhenTheWorkflowInputsAreAllValid() (gas: 549769) -WorkflowRegistry_requestForceUpdateSecrets:test_WhenTheCallerIsAnAuthorizedAddress_AndTheWorkflowIsInAnAllowedDON() (gas: 891242) -WorkflowRegistry_requestForceUpdateSecrets:test_WhenTheCallerIsAnAuthorizedAddress_AndTheWorkflowIsNotInAnAllowedDON() (gas: 488397) -WorkflowRegistry_requestForceUpdateSecrets:test_WhenTheCallerIsNotAnAuthorizedAddress() (gas: 486751) +WorkflowRegistry_pauseWorkflow:test_WhenTheDonIDIsAllowed_AndTheCallerIsAnAuthorizedAddress() (gas: 517380) +WorkflowRegistry_pauseWorkflow:test_WhenTheDonIDIsAllowed_AndTheCallerIsAnUnauthorizedAddress() (gas: 525183) +WorkflowRegistry_pauseWorkflow:test_WhenTheDonIDIsNotAllowed_AndTheCallerIsAnAuthorizedAddress() (gas: 524942) +WorkflowRegistry_pauseWorkflow:test_WhenTheDonIDIsNotAllowed_AndTheCallerIsAnUnauthorizedAddress() (gas: 529353) +WorkflowRegistry_registerWorkflow:test_WhenTheWorkflowInputsAreAllValid() (gas: 572178) +WorkflowRegistry_requestForceUpdateSecrets:test_WhenTheCallerIsAnAuthorizedAddress_AndTheWorkflowIsInAnAllowedDON() (gas: 936016) +WorkflowRegistry_requestForceUpdateSecrets:test_WhenTheCallerIsAnAuthorizedAddress_AndTheWorkflowIsNotInAnAllowedDON() (gas: 510784) +WorkflowRegistry_requestForceUpdateSecrets:test_WhenTheCallerIsNotAnAuthorizedAddress() (gas: 509138) WorkflowRegistry_unlockRegistry:test_WhenTheCallerIsTheContractOwner() (gas: 30325) WorkflowRegistry_updateAllowedDONs:test_WhenTheBoolInputIsFalse() (gas: 29739) WorkflowRegistry_updateAllowedDONs:test_WhenTheBoolInputIsTrue() (gas: 170296) WorkflowRegistry_updateAuthorizedAddresses:test_WhenTheBoolInputIsFalse() (gas: 30278) WorkflowRegistry_updateAuthorizedAddresses:test_WhenTheBoolInputIsTrue() (gas: 175515) -WorkflowRegistry_updateWorkflow:test_WhenTheWorkflowInputsAreAllValid() (gas: 479601) \ No newline at end of file +WorkflowRegistry_updateWorkflow:test_WhenTheWorkflowInputsAreAllValid() (gas: 515666) diff --git a/contracts/src/v0.8/workflow/dev/WorkflowRegistry.sol b/contracts/src/v0.8/workflow/dev/WorkflowRegistry.sol index 0e6ae3450ac..2454374b2fb 100644 --- a/contracts/src/v0.8/workflow/dev/WorkflowRegistry.sol +++ b/contracts/src/v0.8/workflow/dev/WorkflowRegistry.sol @@ -43,6 +43,8 @@ contract WorkflowRegistry is Ownable2StepMsgSender, ITypeAndVersion { /// @dev Mapping to track workflows by secretsURL hash (owner + secretsURL). /// This is used to find all workflows that have the same secretsURL when a force secrets update event is requested. mapping(bytes32 secretsURLHash => EnumerableSet.Bytes32Set workflowKeys) private s_secretsHashToWorkflows; + /// @dev Keep track of all workflowIDs to ensure uniqueness. + mapping(bytes32 workflowID => bool inUse) private s_workflowIDs; /// @dev List of all authorized EOAs/contracts allowed to access this contract's state functions. All view functions are open access. 
EnumerableSet.AddressSet private s_authorizedAddresses; @@ -203,13 +205,15 @@ contract WorkflowRegistry is Ownable2StepMsgSender, ITypeAndVersion { ) external registryNotLocked { _validatePermissions(donID, msg.sender); _validateWorkflowName(bytes(workflowName).length); - _validateWorkflowMetadata(workflowID, bytes(binaryURL).length, bytes(configURL).length, bytes(secretsURL).length); + _validateWorkflowURLs(bytes(binaryURL).length, bytes(configURL).length, bytes(secretsURL).length); bytes32 workflowKey = computeHashKey(msg.sender, workflowName); if (s_workflows[workflowKey].owner != address(0)) { revert WorkflowAlreadyRegistered(); } + _requireUniqueWorkflowID(workflowID); + // Create new workflow entry s_workflows[workflowKey] = WorkflowMetadata({ workflowID: workflowID, @@ -272,7 +276,7 @@ contract WorkflowRegistry is Ownable2StepMsgSender, ITypeAndVersion { string calldata configURL, string calldata secretsURL ) external registryNotLocked { - _validateWorkflowMetadata(newWorkflowID, bytes(binaryURL).length, bytes(configURL).length, bytes(secretsURL).length); + _validateWorkflowURLs(bytes(binaryURL).length, bytes(configURL).length, bytes(secretsURL).length); WorkflowMetadata storage workflow = _getWorkflowFromStorage(msg.sender, workflowKey); @@ -295,6 +299,12 @@ contract WorkflowRegistry is Ownable2StepMsgSender, ITypeAndVersion { revert WorkflowContentNotUpdated(); } + // Ensure the new workflowID is unique + _requireUniqueWorkflowID(newWorkflowID); + + // Free the old workflowID + s_workflowIDs[currentWorkflowID] = false; + // Update all fields that have changed and the relevant sets workflow.workflowID = newWorkflowID; if (!sameBinaryURL) { @@ -387,6 +397,9 @@ contract WorkflowRegistry is Ownable2StepMsgSender, ITypeAndVersion { revert AddressNotAuthorized(msg.sender); } + // Release the workflowID for reuse + s_workflowIDs[workflow.workflowID] = false; + // Remove the workflow from the owner and DON mappings s_ownerWorkflowKeys[msg.sender].remove(workflowKey); s_donWorkflowKeys[workflow.donID].remove(workflowKey); @@ -508,6 +521,20 @@ contract WorkflowRegistry is Ownable2StepMsgSender, ITypeAndVersion { return workflow; } + /// @notice Ensures the given workflowID is unique and marks it as used. + /// @param workflowID The workflowID to validate and consume. + function _requireUniqueWorkflowID( + bytes32 workflowID + ) internal { + if (workflowID == bytes32(0)) revert InvalidWorkflowID(); + + if (s_workflowIDs[workflowID]) { + revert WorkflowIDAlreadyExists(); + } + + s_workflowIDs[workflowID] = true; + } + // ================================================================ // | Workflow Queries | // ================================================================ @@ -636,16 +663,12 @@ contract WorkflowRegistry is Ownable2StepMsgSender, ITypeAndVersion { // | Validation | // ================================================================ - /// @dev Internal function to validate the metadata for a workflow. - /// @param workflowID The unique identifier for the workflow. - function _validateWorkflowMetadata( - bytes32 workflowID, + /// @dev Internal function to validate the urls for a workflow. 
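+  /// @dev The workflowID itself is checked separately (non-zero and globally unique) via _requireUniqueWorkflowID.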
+ function _validateWorkflowURLs( uint256 binaryURLLength, uint256 configURLLength, uint256 secretsURLLength ) internal pure { - if (workflowID == bytes32(0)) revert InvalidWorkflowID(); - if (binaryURLLength > MAX_URL_LENGTH) { revert URLTooLong(binaryURLLength, MAX_URL_LENGTH); } diff --git a/contracts/src/v0.8/workflow/test/WorkflowRegistry/WorkflowRegistry.registerWorkflow.t.sol b/contracts/src/v0.8/workflow/test/WorkflowRegistry/WorkflowRegistry.registerWorkflow.t.sol index 426fbfcc502..859437196cd 100644 --- a/contracts/src/v0.8/workflow/test/WorkflowRegistry/WorkflowRegistry.registerWorkflow.t.sol +++ b/contracts/src/v0.8/workflow/test/WorkflowRegistry/WorkflowRegistry.registerWorkflow.t.sol @@ -138,6 +138,35 @@ contract WorkflowRegistry_registerWorkflow is WorkflowRegistrySetup { ); } + // whenTheCallerIsAnAuthorizedAddress whenTheRegistryIsNotLocked whenTheDonIDIsAllowed + function test_RevertWhen_TheWorkflowIDIsAlreadyInUsedByAnotherWorkflow() external { + vm.startPrank(s_authorizedAddress); + + // Register a valid workflow first + s_registry.registerWorkflow( + s_validWorkflowName, + s_validWorkflowID, + s_allowedDonID, + WorkflowRegistry.WorkflowStatus.ACTIVE, + s_validBinaryURL, + s_validConfigURL, + s_validSecretsURL + ); + + vm.expectRevert(WorkflowRegistry.WorkflowIDAlreadyExists.selector); + s_registry.registerWorkflow( + "ValidWorkflow2", + s_validWorkflowID, + s_allowedDonID, + WorkflowRegistry.WorkflowStatus.ACTIVE, + s_validBinaryURL, + s_validConfigURL, + s_validSecretsURL + ); + + vm.stopPrank(); + } + // whenTheCallerIsAnAuthorizedAddress whenTheRegistryIsNotLocked whenTheDonIDIsAllowed function test_RevertWhen_TheWorkflowNameIsAlreadyUsedByTheOwner() external { vm.startPrank(s_authorizedAddress); diff --git a/contracts/src/v0.8/workflow/test/WorkflowRegistry/WorkflowRegistry.registerWorkflow.tree b/contracts/src/v0.8/workflow/test/WorkflowRegistry/WorkflowRegistry.registerWorkflow.tree index 75cdf940575..eabbf58d464 100644 --- a/contracts/src/v0.8/workflow/test/WorkflowRegistry/WorkflowRegistry.registerWorkflow.tree +++ b/contracts/src/v0.8/workflow/test/WorkflowRegistry/WorkflowRegistry.registerWorkflow.tree @@ -18,6 +18,8 @@ WorkflowRegistry.registerWorkflow │ └── it should revert ├── when the workflowID is invalid │ └── it should revert + ├── when the workflowID is already in used by another workflow + │ └── it should revert ├── when the workflow name is already used by the owner │ └── it should revert └── when the workflow inputs are all valid diff --git a/contracts/src/v0.8/workflow/test/WorkflowRegistry/WorkflowRegistry.updateWorkflow.t.sol b/contracts/src/v0.8/workflow/test/WorkflowRegistry/WorkflowRegistry.updateWorkflow.t.sol index 5058512ba7b..4082874a91e 100644 --- a/contracts/src/v0.8/workflow/test/WorkflowRegistry/WorkflowRegistry.updateWorkflow.t.sol +++ b/contracts/src/v0.8/workflow/test/WorkflowRegistry/WorkflowRegistry.updateWorkflow.t.sol @@ -158,6 +158,32 @@ contract WorkflowRegistry_updateWorkflow is WorkflowRegistrySetup { s_registry.updateWorkflow(s_validWorkflowKey, bytes32(0), s_validBinaryURL, s_validConfigURL, s_newValidSecretsURL); } + // whenTheCallerIsAnAuthorizedAddress whenTheRegistryIsNotLocked whenTheDonIDIsAllowed whenTheCallerIsTheWorkflowOwner + function test_RevertWhen_TheWorkflowIDIsAlreadyInUsedByAnotherWorkflow() external { + // Register a workflow first + _registerValidWorkflow(); + + // Register another workflow with another workflow ID + vm.startPrank(s_authorizedAddress); + s_registry.registerWorkflow( + "ValidWorkflow2", 
+ s_newValidWorkflowID, + s_allowedDonID, + WorkflowRegistry.WorkflowStatus.ACTIVE, + s_validBinaryURL, + s_validConfigURL, + s_validSecretsURL + ); + + // Update the workflow with a workflow ID that is already in use by another workflow. + vm.expectRevert(WorkflowRegistry.WorkflowIDAlreadyExists.selector); + s_registry.updateWorkflow( + s_validWorkflowKey, s_newValidWorkflowID, s_validBinaryURL, s_validConfigURL, s_newValidSecretsURL + ); + + vm.stopPrank(); + } + // whenTheCallerIsAnAuthorizedAddress whenTheRegistryIsNotLocked whenTheDonIDIsAllowed whenTheCallerIsTheWorkflowOwner function test_WhenTheWorkflowInputsAreAllValid() external { // Register a workflow first. diff --git a/contracts/src/v0.8/workflow/test/WorkflowRegistry/WorkflowRegistry.updateWorkflow.tree b/contracts/src/v0.8/workflow/test/WorkflowRegistry/WorkflowRegistry.updateWorkflow.tree index 0d4da7cb32e..9b8243a8672 100644 --- a/contracts/src/v0.8/workflow/test/WorkflowRegistry/WorkflowRegistry.updateWorkflow.tree +++ b/contracts/src/v0.8/workflow/test/WorkflowRegistry/WorkflowRegistry.updateWorkflow.tree @@ -25,6 +25,8 @@ WorkflowRegistry.updateWorkflow │ └── it should revert ├── when the workflowID is invalid │ └── it should revert + ├── when the workflowID is already in used by another workflow + │ └── it should revert └── when the workflow inputs are all valid ├── it should update the existing workflow in s_workflows with the new values ├── it should emit {WorkflowUpdatedV1} diff --git a/core/gethwrappers/workflow/generated/workflow_registry_wrapper/workflow_registry_wrapper.go b/core/gethwrappers/workflow/generated/workflow_registry_wrapper/workflow_registry_wrapper.go index c87f59c0e7b..a81d69c343e 100644 --- a/core/gethwrappers/workflow/generated/workflow_registry_wrapper/workflow_registry_wrapper.go +++ b/core/gethwrappers/workflow/generated/workflow_registry_wrapper/workflow_registry_wrapper.go @@ -43,7 +43,7 @@ type WorkflowRegistryWorkflowMetadata struct { var WorkflowRegistryMetaData = &bind.MetaData{ ABI: 
"[{\"inputs\":[{\"internalType\":\"address\",\"name\":\"caller\",\"type\":\"address\"}],\"name\":\"AddressNotAuthorized\",\"type\":\"error\"},{\"inputs\":[{\"internalType\":\"address\",\"name\":\"caller\",\"type\":\"address\"}],\"name\":\"CallerIsNotWorkflowOwner\",\"type\":\"error\"},{\"inputs\":[],\"name\":\"CannotTransferToSelf\",\"type\":\"error\"},{\"inputs\":[{\"internalType\":\"uint32\",\"name\":\"donID\",\"type\":\"uint32\"}],\"name\":\"DONNotAllowed\",\"type\":\"error\"},{\"inputs\":[],\"name\":\"InvalidWorkflowID\",\"type\":\"error\"},{\"inputs\":[],\"name\":\"MustBeProposedOwner\",\"type\":\"error\"},{\"inputs\":[],\"name\":\"OnlyCallableByOwner\",\"type\":\"error\"},{\"inputs\":[],\"name\":\"OwnerCannotBeZero\",\"type\":\"error\"},{\"inputs\":[],\"name\":\"RegistryLocked\",\"type\":\"error\"},{\"inputs\":[{\"internalType\":\"uint256\",\"name\":\"providedLength\",\"type\":\"uint256\"},{\"internalType\":\"uint8\",\"name\":\"maxAllowedLength\",\"type\":\"uint8\"}],\"name\":\"URLTooLong\",\"type\":\"error\"},{\"inputs\":[],\"name\":\"WorkflowAlreadyInDesiredStatus\",\"type\":\"error\"},{\"inputs\":[],\"name\":\"WorkflowAlreadyRegistered\",\"type\":\"error\"},{\"inputs\":[],\"name\":\"WorkflowContentNotUpdated\",\"type\":\"error\"},{\"inputs\":[],\"name\":\"WorkflowDoesNotExist\",\"type\":\"error\"},{\"inputs\":[],\"name\":\"WorkflowIDAlreadyExists\",\"type\":\"error\"},{\"inputs\":[],\"name\":\"WorkflowIDNotUpdated\",\"type\":\"error\"},{\"inputs\":[{\"internalType\":\"uint256\",\"name\":\"providedLength\",\"type\":\"uint256\"},{\"internalType\":\"uint8\",\"name\":\"maxAllowedLength\",\"type\":\"uint8\"}],\"name\":\"WorkflowNameTooLong\",\"type\":\"error\"},{\"anonymous\":false,\"inputs\":[{\"indexed\":false,\"internalType\":\"uint32[]\",\"name\":\"donIDs\",\"type\":\"uint32[]\"},{\"indexed\":false,\"internalType\":\"bool\",\"name\":\"allowed\",\"type\":\"bool\"}],\"name\":\"AllowedDONsUpdatedV1\",\"type\":\"event\"},{\"anonymous\":false,\"inputs\":[{\"indexed\":false,\"internalType\":\"address[]\",\"name\":\"addresses\",\"type\":\"address[]\"},{\"indexed\":false,\"internalType\":\"bool\",\"name\":\"allowed\",\"type\":\"bool\"}],\"name\":\"AuthorizedAddressesUpdatedV1\",\"type\":\"event\"},{\"anonymous\":false,\"inputs\":[{\"indexed\":true,\"internalType\":\"address\",\"name\":\"from\",\"type\":\"address\"},{\"indexed\":true,\"internalType\":\"address\",\"name\":\"to\",\"type\":\"address\"}],\"name\":\"OwnershipTransferRequested\",\"type\":\"event\"},{\"anonymous\":false,\"inputs\":[{\"indexed\":true,\"internalType\":\"address\",\"name\":\"from\",\"type\":\"address\"},{\"indexed\":true,\"internalType\":\"address\",\"name\":\"to\",\"type\":\"address\"}],\"name\":\"OwnershipTransferred\",\"type\":\"event\"},{\"anonymous\":false,\"inputs\":[{\"indexed\":false,\"internalType\":\"address\",\"name\":\"lockedBy\",\"type\":\"address\"}],\"name\":\"RegistryLockedV1\",\"type\":\"event\"},{\"anonymous\":false,\"inputs\":[{\"indexed\":false,\"internalType\":\"address\",\"name\":\"unlockedBy\",\"type\":\"address\"}],\"name\":\"RegistryUnlockedV1\",\"type\":\"event\"},{\"anonymous\":false,\"inputs\":[{\"indexed\":true,\"internalType\":\"bytes32\",\"name\":\"workflowID\",\"type\":\"bytes32\"},{\"indexed\":true,\"internalType\":\"address\",\"name\":\"workflowOwner\",\"type\":\"address\"},{\"indexed\":true,\"internalType\":\"uint32\",\"name\":\"donID\",\"type\":\"uint32\"},{\"indexed\":false,\"internalType\":\"string\",\"name\":\"workflowName\",\"type\":\"string\"}],\"name\":\"WorkflowActivatedV1\",
\"type\":\"event\"},{\"anonymous\":false,\"inputs\":[{\"indexed\":true,\"internalType\":\"bytes32\",\"name\":\"workflowID\",\"type\":\"bytes32\"},{\"indexed\":true,\"internalType\":\"address\",\"name\":\"workflowOwner\",\"type\":\"address\"},{\"indexed\":true,\"internalType\":\"uint32\",\"name\":\"donID\",\"type\":\"uint32\"},{\"indexed\":false,\"internalType\":\"string\",\"name\":\"workflowName\",\"type\":\"string\"}],\"name\":\"WorkflowDeletedV1\",\"type\":\"event\"},{\"anonymous\":false,\"inputs\":[{\"indexed\":true,\"internalType\":\"address\",\"name\":\"owner\",\"type\":\"address\"},{\"indexed\":false,\"internalType\":\"bytes32\",\"name\":\"secretsURLHash\",\"type\":\"bytes32\"},{\"indexed\":false,\"internalType\":\"string\",\"name\":\"workflowName\",\"type\":\"string\"}],\"name\":\"WorkflowForceUpdateSecretsRequestedV1\",\"type\":\"event\"},{\"anonymous\":false,\"inputs\":[{\"indexed\":true,\"internalType\":\"bytes32\",\"name\":\"workflowID\",\"type\":\"bytes32\"},{\"indexed\":true,\"internalType\":\"address\",\"name\":\"workflowOwner\",\"type\":\"address\"},{\"indexed\":true,\"internalType\":\"uint32\",\"name\":\"donID\",\"type\":\"uint32\"},{\"indexed\":false,\"internalType\":\"string\",\"name\":\"workflowName\",\"type\":\"string\"}],\"name\":\"WorkflowPausedV1\",\"type\":\"event\"},{\"anonymous\":false,\"inputs\":[{\"indexed\":true,\"internalType\":\"bytes32\",\"name\":\"workflowID\",\"type\":\"bytes32\"},{\"indexed\":true,\"internalType\":\"address\",\"name\":\"workflowOwner\",\"type\":\"address\"},{\"indexed\":true,\"internalType\":\"uint32\",\"name\":\"donID\",\"type\":\"uint32\"},{\"indexed\":false,\"internalType\":\"enumWorkflowRegistry.WorkflowStatus\",\"name\":\"status\",\"type\":\"uint8\"},{\"indexed\":false,\"internalType\":\"string\",\"name\":\"workflowName\",\"type\":\"string\"},{\"indexed\":false,\"internalType\":\"string\",\"name\":\"binaryURL\",\"type\":\"string\"},{\"indexed\":false,\"internalType\":\"string\",\"name\":\"configURL\",\"type\":\"string\"},{\"indexed\":false,\"internalType\":\"string\",\"name\":\"secretsURL\",\"type\":\"string\"}],\"name\":\"WorkflowRegisteredV1\",\"type\":\"event\"},{\"anonymous\":false,\"inputs\":[{\"indexed\":true,\"internalType\":\"bytes32\",\"name\":\"oldWorkflowID\",\"type\":\"bytes32\"},{\"indexed\":true,\"internalType\":\"address\",\"name\":\"workflowOwner\",\"type\":\"address\"},{\"indexed\":true,\"internalType\":\"uint32\",\"name\":\"donID\",\"type\":\"uint32\"},{\"indexed\":false,\"internalType\":\"bytes32\",\"name\":\"newWorkflowID\",\"type\":\"bytes32\"},{\"indexed\":false,\"internalType\":\"string\",\"name\":\"workflowName\",\"type\":\"string\"},{\"indexed\":false,\"internalType\":\"string\",\"name\":\"binaryURL\",\"type\":\"string\"},{\"indexed\":false,\"internalType\":\"string\",\"name\":\"configURL\",\"type\":\"string\"},{\"indexed\":false,\"internalType\":\"string\",\"name\":\"secretsURL\",\"type\":\"string\"}],\"name\":\"WorkflowUpdatedV1\",\"type\":\"event\"},{\"inputs\":[],\"name\":\"acceptOwnership\",\"outputs\":[],\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"bytes32\",\"name\":\"workflowKey\",\"type\":\"bytes32\"}],\"name\":\"activateWorkflow\",\"outputs\":[],\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"address\",\"name\":\"owner\",\"type\":\"address\"},{\"internalType\":\"string\",\"name\":\"field\",\"type\":\"string\"}],\"name\":\"computeHashKey\",\"outputs\":[{\"internalType\":\"bytes32\",\"name\":\"\",\"type\":\"bytes32
\"}],\"stateMutability\":\"pure\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"bytes32\",\"name\":\"workflowKey\",\"type\":\"bytes32\"}],\"name\":\"deleteWorkflow\",\"outputs\":[],\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"inputs\":[],\"name\":\"getAllAllowedDONs\",\"outputs\":[{\"internalType\":\"uint32[]\",\"name\":\"allowedDONs\",\"type\":\"uint32[]\"}],\"stateMutability\":\"view\",\"type\":\"function\"},{\"inputs\":[],\"name\":\"getAllAuthorizedAddresses\",\"outputs\":[{\"internalType\":\"address[]\",\"name\":\"authorizedAddresses\",\"type\":\"address[]\"}],\"stateMutability\":\"view\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"address\",\"name\":\"workflowOwner\",\"type\":\"address\"},{\"internalType\":\"string\",\"name\":\"workflowName\",\"type\":\"string\"}],\"name\":\"getWorkflowMetadata\",\"outputs\":[{\"components\":[{\"internalType\":\"bytes32\",\"name\":\"workflowID\",\"type\":\"bytes32\"},{\"internalType\":\"address\",\"name\":\"owner\",\"type\":\"address\"},{\"internalType\":\"uint32\",\"name\":\"donID\",\"type\":\"uint32\"},{\"internalType\":\"enumWorkflowRegistry.WorkflowStatus\",\"name\":\"status\",\"type\":\"uint8\"},{\"internalType\":\"string\",\"name\":\"workflowName\",\"type\":\"string\"},{\"internalType\":\"string\",\"name\":\"binaryURL\",\"type\":\"string\"},{\"internalType\":\"string\",\"name\":\"configURL\",\"type\":\"string\"},{\"internalType\":\"string\",\"name\":\"secretsURL\",\"type\":\"string\"}],\"internalType\":\"structWorkflowRegistry.WorkflowMetadata\",\"name\":\"\",\"type\":\"tuple\"}],\"stateMutability\":\"view\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"uint32\",\"name\":\"donID\",\"type\":\"uint32\"},{\"internalType\":\"uint256\",\"name\":\"start\",\"type\":\"uint256\"},{\"internalType\":\"uint256\",\"name\":\"limit\",\"type\":\"uint256\"}],\"name\":\"getWorkflowMetadataListByDON\",\"outputs\":[{\"components\":[{\"internalType\":\"bytes32\",\"name\":\"workflowID\",\"type\":\"bytes32\"},{\"internalType\":\"address\",\"name\":\"owner\",\"type\":\"address\"},{\"internalType\":\"uint32\",\"name\":\"donID\",\"type\":\"uint32\"},{\"internalType\":\"enumWorkflowRegistry.WorkflowStatus\",\"name\":\"status\",\"type\":\"uint8\"},{\"internalType\":\"string\",\"name\":\"workflowName\",\"type\":\"string\"},{\"internalType\":\"string\",\"name\":\"binaryURL\",\"type\":\"string\"},{\"internalType\":\"string\",\"name\":\"configURL\",\"type\":\"string\"},{\"internalType\":\"string\",\"name\":\"secretsURL\",\"type\":\"string\"}],\"internalType\":\"structWorkflowRegistry.WorkflowMetadata[]\",\"name\":\"workflowMetadataList\",\"type\":\"tuple[]\"}],\"stateMutability\":\"view\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"address\",\"name\":\"workflowOwner\",\"type\":\"address\"},{\"internalType\":\"uint256\",\"name\":\"start\",\"type\":\"uint256\"},{\"internalType\":\"uint256\",\"name\":\"limit\",\"type\":\"uint256\"}],\"name\":\"getWorkflowMetadataListByOwner\",\"outputs\":[{\"components\":[{\"internalType\":\"bytes32\",\"name\":\"workflowID\",\"type\":\"bytes32\"},{\"internalType\":\"address\",\"name\":\"owner\",\"type\":\"address\"},{\"internalType\":\"uint32\",\"name\":\"donID\",\"type\":\"uint32\"},{\"internalType\":\"enumWorkflowRegistry.WorkflowStatus\",\"name\":\"status\",\"type\":\"uint8\"},{\"internalType\":\"string\",\"name\":\"workflowName\",\"type\":\"string\"},{\"internalType\":\"string\",\"name\":\"binaryURL\",\"type\":\"string\"},{\"internalType\":\"string\",\"name\":\"configURL\",\"type\":\"
string\"},{\"internalType\":\"string\",\"name\":\"secretsURL\",\"type\":\"string\"}],\"internalType\":\"structWorkflowRegistry.WorkflowMetadata[]\",\"name\":\"workflowMetadataList\",\"type\":\"tuple[]\"}],\"stateMutability\":\"view\",\"type\":\"function\"},{\"inputs\":[],\"name\":\"isRegistryLocked\",\"outputs\":[{\"internalType\":\"bool\",\"name\":\"\",\"type\":\"bool\"}],\"stateMutability\":\"view\",\"type\":\"function\"},{\"inputs\":[],\"name\":\"lockRegistry\",\"outputs\":[],\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"inputs\":[],\"name\":\"owner\",\"outputs\":[{\"internalType\":\"address\",\"name\":\"\",\"type\":\"address\"}],\"stateMutability\":\"view\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"bytes32\",\"name\":\"workflowKey\",\"type\":\"bytes32\"}],\"name\":\"pauseWorkflow\",\"outputs\":[],\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"string\",\"name\":\"workflowName\",\"type\":\"string\"},{\"internalType\":\"bytes32\",\"name\":\"workflowID\",\"type\":\"bytes32\"},{\"internalType\":\"uint32\",\"name\":\"donID\",\"type\":\"uint32\"},{\"internalType\":\"enumWorkflowRegistry.WorkflowStatus\",\"name\":\"status\",\"type\":\"uint8\"},{\"internalType\":\"string\",\"name\":\"binaryURL\",\"type\":\"string\"},{\"internalType\":\"string\",\"name\":\"configURL\",\"type\":\"string\"},{\"internalType\":\"string\",\"name\":\"secretsURL\",\"type\":\"string\"}],\"name\":\"registerWorkflow\",\"outputs\":[],\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"string\",\"name\":\"secretsURL\",\"type\":\"string\"}],\"name\":\"requestForceUpdateSecrets\",\"outputs\":[],\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"address\",\"name\":\"to\",\"type\":\"address\"}],\"name\":\"transferOwnership\",\"outputs\":[],\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"inputs\":[],\"name\":\"typeAndVersion\",\"outputs\":[{\"internalType\":\"string\",\"name\":\"\",\"type\":\"string\"}],\"stateMutability\":\"view\",\"type\":\"function\"},{\"inputs\":[],\"name\":\"unlockRegistry\",\"outputs\":[],\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"uint32[]\",\"name\":\"donIDs\",\"type\":\"uint32[]\"},{\"internalType\":\"bool\",\"name\":\"allowed\",\"type\":\"bool\"}],\"name\":\"updateAllowedDONs\",\"outputs\":[],\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"address[]\",\"name\":\"addresses\",\"type\":\"address[]\"},{\"internalType\":\"bool\",\"name\":\"allowed\",\"type\":\"bool\"}],\"name\":\"updateAuthorizedAddresses\",\"outputs\":[],\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"bytes32\",\"name\":\"workflowKey\",\"type\":\"bytes32\"},{\"internalType\":\"bytes32\",\"name\":\"newWorkflowID\",\"type\":\"bytes32\"},{\"internalType\":\"string\",\"name\":\"binaryURL\",\"type\":\"string\"},{\"internalType\":\"string\",\"name\":\"configURL\",\"type\":\"string\"},{\"internalType\":\"string\",\"name\":\"secretsURL\",\"type\":\"string\"}],\"name\":\"updateWorkflow\",\"outputs\":[],\"stateMutability\":\"nonpayable\",\"type\":\"function\"}]", - Bin: 
"0x6080806040523461004a57331561003b57600180546001600160a01b03191633179055600a805460ff191690556040516133f390816100508239f35b639b15e16f60e01b8152600490fd5b600080fdfe6080604052600436101561001257600080fd5b60003560e01c806308e7f63a14612096578063181f5a77146120075780632303348a14611eca5780632b596f6d14611e3c5780633ccd14ff14611502578063695e13401461135a5780636f35177114611281578063724c13dd1461118a5780637497066b1461106f57806379ba509714610f995780637ec0846d14610f0e5780638da5cb5b14610ebc5780639f4cb53414610e9b578063b87a019414610e45578063d4b89c7414610698578063db800092146105fd578063e3dce080146104d6578063e690f33214610362578063f2fde38b14610284578063f794bdeb146101495763f99ecb6b1461010357600080fd5b346101445760007ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc36011261014457602060ff600a54166040519015158152f35b600080fd5b346101445760007ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc360112610144576006805461018581612410565b6101926040519182612297565b81815261019e82612410565b916020937fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe060208401940136853760005b82811061023257505050906040519283926020840190602085525180915260408401929160005b82811061020557505050500390f35b835173ffffffffffffffffffffffffffffffffffffffff16855286955093810193928101926001016101f6565b6001908260005273ffffffffffffffffffffffffffffffffffffffff817ff652222313e28459528d920b65115c16c04f3efc82aaedc97be59f3f377c0d3f01541661027d8287612542565b52016101cf565b346101445760207ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc360112610144576102bb61237e565b6102c3612bdb565b73ffffffffffffffffffffffffffffffffffffffff8091169033821461033857817fffffffffffffffffffffffff00000000000000000000000000000000000000006000541617600055600154167fed8889f560326eb138920d842192f0eb3dd22b4f139c87a2c57538e05bae1278600080a3005b60046040517fdad89dca000000000000000000000000000000000000000000000000000000008152fd5b346101445760207ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc3601126101445760ff600a54166104ac576103a760043533612dc1565b600181019081549160ff8360c01c16600281101561047d576001146104535778010000000000000000000000000000000000000000000000007fffffffffffffff00ffffffffffffffffffffffffffffffffffffffffffffffff841617905580547f6a0ed88e9cf3cb493ab4028fcb1dc7d18f0130fcdfba096edde0aadbfbf5e99f63ffffffff604051946020865260a01c16938061044e339560026020840191016125e4565b0390a4005b60046040517f6f861db1000000000000000000000000000000000000000000000000000000008152fd5b7f4e487b7100000000000000000000000000000000000000000000000000000000600052602160045260246000fd5b60046040517f78a4e7d9000000000000000000000000000000000000000000000000000000008152fd5b34610144576104e436612306565b916104ed612bdb565b60ff600a54166104ac5760005b828110610589575060405191806040840160408552526060830191906000905b8082106105515785151560208601527f509460cccbb176edde6cac28895a4415a24961b8f3a0bd2617b9bb7b4e166c9b85850386a1005b90919283359073ffffffffffffffffffffffffffffffffffffffff82168092036101445760019181526020809101940192019061051a565b60019084156105cb576105c373ffffffffffffffffffffffffffffffffffffffff6105bd6105b8848888612a7b565b612bba565b16612f9c565b505b016104fa565b6105f773ffffffffffffffffffffffffffffffffffffffff6105f16105b8848888612a7b565b166131cd565b506105c5565b346101445761061d61060e366123a1565b91610617612428565b50612a9c565b6000526004602052604060002073ffffffffffffffffffffffffffffffffffffffff6001820154161561066e5761065661066a91612698565b604051918291602083526020830190612154565b0390f35b60046040517f871e01b2000000000000000000000000000000000000000000000000000000008152fd5b346101445760a0
7ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc3601126101445760443567ffffffffffffffff8111610144576106e79036906004016122d8565b9060643567ffffffffffffffff8111610144576107089036906004016122d8565b9160843567ffffffffffffffff8111610144576107299036906004016122d8565b60ff600a94929454166104ac57610744818688602435612cd0565b61075060043533612dc1565b9163ffffffff600184015460a01c169561076a3388612c26565b8354946024358614610e1b576107a56040516107948161078d8160038b016125e4565b0382612297565b61079f368c8561284c565b90612e30565b6107c76040516107bc8161078d8160048c016125e4565b61079f36868861284c565b6107e96040516107de8161078d8160058d016125e4565b61079f36898d61284c565b918080610e14575b80610e0d575b610de357602435885515610c8e575b15610b3d575b15610890575b926108807f41161473ce2ed633d9f902aab9702d16a5531da27ec84e1939abeffe54ad7353959361044e93610872610864978d604051998a996024358b5260a060208c0152600260a08c0191016125e4565b9189830360408b015261290f565b91868303606088015261290f565b908382036080850152339761290f565b61089d6005860154612591565b610ad6575b67ffffffffffffffff8411610aa7576108cb846108c26005880154612591565b600588016128c8565b6000601f85116001146109a757928492610872610880938a9b9c61094f876108649b9a61044e9a7f41161473ce2ed633d9f902aab9702d16a5531da27ec84e1939abeffe54ad73539e9f60009261099c575b50507fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff8260011b9260031b1c19161790565b60058a01555b8c8780610972575b50509c9b9a9950935050929495509250610812565b61097c9133612a9c565b60005260056020526109946004356040600020612fee565b508c8761095d565b013590508f8061091d565b9860058601600052602060002060005b7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe087168110610a8f5750926108726108809361044e969388968c7f41161473ce2ed633d9f902aab9702d16a5531da27ec84e1939abeffe54ad73539c9d9e9f897fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe06108649e9d1610610a57575b505050600187811b0160058a0155610955565b7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff60f88b60031b161c199101351690558e8d81610a44565b898c0135825560209b8c019b600190920191016109b7565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052604160045260246000fd5b6040516020810190610b1c81610af060058a01338661294e565b037fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe08101835282612297565b5190206000526005602052610b376004356040600020613294565b506108a2565b67ffffffffffffffff8311610aa757610b6683610b5d6004890154612591565b600489016128c8565b600083601f8111600114610bc75780610bb292600091610bbc575b507fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff8260011b9260031b1c19161790565b600487015561080c565b90508601358d610b81565b506004870160005260206000209060005b7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe086168110610c765750847fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0811610610c3e575b5050600183811b01600487015561080c565b7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff60f88660031b161c19908601351690558a80610c2c565b9091602060018192858a013581550193019101610bd8565b67ffffffffffffffff8b11610aa757610cb78b610cae60038a0154612591565b60038a016128c8565b60008b601f8111600114610d175780610d0292600091610d0c57507fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff8260011b9260031b1c19161790565b6003880155610806565b90508501358e610b81565b506003880160005260206000209060005b8d7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe081168210610dca578091507fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0811610610d91575b905060018092501b016003880155610806565
b60f87fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff9160031b161c19908501351690558b808c610d7e565b5085820135835560019092019160209182019101610d28565b60046040517f6b4a810d000000000000000000000000000000000000000000000000000000008152fd5b50826107f7565b50816107f1565b60046040517f95406722000000000000000000000000000000000000000000000000000000008152fd5b346101445760607ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc3601126101445761066a610e8f610e8261237e565b6044359060243590612af7565b604051918291826121f8565b34610144576020610eb4610eae366123a1565b91612a9c565b604051908152f35b346101445760007ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc36011261014457602073ffffffffffffffffffffffffffffffffffffffff60015416604051908152f35b346101445760007ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc36011261014457610f45612bdb565b7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff00600a5416600a557f11a03e25ee25bf1459f9e1cb293ea03707d84917f54a65e32c9a7be2f2edd68a6020604051338152a1005b346101445760007ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc3601126101445760005473ffffffffffffffffffffffffffffffffffffffff808216330361104557600154917fffffffffffffffffffffffff0000000000000000000000000000000000000000903382851617600155166000553391167f8be0079c531659141344cd1fd0a4f28419497f9722a3daafe3b4186f6b6457e0600080a3005b60046040517f02b543c6000000000000000000000000000000000000000000000000000000008152fd5b346101445760007ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc36011261014457600880546110ab81612410565b6110b86040519182612297565b8181526110c482612410565b916020937fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe060208401940136853760005b82811061114857505050906040519283926020840190602085525180915260408401929160005b82811061112b57505050500390f35b835163ffffffff168552869550938101939281019260010161111c565b6001908260005263ffffffff817ff3f7a9fe364faab93b216da50a3214154f22a0a2b415b23a84c8169e8b636ee30154166111838287612542565b52016110f5565b346101445761119836612306565b916111a1612bdb565b60ff600a54166104ac5760005b82811061122d575060405191806040840160408552526060830191906000905b8082106112055785151560208601527fcab63bf31d1e656baa23cebef64e12033ea0ffbd44b1278c3747beec2d2f618c85850386a1005b90919283359063ffffffff8216809203610144576001918152602080910194019201906111ce565b600190841561125f5761125763ffffffff61125161124c848888612a7b565b612a8b565b16612ee3565b505b016111ae565b61127b63ffffffff61127561124c848888612a7b565b1661307a565b50611259565b346101445760207ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc3601126101445760ff600a54166104ac576112c660043533612dc1565b600181019081549163ffffffff8360a01c169260ff8160c01c16600281101561047d5715610453577fffffffffffffff00ffffffffffffffffffffffffffffffffffffffffffffffff9061131a3386612c26565b16905580547f17b2d730bb5e064df3fbc6165c8aceb3b0d62c524c196c0bc1012209280bc9a6604051602081528061044e339560026020840191016125e4565b34610144576020807ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc360112610144576004359060ff600a54166104ac576113a28233612dc1565b916113ba336000526007602052604060002054151590565b156114d25760049233600052600283526113d8826040600020613294565b50600181019063ffffffff80835460a01c16600052600385526113ff846040600020613294565b506005820161140e8154612591565b61149e575b508154925460a01c16917f76ee2dfcae10cb8522e62e713e62660e09ecfaab08db15d9404de1914132257160405186815280611456339560028a840191016125e4565b0390a46000525261149c60056040600020600081556000600182015561147e60028201612a32565b61148a60
038201612a32565b61149660048201612a32565b01612a32565b005b6040516114b381610af089820194338661294e565b519020600052600585526114cb846040600020613294565b5086611413565b60246040517f85982a00000000000000000000000000000000000000000000000000000000008152336004820152fd5b346101445760e07ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc3601126101445760043567ffffffffffffffff8111610144576115519036906004016122d8565b6044359163ffffffff8316830361014457600260643510156101445760843567ffffffffffffffff81116101445761158d9036906004016122d8565b91909260a43567ffffffffffffffff8111610144576115b09036906004016122d8565b60c43567ffffffffffffffff8111610144576115d09036906004016122d8565b96909560ff600a54166104ac576115e7338a612c26565b60408511611e04576115fd888483602435612cd0565b611608858733612a9c565b80600052600460205273ffffffffffffffffffffffffffffffffffffffff60016040600020015416611dda57604051906116418261227a565b602435825233602083015263ffffffff8b16604083015261166760643560608401612585565b61167236888a61284c565b608083015261168236848661284c565b60a083015261169236868861284c565b60c08301526116a2368b8b61284c565b60e0830152806000526004602052604060002091805183556001830173ffffffffffffffffffffffffffffffffffffffff60208301511681549077ffffffff0000000000000000000000000000000000000000604085015160a01b16906060850151600281101561047d5778ff0000000000000000000000000000000000000000000000007fffffffffffffff000000000000000000000000000000000000000000000000009160c01b1693161717179055608081015180519067ffffffffffffffff8211610aa7576117858261177c6002880154612591565b600288016128c8565b602090601f8311600114611d0e576117d2929160009183611c375750507fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff8260011b9260031b1c19161790565b60028401555b60a081015180519067ffffffffffffffff8211610aa757611809826118006003880154612591565b600388016128c8565b602090601f8311600114611c4257611856929160009183611c375750507fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff8260011b9260031b1c19161790565b60038401555b60c081015180519067ffffffffffffffff8211610aa75761188d826118846004880154612591565b600488016128c8565b602090601f8311600114611b6a5791806118de9260e09594600092611a455750507fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff8260011b9260031b1c19161790565b60048501555b015180519267ffffffffffffffff8411610aa757838d926119168e9661190d6005860154612591565b600586016128c8565b602090601f8311600114611a50579463ffffffff61087295819a957fc4399022965bad9b2b468bbd8c758a7e80cdde36ff3088ddbb7f93bdfb5623cb9f9e9d99946119a28761044e9f9b98600593611a069f9a600092611a455750507fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff8260011b9260031b1c19161790565b9101555b3360005260026020526119bd836040600020612fee565b501660005260036020526119d5816040600020612fee565b508d82611a1c575b5050506108646040519a8b9a6119f58c6064356120e9565b60a060208d015260a08c019161290f565b978389036080850152169633966024359661290f565b611a3c92611a2a9133612a9c565b60005260056020526040600020612fee565b508c8f8d6119dd565b01519050388061091d565b906005840160005260206000209160005b7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe085168110611b4057506108729563ffffffff9a957fc4399022965bad9b2b468bbd8c758a7e80cdde36ff3088ddbb7f93bdfb5623cb9f9e9d999460018761044e9f9b96928f9693611a069f9a94837fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe06005971610611b09575b505050811b019101556119a6565b01517fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff60f88460031b161c19169055388080611afb565b939550918194969750600160209291839285015181550194019201918f9492918f97969492611a61565b9060048601600052602
06000209160005b7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe085168110611c1f5750918391600193837fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe060e098971610611be8575b505050811b0160048501556118e4565b01517fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff60f88460031b161c191690558f8080611bd8565b91926020600181928685015181550194019201611b7b565b015190508f8061091d565b9190600386016000526020600020906000935b7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe084168510611cf35760019450837fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0811610611cbc575b505050811b01600384015561185c565b01517fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff60f88460031b161c191690558e8080611cac565b81810151835560209485019460019093019290910190611c55565b9190600286016000526020600020906000935b7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe084168510611dbf5760019450837fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0811610611d88575b505050811b0160028401556117d8565b01517fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff60f88460031b161c191690558e8080611d78565b81810151835560209485019460019093019290910190611d21565b60046040517fa0677dd0000000000000000000000000000000000000000000000000000000008152fd5b604485604051907f36a7c503000000000000000000000000000000000000000000000000000000008252600482015260406024820152fd5b346101445760007ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc36011261014457611e73612bdb565b60017fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff00600a541617600a557f2789711f6fd67d131ad68378617b5d1d21a2c92b34d7c3745d70b3957c08096c6020604051338152a1005b34610144576020807ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc3601126101445760043567ffffffffffffffff811161014457611f1a9036906004016122d8565b60ff600a54166104ac57611f2e9133612a9c565b90816000526005602052604060002091825491821561066e5760005b838110611f5357005b80611f6060019287612ecb565b90549060031b1c60005260048352604060002063ffffffff8382015460a01c1660005260098452604060002054151580611fea575b611fa1575b5001611f4a565b7f95d94f817db4971aa99ba35d0fe019bd8cc39866fbe02b6d47b5f0f3727fb67360405186815260408682015280611fe1339460026040840191016125e4565b0390a286611f9a565b50612002336000526007602052604060002054151590565b611f95565b346101445760007ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc36011261014457604051604081019080821067ffffffffffffffff831117610aa75761066a91604052601a81527f576f726b666c6f77526567697374727920312e302e302d64657600000000000060208201526040519182916020835260208301906120f6565b346101445760607ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc3601126101445760043563ffffffff8116810361014457610e8f61066a916044359060243590612757565b90600282101561047d5752565b919082519283825260005b8481106121405750507fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0601f8460006020809697860101520116010190565b602081830181015184830182015201612101565b6121f59160e06121e46121d26121c06101008651865273ffffffffffffffffffffffffffffffffffffffff602088015116602087015263ffffffff60408801511660408701526121ac606088015160608801906120e9565b6080870151908060808801528601906120f6565b60a086015185820360a08701526120f6565b60c085015184820360c08601526120f6565b9201519060e08184039101526120f6565b90565b6020808201906020835283518092526040830192602060408460051b8301019501936000915b84831061222e5750505050505090565b909192939495848061226a837fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc086600196030187528a51
612154565b980193019301919493929061221e565b610100810190811067ffffffffffffffff821117610aa757604052565b90601f7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0910116810190811067ffffffffffffffff821117610aa757604052565b9181601f840112156101445782359167ffffffffffffffff8311610144576020838186019501011161014457565b9060407ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc8301126101445760043567ffffffffffffffff9283821161014457806023830112156101445781600401359384116101445760248460051b8301011161014457602401919060243580151581036101445790565b6004359073ffffffffffffffffffffffffffffffffffffffff8216820361014457565b9060407ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc8301126101445760043573ffffffffffffffffffffffffffffffffffffffff8116810361014457916024359067ffffffffffffffff82116101445761240c916004016122d8565b9091565b67ffffffffffffffff8111610aa75760051b60200190565b604051906124358261227a565b606060e0836000815260006020820152600060408201526000838201528260808201528260a08201528260c08201520152565b6040516020810181811067ffffffffffffffff821117610aa7576040526000815290565b9061249682612410565b6124a36040519182612297565b8281527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe06124d18294612410565b019060005b8281106124e257505050565b6020906124ed612428565b828285010152016124d6565b9190820180921161250657565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052601160045260246000fd5b9190820391821161250657565b80518210156125565760209160051b010190565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052603260045260246000fd5b600282101561047d5752565b90600182811c921680156125da575b60208310146125ab57565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052602260045260246000fd5b91607f16916125a0565b8054600093926125f382612591565b9182825260209360019160018116908160001461265b575060011461261a575b5050505050565b90939495506000929192528360002092846000945b83861061264757505050500101903880808080612613565b80548587018301529401938590820161262f565b7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff00168685015250505090151560051b010191503880808080612613565b90600560e06040936127538551916126af8361227a565b61274c8397825485526126f960ff600185015473ffffffffffffffffffffffffffffffffffffffff8116602089015263ffffffff8160a01c168489015260c01c1660608701612585565b805161270c8161078d81600288016125e4565b608086015280516127248161078d81600388016125e4565b60a0860152805161273c8161078d81600488016125e4565b60c08601525180968193016125e4565b0384612297565b0152565b63ffffffff16916000838152600360209060036020526040936040842054908187101561283c576127ab918160648993118015612834575b61282c575b8161279f82856124f9565b111561281c5750612535565b946127b58661248c565b96845b8781106127ca57505050505050505090565b6001908287528486526127e98888206127e383876124f9565b90612ecb565b905490861b1c875260048652612800888820612698565b61280a828c612542565b52612815818b612542565b50016127b8565b6128279150826124f9565b612535565b506064612794565b50801561278f565b50505050505050506121f5612468565b92919267ffffffffffffffff8211610aa7576040519161289460207fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0601f8401160184612297565b829481845281830111610144578281602093846000960137010152565b8181106128bc575050565b600081556001016128b1565b9190601f81116128d757505050565b612903926000526020600020906020601f840160051c83019310612905575b601f0160051c01906128b1565b565b90915081906128f6565b601f82602094937fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0938186528686013760008582860101520116010190565b91907ffffffffffff
fffffffffffffffffffffffffffff0000000000000000000000009060601b16825260149060009281549261298a84612591565b926001946001811690816000146129f157506001146129ac575b505050505090565b9091929395945060005260209460206000206000905b8582106129de57505050506014929350010138808080806129a4565b80548583018501529087019082016129c2565b92505050601494507fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff00919350168383015280151502010138808080806129a4565b612a3c8154612591565b9081612a46575050565b81601f60009311600114612a58575055565b908083918252612a77601f60208420940160051c8401600185016128b1565b5555565b91908110156125565760051b0190565b3563ffffffff811681036101445790565b91906034612af191836040519485927fffffffffffffffffffffffffffffffffffffffff000000000000000000000000602085019860601b168852848401378101600083820152036014810184520182612297565b51902090565b73ffffffffffffffffffffffffffffffffffffffff1691600083815260029260209060026020526040936040842054908183101561283c57612b4e9181606485931180156128345761282c578161279f82856124f9565b94612b588661248c565b96845b878110612b6d57505050505050505090565b600190828752838652612b868888206127e383886124f9565b90549060031b1c875260048652612b9e888820612698565b612ba8828c612542565b52612bb3818b612542565b5001612b5b565b3573ffffffffffffffffffffffffffffffffffffffff811681036101445790565b73ffffffffffffffffffffffffffffffffffffffff600154163303612bfc57565b60046040517f2b5c74de000000000000000000000000000000000000000000000000000000008152fd5b63ffffffff1680600052600960205260406000205415612c9f575073ffffffffffffffffffffffffffffffffffffffff1680600052600760205260406000205415612c6e5750565b602490604051907f85982a000000000000000000000000000000000000000000000000000000000082526004820152fd5b602490604051907f8fe6d7e10000000000000000000000000000000000000000000000000000000082526004820152fd5b91909115612d975760c891828111612d615750818111612d2c5750808211612cf6575050565b60449250604051917ecd56a800000000000000000000000000000000000000000000000000000000835260048301526024820152fd5b604491604051917ecd56a800000000000000000000000000000000000000000000000000000000835260048301526024820152fd5b60449083604051917ecd56a800000000000000000000000000000000000000000000000000000000835260048301526024820152fd5b60046040517f7dc2f4e1000000000000000000000000000000000000000000000000000000008152fd5b90600052600460205260406000209073ffffffffffffffffffffffffffffffffffffffff8060018401541691821561066e5716809103612dff575090565b602490604051907f31ee6dc70000000000000000000000000000000000000000000000000000000082526004820152fd5b9081518151908181149384612e47575b5050505090565b6020929394508201209201201438808080612e40565b6008548110156125565760086000527ff3f7a9fe364faab93b216da50a3214154f22a0a2b415b23a84c8169e8b636ee30190600090565b6006548110156125565760066000527ff652222313e28459528d920b65115c16c04f3efc82aaedc97be59f3f377c0d3f0190600090565b80548210156125565760005260206000200190600090565b600081815260096020526040812054612f975760085468010000000000000000811015612f6a579082612f56612f2184600160409601600855612e5d565b81939154907fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff9060031b92831b921b19161790565b905560085492815260096020522055600190565b6024827f4e487b710000000000000000000000000000000000000000000000000000000081526041600452fd5b905090565b600081815260076020526040812054612f975760065468010000000000000000811015612f6a579082612fda612f2184600160409601600655612e94565b905560065492815260076020522055600190565b9190600183016000908282528060205260408220541560001461307457845494680100000000000000008610156130475783613037612f21886001604098999a01855584612ecb565b9055549382526020522055600190565b60
24837f4e487b710000000000000000000000000000000000000000000000000000000081526041600452fd5b50925050565b60008181526009602052604081205490919080156131c8577fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff9081810181811161319b576008549083820191821161316e5781810361313a575b505050600854801561310d578101906130ec82612e5d565b909182549160031b1b19169055600855815260096020526040812055600190565b6024847f4e487b710000000000000000000000000000000000000000000000000000000081526031600452fd5b613158613149612f2193612e5d565b90549060031b1c928392612e5d565b90558452600960205260408420553880806130d4565b6024867f4e487b710000000000000000000000000000000000000000000000000000000081526011600452fd5b6024857f4e487b710000000000000000000000000000000000000000000000000000000081526011600452fd5b505090565b60008181526007602052604081205490919080156131c8577fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff9081810181811161319b576006549083820191821161316e57818103613260575b505050600654801561310d5781019061323f82612e94565b909182549160031b1b19169055600655815260076020526040812055600190565b61327e61326f612f2193612e94565b90549060031b1c928392612e94565b9055845260076020526040842055388080613227565b90600182019060009281845282602052604084205490811515600014612e40577fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff918281018181116133b95782549084820191821161338c57818103613357575b5050508054801561332a5782019161330d8383612ecb565b909182549160031b1b191690555582526020526040812055600190565b6024867f4e487b710000000000000000000000000000000000000000000000000000000081526031600452fd5b613377613367612f219386612ecb565b90549060031b1c92839286612ecb565b905586528460205260408620553880806132f5565b6024887f4e487b710000000000000000000000000000000000000000000000000000000081526011600452fd5b6024877f4e487b710000000000000000000000000000000000000000000000000000000081526011600452fdfea164736f6c6343000818000a", + Bin: 
"0x6080806040523461004a57331561003b57600180546001600160a01b03191633179055600b805460ff191690556040516134e290816100508239f35b639b15e16f60e01b8152600490fd5b600080fdfe6080604052600436101561001257600080fd5b60003560e01c806308e7f63a1461210e578063181f5a771461207f5780632303348a14611f425780632b596f6d14611eb45780633ccd14ff14611572578063695e1340146113965780636f351771146112bd578063724c13dd146111c65780637497066b146110ab57806379ba509714610fd55780637ec0846d14610f4a5780638da5cb5b14610ef85780639f4cb53414610ed7578063b87a019414610e81578063d4b89c7414610698578063db800092146105fd578063e3dce080146104d6578063e690f33214610362578063f2fde38b14610284578063f794bdeb146101495763f99ecb6b1461010357600080fd5b346101445760007ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc36011261014457602060ff600b54166040519015158152f35b600080fd5b346101445760007ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc360112610144576007805461018581612488565b610192604051918261230f565b81815261019e82612488565b916020937fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe060208401940136853760005b82811061023257505050906040519283926020840190602085525180915260408401929160005b82811061020557505050500390f35b835173ffffffffffffffffffffffffffffffffffffffff16855286955093810193928101926001016101f6565b6001908260005273ffffffffffffffffffffffffffffffffffffffff817fa66cc928b5edb82af9bd49922954155ab7b0942694bea4ce44661d9a8736c68801541661027d82876125ba565b52016101cf565b346101445760207ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc360112610144576102bb6123f6565b6102c3612c53565b73ffffffffffffffffffffffffffffffffffffffff8091169033821461033857817fffffffffffffffffffffffff00000000000000000000000000000000000000006000541617600055600154167fed8889f560326eb138920d842192f0eb3dd22b4f139c87a2c57538e05bae1278600080a3005b60046040517fdad89dca000000000000000000000000000000000000000000000000000000008152fd5b346101445760207ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc3601126101445760ff600b54166104ac576103a760043533612eb0565b600181019081549160ff8360c01c16600281101561047d576001146104535778010000000000000000000000000000000000000000000000007fffffffffffffff00ffffffffffffffffffffffffffffffffffffffffffffffff841617905580547f6a0ed88e9cf3cb493ab4028fcb1dc7d18f0130fcdfba096edde0aadbfbf5e99f63ffffffff604051946020865260a01c16938061044e3395600260208401910161265c565b0390a4005b60046040517f6f861db1000000000000000000000000000000000000000000000000000000008152fd5b7f4e487b7100000000000000000000000000000000000000000000000000000000600052602160045260246000fd5b60046040517f78a4e7d9000000000000000000000000000000000000000000000000000000008152fd5b34610144576104e43661237e565b916104ed612c53565b60ff600b54166104ac5760005b828110610589575060405191806040840160408552526060830191906000905b8082106105515785151560208601527f509460cccbb176edde6cac28895a4415a24961b8f3a0bd2617b9bb7b4e166c9b85850386a1005b90919283359073ffffffffffffffffffffffffffffffffffffffff82168092036101445760019181526020809101940192019061051a565b60019084156105cb576105c373ffffffffffffffffffffffffffffffffffffffff6105bd6105b8848888612af3565b612c32565b1661308b565b505b016104fa565b6105f773ffffffffffffffffffffffffffffffffffffffff6105f16105b8848888612af3565b166132bc565b506105c5565b346101445761061d61060e36612419565b916106176124a0565b50612b14565b6000526004602052604060002073ffffffffffffffffffffffffffffffffffffffff6001820154161561066e5761065661066a91612710565b6040519182916020835260208301906121cc565b0390f35b60046040517f871e01b2000000000000000000000000000000000000000000000000000000008152fd5b346101445760a0
7ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc3601126101445760443567ffffffffffffffff8111610144576106e7903690600401612350565b9060643567ffffffffffffffff811161014457610708903690600401612350565b9160843567ffffffffffffffff811161014457610729903690600401612350565b60ff600b94929454166104ac57610741818688612d48565b61074d60043533612eb0565b9163ffffffff600184015460a01c16956107673388612c9e565b8354946024358614610e57576107a26040516107918161078a8160038b0161265c565b038261230f565b61079c368c856128c4565b90612f1f565b6107c46040516107b98161078a8160048c0161265c565b61079c3686886128c4565b6107e66040516107db8161078a8160058d0161265c565b61079c36898d6128c4565b918080610e50575b80610e49575b610e1f57610803602435612e08565b88600052600660205260406000207fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff008154169055602435885515610cca575b15610b79575b156108cc575b926108bc7f41161473ce2ed633d9f902aab9702d16a5531da27ec84e1939abeffe54ad7353959361044e936108ae6108a0978d604051998a996024358b5260a060208c0152600260a08c01910161265c565b9189830360408b0152612987565b918683036060880152612987565b9083820360808501523397612987565b6108d96005860154612609565b610b12575b67ffffffffffffffff8411610ae357610907846108fe6005880154612609565b60058801612940565b6000601f85116001146109e3579284926108ae6108bc938a9b9c61098b876108a09b9a61044e9a7f41161473ce2ed633d9f902aab9702d16a5531da27ec84e1939abeffe54ad73539e9f6000926109d8575b50507fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff8260011b9260031b1c19161790565b60058a01555b8c87806109ae575b50509c9b9a995093505092949550925061084e565b6109b89133612b14565b60005260056020526109d060043560406000206130dd565b508c87610999565b013590508f80610959565b9860058601600052602060002060005b7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe087168110610acb5750926108ae6108bc9361044e969388968c7f41161473ce2ed633d9f902aab9702d16a5531da27ec84e1939abeffe54ad73539c9d9e9f897fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe06108a09e9d1610610a93575b505050600187811b0160058a0155610991565b7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff60f88b60031b161c199101351690558e8d81610a80565b898c0135825560209b8c019b600190920191016109f3565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052604160045260246000fd5b6040516020810190610b5881610b2c60058a0133866129c6565b037fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0810183528261230f565b5190206000526005602052610b736004356040600020613383565b506108de565b67ffffffffffffffff8311610ae357610ba283610b996004890154612609565b60048901612940565b600083601f8111600114610c035780610bee92600091610bf8575b507fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff8260011b9260031b1c19161790565b6004870155610848565b90508601358d610bbd565b506004870160005260206000209060005b7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe086168110610cb25750847fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0811610610c7a575b5050600183811b016004870155610848565b7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff60f88660031b161c19908601351690558a80610c68565b9091602060018192858a013581550193019101610c14565b67ffffffffffffffff8b11610ae357610cf38b610cea60038a0154612609565b60038a01612940565b60008b601f8111600114610d535780610d3e92600091610d4857507fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff8260011b9260031b1c19161790565b6003880155610842565b90508501358e610bbd565b506003880160005260206000209060005b8d7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe081168210610e065780915
07fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0811610610dcd575b905060018092501b016003880155610842565b60f87fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff9160031b161c19908501351690558b808c610dba565b5085820135835560019092019160209182019101610d64565b60046040517f6b4a810d000000000000000000000000000000000000000000000000000000008152fd5b50826107f4565b50816107ee565b60046040517f95406722000000000000000000000000000000000000000000000000000000008152fd5b346101445760607ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc3601126101445761066a610ecb610ebe6123f6565b6044359060243590612b6f565b60405191829182612270565b34610144576020610ef0610eea36612419565b91612b14565b604051908152f35b346101445760007ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc36011261014457602073ffffffffffffffffffffffffffffffffffffffff60015416604051908152f35b346101445760007ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc36011261014457610f81612c53565b7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff00600b5416600b557f11a03e25ee25bf1459f9e1cb293ea03707d84917f54a65e32c9a7be2f2edd68a6020604051338152a1005b346101445760007ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc3601126101445760005473ffffffffffffffffffffffffffffffffffffffff808216330361108157600154917fffffffffffffffffffffffff0000000000000000000000000000000000000000903382851617600155166000553391167f8be0079c531659141344cd1fd0a4f28419497f9722a3daafe3b4186f6b6457e0600080a3005b60046040517f02b543c6000000000000000000000000000000000000000000000000000000008152fd5b346101445760007ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc36011261014457600980546110e781612488565b6110f4604051918261230f565b81815261110082612488565b916020937fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe060208401940136853760005b82811061118457505050906040519283926020840190602085525180915260408401929160005b82811061116757505050500390f35b835163ffffffff1685528695509381019392810192600101611158565b6001908260005263ffffffff817f6e1540171b6c0c960b71a7020d9f60077f6af931a8bbf590da0223dacf75c7af0154166111bf82876125ba565b5201611131565b34610144576111d43661237e565b916111dd612c53565b60ff600b54166104ac5760005b828110611269575060405191806040840160408552526060830191906000905b8082106112415785151560208601527fcab63bf31d1e656baa23cebef64e12033ea0ffbd44b1278c3747beec2d2f618c85850386a1005b90919283359063ffffffff82168092036101445760019181526020809101940192019061120a565b600190841561129b5761129363ffffffff61128d611288848888612af3565b612b03565b16612fd2565b505b016111ea565b6112b763ffffffff6112b1611288848888612af3565b16613169565b50611295565b346101445760207ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc3601126101445760ff600b54166104ac5761130260043533612eb0565b600181019081549163ffffffff8360a01c169260ff8160c01c16600281101561047d5715610453577fffffffffffffff00ffffffffffffffffffffffffffffffffffffffffffffffff906113563386612c9e565b16905580547f17b2d730bb5e064df3fbc6165c8aceb3b0d62c524c196c0bc1012209280bc9a6604051602081528061044e3395600260208401910161265c565b34610144576020807ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc360112610144576004359060ff600b54166104ac576113de8233612eb0565b916113f6336000526008602052604060002054151590565b156115425782600493546000526006835260406000207fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff0081541690553360005260028352611448826040600020613383565b50600181019063ffffffff80835460a01c166000526003855261146f846040600020613383565b506005820161147e8154612609565b61150e57
5b508154925460a01c16917f76ee2dfcae10cb8522e62e713e62660e09ecfaab08db15d9404de19141322571604051868152806114c6339560028a8401910161265c565b0390a46000525261150c6005604060002060008155600060018201556114ee60028201612aaa565b6114fa60038201612aaa565b61150660048201612aaa565b01612aaa565b005b60405161152381610b2c8982019433866129c6565b5190206000526005855261153b846040600020613383565b5086611483565b60246040517f85982a00000000000000000000000000000000000000000000000000000000008152336004820152fd5b346101445760e07ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc3601126101445760043567ffffffffffffffff8111610144576115c1903690600401612350565b6044359163ffffffff8316830361014457600260643510156101445760843567ffffffffffffffff8111610144576115fd903690600401612350565b91909260a43567ffffffffffffffff811161014457611620903690600401612350565b60c43567ffffffffffffffff811161014457611640903690600401612350565b96909560ff600b54166104ac57611657338a612c9e565b60408511611e7c5761166a888483612d48565b611675858733612b14565b80600052600460205273ffffffffffffffffffffffffffffffffffffffff60016040600020015416611e52576116ac602435612e08565b604051906116b9826122f2565b602435825233602083015263ffffffff8b1660408301526116df606435606084016125fd565b6116ea36888a6128c4565b60808301526116fa3684866128c4565b60a083015261170a3686886128c4565b60c083015261171a368b8b6128c4565b60e0830152806000526004602052604060002091805183556001830173ffffffffffffffffffffffffffffffffffffffff60208301511681549077ffffffff0000000000000000000000000000000000000000604085015160a01b16906060850151600281101561047d5778ff0000000000000000000000000000000000000000000000007fffffffffffffff000000000000000000000000000000000000000000000000009160c01b1693161717179055608081015180519067ffffffffffffffff8211610ae3576117fd826117f46002880154612609565b60028801612940565b602090601f8311600114611d865761184a929160009183611caf5750507fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff8260011b9260031b1c19161790565b60028401555b60a081015180519067ffffffffffffffff8211610ae357611881826118786003880154612609565b60038801612940565b602090601f8311600114611cba576118ce929160009183611caf5750507fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff8260011b9260031b1c19161790565b60038401555b60c081015180519067ffffffffffffffff8211610ae357611905826118fc6004880154612609565b60048801612940565b602090601f8311600114611be25791806119569260e09594600092611abd5750507fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff8260011b9260031b1c19161790565b60048501555b015180519267ffffffffffffffff8411610ae357838d9261198e8e966119856005860154612609565b60058601612940565b602090601f8311600114611ac8579463ffffffff6108ae95819a957fc4399022965bad9b2b468bbd8c758a7e80cdde36ff3088ddbb7f93bdfb5623cb9f9e9d9994611a1a8761044e9f9b98600593611a7e9f9a600092611abd5750507fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff8260011b9260031b1c19161790565b9101555b336000526002602052611a358360406000206130dd565b50166000526003602052611a4d8160406000206130dd565b508d82611a94575b5050506108a06040519a8b9a611a6d8c606435612161565b60a060208d015260a08c0191612987565b9783890360808501521696339660243596612987565b611ab492611aa29133612b14565b600052600560205260406000206130dd565b508c8f8d611a55565b015190503880610959565b906005840160005260206000209160005b7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe085168110611bb857506108ae9563ffffffff9a957fc4399022965bad9b2b468bbd8c758a7e80cdde36ff3088ddbb7f93bdfb5623cb9f9e9d999460018761044e9f9b96928f9693611a7e9f9a94837fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe06005971610611b81575b5
05050811b01910155611a1e565b01517fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff60f88460031b161c19169055388080611b73565b939550918194969750600160209291839285015181550194019201918f9492918f97969492611ad9565b906004860160005260206000209160005b7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe085168110611c975750918391600193837fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe060e098971610611c60575b505050811b01600485015561195c565b01517fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff60f88460031b161c191690558f8080611c50565b91926020600181928685015181550194019201611bf3565b015190508f80610959565b9190600386016000526020600020906000935b7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe084168510611d6b5760019450837fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0811610611d34575b505050811b0160038401556118d4565b01517fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff60f88460031b161c191690558e8080611d24565b81810151835560209485019460019093019290910190611ccd565b9190600286016000526020600020906000935b7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe084168510611e375760019450837fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0811610611e00575b505050811b016002840155611850565b01517fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff60f88460031b161c191690558e8080611df0565b81810151835560209485019460019093019290910190611d99565b60046040517fa0677dd0000000000000000000000000000000000000000000000000000000008152fd5b604485604051907f36a7c503000000000000000000000000000000000000000000000000000000008252600482015260406024820152fd5b346101445760007ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc36011261014457611eeb612c53565b60017fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff00600b541617600b557f2789711f6fd67d131ad68378617b5d1d21a2c92b34d7c3745d70b3957c08096c6020604051338152a1005b34610144576020807ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc3601126101445760043567ffffffffffffffff811161014457611f92903690600401612350565b60ff600b54166104ac57611fa69133612b14565b90816000526005602052604060002091825491821561066e5760005b838110611fcb57005b80611fd860019287612fba565b90549060031b1c60005260048352604060002063ffffffff8382015460a01c16600052600a8452604060002054151580612062575b612019575b5001611fc2565b7f95d94f817db4971aa99ba35d0fe019bd8cc39866fbe02b6d47b5f0f3727fb673604051868152604086820152806120593394600260408401910161265c565b0390a286612012565b5061207a336000526008602052604060002054151590565b61200d565b346101445760007ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc36011261014457604051604081019080821067ffffffffffffffff831117610ae35761066a91604052601a81527f576f726b666c6f77526567697374727920312e302e302d646576000000000000602082015260405191829160208352602083019061216e565b346101445760607ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc3601126101445760043563ffffffff8116810361014457610ecb61066a9160443590602435906127cf565b90600282101561047d5752565b919082519283825260005b8481106121b85750507fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0601f8460006020809697860101520116010190565b602081830181015184830182015201612179565b61226d9160e061225c61224a6122386101008651865273ffffffffffffffffffffffffffffffffffffffff602088015116602087015263ffffffff604088015116604087015261222460608801516060880190612161565b60808701519080608088015286019061216e565b60a086015185820360a087015261216e565b60c085015184820360c086015261216e565b9201519060e0818403
91015261216e565b90565b6020808201906020835283518092526040830192602060408460051b8301019501936000915b8483106122a65750505050505090565b90919293949584806122e2837fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc086600196030187528a516121cc565b9801930193019194939290612296565b610100810190811067ffffffffffffffff821117610ae357604052565b90601f7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0910116810190811067ffffffffffffffff821117610ae357604052565b9181601f840112156101445782359167ffffffffffffffff8311610144576020838186019501011161014457565b9060407ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc8301126101445760043567ffffffffffffffff9283821161014457806023830112156101445781600401359384116101445760248460051b8301011161014457602401919060243580151581036101445790565b6004359073ffffffffffffffffffffffffffffffffffffffff8216820361014457565b9060407ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc8301126101445760043573ffffffffffffffffffffffffffffffffffffffff8116810361014457916024359067ffffffffffffffff82116101445761248491600401612350565b9091565b67ffffffffffffffff8111610ae35760051b60200190565b604051906124ad826122f2565b606060e0836000815260006020820152600060408201526000838201528260808201528260a08201528260c08201520152565b6040516020810181811067ffffffffffffffff821117610ae3576040526000815290565b9061250e82612488565b61251b604051918261230f565b8281527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe06125498294612488565b019060005b82811061255a57505050565b6020906125656124a0565b8282850101520161254e565b9190820180921161257e57565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052601160045260246000fd5b9190820391821161257e57565b80518210156125ce5760209160051b010190565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052603260045260246000fd5b600282101561047d5752565b90600182811c92168015612652575b602083101461262357565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052602260045260246000fd5b91607f1691612618565b80546000939261266b82612609565b918282526020936001916001811690816000146126d35750600114612692575b5050505050565b90939495506000929192528360002092846000945b8386106126bf5750505050010190388080808061268b565b8054858701830152940193859082016126a7565b7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff00168685015250505090151560051b01019150388080808061268b565b90600560e06040936127cb855191612727836122f2565b6127c483978254855261277160ff600185015473ffffffffffffffffffffffffffffffffffffffff8116602089015263ffffffff8160a01c168489015260c01c16606087016125fd565b80516127848161078a816002880161265c565b6080860152805161279c8161078a816003880161265c565b60a086015280516127b48161078a816004880161265c565b60c086015251809681930161265c565b038461230f565b0152565b63ffffffff1691600083815260036020906003602052604093604084205490818710156128b4576128239181606489931180156128ac575b6128a4575b816128178285612571565b111561289457506125ad565b9461282d86612504565b96845b87811061284257505050505050505090565b60019082875284865261286188882061285b8387612571565b90612fba565b905490861b1c875260048652612878888820612710565b612882828c6125ba565b5261288d818b6125ba565b5001612830565b61289f915082612571565b6125ad565b50606461280c565b508015612807565b505050505050505061226d6124e0565b92919267ffffffffffffffff8211610ae3576040519161290c60207fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0601f840116018461230f565b829481845281830111610144578281602093846000960137010152565b818110612934575050565b60008155600101612929565b9190601f811161294f57505050565b61297b926000526
020600020906020601f840160051c8301931061297d575b601f0160051c0190612929565b565b909150819061296e565b601f82602094937fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0938186528686013760008582860101520116010190565b91907fffffffffffffffffffffffffffffffffffffffff0000000000000000000000009060601b168252601490600092815492612a0284612609565b92600194600181169081600014612a695750600114612a24575b505050505090565b9091929395945060005260209460206000206000905b858210612a565750505050601492935001013880808080612a1c565b8054858301850152908701908201612a3a565b92505050601494507fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff0091935016838301528015150201013880808080612a1c565b612ab48154612609565b9081612abe575050565b81601f60009311600114612ad0575055565b908083918252612aef601f60208420940160051c840160018501612929565b5555565b91908110156125ce5760051b0190565b3563ffffffff811681036101445790565b91906034612b6991836040519485927fffffffffffffffffffffffffffffffffffffffff000000000000000000000000602085019860601b16885284840137810160008382015203601481018452018261230f565b51902090565b73ffffffffffffffffffffffffffffffffffffffff169160008381526002926020906002602052604093604084205490818310156128b457612bc69181606485931180156128ac576128a457816128178285612571565b94612bd086612504565b96845b878110612be557505050505050505090565b600190828752838652612bfe88882061285b8388612571565b90549060031b1c875260048652612c16888820612710565b612c20828c6125ba565b52612c2b818b6125ba565b5001612bd3565b3573ffffffffffffffffffffffffffffffffffffffff811681036101445790565b73ffffffffffffffffffffffffffffffffffffffff600154163303612c7457565b60046040517f2b5c74de000000000000000000000000000000000000000000000000000000008152fd5b63ffffffff1680600052600a60205260406000205415612d17575073ffffffffffffffffffffffffffffffffffffffff1680600052600860205260406000205415612ce65750565b602490604051907f85982a000000000000000000000000000000000000000000000000000000000082526004820152fd5b602490604051907f8fe6d7e10000000000000000000000000000000000000000000000000000000082526004820152fd5b9060c891828111612dd25750818111612d9d5750808211612d67575050565b60449250604051917ecd56a800000000000000000000000000000000000000000000000000000000835260048301526024820152fd5b604491604051917ecd56a800000000000000000000000000000000000000000000000000000000835260048301526024820152fd5b60449083604051917ecd56a800000000000000000000000000000000000000000000000000000000835260048301526024820152fd5b8015612e865780600052600660205260ff60406000205416612e5c576000526006602052604060002060017fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff00825416179055565b60046040517f4cb050e4000000000000000000000000000000000000000000000000000000008152fd5b60046040517f7dc2f4e1000000000000000000000000000000000000000000000000000000008152fd5b90600052600460205260406000209073ffffffffffffffffffffffffffffffffffffffff8060018401541691821561066e5716809103612eee575090565b602490604051907f31ee6dc70000000000000000000000000000000000000000000000000000000082526004820152fd5b9081518151908181149384612f36575b5050505090565b6020929394508201209201201438808080612f2f565b6009548110156125ce5760096000527f6e1540171b6c0c960b71a7020d9f60077f6af931a8bbf590da0223dacf75c7af0190600090565b6007548110156125ce5760076000527fa66cc928b5edb82af9bd49922954155ab7b0942694bea4ce44661d9a8736c6880190600090565b80548210156125ce5760005260206000200190600090565b6000818152600a6020526040812054613086576009546801000000000000000081101561305957908261304561301084600160409601600955612f4c565b81939154907fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff9060031b92831b921b19161790565b905560
0954928152600a6020522055600190565b6024827f4e487b710000000000000000000000000000000000000000000000000000000081526041600452fd5b905090565b60008181526008602052604081205461308657600754680100000000000000008110156130595790826130c961301084600160409601600755612f83565b905560075492815260086020522055600190565b9190600183016000908282528060205260408220541560001461316357845494680100000000000000008610156131365783613126613010886001604098999a01855584612fba565b9055549382526020522055600190565b6024837f4e487b710000000000000000000000000000000000000000000000000000000081526041600452fd5b50925050565b6000818152600a602052604081205490919080156132b7577fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff9081810181811161328a576009549083820191821161325d57818103613229575b50505060095480156131fc578101906131db82612f4c565b909182549160031b1b191690556009558152600a6020526040812055600190565b6024847f4e487b710000000000000000000000000000000000000000000000000000000081526031600452fd5b61324761323861301093612f4c565b90549060031b1c928392612f4c565b90558452600a60205260408420553880806131c3565b6024867f4e487b710000000000000000000000000000000000000000000000000000000081526011600452fd5b6024857f4e487b710000000000000000000000000000000000000000000000000000000081526011600452fd5b505090565b60008181526008602052604081205490919080156132b7577fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff9081810181811161328a576007549083820191821161325d5781810361334f575b50505060075480156131fc5781019061332e82612f83565b909182549160031b1b19169055600755815260086020526040812055600190565b61336d61335e61301093612f83565b90549060031b1c928392612f83565b9055845260086020526040842055388080613316565b90600182019060009281845282602052604084205490811515600014612f2f577fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff918281018181116134a85782549084820191821161347b57818103613446575b50505080548015613419578201916133fc8383612fba565b909182549160031b1b191690555582526020526040812055600190565b6024867f4e487b710000000000000000000000000000000000000000000000000000000081526031600452fd5b6134666134566130109386612fba565b90549060031b1c92839286612fba565b905586528460205260408620553880806133e4565b6024887f4e487b710000000000000000000000000000000000000000000000000000000081526011600452fd5b6024877f4e487b710000000000000000000000000000000000000000000000000000000081526011600452fdfea164736f6c6343000818000a", } var WorkflowRegistryABI = WorkflowRegistryMetaData.ABI diff --git a/core/gethwrappers/workflow/generation/generated-wrapper-dependency-versions-do-not-edit.txt b/core/gethwrappers/workflow/generation/generated-wrapper-dependency-versions-do-not-edit.txt index 7552f72d164..a908ff2e724 100644 --- a/core/gethwrappers/workflow/generation/generated-wrapper-dependency-versions-do-not-edit.txt +++ b/core/gethwrappers/workflow/generation/generated-wrapper-dependency-versions-do-not-edit.txt @@ -1,2 +1,2 @@ GETH_VERSION: 1.14.11 -workflow_registry_wrapper: ../../../contracts/solc/v0.8.24/WorkflowRegistry/WorkflowRegistry.abi ../../../contracts/solc/v0.8.24/WorkflowRegistry/WorkflowRegistry.bin 910925e0786fbe9efb686646ede620e7fc0536c74acdaeef49e96ac67580ea14 +workflow_registry_wrapper: ../../../contracts/solc/v0.8.24/WorkflowRegistry/WorkflowRegistry.abi ../../../contracts/solc/v0.8.24/WorkflowRegistry/WorkflowRegistry.bin bad48df0196c8a170a8e5486d0334183defd60e74bd89d3885989e00d6f13d23 From 0b0955dc2e3db54abbf7f5b674406e8df7682e4a Mon Sep 17 00:00:00 2001 From: Rens Rooimans Date: Wed, 11 Dec 2024 22:23:12 +0100 Subject: [PATCH 04/15] remove audit warning (#15629) --- 
contracts/src/v0.8/shared/token/ERC20/BurnMintERC20.sol | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/contracts/src/v0.8/shared/token/ERC20/BurnMintERC20.sol b/contracts/src/v0.8/shared/token/ERC20/BurnMintERC20.sol index 946a6623b49..ea11dc08798 100644 --- a/contracts/src/v0.8/shared/token/ERC20/BurnMintERC20.sol +++ b/contracts/src/v0.8/shared/token/ERC20/BurnMintERC20.sol @@ -13,7 +13,6 @@ import {IERC165} from "../../../vendor/openzeppelin-solidity/v4.8.3/contracts/ut /// @notice A basic ERC20 compatible token contract with burn and minting roles. /// @dev The total supply can be limited during deployment. -/// @dev This contract has not been audited and is not yet approved for production use. contract BurnMintERC20 is IBurnMintERC20, IGetCCIPAdmin, IERC165, ERC20Burnable, AccessControl { error MaxSupplyExceeded(uint256 supplyAfterMint); error InvalidRecipient(address recipient); @@ -153,7 +152,7 @@ contract BurnMintERC20 is IBurnMintERC20, IGetCCIPAdmin, IERC165, ERC20Burnable, /// @dev only the owner can call this function, NOT the current ccipAdmin, and 1-step ownership transfer is used. /// @param newAdmin The address to transfer the CCIPAdmin role to. Setting to address(0) is a valid way to revoke /// the role - function setCCIPAdmin(address newAdmin) public onlyRole(DEFAULT_ADMIN_ROLE) { + function setCCIPAdmin(address newAdmin) external onlyRole(DEFAULT_ADMIN_ROLE) { address currentAdmin = s_ccipAdmin; s_ccipAdmin = newAdmin; From 00cc18d285e6feb7fb4a66a800e9af5fcb148c4a Mon Sep 17 00:00:00 2001 From: "Simon B.Robert" Date: Wed, 11 Dec 2024 18:28:00 -0500 Subject: [PATCH 05/15] Add common rmn config struct (#15597) * Add common rmn config struct * Move to RMN changeset * Add test using RMNConfig --- .../ccip/changeset/cs_update_rmn_config.go | 30 ++++++++++++ .../changeset/cs_update_rmn_config_test.go | 47 +++++++++++++++---- 2 files changed, 68 insertions(+), 9 deletions(-) diff --git a/deployment/ccip/changeset/cs_update_rmn_config.go b/deployment/ccip/changeset/cs_update_rmn_config.go index b10991c977c..25ae8308eb5 100644 --- a/deployment/ccip/changeset/cs_update_rmn_config.go +++ b/deployment/ccip/changeset/cs_update_rmn_config.go @@ -16,8 +16,38 @@ import ( "github.com/smartcontractkit/chainlink/deployment/common/proposalutils" "github.com/smartcontractkit/chainlink/v2/core/gethwrappers/ccip/generated/rmn_home" "github.com/smartcontractkit/chainlink/v2/core/gethwrappers/ccip/generated/rmn_remote" + "github.com/smartcontractkit/chainlink/v2/core/services/keystore/keys/p2pkey" ) +type RMNNopConfig struct { + NodeIndex uint64 + OffchainPublicKey [32]byte + EVMOnChainPublicKey common.Address + PeerId p2pkey.PeerID +} + +func (c RMNNopConfig) ToRMNHomeNode() rmn_home.RMNHomeNode { + return rmn_home.RMNHomeNode{ + PeerId: c.PeerId, + OffchainPublicKey: c.OffchainPublicKey, + } +} + +func (c RMNNopConfig) ToRMNRemoteSigner() rmn_remote.RMNRemoteSigner { + return rmn_remote.RMNRemoteSigner{ + OnchainPublicKey: c.EVMOnChainPublicKey, + NodeIndex: c.NodeIndex, + } +} + +func (c RMNNopConfig) SetBit(bitmap *big.Int, value bool) { + if value { + bitmap.SetBit(bitmap, int(c.NodeIndex), 1) + } else { + bitmap.SetBit(bitmap, int(c.NodeIndex), 0) + } +} + func getDeployer(e deployment.Environment, chain uint64, mcmConfig *MCMSConfig) *bind.TransactOpts { if mcmConfig == nil { return e.Chains[chain].DeployerKey diff --git a/deployment/ccip/changeset/cs_update_rmn_config_test.go b/deployment/ccip/changeset/cs_update_rmn_config_test.go index e22b85cdf81..3ec309182aa 
100644 --- a/deployment/ccip/changeset/cs_update_rmn_config_test.go +++ b/deployment/ccip/changeset/cs_update_rmn_config_test.go @@ -12,9 +12,31 @@ import ( "github.com/smartcontractkit/chainlink/v2/core/gethwrappers/ccip/generated/rmn_remote" ) +var ( + rmn_staging_1 = RMNNopConfig{ + NodeIndex: 0, + PeerId: deployment.MustPeerIDFromString("p2p_12D3KooWRXxZq3pd4a3ZGkKj7Nt1SQQrnB8CuvbPnnV9KVeMeWqg"), + OffchainPublicKey: [32]byte(common.FromHex("0xb34944857a42444d1b285d7940d6e06682309e0781e43a69676ee9f85c73c2d1")), + EVMOnChainPublicKey: common.HexToAddress("0x5af8ee32316a6427f169a45fdc1b3a91a85ac459e3c1cb91c69e1c51f0c1fc21"), + } + rmn_staging_2 = RMNNopConfig{ + NodeIndex: 1, + PeerId: deployment.MustPeerIDFromString("p2p_12D3KooWEmdxYQFsRbD9aFczF32zA3CcUwuSiWCk2CrmACo4v9RL"), + OffchainPublicKey: [32]byte(common.FromHex("0x68d9f3f274e3985528a923a9bace3d39c55dd778b187b4120b384cc48c892859")), + EVMOnChainPublicKey: common.HexToAddress("0x858589216956f482a0f68b282a7050af4cd48ed2"), + } + rmn_staging_3 = RMNNopConfig{ + NodeIndex: 2, + PeerId: deployment.MustPeerIDFromString("p2p_12D3KooWJS42cNXKJvj6DeZnxEX7aGxhEuap6uNFrz554AbUDw6Q"), + OffchainPublicKey: [32]byte(common.FromHex("0x5af8ee32316a6427f169a45fdc1b3a91a85ac459e3c1cb91c69e1c51f0c1fc21")), + EVMOnChainPublicKey: common.HexToAddress("0x7c5e94162c6fabbdeb3bfe83ae532846e337bfae"), + } +) + type updateRMNConfigTestCase struct { useMCMS bool name string + nops []RMNNopConfig } func TestUpdateRMNConfig(t *testing.T) { @@ -23,10 +45,12 @@ func TestUpdateRMNConfig(t *testing.T) { { useMCMS: true, name: "with MCMS", + nops: []RMNNopConfig{rmn_staging_1, rmn_staging_2, rmn_staging_3}, }, { useMCMS: false, name: "without MCMS", + nops: []RMNNopConfig{rmn_staging_1, rmn_staging_2, rmn_staging_3}, }, } @@ -80,10 +104,15 @@ func updateRMNConfig(t *testing.T, tc updateRMNConfigTestCase) { } } + nodes := make([]rmn_home.RMNHomeNode, 0, len(tc.nops)) + for _, nop := range tc.nops { + nodes = append(nodes, nop.ToRMNHomeNode()) + } + setRMNHomeCandidateConfig := SetRMNHomeCandidateConfig{ HomeChainSelector: e.HomeChainSel, RMNStaticConfig: rmn_home.RMNHomeStaticConfig{ - Nodes: []rmn_home.RMNHomeNode{}, + Nodes: nodes, OffchainConfig: []byte(""), }, RMNDynamicConfig: rmn_home.RMNHomeDynamicConfig{ @@ -132,16 +161,16 @@ func updateRMNConfig(t *testing.T, tc updateRMNConfigTestCase) { require.NoError(t, err) require.NotEqual(t, previousActiveDigest, currentActiveDigest) + signers := make([]rmn_remote.RMNRemoteSigner, 0, len(tc.nops)) + for _, nop := range tc.nops { + signers = append(signers, nop.ToRMNRemoteSigner()) + } + setRemoteConfig := SetRMNRemoteConfig{ HomeChainSelector: e.HomeChainSel, - Signers: []rmn_remote.RMNRemoteSigner{ - { - OnchainPublicKey: common.Address{}, - NodeIndex: 0, - }, - }, - F: 0, - MCMSConfig: mcmsConfig, + Signers: signers, + F: 0, + MCMSConfig: mcmsConfig, } _, err = commonchangeset.ApplyChangesets(t, e.Env, timelocksPerChain, []commonchangeset.ChangesetApplication{ From 2c13558df4ed1247c6b1e57135d355283a5296bd Mon Sep 17 00:00:00 2001 From: Connor Stein Date: Wed, 11 Dec 2024 20:25:53 -0500 Subject: [PATCH 06/15] 72hr default valid until (#15650) * Build batches * Default valid until --- deployment/common/proposalutils/propose.go | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/deployment/common/proposalutils/propose.go b/deployment/common/proposalutils/propose.go index f525c0b6643..feaee69940e 100644 --- a/deployment/common/proposalutils/propose.go +++ 
b/deployment/common/proposalutils/propose.go @@ -11,6 +11,10 @@ import ( "github.com/smartcontractkit/ccip-owner-contracts/pkg/proposal/timelock" ) +const ( + DefaultValidUntil = 72 * time.Hour +) + func buildProposalMetadata( chainSelectors []uint64, proposerMcmsesPerChain map[uint64]*gethwrappers.ManyChainMultiSig, @@ -61,10 +65,10 @@ func BuildProposalFromBatches( for chainId, tl := range timelocksPerChain { tlsPerChainId[mcms.ChainIdentifier(chainId)] = tl } - + validUntil := time.Now().Unix() + int64(DefaultValidUntil.Seconds()) return timelock.NewMCMSWithTimelockProposal( "1", - 2004259681, // TODO: should be parameterized and based on current block timestamp. + uint32(validUntil), []mcms.Signature{}, false, mcmsMd, From 880492538e14cc528c491cb7c485be078aa4e443 Mon Sep 17 00:00:00 2001 From: Bartek Tofel Date: Thu, 12 Dec 2024 06:17:11 +0100 Subject: [PATCH 07/15] [TT-1862] remove logstream (#15465) * remove logstream, directly dump all Docker logs to files * fail ocr test on purpose * fix lint, newer workflow that saves ccip logs * update default ccip config * do not fail OCR test, pass allowed messages to log verification, add some tolerated critical messages to CCIP tests * fix allowed message * add more critical logs to ignore, update chainlink-solana dep * revert chainlink-solana deps bump * process node logs only from the current test * fix lints * fix lints, bump golangci-lint version * update run-e2e-tests commit hash to develop merge * use tagged CTF * add plugin-scanning log assertion to OCR2 smoke tests * print names of nodes without expected logs * wait in a loop for nodes to have all plugins-in-logs * fix lints --- .../workflows/client-compatibility-tests.yml | 3 - .../workflows/integration-in-memory-tests.yml | 4 +- .github/workflows/integration-tests.yml | 8 +- .../on-demand-vrfv2-performance-test.yml | 2 +- .../workflows/on-demand-vrfv2-smoke-tests.yml | 2 +- .../on-demand-vrfv2plus-performance-test.yml | 2 +- .../on-demand-vrfv2plus-smoke-tests.yml | 2 +- .github/workflows/run-nightly-e2e-tests.yml | 2 +- .github/workflows/run-selected-e2e-tests.yml | 2 +- deployment/environment/devenv/rmn.go | 12 +- .../ccip-tests/testconfig/README.md | 26 --- .../ccip-tests/testconfig/global.go | 117 ---------- .../testconfig/tomls/ccip-default.toml | 11 - .../ccip-tests/testsetups/test_env.go | 2 - integration-tests/docker/test_env/cl_node.go | 28 +-- integration-tests/docker/test_env/test_env.go | 14 +- .../docker/test_env/test_env_builder.go | 201 ++++++++---------- integration-tests/go.mod | 2 +- integration-tests/go.sum | 4 +- integration-tests/load/go.mod | 2 +- integration-tests/load/go.sum | 4 +- integration-tests/smoke/ocr2_test.go | 173 ++++++++++++--- .../testconfig/automation/example.toml | 8 - .../ccip/overrides/sepolia_avax_binance.toml | 4 - integration-tests/testconfig/default.toml | 13 -- .../testconfig/forwarder_ocr/example.toml | 27 --- .../testconfig/forwarder_ocr2/example.toml | 27 --- .../testconfig/functions/example.toml | 8 - .../testconfig/keeper/example.toml | 8 - .../testconfig/log_poller/example.toml | 8 - .../testconfig/node/example.toml | 8 - integration-tests/testconfig/ocr/example.toml | 27 --- .../testconfig/ocr2/example.toml | 27 --- integration-tests/testconfig/testconfig.go | 21 -- .../testconfig/vrfv2/example.toml | 8 - .../testconfig/vrfv2plus/example.toml | 8 - .../testsetups/ccip/test_helpers.go | 23 +- 37 files changed, 288 insertions(+), 560 deletions(-) diff --git a/.github/workflows/client-compatibility-tests.yml 
b/.github/workflows/client-compatibility-tests.yml index 03c5b893cca..5f986ccf16c 100644 --- a/.github/workflows/client-compatibility-tests.yml +++ b/.github/workflows/client-compatibility-tests.yml @@ -668,9 +668,6 @@ jobs: E2E_TEST_PYROSCOPE_KEY: ${{ secrets.QA_PYROSCOPE_KEY }} E2E_TEST_PYROSCOPE_ENVIRONMENT: ci-client-compatability-${{ matrix.eth_client }}-testnet E2E_TEST_PYROSCOPE_ENABLED: "true" - E2E_TEST_LOGGING_RUN_ID: ${{ github.run_id }} - E2E_TEST_LOG_COLLECT: ${{ vars.TEST_LOG_COLLECT }} - E2E_TEST_LOG_STREAM_LOG_TARGETS: ${{ vars.LOGSTREAM_LOG_TARGETS }} E2E_TEST_PRIVATE_ETHEREUM_EXECUTION_LAYER: ${{ matrix.evm_node.eth_implementation || 'geth' }} E2E_TEST_PRIVATE_ETHEREUM_ETHEREUM_VERSION: auto_fill # Auto fill the version based on the docker image E2E_TEST_PRIVATE_ETHEREUM_CUSTOM_DOCKER_IMAGE: ${{ matrix.evm_node.docker_image }} diff --git a/.github/workflows/integration-in-memory-tests.yml b/.github/workflows/integration-in-memory-tests.yml index 8d777b41ea1..341d66f641e 100644 --- a/.github/workflows/integration-in-memory-tests.yml +++ b/.github/workflows/integration-in-memory-tests.yml @@ -73,7 +73,7 @@ jobs: contents: read needs: changes if: github.event_name == 'pull_request' && ( needs.changes.outputs.ccip_changes == 'true' || needs.changes.outputs.core_changes == 'true' || needs.changes.outputs.github_ci_changes == 'true') - uses: smartcontractkit/.github/.github/workflows/run-integration-tests.yml@57112554b9e5cfae79e795a8b1c36acf7e9dead7 + uses: smartcontractkit/.github/.github/workflows/run-integration-tests.yml@0632b5652dd5eb03bfa87e23a2b3e2911484fe59 with: workflow_name: Run CCIP Integration Tests For PR test_path: .github/integration-in-memory-tests.yml @@ -95,7 +95,7 @@ jobs: contents: read needs: changes if: github.event_name == 'merge_group' && ( needs.changes.outputs.ccip_changes == 'true' || needs.changes.outputs.core_changes == 'true' || needs.changes.outputs.github_ci_changes == 'true') - uses: smartcontractkit/.github/.github/workflows/run-integration-tests.yml@57112554b9e5cfae79e795a8b1c36acf7e9dead7 + uses: smartcontractkit/.github/.github/workflows/run-integration-tests.yml@0632b5652dd5eb03bfa87e23a2b3e2911484fe59 with: workflow_name: Run CCIP Integration Tests For Merge Queue test_path: .github/integration-in-memory-tests.yml diff --git a/.github/workflows/integration-tests.yml b/.github/workflows/integration-tests.yml index 2c11d7568aa..27bdfa52243 100644 --- a/.github/workflows/integration-tests.yml +++ b/.github/workflows/integration-tests.yml @@ -210,7 +210,7 @@ jobs: contents: read needs: [build-chainlink, changes] if: github.event_name == 'pull_request' && ( needs.changes.outputs.core_changes == 'true' || needs.changes.outputs.github_ci_changes == 'true') - uses: smartcontractkit/.github/.github/workflows/run-e2e-tests.yml@27467f0073162e0ca77d33ce26f649b3d0f4c188 #ctf-run-tests@0.2.0 + uses: smartcontractkit/.github/.github/workflows/run-e2e-tests.yml@0632b5652dd5eb03bfa87e23a2b3e2911484fe59 with: workflow_name: Run Core E2E Tests For PR chainlink_version: ${{ inputs.evm-ref || github.sha }} @@ -251,7 +251,7 @@ jobs: contents: read needs: [build-chainlink, changes] if: github.event_name == 'merge_group' && ( needs.changes.outputs.core_changes == 'true' || needs.changes.outputs.github_ci_changes == 'true') - uses: smartcontractkit/.github/.github/workflows/run-e2e-tests.yml@27467f0073162e0ca77d33ce26f649b3d0f4c188 #ctf-run-tests@1.0.0 + uses: smartcontractkit/.github/.github/workflows/run-e2e-tests.yml@0632b5652dd5eb03bfa87e23a2b3e2911484fe59 
with: workflow_name: Run Core E2E Tests For Merge Queue chainlink_version: ${{ inputs.evm-ref || github.sha }} @@ -296,7 +296,7 @@ jobs: contents: read needs: [build-chainlink, changes] if: github.event_name == 'pull_request' && (needs.changes.outputs.ccip_changes == 'true' || needs.changes.outputs.github_ci_changes == 'true') - uses: smartcontractkit/.github/.github/workflows/run-e2e-tests.yml@0d4a2b2b009c87b5c366d0b97f7a8d7de2f60760 + uses: smartcontractkit/.github/.github/workflows/run-e2e-tests.yml@0632b5652dd5eb03bfa87e23a2b3e2911484fe59 with: workflow_name: Run CCIP E2E Tests For PR chainlink_version: ${{ inputs.evm-ref || github.sha }} @@ -338,7 +338,7 @@ jobs: contents: read needs: [build-chainlink, changes] if: github.event_name == 'merge_group' && (needs.changes.outputs.ccip_changes == 'true' || needs.changes.outputs.github_ci_changes == 'true') - uses: smartcontractkit/.github/.github/workflows/run-e2e-tests.yml@0d4a2b2b009c87b5c366d0b97f7a8d7de2f60760 + uses: smartcontractkit/.github/.github/workflows/run-e2e-tests.yml@0632b5652dd5eb03bfa87e23a2b3e2911484fe59 with: workflow_name: Run CCIP E2E Tests For Merge Queue chainlink_version: ${{ inputs.evm-ref || github.sha }} diff --git a/.github/workflows/on-demand-vrfv2-performance-test.yml b/.github/workflows/on-demand-vrfv2-performance-test.yml index aadef377718..f9aeaa0fa1f 100644 --- a/.github/workflows/on-demand-vrfv2-performance-test.yml +++ b/.github/workflows/on-demand-vrfv2-performance-test.yml @@ -67,7 +67,7 @@ jobs: run-e2e-tests-workflow: name: Run E2E Tests needs: set-tests-to-run - uses: smartcontractkit/.github/.github/workflows/run-e2e-tests.yml@5412507526722a7b1c5d719fa686eed5a1bc4035 # ctf-run-tests@0.2.0 + uses: smartcontractkit/.github/.github/workflows/run-e2e-tests.yml@0632b5652dd5eb03bfa87e23a2b3e2911484fe59 with: custom_test_list_json: ${{ needs.set-tests-to-run.outputs.test_list }} chainlink_version: ${{ inputs.chainlink_version }} diff --git a/.github/workflows/on-demand-vrfv2-smoke-tests.yml b/.github/workflows/on-demand-vrfv2-smoke-tests.yml index 4ebc38a8081..ad616dea744 100644 --- a/.github/workflows/on-demand-vrfv2-smoke-tests.yml +++ b/.github/workflows/on-demand-vrfv2-smoke-tests.yml @@ -70,7 +70,7 @@ jobs: run-e2e-tests-workflow: name: Run E2E Tests needs: set-tests-to-run - uses: smartcontractkit/.github/.github/workflows/run-e2e-tests.yml@5412507526722a7b1c5d719fa686eed5a1bc4035 # ctf-run-tests@0.2.0 + uses: smartcontractkit/.github/.github/workflows/run-e2e-tests.yml@0632b5652dd5eb03bfa87e23a2b3e2911484fe59 with: custom_test_list_json: ${{ needs.set-tests-to-run.outputs.test_list }} chainlink_version: ${{ inputs.chainlink_version }} diff --git a/.github/workflows/on-demand-vrfv2plus-performance-test.yml b/.github/workflows/on-demand-vrfv2plus-performance-test.yml index f6d120ac178..b3a820e25a0 100644 --- a/.github/workflows/on-demand-vrfv2plus-performance-test.yml +++ b/.github/workflows/on-demand-vrfv2plus-performance-test.yml @@ -67,7 +67,7 @@ jobs: run-e2e-tests-workflow: name: Run E2E Tests needs: set-tests-to-run - uses: smartcontractkit/.github/.github/workflows/run-e2e-tests.yml@5412507526722a7b1c5d719fa686eed5a1bc4035 # ctf-run-tests@0.2.0 + uses: smartcontractkit/.github/.github/workflows/run-e2e-tests.yml@0632b5652dd5eb03bfa87e23a2b3e2911484fe59 with: custom_test_list_json: ${{ needs.set-tests-to-run.outputs.test_list }} chainlink_version: ${{ inputs.chainlink_version }} diff --git a/.github/workflows/on-demand-vrfv2plus-smoke-tests.yml 
b/.github/workflows/on-demand-vrfv2plus-smoke-tests.yml index af26c527988..8561034b103 100644 --- a/.github/workflows/on-demand-vrfv2plus-smoke-tests.yml +++ b/.github/workflows/on-demand-vrfv2plus-smoke-tests.yml @@ -70,7 +70,7 @@ jobs: run-e2e-tests-workflow: name: Run E2E Tests needs: set-tests-to-run - uses: smartcontractkit/.github/.github/workflows/run-e2e-tests.yml@5412507526722a7b1c5d719fa686eed5a1bc4035 # ctf-run-tests@0.2.0 + uses: smartcontractkit/.github/.github/workflows/run-e2e-tests.yml@0632b5652dd5eb03bfa87e23a2b3e2911484fe59 with: custom_test_list_json: ${{ needs.set-tests-to-run.outputs.test_list }} chainlink_version: ${{ inputs.chainlink_version }} diff --git a/.github/workflows/run-nightly-e2e-tests.yml b/.github/workflows/run-nightly-e2e-tests.yml index eba1108f89f..712fb088181 100644 --- a/.github/workflows/run-nightly-e2e-tests.yml +++ b/.github/workflows/run-nightly-e2e-tests.yml @@ -20,7 +20,7 @@ on: jobs: call-run-e2e-tests-workflow: name: Run E2E Tests - uses: smartcontractkit/.github/.github/workflows/run-e2e-tests.yml@5412507526722a7b1c5d719fa686eed5a1bc4035 # ctf-run-tests@0.2.0 + uses: smartcontractkit/.github/.github/workflows/run-e2e-tests.yml@0632b5652dd5eb03bfa87e23a2b3e2911484fe59 with: chainlink_version: ${{ inputs.chainlink_version || 'develop' }} test_path: .github/e2e-tests.yml diff --git a/.github/workflows/run-selected-e2e-tests.yml b/.github/workflows/run-selected-e2e-tests.yml index 0e7c97c67fc..e95ce1cef19 100644 --- a/.github/workflows/run-selected-e2e-tests.yml +++ b/.github/workflows/run-selected-e2e-tests.yml @@ -35,7 +35,7 @@ run-name: ${{ inputs.workflow_run_name }} jobs: call-run-e2e-tests-workflow: name: Run E2E Tests - uses: smartcontractkit/.github/.github/workflows/run-e2e-tests.yml@5412507526722a7b1c5d719fa686eed5a1bc4035 # ctf-run-tests@0.2.0 + uses: smartcontractkit/.github/.github/workflows/run-e2e-tests.yml@0632b5652dd5eb03bfa87e23a2b3e2911484fe59 with: chainlink_version: ${{ github.event.inputs.chainlink_version }} test_path: .github/e2e-tests.yml diff --git a/deployment/environment/devenv/rmn.go b/deployment/environment/devenv/rmn.go index 63f27f1e422..3e0c6efe0cd 100644 --- a/deployment/environment/devenv/rmn.go +++ b/deployment/environment/devenv/rmn.go @@ -22,7 +22,6 @@ import ( "github.com/smartcontractkit/chainlink-testing-framework/lib/docker" "github.com/smartcontractkit/chainlink-testing-framework/lib/docker/test_env" "github.com/smartcontractkit/chainlink-testing-framework/lib/logging" - "github.com/smartcontractkit/chainlink-testing-framework/lib/logstream" p2ptypes "github.com/smartcontractkit/chainlink/v2/core/services/p2p/types" ) @@ -51,7 +50,6 @@ func NewRage2ProxyComponent( imageVersion string, local ProxyLocalConfig, shared ProxySharedConfig, - logStream *logstream.LogStream, ) (*RageProxy, error) { rageName := fmt.Sprintf("%s-proxy-%s", name, uuid.NewString()[0:8]) @@ -71,7 +69,6 @@ func NewRage2ProxyComponent( ContainerImage: imageName, ContainerVersion: imageVersion, Networks: networks, - LogStream: logStream, }, Passphrase: DefaultAFNPassphrase, proxyListenerPort: listenPort, @@ -193,8 +190,7 @@ func NewAFN2ProxyComponent( imageName, imageVersion string, shared SharedConfig, - local LocalConfig, - logStream *logstream.LogStream) (*AFN2Proxy, error) { + local LocalConfig) (*AFN2Proxy, error) { afnName := fmt.Sprintf("%s-%s", name, uuid.NewString()[0:8]) rmn := &AFN2Proxy{ EnvComponent: test_env.EnvComponent{ @@ -202,7 +198,6 @@ func NewAFN2ProxyComponent( ContainerImage: imageName, ContainerVersion: 
imageVersion, Networks: networks, - LogStream: logStream, }, AFNPassphrase: DefaultAFNPassphrase, Shared: shared, @@ -343,7 +338,6 @@ func NewRMNCluster( proxyVersion string, rmnImage string, rmnVersion string, - logStream *logstream.LogStream, ) (*RMNCluster, error) { rmn := &RMNCluster{ t: t, @@ -351,7 +345,7 @@ func NewRMNCluster( Nodes: make(map[string]RMNNode), } for name, rmnConfig := range config { - proxy, err := NewRage2ProxyComponent(networks, name, proxyImage, proxyVersion, rmnConfig.ProxyLocal, rmnConfig.ProxyShared, logStream) + proxy, err := NewRage2ProxyComponent(networks, name, proxyImage, proxyVersion, rmnConfig.ProxyLocal, rmnConfig.ProxyShared) if err != nil { return nil, err } @@ -371,7 +365,7 @@ func NewRMNCluster( return nil, err } rmnConfig.Local.Networking.RageProxy = strings.TrimPrefix(fmt.Sprintf("%s:%s", proxyName, port), "/") - afn, err := NewAFN2ProxyComponent(networks, name, rmnImage, rmnVersion, rmnConfig.Shared, rmnConfig.Local, logStream) + afn, err := NewAFN2ProxyComponent(networks, name, rmnImage, rmnVersion, rmnConfig.Shared, rmnConfig.Local) if err != nil { return nil, err } diff --git a/integration-tests/ccip-tests/testconfig/README.md b/integration-tests/ccip-tests/testconfig/README.md index ff57ecaa220..d614ed62ea4 100644 --- a/integration-tests/ccip-tests/testconfig/README.md +++ b/integration-tests/ccip-tests/testconfig/README.md @@ -430,32 +430,6 @@ Example usage: TTL = "11h" ``` -### CCIP.Env.Logging - -Specifies the logging configuration for the test. Imported from [LoggingConfig](https://github.com/smartcontractkit/chainlink-testing-framework/blob/main/config/logging.go#L11) in chainlink-testing-framework. -Example usage: - -```toml -[CCIP.Env.Logging] -test_log_collect = false # if set to true will save logs even if test did not fail - -[CCIP.Env.Logging.LogStream] -# supported targets: file, loki, in-memory. if empty no logs will be persistet -log_targets = ["file"] -# context timeout for starting log producer and also time-frame for requesting logs -log_producer_timeout = "10s" -# number of retries before log producer gives up and stops listening to logs -log_producer_retry_limit = 10 - -[CCIP.Env.Logging.Loki] -tenant_id = "..." -endpoint = "https://loki...." - -[CCIP.Env.Logging.Grafana] -base_url = "https://grafana..../" -dashboard_url = "/d/6vjVx-1V8/ccip-long-running-tests" -``` - ### CCIP.Env.Lane.LeaderLaneEnabled Specifies whether to enable the leader lane feature. This setting is only applicable for new deployments. 
diff --git a/integration-tests/ccip-tests/testconfig/global.go b/integration-tests/ccip-tests/testconfig/global.go index 4caa8a9ac00..8866d31705a 100644 --- a/integration-tests/ccip-tests/testconfig/global.go +++ b/integration-tests/ccip-tests/testconfig/global.go @@ -175,120 +175,6 @@ type Common struct { func (p *Common) ReadFromEnvVar() error { logger := logging.GetTestLogger(nil) - testLogCollect := ctfconfig.MustReadEnvVar_Boolean(ctfconfig.E2E_TEST_LOG_COLLECT_ENV) - if testLogCollect != nil { - if p.Logging == nil { - p.Logging = &ctfconfig.LoggingConfig{} - } - logger.Debug().Msgf("Using %s env var to override Logging.TestLogCollect", ctfconfig.E2E_TEST_LOG_COLLECT_ENV) - p.Logging.TestLogCollect = testLogCollect - } - - loggingRunID := ctfconfig.MustReadEnvVar_String(ctfconfig.E2E_TEST_LOGGING_RUN_ID_ENV) - if loggingRunID != "" { - if p.Logging == nil { - p.Logging = &ctfconfig.LoggingConfig{} - } - logger.Debug().Msgf("Using %s env var to override Logging.RunID", ctfconfig.E2E_TEST_LOGGING_RUN_ID_ENV) - p.Logging.RunId = &loggingRunID - } - - logstreamLogTargets := ctfconfig.MustReadEnvVar_Strings(ctfconfig.E2E_TEST_LOG_STREAM_LOG_TARGETS_ENV, ",") - if len(logstreamLogTargets) > 0 { - if p.Logging == nil { - p.Logging = &ctfconfig.LoggingConfig{} - } - if p.Logging.LogStream == nil { - p.Logging.LogStream = &ctfconfig.LogStreamConfig{} - } - logger.Debug().Msgf("Using %s env var to override Logging.LogStream.LogTargets", ctfconfig.E2E_TEST_LOG_STREAM_LOG_TARGETS_ENV) - p.Logging.LogStream.LogTargets = logstreamLogTargets - } - - lokiTenantID := ctfconfig.MustReadEnvVar_String(ctfconfig.E2E_TEST_LOKI_TENANT_ID_ENV) - if lokiTenantID != "" { - if p.Logging == nil { - p.Logging = &ctfconfig.LoggingConfig{} - } - if p.Logging.Loki == nil { - p.Logging.Loki = &ctfconfig.LokiConfig{} - } - logger.Debug().Msgf("Using %s env var to override Logging.Loki.TenantId", ctfconfig.E2E_TEST_LOKI_TENANT_ID_ENV) - p.Logging.Loki.TenantId = &lokiTenantID - } - - lokiEndpoint := ctfconfig.MustReadEnvVar_String(ctfconfig.E2E_TEST_LOKI_ENDPOINT_ENV) - if lokiEndpoint != "" { - if p.Logging == nil { - p.Logging = &ctfconfig.LoggingConfig{} - } - if p.Logging.Loki == nil { - p.Logging.Loki = &ctfconfig.LokiConfig{} - } - logger.Debug().Msgf("Using %s env var to override Logging.Loki.Endpoint", ctfconfig.E2E_TEST_LOKI_ENDPOINT_ENV) - p.Logging.Loki.Endpoint = &lokiEndpoint - } - - lokiBasicAuth := ctfconfig.MustReadEnvVar_String(ctfconfig.E2E_TEST_LOKI_BASIC_AUTH_ENV) - if lokiBasicAuth != "" { - if p.Logging == nil { - p.Logging = &ctfconfig.LoggingConfig{} - } - if p.Logging.Loki == nil { - p.Logging.Loki = &ctfconfig.LokiConfig{} - } - logger.Debug().Msgf("Using %s env var to override Logging.Loki.BasicAuth", ctfconfig.E2E_TEST_LOKI_BASIC_AUTH_ENV) - p.Logging.Loki.BasicAuth = &lokiBasicAuth - } - - lokiBearerToken := ctfconfig.MustReadEnvVar_String(ctfconfig.E2E_TEST_LOKI_BEARER_TOKEN_ENV) - if lokiBearerToken != "" { - if p.Logging == nil { - p.Logging = &ctfconfig.LoggingConfig{} - } - if p.Logging.Loki == nil { - p.Logging.Loki = &ctfconfig.LokiConfig{} - } - logger.Debug().Msgf("Using %s env var to override Logging.Loki.BearerToken", ctfconfig.E2E_TEST_LOKI_BEARER_TOKEN_ENV) - p.Logging.Loki.BearerToken = &lokiBearerToken - } - - grafanaBaseUrl := ctfconfig.MustReadEnvVar_String(ctfconfig.E2E_TEST_GRAFANA_BASE_URL_ENV) - if grafanaBaseUrl != "" { - if p.Logging == nil { - p.Logging = &ctfconfig.LoggingConfig{} - } - if p.Logging.Grafana == nil { - p.Logging.Grafana = &ctfconfig.GrafanaConfig{} 
- } - logger.Debug().Msgf("Using %s env var to override Logging.Grafana.BaseUrl", ctfconfig.E2E_TEST_GRAFANA_BASE_URL_ENV) - p.Logging.Grafana.BaseUrl = &grafanaBaseUrl - } - - grafanaDashboardUrl := ctfconfig.MustReadEnvVar_String(ctfconfig.E2E_TEST_GRAFANA_DASHBOARD_URL_ENV) - if grafanaDashboardUrl != "" { - if p.Logging == nil { - p.Logging = &ctfconfig.LoggingConfig{} - } - if p.Logging.Grafana == nil { - p.Logging.Grafana = &ctfconfig.GrafanaConfig{} - } - logger.Debug().Msgf("Using %s env var to override Logging.Grafana.DashboardUrl", ctfconfig.E2E_TEST_GRAFANA_DASHBOARD_URL_ENV) - p.Logging.Grafana.DashboardUrl = &grafanaDashboardUrl - } - - grafanaBearerToken := ctfconfig.MustReadEnvVar_String(ctfconfig.E2E_TEST_GRAFANA_BEARER_TOKEN_ENV) - if grafanaBearerToken != "" { - if p.Logging == nil { - p.Logging = &ctfconfig.LoggingConfig{} - } - if p.Logging.Grafana == nil { - p.Logging.Grafana = &ctfconfig.GrafanaConfig{} - } - logger.Debug().Msgf("Using %s env var to override Logging.Grafana.BearerToken", ctfconfig.E2E_TEST_GRAFANA_BEARER_TOKEN_ENV) - p.Logging.Grafana.BearerToken = &grafanaBearerToken - } - selectedNetworks := ctfconfig.MustReadEnvVar_Strings(ctfconfig.E2E_TEST_SELECTED_NETWORK_ENV, ",") if len(selectedNetworks) > 0 { if p.Network == nil { @@ -421,9 +307,6 @@ func (p *Common) GetSethConfig() *seth.Config { } func (p *Common) Validate() error { - if err := p.Logging.Validate(); err != nil { - return fmt.Errorf("error validating logging config %w", err) - } if p.Network == nil { return errors.New("no networks specified") } diff --git a/integration-tests/ccip-tests/testconfig/tomls/ccip-default.toml b/integration-tests/ccip-tests/testconfig/tomls/ccip-default.toml index c82e2f930be..89858a94ddb 100644 --- a/integration-tests/ccip-tests/testconfig/tomls/ccip-default.toml +++ b/integration-tests/ccip-tests/testconfig/tomls/ccip-default.toml @@ -73,17 +73,6 @@ addresses_to_fund = [ [CCIP.Env.PrivateEthereumNetworks.SIMULATED_2.EthereumChainConfig.HardForkEpochs] Deneb = 500 -[CCIP.Env.Logging] -test_log_collect = false # if set to true will save logs even if test did not fail - -[CCIP.Env.Logging.LogStream] -# supported targets: file, loki, in-memory. 
if empty no logs will be persistet -log_targets = ["file"] -# context timeout for starting log producer and also time-frame for requesting logs -log_producer_timeout = "10s" -# number of retries before log producer gives up and stops listening to logs -log_producer_retry_limit = 10 - # these values will be used to set up chainlink DON # along with these values, the secrets needs to be specified as part of .env variables # diff --git a/integration-tests/ccip-tests/testsetups/test_env.go b/integration-tests/ccip-tests/testsetups/test_env.go index 263d291453d..3c3406a3e5a 100644 --- a/integration-tests/ccip-tests/testsetups/test_env.go +++ b/integration-tests/ccip-tests/testsetups/test_env.go @@ -352,7 +352,6 @@ func DeployLocalCluster( pointer.GetString(clNode.ChainlinkImage.Image), pointer.GetString(clNode.ChainlinkImage.Version), toml, - env.LogStream, test_env.WithPgDBOptions( ctftestenv.WithPostgresImageName(clNode.DBImage), ctftestenv.WithPostgresImageVersion(clNode.DBTag), @@ -381,7 +380,6 @@ func DeployLocalCluster( pointer.GetString(testInputs.EnvInput.NewCLCluster.Common.ChainlinkImage.Image), pointer.GetString(testInputs.EnvInput.NewCLCluster.Common.ChainlinkImage.Version), toml, - env.LogStream, test_env.WithPgDBOptions( ctftestenv.WithPostgresImageName(testInputs.EnvInput.NewCLCluster.Common.DBImage), ctftestenv.WithPostgresImageVersion(testInputs.EnvInput.NewCLCluster.Common.DBTag), diff --git a/integration-tests/docker/test_env/cl_node.go b/integration-tests/docker/test_env/cl_node.go index b5c2505b252..8ebaf579d0a 100644 --- a/integration-tests/docker/test_env/cl_node.go +++ b/integration-tests/docker/test_env/cl_node.go @@ -24,7 +24,6 @@ import ( "github.com/smartcontractkit/chainlink-testing-framework/lib/docker" "github.com/smartcontractkit/chainlink-testing-framework/lib/docker/test_env" "github.com/smartcontractkit/chainlink-testing-framework/lib/logging" - "github.com/smartcontractkit/chainlink-testing-framework/lib/logstream" "github.com/smartcontractkit/chainlink-testing-framework/lib/utils/testcontext" "github.com/smartcontractkit/chainlink/v2/core/services/chainlink" @@ -126,11 +125,11 @@ func WithPgDBOptions(opts ...test_env.PostgresDbOption) ClNodeOption { } } -func NewClNode(networks []string, imageName, imageVersion string, nodeConfig *chainlink.Config, logStream *logstream.LogStream, opts ...ClNodeOption) (*ClNode, error) { +func NewClNode(networks []string, imageName, imageVersion string, nodeConfig *chainlink.Config, opts ...ClNodeOption) (*ClNode, error) { nodeDefaultCName := fmt.Sprintf("%s-%s", "cl-node", uuid.NewString()[0:8]) pgDefaultCName := fmt.Sprintf("pg-%s", nodeDefaultCName) - pgDb, err := test_env.NewPostgresDb(networks, test_env.WithPostgresDbContainerName(pgDefaultCName), test_env.WithPostgresDbLogStream(logStream)) + pgDb, err := test_env.NewPostgresDb(networks, test_env.WithPostgresDbContainerName(pgDefaultCName)) if err != nil { return nil, err } @@ -140,7 +139,6 @@ func NewClNode(networks []string, imageName, imageVersion string, nodeConfig *ch ContainerImage: imageName, ContainerVersion: imageVersion, Networks: networks, - LogStream: logStream, StartupTimeout: 3 * time.Minute, }, UserEmail: "local@local.com", @@ -490,28 +488,6 @@ func (n *ClNode) getContainerRequest(secrets string) ( FileMode: 0644, }, }, - LifecycleHooks: []tc.ContainerLifecycleHooks{ - { - PostStarts: []tc.ContainerHook{ - func(ctx context.Context, c tc.Container) error { - if n.LogStream != nil { - return n.LogStream.ConnectContainer(ctx, c, "") - } - return nil - }, - 
}, - PreStops: []tc.ContainerHook{ - func(ctx context.Context, c tc.Container) error { - if n.LogStream != nil { - return n.LogStream.DisconnectContainer(c) - } - return nil - }, - }, - PostStops: n.PostStopsHooks, - PreTerminates: n.PreTerminatesHooks, - }, - }, }, nil } diff --git a/integration-tests/docker/test_env/test_env.go b/integration-tests/docker/test_env/test_env.go index 1ca50760d17..a37b7f813a7 100644 --- a/integration-tests/docker/test_env/test_env.go +++ b/integration-tests/docker/test_env/test_env.go @@ -20,8 +20,6 @@ import ( "github.com/smartcontractkit/chainlink-testing-framework/lib/docker" "github.com/smartcontractkit/chainlink-testing-framework/lib/docker/test_env" "github.com/smartcontractkit/chainlink-testing-framework/lib/logging" - "github.com/smartcontractkit/chainlink-testing-framework/lib/logstream" - "github.com/smartcontractkit/chainlink-testing-framework/lib/utils/runid" "github.com/smartcontractkit/chainlink/integration-tests/testconfig/ccip" "github.com/smartcontractkit/chainlink/v2/core/services/chainlink" @@ -36,7 +34,6 @@ var ( type CLClusterTestEnv struct { Cfg *TestEnvConfig DockerNetwork *tc.DockerNetwork - LogStream *logstream.LogStream TestConfig ctf_config.GlobalTestConfig /* components */ @@ -69,7 +66,7 @@ func (te *CLClusterTestEnv) WithTestEnvConfig(cfg *TestEnvConfig) *CLClusterTest te.Cfg = cfg if cfg.MockAdapter.ContainerName != "" { n := []string{te.DockerNetwork.Name} - te.MockAdapter = test_env.NewKillgrave(n, te.Cfg.MockAdapter.ImpostersPath, test_env.WithContainerName(te.Cfg.MockAdapter.ContainerName), test_env.WithLogStream(te.LogStream)) + te.MockAdapter = test_env.NewKillgrave(n, te.Cfg.MockAdapter.ImpostersPath, test_env.WithContainerName(te.Cfg.MockAdapter.ContainerName)) } return te } @@ -99,7 +96,6 @@ func (te *CLClusterTestEnv) StartEthereumNetwork(cfg *ctf_config.EthereumNetwork builder := test_env.NewEthereumNetworkBuilder() c, err := builder.WithExistingConfig(*cfg). WithTest(te.t). - WithLogStream(te.LogStream). Build() if err != nil { return blockchain.EVMNetwork{}, test_env.RpcProvider{}, err @@ -132,7 +128,6 @@ func (te *CLClusterTestEnv) StartJobDistributor(cfg *ccip.JDConfig) error { job_distributor.WithVersion(cfg.GetJDVersion()), job_distributor.WithDBURL(jdDB.InternalURL.String()), ) - jd.LogStream = te.LogStream err = jd.StartContainer() if err != nil { return fmt.Errorf("failed to start job-distributor: %w", err) @@ -160,7 +155,7 @@ func (te *CLClusterTestEnv) StartClCluster(nodeConfig *chainlink.Config, count i opts = append(opts, WithSecrets(secretsConfig)) te.ClCluster = &ClCluster{} for i := 0; i < count; i++ { - ocrNode, err := NewClNode([]string{te.DockerNetwork.Name}, *testconfig.GetChainlinkImageConfig().Image, *testconfig.GetChainlinkImageConfig().Version, nodeConfig, te.LogStream, opts...) + ocrNode, err := NewClNode([]string{te.DockerNetwork.Name}, *testconfig.GetChainlinkImageConfig().Image, *testconfig.GetChainlinkImageConfig().Version, nodeConfig, opts...) 
if err != nil { return err } @@ -193,11 +188,6 @@ type CleanupOpts struct { func (te *CLClusterTestEnv) Cleanup(opts CleanupOpts) error { te.l.Info().Msg("Cleaning up test environment") - runIdErr := runid.RemoveLocalRunId(te.TestConfig.GetLoggingConfig().RunId) - if runIdErr != nil { - te.l.Warn().Msgf("Failed to remove .run.id file due to: %s (not a big deal, you can still remove it manually)", runIdErr.Error()) - } - if te.t == nil { return fmt.Errorf("cannot cleanup test environment without a testing.T") } diff --git a/integration-tests/docker/test_env/test_env_builder.go b/integration-tests/docker/test_env/test_env_builder.go index cdce826f2c2..e11a3c96095 100644 --- a/integration-tests/docker/test_env/test_env_builder.go +++ b/integration-tests/docker/test_env/test_env_builder.go @@ -2,28 +2,25 @@ package test_env import ( "fmt" - "math" "os" "path/filepath" - "slices" "strings" + "sync" "testing" "time" "github.com/rs/zerolog" "github.com/rs/zerolog/log" "go.uber.org/zap/zapcore" - - "github.com/smartcontractkit/chainlink-testing-framework/seth" + "golang.org/x/sync/errgroup" "github.com/smartcontractkit/chainlink-testing-framework/lib/blockchain" ctf_config "github.com/smartcontractkit/chainlink-testing-framework/lib/config" + ctf_docker "github.com/smartcontractkit/chainlink-testing-framework/lib/docker" "github.com/smartcontractkit/chainlink-testing-framework/lib/docker/test_env" "github.com/smartcontractkit/chainlink-testing-framework/lib/logging" - "github.com/smartcontractkit/chainlink-testing-framework/lib/logstream" "github.com/smartcontractkit/chainlink-testing-framework/lib/networks" "github.com/smartcontractkit/chainlink-testing-framework/lib/testreporters" - "github.com/smartcontractkit/chainlink-testing-framework/lib/testsummary" "github.com/smartcontractkit/chainlink-testing-framework/lib/utils/osutil" "github.com/smartcontractkit/chainlink/integration-tests/testconfig/ccip" @@ -46,7 +43,6 @@ type ChainlinkNodeLogScannerSettings struct { } type CLTestEnvBuilder struct { - hasLogStream bool hasKillgrave bool jdConfig *ccip.JDConfig clNodeConfig *chainlink.Config @@ -90,7 +86,6 @@ func GetDefaultChainlinkNodeLogScannerSettingsWithExtraAllowedMessages(extraAllo func NewCLTestEnvBuilder() *CLTestEnvBuilder { return &CLTestEnvBuilder{ l: log.Logger, - hasLogStream: true, isEVM: true, chainlinkNodeLogScannerSettings: &DefaultChainlinkNodeLogScannerSettings, } @@ -134,12 +129,6 @@ func (b *CLTestEnvBuilder) WithTestInstance(t *testing.T) *CLTestEnvBuilder { return b } -// WithoutLogStream disables LogStream logging component -func (b *CLTestEnvBuilder) WithoutLogStream() *CLTestEnvBuilder { - b.hasLogStream = false - return b -} - func (b *CLTestEnvBuilder) WithoutChainlinkNodeLogScanner() *CLTestEnvBuilder { b.chainlinkNodeLogScannerSettings = &ChainlinkNodeLogScannerSettings{} return b @@ -250,102 +239,105 @@ func (b *CLTestEnvBuilder) Build() (*CLClusterTestEnv, error) { b.te.WithTestInstance(b.t) } - if b.hasLogStream { - loggingConfig := b.testConfig.GetLoggingConfig() - // we need to enable logging to file if we want to scan logs - if b.chainlinkNodeLogScannerSettings != nil && !slices.Contains(loggingConfig.LogStream.LogTargets, string(logstream.File)) { - b.l.Debug().Msg("Enabling logging to file in order to support Chainlink node log scanning") - loggingConfig.LogStream.LogTargets = append(loggingConfig.LogStream.LogTargets, string(logstream.File)) - } - b.te.LogStream, err = logstream.NewLogStream(b.te.t, b.testConfig.GetLoggingConfig()) - if err != nil { - return 
nil, err - } - - // this clean up has to be added as the FIRST one, because cleanup functions are executed in reverse order (LIFO) - if b.t != nil && b.cleanUpType != CleanUpTypeNone { - b.t.Cleanup(func() { - b.l.Info().Msg("Shutting down LogStream") - logPath, err := osutil.GetAbsoluteFolderPath("logs") - if err == nil { - b.l.Info().Str("Absolute path", logPath).Msg("LogStream logs folder location") - } - - // flush logs when test failed or when we are explicitly told to collect logs - flushLogStream := b.t.Failed() || *b.testConfig.GetLoggingConfig().TestLogCollect + // this clean up has to be added as the FIRST one, because cleanup functions are executed in reverse order (LIFO) + if b.t != nil && b.cleanUpType != CleanUpTypeNone { + b.t.Cleanup(func() { + logsDir := fmt.Sprintf("logs/%s-%s", b.t.Name(), time.Now().Format("2006-01-02T15-04-05")) + loggingErr := ctf_docker.WriteAllContainersLogs(b.l, logsDir) + if loggingErr != nil { + b.l.Error().Err(loggingErr).Msg("Error writing all Docker containers logs") + } - // run even if test has failed, as we might be able to catch additional problems without running the test again - if b.chainlinkNodeLogScannerSettings != nil { - logProcessor := logstream.NewLogProcessor[int](b.te.LogStream) + if b == nil || b.te == nil || b.te.ClCluster == nil || b.te.ClCluster.Nodes == nil { + log.Warn().Msg("Won't dump container and postgres logs, because test environment doesn't have any nodes") + return + } - processFn := func(log logstream.LogContent, count *int) error { - countSoFar := count - if *countSoFar < 0 { - return fmt.Errorf("negative count: %d", *countSoFar) - } - newCount, err := testreporters.ScanLogLine(b.l, string(log.Content), b.chainlinkNodeLogScannerSettings.FailingLogLevel, uint(*countSoFar), b.chainlinkNodeLogScannerSettings.Threshold, b.chainlinkNodeLogScannerSettings.AllowedMessages) - if err != nil { - return err - } - if newCount > math.MaxInt { - return fmt.Errorf("new count overflows int: %d", newCount) - } - *count = int(newCount) - return nil - } + if b.chainlinkNodeLogScannerSettings != nil { + var logFiles []*os.File - // we cannot do parallel processing here, because ProcessContainerLogs() locks a mutex that controls whether - // new logs can be added to the log stream, so parallel processing would get stuck on waiting for it to be unlocked - LogScanningLoop: - for i := 0; i < b.clNodesCount; i++ { - // if something went wrong during environment setup we might not have all nodes, and we don't want an NPE - if b == nil || b.te == nil || b.te.ClCluster == nil || b.te.ClCluster.Nodes == nil || len(b.te.ClCluster.Nodes)-1 < i || b.te.ClCluster.Nodes[i] == nil { + // when tests run in parallel, we need to make sure that we only process logs that belong to nodes created by the current test + // that is required, because some tests might have custom log messages that are allowed, but only for that test (e.g. 
because they restart the CL node) + var belongsToCurrentEnv = func(filePath string) bool { + for _, clNode := range b.te.ClCluster.Nodes { + if clNode == nil { continue } - // ignore count return, because we are only interested in the error - _, err := logProcessor.ProcessContainerLogs(b.te.ClCluster.Nodes[i].ContainerName, processFn) - if err != nil && !strings.Contains(err.Error(), testreporters.MultipleLogsAtLogLevelErr) && !strings.Contains(err.Error(), testreporters.OneLogAtLogLevelErr) { - b.l.Error().Err(err).Msg("Error processing CL node logs") - continue - } else if err != nil && (strings.Contains(err.Error(), testreporters.MultipleLogsAtLogLevelErr) || strings.Contains(err.Error(), testreporters.OneLogAtLogLevelErr)) { - flushLogStream = true - b.t.Errorf("Found a concerning log in Chainklink Node logs: %v", err) - break LogScanningLoop + if strings.EqualFold(filePath, clNode.ContainerName+".log") { + return true } } - b.l.Info().Msg("Finished scanning Chainlink Node logs for concerning errors") + return false } - if flushLogStream { - b.l.Info().Msg("Flushing LogStream logs") - // we can't do much if this fails, so we just log the error in LogStream - if err := b.te.LogStream.FlushAndShutdown(); err != nil { - b.l.Error().Err(err).Msg("Error flushing and shutting down LogStream") + fileWalkErr := filepath.Walk(logsDir, func(path string, info os.FileInfo, err error) error { + if err != nil { + return err } - b.te.LogStream.PrintLogTargetsLocations() - b.te.LogStream.SaveLogLocationInTestSummary() - } - b.l.Info().Msg("Finished shutting down LogStream") + if !info.IsDir() && belongsToCurrentEnv(info.Name()) { + file, fileErr := os.Open(path) + if fileErr != nil { + return fmt.Errorf("failed to open file %s: %w", path, fileErr) + } + logFiles = append(logFiles, file) + } + return nil + }) - if b.t.Failed() || *b.testConfig.GetLoggingConfig().TestLogCollect { - b.l.Info().Msg("Dump state of all Postgres DBs used by Chainlink Nodes") + if len(logFiles) != len(b.te.ClCluster.Nodes) { + b.l.Warn().Int("Expected", len(b.te.ClCluster.Nodes)).Int("Got", len(logFiles)).Msg("Number of log files does not match number of nodes. Some logs might be missing.") + } - dbDumpFolder := "db_dumps" - dbDumpPath := fmt.Sprintf("%s/%s-%s", dbDumpFolder, b.t.Name(), time.Now().Format("2006-01-02T15-04-05")) - if err := os.MkdirAll(dbDumpPath, os.ModePerm); err != nil { - b.l.Error().Err(err).Msg("Error creating folder for Postgres DB dump") - return + if fileWalkErr != nil { + b.l.Error().Err(fileWalkErr).Msg("Error walking through log files. Skipping log verification.") + } else { + verifyLogsGroup := &errgroup.Group{} + for _, f := range logFiles { + file := f + verifyLogsGroup.Go(func() error { + verifyErr := testreporters.VerifyLogFile(file, b.chainlinkNodeLogScannerSettings.FailingLogLevel, b.chainlinkNodeLogScannerSettings.Threshold, b.chainlinkNodeLogScannerSettings.AllowedMessages...) 
+ _ = file.Close() + // ignore processing errors + if verifyErr != nil && !strings.Contains(verifyErr.Error(), testreporters.MultipleLogsAtLogLevelErr) && !strings.Contains(verifyErr.Error(), testreporters.OneLogAtLogLevelErr) { + b.l.Error().Err(verifyErr).Msg("Error processing CL node logs") + + return nil + + // if it's not a processing error, we want to fail the test; we also can stop processing logs all together at this point + } else if verifyErr != nil && (strings.Contains(verifyErr.Error(), testreporters.MultipleLogsAtLogLevelErr) || strings.Contains(verifyErr.Error(), testreporters.OneLogAtLogLevelErr)) { + + return verifyErr + } + return nil + }) } - absDbDumpPath, err := osutil.GetAbsoluteFolderPath(dbDumpFolder) - if err == nil { - b.l.Info().Str("Absolute path", absDbDumpPath).Msg("PostgresDB dump folder location") + if logVerificationErr := verifyLogsGroup.Wait(); logVerificationErr != nil { + b.t.Errorf("Found a concerning log in Chainklink Node logs: %v", logVerificationErr) } + } + } - for i := 0; i < b.clNodesCount; i++ { + b.l.Info().Msg("Staring to dump state of all Postgres DBs used by Chainlink Nodes") + + dbDumpFolder := "db_dumps" + dbDumpPath := fmt.Sprintf("%s/%s-%s", dbDumpFolder, b.t.Name(), time.Now().Format("2006-01-02T15-04-05")) + if err := os.MkdirAll(dbDumpPath, os.ModePerm); err != nil { + b.l.Error().Err(err).Msg("Error creating folder for Postgres DB dump") + } else { + absDbDumpPath, err := osutil.GetAbsoluteFolderPath(dbDumpFolder) + if err == nil { + b.l.Info().Str("Absolute path", absDbDumpPath).Msg("PostgresDB dump folder location") + } + + dbDumpGroup := sync.WaitGroup{} + for i := 0; i < b.clNodesCount; i++ { + dbDumpGroup.Add(1) + go func() { + defer dbDumpGroup.Done() // if something went wrong during environment setup we might not have all nodes, and we don't want an NPE if b == nil || b.te == nil || b.te.ClCluster == nil || b.te.ClCluster.Nodes == nil || len(b.te.ClCluster.Nodes)-1 < i || b.te.ClCluster.Nodes[i] == nil || b.te.ClCluster.Nodes[i].PostgresDb == nil { - continue + return } filePath := filepath.Join(dbDumpPath, fmt.Sprintf("postgres_db_dump_%s.sql", b.te.ClCluster.Nodes[i].ContainerName)) @@ -353,24 +345,23 @@ func (b *CLTestEnvBuilder) Build() (*CLClusterTestEnv, error) { if err != nil { b.l.Error().Err(err).Msg("Error creating localDbDumpFile for Postgres DB dump") _ = localDbDumpFile.Close() - continue + return } if err := b.te.ClCluster.Nodes[i].PostgresDb.ExecPgDumpFromContainer(localDbDumpFile); err != nil { b.l.Error().Err(err).Msg("Error dumping Postgres DB") } _ = localDbDumpFile.Close() - } - b.l.Info().Msg("Finished dumping state of all Postgres DBs used by Chainlink Nodes") + }() } - if b.testConfig.GetSethConfig() != nil && ((b.t.Failed() && slices.Contains(b.testConfig.GetSethConfig().TraceOutputs, seth.TraceOutput_DOT) && b.testConfig.GetSethConfig().TracingLevel != seth.TracingLevel_None) || (!b.t.Failed() && slices.Contains(b.testConfig.GetSethConfig().TraceOutputs, seth.TraceOutput_DOT) && b.testConfig.GetSethConfig().TracingLevel == seth.TracingLevel_All)) { - _ = testsummary.AddEntry(b.t.Name(), "dot_graphs", "true") - } - }) - } else { - b.l.Warn().Msg("LogStream won't be cleaned up, because either test instance is not set or cleanup type is set to none") - } + dbDumpGroup.Wait() + + b.l.Info().Msg("Finished dumping state of all Postgres DBs used by Chainlink Nodes") + } + }) + } else { + b.l.Warn().Msg("Won't dump container and postgres logs, because either test instance is not set or cleanup type is set to 
none") } if b.hasKillgrave { @@ -378,7 +369,7 @@ func (b *CLTestEnvBuilder) Build() (*CLClusterTestEnv, error) { return nil, fmt.Errorf("test environment builder failed: %w", fmt.Errorf("cannot start mock adapter without a network")) } - b.te.MockAdapter = test_env.NewKillgrave([]string{b.te.DockerNetwork.Name}, "", test_env.WithLogStream(b.te.LogStream)) + b.te.MockAdapter = test_env.NewKillgrave([]string{b.te.DockerNetwork.Name}, "") err = b.te.StartMockAdapter() if err != nil { @@ -406,10 +397,6 @@ func (b *CLTestEnvBuilder) Build() (*CLClusterTestEnv, error) { return b.te, fmt.Errorf("test environment builder failed: %w", fmt.Errorf("explicit cleanup type must be set when building test environment")) } - if b.te.LogStream == nil && b.chainlinkNodeLogScannerSettings != nil { - log.Warn().Msg("Chainlink node log scanner settings provided, but LogStream is not enabled. Ignoring Chainlink node log scanner settings, as no logs will be available.") - } - if b.jdConfig != nil { err := b.te.StartJobDistributor(b.jdConfig) if err != nil { diff --git a/integration-tests/go.mod b/integration-tests/go.mod index d94c15de0cb..c1b012e3641 100644 --- a/integration-tests/go.mod +++ b/integration-tests/go.mod @@ -50,7 +50,7 @@ require ( github.com/smartcontractkit/chainlink-common v0.3.1-0.20241210192653-a9c706f99e83 github.com/smartcontractkit/chainlink-protos/job-distributor v0.6.0 github.com/smartcontractkit/chainlink-testing-framework/havoc v1.50.2 - github.com/smartcontractkit/chainlink-testing-framework/lib v1.50.18 + github.com/smartcontractkit/chainlink-testing-framework/lib v1.50.19 github.com/smartcontractkit/chainlink-testing-framework/lib/grafana v1.50.0 github.com/smartcontractkit/chainlink-testing-framework/seth v1.50.9 github.com/smartcontractkit/chainlink-testing-framework/wasp v1.50.2 diff --git a/integration-tests/go.sum b/integration-tests/go.sum index 49e87a613fd..fb3d895d130 100644 --- a/integration-tests/go.sum +++ b/integration-tests/go.sum @@ -1450,8 +1450,8 @@ github.com/smartcontractkit/chainlink-starknet/relayer v0.1.1-0.20241202202529-2 github.com/smartcontractkit/chainlink-starknet/relayer v0.1.1-0.20241202202529-2033490e77b8/go.mod h1:EBrEgcdIbwepqguClkv8Ohy7CbyWSJaE4EC9aBJlQK0= github.com/smartcontractkit/chainlink-testing-framework/havoc v1.50.2 h1:GDGrC5OGiV0RyM1znYWehSQXyZQWTOzrEeJRYmysPCE= github.com/smartcontractkit/chainlink-testing-framework/havoc v1.50.2/go.mod h1:DsT43c1oTBmp3iQkMcoZOoKThwZvt8X3Pz6UmznJ4GY= -github.com/smartcontractkit/chainlink-testing-framework/lib v1.50.18 h1:a3xetGZh2nFO1iX5xd9OuqiCkgbWLvW6fTN6fgVubPo= -github.com/smartcontractkit/chainlink-testing-framework/lib v1.50.18/go.mod h1:NwmlNKqrb02v4Sci4b5KW644nfH2BW+FrKbWwTN5r6M= +github.com/smartcontractkit/chainlink-testing-framework/lib v1.50.19 h1:9PMwKNqFKc5FXf4VchyD3CGzZelnSgi13fgVdT2X7T4= +github.com/smartcontractkit/chainlink-testing-framework/lib v1.50.19/go.mod h1:ag7LEgejsVtPXaUNkcoFPpAoDkl1J8V2HSbqVUxfEtk= github.com/smartcontractkit/chainlink-testing-framework/lib/grafana v1.50.0 h1:VIxK8u0Jd0Q/VuhmsNm6Bls6Tb31H/sA3A/rbc5hnhg= github.com/smartcontractkit/chainlink-testing-framework/lib/grafana v1.50.0/go.mod h1:lyAu+oMXdNUzEDScj2DXB2IueY+SDXPPfyl/kb63tMM= github.com/smartcontractkit/chainlink-testing-framework/seth v1.50.9 h1:yB1x5UXvpZNka+5h57yo1/GrKfXKCqMzChCISpldZx4= diff --git a/integration-tests/load/go.mod b/integration-tests/load/go.mod index f73d84e3fc5..5f49519cb4b 100644 --- a/integration-tests/load/go.mod +++ b/integration-tests/load/go.mod @@ -28,7 +28,7 @@ require ( 
github.com/rs/zerolog v1.33.0 github.com/slack-go/slack v0.15.0 github.com/smartcontractkit/chainlink-common v0.3.1-0.20241210192653-a9c706f99e83 - github.com/smartcontractkit/chainlink-testing-framework/lib v1.50.18 + github.com/smartcontractkit/chainlink-testing-framework/lib v1.50.19 github.com/smartcontractkit/chainlink-testing-framework/seth v1.50.9 github.com/smartcontractkit/chainlink-testing-framework/wasp v1.50.2 github.com/smartcontractkit/tdh2/go/tdh2 v0.0.0-20241009055228-33d0c0bf38de diff --git a/integration-tests/load/go.sum b/integration-tests/load/go.sum index 3bc63a508ac..cda5cebf370 100644 --- a/integration-tests/load/go.sum +++ b/integration-tests/load/go.sum @@ -1441,8 +1441,8 @@ github.com/smartcontractkit/chainlink-starknet/relayer v0.1.1-0.20241202202529-2 github.com/smartcontractkit/chainlink-starknet/relayer v0.1.1-0.20241202202529-2033490e77b8/go.mod h1:EBrEgcdIbwepqguClkv8Ohy7CbyWSJaE4EC9aBJlQK0= github.com/smartcontractkit/chainlink-testing-framework/havoc v1.50.2 h1:GDGrC5OGiV0RyM1znYWehSQXyZQWTOzrEeJRYmysPCE= github.com/smartcontractkit/chainlink-testing-framework/havoc v1.50.2/go.mod h1:DsT43c1oTBmp3iQkMcoZOoKThwZvt8X3Pz6UmznJ4GY= -github.com/smartcontractkit/chainlink-testing-framework/lib v1.50.18 h1:a3xetGZh2nFO1iX5xd9OuqiCkgbWLvW6fTN6fgVubPo= -github.com/smartcontractkit/chainlink-testing-framework/lib v1.50.18/go.mod h1:NwmlNKqrb02v4Sci4b5KW644nfH2BW+FrKbWwTN5r6M= +github.com/smartcontractkit/chainlink-testing-framework/lib v1.50.19 h1:9PMwKNqFKc5FXf4VchyD3CGzZelnSgi13fgVdT2X7T4= +github.com/smartcontractkit/chainlink-testing-framework/lib v1.50.19/go.mod h1:ag7LEgejsVtPXaUNkcoFPpAoDkl1J8V2HSbqVUxfEtk= github.com/smartcontractkit/chainlink-testing-framework/lib/grafana v1.50.0 h1:VIxK8u0Jd0Q/VuhmsNm6Bls6Tb31H/sA3A/rbc5hnhg= github.com/smartcontractkit/chainlink-testing-framework/lib/grafana v1.50.0/go.mod h1:lyAu+oMXdNUzEDScj2DXB2IueY+SDXPPfyl/kb63tMM= github.com/smartcontractkit/chainlink-testing-framework/seth v1.50.9 h1:yB1x5UXvpZNka+5h57yo1/GrKfXKCqMzChCISpldZx4= diff --git a/integration-tests/smoke/ocr2_test.go b/integration-tests/smoke/ocr2_test.go index a011dfdffc6..8416ec05c7e 100644 --- a/integration-tests/smoke/ocr2_test.go +++ b/integration-tests/smoke/ocr2_test.go @@ -1,14 +1,19 @@ package smoke import ( + "bufio" "fmt" "math/big" "net/http" + "os" + "path/filepath" + "regexp" "strings" + "sync" "testing" "time" - "github.com/smartcontractkit/chainlink/integration-tests/utils" + "github.com/onsi/gomega" "github.com/ethereum/go-ethereum/common" "github.com/rs/zerolog" @@ -16,8 +21,8 @@ import ( "github.com/smartcontractkit/chainlink-testing-framework/seth" + ctf_docker "github.com/smartcontractkit/chainlink-testing-framework/lib/docker" "github.com/smartcontractkit/chainlink-testing-framework/lib/logging" - "github.com/smartcontractkit/chainlink-testing-framework/lib/logstream" "github.com/smartcontractkit/chainlink-testing-framework/lib/utils/testcontext" "github.com/smartcontractkit/chainlink/v2/core/config/env" @@ -26,6 +31,7 @@ import ( "github.com/smartcontractkit/chainlink/integration-tests/contracts" "github.com/smartcontractkit/chainlink/integration-tests/docker/test_env" tc "github.com/smartcontractkit/chainlink/integration-tests/testconfig" + "github.com/smartcontractkit/chainlink/integration-tests/utils" ) type ocr2test struct { @@ -224,33 +230,150 @@ func prepareORCv2SmokeTestEnv(t *testing.T, testData ocr2test, l zerolog.Logger, } func assertCorrectNodeConfiguration(t *testing.T, l zerolog.Logger, totalNodeCount int, testData 
ocr2test, testEnv *test_env.CLClusterTestEnv) { - expectedNodesWithConfiguration := totalNodeCount - 1 // minus bootstrap node - var expectedPatterns []string + l.Info().Msg("Checking if all nodes have correct plugin configuration applied") - if testData.env[string(env.MedianPlugin.Cmd)] != "" { - expectedPatterns = append(expectedPatterns, "Registered loopp.*OCR2.*Median.*") - } + // we have to use gomega here, because sometimes there's a delay in the logs being written (especially in the CI) + // and this check fails on the first execution, and we don't want to add any hardcoded sleeps - if testData.chainReaderAndCodec { - expectedPatterns = append(expectedPatterns, "relayConfig\\.chainReader") - } else { - expectedPatterns = append(expectedPatterns, "ChainReader missing from RelayConfig; falling back to internal MedianContract") - } + gom := gomega.NewGomegaWithT(t) + gom.Eventually(func(g gomega.Gomega) { + allNodesHaveCorrectConfig := false + + var expectedPatterns []string + expectedNodeCount := totalNodeCount - 1 + + if testData.env[string(env.MedianPlugin.Cmd)] != "" { + expectedPatterns = append(expectedPatterns, `Registered loopp.*OCR2.*Median.*`) + } + + if testData.chainReaderAndCodec { + expectedPatterns = append(expectedPatterns, `relayConfig.chainReader`) + } else { + expectedPatterns = append(expectedPatterns, "ChainReader missing from RelayConfig; falling back to internal MedianContract") + } + + logFilePaths := make(map[string]string) + tempLogsDir := os.TempDir() + + var nodesToInclude []string + for i := 1; i < totalNodeCount; i++ { + nodesToInclude = append(nodesToInclude, testEnv.ClCluster.Nodes[i].ContainerName+".log") + } + + // save all log files in temp dir + loggingErr := ctf_docker.WriteAllContainersLogs(l, tempLogsDir) + if loggingErr != nil { + l.Debug().Err(loggingErr).Msg("Error writing all containers logs. Trying again...") + + // try again + return + } + + var fileNameIncludeFilter = func(name string) bool { + for _, n := range nodesToInclude { + if strings.EqualFold(name, n) { + return true + } + } + return false + } + + // find log files for CL nodes + fileWalkErr := filepath.Walk(tempLogsDir, func(path string, info os.FileInfo, err error) error { + if err != nil { + if os.IsPermission(err) { + return nil + } + return err + } + if !info.IsDir() && fileNameIncludeFilter(info.Name()) { + absPath, err := filepath.Abs(path) + if err != nil { + return err + } + logFilePaths[strings.TrimSuffix(info.Name(), ".log")] = absPath + } + return nil + }) + + if fileWalkErr != nil { + l.Debug().Err(fileWalkErr).Msg("Error walking through log files. Trying again...") + + return + } + + if len(logFilePaths) != expectedNodeCount { + l.Debug().Msgf("Expected number of log files to match number of nodes (excluding bootstrap node). Expected: %d, Found: %d. 
Trying again...", expectedNodeCount, len(logFilePaths)) + + return + } + + // search for expected pattern in log file + var searchForLineInFile = func(filePath string, pattern string) bool { + file, fileErr := os.Open(filePath) + if fileErr != nil { + return false + } + + defer func(file *os.File) { + _ = file.Close() + }(file) + + scanner := bufio.NewScanner(file) + scanner.Split(bufio.ScanLines) + pc := regexp.MustCompile(pattern) + + for scanner.Scan() { + jsonLogLine := scanner.Text() + if pc.MatchString(jsonLogLine) { + return true + } + + } + return false + } + + wg := sync.WaitGroup{} + resultsCh := make(chan map[string][]string, len(logFilePaths)) + + // process all logs in parallel + for nodeName, logFilePath := range logFilePaths { + wg.Add(1) + filePath := logFilePath + go func() { + defer wg.Done() + var patternsFound []string + for _, pattern := range expectedPatterns { + found := searchForLineInFile(filePath, pattern) + if found { + patternsFound = append(patternsFound, pattern) + } + } + resultsCh <- map[string][]string{nodeName: patternsFound} + }() + } + + wg.Wait() + close(resultsCh) - // make sure that nodes are correctly configured by scanning the logs - for _, pattern := range expectedPatterns { - l.Info().Msgf("Checking for pattern: '%s' in CL node logs", pattern) var correctlyConfiguredNodes []string - for i := 1; i < len(testEnv.ClCluster.Nodes); i++ { - logProcessor, processFn, err := logstream.GetRegexMatchingProcessor(testEnv.LogStream, pattern) - require.NoError(t, err, "Error getting regex matching processor") - - count, err := logProcessor.ProcessContainerLogs(testEnv.ClCluster.Nodes[i].ContainerName, processFn) - require.NoError(t, err, "Error processing container logs") - if *count >= 1 { - correctlyConfiguredNodes = append(correctlyConfiguredNodes, testEnv.ClCluster.Nodes[i].ContainerName) + var incorrectlyConfiguredNodes []string + + // check results + for result := range resultsCh { + for nodeName, patternsFound := range result { + if len(patternsFound) == len(expectedPatterns) { + correctlyConfiguredNodes = append(correctlyConfiguredNodes, nodeName) + } else { + incorrectlyConfiguredNodes = append(incorrectlyConfiguredNodes, nodeName) + } } } - require.Equal(t, expectedNodesWithConfiguration, len(correctlyConfiguredNodes), "expected correct plugin config to be applied to %d cl-nodes, but only following ones had it: %s; regexp used: %s", expectedNodesWithConfiguration, strings.Join(correctlyConfiguredNodes, ", "), string(pattern)) - } + + allNodesHaveCorrectConfig = len(correctlyConfiguredNodes) == expectedNodeCount + + g.Expect(allNodesHaveCorrectConfig).To(gomega.BeTrue(), "%d nodes' logs were missing expected plugin configuration entries. Correctly configured nodes: %s. Nodes with missing configuration: %s. 
Expected log patterns: %s", expectedNodeCount-len(correctlyConfiguredNodes), strings.Join(correctlyConfiguredNodes, ", "), strings.Join(incorrectlyConfiguredNodes, ", "), strings.Join(expectedPatterns, ", ")) + }, "1m", "10s").Should(gomega.Succeed()) + + l.Info().Msg("All nodes have correct plugin configuration applied") } diff --git a/integration-tests/testconfig/automation/example.toml b/integration-tests/testconfig/automation/example.toml index 3bbe78d693d..c239e5a3966 100644 --- a/integration-tests/testconfig/automation/example.toml +++ b/integration-tests/testconfig/automation/example.toml @@ -7,14 +7,6 @@ version="2.7.0" # if set to true will save logs even if test did not fail test_log_collect=false -[Logging.LogStream] -# supported targets: file, loki, in-memory. if empty no logs will be persistet -log_targets=["file"] -# context timeout for starting log producer and also time-frame for requesting logs -log_producer_timeout="10s" -# number of retries before log producer gives up and stops listening to logs -log_producer_retry_limit=10 - # if you want to use polygon_mumbial [Network] selected_networks=["polygon_mumbai"] diff --git a/integration-tests/testconfig/ccip/overrides/sepolia_avax_binance.toml b/integration-tests/testconfig/ccip/overrides/sepolia_avax_binance.toml index 06af64d5d91..72c43b12da5 100644 --- a/integration-tests/testconfig/ccip/overrides/sepolia_avax_binance.toml +++ b/integration-tests/testconfig/ccip/overrides/sepolia_avax_binance.toml @@ -5,10 +5,6 @@ chainlink_node_funding = 2 [Logging] test_log_collect = true -[Logging.LogStream] -# supported targets: file, loki, in-memory. if empty no logs will be persisted -log_targets = ["loki"] - [Network] selected_networks = ['SEPOLIA', 'AVALANCHE_FUJI', 'BSC_TESTNET'] diff --git a/integration-tests/testconfig/default.toml b/integration-tests/testconfig/default.toml index b9987d4571d..8180b40ae21 100644 --- a/integration-tests/testconfig/default.toml +++ b/integration-tests/testconfig/default.toml @@ -2,19 +2,6 @@ # set to true to flush logs to selected target regardless of test result; otherwise logs are only flushed if test failed test_log_collect = false -[Logging.Grafana] -base_url = "https://grafana.ops.prod.cldev.sh" -base_url_github_ci = "http://localhost:8080/primary" -dashboard_url = "/d/ddf75041-1e39-42af-aa46-361fe4c36e9e/ci-e2e-tests-logs" - -[Logging.LogStream] -# supported targets: file, loki, in-memory. if empty no logs will be persisted -log_targets = ["file"] -# context timeout for starting log producer and also time-frame for requesting logs -log_producer_timeout = "10s" -# number of retries before log producer gives up and stops listening to logs -log_producer_retry_limit = 10 - [ChainlinkImage] # postgres version to use postgres_version = "12.0" diff --git a/integration-tests/testconfig/forwarder_ocr/example.toml b/integration-tests/testconfig/forwarder_ocr/example.toml index 517a341f803..6ca4b8bbcc3 100644 --- a/integration-tests/testconfig/forwarder_ocr/example.toml +++ b/integration-tests/testconfig/forwarder_ocr/example.toml @@ -7,33 +7,6 @@ version="2.7.0" # if set to true will save logs even if test did not fail test_log_collect=false -[Logging.LogStream] -# supported targets: file, loki, in-memory. 
if empty no logs will be persistet -log_targets=["file"] -# context timeout for starting log producer and also time-frame for requesting logs -log_producer_timeout="10s" -# number of retries before log producer gives up and stops listening to logs -log_producer_retry_limit=10 - -[Logging.Loki] -tenant_id="tenant_id" -# full URL of Loki ingest endpoint -endpoint="https://loki.url/api/v3/push" -# currently only needed when using public instance -basic_auth_secret="loki-basic-auth" -# only needed for cloud grafana -bearer_token_secret="bearer_token" - -# LogStream will try to shorten Grafana URLs by default (if all 3 variables are set) -[Logging.Grafana] -# grafana url (trailing "/" will be stripped) -base_url="http://grafana.url" -# url of your grafana dashboard (prefix and suffix "/" are stirpped), example: /d/ad61652-2712-1722/my-dashboard -dashboard_url="/d/your-dashboard" -# Grafana dashboard uid to annotate. Find it in Dashboard Settings -> JSON Model -dashboard_uid="dashboard-uid-to-annotate" -bearer_token_secret="my-awesome-token" - # if you want to use polygon_mumbial [Network] selected_networks=["polygon_mumbai"] diff --git a/integration-tests/testconfig/forwarder_ocr2/example.toml b/integration-tests/testconfig/forwarder_ocr2/example.toml index 3ec3e4c690a..e3fb66a0f3a 100644 --- a/integration-tests/testconfig/forwarder_ocr2/example.toml +++ b/integration-tests/testconfig/forwarder_ocr2/example.toml @@ -8,33 +8,6 @@ version="2.7.0" # if set to true will save logs even if test did not fail test_log_collect=false -[Logging.LogStream] -# supported targets: file, loki, in-memory. if empty no logs will be persistet -log_targets=["file"] -# context timeout for starting log producer and also time-frame for requesting logs -log_producer_timeout="10s" -# number of retries before log producer gives up and stops listening to logs -log_producer_retry_limit=10 - -[Logging.Loki] -tenant_id="tenant_id" -# full URL of Loki ingest endpoint -endpoint="https://loki.url/api/v3/push" -# currently only needed when using public instance -basic_auth_secret="loki-basic-auth" -# only needed for cloud grafana -bearer_token_secret="bearer_token" - -# LogStream will try to shorten Grafana URLs by default (if all 3 variables are set) -[Logging.Grafana] -# grafana url (trailing "/" will be stripped) -base_url="http://grafana.url" -# url of your grafana dashboard (prefix and suffix "/" are stirpped), example: /d/ad61652-2712-1722/my-dashboard -dashboard_url="/d/your-dashboard" -# Grafana dashboard uid to annotate. Find it in Dashboard Settings -> JSON Model -dashboard_uid="dashboard-uid-to-annotate" -bearer_token_secret="my-awesome-token" - # if you want to use polygon_mumbial [Network] selected_networks=["polygon_mumbai"] diff --git a/integration-tests/testconfig/functions/example.toml b/integration-tests/testconfig/functions/example.toml index 74d931632a8..ec7076fa9f9 100644 --- a/integration-tests/testconfig/functions/example.toml +++ b/integration-tests/testconfig/functions/example.toml @@ -7,14 +7,6 @@ version="2.7.0" # if set to true will save logs even if test did not fail test_log_collect=false -[Logging.LogStream] -# supported targets: file, loki, in-memory. 
if empty no logs will be persistet -log_targets=["file"] -# context timeout for starting log producer and also time-frame for requesting logs -log_producer_timeout="10s" -# number of retries before log producer gives up and stops listening to logs -log_producer_retry_limit=10 - # if you want to use simulated network [Network] selected_networks=["polygon_mumbai"] diff --git a/integration-tests/testconfig/keeper/example.toml b/integration-tests/testconfig/keeper/example.toml index 4efbf974827..7fe3bf26d0a 100644 --- a/integration-tests/testconfig/keeper/example.toml +++ b/integration-tests/testconfig/keeper/example.toml @@ -7,14 +7,6 @@ version="2.7.0" # if set to true will save logs even if test did not fail test_log_collect=false -[Logging.LogStream] -# supported targets: file, loki, in-memory. if empty no logs will be persistet -log_targets=["file"] -# context timeout for starting log producer and also time-frame for requesting logs -log_producer_timeout="10s" -# number of retries before log producer gives up and stops listening to logs -log_producer_retry_limit=10 - # if you want to use polygon_mumbial [Network] selected_networks=["polygon_mumbai"] diff --git a/integration-tests/testconfig/log_poller/example.toml b/integration-tests/testconfig/log_poller/example.toml index 78f3b5482d9..b94b6e0e202 100644 --- a/integration-tests/testconfig/log_poller/example.toml +++ b/integration-tests/testconfig/log_poller/example.toml @@ -7,14 +7,6 @@ version="2.7.0" # if set to true will save logs even if test did not fail test_log_collect=false -[Logging.LogStream] -# supported targets: file, loki, in-memory. if empty no logs will be persistet -log_targets=["file"] -# context timeout for starting log producer and also time-frame for requesting logs -log_producer_timeout="10s" -# number of retries before log producer gives up and stops listening to logs -log_producer_retry_limit=10 - # if you want to use polygon_mumbial [Network] selected_networks=["polygon_mumbai"] diff --git a/integration-tests/testconfig/node/example.toml b/integration-tests/testconfig/node/example.toml index bc5628e46b3..4635e40c037 100644 --- a/integration-tests/testconfig/node/example.toml +++ b/integration-tests/testconfig/node/example.toml @@ -7,14 +7,6 @@ version="2.7.0" # if set to true will save logs even if test did not fail test_log_collect=false -[Logging.LogStream] -# supported targets: file, loki, in-memory. if empty no logs will be persistet -log_targets=["file"] -# context timeout for starting log producer and also time-frame for requesting logs -log_producer_timeout="10s" -# number of retries before log producer gives up and stops listening to logs -log_producer_retry_limit=10 - # if you want to use polygon_mumbial [Network] selected_networks=["polygon_mumbai"] diff --git a/integration-tests/testconfig/ocr/example.toml b/integration-tests/testconfig/ocr/example.toml index 7c1c755567f..d1edd3a67fd 100644 --- a/integration-tests/testconfig/ocr/example.toml +++ b/integration-tests/testconfig/ocr/example.toml @@ -7,33 +7,6 @@ version="2.7.0" # if set to true will save logs even if test did not fail test_log_collect=false -[Logging.LogStream] -# supported targets: file, loki, in-memory. 
if empty no logs will be persistet -log_targets=["file"] -# context timeout for starting log producer and also time-frame for requesting logs -log_producer_timeout="10s" -# number of retries before log producer gives up and stops listening to logs -log_producer_retry_limit=10 - -[Logging.Loki] -tenant_id="tenant_id" -# full URL of Loki ingest endpoint -endpoint="https://loki.url/api/v3/push" -# currently only needed when using public instance -basic_auth_secret="loki-basic-auth" -# only needed for cloud grafana -bearer_token_secret="bearer_token" - -# LogStream will try to shorten Grafana URLs by default (if all 3 variables are set) -[Logging.Grafana] -# grafana url (trailing "/" will be stripped) -base_url="http://grafana.url" -# url of your grafana dashboard (prefix and suffix "/" are stirpped), example: /d/ad61652-2712-1722/my-dashboard -dashboard_url="/d/your-dashboard" -# Grafana dashboard uid to annotate. Find it in Dashboard Settings -> JSON Model -dashboard_uid="dashboard-uid-to-annotate" -bearer_token_secret="my-awesome-token" - # if you want to use polygon_mumbial [Network] selected_networks=["polygon_mumbai"] diff --git a/integration-tests/testconfig/ocr2/example.toml b/integration-tests/testconfig/ocr2/example.toml index 319f64d2580..679e4527a31 100644 --- a/integration-tests/testconfig/ocr2/example.toml +++ b/integration-tests/testconfig/ocr2/example.toml @@ -7,33 +7,6 @@ version="2.7.0" # if set to true will save logs even if test did not fail test_log_collect=false -[Logging.LogStream] -# supported targets: file, loki, in-memory. if empty no logs will be persistet -log_targets=["file"] -# context timeout for starting log producer and also time-frame for requesting logs -log_producer_timeout="10s" -# number of retries before log producer gives up and stops listening to logs -log_producer_retry_limit=10 - -[Logging.Loki] -tenant_id="tenant_id" -# full URL of Loki ingest endpoint -endpoint="https://loki.url/api/v3/push" -# currently only needed when using public instance -basic_auth_secret="loki-basic-auth" -# only needed for cloud grafana -bearer_token_secret="bearer_token" - -# LogStream will try to shorten Grafana URLs by default (if all 3 variables are set) -[Logging.Grafana] -# grafana url (trailing "/" will be stripped) -base_url="http://grafana.url" -# url of your grafana dashboard (prefix and suffix "/" are stirpped), example: /d/ad61652-2712-1722/my-dashboard -dashboard_url="/d/your-dashboard" -# Grafana dashboard uid to annotate. 
Find it in Dashboard Settings -> JSON Model -dashboard_uid="dashboard-uid-to-annotate" -bearer_token_secret="my-awesome-token" - # if you want to use polygon_mumbial [Network] selected_networks=["polygon_mumbai"] diff --git a/integration-tests/testconfig/testconfig.go b/integration-tests/testconfig/testconfig.go index 545818e3348..19e3f0b7ada 100644 --- a/integration-tests/testconfig/testconfig.go +++ b/integration-tests/testconfig/testconfig.go @@ -6,7 +6,6 @@ import ( "fmt" "math/big" "os" - "slices" "strings" "github.com/barkimedes/go-deepcopy" @@ -631,26 +630,6 @@ func (c *TestConfig) Validate() error { return fmt.Errorf("logging config must be set") } - if err := c.Logging.Validate(); err != nil { - return errors.Wrapf(err, "logging config validation failed") - } - - if c.Logging.Loki != nil { - if err := c.Logging.Loki.Validate(); err != nil { - return errors.Wrapf(err, "loki config validation failed") - } - } - - if c.Logging.LogStream != nil && slices.Contains(c.Logging.LogStream.LogTargets, "loki") { - if c.Logging.Loki == nil { - return fmt.Errorf("in order to use Loki as logging target you must set Loki config in logging config") - } - - if err := c.Logging.Loki.Validate(); err != nil { - return errors.Wrapf(err, "loki config validation failed") - } - } - if c.Pyroscope != nil { if err := c.Pyroscope.Validate(); err != nil { return errors.Wrapf(err, "pyroscope config validation failed") diff --git a/integration-tests/testconfig/vrfv2/example.toml b/integration-tests/testconfig/vrfv2/example.toml index 13af6dee620..3665c2f43cf 100644 --- a/integration-tests/testconfig/vrfv2/example.toml +++ b/integration-tests/testconfig/vrfv2/example.toml @@ -7,14 +7,6 @@ version="2.7.0" # if set to true will save logs even if test did not fail test_log_collect=false -[Logging.LogStream] -# supported targets: file, loki, in-memory. if empty no logs will be persistet -log_targets=["file"] -# context timeout for starting log producer and also time-frame for requesting logs -log_producer_timeout="10s" -# number of retries before log producer gives up and stops listening to logs -log_producer_retry_limit=10 - # if you want to use polygon_mumbial [Network] selected_networks=["polygon_mumbai"] diff --git a/integration-tests/testconfig/vrfv2plus/example.toml b/integration-tests/testconfig/vrfv2plus/example.toml index 160e9ba03a9..a45d53f67b8 100644 --- a/integration-tests/testconfig/vrfv2plus/example.toml +++ b/integration-tests/testconfig/vrfv2plus/example.toml @@ -7,14 +7,6 @@ version="2.7.0" # if set to true will save logs even if test did not fail test_log_collect=false -[Logging.LogStream] -# supported targets: file, loki, in-memory. 
if empty no logs will be persistet -log_targets=["file"] -# context timeout for starting log producer and also time-frame for requesting logs -log_producer_timeout="10s" -# number of retries before log producer gives up and stops listening to logs -log_producer_retry_limit=10 - # if you want to use polygon_mumbial [Network] selected_networks=["polygon_mumbai"] diff --git a/integration-tests/testsetups/ccip/test_helpers.go b/integration-tests/testsetups/ccip/test_helpers.go index b859fab10c5..514a232bb80 100644 --- a/integration-tests/testsetups/ccip/test_helpers.go +++ b/integration-tests/testsetups/ccip/test_helpers.go @@ -11,6 +11,7 @@ import ( "github.com/ethereum/go-ethereum/accounts/abi/bind" chainsel "github.com/smartcontractkit/chain-selectors" + "go.uber.org/zap/zapcore" commonconfig "github.com/smartcontractkit/chainlink-common/pkg/config" "github.com/smartcontractkit/chainlink-testing-framework/lib/blockchain" @@ -18,6 +19,7 @@ import ( ctftestenv "github.com/smartcontractkit/chainlink-testing-framework/lib/docker/test_env" "github.com/smartcontractkit/chainlink-testing-framework/lib/logging" "github.com/smartcontractkit/chainlink-testing-framework/lib/networks" + "github.com/smartcontractkit/chainlink-testing-framework/lib/testreporters" "github.com/smartcontractkit/chainlink-testing-framework/lib/utils/conversions" "github.com/smartcontractkit/chainlink-testing-framework/lib/utils/ptr" "github.com/smartcontractkit/chainlink-testing-framework/lib/utils/testcontext" @@ -169,7 +171,6 @@ func NewIntegrationEnvironment(t *testing.T, opts ...changeset.TestOps) (changes dockerEnv.devEnvTestCfg.CCIP.RMNConfig.GetProxyVersion(), dockerEnv.devEnvTestCfg.CCIP.RMNConfig.GetAFN2ProxyImage(), dockerEnv.devEnvTestCfg.CCIP.RMNConfig.GetAFN2ProxyVersion(), - dockerEnv.testEnv.LogStream, ) require.NoError(t, err) return deployedEnv, *rmnCluster @@ -323,11 +324,30 @@ func CreateDockerEnv(t *testing.T) ( } } + // ignore critical CL node logs until they are fixed, as otherwise tests will fail + var logScannerSettings = test_env.GetDefaultChainlinkNodeLogScannerSettingsWithExtraAllowedMessages(testreporters.NewAllowedLogMessage( + "No live RPC nodes available", + "CL nodes are started before simulated chains, so this is expected", + zapcore.DPanicLevel, + testreporters.WarnAboutAllowedMsgs_No), + testreporters.NewAllowedLogMessage( + "Error stopping job service", + "Possible lifecycle bug in chainlink: failed to close RMN home reader: has already been stopped: already stopped", + zapcore.DPanicLevel, + testreporters.WarnAboutAllowedMsgs_No), + testreporters.NewAllowedLogMessage( + "Shutdown grace period of 5s exceeded, closing DB and exiting...", + "Possible lifecycle bug in chainlink.", + zapcore.DPanicLevel, + testreporters.WarnAboutAllowedMsgs_No), + ) + builder := test_env.NewCLTestEnvBuilder(). WithTestConfig(&cfg). WithTestInstance(t). WithMockAdapter(). WithJobDistributor(cfg.CCIP.JobDistributorConfig). + WithChainlinkNodeLogScanner(logScannerSettings). 
WithStandardCleanup() // if private ethereum networks are provided, we will use them to create the test environment @@ -433,7 +453,6 @@ func StartChainlinkNodes( pointer.GetString(cfg.GetChainlinkImageConfig().Image), pointer.GetString(cfg.GetChainlinkImageConfig().Version), toml, - env.LogStream, test_env.WithPgDBOptions( ctftestenv.WithPostgresImageVersion(pointer.GetString(cfg.GetChainlinkImageConfig().PostgresVersion)), ), From 93033566cdfb1c1f33fb11db43a877098194f616 Mon Sep 17 00:00:00 2001 From: Bartek Tofel Date: Thu, 12 Dec 2024 09:50:15 +0100 Subject: [PATCH 08/15] add CHAINLINK_USER_TEAM: CCIP to nightly CCIP tests (#15646) --- .github/e2e-tests.yml | 114 +++++++++++++++++++++++------------------- 1 file changed, 63 insertions(+), 51 deletions(-) diff --git a/.github/e2e-tests.yml b/.github/e2e-tests.yml index fe30e2342c2..1bf55a64418 100644 --- a/.github/e2e-tests.yml +++ b/.github/e2e-tests.yml @@ -10,7 +10,7 @@ runner-test-matrix: # START: OCR tests # Example of 1 runner for all tests in integration-tests/smoke/ocr_test.go - - id: smoke/ocr_test.go:* + - id: smoke/ocr_test.go:* path: integration-tests/smoke/ocr_test.go test_env_type: docker runs_on: ubuntu-latest @@ -27,7 +27,7 @@ runner-test-matrix: runs_on: ubuntu-latest test_cmd: cd integration-tests/ && go test soak/ocr_test.go -v -test.run ^TestOCRv1Soak$ -test.parallel=1 -timeout 900h -count=1 -json test_cmd_opts: 2>&1 | tee /tmp/gotest.log | gotestloghelper -ci -singlepackage -hidepassingtests=false - test_secrets_required: true + test_secrets_required: true test_env_vars: TEST_SUITE: soak @@ -60,7 +60,7 @@ runner-test-matrix: test_config_override_path: integration-tests/testconfig/ocr2/overrides/base_sepolia_quick_smoke_test.toml test_secrets_required: true test_env_vars: - TEST_SUITE: soak + TEST_SUITE: soak - id: soak/ocr_test.go:TestForwarderOCRv1Soak path: integration-tests/soak/ocr_test.go @@ -79,7 +79,7 @@ runner-test-matrix: test_secrets_required: true test_env_vars: TEST_SUITE: soak - + - id: soak/ocr_test.go:TestOCRSoak_GethReorgBelowFinality_FinalityTagDisabled path: integration-tests/soak/ocr_test.go test_env_type: k8s-remote-runner @@ -87,7 +87,7 @@ runner-test-matrix: test_cmd: cd integration-tests/ && go test soak/ocr_test.go -v -test.run TestOCRSoak_GethReorgBelowFinality_FinalityTagDisabled -test.parallel=1 -timeout 900h -count=1 -json test_secrets_required: true test_env_vars: - TEST_SUITE: soak + TEST_SUITE: soak - id: soak/ocr_test.go:TestOCRSoak_GethReorgBelowFinality_FinalityTagEnabled path: integration-tests/soak/ocr_test.go @@ -96,7 +96,7 @@ runner-test-matrix: test_cmd: cd integration-tests/ && go test soak/ocr_test.go -v -test.run ^TestOCRSoak_GethReorgBelowFinality_FinalityTagEnabled$ -test.parallel=1 -timeout 900h -count=1 -json test_secrets_required: true test_env_vars: - TEST_SUITE: soak + TEST_SUITE: soak - id: soak/ocr_test.go:TestOCRSoak_GasSpike path: integration-tests/soak/ocr_test.go @@ -105,7 +105,7 @@ runner-test-matrix: test_cmd: cd integration-tests/ && go test soak/ocr_test.go -v -test.run ^TestOCRSoak_GasSpike$ -test.parallel=1 -timeout 900h -count=1 -json test_secrets_required: true test_env_vars: - TEST_SUITE: soak + TEST_SUITE: soak - id: soak/ocr_test.go:TestOCRSoak_ChangeBlockGasLimit path: integration-tests/soak/ocr_test.go @@ -114,7 +114,7 @@ runner-test-matrix: test_cmd: cd integration-tests/ && go test soak/ocr_test.go -v -test.run ^TestOCRSoak_ChangeBlockGasLimit$ -test.parallel=1 -timeout 900h -count=1 -json test_secrets_required: true test_env_vars: - TEST_SUITE: 
soak + TEST_SUITE: soak - id: soak/ocr_test.go:TestOCRSoak_RPCDownForAllCLNodes path: integration-tests/soak/ocr_test.go @@ -123,7 +123,7 @@ runner-test-matrix: test_cmd: cd integration-tests/ && go test soak/ocr_test.go -v -test.run ^TestOCRSoak_RPCDownForAllCLNodes$ -test.parallel=1 -timeout 900h -count=1 -json test_secrets_required: true test_env_vars: - TEST_SUITE: soak + TEST_SUITE: soak - id: soak/ocr_test.go:TestOCRSoak_RPCDownForHalfCLNodes path: integration-tests/soak/ocr_test.go @@ -132,7 +132,7 @@ runner-test-matrix: test_cmd: cd integration-tests/ && go test soak/ocr_test.go -v -test.run ^TestOCRSoak_RPCDownForHalfCLNodes$ -test.parallel=1 -timeout 900h -count=1 -json test_secrets_required: true test_env_vars: - TEST_SUITE: soak + TEST_SUITE: soak - id: smoke/forwarder_ocr_test.go:* path: integration-tests/smoke/forwarder_ocr_test.go @@ -168,7 +168,7 @@ runner-test-matrix: pyroscope_env: ci-smoke-ocr2-evm-simulated test_env_vars: E2E_TEST_CHAINLINK_VERSION: '{{ env.DEFAULT_CHAINLINK_PLUGINS_VERSION }}' # This is the chainlink version that has the plugins - + - id: smoke/ocr2_test.go:*-plugins path: integration-tests/smoke/ocr2_test.go test_env_type: docker @@ -197,7 +197,7 @@ runner-test-matrix: # END: OCR tests # START: Automation tests - + - id: smoke/automation_test.go:^TestAutomationBasic/registry_2_0|TestAutomationBasic/registry_2_1_conditional|TestAutomationBasic/registry_2_1_logtrigger$ path: integration-tests/smoke/automation_test.go test_env_type: docker @@ -273,7 +273,7 @@ runner-test-matrix: - Merge Queue E2E Core Tests - Nightly E2E Tests test_cmd: cd integration-tests/smoke && go test -test.run "^TestAutomationBasic/registry_2_3_with_mercury_v03_link|TestAutomationBasic/registry_2_3_with_logtrigger_and_mercury_v02_link$" -test.parallel=2 -timeout 30m -count=1 -json - pyroscope_env: ci-smoke-automation-evm-simulated + pyroscope_env: ci-smoke-automation-evm-simulated - id: smoke/automation_test.go:^TestSetUpkeepTriggerConfig$ path: integration-tests/smoke/automation_test.go @@ -284,7 +284,7 @@ runner-test-matrix: - Merge Queue E2E Core Tests - Nightly E2E Tests test_cmd: cd integration-tests/smoke && go test -test.run ^TestSetUpkeepTriggerConfig$ -test.parallel=2 -timeout 30m -count=1 -json - pyroscope_env: ci-smoke-automation-evm-simulated + pyroscope_env: ci-smoke-automation-evm-simulated - id: smoke/automation_test.go:^TestAutomationAddFunds$ path: integration-tests/smoke/automation_test.go @@ -295,7 +295,7 @@ runner-test-matrix: - Merge Queue E2E Core Tests - Nightly E2E Tests test_cmd: cd integration-tests/smoke && go test -test.run ^TestAutomationAddFunds$ -test.parallel=3 -timeout 30m -count=1 -json - pyroscope_env: ci-smoke-automation-evm-simulated + pyroscope_env: ci-smoke-automation-evm-simulated - id: smoke/automation_test.go:^TestAutomationPauseUnPause$ path: integration-tests/smoke/automation_test.go @@ -306,7 +306,7 @@ runner-test-matrix: - Merge Queue E2E Core Tests - Nightly E2E Tests test_cmd: cd integration-tests/smoke && go test -test.run ^TestAutomationPauseUnPause$ -test.parallel=3 -timeout 30m -count=1 -json - pyroscope_env: ci-smoke-automation-evm-simulated + pyroscope_env: ci-smoke-automation-evm-simulated - id: smoke/automation_test.go:^TestAutomationRegisterUpkeep$ path: integration-tests/smoke/automation_test.go @@ -317,7 +317,7 @@ runner-test-matrix: - Merge Queue E2E Core Tests - Nightly E2E Tests test_cmd: cd integration-tests/smoke && go test -test.run ^TestAutomationRegisterUpkeep$ -test.parallel=3 -timeout 30m -count=1 -json - 
pyroscope_env: ci-smoke-automation-evm-simulated + pyroscope_env: ci-smoke-automation-evm-simulated - id: smoke/automation_test.go:^TestAutomationPauseRegistry$ path: integration-tests/smoke/automation_test.go @@ -328,7 +328,7 @@ runner-test-matrix: - Merge Queue E2E Core Tests - Nightly E2E Tests test_cmd: cd integration-tests/smoke && go test -test.run ^TestAutomationPauseRegistry$ -test.parallel=3 -timeout 30m -count=1 -json - pyroscope_env: ci-smoke-automation-evm-simulated + pyroscope_env: ci-smoke-automation-evm-simulated - id: smoke/automation_test.go:^TestAutomationKeeperNodesDown$ path: integration-tests/smoke/automation_test.go @@ -339,7 +339,7 @@ runner-test-matrix: - Merge Queue E2E Core Tests - Nightly E2E Tests test_cmd: cd integration-tests/smoke && go test -test.run ^TestAutomationKeeperNodesDown$ -test.parallel=3 -timeout 30m -count=1 -json - pyroscope_env: ci-smoke-automation-evm-simulated + pyroscope_env: ci-smoke-automation-evm-simulated - id: smoke/automation_test.go:^TestAutomationPerformSimulation$ path: integration-tests/smoke/automation_test.go @@ -350,7 +350,7 @@ runner-test-matrix: - Merge Queue E2E Core Tests - Nightly E2E Tests test_cmd: cd integration-tests/smoke && go test -test.run ^TestAutomationPerformSimulation$ -test.parallel=3 -timeout 30m -count=1 -json - pyroscope_env: ci-smoke-automation-evm-simulated + pyroscope_env: ci-smoke-automation-evm-simulated - id: smoke/automation_test.go:^TestAutomationCheckPerformGasLimit$ path: integration-tests/smoke/automation_test.go @@ -361,7 +361,7 @@ runner-test-matrix: - Merge Queue E2E Core Tests - Nightly E2E Tests test_cmd: cd integration-tests/smoke && go test -test.run ^TestAutomationCheckPerformGasLimit$ -test.parallel=3 -timeout 30m -count=1 -json - pyroscope_env: ci-smoke-automation-evm-simulated + pyroscope_env: ci-smoke-automation-evm-simulated - id: smoke/automation_test.go:^TestUpdateCheckData$ path: integration-tests/smoke/automation_test.go @@ -372,7 +372,7 @@ runner-test-matrix: - Merge Queue E2E Core Tests - Nightly E2E Tests test_cmd: cd integration-tests/smoke && go test -test.run ^TestUpdateCheckData$ -test.parallel=3 -timeout 30m -count=1 -json - pyroscope_env: ci-smoke-automation-evm-simulated + pyroscope_env: ci-smoke-automation-evm-simulated - id: smoke/automation_test.go:^TestSetOffchainConfigWithMaxGasPrice$ path: integration-tests/smoke/automation_test.go @@ -383,7 +383,7 @@ runner-test-matrix: - Merge Queue E2E Core Tests - Nightly E2E Tests test_cmd: cd integration-tests/smoke && go test -test.run ^TestSetOffchainConfigWithMaxGasPrice$ -test.parallel=2 -timeout 30m -count=1 -json - pyroscope_env: ci-smoke-automation-evm-simulated + pyroscope_env: ci-smoke-automation-evm-simulated - id: smoke/keeper_test.go:^TestKeeperBasicSmoke$ path: integration-tests/smoke/keeper_test.go @@ -393,7 +393,7 @@ runner-test-matrix: - Merge Queue E2E Core Tests - Nightly E2E Tests test_cmd: cd integration-tests/smoke && go test -test.run ^TestKeeperBasicSmoke$ -test.parallel=3 -timeout 30m -count=1 -json - pyroscope_env: ci-smoke-keeper-evm-simulated + pyroscope_env: ci-smoke-keeper-evm-simulated - id: smoke/keeper_test.go:^TestKeeperBlockCountPerTurn$ path: integration-tests/smoke/keeper_test.go @@ -403,7 +403,7 @@ runner-test-matrix: - Merge Queue E2E Core Tests - Nightly E2E Tests test_cmd: cd integration-tests/smoke && go test -test.run ^TestKeeperBlockCountPerTurn$ -test.parallel=3 -timeout 30m -count=1 -json - pyroscope_env: ci-smoke-keeper-evm-simulated + pyroscope_env: ci-smoke-keeper-evm-simulated 
- id: smoke/keeper_test.go:^TestKeeperSimulation$ path: integration-tests/smoke/keeper_test.go @@ -413,7 +413,7 @@ runner-test-matrix: - Merge Queue E2E Core Tests - Nightly E2E Tests test_cmd: cd integration-tests/smoke && go test -test.run ^TestKeeperSimulation$ -test.parallel=2 -timeout 30m -count=1 -json - pyroscope_env: ci-smoke-keeper-evm-simulated + pyroscope_env: ci-smoke-keeper-evm-simulated - id: smoke/keeper_test.go:^TestKeeperCheckPerformGasLimit$ path: integration-tests/smoke/keeper_test.go @@ -423,7 +423,7 @@ runner-test-matrix: - Merge Queue E2E Core Tests - Nightly E2E Tests test_cmd: cd integration-tests/smoke && go test -test.run ^TestKeeperCheckPerformGasLimit$ -test.parallel=2 -timeout 30m -count=1 -json - pyroscope_env: ci-smoke-keeper-evm-simulated + pyroscope_env: ci-smoke-keeper-evm-simulated - id: smoke/keeper_test.go:^TestKeeperRegisterUpkeep$ path: integration-tests/smoke/keeper_test.go @@ -433,7 +433,7 @@ runner-test-matrix: - Merge Queue E2E Core Tests - Nightly E2E Tests test_cmd: cd integration-tests/smoke && go test -test.run ^TestKeeperRegisterUpkeep$ -test.parallel=3 -timeout 30m -count=1 -json - pyroscope_env: ci-smoke-keeper-evm-simulated + pyroscope_env: ci-smoke-keeper-evm-simulated - id: smoke/keeper_test.go:^TestKeeperAddFunds$ path: integration-tests/smoke/keeper_test.go @@ -443,7 +443,7 @@ runner-test-matrix: - Merge Queue E2E Core Tests - Nightly E2E Tests test_cmd: cd integration-tests/smoke && go test -test.run ^TestKeeperAddFunds$ -test.parallel=3 -timeout 30m -count=1 -json - pyroscope_env: ci-smoke-keeper-evm-simulated + pyroscope_env: ci-smoke-keeper-evm-simulated - id: smoke/keeper_test.go:^TestKeeperRemove$ path: integration-tests/smoke/keeper_test.go @@ -453,8 +453,8 @@ runner-test-matrix: - Merge Queue E2E Core Tests - Nightly E2E Tests test_cmd: cd integration-tests/smoke && go test -test.run ^TestKeeperRemove$ -test.parallel=3 -timeout 30m -count=1 -json - pyroscope_env: ci-smoke-keeper-evm-simulated - + pyroscope_env: ci-smoke-keeper-evm-simulated + - id: smoke/keeper_test.go:^TestKeeperPauseRegistry$ path: integration-tests/smoke/keeper_test.go test_env_type: docker @@ -463,7 +463,7 @@ runner-test-matrix: - Merge Queue E2E Core Tests - Nightly E2E Tests test_cmd: cd integration-tests/smoke && go test -test.run ^TestKeeperPauseRegistry$ -test.parallel=2 -timeout 30m -count=1 -json - pyroscope_env: ci-smoke-keeper-evm-simulated + pyroscope_env: ci-smoke-keeper-evm-simulated - id: smoke/keeper_test.go:^TestKeeperMigrateRegistry$ path: integration-tests/smoke/keeper_test.go @@ -473,7 +473,7 @@ runner-test-matrix: - Merge Queue E2E Core Tests - Nightly E2E Tests test_cmd: cd integration-tests/smoke && go test -test.run ^TestKeeperMigrateRegistry$ -test.parallel=1 -timeout 30m -count=1 -json - pyroscope_env: ci-smoke-keeper-evm-simulated + pyroscope_env: ci-smoke-keeper-evm-simulated - id: smoke/keeper_test.go:^TestKeeperNodeDown$ path: integration-tests/smoke/keeper_test.go @@ -483,7 +483,7 @@ runner-test-matrix: - Merge Queue E2E Core Tests - Nightly E2E Tests test_cmd: cd integration-tests/smoke && go test -test.run ^TestKeeperNodeDown$ -test.parallel=3 -timeout 30m -count=1 -json - pyroscope_env: ci-smoke-keeper-evm-simulated + pyroscope_env: ci-smoke-keeper-evm-simulated - id: smoke/keeper_test.go:^TestKeeperPauseUnPauseUpkeep$ path: integration-tests/smoke/keeper_test.go @@ -493,7 +493,7 @@ runner-test-matrix: - Merge Queue E2E Core Tests - Nightly E2E Tests test_cmd: cd integration-tests/smoke && go test -test.run 
^TestKeeperPauseUnPauseUpkeep$ -test.parallel=1 -timeout 30m -count=1 -json - pyroscope_env: ci-smoke-keeper-evm-simulated + pyroscope_env: ci-smoke-keeper-evm-simulated - id: smoke/keeper_test.go:^TestKeeperUpdateCheckData$ path: integration-tests/smoke/keeper_test.go @@ -503,7 +503,7 @@ runner-test-matrix: - Merge Queue E2E Core Tests - Nightly E2E Tests test_cmd: cd integration-tests/smoke && go test -test.run ^TestKeeperUpdateCheckData$ -test.parallel=1 -timeout 30m -count=1 -json - pyroscope_env: ci-smoke-keeper-evm-simulated + pyroscope_env: ci-smoke-keeper-evm-simulated - id: smoke/keeper_test.go:^TestKeeperJobReplacement$ path: integration-tests/smoke/keeper_test.go @@ -513,7 +513,7 @@ runner-test-matrix: - Merge Queue E2E Core Tests - Nightly E2E Tests test_cmd: cd integration-tests/smoke && go test -test.run ^TestKeeperJobReplacement$ -test.parallel=1 -timeout 30m -count=1 -json - pyroscope_env: ci-smoke-keeper-evm-simulated + pyroscope_env: ci-smoke-keeper-evm-simulated - id: load/automationv2_1/automationv2_1_test.go:TestLogTrigger path: integration-tests/load/automationv2_1/automationv2_1_test.go @@ -546,7 +546,7 @@ runner-test-matrix: test_env_type: docker runs_on: ubuntu22.04-8cores-32GB triggers: - - Automation Nightly Tests + - Automation Nightly Tests test_cmd: cd integration-tests/smoke && go test -test.run ^TestAutomationNodeUpgrade/registry_2_1 -test.parallel=5 -timeout 60m -count=1 -json test_env_vars: E2E_TEST_CHAINLINK_IMAGE: public.ecr.aws/chainlink/chainlink @@ -676,7 +676,7 @@ runner-test-matrix: test_env_vars: TEST_TYPE: Smoke triggers: - - On Demand VRFV2 Plus Performance Test + - On Demand VRFV2 Plus Performance Test - id: load/vrfv2plus/vrfv2plus_test.go:^TestVRFV2PlusBHSPerformance$Smoke path: integration-tests/load/vrfv2plus/vrfv2plus_test.go @@ -688,7 +688,7 @@ runner-test-matrix: test_env_vars: TEST_TYPE: Smoke triggers: - - On Demand VRFV2 Plus Performance Test + - On Demand VRFV2 Plus Performance Test - id: load/vrfv2/vrfv2_test.go:^TestVRFV2Performance$Smoke path: integration-tests/load/vrfv2/vrfv2_test.go @@ -698,9 +698,9 @@ runner-test-matrix: test_config_override_required: true test_secrets_required: true test_env_vars: - TEST_TYPE: Smoke + TEST_TYPE: Smoke triggers: - - On Demand VRFV2 Performance Test + - On Demand VRFV2 Performance Test - id: load/vrfv2/vrfv2_test.go:^TestVRFV2PlusBHSPerformance$Smoke path: integration-tests/load/vrfv2/vrfv2_test.go @@ -892,7 +892,7 @@ runner-test-matrix: - Merge Queue E2E Core Tests - Nightly E2E Tests test_cmd: cd integration-tests/ && go test smoke/flux_test.go -timeout 30m -count=1 -json - pyroscope_env: ci-smoke-flux-evm-simulated + pyroscope_env: ci-smoke-flux-evm-simulated - id: smoke/reorg_above_finality_test.go:* path: integration-tests/smoke/reorg_above_finality_test.go @@ -904,7 +904,7 @@ runner-test-matrix: - Nightly E2E Tests test_cmd: cd integration-tests/ && go test smoke/reorg_above_finality_test.go -timeout 30m -count=1 -json pyroscope_env: ci-smoke-reorg-above-finality-evm-simulated - + - id: migration/upgrade_version_test.go:* path: integration-tests/migration/upgrade_version_test.go test_env_type: docker @@ -1106,7 +1106,8 @@ runner-test-matrix: test_cmd: cd integration-tests/ccip-tests/smoke && go test ccip_test.go -test.run ^TestSmokeCCIPForBidirectionalLane$ -timeout 30m -count=1 -test.parallel=1 -json test_env_vars: E2E_TEST_SELECTED_NETWORK: SIMULATED_1,SIMULATED_2 - + CHAINLINK_USER_TEAM: CCIP + - id: ccip-smoke-usdc path: integration-tests/ccip-tests/smoke/ccip_test.go test_env_type: 
docker @@ -1118,6 +1119,7 @@ runner-test-matrix: test_cmd: cd integration-tests/ccip-tests/smoke && go test ccip_test.go -test.run ^TestSmokeCCIPForBidirectionalLane$ -timeout 30m -count=1 -test.parallel=1 -json test_env_vars: E2E_TEST_SELECTED_NETWORK: SIMULATED_1,SIMULATED_2 + CHAINLINK_USER_TEAM: CCIP test_config_override_path: integration-tests/ccip-tests/testconfig/tomls/usdc_mock_deployment.toml - id: ccip-smoke-db-compatibility @@ -1131,6 +1133,7 @@ runner-test-matrix: test_cmd: cd integration-tests/ccip-tests/smoke && go test ccip_test.go -test.run ^TestSmokeCCIPForBidirectionalLane$ -timeout 30m -count=1 -test.parallel=1 -json test_env_vars: E2E_TEST_SELECTED_NETWORK: SIMULATED_1,SIMULATED_2 + CHAINLINK_USER_TEAM: CCIP test_config_override_path: integration-tests/ccip-tests/testconfig/tomls/db-compatibility.toml - id: ccip-smoke-leader-lane @@ -1157,6 +1160,7 @@ runner-test-matrix: test_cmd: cd integration-tests/ccip-tests/smoke && go test ccip_test.go -test.run ^TestSmokeCCIPTokenPoolRateLimits$ -timeout 30m -count=1 -test.parallel=1 -json test_env_vars: E2E_TEST_SELECTED_NETWORK: SIMULATED_1,SIMULATED_2 + CHAINLINK_USER_TEAM: CCIP - id: ccip-tests/smoke/ccip_test.go:^TestSmokeCCIPMulticall$ path: integration-tests/ccip-tests/smoke/ccip_test.go @@ -1169,6 +1173,7 @@ runner-test-matrix: test_cmd: cd integration-tests/ccip-tests/smoke && go test ccip_test.go -test.run ^TestSmokeCCIPMulticall$ -timeout 30m -count=1 -test.parallel=1 -json test_env_vars: E2E_TEST_SELECTED_NETWORK: SIMULATED_1,SIMULATED_2 + CHAINLINK_USER_TEAM: CCIP - id: ccip-tests/smoke/ccip_test.go:^TestSmokeCCIPManuallyExecuteAfterExecutionFailingDueToInsufficientGas$ path: integration-tests/ccip-tests/smoke/ccip_test.go @@ -1181,6 +1186,7 @@ runner-test-matrix: test_cmd: cd integration-tests/ccip-tests/smoke && go test ccip_test.go -test.run ^TestSmokeCCIPManuallyExecuteAfterExecutionFailingDueToInsufficientGas$ -timeout 30m -count=1 -test.parallel=1 -json test_env_vars: E2E_TEST_SELECTED_NETWORK: SIMULATED_1,SIMULATED_2 + CHAINLINK_USER_TEAM: CCIP - id: ccip-tests/smoke/ccip_test.go:^TestSmokeCCIPOnRampLimits$ path: integration-tests/ccip-tests/smoke/ccip_test.go @@ -1193,6 +1199,7 @@ runner-test-matrix: test_cmd: cd integration-tests/ccip-tests/smoke && go test ccip_test.go -test.run ^TestSmokeCCIPOnRampLimits$ -timeout 30m -count=1 -test.parallel=1 -json test_env_vars: E2E_TEST_SELECTED_NETWORK: SIMULATED_1,SIMULATED_2 + CHAINLINK_USER_TEAM: CCIP - id: ccip-tests/smoke/ccip_test.go:^TestSmokeCCIPOffRampCapacityLimit$ path: integration-tests/ccip-tests/smoke/ccip_test.go @@ -1202,7 +1209,8 @@ runner-test-matrix: - Nightly E2E Tests test_cmd: cd integration-tests/ccip-tests/smoke && go test ccip_test.go -test.run ^TestSmokeCCIPOffRampCapacityLimit$ -timeout 30m -count=1 -test.parallel=1 -json test_env_vars: - E2E_TEST_SELECTED_NETWORK: SIMULATED_1,SIMULATED_2 + E2E_TEST_SELECTED_NETWORK: SIMULATED_1,SIMULATED_2 + CHAINLINK_USER_TEAM: CCIP - id: ccip-tests/smoke/ccip_test.go:^TestSmokeCCIPOffRampAggRateLimit$ path: integration-tests/ccip-tests/smoke/ccip_test.go @@ -1213,6 +1221,7 @@ runner-test-matrix: test_cmd: cd integration-tests/ccip-tests/smoke && go test ccip_test.go -test.run ^TestSmokeCCIPOffRampAggRateLimit$ -timeout 30m -count=1 -test.parallel=1 -json test_env_vars: E2E_TEST_SELECTED_NETWORK: SIMULATED_1,SIMULATED_2 + CHAINLINK_USER_TEAM: CCIP - id: ccip-tests/smoke/ccip_test.go:^TestSmokeCCIPReorgBelowFinality$ path: integration-tests/ccip-tests/smoke/ccip_test.go @@ -1225,6 +1234,7 @@ 
runner-test-matrix: test_cmd: cd integration-tests/ccip-tests/smoke && go test ccip_test.go -test.run ^TestSmokeCCIPReorgBelowFinality$ -timeout 30m -count=1 -test.parallel=1 -json test_env_vars: E2E_TEST_SELECTED_NETWORK: SIMULATED_1,SIMULATED_2 + CHAINLINK_USER_TEAM: CCIP test_config_override_path: integration-tests/ccip-tests/testconfig/tomls/ccip-reorg.toml - id: ccip-tests/smoke/ccip_test.go:^TestSmokeCCIPReorgAboveFinalityAtDestination$ @@ -1238,6 +1248,7 @@ runner-test-matrix: test_cmd: cd integration-tests/ccip-tests/smoke && go test ccip_test.go -test.run ^TestSmokeCCIPReorgAboveFinalityAtDestination$ -timeout 30m -count=1 -test.parallel=1 -json test_env_vars: E2E_TEST_SELECTED_NETWORK: SIMULATED_1,SIMULATED_2 + CHAINLINK_USER_TEAM: CCIP test_config_override_path: integration-tests/ccip-tests/testconfig/tomls/ccip-reorg.toml - id: ccip-tests/smoke/ccip_test.go:^TestSmokeCCIPReorgAboveFinalityAtSource$ @@ -1251,6 +1262,7 @@ runner-test-matrix: test_cmd: cd integration-tests/ccip-tests/smoke && go test ccip_test.go -test.run ^TestSmokeCCIPReorgAboveFinalityAtSource$ -timeout 30m -count=1 -test.parallel=1 -json test_env_vars: E2E_TEST_SELECTED_NETWORK: SIMULATED_1,SIMULATED_2 + CHAINLINK_USER_TEAM: CCIP test_config_override_path: integration-tests/ccip-tests/testconfig/tomls/ccip-reorg.toml - id: integration-tests/ccip-tests/load/ccip_test.go:TestLoadCCIPStableRPS @@ -1262,8 +1274,8 @@ runner-test-matrix: TEST_SUITE: ccip-load E2E_TEST_GRAFANA_DASHBOARD_URL: "/d/6vjVx-1V8/ccip-long-running-tests" triggers: - - E2E CCIP Load Tests - test_artifacts_on_failure: + - E2E CCIP Load Tests + test_artifacts_on_failure: - ./integration-tests/load/logs/payload_ccip.json # Enable when CCIP-2277 is resolved @@ -1277,8 +1289,8 @@ runner-test-matrix: # test_env_vars: # E2E_TEST_GRAFANA_DASHBOARD_URL: "/d/6vjVx-1V8/ccip-long-running-tests" # triggers: - # - E2E CCIP Load Tests - # test_artifacts_on_failure: + # - E2E CCIP Load Tests + # test_artifacts_on_failure: # - ./integration-tests/load/logs/payload_ccip.json - id: ccip-tests/chaos/ccip_test.go @@ -1306,5 +1318,5 @@ runner-test-matrix: TEST_TRIGGERED_BY: ccip-cron-chaos-eth TEST_LOG_LEVEL: debug E2E_TEST_GRAFANA_DASHBOARD_URL: /d/6vjVx-1V8/ccip-long-running-tests - + # END: CCIP tests From 385798d3ba02b98a1fdafcb2f9af63e10f29e725 Mon Sep 17 00:00:00 2001 From: Cedric Date: Thu, 12 Dec 2024 10:12:16 +0000 Subject: [PATCH 09/15] [CAPPL-364] Return an error when secrets are empty (#15635) --- core/services/workflows/syncer/handler.go | 5 ++-- core/services/workflows/syncer/orm.go | 4 +++ core/services/workflows/syncer/orm_test.go | 34 ++++++++++++++++++++++ 3 files changed, 41 insertions(+), 2 deletions(-) diff --git a/core/services/workflows/syncer/handler.go b/core/services/workflows/syncer/handler.go index b88527f905d..f3392a8489a 100644 --- a/core/services/workflows/syncer/handler.go +++ b/core/services/workflows/syncer/handler.go @@ -456,12 +456,13 @@ func (h *eventHandler) workflowRegisteredEvent( } wfID := hex.EncodeToString(payload.WorkflowID[:]) + owner := hex.EncodeToString(payload.WorkflowOwner) entry := &job.WorkflowSpec{ Workflow: hex.EncodeToString(decodedBinary), Config: string(config), WorkflowID: wfID, Status: status, - WorkflowOwner: hex.EncodeToString(payload.WorkflowOwner), + WorkflowOwner: owner, WorkflowName: payload.WorkflowName, SpecType: job.WASMFile, BinaryURL: payload.BinaryURL, @@ -480,7 +481,7 @@ func (h *eventHandler) workflowRegisteredEvent( engine, err := h.engineFactory( ctx, wfID, - string(payload.WorkflowOwner), 
+ owner, payload.WorkflowName, config, decodedBinary, diff --git a/core/services/workflows/syncer/orm.go b/core/services/workflows/syncer/orm.go index 97f2c834f36..9980d8e7b78 100644 --- a/core/services/workflows/syncer/orm.go +++ b/core/services/workflows/syncer/orm.go @@ -161,6 +161,10 @@ func (orm *orm) GetContentsByWorkflowID(ctx context.Context, workflowID string) return "", "", ErrEmptySecrets } + if jr.Contents.String == "" { + return "", "", ErrEmptySecrets + } + return jr.SecretsURLHash.String, jr.Contents.String, nil } diff --git a/core/services/workflows/syncer/orm_test.go b/core/services/workflows/syncer/orm_test.go index 08c60447498..addca5c18e2 100644 --- a/core/services/workflows/syncer/orm_test.go +++ b/core/services/workflows/syncer/orm_test.go @@ -256,3 +256,37 @@ func Test_GetContentsByWorkflowID(t *testing.T) { assert.Equal(t, giveHash, gotHash) assert.Equal(t, giveContent, gotContent) } + +func Test_GetContentsByWorkflowID_SecretsProvidedButEmpty(t *testing.T) { + db := pgtest.NewSqlxDB(t) + ctx := testutils.Context(t) + lggr := logger.TestLogger(t) + orm := &orm{ds: db, lggr: lggr} + + // workflow_id is missing + _, _, err := orm.GetContentsByWorkflowID(ctx, "doesnt-exist") + require.ErrorContains(t, err, "no rows in result set") + + // secrets_id is nil; should return EmptySecrets + workflowID := "aWorkflowID" + giveURL := "https://example.com" + giveBytes, err := crypto.Keccak256([]byte(giveURL)) + require.NoError(t, err) + giveHash := hex.EncodeToString(giveBytes) + giveContent := "" + _, err = orm.UpsertWorkflowSpecWithSecrets(ctx, &job.WorkflowSpec{ + Workflow: "", + Config: "", + WorkflowID: workflowID, + WorkflowOwner: "aWorkflowOwner", + WorkflowName: "aWorkflowName", + BinaryURL: "", + ConfigURL: "", + CreatedAt: time.Now(), + SpecType: job.DefaultSpecType, + }, giveURL, giveHash, giveContent) + require.NoError(t, err) + + _, _, err = orm.GetContentsByWorkflowID(ctx, workflowID) + require.ErrorIs(t, err, ErrEmptySecrets) +} From 5083d473972fca4b3b190f9051d825062c35b82a Mon Sep 17 00:00:00 2001 From: Matthew Pendrey Date: Thu, 12 Dec 2024 10:41:32 +0000 Subject: [PATCH 10/15] temp disable flaky tests (#15595) * temp disable flaky test * update skip method --- core/capabilities/remote/executable/client_test.go | 2 ++ 1 file changed, 2 insertions(+) diff --git a/core/capabilities/remote/executable/client_test.go b/core/capabilities/remote/executable/client_test.go index 5c4da350b9e..0314f62b1b7 100644 --- a/core/capabilities/remote/executable/client_test.go +++ b/core/capabilities/remote/executable/client_test.go @@ -29,6 +29,7 @@ const ( ) func Test_Client_DonTopologies(t *testing.T) { + testutils.SkipFlakey(t, "https://smartcontract-it.atlassian.net/browse/CAPPL-363") ctx := testutils.Context(t) transmissionSchedule, err := values.NewMap(map[string]any{ @@ -87,6 +88,7 @@ func Test_Client_DonTopologies(t *testing.T) { } func Test_Client_TransmissionSchedules(t *testing.T) { + testutils.SkipFlakey(t, "https://smartcontract-it.atlassian.net/browse/CAPPL-363") ctx := testutils.Context(t) responseTest := func(t *testing.T, response commoncap.CapabilityResponse, responseError error) { From 1e87a192adc29da00b53671547797ae6480d90d3 Mon Sep 17 00:00:00 2001 From: pablolagreca Date: Thu, 12 Dec 2024 11:25:24 -0300 Subject: [PATCH 11/15] [INTAUTO-308] - Adding Solana specific chain client and state (#15576) --- deployment/ccip/changeset/solana_state.go | 6 ++++++ deployment/ccip/changeset/state.go | 3 ++- deployment/environment.go | 2 +- deployment/solana_chain.go | 5 
+++++ 4 files changed, 14 insertions(+), 2 deletions(-) create mode 100644 deployment/ccip/changeset/solana_state.go create mode 100644 deployment/solana_chain.go diff --git a/deployment/ccip/changeset/solana_state.go b/deployment/ccip/changeset/solana_state.go new file mode 100644 index 00000000000..4e5507cfcd3 --- /dev/null +++ b/deployment/ccip/changeset/solana_state.go @@ -0,0 +1,6 @@ +package changeset + +// SolChainState holds a Go binding for all the currently deployed CCIP programs +// on a chain. If a binding is nil, it means here is no such contract on the chain. +type SolCCIPChainState struct { +} diff --git a/deployment/ccip/changeset/state.go b/deployment/ccip/changeset/state.go index 7453195d304..af982f35e0a 100644 --- a/deployment/ccip/changeset/state.go +++ b/deployment/ccip/changeset/state.go @@ -252,7 +252,8 @@ type CCIPOnChainState struct { // Populated go bindings for the appropriate version for all contracts. // We would hold 2 versions of each contract here. Once we upgrade we can phase out the old one. // When generating bindings, make sure the package name corresponds to the version. - Chains map[uint64]CCIPChainState + Chains map[uint64]CCIPChainState + SolChains map[uint64]SolCCIPChainState } func (s CCIPOnChainState) View(chains []uint64) (map[string]view.ChainView, error) { diff --git a/deployment/environment.go b/deployment/environment.go index 3d120adbbf1..c9de89b8c0c 100644 --- a/deployment/environment.go +++ b/deployment/environment.go @@ -95,6 +95,7 @@ type Environment struct { Logger logger.Logger ExistingAddresses AddressBook Chains map[uint64]Chain + SolChains map[uint64]SolChain NodeIDs []string Offchain OffchainClient GetContext func() context.Context @@ -331,7 +332,6 @@ func NodeInfo(nodeIDs []string, oc NodeChainConfigsLister) (Nodes, error) { Enabled: 1, Ids: nodeIDs, } - } nodesFromJD, err := oc.ListNodes(context.Background(), &nodev1.ListNodesRequest{ Filter: filter, diff --git a/deployment/solana_chain.go b/deployment/solana_chain.go new file mode 100644 index 00000000000..338642e3e32 --- /dev/null +++ b/deployment/solana_chain.go @@ -0,0 +1,5 @@ +package deployment + +// SolChain represents a Solana chain. 
+type SolChain struct { +} From c68dcc8ef70ff08954a06c343ce17765d34a369d Mon Sep 17 00:00:00 2001 From: Mateusz Sekara Date: Thu, 12 Dec 2024 15:28:41 +0100 Subject: [PATCH 12/15] CCIP-4448 Track observation/outcome length in bytes (#15656) * Track observation/outcome length * Track observation/outcome length * Post review fixes --- core/services/ocr3/promwrapper/factory.go | 1 + core/services/ocr3/promwrapper/plugin.go | 20 +++++++++- core/services/ocr3/promwrapper/plugin_test.go | 40 ++++++++++++++----- core/services/ocr3/promwrapper/types.go | 7 ++++ 4 files changed, 55 insertions(+), 13 deletions(-) diff --git a/core/services/ocr3/promwrapper/factory.go b/core/services/ocr3/promwrapper/factory.go index 6518cea3c0d..e369b3260ef 100644 --- a/core/services/ocr3/promwrapper/factory.go +++ b/core/services/ocr3/promwrapper/factory.go @@ -47,6 +47,7 @@ func (r ReportingPluginFactory[RI]) NewReportingPlugin(ctx context.Context, conf config.ConfigDigest.String(), promOCR3ReportsGenerated, promOCR3Durations, + promOCR3Sizes, promOCR3PluginStatus, ) return wrapped, info, err diff --git a/core/services/ocr3/promwrapper/plugin.go b/core/services/ocr3/promwrapper/plugin.go index dcee5050d1e..aa5fb87a6ee 100644 --- a/core/services/ocr3/promwrapper/plugin.go +++ b/core/services/ocr3/promwrapper/plugin.go @@ -21,6 +21,7 @@ type reportingPlugin[RI any] struct { // Prometheus components for tracking metrics reportsGenerated *prometheus.CounterVec durations *prometheus.HistogramVec + sizes *prometheus.CounterVec status *prometheus.GaugeVec } @@ -31,6 +32,7 @@ func newReportingPlugin[RI any]( configDigest string, reportsGenerated *prometheus.CounterVec, durations *prometheus.HistogramVec, + sizes *prometheus.CounterVec, status *prometheus.GaugeVec, ) *reportingPlugin[RI] { return &reportingPlugin[RI]{ @@ -40,6 +42,7 @@ func newReportingPlugin[RI any]( configDigest: configDigest, reportsGenerated: reportsGenerated, durations: durations, + sizes: sizes, status: status, } } @@ -51,9 +54,11 @@ func (p *reportingPlugin[RI]) Query(ctx context.Context, outctx ocr3types.Outcom } func (p *reportingPlugin[RI]) Observation(ctx context.Context, outctx ocr3types.OutcomeContext, query ocrtypes.Query) (ocrtypes.Observation, error) { - return withObservedExecution(p, observation, func() (ocrtypes.Observation, error) { + result, err := withObservedExecution(p, observation, func() (ocrtypes.Observation, error) { return p.ReportingPlugin.Observation(ctx, outctx, query) }) + p.trackSize(observation, len(result), err) + return result, err } func (p *reportingPlugin[RI]) ValidateObservation(ctx context.Context, outctx ocr3types.OutcomeContext, query ocrtypes.Query, ao ocrtypes.AttributedObservation) error { @@ -65,9 +70,11 @@ func (p *reportingPlugin[RI]) ValidateObservation(ctx context.Context, outctx oc } func (p *reportingPlugin[RI]) Outcome(ctx context.Context, outctx ocr3types.OutcomeContext, query ocrtypes.Query, aos []ocrtypes.AttributedObservation) (ocr3types.Outcome, error) { - return withObservedExecution(p, outcome, func() (ocr3types.Outcome, error) { + result, err := withObservedExecution(p, outcome, func() (ocr3types.Outcome, error) { return p.ReportingPlugin.Outcome(ctx, outctx, query, aos) }) + p.trackSize(outcome, len(result), err) + return result, err } func (p *reportingPlugin[RI]) Reports(ctx context.Context, seqNr uint64, outcome ocr3types.Outcome) ([]ocr3types.ReportPlus[RI], error) { @@ -111,6 +118,15 @@ func (p *reportingPlugin[RI]) updateStatus(status bool) { Set(float64(boolToInt(status))) } +func (p 
*reportingPlugin[RI]) trackSize(function functionType, size int, err error) { + if err != nil { + return + } + p.sizes. + WithLabelValues(p.chainID, p.plugin, string(function)). + Add(float64(size)) +} + func boolToInt(arg bool) int { if arg { return 1 diff --git a/core/services/ocr3/promwrapper/plugin_test.go b/core/services/ocr3/promwrapper/plugin_test.go index 9a7b6f2e648..a10a467799f 100644 --- a/core/services/ocr3/promwrapper/plugin_test.go +++ b/core/services/ocr3/promwrapper/plugin_test.go @@ -17,17 +17,20 @@ import ( ) func Test_ReportsGeneratedGauge(t *testing.T) { + pluginObservationSize := 5 + pluginOutcomeSize := 3 + plugin1 := newReportingPlugin( fakePlugin[uint]{reports: make([]ocr3types.ReportPlus[uint], 2)}, - "123", "empty", "abc", promOCR3ReportsGenerated, promOCR3Durations, promOCR3PluginStatus, + "123", "empty", "abc", promOCR3ReportsGenerated, promOCR3Durations, promOCR3Sizes, promOCR3PluginStatus, ) plugin2 := newReportingPlugin( - fakePlugin[bool]{reports: make([]ocr3types.ReportPlus[bool], 10)}, - "solana", "different_plugin", "abc", promOCR3ReportsGenerated, promOCR3Durations, promOCR3PluginStatus, + fakePlugin[bool]{reports: make([]ocr3types.ReportPlus[bool], 10), observationSize: pluginObservationSize, outcomeSize: pluginOutcomeSize}, + "solana", "different_plugin", "abc", promOCR3ReportsGenerated, promOCR3Durations, promOCR3Sizes, promOCR3PluginStatus, ) plugin3 := newReportingPlugin( fakePlugin[string]{err: errors.New("error")}, - "1234", "empty", "abc", promOCR3ReportsGenerated, promOCR3Durations, promOCR3PluginStatus, + "1234", "empty", "abc", promOCR3ReportsGenerated, promOCR3Durations, promOCR3Sizes, promOCR3PluginStatus, ) r1, err := plugin1.Reports(tests.Context(t), 1, nil) @@ -64,20 +67,33 @@ func Test_ReportsGeneratedGauge(t *testing.T) { require.NoError(t, plugin1.Close()) pluginHealth = testutil.ToFloat64(promOCR3PluginStatus.WithLabelValues("123", "empty", "abc")) require.Equal(t, 0, int(pluginHealth)) + + iterations := 10 + for i := 0; i < iterations; i++ { + _, err1 := plugin2.Outcome(tests.Context(t), ocr3types.OutcomeContext{}, nil, nil) + require.NoError(t, err1) + } + _, err1 := plugin2.Observation(tests.Context(t), ocr3types.OutcomeContext{}, nil) + require.NoError(t, err1) + + outcomesLen := testutil.ToFloat64(promOCR3Sizes.WithLabelValues("solana", "different_plugin", "outcome")) + require.Equal(t, pluginOutcomeSize*iterations, int(outcomesLen)) + observationLen := testutil.ToFloat64(promOCR3Sizes.WithLabelValues("solana", "different_plugin", "observation")) + require.Equal(t, pluginObservationSize, int(observationLen)) } func Test_DurationHistograms(t *testing.T) { plugin1 := newReportingPlugin( fakePlugin[uint]{}, - "123", "empty", "abc", promOCR3ReportsGenerated, promOCR3Durations, promOCR3PluginStatus, + "123", "empty", "abc", promOCR3ReportsGenerated, promOCR3Durations, promOCR3Sizes, promOCR3PluginStatus, ) plugin2 := newReportingPlugin( fakePlugin[uint]{err: errors.New("error")}, - "123", "empty", "abc", promOCR3ReportsGenerated, promOCR3Durations, promOCR3PluginStatus, + "123", "empty", "abc", promOCR3ReportsGenerated, promOCR3Durations, promOCR3Sizes, promOCR3PluginStatus, ) plugin3 := newReportingPlugin( fakePlugin[uint]{}, - "solana", "commit", "abc", promOCR3ReportsGenerated, promOCR3Durations, promOCR3PluginStatus, + "solana", "commit", "abc", promOCR3ReportsGenerated, promOCR3Durations, promOCR3Sizes, promOCR3PluginStatus, ) for _, p := range []*reportingPlugin[uint]{plugin1, plugin2, plugin3} { @@ -102,8 +118,10 @@ func 
Test_DurationHistograms(t *testing.T) { } type fakePlugin[RI any] struct { - reports []ocr3types.ReportPlus[RI] - err error + reports []ocr3types.ReportPlus[RI] + observationSize int + outcomeSize int + err error } func (f fakePlugin[RI]) Query(context.Context, ocr3types.OutcomeContext) (ocrtypes.Query, error) { @@ -117,7 +135,7 @@ func (f fakePlugin[RI]) Observation(context.Context, ocr3types.OutcomeContext, o if f.err != nil { return nil, f.err } - return ocrtypes.Observation{}, nil + return make([]byte, f.observationSize), nil } func (f fakePlugin[RI]) ValidateObservation(context.Context, ocr3types.OutcomeContext, ocrtypes.Query, ocrtypes.AttributedObservation) error { @@ -132,7 +150,7 @@ func (f fakePlugin[RI]) Outcome(context.Context, ocr3types.OutcomeContext, ocrty if f.err != nil { return nil, f.err } - return ocr3types.Outcome{}, nil + return make([]byte, f.outcomeSize), nil } func (f fakePlugin[RI]) Reports(context.Context, uint64, ocr3types.Outcome) ([]ocr3types.ReportPlus[RI], error) { diff --git a/core/services/ocr3/promwrapper/types.go b/core/services/ocr3/promwrapper/types.go index 2fa29dcdf20..59468358783 100644 --- a/core/services/ocr3/promwrapper/types.go +++ b/core/services/ocr3/promwrapper/types.go @@ -48,6 +48,13 @@ var ( }, []string{"chainID", "plugin", "function", "success"}, ) + promOCR3Sizes = promauto.NewCounterVec( + prometheus.CounterOpts{ + Name: "ocr3_reporting_plugin_data_sizes", + Help: "Tracks the size of the data produced by OCR3 plugin in bytes (e.g. reports, observations etc.)", + }, + []string{"chainID", "plugin", "function"}, + ) promOCR3PluginStatus = promauto.NewGaugeVec( prometheus.GaugeOpts{ Name: "ocr3_reporting_plugin_status", From 86ccd475a5ffb4dcad294414422b15b7657a5991 Mon Sep 17 00:00:00 2001 From: Makram Date: Thu, 12 Dec 2024 16:31:59 +0200 Subject: [PATCH 13/15] integration-tests/smoke/ccip: skip rmn tests (#15661) No end to the flakes. Skipping until we can fix them. 
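The tests below are disabled with an unconditional t.Skip at the top of each test body, while PATCH 10 earlier in this series routes the same idea through testutils.SkipFlakey so the skip carries a tracking link. Purely as an illustrative sketch (not the repository's actual implementation of that helper), such a helper only needs to be:

    package testutils

    import "testing"

    // SkipFlakey marks the test as skipped and records the ticket that tracks
    // the flake, so the reason is visible in test output and easy to grep for.
    func SkipFlakey(t *testing.T, ticketURL string) {
        t.Helper()
        t.Skip("flaky test, tracked in: " + ticketURL)
    }

Keeping the ticket URL in the skip message makes it easy to audit later which skips are still warranted.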
--- integration-tests/smoke/ccip/ccip_rmn_test.go | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/integration-tests/smoke/ccip/ccip_rmn_test.go b/integration-tests/smoke/ccip/ccip_rmn_test.go index adf07be290f..c22f9bcf20e 100644 --- a/integration-tests/smoke/ccip/ccip_rmn_test.go +++ b/integration-tests/smoke/ccip/ccip_rmn_test.go @@ -35,6 +35,7 @@ import ( ) func TestRMN_TwoMessagesOnTwoLanesIncludingBatching(t *testing.T) { + t.Skip("This test is flaky and needs to be fixed") runRmnTestCase(t, rmnTestCase{ name: "messages on two lanes including batching", waitForExec: true, @@ -58,6 +59,7 @@ func TestRMN_TwoMessagesOnTwoLanesIncludingBatching(t *testing.T) { } func TestRMN_MultipleMessagesOnOneLaneNoWaitForExec(t *testing.T) { + t.Skip("This test is flaky and needs to be fixed") runRmnTestCase(t, rmnTestCase{ name: "multiple messages for rmn batching inspection and one rmn node down", waitForExec: false, // do not wait for execution reports @@ -80,6 +82,7 @@ func TestRMN_MultipleMessagesOnOneLaneNoWaitForExec(t *testing.T) { } func TestRMN_NotEnoughObservers(t *testing.T) { + t.Skip("This test is flaky and needs to be fixed") runRmnTestCase(t, rmnTestCase{ name: "one message but not enough observers, should not get a commit report", passIfNoCommitAfter: 15 * time.Second, @@ -102,6 +105,7 @@ func TestRMN_NotEnoughObservers(t *testing.T) { } func TestRMN_DifferentSigners(t *testing.T) { + t.Skip("This test is flaky and needs to be fixed") runRmnTestCase(t, rmnTestCase{ name: "different signers and different observers", homeChainConfig: homeChainConfig{ @@ -126,6 +130,7 @@ func TestRMN_DifferentSigners(t *testing.T) { } func TestRMN_NotEnoughSigners(t *testing.T) { + t.Skip("This test is flaky and needs to be fixed") runRmnTestCase(t, rmnTestCase{ name: "different signers and different observers", passIfNoCommitAfter: 15 * time.Second, @@ -151,6 +156,7 @@ func TestRMN_NotEnoughSigners(t *testing.T) { } func TestRMN_DifferentRmnNodesForDifferentChains(t *testing.T) { + t.Skip("This test is flaky and needs to be fixed") runRmnTestCase(t, rmnTestCase{ name: "different rmn nodes support different chains", waitForExec: false, @@ -177,6 +183,7 @@ func TestRMN_DifferentRmnNodesForDifferentChains(t *testing.T) { } func TestRMN_TwoMessagesOneSourceChainCursed(t *testing.T) { + t.Skip("This test is flaky and needs to be fixed") runRmnTestCase(t, rmnTestCase{ name: "two messages, one source chain is cursed", passIfNoCommitAfter: 15 * time.Second, @@ -203,6 +210,7 @@ func TestRMN_TwoMessagesOneSourceChainCursed(t *testing.T) { } func TestRMN_GlobalCurseTwoMessagesOnTwoLanes(t *testing.T) { + t.Skip("This test is flaky and needs to be fixed") runRmnTestCase(t, rmnTestCase{ name: "global curse messages on two lanes", waitForExec: false, From 771151b209003ad0dd975642382639b84ec76572 Mon Sep 17 00:00:00 2001 From: Street <5597260+MStreet3@users.noreply.github.com> Date: Thu, 12 Dec 2024 09:32:11 -0500 Subject: [PATCH 14/15] fix(workflow/syncer): upsert spec with existing secrets (#15655) --- core/services/workflows/syncer/orm.go | 7 +- core/services/workflows/syncer/orm_test.go | 83 ++++++++++++++++++++++ 2 files changed, 88 insertions(+), 2 deletions(-) diff --git a/core/services/workflows/syncer/orm.go b/core/services/workflows/syncer/orm.go index 9980d8e7b78..bd0501795e6 100644 --- a/core/services/workflows/syncer/orm.go +++ b/core/services/workflows/syncer/orm.go @@ -332,10 +332,13 @@ func (orm *orm) UpsertWorkflowSpecWithSecrets( status = EXCLUDED.status, binary_url = EXCLUDED.binary_url, 
config_url = EXCLUDED.config_url, - secrets_id = EXCLUDED.secrets_id, created_at = EXCLUDED.created_at, updated_at = EXCLUDED.updated_at, - spec_type = EXCLUDED.spec_type + spec_type = EXCLUDED.spec_type, + secrets_id = CASE + WHEN workflow_specs.secrets_id IS NULL THEN EXCLUDED.secrets_id + ELSE workflow_specs.secrets_id + END RETURNING id ` diff --git a/core/services/workflows/syncer/orm_test.go b/core/services/workflows/syncer/orm_test.go index addca5c18e2..a94233e78a1 100644 --- a/core/services/workflows/syncer/orm_test.go +++ b/core/services/workflows/syncer/orm_test.go @@ -290,3 +290,86 @@ func Test_GetContentsByWorkflowID_SecretsProvidedButEmpty(t *testing.T) { _, _, err = orm.GetContentsByWorkflowID(ctx, workflowID) require.ErrorIs(t, err, ErrEmptySecrets) } + +func Test_UpsertWorkflowSpecWithSecrets(t *testing.T) { + db := pgtest.NewSqlxDB(t) + ctx := testutils.Context(t) + lggr := logger.TestLogger(t) + orm := &orm{ds: db, lggr: lggr} + + t.Run("inserts new spec and new secrets", func(t *testing.T) { + giveURL := "https://example.com" + giveBytes, err := crypto.Keccak256([]byte(giveURL)) + require.NoError(t, err) + giveHash := hex.EncodeToString(giveBytes) + giveContent := "some contents" + + spec := &job.WorkflowSpec{ + Workflow: "test_workflow", + Config: "test_config", + WorkflowID: "cid-123", + WorkflowOwner: "owner-123", + WorkflowName: "Test Workflow", + Status: job.WorkflowSpecStatusActive, + BinaryURL: "http://example.com/binary", + ConfigURL: "http://example.com/config", + CreatedAt: time.Now(), + SpecType: job.WASMFile, + } + + _, err = orm.UpsertWorkflowSpecWithSecrets(ctx, spec, giveURL, giveHash, giveContent) + require.NoError(t, err) + + // Verify the record exists in the database + var dbSpec job.WorkflowSpec + err = db.Get(&dbSpec, `SELECT * FROM workflow_specs WHERE workflow_owner = $1 AND workflow_name = $2`, spec.WorkflowOwner, spec.WorkflowName) + require.NoError(t, err) + require.Equal(t, spec.Workflow, dbSpec.Workflow) + + // Verify the secrets exists in the database + contents, err := orm.GetContents(ctx, giveURL) + require.NoError(t, err) + require.Equal(t, giveContent, contents) + }) + + t.Run("updates existing spec and secrets", func(t *testing.T) { + giveURL := "https://example.com" + giveBytes, err := crypto.Keccak256([]byte(giveURL)) + require.NoError(t, err) + giveHash := hex.EncodeToString(giveBytes) + giveContent := "some contents" + + spec := &job.WorkflowSpec{ + Workflow: "test_workflow", + Config: "test_config", + WorkflowID: "cid-123", + WorkflowOwner: "owner-123", + WorkflowName: "Test Workflow", + Status: job.WorkflowSpecStatusActive, + BinaryURL: "http://example.com/binary", + ConfigURL: "http://example.com/config", + CreatedAt: time.Now(), + SpecType: job.WASMFile, + } + + _, err = orm.UpsertWorkflowSpecWithSecrets(ctx, spec, giveURL, giveHash, giveContent) + require.NoError(t, err) + + // Update the status + spec.Status = job.WorkflowSpecStatusPaused + + _, err = orm.UpsertWorkflowSpecWithSecrets(ctx, spec, giveURL, giveHash, "new contents") + require.NoError(t, err) + + // Verify the record is updated in the database + var dbSpec job.WorkflowSpec + err = db.Get(&dbSpec, `SELECT * FROM workflow_specs WHERE workflow_owner = $1 AND workflow_name = $2`, spec.WorkflowOwner, spec.WorkflowName) + require.NoError(t, err) + require.Equal(t, spec.Config, dbSpec.Config) + + // Verify the secrets is updated in the database + contents, err := orm.GetContents(ctx, giveURL) + require.NoError(t, err) + require.Equal(t, "new contents", contents) + }) +} 
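The fix above hinges on the ON CONFLICT clause: an existing non-NULL secrets_id wins over the incoming value, so re-upserting a workflow spec can no longer drop its link to previously stored secrets (the new Test_UpsertWorkflowSpecWithSecrets cases exercise this). As a minimal, self-contained sketch of the same pattern — the table layout, unique constraint, driver and DSN below are simplified assumptions for illustration, not the repository's actual schema or setup — note that the CASE expression behaves like COALESCE(workflow_specs.secrets_id, EXCLUDED.secrets_id):

    package main

    import (
        "database/sql"
        "fmt"
        "log"

        _ "github.com/lib/pq" // registers the "postgres" driver
    )

    // upsertSpecKeepingSecrets inserts or updates a spec row; on conflict the
    // config is refreshed, but a secrets_id that is already set is preserved
    // and only a NULL secrets_id is overwritten by the incoming value.
    func upsertSpecKeepingSecrets(db *sql.DB, owner, name, config string, secretsID int64) error {
        const q = `
            INSERT INTO workflow_specs (workflow_owner, workflow_name, config, secrets_id)
            VALUES ($1, $2, $3, $4)
            ON CONFLICT (workflow_owner, workflow_name) DO UPDATE SET
                config     = EXCLUDED.config,
                secrets_id = CASE
                    WHEN workflow_specs.secrets_id IS NULL THEN EXCLUDED.secrets_id
                    ELSE workflow_specs.secrets_id
                END`
        _, err := db.Exec(q, owner, name, config, secretsID)
        return err
    }

    func main() {
        db, err := sql.Open("postgres", "postgres://localhost/example?sslmode=disable")
        if err != nil {
            log.Fatal(err)
        }
        defer db.Close()

        if err := upsertSpecKeepingSecrets(db, "owner-123", "Test Workflow", "cfg-v2", 42); err != nil {
            log.Fatal(err)
        }
        fmt.Println("spec upserted; any existing secrets_id was preserved")
    }

The intent, per the SQL in the patch and its new test, is that re-upserting refreshes the spec fields while leaving an already-populated secrets_id in place.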
From dde17518ff7f3dd3fe1d53614f211357944516f0 Mon Sep 17 00:00:00 2001 From: krehermann <16602512+krehermann@users.noreply.github.com> Date: Thu, 12 Dec 2024 08:20:18 -0700 Subject: [PATCH 15/15] refactor helper to use in cli in CLD (#15647) * refactor helper to use in cli in CLD * cleanup cfg and validation * fix tests * parallel ccip tests * refactor mcms utils * ccip wait timeout tests * fix oversights --- .../ccip/changeset/accept_ownership_test.go | 8 +- .../ccip/changeset/cs_add_chain_test.go | 14 +- deployment/ccip/changeset/cs_add_lane_test.go | 1 + .../ccip/changeset/cs_ccip_home_test.go | 22 +- .../ccip/changeset/cs_deploy_chain_test.go | 11 +- .../ccip/changeset/cs_home_chain_test.go | 1 + .../changeset/cs_initial_add_chain_test.go | 6 +- deployment/ccip/changeset/cs_jobspec_test.go | 1 + .../ccip/changeset/cs_update_rmn_config.go | 7 +- .../changeset/cs_update_rmn_config_test.go | 1 + deployment/ccip/changeset/test_assertions.go | 4 +- deployment/ccip/changeset/test_environment.go | 12 +- deployment/ccip/changeset/view_test.go | 1 + .../common/changeset/internal/mcms_test.go | 11 +- deployment/common/changeset/state.go | 96 +----- deployment/common/changeset/test_helpers.go | 8 +- .../transfer_to_mcms_with_timelock_test.go | 12 +- .../common/proposalutils/mcms_helpers.go | 273 ++++++++++++++++++ .../mcms_test_helpers.go | 67 ++--- .../changeset/accept_ownership_test.go | 13 +- .../append_node_capabilities_test.go | 3 +- .../changeset/deploy_forwarder_test.go | 5 +- .../keystone/changeset/deploy_ocr3_test.go | 3 +- deployment/keystone/changeset/helpers_test.go | 11 +- .../keystone/changeset/update_don_test.go | 3 +- .../update_node_capabilities_test.go | 3 +- .../keystone/changeset/update_nodes_test.go | 3 +- 27 files changed, 375 insertions(+), 225 deletions(-) create mode 100644 deployment/common/proposalutils/mcms_helpers.go rename deployment/common/{changeset => proposalutils}/mcms_test_helpers.go (54%) diff --git a/deployment/ccip/changeset/accept_ownership_test.go b/deployment/ccip/changeset/accept_ownership_test.go index 5580b31a85a..1dbef8e7a0b 100644 --- a/deployment/ccip/changeset/accept_ownership_test.go +++ b/deployment/ccip/changeset/accept_ownership_test.go @@ -9,9 +9,11 @@ import ( "golang.org/x/exp/maps" commonchangeset "github.com/smartcontractkit/chainlink/deployment/common/changeset" + "github.com/smartcontractkit/chainlink/deployment/common/proposalutils" ) func Test_NewAcceptOwnershipChangeset(t *testing.T) { + t.Parallel() e := NewMemoryEnvironment(t) state, err := LoadOnchainState(e.Env) require.NoError(t, err) @@ -20,12 +22,12 @@ func Test_NewAcceptOwnershipChangeset(t *testing.T) { source := allChains[0] dest := allChains[1] - timelockContracts := map[uint64]*commonchangeset.TimelockExecutionContracts{ - source: &commonchangeset.TimelockExecutionContracts{ + timelockContracts := map[uint64]*proposalutils.TimelockExecutionContracts{ + source: &proposalutils.TimelockExecutionContracts{ Timelock: state.Chains[source].Timelock, CallProxy: state.Chains[source].CallProxy, }, - dest: &commonchangeset.TimelockExecutionContracts{ + dest: &proposalutils.TimelockExecutionContracts{ Timelock: state.Chains[dest].Timelock, CallProxy: state.Chains[dest].CallProxy, }, diff --git a/deployment/ccip/changeset/cs_add_chain_test.go b/deployment/ccip/changeset/cs_add_chain_test.go index b21d7411ce7..96b77f1bd7d 100644 --- a/deployment/ccip/changeset/cs_add_chain_test.go +++ b/deployment/ccip/changeset/cs_add_chain_test.go @@ -1,12 +1,12 @@ package changeset import ( - 
"math/big" "testing" "time" "github.com/smartcontractkit/chainlink/deployment/ccip/changeset/internal" commonchangeset "github.com/smartcontractkit/chainlink/deployment/common/changeset" + "github.com/smartcontractkit/chainlink/deployment/common/proposalutils" commontypes "github.com/smartcontractkit/chainlink/deployment/common/types" "github.com/smartcontractkit/chainlink/v2/core/capabilities/ccip/types" @@ -30,6 +30,7 @@ import ( ) func TestAddChainInbound(t *testing.T) { + t.Parallel() // 4 chains where the 4th is added after initial deployment. e := NewMemoryEnvironment(t, WithChains(4), @@ -46,12 +47,7 @@ func TestAddChainInbound(t *testing.T) { require.NoError(t, err) require.NoError(t, e.Env.ExistingAddresses.Merge(newAddresses)) - cfg := commontypes.MCMSWithTimelockConfig{ - Canceller: commonchangeset.SingleGroupMCMS(t), - Bypasser: commonchangeset.SingleGroupMCMS(t), - Proposer: commonchangeset.SingleGroupMCMS(t), - TimelockMinDelay: big.NewInt(0), - } + cfg := proposalutils.SingleGroupTimelockConfig(t) e.Env, err = commonchangeset.ApplyChangesets(t, e.Env, nil, []commonchangeset.ChangesetApplication{ { Changeset: commonchangeset.WrapChangeSet(commonchangeset.DeployLinkToken), @@ -152,7 +148,7 @@ func TestAddChainInbound(t *testing.T) { } // transfer ownership to timelock - _, err = commonchangeset.ApplyChangesets(t, e.Env, map[uint64]*commonchangeset.TimelockExecutionContracts{ + _, err = commonchangeset.ApplyChangesets(t, e.Env, map[uint64]*proposalutils.TimelockExecutionContracts{ initialDeploy[0]: { Timelock: state.Chains[initialDeploy[0]].Timelock, CallProxy: state.Chains[initialDeploy[0]].CallProxy, @@ -194,7 +190,7 @@ func TestAddChainInbound(t *testing.T) { nodeIDs = append(nodeIDs, node.NodeID) } - _, err = commonchangeset.ApplyChangesets(t, e.Env, map[uint64]*commonchangeset.TimelockExecutionContracts{ + _, err = commonchangeset.ApplyChangesets(t, e.Env, map[uint64]*proposalutils.TimelockExecutionContracts{ e.HomeChainSel: { Timelock: state.Chains[e.HomeChainSel].Timelock, CallProxy: state.Chains[e.HomeChainSel].CallProxy, diff --git a/deployment/ccip/changeset/cs_add_lane_test.go b/deployment/ccip/changeset/cs_add_lane_test.go index 7f1374a1725..5c324c975ef 100644 --- a/deployment/ccip/changeset/cs_add_lane_test.go +++ b/deployment/ccip/changeset/cs_add_lane_test.go @@ -16,6 +16,7 @@ import ( ) func TestAddLanesWithTestRouter(t *testing.T) { + t.Parallel() e := NewMemoryEnvironment(t) // Here we have CR + nodes set up, but no CCIP contracts deployed. 
state, err := LoadOnchainState(e.Env) diff --git a/deployment/ccip/changeset/cs_ccip_home_test.go b/deployment/ccip/changeset/cs_ccip_home_test.go index 92784551957..47f262d3f83 100644 --- a/deployment/ccip/changeset/cs_ccip_home_test.go +++ b/deployment/ccip/changeset/cs_ccip_home_test.go @@ -27,7 +27,7 @@ import ( func TestActiveCandidate(t *testing.T) { t.Skipf("to be enabled after latest cl-ccip is compatible") - + t.Parallel() tenv := NewMemoryEnvironment(t, WithChains(3), WithNodes(5)) @@ -86,9 +86,9 @@ func TestActiveCandidate(t *testing.T) { ConfirmExecWithSeqNrsForAll(t, e, state, expectedSeqNumExec, startBlocks) // compose the transfer ownership and accept ownership changesets - timelockContracts := make(map[uint64]*commonchangeset.TimelockExecutionContracts) + timelockContracts := make(map[uint64]*proposalutils.TimelockExecutionContracts) for _, chain := range allChains { - timelockContracts[chain] = &commonchangeset.TimelockExecutionContracts{ + timelockContracts[chain] = &proposalutils.TimelockExecutionContracts{ Timelock: state.Chains[chain].Timelock, CallProxy: state.Chains[chain].CallProxy, } @@ -176,8 +176,8 @@ func TestActiveCandidate(t *testing.T) { Batch: setCommitCandidateOp, }}, "set new candidates on commit plugin", 0) require.NoError(t, err) - setCommitCandidateSigned := commonchangeset.SignProposal(t, e, setCommitCandidateProposal) - commonchangeset.ExecuteProposal(t, e, setCommitCandidateSigned, &commonchangeset.TimelockExecutionContracts{ + setCommitCandidateSigned := proposalutils.SignProposal(t, e, setCommitCandidateProposal) + proposalutils.ExecuteProposal(t, e, setCommitCandidateSigned, &proposalutils.TimelockExecutionContracts{ Timelock: state.Chains[tenv.HomeChainSel].Timelock, CallProxy: state.Chains[tenv.HomeChainSel].CallProxy, }, tenv.HomeChainSel) @@ -197,8 +197,8 @@ func TestActiveCandidate(t *testing.T) { Batch: setExecCandidateOp, }}, "set new candidates on commit and exec plugins", 0) require.NoError(t, err) - setExecCandidateSigned := commonchangeset.SignProposal(t, e, setExecCandidateProposal) - commonchangeset.ExecuteProposal(t, e, setExecCandidateSigned, &commonchangeset.TimelockExecutionContracts{ + setExecCandidateSigned := proposalutils.SignProposal(t, e, setExecCandidateProposal) + proposalutils.ExecuteProposal(t, e, setExecCandidateSigned, &proposalutils.TimelockExecutionContracts{ Timelock: state.Chains[tenv.HomeChainSel].Timelock, CallProxy: state.Chains[tenv.HomeChainSel].CallProxy, }, tenv.HomeChainSel) @@ -234,8 +234,8 @@ func TestActiveCandidate(t *testing.T) { Batch: promoteOps, }}, "promote candidates and revoke actives", 0) require.NoError(t, err) - promoteSigned := commonchangeset.SignProposal(t, e, promoteProposal) - commonchangeset.ExecuteProposal(t, e, promoteSigned, &commonchangeset.TimelockExecutionContracts{ + promoteSigned := proposalutils.SignProposal(t, e, promoteProposal) + proposalutils.ExecuteProposal(t, e, promoteSigned, &proposalutils.TimelockExecutionContracts{ Timelock: state.Chains[tenv.HomeChainSel].Timelock, CallProxy: state.Chains[tenv.HomeChainSel].CallProxy, }, tenv.HomeChainSel) @@ -298,7 +298,7 @@ func Test_PromoteCandidate(t *testing.T) { if tc.mcmsEnabled { // Transfer ownership to timelock so that we can promote the zero digest later down the line. 
- _, err = commonchangeset.ApplyChangesets(t, tenv.Env, map[uint64]*commonchangeset.TimelockExecutionContracts{ + _, err = commonchangeset.ApplyChangesets(t, tenv.Env, map[uint64]*proposalutils.TimelockExecutionContracts{ source: { Timelock: state.Chains[source].Timelock, CallProxy: state.Chains[source].CallProxy, @@ -345,7 +345,7 @@ func Test_PromoteCandidate(t *testing.T) { MinDelay: 0, } } - _, err = commonchangeset.ApplyChangesets(t, tenv.Env, map[uint64]*commonchangeset.TimelockExecutionContracts{ + _, err = commonchangeset.ApplyChangesets(t, tenv.Env, map[uint64]*proposalutils.TimelockExecutionContracts{ tenv.HomeChainSel: { Timelock: state.Chains[tenv.HomeChainSel].Timelock, CallProxy: state.Chains[tenv.HomeChainSel].CallProxy, diff --git a/deployment/ccip/changeset/cs_deploy_chain_test.go b/deployment/ccip/changeset/cs_deploy_chain_test.go index fbf9c881138..9e1a581112d 100644 --- a/deployment/ccip/changeset/cs_deploy_chain_test.go +++ b/deployment/ccip/changeset/cs_deploy_chain_test.go @@ -3,7 +3,6 @@ package changeset import ( "encoding/json" "fmt" - "math/big" "testing" "github.com/stretchr/testify/require" @@ -11,12 +10,14 @@ import ( "github.com/smartcontractkit/chainlink/deployment" commonchangeset "github.com/smartcontractkit/chainlink/deployment/common/changeset" + "github.com/smartcontractkit/chainlink/deployment/common/proposalutils" commontypes "github.com/smartcontractkit/chainlink/deployment/common/types" "github.com/smartcontractkit/chainlink/deployment/environment/memory" "github.com/smartcontractkit/chainlink/v2/core/logger" ) func TestDeployChainContractsChangeset(t *testing.T) { + t.Parallel() lggr := logger.TestLogger(t) e := memory.NewMemoryEnvironment(t, lggr, zapcore.InfoLevel, memory.MemoryEnvironmentConfig{ Bootstraps: 1, @@ -30,12 +31,7 @@ func TestDeployChainContractsChangeset(t *testing.T) { p2pIds := nodes.NonBootstraps().PeerIDs() cfg := make(map[uint64]commontypes.MCMSWithTimelockConfig) for _, chain := range e.AllChainSelectors() { - cfg[chain] = commontypes.MCMSWithTimelockConfig{ - Canceller: commonchangeset.SingleGroupMCMS(t), - Bypasser: commonchangeset.SingleGroupMCMS(t), - Proposer: commonchangeset.SingleGroupMCMS(t), - TimelockMinDelay: big.NewInt(0), - } + cfg[chain] = proposalutils.SingleGroupTimelockConfig(t) } e, err = commonchangeset.ApplyChangesets(t, e, nil, []commonchangeset.ChangesetApplication{ { @@ -98,6 +94,7 @@ func TestDeployChainContractsChangeset(t *testing.T) { } func TestDeployCCIPContracts(t *testing.T) { + t.Parallel() e := NewMemoryEnvironment(t) // Deploy all the CCIP contracts. 
state, err := LoadOnchainState(e.Env) diff --git a/deployment/ccip/changeset/cs_home_chain_test.go b/deployment/ccip/changeset/cs_home_chain_test.go index a06161f7086..eb620691db0 100644 --- a/deployment/ccip/changeset/cs_home_chain_test.go +++ b/deployment/ccip/changeset/cs_home_chain_test.go @@ -13,6 +13,7 @@ import ( ) func TestDeployHomeChain(t *testing.T) { + t.Parallel() lggr := logger.TestLogger(t) e := memory.NewMemoryEnvironment(t, lggr, zapcore.InfoLevel, memory.MemoryEnvironmentConfig{ Bootstraps: 1, diff --git a/deployment/ccip/changeset/cs_initial_add_chain_test.go b/deployment/ccip/changeset/cs_initial_add_chain_test.go index c1404eb7123..f344068f11b 100644 --- a/deployment/ccip/changeset/cs_initial_add_chain_test.go +++ b/deployment/ccip/changeset/cs_initial_add_chain_test.go @@ -9,10 +9,12 @@ import ( "github.com/stretchr/testify/require" commonchangeset "github.com/smartcontractkit/chainlink/deployment/common/changeset" + "github.com/smartcontractkit/chainlink/deployment/common/proposalutils" "github.com/smartcontractkit/chainlink/v2/core/gethwrappers/ccip/generated/router" ) func TestInitialAddChainAppliedTwice(t *testing.T) { + t.Parallel() // This already applies the initial add chain changeset. e := NewMemoryEnvironment(t) @@ -24,10 +26,10 @@ func TestInitialAddChainAppliedTwice(t *testing.T) { allChains := e.Env.AllChainSelectors() tokenConfig := NewTestTokenConfig(state.Chains[e.FeedChainSel].USDFeeds) chainConfigs := make(map[uint64]CCIPOCRParams) - timelockContractsPerChain := make(map[uint64]*commonchangeset.TimelockExecutionContracts) + timelockContractsPerChain := make(map[uint64]*proposalutils.TimelockExecutionContracts) for _, chain := range allChains { - timelockContractsPerChain[chain] = &commonchangeset.TimelockExecutionContracts{ + timelockContractsPerChain[chain] = &proposalutils.TimelockExecutionContracts{ Timelock: state.Chains[chain].Timelock, CallProxy: state.Chains[chain].CallProxy, } diff --git a/deployment/ccip/changeset/cs_jobspec_test.go b/deployment/ccip/changeset/cs_jobspec_test.go index 21e80e85aa2..a0445b0d5ee 100644 --- a/deployment/ccip/changeset/cs_jobspec_test.go +++ b/deployment/ccip/changeset/cs_jobspec_test.go @@ -13,6 +13,7 @@ import ( ) func TestJobSpecChangeset(t *testing.T) { + t.Parallel() lggr := logger.TestLogger(t) e := memory.NewMemoryEnvironment(t, lggr, zapcore.InfoLevel, memory.MemoryEnvironmentConfig{ Chains: 1, diff --git a/deployment/ccip/changeset/cs_update_rmn_config.go b/deployment/ccip/changeset/cs_update_rmn_config.go index 25ae8308eb5..42eace928c3 100644 --- a/deployment/ccip/changeset/cs_update_rmn_config.go +++ b/deployment/ccip/changeset/cs_update_rmn_config.go @@ -12,7 +12,6 @@ import ( "github.com/smartcontractkit/ccip-owner-contracts/pkg/proposal/mcms" "github.com/smartcontractkit/ccip-owner-contracts/pkg/proposal/timelock" "github.com/smartcontractkit/chainlink/deployment" - commonchangeset "github.com/smartcontractkit/chainlink/deployment/common/changeset" "github.com/smartcontractkit/chainlink/deployment/common/proposalutils" "github.com/smartcontractkit/chainlink/v2/core/gethwrappers/ccip/generated/rmn_home" "github.com/smartcontractkit/chainlink/v2/core/gethwrappers/ccip/generated/rmn_remote" @@ -304,10 +303,10 @@ func NewPromoteCandidateConfigChangeset(e deployment.Environment, config Promote }, nil } -func buildTimelockPerChain(e deployment.Environment, state CCIPOnChainState) map[uint64]*commonchangeset.TimelockExecutionContracts { - timelocksPerChain := 
make(map[uint64]*commonchangeset.TimelockExecutionContracts) +func buildTimelockPerChain(e deployment.Environment, state CCIPOnChainState) map[uint64]*proposalutils.TimelockExecutionContracts { + timelocksPerChain := make(map[uint64]*proposalutils.TimelockExecutionContracts) for _, chain := range e.Chains { - timelocksPerChain[chain.Selector] = &commonchangeset.TimelockExecutionContracts{ + timelocksPerChain[chain.Selector] = &proposalutils.TimelockExecutionContracts{ Timelock: state.Chains[chain.Selector].Timelock, CallProxy: state.Chains[chain.Selector].CallProxy, } diff --git a/deployment/ccip/changeset/cs_update_rmn_config_test.go b/deployment/ccip/changeset/cs_update_rmn_config_test.go index 3ec309182aa..bab70f68fb5 100644 --- a/deployment/ccip/changeset/cs_update_rmn_config_test.go +++ b/deployment/ccip/changeset/cs_update_rmn_config_test.go @@ -56,6 +56,7 @@ func TestUpdateRMNConfig(t *testing.T) { for _, tc := range testCases { t.Run(tc.name, func(t *testing.T) { + t.Parallel() updateRMNConfig(t, tc) }) } diff --git a/deployment/ccip/changeset/test_assertions.go b/deployment/ccip/changeset/test_assertions.go index c0b510acc07..a114e52b361 100644 --- a/deployment/ccip/changeset/test_assertions.go +++ b/deployment/ccip/changeset/test_assertions.go @@ -221,8 +221,8 @@ func ConfirmCommitForAllWithExpectedSeqNums( return false } }, - 3*time.Minute, - 1*time.Second, + tests.WaitTimeout(t), + 2*time.Second, "all commitments did not confirm", ) } diff --git a/deployment/ccip/changeset/test_environment.go b/deployment/ccip/changeset/test_environment.go index ede078254c2..0efa44d108c 100644 --- a/deployment/ccip/changeset/test_environment.go +++ b/deployment/ccip/changeset/test_environment.go @@ -20,6 +20,7 @@ import ( "github.com/smartcontractkit/chainlink/deployment" commonchangeset "github.com/smartcontractkit/chainlink/deployment/common/changeset" + "github.com/smartcontractkit/chainlink/deployment/common/proposalutils" commontypes "github.com/smartcontractkit/chainlink/deployment/common/types" "github.com/smartcontractkit/chainlink/deployment/environment/memory" ) @@ -299,12 +300,7 @@ func NewEnvironmentWithJobsAndContracts(t *testing.T, tc *TestConfigs, tEnv Test mcmsCfg := make(map[uint64]commontypes.MCMSWithTimelockConfig) for _, c := range e.Env.AllChainSelectors() { - mcmsCfg[c] = commontypes.MCMSWithTimelockConfig{ - Canceller: commonchangeset.SingleGroupMCMS(t), - Bypasser: commonchangeset.SingleGroupMCMS(t), - Proposer: commonchangeset.SingleGroupMCMS(t), - TimelockMinDelay: big.NewInt(0), - } + mcmsCfg[c] = proposalutils.SingleGroupTimelockConfig(t) } var ( usdcChains []uint64 @@ -382,9 +378,9 @@ func NewEnvironmentWithJobsAndContracts(t *testing.T, tc *TestConfigs, tEnv Test } // Build the per chain config. 
chainConfigs := make(map[uint64]CCIPOCRParams) - timelockContractsPerChain := make(map[uint64]*commonchangeset.TimelockExecutionContracts) + timelockContractsPerChain := make(map[uint64]*proposalutils.TimelockExecutionContracts) for _, chain := range allChains { - timelockContractsPerChain[chain] = &commonchangeset.TimelockExecutionContracts{ + timelockContractsPerChain[chain] = &proposalutils.TimelockExecutionContracts{ Timelock: state.Chains[chain].Timelock, CallProxy: state.Chains[chain].CallProxy, } diff --git a/deployment/ccip/changeset/view_test.go b/deployment/ccip/changeset/view_test.go index 11430bfbddf..35193979849 100644 --- a/deployment/ccip/changeset/view_test.go +++ b/deployment/ccip/changeset/view_test.go @@ -7,6 +7,7 @@ import ( ) func TestSmokeView(t *testing.T) { + t.Parallel() tenv := NewMemoryEnvironment(t, WithChains(3)) _, err := ViewCCIP(tenv.Env) require.NoError(t, err) diff --git a/deployment/common/changeset/internal/mcms_test.go b/deployment/common/changeset/internal/mcms_test.go index 10fb1d980de..8446aab4bfe 100644 --- a/deployment/common/changeset/internal/mcms_test.go +++ b/deployment/common/changeset/internal/mcms_test.go @@ -2,7 +2,6 @@ package internal_test import ( "encoding/json" - "math/big" "testing" chainsel "github.com/smartcontractkit/chain-selectors" @@ -11,6 +10,7 @@ import ( "github.com/smartcontractkit/chainlink/deployment" "github.com/smartcontractkit/chainlink/deployment/common/changeset" "github.com/smartcontractkit/chainlink/deployment/common/changeset/internal" + "github.com/smartcontractkit/chainlink/deployment/common/proposalutils" "github.com/smartcontractkit/chainlink/deployment/common/types" "github.com/smartcontractkit/chainlink/deployment/environment/memory" "github.com/smartcontractkit/chainlink/v2/core/logger" @@ -23,7 +23,7 @@ func TestDeployMCMSWithConfig(t *testing.T) { }) ab := deployment.NewMemoryAddressBook() _, err := internal.DeployMCMSWithConfig(types.ProposerManyChainMultisig, - lggr, chains[chainsel.TEST_90000001.Selector], ab, changeset.SingleGroupMCMS(t)) + lggr, chains[chainsel.TEST_90000001.Selector], ab, proposalutils.SingleGroupMCMS(t)) require.NoError(t, err) } @@ -35,12 +35,7 @@ func TestDeployMCMSWithTimelockContracts(t *testing.T) { ab := deployment.NewMemoryAddressBook() _, err := internal.DeployMCMSWithTimelockContracts(lggr, chains[chainsel.TEST_90000001.Selector], - ab, types.MCMSWithTimelockConfig{ - Canceller: changeset.SingleGroupMCMS(t), - Bypasser: changeset.SingleGroupMCMS(t), - Proposer: changeset.SingleGroupMCMS(t), - TimelockMinDelay: big.NewInt(0), - }) + ab, proposalutils.SingleGroupTimelockConfig(t)) require.NoError(t, err) addresses, err := ab.AddressesForChain(chainsel.TEST_90000001.Selector) require.NoError(t, err) diff --git a/deployment/common/changeset/state.go b/deployment/common/changeset/state.go index a580c13b40b..c45fe6ba9b5 100644 --- a/deployment/common/changeset/state.go +++ b/deployment/common/changeset/state.go @@ -5,9 +5,9 @@ import ( "fmt" "github.com/ethereum/go-ethereum/common" - owner_helpers "github.com/smartcontractkit/ccip-owner-contracts/pkg/gethwrappers" "github.com/smartcontractkit/chainlink/deployment" + "github.com/smartcontractkit/chainlink/deployment/common/proposalutils" "github.com/smartcontractkit/chainlink/deployment/common/types" "github.com/smartcontractkit/chainlink/deployment/common/view/v1_0" "github.com/smartcontractkit/chainlink/v2/core/gethwrappers/generated/link_token_interface" @@ -19,32 +19,18 @@ import ( // It is public for use in product specific 
packages. // Either all fields are nil or all fields are non-nil. type MCMSWithTimelockState struct { - CancellerMcm *owner_helpers.ManyChainMultiSig - BypasserMcm *owner_helpers.ManyChainMultiSig - ProposerMcm *owner_helpers.ManyChainMultiSig - Timelock *owner_helpers.RBACTimelock - CallProxy *owner_helpers.CallProxy + *proposalutils.MCMSWithTimelockContracts } -// Validate checks that all fields are non-nil, ensuring it's ready -// for use generating views or interactions. -func (state MCMSWithTimelockState) Validate() error { - if state.Timelock == nil { - return errors.New("timelock not found") - } - if state.CancellerMcm == nil { - return errors.New("canceller not found") - } - if state.ProposerMcm == nil { - return errors.New("proposer not found") - } - if state.BypasserMcm == nil { - return errors.New("bypasser not found") - } - if state.CallProxy == nil { - return errors.New("call proxy not found") +func MaybeLoadMCMSWithTimelockState(chain deployment.Chain, addresses map[string]deployment.TypeAndVersion) (*MCMSWithTimelockState, error) { + contracts, err := proposalutils.MaybeLoadMCMSWithTimelockContracts(chain, addresses) + if err != nil { + return nil, err } - return nil + + return &MCMSWithTimelockState{ + MCMSWithTimelockContracts: contracts, + }, nil } func (state MCMSWithTimelockState) GenerateMCMSWithTimelockView() (v1_0.MCMSWithTimelockView, error) { @@ -80,68 +66,6 @@ func (state MCMSWithTimelockState) GenerateMCMSWithTimelockView() (v1_0.MCMSWith }, nil } -// MaybeLoadMCMSWithTimelockState looks for the addresses corresponding to -// contracts deployed with DeployMCMSWithTimelock and loads them into a -// MCMSWithTimelockState struct. If none of the contracts are found, the state struct will be nil. -// An error indicates: -// - Found but was unable to load a contract -// - It only found part of the bundle of contracts -// - If found more than one instance of a contract (we expect one bundle in the given addresses) -func MaybeLoadMCMSWithTimelockState(chain deployment.Chain, addresses map[string]deployment.TypeAndVersion) (*MCMSWithTimelockState, error) { - state := MCMSWithTimelockState{} - // We expect one of each contract on the chain. - timelock := deployment.NewTypeAndVersion(types.RBACTimelock, deployment.Version1_0_0) - callProxy := deployment.NewTypeAndVersion(types.CallProxy, deployment.Version1_0_0) - proposer := deployment.NewTypeAndVersion(types.ProposerManyChainMultisig, deployment.Version1_0_0) - canceller := deployment.NewTypeAndVersion(types.CancellerManyChainMultisig, deployment.Version1_0_0) - bypasser := deployment.NewTypeAndVersion(types.BypasserManyChainMultisig, deployment.Version1_0_0) - - // Ensure we either have the bundle or not. 
- _, err := deployment.AddressesContainBundle(addresses, - map[deployment.TypeAndVersion]struct{}{ - timelock: {}, proposer: {}, canceller: {}, bypasser: {}, callProxy: {}, - }) - if err != nil { - return nil, fmt.Errorf("unable to check MCMS contracts on chain %s error: %w", chain.Name(), err) - } - - for address, tvStr := range addresses { - switch tvStr { - case timelock: - tl, err := owner_helpers.NewRBACTimelock(common.HexToAddress(address), chain.Client) - if err != nil { - return nil, err - } - state.Timelock = tl - case callProxy: - cp, err := owner_helpers.NewCallProxy(common.HexToAddress(address), chain.Client) - if err != nil { - return nil, err - } - state.CallProxy = cp - case proposer: - mcms, err := owner_helpers.NewManyChainMultiSig(common.HexToAddress(address), chain.Client) - if err != nil { - return nil, err - } - state.ProposerMcm = mcms - case bypasser: - mcms, err := owner_helpers.NewManyChainMultiSig(common.HexToAddress(address), chain.Client) - if err != nil { - return nil, err - } - state.BypasserMcm = mcms - case canceller: - mcms, err := owner_helpers.NewManyChainMultiSig(common.HexToAddress(address), chain.Client) - if err != nil { - return nil, err - } - state.CancellerMcm = mcms - } - } - return &state, nil -} - type LinkTokenState struct { LinkToken *link_token.LinkToken } diff --git a/deployment/common/changeset/test_helpers.go b/deployment/common/changeset/test_helpers.go index 8fce5ea79f2..e92b36e5b55 100644 --- a/deployment/common/changeset/test_helpers.go +++ b/deployment/common/changeset/test_helpers.go @@ -9,6 +9,7 @@ import ( "github.com/smartcontractkit/chainlink-testing-framework/lib/utils/testcontext" "github.com/smartcontractkit/chainlink/deployment" + "github.com/smartcontractkit/chainlink/deployment/common/proposalutils" ) type ChangesetApplication struct { @@ -32,7 +33,7 @@ func WrapChangeSet[C any](fn deployment.ChangeSet[C]) func(e deployment.Environm } // ApplyChangesets applies the changeset applications to the environment and returns the updated environment. 
-func ApplyChangesets(t *testing.T, e deployment.Environment, timelockContractsPerChain map[uint64]*TimelockExecutionContracts, changesetApplications []ChangesetApplication) (deployment.Environment, error) { +func ApplyChangesets(t *testing.T, e deployment.Environment, timelockContractsPerChain map[uint64]*proposalutils.TimelockExecutionContracts, changesetApplications []ChangesetApplication) (deployment.Environment, error) { currentEnv := e for i, csa := range changesetApplications { out, err := csa.Changeset(currentEnv, csa.Config) @@ -72,14 +73,14 @@ func ApplyChangesets(t *testing.T, e deployment.Environment, timelockContractsPe chains.Add(uint64(op.ChainIdentifier)) } - signed := SignProposal(t, e, &prop) + signed := proposalutils.SignProposal(t, e, &prop) for _, sel := range chains.ToSlice() { timelockContracts, ok := timelockContractsPerChain[sel] if !ok || timelockContracts == nil { return deployment.Environment{}, fmt.Errorf("timelock contracts not found for chain %d", sel) } - ExecuteProposal(t, e, signed, timelockContracts, sel) + proposalutils.ExecuteProposal(t, e, signed, timelockContracts, sel) } } } @@ -91,6 +92,7 @@ func ApplyChangesets(t *testing.T, e deployment.Environment, timelockContractsPe NodeIDs: e.NodeIDs, Offchain: e.Offchain, OCRSecrets: e.OCRSecrets, + GetContext: e.GetContext, } } return currentEnv, nil diff --git a/deployment/common/changeset/transfer_to_mcms_with_timelock_test.go b/deployment/common/changeset/transfer_to_mcms_with_timelock_test.go index 6c68924b35e..40cef99a54f 100644 --- a/deployment/common/changeset/transfer_to_mcms_with_timelock_test.go +++ b/deployment/common/changeset/transfer_to_mcms_with_timelock_test.go @@ -6,8 +6,7 @@ import ( "github.com/ethereum/go-ethereum/common" "github.com/stretchr/testify/require" - "math/big" - + "github.com/smartcontractkit/chainlink/deployment/common/proposalutils" "github.com/smartcontractkit/chainlink/deployment/common/types" "github.com/smartcontractkit/chainlink/deployment/environment/memory" "github.com/smartcontractkit/chainlink/v2/core/logger" @@ -28,12 +27,7 @@ func TestTransferToMCMSWithTimelock(t *testing.T) { { Changeset: WrapChangeSet(DeployMCMSWithTimelock), Config: map[uint64]types.MCMSWithTimelockConfig{ - chain1: { - Canceller: SingleGroupMCMS(t), - Bypasser: SingleGroupMCMS(t), - Proposer: SingleGroupMCMS(t), - TimelockMinDelay: big.NewInt(0), - }, + chain1: proposalutils.SingleGroupTimelockConfig(t), }, }, }) @@ -44,7 +38,7 @@ func TestTransferToMCMSWithTimelock(t *testing.T) { require.NoError(t, err) link, err := MaybeLoadLinkTokenState(e.Chains[chain1], addrs) require.NoError(t, err) - e, err = ApplyChangesets(t, e, map[uint64]*TimelockExecutionContracts{ + e, err = ApplyChangesets(t, e, map[uint64]*proposalutils.TimelockExecutionContracts{ chain1: { Timelock: state.Timelock, CallProxy: state.CallProxy, diff --git a/deployment/common/proposalutils/mcms_helpers.go b/deployment/common/proposalutils/mcms_helpers.go new file mode 100644 index 00000000000..4a7540761ee --- /dev/null +++ b/deployment/common/proposalutils/mcms_helpers.go @@ -0,0 +1,273 @@ +package proposalutils + +import ( + "bytes" + "encoding/json" + "errors" + "fmt" + + "github.com/ethereum/go-ethereum/accounts/abi/bind" + "github.com/ethereum/go-ethereum/common" + owner_helpers "github.com/smartcontractkit/ccip-owner-contracts/pkg/gethwrappers" + "github.com/smartcontractkit/ccip-owner-contracts/pkg/proposal/mcms" + "github.com/smartcontractkit/chainlink-common/pkg/logger" + "github.com/smartcontractkit/chainlink/deployment" 
+ "github.com/smartcontractkit/chainlink/deployment/common/types" +) + +// TimelockExecutionContracts is a helper struct for executing timelock proposals. it contains +// the timelock and call proxy contracts. +type TimelockExecutionContracts struct { + Timelock *owner_helpers.RBACTimelock + CallProxy *owner_helpers.CallProxy +} + +// NewTimelockExecutionContracts creates a new TimelockExecutionContracts struct. +// If there are multiple timelocks or call proxy on the chain, an error is returned. +// If there is a missing timelocks or call proxy on the chain, an error is returned. +func NewTimelockExecutionContracts(env deployment.Environment, chainSelector uint64) (*TimelockExecutionContracts, error) { + addrTypeVer, err := env.ExistingAddresses.AddressesForChain(chainSelector) + if err != nil { + return nil, fmt.Errorf("error getting addresses for chain: %w", err) + } + var timelock *owner_helpers.RBACTimelock + var callProxy *owner_helpers.CallProxy + for addr, tv := range addrTypeVer { + if tv.Type == types.RBACTimelock { + if timelock != nil { + return nil, fmt.Errorf("multiple timelocks found on chain %d", chainSelector) + } + var err error + timelock, err = owner_helpers.NewRBACTimelock(common.HexToAddress(addr), env.Chains[chainSelector].Client) + if err != nil { + return nil, fmt.Errorf("error creating timelock: %w", err) + } + } + if tv.Type == types.CallProxy { + if callProxy != nil { + return nil, fmt.Errorf("multiple call proxies found on chain %d", chainSelector) + } + var err error + callProxy, err = owner_helpers.NewCallProxy(common.HexToAddress(addr), env.Chains[chainSelector].Client) + if err != nil { + return nil, fmt.Errorf("error creating call proxy: %w", err) + } + } + } + if timelock == nil || callProxy == nil { + return nil, fmt.Errorf("missing timelock (%T) or call proxy(%T) on chain %d", timelock == nil, callProxy == nil, chainSelector) + } + return &TimelockExecutionContracts{ + Timelock: timelock, + CallProxy: callProxy, + }, nil +} + +type RunTimelockExecutorConfig struct { + Executor *mcms.Executor + TimelockContracts *TimelockExecutionContracts + ChainSelector uint64 + // BlockStart is optional. It filter the timelock scheduled events. + // If not provided, the executor assumes that the operations have not been executed yet + // executes all the operations for the given chain. + BlockStart *uint64 + BlockEnd *uint64 +} + +func (cfg RunTimelockExecutorConfig) Validate() error { + if cfg.Executor == nil { + return fmt.Errorf("executor is nil") + } + if cfg.TimelockContracts == nil { + return fmt.Errorf("timelock contracts is nil") + } + if cfg.ChainSelector == 0 { + return fmt.Errorf("chain selector is 0") + } + if cfg.BlockStart != nil && cfg.BlockEnd == nil { + if *cfg.BlockStart > *cfg.BlockEnd { + return fmt.Errorf("block start is greater than block end") + } + } + if cfg.BlockStart == nil && cfg.BlockEnd != nil { + return fmt.Errorf("block start must not be nil when block end is not nil") + } + + if len(cfg.Executor.Operations[mcms.ChainIdentifier(cfg.ChainSelector)]) == 0 { + return fmt.Errorf("no operations for chain %d", cfg.ChainSelector) + } + return nil +} + +// RunTimelockExecutor runs the scheduled operations for the given chain. +// If the block start is not provided, it assumes that the operations have not been scheduled yet +// and executes all the operations for the given chain. +// It is an error if there are no operations for the given chain. 
+func RunTimelockExecutor(env deployment.Environment, cfg RunTimelockExecutorConfig) error { + // TODO: This sort of helper probably should move to the MCMS lib. + // Execute all the transactions in the proposal which are for this chain. + if err := cfg.Validate(); err != nil { + return fmt.Errorf("error validating config: %w", err) + } + for _, chainOp := range cfg.Executor.Operations[mcms.ChainIdentifier(cfg.ChainSelector)] { + for idx, op := range cfg.Executor.ChainAgnosticOps { + start := cfg.BlockStart + end := cfg.BlockEnd + if bytes.Equal(op.Data, chainOp.Data) && op.To == chainOp.To { + if start == nil { + opTx, err2 := cfg.Executor.ExecuteOnChain(env.Chains[cfg.ChainSelector].Client, env.Chains[cfg.ChainSelector].DeployerKey, idx) + if err2 != nil { + return fmt.Errorf("error executing on chain: %w", err2) + } + block, err2 := env.Chains[cfg.ChainSelector].Confirm(opTx) + if err2 != nil { + return fmt.Errorf("error confirming on chain: %w", err2) + } + start = &block + end = &block + } + + it, err2 := cfg.TimelockContracts.Timelock.FilterCallScheduled(&bind.FilterOpts{ + Start: *start, + End: end, + Context: env.GetContext(), + }, nil, nil) + if err2 != nil { + return fmt.Errorf("error filtering call scheduled: %w", err2) + } + var calls []owner_helpers.RBACTimelockCall + var pred, salt [32]byte + for it.Next() { + // Note these are the same for the whole batch, can overwrite + pred = it.Event.Predecessor + salt = it.Event.Salt + verboseDebug(env.Logger, it.Event) + env.Logger.Info("scheduled", "event", it.Event) + calls = append(calls, owner_helpers.RBACTimelockCall{ + Target: it.Event.Target, + Data: it.Event.Data, + Value: it.Event.Value, + }) + } + + timelockExecutorProxy, err := owner_helpers.NewRBACTimelock(cfg.TimelockContracts.CallProxy.Address(), env.Chains[cfg.ChainSelector].Client) + if err != nil { + return fmt.Errorf("error creating timelock executor proxy: %w", err) + } + tx, err := timelockExecutorProxy.ExecuteBatch( + env.Chains[cfg.ChainSelector].DeployerKey, calls, pred, salt) + if err != nil { + return fmt.Errorf("error executing batch: %w", err) + } + _, err = env.Chains[cfg.ChainSelector].Confirm(tx) + if err != nil { + return fmt.Errorf("error confirming batch: %w", err) + } + } + } + } + return nil +} + +func verboseDebug(lggr logger.Logger, event *owner_helpers.RBACTimelockCallScheduled) { + b, err := json.Marshal(event) + if err != nil { + panic(err) + } + lggr.Debug("scheduled", "event", string(b)) +} + +// MCMSWithTimelockContracts holds the Go bindings +// for a MCMSWithTimelock contract deployment. +// It is public for use in product specific packages. +// Either all fields are nil or all fields are non-nil. +type MCMSWithTimelockContracts struct { + CancellerMcm *owner_helpers.ManyChainMultiSig + BypasserMcm *owner_helpers.ManyChainMultiSig + ProposerMcm *owner_helpers.ManyChainMultiSig + Timelock *owner_helpers.RBACTimelock + CallProxy *owner_helpers.CallProxy +} + +// Validate checks that all fields are non-nil, ensuring it's ready +// for use generating views or interactions. 
+func (state MCMSWithTimelockContracts) Validate() error { + if state.Timelock == nil { + return errors.New("timelock not found") + } + if state.CancellerMcm == nil { + return errors.New("canceller not found") + } + if state.ProposerMcm == nil { + return errors.New("proposer not found") + } + if state.BypasserMcm == nil { + return errors.New("bypasser not found") + } + if state.CallProxy == nil { + return errors.New("call proxy not found") + } + return nil +} + +// MaybeLoadMCMSWithTimelockContracts looks for the addresses corresponding to +// contracts deployed with DeployMCMSWithTimelock and loads them into a +// MCMSWithTimelockContracts struct. If none of the contracts are found, the state struct will be nil. +// An error indicates: +// - Found but was unable to load a contract +// - It only found part of the bundle of contracts +// - It found more than one instance of a contract (we expect one bundle in the given addresses) +func MaybeLoadMCMSWithTimelockContracts(chain deployment.Chain, addresses map[string]deployment.TypeAndVersion) (*MCMSWithTimelockContracts, error) { + state := MCMSWithTimelockContracts{} + // We expect one of each contract on the chain. + timelock := deployment.NewTypeAndVersion(types.RBACTimelock, deployment.Version1_0_0) + callProxy := deployment.NewTypeAndVersion(types.CallProxy, deployment.Version1_0_0) + proposer := deployment.NewTypeAndVersion(types.ProposerManyChainMultisig, deployment.Version1_0_0) + canceller := deployment.NewTypeAndVersion(types.CancellerManyChainMultisig, deployment.Version1_0_0) + bypasser := deployment.NewTypeAndVersion(types.BypasserManyChainMultisig, deployment.Version1_0_0) + + // Ensure we either have the bundle or not. + _, err := deployment.AddressesContainBundle(addresses, + map[deployment.TypeAndVersion]struct{}{ + timelock: {}, proposer: {}, canceller: {}, bypasser: {}, callProxy: {}, + }) + if err != nil { + return nil, fmt.Errorf("unable to check MCMS contracts on chain %s error: %w", chain.Name(), err) + } + + for address, tvStr := range addresses { + switch tvStr { + case timelock: + tl, err := owner_helpers.NewRBACTimelock(common.HexToAddress(address), chain.Client) + if err != nil { + return nil, err + } + state.Timelock = tl + case callProxy: + cp, err := owner_helpers.NewCallProxy(common.HexToAddress(address), chain.Client) + if err != nil { + return nil, err + } + state.CallProxy = cp + case proposer: + mcms, err := owner_helpers.NewManyChainMultiSig(common.HexToAddress(address), chain.Client) + if err != nil { + return nil, err + } + state.ProposerMcm = mcms + case bypasser: + mcms, err := owner_helpers.NewManyChainMultiSig(common.HexToAddress(address), chain.Client) + if err != nil { + return nil, err + } + state.BypasserMcm = mcms + case canceller: + mcms, err := owner_helpers.NewManyChainMultiSig(common.HexToAddress(address), chain.Client) + if err != nil { + return nil, err + } + state.CancellerMcm = mcms + } + } + return &state, nil +} diff --git a/deployment/common/changeset/mcms_test_helpers.go b/deployment/common/proposalutils/mcms_test_helpers.go similarity index 54% rename from deployment/common/changeset/mcms_test_helpers.go rename to deployment/common/proposalutils/mcms_test_helpers.go index ffa99114d74..610fe84f34c 100644 --- a/deployment/common/changeset/mcms_test_helpers.go +++ b/deployment/common/proposalutils/mcms_test_helpers.go @@ -1,22 +1,21 @@ -package changeset +package proposalutils import ( - "bytes" - "context" "crypto/ecdsa" + "math/big" "testing" - 
"github.com/ethereum/go-ethereum/accounts/abi/bind" "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/crypto" "github.com/smartcontractkit/ccip-owner-contracts/pkg/config" - owner_helpers "github.com/smartcontractkit/ccip-owner-contracts/pkg/gethwrappers" "github.com/smartcontractkit/ccip-owner-contracts/pkg/proposal/mcms" "github.com/smartcontractkit/ccip-owner-contracts/pkg/proposal/timelock" chainsel "github.com/smartcontractkit/chain-selectors" "github.com/stretchr/testify/require" "github.com/smartcontractkit/chainlink/deployment" + commontypes "github.com/smartcontractkit/chainlink/deployment/common/types" + // "github.com/smartcontractkit/chainlink/deployment/common/proposalutils" ) var ( @@ -25,13 +24,6 @@ var ( TestXXXMCMSSigner *ecdsa.PrivateKey ) -// TimelockExecutionContracts is a helper struct for executing timelock proposals. it contains -// the timelock and call proxy contracts. -type TimelockExecutionContracts struct { - Timelock *owner_helpers.RBACTimelock - CallProxy *owner_helpers.CallProxy -} - func init() { key, err := crypto.GenerateKey() if err != nil { @@ -79,45 +71,22 @@ func ExecuteProposal(t *testing.T, env deployment.Environment, executor *mcms.Ex if err2 != nil { require.NoError(t, deployment.MaybeDataErr(err2)) } + _, err2 = env.Chains[sel].Confirm(tx) require.NoError(t, err2) + cfg := RunTimelockExecutorConfig{ + Executor: executor, + TimelockContracts: timelockContracts, + ChainSelector: sel, + } + require.NoError(t, RunTimelockExecutor(env, cfg)) +} - // TODO: This sort of helper probably should move to the MCMS lib. - // Execute all the transactions in the proposal which are for this chain. - for _, chainOp := range executor.Operations[mcms.ChainIdentifier(sel)] { - for idx, op := range executor.ChainAgnosticOps { - if bytes.Equal(op.Data, chainOp.Data) && op.To == chainOp.To { - opTx, err3 := executor.ExecuteOnChain(env.Chains[sel].Client, env.Chains[sel].DeployerKey, idx) - require.NoError(t, err3) - block, err3 := env.Chains[sel].Confirm(opTx) - require.NoError(t, err3) - t.Log("executed", chainOp) - it, err3 := timelockContracts.Timelock.FilterCallScheduled(&bind.FilterOpts{ - Start: block, - End: &block, - Context: context.Background(), - }, nil, nil) - require.NoError(t, err3) - var calls []owner_helpers.RBACTimelockCall - var pred, salt [32]byte - for it.Next() { - // Note these are the same for the whole batch, can overwrite - pred = it.Event.Predecessor - salt = it.Event.Salt - t.Log("scheduled", it.Event) - calls = append(calls, owner_helpers.RBACTimelockCall{ - Target: it.Event.Target, - Data: it.Event.Data, - Value: it.Event.Value, - }) - } - timelockExecutorProxy, err := owner_helpers.NewRBACTimelock(timelockContracts.CallProxy.Address(), env.Chains[sel].Client) - tx, err := timelockExecutorProxy.ExecuteBatch( - env.Chains[sel].DeployerKey, calls, pred, salt) - require.NoError(t, err) - _, err = env.Chains[sel].Confirm(tx) - require.NoError(t, err) - } - } +func SingleGroupTimelockConfig(t *testing.T) commontypes.MCMSWithTimelockConfig { + return commontypes.MCMSWithTimelockConfig{ + Canceller: SingleGroupMCMS(t), + Bypasser: SingleGroupMCMS(t), + Proposer: SingleGroupMCMS(t), + TimelockMinDelay: big.NewInt(0), } } diff --git a/deployment/keystone/changeset/accept_ownership_test.go b/deployment/keystone/changeset/accept_ownership_test.go index b2aa1b20194..9e9d29e563a 100644 --- a/deployment/keystone/changeset/accept_ownership_test.go +++ b/deployment/keystone/changeset/accept_ownership_test.go @@ -1,7 +1,6 @@ package 
changeset_test import ( - "math/big" "testing" "github.com/stretchr/testify/require" @@ -10,6 +9,7 @@ import ( "github.com/smartcontractkit/chainlink-common/pkg/logger" commonchangeset "github.com/smartcontractkit/chainlink/deployment/common/changeset" + "github.com/smartcontractkit/chainlink/deployment/common/proposalutils" "github.com/smartcontractkit/chainlink/deployment/common/types" "github.com/smartcontractkit/chainlink/deployment/environment/memory" "github.com/smartcontractkit/chainlink/deployment/keystone/changeset" @@ -44,12 +44,7 @@ func TestAcceptAllOwnership(t *testing.T) { { Changeset: commonchangeset.WrapChangeSet(commonchangeset.DeployMCMSWithTimelock), Config: map[uint64]types.MCMSWithTimelockConfig{ - registrySel: { - Canceller: commonchangeset.SingleGroupMCMS(t), - Bypasser: commonchangeset.SingleGroupMCMS(t), - Proposer: commonchangeset.SingleGroupMCMS(t), - TimelockMinDelay: big.NewInt(0), - }, + registrySel: proposalutils.SingleGroupTimelockConfig(t), }, }, }) @@ -59,8 +54,8 @@ func TestAcceptAllOwnership(t *testing.T) { timelock, err := commonchangeset.MaybeLoadMCMSWithTimelockState(env.Chains[registrySel], addrs) require.NoError(t, err) - _, err = commonchangeset.ApplyChangesets(t, env, map[uint64]*commonchangeset.TimelockExecutionContracts{ - registrySel: &commonchangeset.TimelockExecutionContracts{ + _, err = commonchangeset.ApplyChangesets(t, env, map[uint64]*proposalutils.TimelockExecutionContracts{ + registrySel: &proposalutils.TimelockExecutionContracts{ Timelock: timelock.Timelock, CallProxy: timelock.CallProxy, }, diff --git a/deployment/keystone/changeset/append_node_capabilities_test.go b/deployment/keystone/changeset/append_node_capabilities_test.go index 159500ab5a7..bfc01b309f5 100644 --- a/deployment/keystone/changeset/append_node_capabilities_test.go +++ b/deployment/keystone/changeset/append_node_capabilities_test.go @@ -8,6 +8,7 @@ import ( "golang.org/x/exp/maps" commonchangeset "github.com/smartcontractkit/chainlink/deployment/common/changeset" + "github.com/smartcontractkit/chainlink/deployment/common/proposalutils" "github.com/smartcontractkit/chainlink/deployment/keystone/changeset" kcr "github.com/smartcontractkit/chainlink/v2/core/gethwrappers/keystone/generated/capabilities_registry" "github.com/smartcontractkit/chainlink/v2/core/services/keystore/keys/p2pkey" @@ -87,7 +88,7 @@ func TestAppendNodeCapabilities(t *testing.T) { // now apply the changeset such that the proposal is signed and execed contracts := te.ContractSets()[te.RegistrySelector] - timelockContracts := map[uint64]*commonchangeset.TimelockExecutionContracts{ + timelockContracts := map[uint64]*proposalutils.TimelockExecutionContracts{ te.RegistrySelector: { Timelock: contracts.Timelock, CallProxy: contracts.CallProxy, diff --git a/deployment/keystone/changeset/deploy_forwarder_test.go b/deployment/keystone/changeset/deploy_forwarder_test.go index dd894fde9d9..e04bac6d264 100644 --- a/deployment/keystone/changeset/deploy_forwarder_test.go +++ b/deployment/keystone/changeset/deploy_forwarder_test.go @@ -11,6 +11,7 @@ import ( "github.com/smartcontractkit/chainlink-common/pkg/logger" "github.com/smartcontractkit/chainlink/deployment" commonchangeset "github.com/smartcontractkit/chainlink/deployment/common/changeset" + "github.com/smartcontractkit/chainlink/deployment/common/proposalutils" "github.com/smartcontractkit/chainlink/deployment/environment/memory" "github.com/smartcontractkit/chainlink/deployment/keystone/changeset" ) @@ -116,11 +117,11 @@ func TestConfigureForwarders(t 
*testing.T) { require.Len(t, csOut.Proposals, nChains) require.Nil(t, csOut.AddressBook) - timelockContracts := make(map[uint64]*commonchangeset.TimelockExecutionContracts) + timelockContracts := make(map[uint64]*proposalutils.TimelockExecutionContracts) for selector, contractSet := range te.ContractSets() { require.NotNil(t, contractSet.Timelock) require.NotNil(t, contractSet.CallProxy) - timelockContracts[selector] = &commonchangeset.TimelockExecutionContracts{ + timelockContracts[selector] = &proposalutils.TimelockExecutionContracts{ Timelock: contractSet.Timelock, CallProxy: contractSet.CallProxy, } diff --git a/deployment/keystone/changeset/deploy_ocr3_test.go b/deployment/keystone/changeset/deploy_ocr3_test.go index 5d02f83500d..7a276886242 100644 --- a/deployment/keystone/changeset/deploy_ocr3_test.go +++ b/deployment/keystone/changeset/deploy_ocr3_test.go @@ -13,6 +13,7 @@ import ( "github.com/smartcontractkit/chainlink-common/pkg/logger" commonchangeset "github.com/smartcontractkit/chainlink/deployment/common/changeset" + "github.com/smartcontractkit/chainlink/deployment/common/proposalutils" "github.com/smartcontractkit/chainlink/deployment/environment/memory" kslib "github.com/smartcontractkit/chainlink/deployment/keystone" "github.com/smartcontractkit/chainlink/deployment/keystone/changeset" @@ -118,7 +119,7 @@ func TestConfigureOCR3(t *testing.T) { contracts := te.ContractSets()[te.RegistrySelector] require.NoError(t, err) - var timelockContracts = map[uint64]*commonchangeset.TimelockExecutionContracts{ + var timelockContracts = map[uint64]*proposalutils.TimelockExecutionContracts{ te.RegistrySelector: { Timelock: contracts.Timelock, CallProxy: contracts.CallProxy, diff --git a/deployment/keystone/changeset/helpers_test.go b/deployment/keystone/changeset/helpers_test.go index 4e7553d0b8e..d956db991de 100644 --- a/deployment/keystone/changeset/helpers_test.go +++ b/deployment/keystone/changeset/helpers_test.go @@ -8,7 +8,6 @@ import ( "errors" "fmt" "math" - "math/big" "sort" "testing" @@ -21,6 +20,7 @@ import ( "github.com/smartcontractkit/chainlink/deployment" commonchangeset "github.com/smartcontractkit/chainlink/deployment/common/changeset" + "github.com/smartcontractkit/chainlink/deployment/common/proposalutils" commontypes "github.com/smartcontractkit/chainlink/deployment/common/types" "github.com/smartcontractkit/chainlink/deployment/environment/memory" "github.com/smartcontractkit/chainlink/deployment/keystone" @@ -258,12 +258,7 @@ func SetupTestEnv(t *testing.T, c TestConfig) TestEnv { timelockCfgs := make(map[uint64]commontypes.MCMSWithTimelockConfig) for sel := range env.Chains { t.Logf("Enabling MCMS on chain %d", sel) - timelockCfgs[sel] = commontypes.MCMSWithTimelockConfig{ - Canceller: commonchangeset.SingleGroupMCMS(t), - Bypasser: commonchangeset.SingleGroupMCMS(t), - Proposer: commonchangeset.SingleGroupMCMS(t), - TimelockMinDelay: big.NewInt(0), - } + timelockCfgs[sel] = proposalutils.SingleGroupTimelockConfig(t) } env, err = commonchangeset.ApplyChangesets(t, env, nil, []commonchangeset.ChangesetApplication{ { @@ -284,7 +279,7 @@ func SetupTestEnv(t *testing.T, c TestConfig) TestEnv { require.NoError(t, mcms.Validate()) // transfer ownership of all contracts to the MCMS - env, err = commonchangeset.ApplyChangesets(t, env, map[uint64]*commonchangeset.TimelockExecutionContracts{sel: {Timelock: mcms.Timelock, CallProxy: mcms.CallProxy}}, []commonchangeset.ChangesetApplication{ + env, err = commonchangeset.ApplyChangesets(t, env, 
map[uint64]*proposalutils.TimelockExecutionContracts{sel: {Timelock: mcms.Timelock, CallProxy: mcms.CallProxy}}, []commonchangeset.ChangesetApplication{ { Changeset: commonchangeset.WrapChangeSet(kschangeset.AcceptAllOwnershipsProposal), Config: &kschangeset.AcceptAllOwnershipRequest{ diff --git a/deployment/keystone/changeset/update_don_test.go b/deployment/keystone/changeset/update_don_test.go index 18287da6887..64cb41c14e5 100644 --- a/deployment/keystone/changeset/update_don_test.go +++ b/deployment/keystone/changeset/update_don_test.go @@ -7,6 +7,7 @@ import ( "github.com/stretchr/testify/require" commonchangeset "github.com/smartcontractkit/chainlink/deployment/common/changeset" + "github.com/smartcontractkit/chainlink/deployment/common/proposalutils" "github.com/smartcontractkit/chainlink/deployment/keystone/changeset" "github.com/smartcontractkit/chainlink/deployment/keystone/changeset/internal" kcr "github.com/smartcontractkit/chainlink/v2/core/gethwrappers/keystone/generated/capabilities_registry" @@ -118,7 +119,7 @@ func TestUpdateDon(t *testing.T) { // now apply the changeset such that the proposal is signed and execed contracts := te.ContractSets()[te.RegistrySelector] - timelockContracts := map[uint64]*commonchangeset.TimelockExecutionContracts{ + timelockContracts := map[uint64]*proposalutils.TimelockExecutionContracts{ te.RegistrySelector: { Timelock: contracts.Timelock, CallProxy: contracts.CallProxy, diff --git a/deployment/keystone/changeset/update_node_capabilities_test.go b/deployment/keystone/changeset/update_node_capabilities_test.go index cb5588ff3d1..87b49acf614 100644 --- a/deployment/keystone/changeset/update_node_capabilities_test.go +++ b/deployment/keystone/changeset/update_node_capabilities_test.go @@ -8,6 +8,7 @@ import ( "golang.org/x/exp/maps" commonchangeset "github.com/smartcontractkit/chainlink/deployment/common/changeset" + "github.com/smartcontractkit/chainlink/deployment/common/proposalutils" "github.com/smartcontractkit/chainlink/deployment/keystone/changeset" kcr "github.com/smartcontractkit/chainlink/v2/core/gethwrappers/keystone/generated/capabilities_registry" "github.com/smartcontractkit/chainlink/v2/core/services/keystore/keys/p2pkey" @@ -118,7 +119,7 @@ func TestUpdateNodeCapabilities(t *testing.T) { // now apply the changeset such that the proposal is signed and execed contracts := te.ContractSets()[te.RegistrySelector] - timelockContracts := map[uint64]*commonchangeset.TimelockExecutionContracts{ + timelockContracts := map[uint64]*proposalutils.TimelockExecutionContracts{ te.RegistrySelector: { Timelock: contracts.Timelock, CallProxy: contracts.CallProxy, diff --git a/deployment/keystone/changeset/update_nodes_test.go b/deployment/keystone/changeset/update_nodes_test.go index be3bfb12ee6..31f71cd9603 100644 --- a/deployment/keystone/changeset/update_nodes_test.go +++ b/deployment/keystone/changeset/update_nodes_test.go @@ -9,6 +9,7 @@ import ( "golang.org/x/exp/maps" commonchangeset "github.com/smartcontractkit/chainlink/deployment/common/changeset" + "github.com/smartcontractkit/chainlink/deployment/common/proposalutils" "github.com/smartcontractkit/chainlink/deployment/keystone/changeset" "github.com/smartcontractkit/chainlink/v2/core/services/keystore/keys/p2pkey" ) @@ -89,7 +90,7 @@ func TestUpdateNodes(t *testing.T) { // now apply the changeset such that the proposal is signed and execed contracts := te.ContractSets()[te.RegistrySelector] - timelockContracts := map[uint64]*commonchangeset.TimelockExecutionContracts{ + timelockContracts 
:= map[uint64]*proposalutils.TimelockExecutionContracts{ te.RegistrySelector: { Timelock: contracts.Timelock, CallProxy: contracts.CallProxy,