diff --git a/.changeset/funny-tips-promise.md b/.changeset/funny-tips-promise.md new file mode 100644 index 00000000000..16fd0a9fc33 --- /dev/null +++ b/.changeset/funny-tips-promise.md @@ -0,0 +1,6 @@ +--- +"chainlink": patch +--- + +#added +compare user-defined max gas price with current gas price in automation simulation pipeline diff --git a/.changeset/mighty-flies-breathe.md b/.changeset/mighty-flies-breathe.md new file mode 100644 index 00000000000..d983aad7086 --- /dev/null +++ b/.changeset/mighty-flies-breathe.md @@ -0,0 +1,5 @@ +--- +"chainlink": patch +--- + +#added ORM and corresponding tables for CCIP gas prices and token prices diff --git a/.changeset/modern-trainers-hear.md b/.changeset/modern-trainers-hear.md new file mode 100644 index 00000000000..08f7ab0988a --- /dev/null +++ b/.changeset/modern-trainers-hear.md @@ -0,0 +1,5 @@ +--- +"chainlink": patch +--- + +#internal Generate gethwrappers for capability registry changes diff --git a/.changeset/neat-pianos-argue.md b/.changeset/neat-pianos-argue.md new file mode 100644 index 00000000000..f65c19584db --- /dev/null +++ b/.changeset/neat-pianos-argue.md @@ -0,0 +1,6 @@ +--- +"chainlink": patch +--- + +#added +pass a gas estimator to registry 2.1 pipeline diff --git a/.changeset/ten-dodos-run.md b/.changeset/ten-dodos-run.md new file mode 100644 index 00000000000..42ab8ec58b2 --- /dev/null +++ b/.changeset/ten-dodos-run.md @@ -0,0 +1,5 @@ +--- +"chainlink": patch +--- + +#internal Normalize keystone workflow ref regex property to match id regex diff --git a/.changeset/witty-weeks-kneel.md b/.changeset/witty-weeks-kneel.md new file mode 100644 index 00000000000..d638d037081 --- /dev/null +++ b/.changeset/witty-weeks-kneel.md @@ -0,0 +1,5 @@ +--- +"chainlink": patch +--- + +#added an integration test for max gas price check diff --git a/.github/workflows/ci-core.yml b/.github/workflows/ci-core.yml index a3ea68380f9..1d7b58820b0 100644 --- a/.github/workflows/ci-core.yml +++ b/.github/workflows/ci-core.yml @@ -170,7 +170,7 @@ jobs: env: OUTPUT_FILE: ./output.txt USE_TEE: false - CL_DATABASE_URL: ${{ env.DB_URL }} + CL_DATABASE_URL: ${{ env.DB_URL }} run: ./tools/bin/${{ matrix.type.cmd }} ./... 
- name: Print Filtered Test Results if: ${{ failure() && matrix.type.cmd == 'go_core_tests' && needs.filter.outputs.changes == 'true' }} @@ -203,7 +203,7 @@ jobs: ./coverage.txt ./postgres_logs.txt - name: Notify Slack - if: ${{ failure() && steps.print-races.outputs.post_to_slack == 'true' && matrix.type.cmd == 'go_core_race_tests' && (github.event_name == 'merge_group' || github.base_ref == 'develop') && needs.filter.outputs.changes == 'true' }} + if: ${{ failure() && steps.print-races.outputs.post_to_slack == 'true' && matrix.type.cmd == 'go_core_race_tests' && (github.event_name == 'merge_group' || github.event.branch == 'develop') && needs.filter.outputs.changes == 'true' }} uses: slackapi/slack-github-action@6c661ce58804a1a20f6dc5fbee7f0381b469e001 # v1.25.0 env: SLACK_BOT_TOKEN: ${{ secrets.QA_SLACK_API_KEY }} diff --git a/common/client/poller.go b/common/client/poller.go index ebdcbd66283..d6080722c5c 100644 --- a/common/client/poller.go +++ b/common/client/poller.go @@ -27,10 +27,11 @@ type Poller[T any] struct { wg sync.WaitGroup } -// NewPoller creates a new Poller instance +// NewPoller creates a new Poller instance and returns a channel to receive the polled data func NewPoller[ T any, -](pollingInterval time.Duration, pollingFunc func(ctx context.Context) (T, error), pollingTimeout time.Duration, channel chan<- T, logger logger.Logger) Poller[T] { +](pollingInterval time.Duration, pollingFunc func(ctx context.Context) (T, error), pollingTimeout time.Duration, logger logger.Logger) (Poller[T], <-chan T) { + channel := make(chan T) return Poller[T]{ pollingInterval: pollingInterval, pollingFunc: pollingFunc, @@ -39,7 +40,7 @@ func NewPoller[ logger: logger, errCh: make(chan error), stopCh: make(chan struct{}), - } + }, channel } var _ types.Subscription = &Poller[any]{} @@ -58,6 +59,7 @@ func (p *Poller[T]) Unsubscribe() { close(p.stopCh) p.wg.Wait() close(p.errCh) + close(p.channel) return nil }) } diff --git a/common/client/poller_test.go b/common/client/poller_test.go index 3f11c759adb..82a05b5dfc7 100644 --- a/common/client/poller_test.go +++ b/common/client/poller_test.go @@ -23,10 +23,7 @@ func Test_Poller(t *testing.T) { return nil, nil } - channel := make(chan Head, 1) - defer close(channel) - - poller := NewPoller[Head](time.Millisecond, pollFunc, time.Second, channel, lggr) + poller, _ := NewPoller[Head](time.Millisecond, pollFunc, time.Second, lggr) err := poller.Start() require.NoError(t, err) @@ -50,12 +47,8 @@ func Test_Poller(t *testing.T) { return h.ToMockHead(t), nil } - // data channel to receive updates from the poller - channel := make(chan Head, 1) - defer close(channel) - // Create poller and start to receive data - poller := NewPoller[Head](time.Millisecond, pollFunc, time.Second, channel, lggr) + poller, channel := NewPoller[Head](time.Millisecond, pollFunc, time.Second, lggr) require.NoError(t, poller.Start()) defer poller.Unsubscribe() @@ -79,14 +72,10 @@ func Test_Poller(t *testing.T) { return nil, fmt.Errorf("polling error %d", pollNumber) } - // data channel to receive updates from the poller - channel := make(chan Head, 1) - defer close(channel) - olggr, observedLogs := logger.TestObserved(t, zap.WarnLevel) // Create poller and subscribe to receive data - poller := NewPoller[Head](time.Millisecond, pollFunc, time.Second, channel, olggr) + poller, _ := NewPoller[Head](time.Millisecond, pollFunc, time.Second, olggr) require.NoError(t, poller.Start()) defer poller.Unsubscribe() @@ -114,14 +103,10 @@ func Test_Poller(t *testing.T) { // Set instant 
timeout pollingTimeout := time.Duration(0) - // data channel to receive updates from the poller - channel := make(chan Head, 1) - defer close(channel) - olggr, observedLogs := logger.TestObserved(t, zap.WarnLevel) // Create poller and subscribe to receive data - poller := NewPoller[Head](time.Millisecond, pollFunc, pollingTimeout, channel, olggr) + poller, _ := NewPoller[Head](time.Millisecond, pollFunc, pollingTimeout, olggr) require.NoError(t, poller.Start()) defer poller.Unsubscribe() @@ -146,14 +131,10 @@ func Test_Poller(t *testing.T) { // Set long timeout pollingTimeout := time.Minute - // data channel to receive updates from the poller - channel := make(chan Head, 1) - defer close(channel) - olggr, observedLogs := logger.TestObserved(t, zap.WarnLevel) // Create poller and subscribe to receive data - poller := NewPoller[Head](time.Millisecond, pollFunc, pollingTimeout, channel, olggr) + poller, _ := NewPoller[Head](time.Millisecond, pollFunc, pollingTimeout, olggr) require.NoError(t, poller.Start()) // Unsubscribe while blocked in polling function @@ -184,8 +165,7 @@ func Test_Poller_Unsubscribe(t *testing.T) { } t.Run("Test multiple unsubscribe", func(t *testing.T) { - channel := make(chan Head, 1) - poller := NewPoller[Head](time.Millisecond, pollFunc, time.Second, channel, lggr) + poller, channel := NewPoller[Head](time.Millisecond, pollFunc, time.Second, lggr) err := poller.Start() require.NoError(t, err) @@ -194,14 +174,12 @@ func Test_Poller_Unsubscribe(t *testing.T) { poller.Unsubscribe() }) - t.Run("Test unsubscribe with closed channel", func(t *testing.T) { - channel := make(chan Head, 1) - poller := NewPoller[Head](time.Millisecond, pollFunc, time.Second, channel, lggr) + t.Run("Read channel after unsubscribe", func(t *testing.T) { + poller, channel := NewPoller[Head](time.Millisecond, pollFunc, time.Second, lggr) err := poller.Start() require.NoError(t, err) - <-channel - close(channel) poller.Unsubscribe() + require.Equal(t, <-channel, nil) }) } diff --git a/contracts/.changeset/three-hotels-agree.md b/contracts/.changeset/three-hotels-agree.md new file mode 100644 index 00000000000..66ed4b2377e --- /dev/null +++ b/contracts/.changeset/three-hotels-agree.md @@ -0,0 +1,5 @@ +--- +"@chainlink/contracts": patch +--- + +Add function to update nodes in capability registry diff --git a/contracts/src/v0.8/keystone/CapabilityRegistry.sol b/contracts/src/v0.8/keystone/CapabilityRegistry.sol index 9e0a67434fc..6ac4caedf50 100644 --- a/contracts/src/v0.8/keystone/CapabilityRegistry.sol +++ b/contracts/src/v0.8/keystone/CapabilityRegistry.sol @@ -106,6 +106,16 @@ contract CapabilityRegistry is OwnerIsCreator, TypeAndVersionInterface { /// @param nodeOperatorId The ID of the node operator that manages this node event NodeAdded(bytes32 p2pId, uint256 nodeOperatorId); + /// @notice This event is emitted when a node is updated + /// @param p2pId The P2P ID of the node + /// @param nodeOperatorId The ID of the node operator that manages this node + /// @param signer The node's signer address + event NodeUpdated(bytes32 p2pId, uint256 nodeOperatorId, address signer); + + /// @notice This error is thrown when trying to set the node's + /// signer address to zero + error InvalidNodeSigner(); + /// @notice This error is thrown when trying add a capability that already /// exists. 
error CapabilityAlreadyExists(); @@ -236,12 +246,16 @@ contract CapabilityRegistry is OwnerIsCreator, TypeAndVersionInterface { for (uint256 i; i < nodes.length; ++i) { Node memory node = nodes[i]; + bool isOwner = msg.sender == owner(); + NodeOperator memory nodeOperator = s_nodeOperators[node.nodeOperatorId]; - if (msg.sender != nodeOperator.admin) revert AccessForbidden(); + if (!isOwner && msg.sender != nodeOperator.admin) revert AccessForbidden(); bool nodeExists = s_nodes[node.p2pId].supportedHashedCapabilityIds.length > 0; if (nodeExists || bytes32(node.p2pId) == bytes32("")) revert InvalidNodeP2PId(node.p2pId); + if (node.signer == address(0)) revert InvalidNodeSigner(); + if (node.supportedHashedCapabilityIds.length == 0) revert InvalidNodeCapabilities(node.supportedHashedCapabilityIds); @@ -255,6 +269,36 @@ contract CapabilityRegistry is OwnerIsCreator, TypeAndVersionInterface { } } + /// @notice Updates nodes. The node admin can update the node's signer address + /// and reconfigure its supported capabilities + /// @param nodes The nodes to update + function updateNodes(Node[] calldata nodes) external { + for (uint256 i; i < nodes.length; ++i) { + Node memory node = nodes[i]; + + bool isOwner = msg.sender == owner(); + + NodeOperator memory nodeOperator = s_nodeOperators[node.nodeOperatorId]; + if (!isOwner && msg.sender != nodeOperator.admin) revert AccessForbidden(); + + bool nodeExists = s_nodes[node.p2pId].supportedHashedCapabilityIds.length > 0; + if (!nodeExists) revert InvalidNodeP2PId(node.p2pId); + + if (node.signer == address(0)) revert InvalidNodeSigner(); + + if (node.supportedHashedCapabilityIds.length == 0) + revert InvalidNodeCapabilities(node.supportedHashedCapabilityIds); + + for (uint256 j; j < node.supportedHashedCapabilityIds.length; ++j) { + if (!s_hashedCapabilityIds.contains(node.supportedHashedCapabilityIds[j])) + revert InvalidNodeCapabilities(node.supportedHashedCapabilityIds); + } + + s_nodes[node.p2pId] = node; + emit NodeUpdated(node.p2pId, node.nodeOperatorId, node.signer); + } + } + /// @notice Gets a node's data /// @param p2pId The P2P ID of the node to query for /// @return Node The node data diff --git a/contracts/src/v0.8/keystone/test/CapabilityRegistry_AddNodesTest.t.sol b/contracts/src/v0.8/keystone/test/CapabilityRegistry_AddNodesTest.t.sol index e179cc5cea6..9174e4ed0d3 100644 --- a/contracts/src/v0.8/keystone/test/CapabilityRegistry_AddNodesTest.t.sol +++ b/contracts/src/v0.8/keystone/test/CapabilityRegistry_AddNodesTest.t.sol @@ -18,7 +18,7 @@ contract CapabilityRegistry_AddNodesTest is BaseTest { s_capabilityRegistry.addCapability(s_capabilityWithConfigurationContract); } - function test_RevertWhen_CalledByNonNodeOperatorAdmin() public { + function test_RevertWhen_CalledByNonNodeOperatorAdminAndNonOwner() public { changePrank(STRANGER); CapabilityRegistry.Node[] memory nodes = new CapabilityRegistry.Node[](1); @@ -36,6 +36,24 @@ contract CapabilityRegistry_AddNodesTest is BaseTest { s_capabilityRegistry.addNodes(nodes); } + function test_RevertWhen_SignerAddressEmpty() public { + changePrank(NODE_OPERATOR_ONE_ADMIN); + CapabilityRegistry.Node[] memory nodes = new CapabilityRegistry.Node[](1); + + bytes32[] memory hashedCapabilityIds = new bytes32[](1); + hashedCapabilityIds[0] = s_basicHashedCapabilityId; + + nodes[0] = CapabilityRegistry.Node({ + nodeOperatorId: TEST_NODE_OPERATOR_ONE_ID, + p2pId: P2P_ID, + signer: address(0), + supportedHashedCapabilityIds: hashedCapabilityIds + }); + + 
vm.expectRevert(abi.encodeWithSelector(CapabilityRegistry.InvalidNodeSigner.selector)); + s_capabilityRegistry.addNodes(nodes); + } + function test_RevertWhen_AddingDuplicateP2PId() public { changePrank(NODE_OPERATOR_ONE_ADMIN); CapabilityRegistry.Node[] memory nodes = new CapabilityRegistry.Node[](1); @@ -133,4 +151,31 @@ contract CapabilityRegistry_AddNodesTest is BaseTest { assertEq(node.supportedHashedCapabilityIds[0], s_basicHashedCapabilityId); assertEq(node.supportedHashedCapabilityIds[1], s_capabilityWithConfigurationContractId); } + + function test_OwnerCanAddNodes() public { + changePrank(ADMIN); + + CapabilityRegistry.Node[] memory nodes = new CapabilityRegistry.Node[](1); + bytes32[] memory hashedCapabilityIds = new bytes32[](2); + hashedCapabilityIds[0] = s_basicHashedCapabilityId; + hashedCapabilityIds[1] = s_capabilityWithConfigurationContractId; + + nodes[0] = CapabilityRegistry.Node({ + nodeOperatorId: TEST_NODE_OPERATOR_ONE_ID, + p2pId: P2P_ID, + signer: NODE_OPERATOR_ONE_SIGNER_ADDRESS, + supportedHashedCapabilityIds: hashedCapabilityIds + }); + + vm.expectEmit(address(s_capabilityRegistry)); + emit NodeAdded(P2P_ID, TEST_NODE_OPERATOR_ONE_ID); + s_capabilityRegistry.addNodes(nodes); + + CapabilityRegistry.Node memory node = s_capabilityRegistry.getNode(P2P_ID); + assertEq(node.nodeOperatorId, TEST_NODE_OPERATOR_ONE_ID); + assertEq(node.p2pId, P2P_ID); + assertEq(node.supportedHashedCapabilityIds.length, 2); + assertEq(node.supportedHashedCapabilityIds[0], s_basicHashedCapabilityId); + assertEq(node.supportedHashedCapabilityIds[1], s_capabilityWithConfigurationContractId); + } } diff --git a/contracts/src/v0.8/keystone/test/CapabilityRegistry_UpdateNodesTest.t.sol b/contracts/src/v0.8/keystone/test/CapabilityRegistry_UpdateNodesTest.t.sol new file mode 100644 index 00000000000..7ccbd14dbf2 --- /dev/null +++ b/contracts/src/v0.8/keystone/test/CapabilityRegistry_UpdateNodesTest.t.sol @@ -0,0 +1,195 @@ +// SPDX-License-Identifier: MIT +pragma solidity ^0.8.19; + +import {BaseTest} from "./BaseTest.t.sol"; +import {CapabilityRegistry} from "../CapabilityRegistry.sol"; + +contract CapabilityRegistry_UpdateNodesTest is BaseTest { + event NodeUpdated(bytes32 p2pId, uint256 nodeOperatorId, address signer); + + uint256 private constant TEST_NODE_OPERATOR_ONE_ID = 0; + uint256 private constant TEST_NODE_OPERATOR_TWO_ID = 1; + bytes32 private constant INVALID_P2P_ID = bytes32("fake-p2p"); + + function setUp() public override { + BaseTest.setUp(); + changePrank(ADMIN); + s_capabilityRegistry.addNodeOperators(_getNodeOperators()); + s_capabilityRegistry.addCapability(s_basicCapability); + s_capabilityRegistry.addCapability(s_capabilityWithConfigurationContract); + + CapabilityRegistry.Node[] memory nodes = new CapabilityRegistry.Node[](1); + bytes32[] memory hashedCapabilityIds = new bytes32[](2); + hashedCapabilityIds[0] = s_basicHashedCapabilityId; + hashedCapabilityIds[1] = s_capabilityWithConfigurationContractId; + + nodes[0] = CapabilityRegistry.Node({ + nodeOperatorId: TEST_NODE_OPERATOR_ONE_ID, + p2pId: P2P_ID, + signer: NODE_OPERATOR_ONE_SIGNER_ADDRESS, + supportedHashedCapabilityIds: hashedCapabilityIds + }); + + changePrank(NODE_OPERATOR_ONE_ADMIN); + + s_capabilityRegistry.addNodes(nodes); + } + + function test_RevertWhen_CalledByNonNodeOperatorAdminAndNonOwner() public { + changePrank(STRANGER); + CapabilityRegistry.Node[] memory nodes = new CapabilityRegistry.Node[](1); + + bytes32[] memory hashedCapabilityIds = new bytes32[](1); + hashedCapabilityIds[0] = 
s_basicHashedCapabilityId; + + nodes[0] = CapabilityRegistry.Node({ + nodeOperatorId: TEST_NODE_OPERATOR_ONE_ID, + p2pId: P2P_ID, + signer: NODE_OPERATOR_TWO_SIGNER_ADDRESS, + supportedHashedCapabilityIds: hashedCapabilityIds + }); + + vm.expectRevert(CapabilityRegistry.AccessForbidden.selector); + s_capabilityRegistry.updateNodes(nodes); + } + + function test_RevertWhen_NodeDoesNotExist() public { + changePrank(NODE_OPERATOR_ONE_ADMIN); + CapabilityRegistry.Node[] memory nodes = new CapabilityRegistry.Node[](1); + + bytes32[] memory hashedCapabilityIds = new bytes32[](1); + hashedCapabilityIds[0] = s_basicHashedCapabilityId; + + nodes[0] = CapabilityRegistry.Node({ + nodeOperatorId: TEST_NODE_OPERATOR_ONE_ID, + p2pId: INVALID_P2P_ID, + signer: NODE_OPERATOR_ONE_SIGNER_ADDRESS, + supportedHashedCapabilityIds: hashedCapabilityIds + }); + + vm.expectRevert(abi.encodeWithSelector(CapabilityRegistry.InvalidNodeP2PId.selector, INVALID_P2P_ID)); + s_capabilityRegistry.updateNodes(nodes); + } + + function test_RevertWhen_P2PIDEmpty() public { + changePrank(NODE_OPERATOR_ONE_ADMIN); + CapabilityRegistry.Node[] memory nodes = new CapabilityRegistry.Node[](1); + + bytes32[] memory hashedCapabilityIds = new bytes32[](1); + hashedCapabilityIds[0] = s_basicHashedCapabilityId; + + nodes[0] = CapabilityRegistry.Node({ + nodeOperatorId: TEST_NODE_OPERATOR_ONE_ID, + p2pId: bytes32(""), + signer: NODE_OPERATOR_ONE_SIGNER_ADDRESS, + supportedHashedCapabilityIds: hashedCapabilityIds + }); + + vm.expectRevert(abi.encodeWithSelector(CapabilityRegistry.InvalidNodeP2PId.selector, bytes32(""))); + s_capabilityRegistry.updateNodes(nodes); + } + + function test_RevertWhen_SignerAddressEmpty() public { + changePrank(NODE_OPERATOR_ONE_ADMIN); + CapabilityRegistry.Node[] memory nodes = new CapabilityRegistry.Node[](1); + + bytes32[] memory hashedCapabilityIds = new bytes32[](1); + hashedCapabilityIds[0] = s_basicHashedCapabilityId; + + nodes[0] = CapabilityRegistry.Node({ + nodeOperatorId: TEST_NODE_OPERATOR_ONE_ID, + p2pId: P2P_ID, + signer: address(0), + supportedHashedCapabilityIds: hashedCapabilityIds + }); + + vm.expectRevert(abi.encodeWithSelector(CapabilityRegistry.InvalidNodeSigner.selector)); + s_capabilityRegistry.updateNodes(nodes); + } + + function test_RevertWhen_UpdatingNodeWithoutCapabilities() public { + changePrank(NODE_OPERATOR_ONE_ADMIN); + CapabilityRegistry.Node[] memory nodes = new CapabilityRegistry.Node[](1); + + bytes32[] memory hashedCapabilityIds = new bytes32[](0); + + nodes[0] = CapabilityRegistry.Node({ + nodeOperatorId: TEST_NODE_OPERATOR_ONE_ID, + p2pId: P2P_ID, + signer: NODE_OPERATOR_ONE_SIGNER_ADDRESS, + supportedHashedCapabilityIds: hashedCapabilityIds + }); + vm.expectRevert(abi.encodeWithSelector(CapabilityRegistry.InvalidNodeCapabilities.selector, hashedCapabilityIds)); + s_capabilityRegistry.updateNodes(nodes); + } + + function test_RevertWhen_AddingNodeWithInvalidCapability() public { + changePrank(NODE_OPERATOR_ONE_ADMIN); + CapabilityRegistry.Node[] memory nodes = new CapabilityRegistry.Node[](1); + + bytes32[] memory hashedCapabilityIds = new bytes32[](1); + hashedCapabilityIds[0] = s_nonExistentHashedCapabilityId; + + nodes[0] = CapabilityRegistry.Node({ + nodeOperatorId: TEST_NODE_OPERATOR_ONE_ID, + p2pId: P2P_ID, + signer: NODE_OPERATOR_ONE_SIGNER_ADDRESS, + supportedHashedCapabilityIds: hashedCapabilityIds + }); + + vm.expectRevert(abi.encodeWithSelector(CapabilityRegistry.InvalidNodeCapabilities.selector, hashedCapabilityIds)); + s_capabilityRegistry.updateNodes(nodes); 
+ } + + function test_UpdatesNode() public { + changePrank(NODE_OPERATOR_ONE_ADMIN); + + CapabilityRegistry.Node[] memory nodes = new CapabilityRegistry.Node[](1); + bytes32[] memory hashedCapabilityIds = new bytes32[](1); + hashedCapabilityIds[0] = s_basicHashedCapabilityId; + + nodes[0] = CapabilityRegistry.Node({ + nodeOperatorId: TEST_NODE_OPERATOR_ONE_ID, + p2pId: P2P_ID, + signer: NODE_OPERATOR_TWO_SIGNER_ADDRESS, + supportedHashedCapabilityIds: hashedCapabilityIds + }); + + vm.expectEmit(address(s_capabilityRegistry)); + emit NodeUpdated(P2P_ID, TEST_NODE_OPERATOR_ONE_ID, NODE_OPERATOR_TWO_SIGNER_ADDRESS); + s_capabilityRegistry.updateNodes(nodes); + + CapabilityRegistry.Node memory node = s_capabilityRegistry.getNode(P2P_ID); + assertEq(node.nodeOperatorId, TEST_NODE_OPERATOR_ONE_ID); + assertEq(node.p2pId, P2P_ID); + assertEq(node.signer, NODE_OPERATOR_TWO_SIGNER_ADDRESS); + assertEq(node.supportedHashedCapabilityIds.length, 1); + assertEq(node.supportedHashedCapabilityIds[0], s_basicHashedCapabilityId); + } + + function test_OwnerCanUpdateNodes() public { + changePrank(ADMIN); + + CapabilityRegistry.Node[] memory nodes = new CapabilityRegistry.Node[](1); + bytes32[] memory hashedCapabilityIds = new bytes32[](1); + hashedCapabilityIds[0] = s_basicHashedCapabilityId; + + nodes[0] = CapabilityRegistry.Node({ + nodeOperatorId: TEST_NODE_OPERATOR_ONE_ID, + p2pId: P2P_ID, + signer: NODE_OPERATOR_TWO_SIGNER_ADDRESS, + supportedHashedCapabilityIds: hashedCapabilityIds + }); + + vm.expectEmit(address(s_capabilityRegistry)); + emit NodeUpdated(P2P_ID, TEST_NODE_OPERATOR_ONE_ID, NODE_OPERATOR_TWO_SIGNER_ADDRESS); + s_capabilityRegistry.updateNodes(nodes); + + CapabilityRegistry.Node memory node = s_capabilityRegistry.getNode(P2P_ID); + assertEq(node.nodeOperatorId, TEST_NODE_OPERATOR_ONE_ID); + assertEq(node.p2pId, P2P_ID); + assertEq(node.signer, NODE_OPERATOR_TWO_SIGNER_ADDRESS); + assertEq(node.supportedHashedCapabilityIds.length, 1); + assertEq(node.supportedHashedCapabilityIds[0], s_basicHashedCapabilityId); + } +} diff --git a/core/gethwrappers/keystone/generated/keystone_capability_registry/keystone_capability_registry.go b/core/gethwrappers/keystone/generated/keystone_capability_registry/keystone_capability_registry.go index 8ea5e29d99c..9080fbd7807 100644 --- a/core/gethwrappers/keystone/generated/keystone_capability_registry/keystone_capability_registry.go +++ b/core/gethwrappers/keystone/generated/keystone_capability_registry/keystone_capability_registry.go @@ -50,8 +50,8 @@ type CapabilityRegistryNodeOperator struct { } var CapabilityRegistryMetaData = &bind.MetaData{ - ABI: 
"[{\"inputs\":[],\"name\":\"AccessForbidden\",\"type\":\"error\"},{\"inputs\":[{\"internalType\":\"bytes32\",\"name\":\"hashedCapabilityId\",\"type\":\"bytes32\"}],\"name\":\"CapabilityAlreadyDeprecated\",\"type\":\"error\"},{\"inputs\":[],\"name\":\"CapabilityAlreadyExists\",\"type\":\"error\"},{\"inputs\":[{\"internalType\":\"bytes32\",\"name\":\"hashedCapabilityId\",\"type\":\"bytes32\"}],\"name\":\"CapabilityDoesNotExist\",\"type\":\"error\"},{\"inputs\":[{\"internalType\":\"address\",\"name\":\"proposedConfigurationContract\",\"type\":\"address\"}],\"name\":\"InvalidCapabilityConfigurationContractInterface\",\"type\":\"error\"},{\"inputs\":[{\"internalType\":\"bytes32[]\",\"name\":\"hashedCapabilityIds\",\"type\":\"bytes32[]\"}],\"name\":\"InvalidNodeCapabilities\",\"type\":\"error\"},{\"inputs\":[],\"name\":\"InvalidNodeOperatorAdmin\",\"type\":\"error\"},{\"inputs\":[{\"internalType\":\"bytes32\",\"name\":\"p2pId\",\"type\":\"bytes32\"}],\"name\":\"InvalidNodeP2PId\",\"type\":\"error\"},{\"inputs\":[{\"internalType\":\"uint256\",\"name\":\"lengthOne\",\"type\":\"uint256\"},{\"internalType\":\"uint256\",\"name\":\"lengthTwo\",\"type\":\"uint256\"}],\"name\":\"LengthMismatch\",\"type\":\"error\"},{\"anonymous\":false,\"inputs\":[{\"indexed\":true,\"internalType\":\"bytes32\",\"name\":\"hashedCapabilityId\",\"type\":\"bytes32\"}],\"name\":\"CapabilityAdded\",\"type\":\"event\"},{\"anonymous\":false,\"inputs\":[{\"indexed\":true,\"internalType\":\"bytes32\",\"name\":\"hashedCapabilityId\",\"type\":\"bytes32\"}],\"name\":\"CapabilityDeprecated\",\"type\":\"event\"},{\"anonymous\":false,\"inputs\":[{\"indexed\":false,\"internalType\":\"bytes32\",\"name\":\"p2pId\",\"type\":\"bytes32\"},{\"indexed\":false,\"internalType\":\"uint256\",\"name\":\"nodeOperatorId\",\"type\":\"uint256\"}],\"name\":\"NodeAdded\",\"type\":\"event\"},{\"anonymous\":false,\"inputs\":[{\"indexed\":false,\"internalType\":\"uint256\",\"name\":\"nodeOperatorId\",\"type\":\"uint256\"},{\"indexed\":true,\"internalType\":\"address\",\"name\":\"admin\",\"type\":\"address\"},{\"indexed\":false,\"internalType\":\"string\",\"name\":\"name\",\"type\":\"string\"}],\"name\":\"NodeOperatorAdded\",\"type\":\"event\"},{\"anonymous\":false,\"inputs\":[{\"indexed\":false,\"internalType\":\"uint256\",\"name\":\"nodeOperatorId\",\"type\":\"uint256\"}],\"name\":\"NodeOperatorRemoved\",\"type\":\"event\"},{\"anonymous\":false,\"inputs\":[{\"indexed\":false,\"internalType\":\"uint256\",\"name\":\"nodeOperatorId\",\"type\":\"uint256\"},{\"indexed\":true,\"internalType\":\"address\",\"name\":\"admin\",\"type\":\"address\"},{\"indexed\":false,\"internalType\":\"string\",\"name\":\"name\",\"type\":\"string\"}],\"name\":\"NodeOperatorUpdated\",\"type\":\"event\"},{\"anonymous\":false,\"inputs\":[{\"indexed\":true,\"internalType\":\"address\",\"name\":\"from\",\"type\":\"address\"},{\"indexed\":true,\"internalType\":\"address\",\"name\":\"to\",\"type\":\"address\"}],\"name\":\"OwnershipTransferRequested\",\"type\":\"event\"},{\"anonymous\":false,\"inputs\":[{\"indexed\":true,\"internalType\":\"address\",\"name\":\"from\",\"type\":\"address\"},{\"indexed\":true,\"internalType\":\"address\",\"name\":\"to\",\"type\":\"address\"}],\"name\":\"OwnershipTransferred\",\"type\":\"event\"},{\"inputs\":[],\"name\":\"acceptOwnership\",\"outputs\":[],\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"inputs\":[{\"components\":[{\"internalType\":\"bytes32\",\"name\":\"labelledName\",\"type\":\"bytes32\"},{\"internalType\":\"bytes32\",\"name\":\"vers
ion\",\"type\":\"bytes32\"},{\"internalType\":\"enumCapabilityRegistry.CapabilityResponseType\",\"name\":\"responseType\",\"type\":\"uint8\"},{\"internalType\":\"address\",\"name\":\"configurationContract\",\"type\":\"address\"}],\"internalType\":\"structCapabilityRegistry.Capability\",\"name\":\"capability\",\"type\":\"tuple\"}],\"name\":\"addCapability\",\"outputs\":[],\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"inputs\":[{\"components\":[{\"internalType\":\"address\",\"name\":\"admin\",\"type\":\"address\"},{\"internalType\":\"string\",\"name\":\"name\",\"type\":\"string\"}],\"internalType\":\"structCapabilityRegistry.NodeOperator[]\",\"name\":\"nodeOperators\",\"type\":\"tuple[]\"}],\"name\":\"addNodeOperators\",\"outputs\":[],\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"inputs\":[{\"components\":[{\"internalType\":\"uint256\",\"name\":\"nodeOperatorId\",\"type\":\"uint256\"},{\"internalType\":\"bytes32\",\"name\":\"p2pId\",\"type\":\"bytes32\"},{\"internalType\":\"address\",\"name\":\"signer\",\"type\":\"address\"},{\"internalType\":\"bytes32[]\",\"name\":\"supportedHashedCapabilityIds\",\"type\":\"bytes32[]\"}],\"internalType\":\"structCapabilityRegistry.Node[]\",\"name\":\"nodes\",\"type\":\"tuple[]\"}],\"name\":\"addNodes\",\"outputs\":[],\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"bytes32\",\"name\":\"hashedCapabilityId\",\"type\":\"bytes32\"}],\"name\":\"deprecateCapability\",\"outputs\":[],\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"inputs\":[],\"name\":\"getCapabilities\",\"outputs\":[{\"components\":[{\"internalType\":\"bytes32\",\"name\":\"labelledName\",\"type\":\"bytes32\"},{\"internalType\":\"bytes32\",\"name\":\"version\",\"type\":\"bytes32\"},{\"internalType\":\"enumCapabilityRegistry.CapabilityResponseType\",\"name\":\"responseType\",\"type\":\"uint8\"},{\"internalType\":\"address\",\"name\":\"configurationContract\",\"type\":\"address\"}],\"internalType\":\"structCapabilityRegistry.Capability[]\",\"name\":\"\",\"type\":\"tuple[]\"}],\"stateMutability\":\"view\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"bytes32\",\"name\":\"hashedId\",\"type\":\"bytes32\"}],\"name\":\"getCapability\",\"outputs\":[{\"components\":[{\"internalType\":\"bytes32\",\"name\":\"labelledName\",\"type\":\"bytes32\"},{\"internalType\":\"bytes32\",\"name\":\"version\",\"type\":\"bytes32\"},{\"internalType\":\"enumCapabilityRegistry.CapabilityResponseType\",\"name\":\"responseType\",\"type\":\"uint8\"},{\"internalType\":\"address\",\"name\":\"configurationContract\",\"type\":\"address\"}],\"internalType\":\"structCapabilityRegistry.Capability\",\"name\":\"\",\"type\":\"tuple\"}],\"stateMutability\":\"view\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"bytes32\",\"name\":\"labelledName\",\"type\":\"bytes32\"},{\"internalType\":\"bytes32\",\"name\":\"version\",\"type\":\"bytes32\"}],\"name\":\"getHashedCapabilityId\",\"outputs\":[{\"internalType\":\"bytes32\",\"name\":\"\",\"type\":\"bytes32\"}],\"stateMutability\":\"pure\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"bytes32\",\"name\":\"p2pId\",\"type\":\"bytes32\"}],\"name\":\"getNode\",\"outputs\":[{\"components\":[{\"internalType\":\"uint256\",\"name\":\"nodeOperatorId\",\"type\":\"uint256\"},{\"internalType\":\"bytes32\",\"name\":\"p2pId\",\"type\":\"bytes32\"},{\"internalType\":\"address\",\"name\":\"signer\",\"type\":\"address\"},{\"internalType\":\"bytes32[]\",\"name\":\"supportedHashedCapabilityIds\",\"type\":\"
bytes32[]\"}],\"internalType\":\"structCapabilityRegistry.Node\",\"name\":\"\",\"type\":\"tuple\"}],\"stateMutability\":\"view\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"uint256\",\"name\":\"nodeOperatorId\",\"type\":\"uint256\"}],\"name\":\"getNodeOperator\",\"outputs\":[{\"components\":[{\"internalType\":\"address\",\"name\":\"admin\",\"type\":\"address\"},{\"internalType\":\"string\",\"name\":\"name\",\"type\":\"string\"}],\"internalType\":\"structCapabilityRegistry.NodeOperator\",\"name\":\"\",\"type\":\"tuple\"}],\"stateMutability\":\"view\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"bytes32\",\"name\":\"hashedCapabilityId\",\"type\":\"bytes32\"}],\"name\":\"isCapabilityDeprecated\",\"outputs\":[{\"internalType\":\"bool\",\"name\":\"\",\"type\":\"bool\"}],\"stateMutability\":\"view\",\"type\":\"function\"},{\"inputs\":[],\"name\":\"owner\",\"outputs\":[{\"internalType\":\"address\",\"name\":\"\",\"type\":\"address\"}],\"stateMutability\":\"view\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"uint256[]\",\"name\":\"nodeOperatorIds\",\"type\":\"uint256[]\"}],\"name\":\"removeNodeOperators\",\"outputs\":[],\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"address\",\"name\":\"to\",\"type\":\"address\"}],\"name\":\"transferOwnership\",\"outputs\":[],\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"inputs\":[],\"name\":\"typeAndVersion\",\"outputs\":[{\"internalType\":\"string\",\"name\":\"\",\"type\":\"string\"}],\"stateMutability\":\"pure\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"uint256[]\",\"name\":\"nodeOperatorIds\",\"type\":\"uint256[]\"},{\"components\":[{\"internalType\":\"address\",\"name\":\"admin\",\"type\":\"address\"},{\"internalType\":\"string\",\"name\":\"name\",\"type\":\"string\"}],\"internalType\":\"structCapabilityRegistry.NodeOperator[]\",\"name\":\"nodeOperators\",\"type\":\"tuple[]\"}],\"name\":\"updateNodeOperators\",\"outputs\":[],\"stateMutability\":\"nonpayable\",\"type\":\"function\"}]", - Bin: 
"0x60806040523480156200001157600080fd5b503380600081620000695760405162461bcd60e51b815260206004820152601860248201527f43616e6e6f7420736574206f776e657220746f207a65726f000000000000000060448201526064015b60405180910390fd5b600080546001600160a01b0319166001600160a01b03848116919091179091558116156200009c576200009c81620000a5565b50505062000150565b336001600160a01b03821603620000ff5760405162461bcd60e51b815260206004820152601760248201527f43616e6e6f74207472616e7366657220746f2073656c66000000000000000000604482015260640162000060565b600180546001600160a01b0319166001600160a01b0383811691821790925560008054604051929316917fed8889f560326eb138920d842192f0eb3dd22b4f139c87a2c57538e05bae12789190a350565b61214580620001606000396000f3fe608060405234801561001057600080fd5b50600436106101005760003560e01c806365c14dc711610097578063ae3c241c11610066578063ae3c241c14610292578063c2d483a1146102a5578063ddbe4f82146102b8578063f2fde38b146102cd57600080fd5b806365c14dc71461022257806379ba5097146102425780638da5cb5b1461024a5780639cb7c5f41461027257600080fd5b80631cdf6343116100d35780631cdf63431461019457806336b402fb146101a7578063398f3773146101ef57806350c946fe1461020257600080fd5b80630c5801e314610105578063117392ce1461011a578063125700111461012d578063181f5a7714610155575b600080fd5b6101186101133660046116ec565b6102e0565b005b610118610128366004611758565b6105f1565b61014061013b366004611770565b61083c565b60405190151581526020015b60405180910390f35b604080518082018252601881527f4361706162696c697479526567697374727920312e302e3000000000000000006020820152905161014c91906117ed565b6101186101a2366004611800565b61084f565b6101e16101b5366004611842565b604080516020808201949094528082019290925280518083038201815260609092019052805191012090565b60405190815260200161014c565b6101186101fd366004611800565b610912565b610215610210366004611770565b610aab565b60405161014c9190611864565b610235610230366004611770565b610b73565b60405161014c91906118ea565b610118610c50565b60005460405173ffffffffffffffffffffffffffffffffffffffff909116815260200161014c565b610285610280366004611770565b610d4d565b60405161014c91906119cc565b6101186102a0366004611770565b610df7565b6101186102b3366004611800565b610ec2565b6102c061124c565b60405161014c91906119da565b6101186102db366004611a4a565b611391565b828114610328576040517fab8b67c600000000000000000000000000000000000000000000000000000000815260048101849052602481018290526044015b60405180910390fd5b6000805473ffffffffffffffffffffffffffffffffffffffff16905b848110156105e957600086868381811061036057610360611a67565b905060200201359050600085858481811061037d5761037d611a67565b905060200281019061038f9190611a96565b61039890611b9e565b805190915073ffffffffffffffffffffffffffffffffffffffff166103e9576040517feeacd93900000000000000000000000000000000000000000000000000000000815260040160405180910390fd5b805173ffffffffffffffffffffffffffffffffffffffff16331480159061042657503373ffffffffffffffffffffffffffffffffffffffff851614155b1561045d576040517fef67f5d800000000000000000000000000000000000000000000000000000000815260040160405180910390fd5b805160008381526007602052604090205473ffffffffffffffffffffffffffffffffffffffff908116911614158061050f57506020808201516040516104a392016117ed565b604080517fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe081840301815282825280516020918201206000868152600783529290922091926104f6926001019101611cb7565b6040516020818303038152906040528051906020012014155b156105d6578051600083815260076020908152604090912080547fffffffffffffffffffffffff00000000000000000000000000000000000000001673ffffffffffffffffffffffffffffffffffffffff90931692909217825582015160019091019061057c9082611da6565b50806000015173ffffffffffffffffffffffff
ffffffffffffffff167f14c8f513e8a6d86d2d16b0cb64976de4e72386c4f8068eca3b7354373f8fe97a8383602001516040516105cd929190611ec0565b60405180910390a25b5050806105e290611f08565b9050610344565b505050505050565b6105f96113a5565b604080518235602082810191909152808401358284015282518083038401815260609092019092528051910120610631600382611428565b15610668576040517fe288638f00000000000000000000000000000000000000000000000000000000815260040160405180910390fd5b600061067a6080840160608501611a4a565b73ffffffffffffffffffffffffffffffffffffffff16146107e5576106a56080830160608401611a4a565b73ffffffffffffffffffffffffffffffffffffffff163b158061078557506106d36080830160608401611a4a565b6040517f01ffc9a70000000000000000000000000000000000000000000000000000000081527f884efe6100000000000000000000000000000000000000000000000000000000600482015273ffffffffffffffffffffffffffffffffffffffff91909116906301ffc9a790602401602060405180830381865afa15801561075f573d6000803e3d6000fd5b505050506040513d601f19601f820116820180604052508101906107839190611f40565b155b156107e55761079a6080830160608401611a4a565b6040517fabb5e3fd00000000000000000000000000000000000000000000000000000000815273ffffffffffffffffffffffffffffffffffffffff909116600482015260240161031f565b6107f0600382611443565b506000818152600260205260409020829061080b8282611f62565b505060405181907f65610e5677eedff94555572640e442f89848a109ef8593fa927ac30b2565ff0690600090a25050565b6000610849600583611428565b92915050565b6108576113a5565b60005b8181101561090d57600083838381811061087657610876611a67565b60209081029290920135600081815260079093526040832080547fffffffffffffffffffffffff00000000000000000000000000000000000000001681559093509190506108c76001830182611606565b50506040518181527f1e5877d7b3001d1569bf733b76c7eceda58bd6c031e5b8d0b7042308ba2e9d4f9060200160405180910390a15061090681611f08565b905061085a565b505050565b61091a6113a5565b60005b8181101561090d57600083838381811061093957610939611a67565b905060200281019061094b9190611a96565b61095490611b9e565b805190915073ffffffffffffffffffffffffffffffffffffffff166109a5576040517feeacd93900000000000000000000000000000000000000000000000000000000815260040160405180910390fd5b600954604080518082018252835173ffffffffffffffffffffffffffffffffffffffff908116825260208086015181840190815260008681526007909252939020825181547fffffffffffffffffffffffff000000000000000000000000000000000000000016921691909117815591519091906001820190610a289082611da6565b50905050600960008154610a3b90611f08565b909155508151602083015160405173ffffffffffffffffffffffffffffffffffffffff909216917fda6697b182650034bd205cdc2dbfabb06bdb3a0a83a2b45bfefa3c4881284e0b91610a9091859190611ec0565b60405180910390a2505080610aa490611f08565b905061091d565b6040805160808101825260008082526020820181905291810191909152606080820152600082815260086020908152604091829020825160808101845281548152600182015481840152600282015473ffffffffffffffffffffffffffffffffffffffff16818501526003820180548551818602810186019096528086529194929360608601939290830182828015610b6357602002820191906000526020600020905b815481526020019060010190808311610b4f575b5050505050815250509050919050565b6040805180820190915260008152606060208201526000828152600760209081526040918290208251808401909352805473ffffffffffffffffffffffffffffffffffffffff1683526001810180549192840191610bd090611c6a565b80601f0160208091040260200160405190810160405280929190818152602001828054610bfc90611c6a565b8015610b635780601f10610c1e57610100808354040283529160200191610b63565b820191906000526020600020905b815481529060010190602001808311610c2c57505050919092525091949350505050565b60015473ffffffffffffffffffffffffffffffffffffffff163314610cd1576040517f08c379a00000000000000000000
0000000000000000000000000000000000000815260206004820152601660248201527f4d7573742062652070726f706f736564206f776e657200000000000000000000604482015260640161031f565b60008054337fffffffffffffffffffffffff00000000000000000000000000000000000000008083168217845560018054909116905560405173ffffffffffffffffffffffffffffffffffffffff90921692909183917f8be0079c531659141344cd1fd0a4f28419497f9722a3daafe3b4186f6b6457e091a350565b604080516080808201835260008083526020808401829052838501829052606084018290528582526002808252918590208551938401865280548452600180820154928501929092529182015493949293919284019160ff1690811115610db657610db661192d565b6001811115610dc757610dc761192d565b815260029190910154610100900473ffffffffffffffffffffffffffffffffffffffff1660209091015292915050565b610dff6113a5565b610e0a600382611428565b610e43576040517fe181733f0000000000000000000000000000000000000000000000000000000081526004810182905260240161031f565b610e4e600582611428565b15610e88576040517f16950d1d0000000000000000000000000000000000000000000000000000000081526004810182905260240161031f565b610e93600582611443565b5060405181907fdcea1b78b6ddc31592a94607d537543fcaafda6cc52d6d5cc7bbfca1422baf2190600090a250565b60005b8181101561090d576000838383818110610ee157610ee1611a67565b9050602002810190610ef39190611fe4565b610efc90612018565b805160009081526007602090815260408083208151808301909252805473ffffffffffffffffffffffffffffffffffffffff168252600181018054959650939491939092840191610f4c90611c6a565b80601f0160208091040260200160405190810160405280929190818152602001828054610f7890611c6a565b8015610fc55780601f10610f9a57610100808354040283529160200191610fc5565b820191906000526020600020905b815481529060010190602001808311610fa857829003601f168201915b5050505050815250509050806000015173ffffffffffffffffffffffffffffffffffffffff163373ffffffffffffffffffffffffffffffffffffffff1614611039576040517fef67f5d800000000000000000000000000000000000000000000000000000000815260040160405180910390fd5b6020808301516000908152600890915260409020600301541515808061106157506020830151155b156110a05782602001516040517f64e2ee9200000000000000000000000000000000000000000000000000000000815260040161031f91815260200190565b8260600151516000036110e55782606001516040517f3748d4c600000000000000000000000000000000000000000000000000000000815260040161031f91906120ed565b60005b836060015151811015611172576111268460600151828151811061110e5761110e611a67565b6020026020010151600361142890919063ffffffff16565b6111625783606001516040517f3748d4c600000000000000000000000000000000000000000000000000000000815260040161031f91906120ed565b61116b81611f08565b90506110e8565b506020838101805160009081526008835260409081902086518155915160018301558501516002820180547fffffffffffffffffffffffff00000000000000000000000000000000000000001673ffffffffffffffffffffffffffffffffffffffff9092169190911790556060850151805186936111f7926003850192910190611640565b505050602083810151845160408051928352928201527f5bfe8a52ad26ac6ee7b0cd46d2fd92be04735a31c45ef8aa3d4b7ea1b61bbc1f910160405180910390a15050508061124590611f08565b9050610ec5565b6060600061125a600361144f565b90506000611268600561145c565b82516112749190612125565b67ffffffffffffffff81111561128c5761128c611ad4565b6040519080825280602002602001820160405280156112fc57816020015b6040805160808101825260008082526020808301829052928201819052606082015282527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff9092019101816112aa5790505b5090506000805b835181101561138857600084828151811061132057611320611a67565b6020026020010151905061133e81600561142890919063ffffffff16565b6113775761134b81610d4d565b84848151811061135d5761135d611a67565b6020026020010181905250828061137390611f0856
5b9350505b5061138181611f08565b9050611303565b50909392505050565b6113996113a5565b6113a281611466565b50565b60005473ffffffffffffffffffffffffffffffffffffffff163314611426576040517f08c379a000000000000000000000000000000000000000000000000000000000815260206004820152601660248201527f4f6e6c792063616c6c61626c65206279206f776e657200000000000000000000604482015260640161031f565b565b600081815260018301602052604081205415155b9392505050565b600061143c838361155b565b6060600061143c836115aa565b6000610849825490565b3373ffffffffffffffffffffffffffffffffffffffff8216036114e5576040517f08c379a000000000000000000000000000000000000000000000000000000000815260206004820152601760248201527f43616e6e6f74207472616e7366657220746f2073656c66000000000000000000604482015260640161031f565b600180547fffffffffffffffffffffffff00000000000000000000000000000000000000001673ffffffffffffffffffffffffffffffffffffffff83811691821790925560008054604051929316917fed8889f560326eb138920d842192f0eb3dd22b4f139c87a2c57538e05bae12789190a350565b60008181526001830160205260408120546115a257508154600181810184556000848152602080822090930184905584548482528286019093526040902091909155610849565b506000610849565b6060816000018054806020026020016040519081016040528092919081815260200182805480156115fa57602002820191906000526020600020905b8154815260200190600101908083116115e6575b50505050509050919050565b50805461161290611c6a565b6000825580601f10611622575050565b601f0160209004906000526020600020908101906113a2919061168b565b82805482825590600052602060002090810192821561167b579160200282015b8281111561167b578251825591602001919060010190611660565b5061168792915061168b565b5090565b5b80821115611687576000815560010161168c565b60008083601f8401126116b257600080fd5b50813567ffffffffffffffff8111156116ca57600080fd5b6020830191508360208260051b85010111156116e557600080fd5b9250929050565b6000806000806040858703121561170257600080fd5b843567ffffffffffffffff8082111561171a57600080fd5b611726888389016116a0565b9096509450602087013591508082111561173f57600080fd5b5061174c878288016116a0565b95989497509550505050565b60006080828403121561176a57600080fd5b50919050565b60006020828403121561178257600080fd5b5035919050565b6000815180845260005b818110156117af57602081850181015186830182015201611793565b5060006020828601015260207fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0601f83011685010191505092915050565b60208152600061143c6020830184611789565b6000806020838503121561181357600080fd5b823567ffffffffffffffff81111561182a57600080fd5b611836858286016116a0565b90969095509350505050565b6000806040838503121561185557600080fd5b50508035926020909101359150565b6000602080835260a0830184518285015281850151604085015273ffffffffffffffffffffffffffffffffffffffff6040860151166060850152606085015160808086015281815180845260c0870191508483019350600092505b808310156118df57835182529284019260019290920191908401906118bf565b509695505050505050565b6020815273ffffffffffffffffffffffffffffffffffffffff8251166020820152600060208301516040808401526119256060840182611789565b949350505050565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052602160045260246000fd5b80518252602081015160208301526040810151600281106119a6577f4e487b7100000000000000000000000000000000000000000000000000000000600052602160045260246000fd5b604083015260609081015173ffffffffffffffffffffffffffffffffffffffff16910152565b60808101610849828461195c565b6020808252825182820181905260009190848201906040850190845b81811015611a1c57611a0983855161195c565b92840192608092909201916001016119f6565b50909695505050505050565b73ffffffffffffffffffffffffffffffffffffffff811681146113a257600080fd5b600060208284031215611a5c57600080fd5b813561143c81611
a28565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052603260045260246000fd5b600082357fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc1833603018112611aca57600080fd5b9190910192915050565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052604160045260246000fd5b6040805190810167ffffffffffffffff81118282101715611b2657611b26611ad4565b60405290565b6040516080810167ffffffffffffffff81118282101715611b2657611b26611ad4565b604051601f82017fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe016810167ffffffffffffffff81118282101715611b9657611b96611ad4565b604052919050565b600060408236031215611bb057600080fd5b611bb8611b03565b8235611bc381611a28565b815260208381013567ffffffffffffffff80821115611be157600080fd5b9085019036601f830112611bf457600080fd5b813581811115611c0657611c06611ad4565b611c36847fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0601f84011601611b4f565b91508082523684828501011115611c4c57600080fd5b80848401858401376000908201840152918301919091525092915050565b600181811c90821680611c7e57607f821691505b60208210810361176a577f4e487b7100000000000000000000000000000000000000000000000000000000600052602260045260246000fd5b6000602080835260008454611ccb81611c6a565b80848701526040600180841660008114611cec5760018114611d2457611d52565b7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff008516838a01528284151560051b8a01019550611d52565b896000528660002060005b85811015611d4a5781548b8201860152908301908801611d2f565b8a0184019650505b509398975050505050505050565b601f82111561090d57600081815260208120601f850160051c81016020861015611d875750805b601f850160051c820191505b818110156105e957828155600101611d93565b815167ffffffffffffffff811115611dc057611dc0611ad4565b611dd481611dce8454611c6a565b84611d60565b602080601f831160018114611e275760008415611df15750858301515b7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff600386901b1c1916600185901b1785556105e9565b6000858152602081207fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe08616915b82811015611e7457888601518255948401946001909101908401611e55565b5085821015611eb057878501517fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff600388901b60f8161c191681555b5050505050600190811b01905550565b8281526040602082015260006119256040830184611789565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052601160045260246000fd5b60007fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff8203611f3957611f39611ed9565b5060010190565b600060208284031215611f5257600080fd5b8151801515811461143c57600080fd5b813581556020820135600182015560028101604083013560028110611f8657600080fd5b81546060850135611f9681611a28565b74ffffffffffffffffffffffffffffffffffffffff008160081b1660ff84167fffffffffffffffffffffff000000000000000000000000000000000000000000841617178455505050505050565b600082357fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff81833603018112611aca57600080fd5b60006080823603121561202a57600080fd5b612032611b2c565b8235815260208084013581830152604084013561204e81611a28565b6040830152606084013567ffffffffffffffff8082111561206e57600080fd5b9085019036601f83011261208157600080fd5b81358181111561209357612093611ad4565b8060051b91506120a4848301611b4f565b81815291830184019184810190368411156120be57600080fd5b938501935b838510156120dc578435825293850193908501906120c3565b606087015250939695505050505050565b6020808252825182820181905260009190848201906040850190845b81811015611a1c57835183529284019291840191600101612109565b8181038181111561084957610849611ed956fea164736f6c6343000813000a", + ABI: 
"[{\"inputs\":[],\"name\":\"AccessForbidden\",\"type\":\"error\"},{\"inputs\":[{\"internalType\":\"bytes32\",\"name\":\"hashedCapabilityId\",\"type\":\"bytes32\"}],\"name\":\"CapabilityAlreadyDeprecated\",\"type\":\"error\"},{\"inputs\":[],\"name\":\"CapabilityAlreadyExists\",\"type\":\"error\"},{\"inputs\":[{\"internalType\":\"bytes32\",\"name\":\"hashedCapabilityId\",\"type\":\"bytes32\"}],\"name\":\"CapabilityDoesNotExist\",\"type\":\"error\"},{\"inputs\":[{\"internalType\":\"address\",\"name\":\"proposedConfigurationContract\",\"type\":\"address\"}],\"name\":\"InvalidCapabilityConfigurationContractInterface\",\"type\":\"error\"},{\"inputs\":[{\"internalType\":\"bytes32[]\",\"name\":\"hashedCapabilityIds\",\"type\":\"bytes32[]\"}],\"name\":\"InvalidNodeCapabilities\",\"type\":\"error\"},{\"inputs\":[],\"name\":\"InvalidNodeOperatorAdmin\",\"type\":\"error\"},{\"inputs\":[{\"internalType\":\"bytes32\",\"name\":\"p2pId\",\"type\":\"bytes32\"}],\"name\":\"InvalidNodeP2PId\",\"type\":\"error\"},{\"inputs\":[],\"name\":\"InvalidNodeSigner\",\"type\":\"error\"},{\"inputs\":[{\"internalType\":\"uint256\",\"name\":\"lengthOne\",\"type\":\"uint256\"},{\"internalType\":\"uint256\",\"name\":\"lengthTwo\",\"type\":\"uint256\"}],\"name\":\"LengthMismatch\",\"type\":\"error\"},{\"anonymous\":false,\"inputs\":[{\"indexed\":true,\"internalType\":\"bytes32\",\"name\":\"hashedCapabilityId\",\"type\":\"bytes32\"}],\"name\":\"CapabilityAdded\",\"type\":\"event\"},{\"anonymous\":false,\"inputs\":[{\"indexed\":true,\"internalType\":\"bytes32\",\"name\":\"hashedCapabilityId\",\"type\":\"bytes32\"}],\"name\":\"CapabilityDeprecated\",\"type\":\"event\"},{\"anonymous\":false,\"inputs\":[{\"indexed\":false,\"internalType\":\"bytes32\",\"name\":\"p2pId\",\"type\":\"bytes32\"},{\"indexed\":false,\"internalType\":\"uint256\",\"name\":\"nodeOperatorId\",\"type\":\"uint256\"}],\"name\":\"NodeAdded\",\"type\":\"event\"},{\"anonymous\":false,\"inputs\":[{\"indexed\":false,\"internalType\":\"uint256\",\"name\":\"nodeOperatorId\",\"type\":\"uint256\"},{\"indexed\":true,\"internalType\":\"address\",\"name\":\"admin\",\"type\":\"address\"},{\"indexed\":false,\"internalType\":\"string\",\"name\":\"name\",\"type\":\"string\"}],\"name\":\"NodeOperatorAdded\",\"type\":\"event\"},{\"anonymous\":false,\"inputs\":[{\"indexed\":false,\"internalType\":\"uint256\",\"name\":\"nodeOperatorId\",\"type\":\"uint256\"}],\"name\":\"NodeOperatorRemoved\",\"type\":\"event\"},{\"anonymous\":false,\"inputs\":[{\"indexed\":false,\"internalType\":\"uint256\",\"name\":\"nodeOperatorId\",\"type\":\"uint256\"},{\"indexed\":true,\"internalType\":\"address\",\"name\":\"admin\",\"type\":\"address\"},{\"indexed\":false,\"internalType\":\"string\",\"name\":\"name\",\"type\":\"string\"}],\"name\":\"NodeOperatorUpdated\",\"type\":\"event\"},{\"anonymous\":false,\"inputs\":[{\"indexed\":false,\"internalType\":\"bytes32\",\"name\":\"p2pId\",\"type\":\"bytes32\"},{\"indexed\":false,\"internalType\":\"uint256\",\"name\":\"nodeOperatorId\",\"type\":\"uint256\"},{\"indexed\":false,\"internalType\":\"address\",\"name\":\"signer\",\"type\":\"address\"}],\"name\":\"NodeUpdated\",\"type\":\"event\"},{\"anonymous\":false,\"inputs\":[{\"indexed\":true,\"internalType\":\"address\",\"name\":\"from\",\"type\":\"address\"},{\"indexed\":true,\"internalType\":\"address\",\"name\":\"to\",\"type\":\"address\"}],\"name\":\"OwnershipTransferRequested\",\"type\":\"event\"},{\"anonymous\":false,\"inputs\":[{\"indexed\":true,\"internalType\":\"address\",\"name\":\"from\",\"type\":\
"address\"},{\"indexed\":true,\"internalType\":\"address\",\"name\":\"to\",\"type\":\"address\"}],\"name\":\"OwnershipTransferred\",\"type\":\"event\"},{\"inputs\":[],\"name\":\"acceptOwnership\",\"outputs\":[],\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"inputs\":[{\"components\":[{\"internalType\":\"bytes32\",\"name\":\"labelledName\",\"type\":\"bytes32\"},{\"internalType\":\"bytes32\",\"name\":\"version\",\"type\":\"bytes32\"},{\"internalType\":\"enumCapabilityRegistry.CapabilityResponseType\",\"name\":\"responseType\",\"type\":\"uint8\"},{\"internalType\":\"address\",\"name\":\"configurationContract\",\"type\":\"address\"}],\"internalType\":\"structCapabilityRegistry.Capability\",\"name\":\"capability\",\"type\":\"tuple\"}],\"name\":\"addCapability\",\"outputs\":[],\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"inputs\":[{\"components\":[{\"internalType\":\"address\",\"name\":\"admin\",\"type\":\"address\"},{\"internalType\":\"string\",\"name\":\"name\",\"type\":\"string\"}],\"internalType\":\"structCapabilityRegistry.NodeOperator[]\",\"name\":\"nodeOperators\",\"type\":\"tuple[]\"}],\"name\":\"addNodeOperators\",\"outputs\":[],\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"inputs\":[{\"components\":[{\"internalType\":\"uint256\",\"name\":\"nodeOperatorId\",\"type\":\"uint256\"},{\"internalType\":\"bytes32\",\"name\":\"p2pId\",\"type\":\"bytes32\"},{\"internalType\":\"address\",\"name\":\"signer\",\"type\":\"address\"},{\"internalType\":\"bytes32[]\",\"name\":\"supportedHashedCapabilityIds\",\"type\":\"bytes32[]\"}],\"internalType\":\"structCapabilityRegistry.Node[]\",\"name\":\"nodes\",\"type\":\"tuple[]\"}],\"name\":\"addNodes\",\"outputs\":[],\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"bytes32\",\"name\":\"hashedCapabilityId\",\"type\":\"bytes32\"}],\"name\":\"deprecateCapability\",\"outputs\":[],\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"inputs\":[],\"name\":\"getCapabilities\",\"outputs\":[{\"components\":[{\"internalType\":\"bytes32\",\"name\":\"labelledName\",\"type\":\"bytes32\"},{\"internalType\":\"bytes32\",\"name\":\"version\",\"type\":\"bytes32\"},{\"internalType\":\"enumCapabilityRegistry.CapabilityResponseType\",\"name\":\"responseType\",\"type\":\"uint8\"},{\"internalType\":\"address\",\"name\":\"configurationContract\",\"type\":\"address\"}],\"internalType\":\"structCapabilityRegistry.Capability[]\",\"name\":\"\",\"type\":\"tuple[]\"}],\"stateMutability\":\"view\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"bytes32\",\"name\":\"hashedId\",\"type\":\"bytes32\"}],\"name\":\"getCapability\",\"outputs\":[{\"components\":[{\"internalType\":\"bytes32\",\"name\":\"labelledName\",\"type\":\"bytes32\"},{\"internalType\":\"bytes32\",\"name\":\"version\",\"type\":\"bytes32\"},{\"internalType\":\"enumCapabilityRegistry.CapabilityResponseType\",\"name\":\"responseType\",\"type\":\"uint8\"},{\"internalType\":\"address\",\"name\":\"configurationContract\",\"type\":\"address\"}],\"internalType\":\"structCapabilityRegistry.Capability\",\"name\":\"\",\"type\":\"tuple\"}],\"stateMutability\":\"view\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"bytes32\",\"name\":\"labelledName\",\"type\":\"bytes32\"},{\"internalType\":\"bytes32\",\"name\":\"version\",\"type\":\"bytes32\"}],\"name\":\"getHashedCapabilityId\",\"outputs\":[{\"internalType\":\"bytes32\",\"name\":\"\",\"type\":\"bytes32\"}],\"stateMutability\":\"pure\",\"type\":\"function\"},{\"inputs\":[{\"intern
alType\":\"bytes32\",\"name\":\"p2pId\",\"type\":\"bytes32\"}],\"name\":\"getNode\",\"outputs\":[{\"components\":[{\"internalType\":\"uint256\",\"name\":\"nodeOperatorId\",\"type\":\"uint256\"},{\"internalType\":\"bytes32\",\"name\":\"p2pId\",\"type\":\"bytes32\"},{\"internalType\":\"address\",\"name\":\"signer\",\"type\":\"address\"},{\"internalType\":\"bytes32[]\",\"name\":\"supportedHashedCapabilityIds\",\"type\":\"bytes32[]\"}],\"internalType\":\"structCapabilityRegistry.Node\",\"name\":\"\",\"type\":\"tuple\"}],\"stateMutability\":\"view\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"uint256\",\"name\":\"nodeOperatorId\",\"type\":\"uint256\"}],\"name\":\"getNodeOperator\",\"outputs\":[{\"components\":[{\"internalType\":\"address\",\"name\":\"admin\",\"type\":\"address\"},{\"internalType\":\"string\",\"name\":\"name\",\"type\":\"string\"}],\"internalType\":\"structCapabilityRegistry.NodeOperator\",\"name\":\"\",\"type\":\"tuple\"}],\"stateMutability\":\"view\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"bytes32\",\"name\":\"hashedCapabilityId\",\"type\":\"bytes32\"}],\"name\":\"isCapabilityDeprecated\",\"outputs\":[{\"internalType\":\"bool\",\"name\":\"\",\"type\":\"bool\"}],\"stateMutability\":\"view\",\"type\":\"function\"},{\"inputs\":[],\"name\":\"owner\",\"outputs\":[{\"internalType\":\"address\",\"name\":\"\",\"type\":\"address\"}],\"stateMutability\":\"view\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"uint256[]\",\"name\":\"nodeOperatorIds\",\"type\":\"uint256[]\"}],\"name\":\"removeNodeOperators\",\"outputs\":[],\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"address\",\"name\":\"to\",\"type\":\"address\"}],\"name\":\"transferOwnership\",\"outputs\":[],\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"inputs\":[],\"name\":\"typeAndVersion\",\"outputs\":[{\"internalType\":\"string\",\"name\":\"\",\"type\":\"string\"}],\"stateMutability\":\"pure\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"uint256[]\",\"name\":\"nodeOperatorIds\",\"type\":\"uint256[]\"},{\"components\":[{\"internalType\":\"address\",\"name\":\"admin\",\"type\":\"address\"},{\"internalType\":\"string\",\"name\":\"name\",\"type\":\"string\"}],\"internalType\":\"structCapabilityRegistry.NodeOperator[]\",\"name\":\"nodeOperators\",\"type\":\"tuple[]\"}],\"name\":\"updateNodeOperators\",\"outputs\":[],\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"inputs\":[{\"components\":[{\"internalType\":\"uint256\",\"name\":\"nodeOperatorId\",\"type\":\"uint256\"},{\"internalType\":\"bytes32\",\"name\":\"p2pId\",\"type\":\"bytes32\"},{\"internalType\":\"address\",\"name\":\"signer\",\"type\":\"address\"},{\"internalType\":\"bytes32[]\",\"name\":\"supportedHashedCapabilityIds\",\"type\":\"bytes32[]\"}],\"internalType\":\"structCapabilityRegistry.Node[]\",\"name\":\"nodes\",\"type\":\"tuple[]\"}],\"name\":\"updateNodes\",\"outputs\":[],\"stateMutability\":\"nonpayable\",\"type\":\"function\"}]", + Bin: 
"0x60806040523480156200001157600080fd5b503380600081620000695760405162461bcd60e51b815260206004820152601860248201527f43616e6e6f7420736574206f776e657220746f207a65726f000000000000000060448201526064015b60405180910390fd5b600080546001600160a01b0319166001600160a01b03848116919091179091558116156200009c576200009c81620000a5565b50505062000150565b336001600160a01b03821603620000ff5760405162461bcd60e51b815260206004820152601760248201527f43616e6e6f74207472616e7366657220746f2073656c66000000000000000000604482015260640162000060565b600180546001600160a01b0319166001600160a01b0383811691821790925560008054604051929316917fed8889f560326eb138920d842192f0eb3dd22b4f139c87a2c57538e05bae12789190a350565b6125e680620001606000396000f3fe608060405234801561001057600080fd5b506004361061011b5760003560e01c806365c14dc7116100b2578063ae3c241c11610081578063c2d483a111610066578063c2d483a1146102d3578063ddbe4f82146102e6578063f2fde38b146102fb57600080fd5b8063ae3c241c146102ad578063b38e51f6146102c057600080fd5b806365c14dc71461023d57806379ba50971461025d5780638da5cb5b146102655780639cb7c5f41461028d57600080fd5b80631cdf6343116100ee5780631cdf6343146101af57806336b402fb146101c2578063398f37731461020a57806350c946fe1461021d57600080fd5b80630c5801e314610120578063117392ce146101355780631257001114610148578063181f5a7714610170575b600080fd5b61013361012e366004611b8d565b61030e565b005b610133610143366004611bf9565b61061f565b61015b610156366004611c11565b61086a565b60405190151581526020015b60405180910390f35b604080518082018252601881527f4361706162696c697479526567697374727920312e302e300000000000000000602082015290516101679190611c8e565b6101336101bd366004611ca1565b61087d565b6101fc6101d0366004611ce3565b604080516020808201949094528082019290925280518083038201815260609092019052805191012090565b604051908152602001610167565b610133610218366004611ca1565b610940565b61023061022b366004611c11565b610ad9565b6040516101679190611d05565b61025061024b366004611c11565b610ba1565b6040516101679190611d8b565b610133610c7e565b60005460405173ffffffffffffffffffffffffffffffffffffffff9091168152602001610167565b6102a061029b366004611c11565b610d7b565b6040516101679190611e6d565b6101336102bb366004611c11565b610e25565b6101336102ce366004611ca1565b610ef0565b6101336102e1366004611ca1565b61130c565b6102ee6116ed565b6040516101679190611e7b565b610133610309366004611eeb565b611832565b828114610356576040517fab8b67c600000000000000000000000000000000000000000000000000000000815260048101849052602481018290526044015b60405180910390fd5b6000805473ffffffffffffffffffffffffffffffffffffffff16905b8481101561061757600086868381811061038e5761038e611f08565b90506020020135905060008585848181106103ab576103ab611f08565b90506020028101906103bd9190611f37565b6103c69061203f565b805190915073ffffffffffffffffffffffffffffffffffffffff16610417576040517feeacd93900000000000000000000000000000000000000000000000000000000815260040160405180910390fd5b805173ffffffffffffffffffffffffffffffffffffffff16331480159061045457503373ffffffffffffffffffffffffffffffffffffffff851614155b1561048b576040517fef67f5d800000000000000000000000000000000000000000000000000000000815260040160405180910390fd5b805160008381526007602052604090205473ffffffffffffffffffffffffffffffffffffffff908116911614158061053d57506020808201516040516104d19201611c8e565b604080517fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe08184030181528282528051602091820120600086815260078352929092209192610524926001019101612158565b6040516020818303038152906040528051906020012014155b15610604578051600083815260076020908152604090912080547fffffffffffffffffffffffff00000000000000000000000000000000000000001673ffffffffffffffffffffffffffffffffffffffff90
93169290921782558201516001909101906105aa9082612247565b50806000015173ffffffffffffffffffffffffffffffffffffffff167f14c8f513e8a6d86d2d16b0cb64976de4e72386c4f8068eca3b7354373f8fe97a8383602001516040516105fb929190612361565b60405180910390a25b505080610610906123a9565b9050610372565b505050505050565b610627611846565b60408051823560208281019190915280840135828401528251808303840181526060909201909252805191012061065f6003826118c9565b15610696576040517fe288638f00000000000000000000000000000000000000000000000000000000815260040160405180910390fd5b60006106a86080840160608501611eeb565b73ffffffffffffffffffffffffffffffffffffffff1614610813576106d36080830160608401611eeb565b73ffffffffffffffffffffffffffffffffffffffff163b15806107b357506107016080830160608401611eeb565b6040517f01ffc9a70000000000000000000000000000000000000000000000000000000081527f884efe6100000000000000000000000000000000000000000000000000000000600482015273ffffffffffffffffffffffffffffffffffffffff91909116906301ffc9a790602401602060405180830381865afa15801561078d573d6000803e3d6000fd5b505050506040513d601f19601f820116820180604052508101906107b191906123e1565b155b15610813576107c86080830160608401611eeb565b6040517fabb5e3fd00000000000000000000000000000000000000000000000000000000815273ffffffffffffffffffffffffffffffffffffffff909116600482015260240161034d565b61081e6003826118e4565b50600081815260026020526040902082906108398282612403565b505060405181907f65610e5677eedff94555572640e442f89848a109ef8593fa927ac30b2565ff0690600090a25050565b60006108776005836118c9565b92915050565b610885611846565b60005b8181101561093b5760008383838181106108a4576108a4611f08565b60209081029290920135600081815260079093526040832080547fffffffffffffffffffffffff00000000000000000000000000000000000000001681559093509190506108f56001830182611aa7565b50506040518181527f1e5877d7b3001d1569bf733b76c7eceda58bd6c031e5b8d0b7042308ba2e9d4f9060200160405180910390a150610934816123a9565b9050610888565b505050565b610948611846565b60005b8181101561093b57600083838381811061096757610967611f08565b90506020028101906109799190611f37565b6109829061203f565b805190915073ffffffffffffffffffffffffffffffffffffffff166109d3576040517feeacd93900000000000000000000000000000000000000000000000000000000815260040160405180910390fd5b600954604080518082018252835173ffffffffffffffffffffffffffffffffffffffff908116825260208086015181840190815260008681526007909252939020825181547fffffffffffffffffffffffff000000000000000000000000000000000000000016921691909117815591519091906001820190610a569082612247565b50905050600960008154610a69906123a9565b909155508151602083015160405173ffffffffffffffffffffffffffffffffffffffff909216917fda6697b182650034bd205cdc2dbfabb06bdb3a0a83a2b45bfefa3c4881284e0b91610abe91859190612361565b60405180910390a2505080610ad2906123a9565b905061094b565b6040805160808101825260008082526020820181905291810191909152606080820152600082815260086020908152604091829020825160808101845281548152600182015481840152600282015473ffffffffffffffffffffffffffffffffffffffff16818501526003820180548551818602810186019096528086529194929360608601939290830182828015610b9157602002820191906000526020600020905b815481526020019060010190808311610b7d575b5050505050815250509050919050565b6040805180820190915260008152606060208201526000828152600760209081526040918290208251808401909352805473ffffffffffffffffffffffffffffffffffffffff1683526001810180549192840191610bfe9061210b565b80601f0160208091040260200160405190810160405280929190818152602001828054610c2a9061210b565b8015610b915780601f10610c4c57610100808354040283529160200191610b91565b820191906000526020600020905b815481529060010190602001808311610c5a57505050919092525091949350505050565b60015
473ffffffffffffffffffffffffffffffffffffffff163314610cff576040517f08c379a000000000000000000000000000000000000000000000000000000000815260206004820152601660248201527f4d7573742062652070726f706f736564206f776e657200000000000000000000604482015260640161034d565b60008054337fffffffffffffffffffffffff00000000000000000000000000000000000000008083168217845560018054909116905560405173ffffffffffffffffffffffffffffffffffffffff90921692909183917f8be0079c531659141344cd1fd0a4f28419497f9722a3daafe3b4186f6b6457e091a350565b604080516080808201835260008083526020808401829052838501829052606084018290528582526002808252918590208551938401865280548452600180820154928501929092529182015493949293919284019160ff1690811115610de457610de4611dce565b6001811115610df557610df5611dce565b815260029190910154610100900473ffffffffffffffffffffffffffffffffffffffff1660209091015292915050565b610e2d611846565b610e386003826118c9565b610e71576040517fe181733f0000000000000000000000000000000000000000000000000000000081526004810182905260240161034d565b610e7c6005826118c9565b15610eb6576040517f16950d1d0000000000000000000000000000000000000000000000000000000081526004810182905260240161034d565b610ec16005826118e4565b5060405181907fdcea1b78b6ddc31592a94607d537543fcaafda6cc52d6d5cc7bbfca1422baf2190600090a250565b60005b8181101561093b576000838383818110610f0f57610f0f611f08565b9050602002810190610f219190612485565b610f2a906124b9565b90506000610f4d60005473ffffffffffffffffffffffffffffffffffffffff1690565b825160009081526007602090815260408083208151808301909252805473ffffffffffffffffffffffffffffffffffffffff90811683526001820180549690911633149650939491939092840191610fa49061210b565b80601f0160208091040260200160405190810160405280929190818152602001828054610fd09061210b565b801561101d5780601f10610ff25761010080835404028352916020019161101d565b820191906000526020600020905b81548152906001019060200180831161100057829003601f168201915b50505050508152505090508115801561104d5750805173ffffffffffffffffffffffffffffffffffffffff163314155b15611084576040517fef67f5d800000000000000000000000000000000000000000000000000000000815260040160405180910390fd5b6020808401516000908152600890915260409020600301541515806110dd5783602001516040517f64e2ee9200000000000000000000000000000000000000000000000000000000815260040161034d91815260200190565b604084015173ffffffffffffffffffffffffffffffffffffffff1661112e576040517f8377314600000000000000000000000000000000000000000000000000000000815260040160405180910390fd5b8360600151516000036111735783606001516040517f3748d4c600000000000000000000000000000000000000000000000000000000815260040161034d919061258e565b60005b846060015151811015611200576111b48560600151828151811061119c5761119c611f08565b602002602001015160036118c990919063ffffffff16565b6111f05784606001516040517f3748d4c600000000000000000000000000000000000000000000000000000000815260040161034d919061258e565b6111f9816123a9565b9050611176565b506020848101805160009081526008835260409081902087518155915160018301558601516002820180547fffffffffffffffffffffffff00000000000000000000000000000000000000001673ffffffffffffffffffffffffffffffffffffffff909216919091179055606086015180518793611285926003850192910190611ae1565b509050507f6bbba867c646be512c2f3241e65fdffdefd5528d7e7939649e06e10ee5addc3e8460200151856000015186604001516040516112ef93929190928352602083019190915273ffffffffffffffffffffffffffffffffffffffff16604082015260600190565b60405180910390a15050505080611305906123a9565b9050610ef3565b60005b8181101561093b57600083838381811061132b5761132b611f08565b905060200281019061133d9190612485565b611346906124b9565b9050600061136960005473ffffffffffffffffffffffffffffffffffffffff1690565b825160009081526007
602090815260408083208151808301909252805473ffffffffffffffffffffffffffffffffffffffff908116835260018201805496909116331496509394919390928401916113c09061210b565b80601f01602080910402602001604051908101604052809291908181526020018280546113ec9061210b565b80156114395780601f1061140e57610100808354040283529160200191611439565b820191906000526020600020905b81548152906001019060200180831161141c57829003601f168201915b5050505050815250509050811580156114695750805173ffffffffffffffffffffffffffffffffffffffff163314155b156114a0576040517fef67f5d800000000000000000000000000000000000000000000000000000000815260040160405180910390fd5b602080840151600090815260089091526040902060030154151580806114c857506020840151155b156115075783602001516040517f64e2ee9200000000000000000000000000000000000000000000000000000000815260040161034d91815260200190565b604084015173ffffffffffffffffffffffffffffffffffffffff16611558576040517f8377314600000000000000000000000000000000000000000000000000000000815260040160405180910390fd5b83606001515160000361159d5783606001516040517f3748d4c600000000000000000000000000000000000000000000000000000000815260040161034d919061258e565b60005b846060015151811015611612576115c68560600151828151811061119c5761119c611f08565b6116025784606001516040517f3748d4c600000000000000000000000000000000000000000000000000000000815260040161034d919061258e565b61160b816123a9565b90506115a0565b506020848101805160009081526008835260409081902087518155915160018301558601516002820180547fffffffffffffffffffffffff00000000000000000000000000000000000000001673ffffffffffffffffffffffffffffffffffffffff909216919091179055606086015180518793611697926003850192910190611ae1565b505050602084810151855160408051928352928201527f5bfe8a52ad26ac6ee7b0cd46d2fd92be04735a31c45ef8aa3d4b7ea1b61bbc1f910160405180910390a150505050806116e6906123a9565b905061130f565b606060006116fb60036118f0565b9050600061170960056118fd565b825161171591906125c6565b67ffffffffffffffff81111561172d5761172d611f75565b60405190808252806020026020018201604052801561179d57816020015b6040805160808101825260008082526020808301829052928201819052606082015282527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff90920191018161174b5790505b5090506000805b83518110156118295760008482815181106117c1576117c1611f08565b602002602001015190506117df8160056118c990919063ffffffff16565b611818576117ec81610d7b565b8484815181106117fe576117fe611f08565b60200260200101819052508280611814906123a9565b9350505b50611822816123a9565b90506117a4565b50909392505050565b61183a611846565b61184381611907565b50565b60005473ffffffffffffffffffffffffffffffffffffffff1633146118c7576040517f08c379a000000000000000000000000000000000000000000000000000000000815260206004820152601660248201527f4f6e6c792063616c6c61626c65206279206f776e657200000000000000000000604482015260640161034d565b565b600081815260018301602052604081205415155b9392505050565b60006118dd83836119fc565b606060006118dd83611a4b565b6000610877825490565b3373ffffffffffffffffffffffffffffffffffffffff821603611986576040517f08c379a000000000000000000000000000000000000000000000000000000000815260206004820152601760248201527f43616e6e6f74207472616e7366657220746f2073656c66000000000000000000604482015260640161034d565b600180547fffffffffffffffffffffffff00000000000000000000000000000000000000001673ffffffffffffffffffffffffffffffffffffffff83811691821790925560008054604051929316917fed8889f560326eb138920d842192f0eb3dd22b4f139c87a2c57538e05bae12789190a350565b6000818152600183016020526040812054611a4357508154600181810184556000848152602080822090930184905584548482528286019093526040902091909155610877565b506000610877565b6060816000018054806020026020016040519081016040528
09291908181526020018280548015611a9b57602002820191906000526020600020905b815481526020019060010190808311611a87575b50505050509050919050565b508054611ab39061210b565b6000825580601f10611ac3575050565b601f0160209004906000526020600020908101906118439190611b2c565b828054828255906000526020600020908101928215611b1c579160200282015b82811115611b1c578251825591602001919060010190611b01565b50611b28929150611b2c565b5090565b5b80821115611b285760008155600101611b2d565b60008083601f840112611b5357600080fd5b50813567ffffffffffffffff811115611b6b57600080fd5b6020830191508360208260051b8501011115611b8657600080fd5b9250929050565b60008060008060408587031215611ba357600080fd5b843567ffffffffffffffff80821115611bbb57600080fd5b611bc788838901611b41565b90965094506020870135915080821115611be057600080fd5b50611bed87828801611b41565b95989497509550505050565b600060808284031215611c0b57600080fd5b50919050565b600060208284031215611c2357600080fd5b5035919050565b6000815180845260005b81811015611c5057602081850181015186830182015201611c34565b5060006020828601015260207fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0601f83011685010191505092915050565b6020815260006118dd6020830184611c2a565b60008060208385031215611cb457600080fd5b823567ffffffffffffffff811115611ccb57600080fd5b611cd785828601611b41565b90969095509350505050565b60008060408385031215611cf657600080fd5b50508035926020909101359150565b6000602080835260a0830184518285015281850151604085015273ffffffffffffffffffffffffffffffffffffffff6040860151166060850152606085015160808086015281815180845260c0870191508483019350600092505b80831015611d805783518252928401926001929092019190840190611d60565b509695505050505050565b6020815273ffffffffffffffffffffffffffffffffffffffff825116602082015260006020830151604080840152611dc66060840182611c2a565b949350505050565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052602160045260246000fd5b8051825260208101516020830152604081015160028110611e47577f4e487b7100000000000000000000000000000000000000000000000000000000600052602160045260246000fd5b604083015260609081015173ffffffffffffffffffffffffffffffffffffffff16910152565b608081016108778284611dfd565b6020808252825182820181905260009190848201906040850190845b81811015611ebd57611eaa838551611dfd565b9284019260809290920191600101611e97565b50909695505050505050565b73ffffffffffffffffffffffffffffffffffffffff8116811461184357600080fd5b600060208284031215611efd57600080fd5b81356118dd81611ec9565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052603260045260246000fd5b600082357fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc1833603018112611f6b57600080fd5b9190910192915050565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052604160045260246000fd5b6040805190810167ffffffffffffffff81118282101715611fc757611fc7611f75565b60405290565b6040516080810167ffffffffffffffff81118282101715611fc757611fc7611f75565b604051601f82017fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe016810167ffffffffffffffff8111828210171561203757612037611f75565b604052919050565b60006040823603121561205157600080fd5b612059611fa4565b823561206481611ec9565b815260208381013567ffffffffffffffff8082111561208257600080fd5b9085019036601f83011261209557600080fd5b8135818111156120a7576120a7611f75565b6120d7847fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0601f84011601611ff0565b915080825236848285010111156120ed57600080fd5b80848401858401376000908201840152918301919091525092915050565b600181811c9082168061211f57607f821691505b602082108103611c0b577f4e487b71000000000000000000000000000000000000000000000000000000006000526022600452602460
00fd5b600060208083526000845461216c8161210b565b8084870152604060018084166000811461218d57600181146121c5576121f3565b7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff008516838a01528284151560051b8a010195506121f3565b896000528660002060005b858110156121eb5781548b82018601529083019088016121d0565b8a0184019650505b509398975050505050505050565b601f82111561093b57600081815260208120601f850160051c810160208610156122285750805b601f850160051c820191505b8181101561061757828155600101612234565b815167ffffffffffffffff81111561226157612261611f75565b6122758161226f845461210b565b84612201565b602080601f8311600181146122c857600084156122925750858301515b7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff600386901b1c1916600185901b178555610617565b6000858152602081207fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe08616915b82811015612315578886015182559484019460019091019084016122f6565b508582101561235157878501517fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff600388901b60f8161c191681555b5050505050600190811b01905550565b828152604060208201526000611dc66040830184611c2a565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052601160045260246000fd5b60007fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff82036123da576123da61237a565b5060010190565b6000602082840312156123f357600080fd5b815180151581146118dd57600080fd5b81358155602082013560018201556002810160408301356002811061242757600080fd5b8154606085013561243781611ec9565b74ffffffffffffffffffffffffffffffffffffffff008160081b1660ff84167fffffffffffffffffffffff000000000000000000000000000000000000000000841617178455505050505050565b600082357fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff81833603018112611f6b57600080fd5b6000608082360312156124cb57600080fd5b6124d3611fcd565b823581526020808401358183015260408401356124ef81611ec9565b6040830152606084013567ffffffffffffffff8082111561250f57600080fd5b9085019036601f83011261252257600080fd5b81358181111561253457612534611f75565b8060051b9150612545848301611ff0565b818152918301840191848101903684111561255f57600080fd5b938501935b8385101561257d57843582529385019390850190612564565b606087015250939695505050505050565b6020808252825182820181905260009190848201906040850190845b81811015611ebd578351835292840192918401916001016125aa565b818103818111156108775761087761237a56fea164736f6c6343000813000a", } var CapabilityRegistryABI = CapabilityRegistryMetaData.ABI @@ -462,6 +462,18 @@ func (_CapabilityRegistry *CapabilityRegistryTransactorSession) UpdateNodeOperat return _CapabilityRegistry.Contract.UpdateNodeOperators(&_CapabilityRegistry.TransactOpts, nodeOperatorIds, nodeOperators) } +func (_CapabilityRegistry *CapabilityRegistryTransactor) UpdateNodes(opts *bind.TransactOpts, nodes []CapabilityRegistryNode) (*types.Transaction, error) { + return _CapabilityRegistry.contract.Transact(opts, "updateNodes", nodes) +} + +func (_CapabilityRegistry *CapabilityRegistrySession) UpdateNodes(nodes []CapabilityRegistryNode) (*types.Transaction, error) { + return _CapabilityRegistry.Contract.UpdateNodes(&_CapabilityRegistry.TransactOpts, nodes) +} + +func (_CapabilityRegistry *CapabilityRegistryTransactorSession) UpdateNodes(nodes []CapabilityRegistryNode) (*types.Transaction, error) { + return _CapabilityRegistry.Contract.UpdateNodes(&_CapabilityRegistry.TransactOpts, nodes) +} + type CapabilityRegistryCapabilityAddedIterator struct { Event *CapabilityRegistryCapabilityAdded @@ -1209,6 +1221,125 @@ func (_CapabilityRegistry *CapabilityRegistryFilterer) ParseNodeOperatorUpdated( return event, nil } +type 
CapabilityRegistryNodeUpdatedIterator struct { + Event *CapabilityRegistryNodeUpdated + + contract *bind.BoundContract + event string + + logs chan types.Log + sub ethereum.Subscription + done bool + fail error +} + +func (it *CapabilityRegistryNodeUpdatedIterator) Next() bool { + + if it.fail != nil { + return false + } + + if it.done { + select { + case log := <-it.logs: + it.Event = new(CapabilityRegistryNodeUpdated) + if err := it.contract.UnpackLog(it.Event, it.event, log); err != nil { + it.fail = err + return false + } + it.Event.Raw = log + return true + + default: + return false + } + } + + select { + case log := <-it.logs: + it.Event = new(CapabilityRegistryNodeUpdated) + if err := it.contract.UnpackLog(it.Event, it.event, log); err != nil { + it.fail = err + return false + } + it.Event.Raw = log + return true + + case err := <-it.sub.Err(): + it.done = true + it.fail = err + return it.Next() + } +} + +func (it *CapabilityRegistryNodeUpdatedIterator) Error() error { + return it.fail +} + +func (it *CapabilityRegistryNodeUpdatedIterator) Close() error { + it.sub.Unsubscribe() + return nil +} + +type CapabilityRegistryNodeUpdated struct { + P2pId [32]byte + NodeOperatorId *big.Int + Signer common.Address + Raw types.Log +} + +func (_CapabilityRegistry *CapabilityRegistryFilterer) FilterNodeUpdated(opts *bind.FilterOpts) (*CapabilityRegistryNodeUpdatedIterator, error) { + + logs, sub, err := _CapabilityRegistry.contract.FilterLogs(opts, "NodeUpdated") + if err != nil { + return nil, err + } + return &CapabilityRegistryNodeUpdatedIterator{contract: _CapabilityRegistry.contract, event: "NodeUpdated", logs: logs, sub: sub}, nil +} + +func (_CapabilityRegistry *CapabilityRegistryFilterer) WatchNodeUpdated(opts *bind.WatchOpts, sink chan<- *CapabilityRegistryNodeUpdated) (event.Subscription, error) { + + logs, sub, err := _CapabilityRegistry.contract.WatchLogs(opts, "NodeUpdated") + if err != nil { + return nil, err + } + return event.NewSubscription(func(quit <-chan struct{}) error { + defer sub.Unsubscribe() + for { + select { + case log := <-logs: + + event := new(CapabilityRegistryNodeUpdated) + if err := _CapabilityRegistry.contract.UnpackLog(event, "NodeUpdated", log); err != nil { + return err + } + event.Raw = log + + select { + case sink <- event: + case err := <-sub.Err(): + return err + case <-quit: + return nil + } + case err := <-sub.Err(): + return err + case <-quit: + return nil + } + } + }), nil +} + +func (_CapabilityRegistry *CapabilityRegistryFilterer) ParseNodeUpdated(log types.Log) (*CapabilityRegistryNodeUpdated, error) { + event := new(CapabilityRegistryNodeUpdated) + if err := _CapabilityRegistry.contract.UnpackLog(event, "NodeUpdated", log); err != nil { + return nil, err + } + event.Raw = log + return event, nil +} + type CapabilityRegistryOwnershipTransferRequestedIterator struct { Event *CapabilityRegistryOwnershipTransferRequested @@ -1495,6 +1626,8 @@ func (_CapabilityRegistry *CapabilityRegistry) ParseLog(log types.Log) (generate return _CapabilityRegistry.ParseNodeOperatorRemoved(log) case _CapabilityRegistry.abi.Events["NodeOperatorUpdated"].ID: return _CapabilityRegistry.ParseNodeOperatorUpdated(log) + case _CapabilityRegistry.abi.Events["NodeUpdated"].ID: + return _CapabilityRegistry.ParseNodeUpdated(log) case _CapabilityRegistry.abi.Events["OwnershipTransferRequested"].ID: return _CapabilityRegistry.ParseOwnershipTransferRequested(log) case _CapabilityRegistry.abi.Events["OwnershipTransferred"].ID: @@ -1529,6 +1662,10 @@ func 
(CapabilityRegistryNodeOperatorUpdated) Topic() common.Hash { return common.HexToHash("0x14c8f513e8a6d86d2d16b0cb64976de4e72386c4f8068eca3b7354373f8fe97a") } +func (CapabilityRegistryNodeUpdated) Topic() common.Hash { + return common.HexToHash("0x6bbba867c646be512c2f3241e65fdffdefd5528d7e7939649e06e10ee5addc3e") +} + func (CapabilityRegistryOwnershipTransferRequested) Topic() common.Hash { return common.HexToHash("0xed8889f560326eb138920d842192f0eb3dd22b4f139c87a2c57538e05bae1278") } @@ -1574,6 +1711,8 @@ type CapabilityRegistryInterface interface { UpdateNodeOperators(opts *bind.TransactOpts, nodeOperatorIds []*big.Int, nodeOperators []CapabilityRegistryNodeOperator) (*types.Transaction, error) + UpdateNodes(opts *bind.TransactOpts, nodes []CapabilityRegistryNode) (*types.Transaction, error) + FilterCapabilityAdded(opts *bind.FilterOpts, hashedCapabilityId [][32]byte) (*CapabilityRegistryCapabilityAddedIterator, error) WatchCapabilityAdded(opts *bind.WatchOpts, sink chan<- *CapabilityRegistryCapabilityAdded, hashedCapabilityId [][32]byte) (event.Subscription, error) @@ -1610,6 +1749,12 @@ type CapabilityRegistryInterface interface { ParseNodeOperatorUpdated(log types.Log) (*CapabilityRegistryNodeOperatorUpdated, error) + FilterNodeUpdated(opts *bind.FilterOpts) (*CapabilityRegistryNodeUpdatedIterator, error) + + WatchNodeUpdated(opts *bind.WatchOpts, sink chan<- *CapabilityRegistryNodeUpdated) (event.Subscription, error) + + ParseNodeUpdated(log types.Log) (*CapabilityRegistryNodeUpdated, error) + FilterOwnershipTransferRequested(opts *bind.FilterOpts, from []common.Address, to []common.Address) (*CapabilityRegistryOwnershipTransferRequestedIterator, error) WatchOwnershipTransferRequested(opts *bind.WatchOpts, sink chan<- *CapabilityRegistryOwnershipTransferRequested, from []common.Address, to []common.Address) (event.Subscription, error) diff --git a/core/gethwrappers/keystone/generation/generated-wrapper-dependency-versions-do-not-edit.txt b/core/gethwrappers/keystone/generation/generated-wrapper-dependency-versions-do-not-edit.txt index 182c8da3f7e..e2bb9865809 100644 --- a/core/gethwrappers/keystone/generation/generated-wrapper-dependency-versions-do-not-edit.txt +++ b/core/gethwrappers/keystone/generation/generated-wrapper-dependency-versions-do-not-edit.txt @@ -1,4 +1,4 @@ GETH_VERSION: 1.13.8 forwarder: ../../../contracts/solc/v0.8.19/KeystoneForwarder/KeystoneForwarder.abi ../../../contracts/solc/v0.8.19/KeystoneForwarder/KeystoneForwarder.bin b4c900aae9e022f01abbac7993d41f93912247613ac6270b0c4da4ef6f2016e3 -keystone_capability_registry: ../../../contracts/solc/v0.8.19/CapabilityRegistry/CapabilityRegistry.abi ../../../contracts/solc/v0.8.19/CapabilityRegistry/CapabilityRegistry.bin aeb366351d69f320c610419a3e09a991bd6ea75690778835eb8f6421d1277f44 +keystone_capability_registry: ../../../contracts/solc/v0.8.19/CapabilityRegistry/CapabilityRegistry.abi ../../../contracts/solc/v0.8.19/CapabilityRegistry/CapabilityRegistry.bin 98d53a1997053a3037827ffd170c12f49d2005a5c266a1ea9eb69bb51e862f37 ocr3_capability: ../../../contracts/solc/v0.8.19/OCR3Capability/OCR3Capability.abi ../../../contracts/solc/v0.8.19/OCR3Capability/OCR3Capability.bin 9dcbdf55bd5729ba266148da3f17733eb592c871c2108ccca546618628fd9ad2 diff --git a/core/services/ccip/mocks/orm.go b/core/services/ccip/mocks/orm.go new file mode 100644 index 00000000000..b9afc6c8695 --- /dev/null +++ b/core/services/ccip/mocks/orm.go @@ -0,0 +1,164 @@ +// Code generated by mockery v2.42.2. DO NOT EDIT. 
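For orientation, a minimal sketch of how the mockery-generated CCIP ORM mock that follows is typically consumed in a test. This is illustrative only: the test name, the destination chain selector, and the returned price are made-up values, not taken from this change.

package ccip_test

import (
	"context"
	"math/big"
	"testing"

	"github.com/stretchr/testify/mock"
	"github.com/stretchr/testify/require"

	"github.com/smartcontractkit/chainlink/v2/core/chains/evm/assets"
	"github.com/smartcontractkit/chainlink/v2/core/services/ccip"
	ccipmocks "github.com/smartcontractkit/chainlink/v2/core/services/ccip/mocks"
)

// TestGasPriceReaderUsesORM is a hypothetical consumer of the generated mock.
func TestGasPriceReaderUsesORM(t *testing.T) {
	orm := ccipmocks.NewORM(t) // registers a cleanup that asserts all expectations were met

	// Expect one read for destination chain selector 1 and return a single canned price.
	orm.On("GetGasPricesByDestChain", mock.Anything, uint64(1)).
		Return([]ccip.GasPrice{{SourceChainSelector: 2, GasPrice: assets.NewWei(big.NewInt(1_000_000_000))}}, nil).
		Once()

	prices, err := orm.GetGasPricesByDestChain(context.Background(), uint64(1))
	require.NoError(t, err)
	require.Len(t, prices, 1)
}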
+ +package mocks + +import ( + context "context" + + ccip "github.com/smartcontractkit/chainlink/v2/core/services/ccip" + + mock "github.com/stretchr/testify/mock" + + time "time" +) + +// ORM is an autogenerated mock type for the ORM type +type ORM struct { + mock.Mock +} + +// ClearGasPricesByDestChain provides a mock function with given fields: ctx, destChainSelector, to +func (_m *ORM) ClearGasPricesByDestChain(ctx context.Context, destChainSelector uint64, to time.Time) error { + ret := _m.Called(ctx, destChainSelector, to) + + if len(ret) == 0 { + panic("no return value specified for ClearGasPricesByDestChain") + } + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, uint64, time.Time) error); ok { + r0 = rf(ctx, destChainSelector, to) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// ClearTokenPricesByDestChain provides a mock function with given fields: ctx, destChainSelector, to +func (_m *ORM) ClearTokenPricesByDestChain(ctx context.Context, destChainSelector uint64, to time.Time) error { + ret := _m.Called(ctx, destChainSelector, to) + + if len(ret) == 0 { + panic("no return value specified for ClearTokenPricesByDestChain") + } + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, uint64, time.Time) error); ok { + r0 = rf(ctx, destChainSelector, to) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// GetGasPricesByDestChain provides a mock function with given fields: ctx, destChainSelector +func (_m *ORM) GetGasPricesByDestChain(ctx context.Context, destChainSelector uint64) ([]ccip.GasPrice, error) { + ret := _m.Called(ctx, destChainSelector) + + if len(ret) == 0 { + panic("no return value specified for GetGasPricesByDestChain") + } + + var r0 []ccip.GasPrice + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, uint64) ([]ccip.GasPrice, error)); ok { + return rf(ctx, destChainSelector) + } + if rf, ok := ret.Get(0).(func(context.Context, uint64) []ccip.GasPrice); ok { + r0 = rf(ctx, destChainSelector) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]ccip.GasPrice) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, uint64) error); ok { + r1 = rf(ctx, destChainSelector) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetTokenPricesByDestChain provides a mock function with given fields: ctx, destChainSelector +func (_m *ORM) GetTokenPricesByDestChain(ctx context.Context, destChainSelector uint64) ([]ccip.TokenPrice, error) { + ret := _m.Called(ctx, destChainSelector) + + if len(ret) == 0 { + panic("no return value specified for GetTokenPricesByDestChain") + } + + var r0 []ccip.TokenPrice + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, uint64) ([]ccip.TokenPrice, error)); ok { + return rf(ctx, destChainSelector) + } + if rf, ok := ret.Get(0).(func(context.Context, uint64) []ccip.TokenPrice); ok { + r0 = rf(ctx, destChainSelector) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]ccip.TokenPrice) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, uint64) error); ok { + r1 = rf(ctx, destChainSelector) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// InsertGasPricesForDestChain provides a mock function with given fields: ctx, destChainSelector, jobId, gasPrices +func (_m *ORM) InsertGasPricesForDestChain(ctx context.Context, destChainSelector uint64, jobId int32, gasPrices []ccip.GasPriceUpdate) error { + ret := _m.Called(ctx, destChainSelector, jobId, gasPrices) + + if len(ret) == 0 { + panic("no return value specified for InsertGasPricesForDestChain") + } + + 
var r0 error + if rf, ok := ret.Get(0).(func(context.Context, uint64, int32, []ccip.GasPriceUpdate) error); ok { + r0 = rf(ctx, destChainSelector, jobId, gasPrices) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// InsertTokenPricesForDestChain provides a mock function with given fields: ctx, destChainSelector, jobId, tokenPrices +func (_m *ORM) InsertTokenPricesForDestChain(ctx context.Context, destChainSelector uint64, jobId int32, tokenPrices []ccip.TokenPriceUpdate) error { + ret := _m.Called(ctx, destChainSelector, jobId, tokenPrices) + + if len(ret) == 0 { + panic("no return value specified for InsertTokenPricesForDestChain") + } + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, uint64, int32, []ccip.TokenPriceUpdate) error); ok { + r0 = rf(ctx, destChainSelector, jobId, tokenPrices) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// NewORM creates a new instance of ORM. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. +func NewORM(t interface { + mock.TestingT + Cleanup(func()) +}) *ORM { + mock := &ORM{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/core/services/ccip/orm.go b/core/services/ccip/orm.go new file mode 100644 index 00000000000..8af7762b18d --- /dev/null +++ b/core/services/ccip/orm.go @@ -0,0 +1,163 @@ +package ccip + +import ( + "context" + "fmt" + "time" + + "github.com/smartcontractkit/chainlink-common/pkg/sqlutil" + + "github.com/smartcontractkit/chainlink/v2/core/chains/evm/assets" +) + +type GasPrice struct { + SourceChainSelector uint64 + GasPrice *assets.Wei + CreatedAt time.Time +} + +type GasPriceUpdate struct { + SourceChainSelector uint64 + GasPrice *assets.Wei +} + +type TokenPrice struct { + TokenAddr string + TokenPrice *assets.Wei + CreatedAt time.Time +} + +type TokenPriceUpdate struct { + TokenAddr string + TokenPrice *assets.Wei +} + +//go:generate mockery --quiet --name ORM --output ./mocks/ --case=underscore +type ORM interface { + GetGasPricesByDestChain(ctx context.Context, destChainSelector uint64) ([]GasPrice, error) + GetTokenPricesByDestChain(ctx context.Context, destChainSelector uint64) ([]TokenPrice, error) + + InsertGasPricesForDestChain(ctx context.Context, destChainSelector uint64, jobId int32, gasPrices []GasPriceUpdate) error + InsertTokenPricesForDestChain(ctx context.Context, destChainSelector uint64, jobId int32, tokenPrices []TokenPriceUpdate) error + + ClearGasPricesByDestChain(ctx context.Context, destChainSelector uint64, to time.Time) error + ClearTokenPricesByDestChain(ctx context.Context, destChainSelector uint64, to time.Time) error +} + +type orm struct { + ds sqlutil.DataSource +} + +var _ ORM = (*orm)(nil) + +func NewORM(ds sqlutil.DataSource) (ORM, error) { + if ds == nil { + return nil, fmt.Errorf("datasource to CCIP NewORM cannot be nil") + } + + return &orm{ + ds: ds, + }, nil +} + +func (o *orm) GetGasPricesByDestChain(ctx context.Context, destChainSelector uint64) ([]GasPrice, error) { + var gasPrices []GasPrice + stmt := ` + SELECT DISTINCT ON (source_chain_selector) + source_chain_selector, gas_price, created_at + FROM ccip.observed_gas_prices + WHERE chain_selector = $1 + ORDER BY source_chain_selector, created_at DESC; + ` + err := o.ds.SelectContext(ctx, &gasPrices, stmt, destChainSelector) + if err != nil { + return nil, err + } + + return gasPrices, nil +} + +func (o *orm) GetTokenPricesByDestChain(ctx 
context.Context, destChainSelector uint64) ([]TokenPrice, error) { + var tokenPrices []TokenPrice + stmt := ` + SELECT DISTINCT ON (token_addr) + token_addr, token_price, created_at + FROM ccip.observed_token_prices + WHERE chain_selector = $1 + ORDER BY token_addr, created_at DESC; + ` + err := o.ds.SelectContext(ctx, &tokenPrices, stmt, destChainSelector) + if err != nil { + return nil, err + } + + return tokenPrices, nil +} + +func (o *orm) InsertGasPricesForDestChain(ctx context.Context, destChainSelector uint64, jobId int32, gasPrices []GasPriceUpdate) error { + if len(gasPrices) == 0 { + return nil + } + + now := time.Now() + insertData := make([]map[string]interface{}, 0, len(gasPrices)) + for _, price := range gasPrices { + insertData = append(insertData, map[string]interface{}{ + "chain_selector": destChainSelector, + "job_id": jobId, + "source_chain_selector": price.SourceChainSelector, + "gas_price": price.GasPrice, + "created_at": now, + }) + } + + stmt := `INSERT INTO ccip.observed_gas_prices (chain_selector, job_id, source_chain_selector, gas_price, created_at) + VALUES (:chain_selector, :job_id, :source_chain_selector, :gas_price, :created_at);` + _, err := o.ds.NamedExecContext(ctx, stmt, insertData) + if err != nil { + err = fmt.Errorf("error inserting gas prices for job %d: %w", jobId, err) + } + + return err +} + +func (o *orm) InsertTokenPricesForDestChain(ctx context.Context, destChainSelector uint64, jobId int32, tokenPrices []TokenPriceUpdate) error { + if len(tokenPrices) == 0 { + return nil + } + + now := time.Now() + insertData := make([]map[string]interface{}, 0, len(tokenPrices)) + for _, price := range tokenPrices { + insertData = append(insertData, map[string]interface{}{ + "chain_selector": destChainSelector, + "job_id": jobId, + "token_addr": price.TokenAddr, + "token_price": price.TokenPrice, + "created_at": now, + }) + } + + stmt := `INSERT INTO ccip.observed_token_prices (chain_selector, job_id, token_addr, token_price, created_at) + VALUES (:chain_selector, :job_id, :token_addr, :token_price, :created_at);` + _, err := o.ds.NamedExecContext(ctx, stmt, insertData) + if err != nil { + err = fmt.Errorf("error inserting token prices for job %d: %w", jobId, err) + } + + return err +} + +func (o *orm) ClearGasPricesByDestChain(ctx context.Context, destChainSelector uint64, to time.Time) error { + stmt := `DELETE FROM ccip.observed_gas_prices WHERE chain_selector = $1 AND created_at < $2` + + _, err := o.ds.ExecContext(ctx, stmt, destChainSelector, to) + return err +} + +func (o *orm) ClearTokenPricesByDestChain(ctx context.Context, destChainSelector uint64, to time.Time) error { + stmt := `DELETE FROM ccip.observed_token_prices WHERE chain_selector = $1 AND created_at < $2` + + _, err := o.ds.ExecContext(ctx, stmt, destChainSelector, to) + return err +} diff --git a/core/services/ccip/orm_test.go b/core/services/ccip/orm_test.go new file mode 100644 index 00000000000..741cf4b5b38 --- /dev/null +++ b/core/services/ccip/orm_test.go @@ -0,0 +1,346 @@ +package ccip + +import ( + "math/big" + "math/rand" + "testing" + "time" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/smartcontractkit/chainlink-common/pkg/sqlutil" + + "github.com/smartcontractkit/chainlink/v2/core/chains/evm/assets" + "github.com/smartcontractkit/chainlink/v2/core/chains/evm/utils" + "github.com/smartcontractkit/chainlink/v2/core/internal/testutils" + "github.com/smartcontractkit/chainlink/v2/core/internal/testutils/pgtest" +) + +func setupORM(t 
*testing.T) (ORM, sqlutil.DataSource) { + t.Helper() + + db := pgtest.NewSqlxDB(t) + orm, err := NewORM(db) + + require.NoError(t, err) + + return orm, db +} + +func generateChainSelectors(n int) []uint64 { + selectors := make([]uint64, n) + for i := 0; i < n; i++ { + selectors[i] = rand.Uint64() + } + + return selectors +} + +func generateGasPriceUpdates(chainSelector uint64, n int) []GasPriceUpdate { + updates := make([]GasPriceUpdate, n) + for i := 0; i < n; i++ { + // gas prices can take up whole range of uint256 + uint256Max := new(big.Int).Sub(new(big.Int).Exp(big.NewInt(2), big.NewInt(256), nil), big.NewInt(1)) + row := GasPriceUpdate{ + SourceChainSelector: chainSelector, + GasPrice: assets.NewWei(new(big.Int).Sub(uint256Max, big.NewInt(int64(i)))), + } + updates[i] = row + } + + return updates +} + +func generateTokenAddresses(n int) []string { + addrs := make([]string, n) + for i := 0; i < n; i++ { + addrs[i] = utils.RandomAddress().Hex() + } + + return addrs +} + +func generateTokenPriceUpdates(tokenAddr string, n int) []TokenPriceUpdate { + updates := make([]TokenPriceUpdate, n) + for i := 0; i < n; i++ { + row := TokenPriceUpdate{ + TokenAddr: tokenAddr, + TokenPrice: assets.NewWei(new(big.Int).Mul(big.NewInt(1e18), big.NewInt(int64(i)))), + } + updates[i] = row + } + + return updates +} + +func getGasTableRowCount(t *testing.T, ds sqlutil.DataSource) int { + var count int + stmt := `SELECT COUNT(*) FROM ccip.observed_gas_prices;` + err := ds.QueryRowxContext(testutils.Context(t), stmt).Scan(&count) + require.NoError(t, err) + + return count +} + +func getTokenTableRowCount(t *testing.T, ds sqlutil.DataSource) int { + var count int + stmt := `SELECT COUNT(*) FROM ccip.observed_token_prices;` + err := ds.QueryRowxContext(testutils.Context(t), stmt).Scan(&count) + require.NoError(t, err) + + return count +} + +func TestInitORM(t *testing.T) { + t.Parallel() + + orm, _ := setupORM(t) + assert.NotNil(t, orm) +} + +func TestORM_EmptyGasPrices(t *testing.T) { + t.Parallel() + ctx := testutils.Context(t) + + orm, _ := setupORM(t) + + prices, err := orm.GetGasPricesByDestChain(ctx, 1) + assert.Equal(t, 0, len(prices)) + assert.NoError(t, err) +} + +func TestORM_EmptyTokenPrices(t *testing.T) { + t.Parallel() + ctx := testutils.Context(t) + + orm, _ := setupORM(t) + + prices, err := orm.GetTokenPricesByDestChain(ctx, 1) + assert.Equal(t, 0, len(prices)) + assert.NoError(t, err) +} + +func TestORM_InsertAndGetGasPrices(t *testing.T) { + t.Parallel() + ctx := testutils.Context(t) + + orm, db := setupORM(t) + + numJobs := 5 + numSourceChainSelectors := 10 + numUpdatesPerSourceSelector := 20 + destSelector := uint64(1) + + sourceSelectors := generateChainSelectors(numSourceChainSelectors) + + updates := make(map[uint64][]GasPriceUpdate) + for _, selector := range sourceSelectors { + updates[selector] = generateGasPriceUpdates(selector, numUpdatesPerSourceSelector) + } + + // 5 jobs, each inserting prices for 10 chains, with 20 updates per chain. 
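The assertions that follow hinge on simple counting: every job re-inserts every update for every source chain, while reads keep only the newest row per source_chain_selector. A minimal standalone sketch of that arithmetic, assuming the same parameter values used by the test above:

package main

import "fmt"

// Illustrative only: mirrors the test parameters above.
// Inserts accumulate one row per (job, source chain, update);
// GetGasPricesByDestChain returns the single newest row per source_chain_selector.
func main() {
	numJobs, numSourceChains, numUpdates := 5, 10, 20
	fmt.Println("rows written:", numJobs*numSourceChains*numUpdates) // 1000
	fmt.Println("prices read back:", numSourceChains)                // 10
}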
+ expectedPrices := make(map[uint64]GasPriceUpdate) + for i := 0; i < numJobs; i++ { + for selector, updatesPerSelector := range updates { + lastIndex := len(updatesPerSelector) - 1 + + err := orm.InsertGasPricesForDestChain(ctx, destSelector, int32(i), updatesPerSelector[:lastIndex]) + assert.NoError(t, err) + err = orm.InsertGasPricesForDestChain(ctx, destSelector, int32(i), updatesPerSelector[lastIndex:]) + assert.NoError(t, err) + + expectedPrices[selector] = updatesPerSelector[lastIndex] + } + } + + // verify number of rows inserted + numRows := getGasTableRowCount(t, db) + assert.Equal(t, numJobs*numSourceChainSelectors*numUpdatesPerSourceSelector, numRows) + + prices, err := orm.GetGasPricesByDestChain(ctx, destSelector) + assert.NoError(t, err) + // should return 1 price per source chain selector + assert.Equal(t, numSourceChainSelectors, len(prices)) + + // verify getGasPrices returns prices of latest timestamp + for _, price := range prices { + selector := price.SourceChainSelector + assert.Equal(t, expectedPrices[selector].GasPrice, price.GasPrice) + } + + // after the initial inserts, insert new round of prices, 1 price per selector this time + var combinedUpdates []GasPriceUpdate + for selector, updatesPerSelector := range updates { + combinedUpdates = append(combinedUpdates, updatesPerSelector[0]) + expectedPrices[selector] = updatesPerSelector[0] + } + + err = orm.InsertGasPricesForDestChain(ctx, destSelector, 1, combinedUpdates) + assert.NoError(t, err) + assert.Equal(t, numJobs*numSourceChainSelectors*numUpdatesPerSourceSelector+numSourceChainSelectors, getGasTableRowCount(t, db)) + + prices, err = orm.GetGasPricesByDestChain(ctx, destSelector) + assert.NoError(t, err) + assert.Equal(t, numSourceChainSelectors, len(prices)) + + for _, price := range prices { + selector := price.SourceChainSelector + assert.Equal(t, expectedPrices[selector].GasPrice, price.GasPrice) + } +} + +func TestORM_InsertAndDeleteGasPrices(t *testing.T) { + t.Parallel() + ctx := testutils.Context(t) + + orm, db := setupORM(t) + + numSourceChainSelectors := 10 + numUpdatesPerSourceSelector := 20 + destSelector := uint64(1) + + sourceSelectors := generateChainSelectors(numSourceChainSelectors) + + updates := make(map[uint64][]GasPriceUpdate) + for _, selector := range sourceSelectors { + updates[selector] = generateGasPriceUpdates(selector, numUpdatesPerSourceSelector) + } + + for _, updatesPerSelector := range updates { + err := orm.InsertGasPricesForDestChain(ctx, destSelector, 1, updatesPerSelector) + assert.NoError(t, err) + } + + interimTimeStamp := time.Now() + + // insert for the 2nd time after interimTimeStamp + for _, updatesPerSelector := range updates { + err := orm.InsertGasPricesForDestChain(ctx, destSelector, 1, updatesPerSelector) + assert.NoError(t, err) + } + + assert.Equal(t, 2*numSourceChainSelectors*numUpdatesPerSourceSelector, getGasTableRowCount(t, db)) + + // clear by interimTimeStamp should delete rows inserted before it + err := orm.ClearGasPricesByDestChain(ctx, destSelector, interimTimeStamp) + assert.NoError(t, err) + assert.Equal(t, numSourceChainSelectors*numUpdatesPerSourceSelector, getGasTableRowCount(t, db)) + + // clear by Now() should delete all rows + err = orm.ClearGasPricesByDestChain(ctx, destSelector, time.Now()) + assert.NoError(t, err) + assert.Equal(t, 0, getGasTableRowCount(t, db)) +} + +func TestORM_InsertAndGetTokenPrices(t *testing.T) { + t.Parallel() + ctx := testutils.Context(t) + + orm, db := setupORM(t) + + numJobs := 5 + numAddresses := 10 + 
numUpdatesPerAddress := 20 + destSelector := uint64(1) + + addrs := generateTokenAddresses(numAddresses) + + updates := make(map[string][]TokenPriceUpdate) + for _, addr := range addrs { + updates[addr] = generateTokenPriceUpdates(addr, numUpdatesPerAddress) + } + + // 5 jobs, each inserting prices for 10 chains, with 20 updates per chain. + expectedPrices := make(map[string]TokenPriceUpdate) + for i := 0; i < numJobs; i++ { + for addr, updatesPerAddr := range updates { + lastIndex := len(updatesPerAddr) - 1 + + err := orm.InsertTokenPricesForDestChain(ctx, destSelector, int32(i), updatesPerAddr[:lastIndex]) + assert.NoError(t, err) + err = orm.InsertTokenPricesForDestChain(ctx, destSelector, int32(i), updatesPerAddr[lastIndex:]) + assert.NoError(t, err) + + expectedPrices[addr] = updatesPerAddr[lastIndex] + } + } + + // verify number of rows inserted + numRows := getTokenTableRowCount(t, db) + assert.Equal(t, numJobs*numAddresses*numUpdatesPerAddress, numRows) + + prices, err := orm.GetTokenPricesByDestChain(ctx, destSelector) + assert.NoError(t, err) + // should return 1 price per source chain selector + assert.Equal(t, numAddresses, len(prices)) + + // verify getTokenPrices returns prices of latest timestamp + for _, price := range prices { + addr := price.TokenAddr + assert.Equal(t, expectedPrices[addr].TokenPrice, price.TokenPrice) + } + + // after the initial inserts, insert new round of prices, 1 price per selector this time + var combinedUpdates []TokenPriceUpdate + for addr, updatesPerAddr := range updates { + combinedUpdates = append(combinedUpdates, updatesPerAddr[0]) + expectedPrices[addr] = updatesPerAddr[0] + } + + err = orm.InsertTokenPricesForDestChain(ctx, destSelector, 1, combinedUpdates) + assert.NoError(t, err) + assert.Equal(t, numJobs*numAddresses*numUpdatesPerAddress+numAddresses, getTokenTableRowCount(t, db)) + + prices, err = orm.GetTokenPricesByDestChain(ctx, destSelector) + assert.NoError(t, err) + assert.Equal(t, numAddresses, len(prices)) + + for _, price := range prices { + addr := price.TokenAddr + assert.Equal(t, expectedPrices[addr].TokenPrice, price.TokenPrice) + } +} + +func TestORM_InsertAndDeleteTokenPrices(t *testing.T) { + t.Parallel() + ctx := testutils.Context(t) + + orm, db := setupORM(t) + + numAddresses := 10 + numUpdatesPerAddress := 20 + destSelector := uint64(1) + + addrs := generateTokenAddresses(numAddresses) + + updates := make(map[string][]TokenPriceUpdate) + for _, addr := range addrs { + updates[addr] = generateTokenPriceUpdates(addr, numUpdatesPerAddress) + } + + for _, updatesPerAddr := range updates { + err := orm.InsertTokenPricesForDestChain(ctx, destSelector, 1, updatesPerAddr) + assert.NoError(t, err) + } + + interimTimeStamp := time.Now() + + // insert for the 2nd time after interimTimeStamp + for _, updatesPerAddr := range updates { + err := orm.InsertTokenPricesForDestChain(ctx, destSelector, 1, updatesPerAddr) + assert.NoError(t, err) + } + + assert.Equal(t, 2*numAddresses*numUpdatesPerAddress, getTokenTableRowCount(t, db)) + + // clear by interimTimeStamp should delete rows inserted before it + err := orm.ClearTokenPricesByDestChain(ctx, destSelector, interimTimeStamp) + assert.NoError(t, err) + assert.Equal(t, numAddresses*numUpdatesPerAddress, getTokenTableRowCount(t, db)) + + // clear by Now() should delete all rows + err = orm.ClearTokenPricesByDestChain(ctx, destSelector, time.Now()) + assert.NoError(t, err) + assert.Equal(t, 0, getTokenTableRowCount(t, db)) +} diff --git a/core/services/chainlink/application.go 
b/core/services/chainlink/application.go index ae3db2e7a73..ef4b0d870dd 100644 --- a/core/services/chainlink/application.go +++ b/core/services/chainlink/application.go @@ -12,6 +12,7 @@ import ( "github.com/ethereum/go-ethereum/core/types" "github.com/google/uuid" "github.com/grafana/pyroscope-go" + "github.com/jonboulle/clockwork" "github.com/pkg/errors" "go.uber.org/multierr" "go.uber.org/zap/zapcore" @@ -63,6 +64,7 @@ import ( "github.com/smartcontractkit/chainlink/v2/core/services/vrf" "github.com/smartcontractkit/chainlink/v2/core/services/webhook" "github.com/smartcontractkit/chainlink/v2/core/services/workflows" + workflowstore "github.com/smartcontractkit/chainlink/v2/core/services/workflows/store" "github.com/smartcontractkit/chainlink/v2/core/sessions" "github.com/smartcontractkit/chainlink/v2/core/sessions/ldapauth" "github.com/smartcontractkit/chainlink/v2/core/sessions/localauth" @@ -319,6 +321,7 @@ func NewApplication(opts ApplicationOpts) (Application, error) { jobORM = job.NewORM(opts.DS, pipelineORM, bridgeORM, keyStore, globalLogger) txmORM = txmgr.NewTxStore(opts.DS, globalLogger) streamRegistry = streams.NewRegistry(globalLogger, pipelineRunner) + workflowORM = workflowstore.NewDBStore(opts.DS, clockwork.NewRealClock()) ) for _, chain := range legacyEVMChains.Slice() { @@ -388,6 +391,7 @@ func NewApplication(opts ApplicationOpts) (Application, error) { globalLogger, registry, legacyEVMChains, + workflowORM, func() *p2ptypes.PeerID { if externalPeerWrapper == nil { return nil diff --git a/core/services/ocr2/plugins/ocr2keeper/evmregistry/v21/encoding/interface.go b/core/services/ocr2/plugins/ocr2keeper/evmregistry/v21/encoding/interface.go index e942078fe54..39d738fa7c6 100644 --- a/core/services/ocr2/plugins/ocr2keeper/evmregistry/v21/encoding/interface.go +++ b/core/services/ocr2/plugins/ocr2keeper/evmregistry/v21/encoding/interface.go @@ -31,6 +31,7 @@ const ( UpkeepFailureReasonInvalidRevertDataInput UpkeepFailureReason = 34 UpkeepFailureReasonSimulationFailed UpkeepFailureReason = 35 UpkeepFailureReasonTxHashReorged UpkeepFailureReason = 36 + UpkeepFailureReasonGasPriceTooHigh UpkeepFailureReason = 37 // pipeline execution error NoPipelineError PipelineExecutionState = 0 diff --git a/core/services/ocr2/plugins/ocr2keeper/evmregistry/v21/gasprice/gasprice.go b/core/services/ocr2/plugins/ocr2keeper/evmregistry/v21/gasprice/gasprice.go new file mode 100644 index 00000000000..2c376443fa5 --- /dev/null +++ b/core/services/ocr2/plugins/ocr2keeper/evmregistry/v21/gasprice/gasprice.go @@ -0,0 +1,70 @@ +package gasprice + +import ( + "context" + "math/big" + + "github.com/smartcontractkit/chainlink/v2/core/cbor" + "github.com/smartcontractkit/chainlink/v2/core/chains/evm/assets" + "github.com/smartcontractkit/chainlink/v2/core/chains/evm/gas" + "github.com/smartcontractkit/chainlink/v2/core/logger" + "github.com/smartcontractkit/chainlink/v2/core/services/ocr2/plugins/ocr2keeper/evmregistry/v21/encoding" +) + +const ( + // feeLimit is a placeholder when getting current price from gas estimator. it does not impact gas price calculation + feeLimit = uint64(1_000_000) + // maxFeePrice is a placeholder when getting current price from gas estimator. it caps the returned gas price from + // the estimator. it's set to a very high value because the gas price will be compared with user-defined gas price + // later. 
+ maxFeePrice = 1_000_000_000_000_000 +) + +type UpkeepOffchainConfig struct { + MaxGasPrice *big.Int `json:"maxGasPrice" cbor:"maxGasPrice"` +} + +// CheckGasPrice retrieves the current gas price and compares it against the max gas price configured in the upkeep's offchain config. +// any error in decoding the offchain config will result in the max gas price check being disabled +func CheckGasPrice(ctx context.Context, upkeepId *big.Int, offchainConfigBytes []byte, ge gas.EvmFeeEstimator, lggr logger.Logger) encoding.UpkeepFailureReason { + if len(offchainConfigBytes) == 0 { + return encoding.UpkeepFailureReasonNone + } + + var offchainConfig UpkeepOffchainConfig + if err := cbor.ParseDietCBORToStruct(offchainConfigBytes, &offchainConfig); err != nil { + lggr.Errorw("failed to parse upkeep offchain config, gas price check is disabled", "upkeepId", upkeepId.String(), "err", err) + return encoding.UpkeepFailureReasonNone + } + if offchainConfig.MaxGasPrice == nil || offchainConfig.MaxGasPrice.Int64() <= 0 { + lggr.Warnw("maxGasPrice is not configured or incorrectly configured in upkeep offchain config, gas price check is disabled", "upkeepId", upkeepId.String()) + return encoding.UpkeepFailureReasonNone + } + lggr.Debugf("successfully decoded offchain config for %s, max gas price is %s", upkeepId.String(), offchainConfig.MaxGasPrice.String()) + + fee, _, err := ge.GetFee(ctx, []byte{}, feeLimit, assets.NewWei(big.NewInt(maxFeePrice))) + if err != nil { + lggr.Errorw("failed to get fee, gas price check is disabled", "upkeepId", upkeepId.String(), "err", err) + return encoding.UpkeepFailureReasonNone + } + + if fee.ValidDynamic() { + lggr.Debugf("current gas price EIP-1559 is fee cap %s, tip cap %s", fee.DynamicFeeCap.String(), fee.DynamicTipCap.String()) + if fee.DynamicFeeCap.Cmp(assets.NewWei(offchainConfig.MaxGasPrice)) > 0 { + // current gas price is higher than max gas price + lggr.Warnf("maxGasPrice %s for %s is LOWER than current gas price %d", offchainConfig.MaxGasPrice.String(), upkeepId.String(), fee.DynamicFeeCap.Int64()) + return encoding.UpkeepFailureReasonGasPriceTooHigh + } + lggr.Debugf("maxGasPrice %s for %s is HIGHER than current gas price %d", offchainConfig.MaxGasPrice.String(), upkeepId.String(), fee.DynamicFeeCap.Int64()) + } else { + lggr.Debugf("current gas price legacy is %s", fee.Legacy.String()) + if fee.Legacy.Cmp(assets.NewWei(offchainConfig.MaxGasPrice)) > 0 { + // current gas price is higher than max gas price + lggr.Warnf("maxGasPrice %s for %s is LOWER than current gas price %d", offchainConfig.MaxGasPrice.String(), upkeepId.String(), fee.Legacy.Int64()) + return encoding.UpkeepFailureReasonGasPriceTooHigh + } + lggr.Debugf("maxGasPrice %s for %s is HIGHER than current gas price %d", offchainConfig.MaxGasPrice.String(), upkeepId.String(), fee.Legacy.Int64()) + } + + return encoding.UpkeepFailureReasonNone +} diff --git a/core/services/ocr2/plugins/ocr2keeper/evmregistry/v21/gasprice/gasprice_test.go b/core/services/ocr2/plugins/ocr2keeper/evmregistry/v21/gasprice/gasprice_test.go new file mode 100644 index 00000000000..9b5640051df --- /dev/null +++ b/core/services/ocr2/plugins/ocr2keeper/evmregistry/v21/gasprice/gasprice_test.go @@ -0,0 +1,128 @@ +package gasprice + +import ( + "math/big" + "testing" + + "github.com/fxamacker/cbor/v2" + "github.com/pkg/errors" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" + + "github.com/smartcontractkit/chainlink/v2/core/chains/evm/assets" + "github.com/smartcontractkit/chainlink/v2/core/chains/evm/gas" + gasMocks 
"github.com/smartcontractkit/chainlink/v2/core/chains/evm/gas/mocks" + "github.com/smartcontractkit/chainlink/v2/core/internal/testutils" + "github.com/smartcontractkit/chainlink/v2/core/logger" + "github.com/smartcontractkit/chainlink/v2/core/services/ocr2/plugins/ocr2keeper/evmregistry/v21/encoding" +) + +type WrongOffchainConfig struct { + MaxGasPrice1 []int `json:"maxGasPrice1" cbor:"maxGasPrice1"` +} + +func TestGasPrice_Check(t *testing.T) { + lggr := logger.TestLogger(t) + uid, _ := new(big.Int).SetString("1843548457736589226156809205796175506139185429616502850435279853710366065936", 10) + + tests := []struct { + Name string + MaxGasPrice *big.Int + CurrentLegacyGasPrice *big.Int + CurrentDynamicGasPrice *big.Int + ExpectedResult encoding.UpkeepFailureReason + FailedToGetFee bool + NotConfigured bool + ParsingFailed bool + }{ + { + Name: "no offchain config", + ExpectedResult: encoding.UpkeepFailureReasonNone, + }, + { + Name: "maxGasPrice not configured in offchain config", + NotConfigured: true, + ExpectedResult: encoding.UpkeepFailureReasonNone, + }, + { + Name: "fail to parse offchain config", + ParsingFailed: true, + MaxGasPrice: big.NewInt(10_000_000_000), + ExpectedResult: encoding.UpkeepFailureReasonNone, + }, + { + Name: "fail to retrieve current gas price", + MaxGasPrice: big.NewInt(8_000_000_000), + FailedToGetFee: true, + ExpectedResult: encoding.UpkeepFailureReasonNone, + }, + { + Name: "current gas price is too high - legacy", + MaxGasPrice: big.NewInt(10_000_000_000), + CurrentLegacyGasPrice: big.NewInt(18_000_000_000), + ExpectedResult: encoding.UpkeepFailureReasonGasPriceTooHigh, + }, + { + Name: "current gas price is too high - dynamic", + MaxGasPrice: big.NewInt(10_000_000_000), + CurrentDynamicGasPrice: big.NewInt(15_000_000_000), + ExpectedResult: encoding.UpkeepFailureReasonGasPriceTooHigh, + }, + { + Name: "current gas price is less than user's max gas price - legacy", + MaxGasPrice: big.NewInt(8_000_000_000), + CurrentLegacyGasPrice: big.NewInt(5_000_000_000), + ExpectedResult: encoding.UpkeepFailureReasonNone, + }, + { + Name: "current gas price is less than user's max gas price - dynamic", + MaxGasPrice: big.NewInt(10_000_000_000), + CurrentDynamicGasPrice: big.NewInt(8_000_000_000), + ExpectedResult: encoding.UpkeepFailureReasonNone, + }, + } + for _, test := range tests { + t.Run(test.Name, func(t *testing.T) { + ctx := testutils.Context(t) + ge := gasMocks.NewEvmFeeEstimator(t) + if test.FailedToGetFee { + ge.On("GetFee", mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return( + gas.EvmFee{}, + feeLimit, + errors.New("failed to retrieve gas price"), + ) + } else if test.CurrentLegacyGasPrice != nil { + ge.On("GetFee", mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return( + gas.EvmFee{ + Legacy: assets.NewWei(test.CurrentLegacyGasPrice), + }, + feeLimit, + nil, + ) + } else if test.CurrentDynamicGasPrice != nil { + ge.On("GetFee", mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return( + gas.EvmFee{ + DynamicFeeCap: assets.NewWei(test.CurrentDynamicGasPrice), + DynamicTipCap: assets.NewWei(big.NewInt(1_000_000_000)), + }, + feeLimit, + nil, + ) + } + + var oc []byte + if test.ParsingFailed { + oc, _ = cbor.Marshal(WrongOffchainConfig{MaxGasPrice1: []int{1, 2, 3}}) + if len(oc) > 0 { + oc[len(oc)-1] = 0x99 + } + } else if test.NotConfigured { + oc = []byte{1, 2, 3, 4} // parsing this will set maxGasPrice field to nil + } else if test.MaxGasPrice != nil { + oc, _ = cbor.Marshal(UpkeepOffchainConfig{MaxGasPrice: 
test.MaxGasPrice}) + } + fr := CheckGasPrice(ctx, uid, oc, ge, lggr) + assert.Equal(t, test.ExpectedResult, fr) + }) + } +} diff --git a/core/services/ocr2/plugins/ocr2keeper/evmregistry/v21/registry.go b/core/services/ocr2/plugins/ocr2keeper/evmregistry/v21/registry.go index 206932cf543..5a6466a8b15 100644 --- a/core/services/ocr2/plugins/ocr2keeper/evmregistry/v21/registry.go +++ b/core/services/ocr2/plugins/ocr2keeper/evmregistry/v21/registry.go @@ -27,6 +27,7 @@ import ( ocr2keepers "github.com/smartcontractkit/chainlink-common/pkg/types/automation" "github.com/smartcontractkit/chainlink/v2/core/chains/evm/client" + "github.com/smartcontractkit/chainlink/v2/core/chains/evm/gas" "github.com/smartcontractkit/chainlink/v2/core/chains/evm/logpoller" "github.com/smartcontractkit/chainlink/v2/core/chains/legacyevm" "github.com/smartcontractkit/chainlink/v2/core/gethwrappers/generated" @@ -113,6 +114,7 @@ func NewEvmRegistry( bs: blockSub, finalityDepth: finalityDepth, streams: streams.NewStreamsLookup(mercuryConfig, blockSub, client.Client(), registry, lggr), + ge: client.GasEstimator(), } } @@ -194,6 +196,7 @@ type EvmRegistry struct { logEventProvider logprovider.LogEventProvider finalityDepth uint32 streams streams.Lookup + ge gas.EvmFeeEstimator } func (r *EvmRegistry) Name() string { @@ -627,3 +630,13 @@ func (r *EvmRegistry) fetchTriggerConfig(id *big.Int) ([]byte, error) { } return cfg, nil } + +// fetchUpkeepOffchainConfig fetches upkeep offchain config in raw bytes for an upkeep. +func (r *EvmRegistry) fetchUpkeepOffchainConfig(id *big.Int) ([]byte, error) { + opts := r.buildCallOpts(r.ctx, nil) + ui, err := r.registry.GetUpkeep(opts, id) + if err != nil { + return []byte{}, err + } + return ui.OffchainConfig, nil +} diff --git a/core/services/ocr2/plugins/ocr2keeper/evmregistry/v21/registry_check_pipeline.go b/core/services/ocr2/plugins/ocr2keeper/evmregistry/v21/registry_check_pipeline.go index 3e935d0adf1..e341730c794 100644 --- a/core/services/ocr2/plugins/ocr2keeper/evmregistry/v21/registry_check_pipeline.go +++ b/core/services/ocr2/plugins/ocr2keeper/evmregistry/v21/registry_check_pipeline.go @@ -16,6 +16,7 @@ import ( "github.com/smartcontractkit/chainlink/v2/core/services/ocr2/plugins/ocr2keeper/evmregistry/v21/core" "github.com/smartcontractkit/chainlink/v2/core/services/ocr2/plugins/ocr2keeper/evmregistry/v21/encoding" + "github.com/smartcontractkit/chainlink/v2/core/services/ocr2/plugins/ocr2keeper/evmregistry/v21/gasprice" ) const ( @@ -305,7 +306,19 @@ func (r *EvmRegistry) simulatePerformUpkeeps(ctx context.Context, checkResults [ block, _, upkeepId := r.getBlockAndUpkeepId(cr.UpkeepID, cr.Trigger) - opts := r.buildCallOpts(ctx, block) + oc, err := r.fetchUpkeepOffchainConfig(upkeepId) + if err != nil { + // this is mostly caused by RPC flakiness + r.lggr.Errorw("failed get offchain config, gas price check will be disabled", "err", err, "upkeepId", upkeepId, "block", block) + } + fr := gasprice.CheckGasPrice(ctx, upkeepId, oc, r.ge, r.lggr) + if uint8(fr) == uint8(encoding.UpkeepFailureReasonGasPriceTooHigh) { + r.lggr.Infof("upkeep %s upkeep failure reason is %d", upkeepId, fr) + checkResults[i].Eligible = false + checkResults[i].Retryable = false + checkResults[i].IneligibilityReason = uint8(fr) + continue + } // Since checkUpkeep is true, simulate perform upkeep to ensure it doesn't revert payload, err := r.abi.Pack("simulatePerformUpkeep", upkeepId, cr.PerformData) @@ -317,6 +330,7 @@ func (r *EvmRegistry) simulatePerformUpkeeps(ctx context.Context, checkResults [ 
continue } + opts := r.buildCallOpts(ctx, block) var result string performReqs = append(performReqs, rpc.BatchElem{ Method: "eth_call", diff --git a/core/services/ocr2/plugins/ocr2keeper/evmregistry/v21/registry_check_pipeline_test.go b/core/services/ocr2/plugins/ocr2keeper/evmregistry/v21/registry_check_pipeline_test.go index 330da44b71b..e74ad4821a6 100644 --- a/core/services/ocr2/plugins/ocr2keeper/evmregistry/v21/registry_check_pipeline_test.go +++ b/core/services/ocr2/plugins/ocr2keeper/evmregistry/v21/registry_check_pipeline_test.go @@ -23,6 +23,7 @@ import ( ocr2keepers "github.com/smartcontractkit/chainlink-common/pkg/types/automation" evmClientMocks "github.com/smartcontractkit/chainlink/v2/core/chains/evm/client/mocks" + gasMocks "github.com/smartcontractkit/chainlink/v2/core/chains/evm/gas/mocks" "github.com/smartcontractkit/chainlink/v2/core/chains/evm/logpoller" "github.com/smartcontractkit/chainlink/v2/core/chains/evm/types" ac "github.com/smartcontractkit/chainlink/v2/core/gethwrappers/generated/i_automation_v21_plus_common" @@ -651,6 +652,13 @@ func TestRegistry_SimulatePerformUpkeeps(t *testing.T) { }).Once() e.client = client + mockReg := mocks.NewRegistry(t) + mockReg.On("GetUpkeep", mock.Anything, mock.Anything).Return( + encoding.UpkeepInfo{OffchainConfig: make([]byte, 0)}, + nil, + ).Times(2) + e.registry = mockReg + results, err := e.simulatePerformUpkeeps(testutils.Context(t), tc.inputs) assert.Equal(t, tc.results, results) assert.Equal(t, tc.err, err) @@ -670,6 +678,7 @@ func setupEVMRegistry(t *testing.T) *EvmRegistry { mockReg := mocks.NewRegistry(t) mockHttpClient := mocks.NewHttpClient(t) client := evmClientMocks.NewClient(t) + ge := gasMocks.NewEvmFeeEstimator(t) r := &EvmRegistry{ lggr: lggr, @@ -694,6 +703,8 @@ func setupEVMRegistry(t *testing.T) *EvmRegistry { AllowListCache: cache.New(defaultAllowListExpiration, cleanupInterval), }, hc: mockHttpClient, + bs: &BlockSubscriber{latestBlock: atomic.Pointer[ocr2keepers.BlockKey]{}}, + ge: ge, } return r } diff --git a/core/services/workflows/delegate.go b/core/services/workflows/delegate.go index 8dc440da477..9db802f9a2f 100644 --- a/core/services/workflows/delegate.go +++ b/core/services/workflows/delegate.go @@ -15,6 +15,7 @@ import ( "github.com/smartcontractkit/chainlink/v2/core/logger" "github.com/smartcontractkit/chainlink/v2/core/services/job" p2ptypes "github.com/smartcontractkit/chainlink/v2/core/services/p2p/types" + "github.com/smartcontractkit/chainlink/v2/core/services/workflows/store" ) type Delegate struct { @@ -22,6 +23,7 @@ type Delegate struct { logger logger.Logger legacyEVMChains legacyevm.LegacyChainContainer peerID func() *p2ptypes.PeerID + store store.Store } var _ job.Delegate = (*Delegate)(nil) @@ -58,6 +60,7 @@ func (d *Delegate) ServicesForSpec(ctx context.Context, spec job.Job) ([]job.Ser Registry: d.registry, DONInfo: dinfo, PeerID: d.peerID, + Store: d.store, } engine, err := NewEngine(cfg) if err != nil { @@ -103,8 +106,8 @@ func initializeDONInfo(lggr logger.Logger) (*capabilities.DON, error) { }, nil } -func NewDelegate(logger logger.Logger, registry core.CapabilitiesRegistry, legacyEVMChains legacyevm.LegacyChainContainer, peerID func() *p2ptypes.PeerID) *Delegate { - return &Delegate{logger: logger, registry: registry, legacyEVMChains: legacyEVMChains, peerID: peerID} +func NewDelegate(logger logger.Logger, registry core.CapabilitiesRegistry, legacyEVMChains legacyevm.LegacyChainContainer, store store.Store, peerID func() *p2ptypes.PeerID) *Delegate { + return 
&Delegate{logger: logger, registry: registry, legacyEVMChains: legacyEVMChains, store: store, peerID: peerID} } func ValidatedWorkflowSpec(tomlString string) (job.Job, error) { diff --git a/core/services/workflows/engine.go b/core/services/workflows/engine.go index e405102e123..292ad9c6468 100644 --- a/core/services/workflows/engine.go +++ b/core/services/workflows/engine.go @@ -8,18 +8,20 @@ import ( "sync" "time" + "github.com/jonboulle/clockwork" + "github.com/smartcontractkit/chainlink-common/pkg/capabilities" "github.com/smartcontractkit/chainlink-common/pkg/services" "github.com/smartcontractkit/chainlink-common/pkg/types/core" "github.com/smartcontractkit/chainlink-common/pkg/values" "github.com/smartcontractkit/chainlink/v2/core/logger" p2ptypes "github.com/smartcontractkit/chainlink/v2/core/services/p2p/types" + "github.com/smartcontractkit/chainlink/v2/core/services/workflows/store" ) const ( // NOTE: max 32 bytes per ID - consider enforcing exactly 32 bytes? - mockedTriggerID = "cccccccccc0000000000000000000000" - mockedWorkflowID = "15c631d295ef5e32deb99a10ee6804bc4af1385568f9b3363f6552ac6dbb2cef" + mockedTriggerID = "cccccccccc0000000000000000000000" ) type donInfo struct { @@ -30,18 +32,19 @@ type donInfo struct { // Engine handles the lifecycle of a single workflow and its executions. type Engine struct { services.StateMachine - logger logger.Logger - registry core.CapabilitiesRegistry - workflow *workflow - donInfo donInfo - executionStates *inMemoryStore - pendingStepRequests chan stepRequest - triggerEvents chan capabilities.CapabilityResponse - newWorkerCh chan struct{} - stepUpdateCh chan stepState - wg sync.WaitGroup - stopCh services.StopChan - newWorkerTimeout time.Duration + logger logger.Logger + registry core.CapabilitiesRegistry + workflow *workflow + donInfo donInfo + executionStates store.Store + pendingStepRequests chan stepRequest + triggerEvents chan capabilities.CapabilityResponse + newWorkerCh chan struct{} + stepUpdateCh chan store.WorkflowExecutionStep + wg sync.WaitGroup + stopCh services.StopChan + newWorkerTimeout time.Duration + maxExecutionDuration time.Duration // testing lifecycle hook to signal when an execution is finished. onExecutionFinished func(string) @@ -53,6 +56,8 @@ type Engine struct { // Used for testing to control the retry interval // when initializing the engine. retryMs int + + clock clockwork.Clock } func (e *Engine) Start(ctx context.Context) error { @@ -183,7 +188,13 @@ func (e *Engine) init(ctx context.Context) { return } - e.logger.Debug("capabilities resolved, registering triggers") + e.logger.Debug("capabilities resolved, resuming in-progress workflows") + err := e.resumeInProgressExecutions(ctx) + if err != nil { + e.logger.Errorf("failed to resume workflows: %w", err) + } + + e.logger.Debug("registering triggers") for _, t := range e.workflow.triggers { err := e.registerTrigger(ctx, t) if err != nil { @@ -195,6 +206,55 @@ func (e *Engine) init(ctx context.Context) { e.afterInit(true) } +var ( + defaultOffset, defaultLimit = 0, 1_000 +) + +func (e *Engine) resumeInProgressExecutions(ctx context.Context) error { + wipExecutions, err := e.executionStates.GetUnfinished(ctx, defaultOffset, defaultLimit) + if err != nil { + return err + } + + // TODO: paginate properly + if len(wipExecutions) >= defaultLimit { + e.logger.Warnf("possible execution overflow during resumption") + } + + // Cache the dependents associated with a step. 
+ // We may have to reprocess many executions, but should only + // need to calculate the dependents of a step once since + // they won't change. + refToDeps := map[string][]*step{} + for _, execution := range wipExecutions { + for _, step := range execution.Steps { + // NOTE: In order to determine what tasks need to be enqueued, + // we look at any completed steps, and for each dependent, + // check if they are ready to be enqueued. + // This will also handle an execution that has stalled immediately on creation, + // since we always create an execution with an initially completed trigger step. + if step.Status != store.StatusCompleted { + continue + } + + sds, ok := refToDeps[step.Ref] + if !ok { + s, err := e.workflow.dependents(step.Ref) + if err != nil { + return err + } + + sds = s + } + + for _, sd := range sds { + e.queueIfReady(execution, sd) + } + } + } + return nil +} + // initializeExecutionStrategy for `step`. // Broadly speaking, we'll use `immediateExecution` for non-target steps // and `scheduledExecution` for targets. If we don't have the necessary @@ -341,12 +401,12 @@ func (e *Engine) loop(ctx context.Context) { // Wait for a new worker to be available before dispatching a new one. // We'll do this up to newWorkerTimeout. If this expires, we'll put the // message back on the queue and keep going. - t := time.NewTimer(e.newWorkerTimeout) + t := e.clock.NewTimer(e.newWorkerTimeout) select { case <-e.newWorkerCh: e.wg.Add(1) go e.workerForStepRequest(ctx, pendingStepRequest) - case <-t.C: + case <-t.Chan(): e.logger.Errorf("timed out when spinning off worker for pending step request %+v", pendingStepRequest) e.pendingStepRequests <- pendingStepRequest } @@ -379,21 +439,23 @@ func generateExecutionID(workflowID, eventID string) (string, error) { // startExecution kicks off a new workflow execution when a trigger event is received. func (e *Engine) startExecution(ctx context.Context, executionID string, event values.Value) error { e.logger.Debugw("executing on a trigger event", "event", event, "executionID", executionID) - ec := &executionState{ - steps: map[string]*stepState{ + ec := &store.WorkflowExecution{ + Steps: map[string]*store.WorkflowExecutionStep{ keywordTrigger: { - outputs: &stepOutput{ - value: event, + Outputs: &store.StepOutput{ + Value: event, }, - status: statusCompleted, + Status: store.StatusCompleted, + ExecutionID: executionID, + Ref: keywordTrigger, }, }, - workflowID: e.workflow.id, - executionID: executionID, - status: statusStarted, + WorkflowID: e.workflow.id, + ExecutionID: executionID, + Status: store.StatusStarted, } - err := e.executionStates.add(ctx, ec) + err := e.executionStates.Add(ctx, ec) if err != nil { return err } @@ -413,26 +475,25 @@ func (e *Engine) startExecution(ctx context.Context, executionID string, event v return nil } -func (e *Engine) handleStepUpdate(ctx context.Context, stepUpdate stepState) error { - state, err := e.executionStates.updateStep(ctx, &stepUpdate) +func (e *Engine) handleStepUpdate(ctx context.Context, stepUpdate store.WorkflowExecutionStep) error { + state, err := e.executionStates.UpsertStep(ctx, &stepUpdate) if err != nil { return err } - switch stepUpdate.status { - case statusCompleted: - stepDependents, err := e.workflow.dependents(stepUpdate.ref) + switch stepUpdate.Status { + case store.StatusCompleted: + stepDependents, err := e.workflow.dependents(stepUpdate.Ref) if err != nil { return err } // There are no steps left to process in the current path, so let's check if // we've completed the workflow. 
- // If not, we'll check for any dependents that are ready to process. if len(stepDependents) == 0 { workflowCompleted := true err := e.workflow.walkDo(keywordTrigger, func(s *step) error { - step, ok := state.steps[s.Ref] + step, ok := state.Steps[s.Ref] // The step is missing from the state, // which means it hasn't been processed yet. // Let's mark `workflowCompleted` = false, and @@ -442,8 +503,8 @@ func (e *Engine) handleStepUpdate(ctx context.Context, stepUpdate stepState) err return nil } - switch step.status { - case statusCompleted, statusErrored: + switch step.Status { + case store.StatusCompleted, store.StatusErrored: default: workflowCompleted = false } @@ -454,18 +515,23 @@ func (e *Engine) handleStepUpdate(ctx context.Context, stepUpdate stepState) err } if workflowCompleted { - err := e.finishExecution(ctx, state.executionID, statusCompleted) - if err != nil { - return err - } + return e.finishExecution(ctx, state.ExecutionID, store.StatusCompleted) } } + // We haven't completed the workflow, but should we continue? + // If we've been executing for too long, let's time the workflow out and stop here. + if state.CreatedAt != nil && e.clock.Since(*state.CreatedAt) > e.maxExecutionDuration { + return e.finishExecution(ctx, state.ExecutionID, store.StatusTimeout) + } + + // Finally, since the workflow hasn't timed out or completed, let's + // check for any dependents that are ready to process. for _, sd := range stepDependents { e.queueIfReady(state, sd) } - case statusErrored: - err := e.finishExecution(ctx, state.executionID, statusErrored) + case store.StatusErrored: + err := e.finishExecution(ctx, state.ExecutionID, store.StatusErrored) if err != nil { return err } @@ -474,11 +540,11 @@ func (e *Engine) handleStepUpdate(ctx context.Context, stepUpdate stepState) err return nil } -func (e *Engine) queueIfReady(state executionState, step *step) { +func (e *Engine) queueIfReady(state store.WorkflowExecution, step *step) { // Check if all dependencies are completed for the current step var waitingOnDependencies bool for _, dr := range step.dependencies { - stepState, ok := state.steps[dr] + stepState, ok := state.Steps[dr] if !ok { waitingOnDependencies = true continue @@ -489,7 +555,7 @@ func (e *Engine) queueIfReady(state executionState, step *step) { // This includes cases where one of the dependent // steps has errored, since that means we shouldn't // schedule the step for execution. 
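
Aside for reviewers: the scheduling rule in `queueIfReady` above reduces to "enqueue a step only once every one of its dependencies has a completed record in the execution state; a missing, still-running, or errored dependency keeps it waiting". Below is a minimal, standalone sketch of that rule with plain string statuses standing in for the `store` constants; `isReady` is an illustrative name and not part of this change.

```go
package main

import "fmt"

const (
	statusCompleted = "completed"
	statusErrored   = "errored"
)

// isReady reports whether a step with the given dependencies can be enqueued:
// every dependency must be present in the execution state and be completed.
// An errored or still-missing dependency keeps the step waiting.
func isReady(deps []string, steps map[string]string) bool {
	for _, d := range deps {
		status, ok := steps[d]
		if !ok || status != statusCompleted {
			return false
		}
	}
	return true
}

func main() {
	steps := map[string]string{
		"trigger":    statusCompleted,
		"get_prices": statusErrored,
	}
	fmt.Println(isReady([]string{"trigger"}, steps))               // true
	fmt.Println(isReady([]string{"trigger", "get_prices"}, steps)) // false: dependency errored
	fmt.Println(isReady([]string{"trigger", "aggregate"}, steps))  // false: dependency not recorded yet
}
```
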
- if stepState.status != statusCompleted { + if stepState.Status != store.StatusCompleted { waitingOnDependencies = true } } @@ -506,7 +572,7 @@ func (e *Engine) queueIfReady(state executionState, step *step) { func (e *Engine) finishExecution(ctx context.Context, executionID string, status string) error { e.logger.Infow("finishing execution", "executionID", executionID, "status", status) - err := e.executionStates.updateStatus(ctx, executionID, status) + err := e.executionStates.UpdateStatus(ctx, executionID, status) if err != nil { return err } @@ -521,27 +587,27 @@ func (e *Engine) workerForStepRequest(ctx context.Context, msg stepRequest) { // Instantiate a child logger; in addition to the WorkflowID field the workflow // logger will already have, this adds the `stepRef` and `executionID` - l := e.logger.With("stepRef", msg.stepRef, "executionID", msg.state.executionID) + l := e.logger.With("stepRef", msg.stepRef, "executionID", msg.state.ExecutionID) l.Debugw("executing on a step event") - stepState := &stepState{ - outputs: &stepOutput{}, - executionID: msg.state.executionID, - ref: msg.stepRef, + stepState := &store.WorkflowExecutionStep{ + Outputs: &store.StepOutput{}, + ExecutionID: msg.state.ExecutionID, + Ref: msg.stepRef, } inputs, outputs, err := e.executeStep(ctx, l, msg) if err != nil { l.Errorf("error executing step request: %s", err) - stepState.outputs.err = err - stepState.status = statusErrored + stepState.Outputs.Err = err + stepState.Status = store.StatusErrored } else { l.Infow("step executed successfully", "outputs", outputs) - stepState.outputs.value = outputs - stepState.status = statusCompleted + stepState.Outputs.Value = outputs + stepState.Status = store.StatusCompleted } - stepState.inputs = inputs + stepState.Inputs = inputs // Let's try and emit the stepUpdate. // If the context is canceled, we'll just drop the update. 
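
The worker hand-off described in the comment above ("emit the stepUpdate, or drop it if the context is canceled") is the usual Go select between a channel send and `ctx.Done()`. A small self-contained sketch of that pattern, with simplified stand-in types (`stepUpdate` and `sendOrDrop` are illustrative names only, not from this diff):

```go
package main

import (
	"context"
	"fmt"
)

type stepUpdate struct {
	ref    string
	status string
}

// sendOrDrop forwards the update unless the context has already been
// canceled, in which case the update is silently dropped.
func sendOrDrop(ctx context.Context, ch chan<- stepUpdate, u stepUpdate) {
	select {
	case <-ctx.Done():
		fmt.Println("context canceled; dropping update for", u.ref)
	case ch <- u:
		fmt.Println("update sent for", u.ref)
	}
}

func main() {
	ch := make(chan stepUpdate, 1)
	sendOrDrop(context.Background(), ch, stepUpdate{ref: "evm_median", status: "completed"})

	ctx, cancel := context.WithCancel(context.Background())
	cancel()                          // simulate engine shutdown
	blocked := make(chan stepUpdate)  // unbuffered and never read, so only cancellation can unblock
	sendOrDrop(ctx, blocked, stepUpdate{ref: "write_polygon", status: "errored"})
}
```
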
@@ -577,8 +643,8 @@ func (e *Engine) executeStep(ctx context.Context, l logger.Logger, msg stepReque Inputs: inputs, Config: step.config, Metadata: capabilities.RequestMetadata{ - WorkflowID: msg.state.workflowID, - WorkflowExecutionID: msg.state.executionID, + WorkflowID: msg.state.WorkflowID, + WorkflowExecutionID: msg.state.ExecutionID, }, } @@ -670,27 +736,31 @@ func (e *Engine) Close() error { } type Config struct { - Spec string - WorkflowID string - Lggr logger.Logger - Registry core.CapabilitiesRegistry - MaxWorkerLimit int - QueueSize int - NewWorkerTimeout time.Duration - DONInfo *capabilities.DON - PeerID func() *p2ptypes.PeerID + Spec string + WorkflowID string + Lggr logger.Logger + Registry core.CapabilitiesRegistry + MaxWorkerLimit int + QueueSize int + NewWorkerTimeout time.Duration + MaxExecutionDuration time.Duration + DONInfo *capabilities.DON + PeerID func() *p2ptypes.PeerID + Store store.Store // For testing purposes only maxRetries int retryMs int afterInit func(success bool) onExecutionFinished func(weid string) + clock clockwork.Clock } const ( - defaultWorkerLimit = 100 - defaultQueueSize = 100000 - defaultNewWorkerTimeout = 2 * time.Second + defaultWorkerLimit = 100 + defaultQueueSize = 100000 + defaultNewWorkerTimeout = 2 * time.Second + defaultMaxExecutionDuration = 10 * time.Minute ) func NewEngine(cfg Config) (engine *Engine, err error) { @@ -706,6 +776,14 @@ func NewEngine(cfg Config) (engine *Engine, err error) { cfg.NewWorkerTimeout = defaultNewWorkerTimeout } + if cfg.MaxExecutionDuration == 0 { + cfg.MaxExecutionDuration = defaultMaxExecutionDuration + } + + if cfg.Store == nil { + cfg.Store = store.NewInMemoryStore() + } + if cfg.retryMs == 0 { cfg.retryMs = 5000 } @@ -718,6 +796,10 @@ func NewEngine(cfg Config) (engine *Engine, err error) { cfg.onExecutionFinished = func(weid string) {} } + if cfg.clock == nil { + cfg.clock = clockwork.NewRealClock() + } + // TODO: validation of the workflow spec // We'll need to check, among other things: // - that there are no step `ref` called `trigger` as this is reserved for any triggers @@ -747,18 +829,20 @@ func NewEngine(cfg Config) (engine *Engine, err error) { DON: cfg.DONInfo, PeerID: cfg.PeerID, }, - executionStates: newInMemoryStore(), - pendingStepRequests: make(chan stepRequest, cfg.QueueSize), - newWorkerCh: newWorkerCh, - stepUpdateCh: make(chan stepState), - triggerEvents: make(chan capabilities.CapabilityResponse), - stopCh: make(chan struct{}), - newWorkerTimeout: cfg.NewWorkerTimeout, + executionStates: cfg.Store, + pendingStepRequests: make(chan stepRequest, cfg.QueueSize), + newWorkerCh: newWorkerCh, + stepUpdateCh: make(chan store.WorkflowExecutionStep), + triggerEvents: make(chan capabilities.CapabilityResponse), + stopCh: make(chan struct{}), + newWorkerTimeout: cfg.NewWorkerTimeout, + maxExecutionDuration: cfg.MaxExecutionDuration, onExecutionFinished: cfg.onExecutionFinished, afterInit: cfg.afterInit, maxRetries: cfg.maxRetries, retryMs: cfg.retryMs, + clock: cfg.clock, } return engine, nil } diff --git a/core/services/workflows/engine_test.go b/core/services/workflows/engine_test.go index ff4c5682129..212ad37367e 100644 --- a/core/services/workflows/engine_test.go +++ b/core/services/workflows/engine_test.go @@ -4,7 +4,9 @@ import ( "context" "errors" "testing" + "time" + "github.com/jonboulle/clockwork" "github.com/shopspring/decimal" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -13,8 +15,10 @@ import ( 
"github.com/smartcontractkit/chainlink-common/pkg/values" coreCap "github.com/smartcontractkit/chainlink/v2/core/capabilities" "github.com/smartcontractkit/chainlink/v2/core/internal/testutils" + "github.com/smartcontractkit/chainlink/v2/core/internal/testutils/pgtest" "github.com/smartcontractkit/chainlink/v2/core/logger" p2ptypes "github.com/smartcontractkit/chainlink/v2/core/services/p2p/types" + "github.com/smartcontractkit/chainlink/v2/core/services/workflows/store" ) const hardcodedWorkflow = ` @@ -71,7 +75,7 @@ type testHooks struct { } // newTestEngine creates a new engine with some test defaults. -func newTestEngine(t *testing.T, reg *coreCap.Registry, spec string) (*Engine, *testHooks) { +func newTestEngine(t *testing.T, reg *coreCap.Registry, spec string, opts ...func(c *Config)) (*Engine, *testHooks) { peerID := p2ptypes.PeerID{} initFailed := make(chan struct{}) executionFinished := make(chan string, 100) @@ -91,6 +95,10 @@ func newTestEngine(t *testing.T, reg *coreCap.Registry, spec string) (*Engine, * onExecutionFinished: func(weid string) { executionFinished <- weid }, + clock: clockwork.NewFakeClock(), + } + for _, o := range opts { + o(&cfg) } eng, err := NewEngine(cfg) require.NoError(t, err) @@ -152,14 +160,16 @@ func (m *mockCapability) UnregisterFromWorkflow(ctx context.Context, request cap type mockTriggerCapability struct { capabilities.CapabilityInfo - triggerEvent capabilities.CapabilityResponse + triggerEvent *capabilities.CapabilityResponse ch chan capabilities.CapabilityResponse } var _ capabilities.TriggerCapability = (*mockTriggerCapability)(nil) func (m *mockTriggerCapability) RegisterTrigger(ctx context.Context, req capabilities.CapabilityRequest) (<-chan capabilities.CapabilityResponse, error) { - m.ch <- m.triggerEvent + if m.triggerEvent != nil { + m.ch <- *m.triggerEvent + } return m.ch, nil } @@ -169,47 +179,70 @@ func (m *mockTriggerCapability) UnregisterTrigger(ctx context.Context, req capab func TestEngineWithHardcodedWorkflow(t *testing.T) { t.Parallel() - ctx := testutils.Context(t) - reg := coreCap.NewRegistry(logger.TestLogger(t)) - trigger, cr := mockTrigger(t) - - require.NoError(t, reg.Add(ctx, trigger)) - require.NoError(t, reg.Add(ctx, mockConsensus())) - target1 := mockTarget() - require.NoError(t, reg.Add(ctx, target1)) - - target2 := newMockCapability( - capabilities.MustNewCapabilityInfo( - "write_ethereum-testnet-sepolia", - capabilities.CapabilityTypeTarget, - "a write capability targeting ethereum sepolia testnet", - "v1.0.0", - nil, - ), - func(req capabilities.CapabilityRequest) (capabilities.CapabilityResponse, error) { - m := req.Inputs.Underlying["report"].(*values.Map) - return capabilities.CapabilityResponse{ - Value: m, - }, nil + testCases := []struct { + name string + store store.Store + }{ + { + name: "db-engine", + store: store.NewDBStore(pgtest.NewSqlxDB(t), clockwork.NewFakeClock()), }, - ) - require.NoError(t, reg.Add(ctx, target2)) - - eng, hooks := newTestEngine(t, reg, hardcodedWorkflow) - - err := eng.Start(ctx) - require.NoError(t, err) - defer eng.Close() - - eid := getExecutionId(t, eng, hooks) - assert.Equal(t, cr, <-target1.response) - assert.Equal(t, cr, <-target2.response) - - state, err := eng.executionStates.get(ctx, eid) - require.NoError(t, err) - - assert.Equal(t, state.status, statusCompleted) + { + name: "in-memory-engine", + store: store.NewInMemoryStore(), + }, + } + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + ctx := testutils.Context(t) + reg := 
coreCap.NewRegistry(logger.TestLogger(t)) + + trigger, cr := mockTrigger(t) + + require.NoError(t, reg.Add(ctx, trigger)) + require.NoError(t, reg.Add(ctx, mockConsensus())) + target1 := mockTarget() + require.NoError(t, reg.Add(ctx, target1)) + + target2 := newMockCapability( + capabilities.MustNewCapabilityInfo( + "write_ethereum-testnet-sepolia", + capabilities.CapabilityTypeTarget, + "a write capability targeting ethereum sepolia testnet", + "v1.0.0", + nil, + ), + func(req capabilities.CapabilityRequest) (capabilities.CapabilityResponse, error) { + m := req.Inputs.Underlying["report"].(*values.Map) + return capabilities.CapabilityResponse{ + Value: m, + }, nil + }, + ) + require.NoError(t, reg.Add(ctx, target2)) + + eng, testHooks := newTestEngine( + t, + reg, + hardcodedWorkflow, + func(c *Config) { c.Store = tc.store }, + ) + + err := eng.Start(ctx) + require.NoError(t, err) + defer eng.Close() + + eid := getExecutionId(t, eng, testHooks) + assert.Equal(t, cr, <-target1.response) + assert.Equal(t, cr, <-target2.response) + + state, err := eng.executionStates.Get(ctx, eid) + require.NoError(t, err) + + assert.Equal(t, state.Status, store.StatusCompleted) + }) + } } const ( @@ -275,10 +308,24 @@ func mockTrigger(t *testing.T) (capabilities.TriggerCapability, capabilities.Cap cr := capabilities.CapabilityResponse{ Value: resp, } - mt.triggerEvent = cr + mt.triggerEvent = &cr return mt, cr } +func mockNoopTrigger(t *testing.T) capabilities.TriggerCapability { + mt := &mockTriggerCapability{ + CapabilityInfo: capabilities.MustNewCapabilityInfo( + "mercury-trigger", + capabilities.CapabilityTypeTrigger, + "issues a trigger when a mercury report is received.", + "v1.0.0", + nil, + ), + ch: make(chan capabilities.CapabilityResponse, 10), + } + return mt +} + func mockFailingConsensus() *mockCapability { return newMockCapability( capabilities.MustNewCapabilityInfo( @@ -357,12 +404,12 @@ func TestEngine_ErrorsTheWorkflowIfAStepErrors(t *testing.T) { defer eng.Close() eid := getExecutionId(t, eng, hooks) - state, err := eng.executionStates.get(ctx, eid) + state, err := eng.executionStates.Get(ctx, eid) require.NoError(t, err) - assert.Equal(t, state.status, statusErrored) + assert.Equal(t, state.Status, store.StatusErrored) // evm_median is the ref of our failing consensus step - assert.Equal(t, state.steps["evm_median"].status, statusErrored) + assert.Equal(t, state.Steps["evm_median"].Status, store.StatusErrored) } const ( @@ -455,14 +502,14 @@ func TestEngine_MultiStepDependencies(t *testing.T) { defer eng.Close() eid := getExecutionId(t, eng, hooks) - state, err := eng.executionStates.get(ctx, eid) + state, err := eng.executionStates.Get(ctx, eid) require.NoError(t, err) - assert.Equal(t, state.status, statusCompleted) + assert.Equal(t, state.Status, store.StatusCompleted) // The inputs to the consensus step should // be the outputs of the two dependents. 
- inputs := state.steps["evm_median"].inputs + inputs := state.Steps["evm_median"].Inputs unw, err := values.Unwrap(inputs) require.NoError(t, err) @@ -477,3 +524,116 @@ func TestEngine_MultiStepDependencies(t *testing.T) { require.NoError(t, err) assert.Equal(t, obs.([]any)[1], o) } + +func TestEngine_ResumesPendingExecutions(t *testing.T) { + t.Parallel() + ctx := testutils.Context(t) + reg := coreCap.NewRegistry(logger.TestLogger(t)) + + trigger := mockNoopTrigger(t) + resp, err := values.NewMap(map[string]any{ + "123": decimal.NewFromFloat(1.00), + "456": decimal.NewFromFloat(1.25), + "789": decimal.NewFromFloat(1.50), + }) + require.NoError(t, err) + + require.NoError(t, reg.Add(ctx, trigger)) + require.NoError(t, reg.Add(ctx, mockConsensus())) + require.NoError(t, reg.Add(ctx, mockTarget())) + + action, _ := mockAction() + require.NoError(t, reg.Add(ctx, action)) + + dbstore := store.NewDBStore(pgtest.NewSqlxDB(t), clockwork.NewFakeClock()) + ec := &store.WorkflowExecution{ + Steps: map[string]*store.WorkflowExecutionStep{ + keywordTrigger: { + Outputs: &store.StepOutput{ + Value: resp, + }, + Status: store.StatusCompleted, + ExecutionID: "", + Ref: keywordTrigger, + }, + }, + WorkflowID: "", + ExecutionID: "", + Status: store.StatusStarted, + } + err = dbstore.Add(ctx, ec) + require.NoError(t, err) + + eng, hooks := newTestEngine( + t, + reg, + multiStepWorkflow, + func(c *Config) { c.Store = dbstore }, + ) + err = eng.Start(ctx) + require.NoError(t, err) + + eid := getExecutionId(t, eng, hooks) + gotEx, err := dbstore.Get(ctx, eid) + require.NoError(t, err) + assert.Equal(t, store.StatusCompleted, gotEx.Status) +} + +func TestEngine_TimesOutOldExecutions(t *testing.T) { + t.Parallel() + ctx := testutils.Context(t) + reg := coreCap.NewRegistry(logger.TestLogger(t)) + + trigger := mockNoopTrigger(t) + resp, err := values.NewMap(map[string]any{ + "123": decimal.NewFromFloat(1.00), + "456": decimal.NewFromFloat(1.25), + "789": decimal.NewFromFloat(1.50), + }) + require.NoError(t, err) + + require.NoError(t, reg.Add(ctx, trigger)) + require.NoError(t, reg.Add(ctx, mockConsensus())) + require.NoError(t, reg.Add(ctx, mockTarget())) + + action, _ := mockAction() + require.NoError(t, reg.Add(ctx, action)) + + clock := clockwork.NewFakeClock() + dbstore := store.NewDBStore(pgtest.NewSqlxDB(t), clock) + ec := &store.WorkflowExecution{ + Steps: map[string]*store.WorkflowExecutionStep{ + keywordTrigger: { + Outputs: &store.StepOutput{ + Value: resp, + }, + Status: store.StatusCompleted, + ExecutionID: "", + Ref: keywordTrigger, + }, + }, + WorkflowID: "", + ExecutionID: "", + Status: store.StatusStarted, + } + err = dbstore.Add(ctx, ec) + require.NoError(t, err) + + eng, hooks := newTestEngine( + t, + reg, + multiStepWorkflow, + func(c *Config) { + c.Store = dbstore + c.clock = clock + }, + ) + clock.Advance(15 * time.Minute) + err = eng.Start(ctx) + require.NoError(t, err) + + _ = getExecutionId(t, eng, hooks) + gotEx, err := dbstore.Get(ctx, "") + require.NoError(t, err) + assert.Equal(t, store.StatusTimeout, gotEx.Status) +} diff --git a/core/services/workflows/models.go b/core/services/workflows/models.go index cd167403089..8dce11cabe5 100644 --- a/core/services/workflows/models.go +++ b/core/services/workflows/models.go @@ -8,11 +8,12 @@ import ( "github.com/smartcontractkit/chainlink-common/pkg/capabilities" "github.com/smartcontractkit/chainlink-common/pkg/values" + "github.com/smartcontractkit/chainlink/v2/core/services/workflows/store" ) type stepRequest struct { stepRef string - state 
executionState + state store.WorkflowExecution } // stepDefinition is the parsed representation of a step in a workflow. diff --git a/core/services/workflows/models_yaml.go b/core/services/workflows/models_yaml.go index 5ed7941f84a..74ed8ee466d 100644 --- a/core/services/workflows/models_yaml.go +++ b/core/services/workflows/models_yaml.go @@ -211,7 +211,7 @@ type stepDefinitionYaml struct { // - “ref” has a circular reference. // // NOTE: Should introduce a custom validator to cover trigger case - Ref string `json:"ref,omitempty" jsonschema:"pattern=^[a-z0-9_]+$"` + Ref string `json:"ref,omitempty" jsonschema:"pattern=^[a-z0-9_-]+$"` // Capabilities can specify an additional optional ”inputs” property. It allows specifying a dependency on the result of one or more other capabilities. These are always runtime values that cannot be provided upfront. It takes a map of the argument name internal to the capability and an explicit reference to the values. // diff --git a/core/services/workflows/state.go b/core/services/workflows/state.go index c229b14e1dd..4026a59be0b 100644 --- a/core/services/workflows/state.go +++ b/core/services/workflows/state.go @@ -6,71 +6,44 @@ import ( "strconv" "strings" - "github.com/smartcontractkit/chainlink-common/pkg/values" -) + "github.com/smartcontractkit/chainlink/v2/core/services/workflows/store" -const ( - statusStarted = "started" - statusErrored = "errored" - statusTimeout = "timeout" - statusCompleted = "completed" + "github.com/smartcontractkit/chainlink-common/pkg/values" ) -type stepOutput struct { - err error - value values.Value -} - -type stepState struct { - executionID string - ref string - status string - - inputs *values.Map - outputs *stepOutput -} - -type executionState struct { - steps map[string]*stepState - executionID string - workflowID string - - status string -} - // copyState returns a deep copy of the input executionState -func copyState(es executionState) executionState { - steps := map[string]*stepState{} - for ref, step := range es.steps { +func copyState(es store.WorkflowExecution) store.WorkflowExecution { + steps := map[string]*store.WorkflowExecutionStep{} + for ref, step := range es.Steps { var mval *values.Map - if step.inputs != nil { - mp := values.Proto(step.inputs).GetMapValue() + if step.Inputs != nil { + mp := values.Proto(step.Inputs).GetMapValue() mval = values.FromMapValueProto(mp) } - op := values.Proto(step.outputs.value) + op := values.Proto(step.Outputs.Value) copiedov := values.FromProto(op) - newState := &stepState{ - executionID: step.executionID, - ref: step.ref, - status: step.status, + newState := &store.WorkflowExecutionStep{ + ExecutionID: step.ExecutionID, + Ref: step.Ref, + Status: step.Status, - outputs: &stepOutput{ - err: step.outputs.err, - value: copiedov, + Outputs: &store.StepOutput{ + Err: step.Outputs.Err, + Value: copiedov, }, - inputs: mval, + Inputs: mval, } steps[ref] = newState } - return executionState{ - executionID: es.executionID, - workflowID: es.workflowID, - status: es.status, - steps: steps, + return store.WorkflowExecution{ + ExecutionID: es.ExecutionID, + WorkflowID: es.WorkflowID, + Status: es.Status, + Steps: steps, } } @@ -84,7 +57,7 @@ func copyState(es executionState) executionState { // If a key has more than two parts, then we traverse the parts // to find the value we want to replace. // We support traversing both nested maps and lists and any combination of the two. 
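
As a rough illustration of the traversal rules this comment describes, the sketch below walks a dotted path through nested maps and slices using plain `any` values instead of `values.Value`; `traverse` is an illustrative helper under those simplifying assumptions, not part of the package.

```go
package main

import (
	"fmt"
	"strconv"
	"strings"
)

// traverse walks a dotted path (e.g. "reports.inner" or "reportsList.0")
// through nested maps and slices, descending one segment at a time.
func traverse(root any, path string) (any, error) {
	cur := root
	for _, part := range strings.Split(path, ".") {
		switch v := cur.(type) {
		case map[string]any:
			next, ok := v[part]
			if !ok {
				return nil, fmt.Errorf("could not find key %q", part)
			}
			cur = next
		case []any:
			i, err := strconv.Atoi(part)
			if err != nil {
				return nil, fmt.Errorf("list index %q is not a number", part)
			}
			if i < 0 || i >= len(v) {
				return nil, fmt.Errorf("index %d out of range", i)
			}
			cur = v[i]
		default:
			return nil, fmt.Errorf("cannot descend into %T with key %q", cur, part)
		}
	}
	return cur, nil
}

func main() {
	outputs := map[string]any{
		"reports":     map[string]any{"inner": "inner_value"},
		"reportsList": []any{"report1", "report2"},
	}
	v, _ := traverse(outputs, "reports.inner")
	fmt.Println(v) // inner_value
	v, _ = traverse(outputs, "reportsList.1")
	fmt.Println(v) // report2
	_, err := traverse(outputs, "reportsList.5")
	fmt.Println(err) // index 5 out of range
}
```
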
-func interpolateKey(key string, state executionState) (any, error) { +func interpolateKey(key string, state store.WorkflowExecution) (any, error) { parts := strings.Split(key, ".") if len(parts) < 2 { @@ -92,7 +65,7 @@ func interpolateKey(key string, state executionState) (any, error) { } // lookup the step we want to get either input or output state from - sc, ok := state.steps[parts[0]] + sc, ok := state.Steps[parts[0]] if !ok { return "", fmt.Errorf("could not find ref `%s`", parts[0]) } @@ -100,13 +73,13 @@ func interpolateKey(key string, state executionState) (any, error) { var value values.Value switch parts[1] { case "inputs": - value = sc.inputs + value = sc.Inputs case "outputs": - if sc.outputs.err != nil { + if sc.Outputs.Err != nil { return "", fmt.Errorf("cannot interpolate ref part `%s` in `%+v`: step has errored", parts[1], sc) } - value = sc.outputs.value + value = sc.Outputs.Value default: return "", fmt.Errorf("cannot interpolate ref part `%s` in `%+v`: second part must be `inputs` or `outputs`", parts[1], sc) } @@ -153,7 +126,7 @@ var ( // identifies any values that should be replaced from `state`. // // A value `v` should be replaced if it is wrapped as follows: `$(v)`. -func findAndInterpolateAllKeys(input any, state executionState) (any, error) { +func findAndInterpolateAllKeys(input any, state store.WorkflowExecution) (any, error) { return deepMap( input, func(el string) (any, error) { diff --git a/core/services/workflows/state_test.go b/core/services/workflows/state_test.go index 0917662ccb6..ccd6cd5004d 100644 --- a/core/services/workflows/state_test.go +++ b/core/services/workflows/state_test.go @@ -8,6 +8,7 @@ import ( "github.com/stretchr/testify/require" "github.com/smartcontractkit/chainlink-common/pkg/values" + "github.com/smartcontractkit/chainlink/v2/core/services/workflows/store" ) func TestInterpolateKey(t *testing.T) { @@ -27,18 +28,18 @@ func TestInterpolateKey(t *testing.T) { testCases := []struct { name string key string - state executionState + state store.WorkflowExecution expected any errMsg string }{ { name: "digging into a string", key: "evm_median.outputs.reports", - state: executionState{ - steps: map[string]*stepState{ + state: store.WorkflowExecution{ + Steps: map[string]*store.WorkflowExecutionStep{ "evm_median": { - outputs: &stepOutput{ - value: values.NewString(""), + Outputs: &store.StepOutput{ + Value: values.NewString(""), }, }, }, @@ -48,27 +49,27 @@ func TestInterpolateKey(t *testing.T) { { name: "ref doesn't exist", key: "evm_median.outputs.reports", - state: executionState{ - steps: map[string]*stepState{}, + state: store.WorkflowExecution{ + Steps: map[string]*store.WorkflowExecutionStep{}, }, errMsg: "could not find ref `evm_median`", }, { name: "less than 2 parts", key: "evm_median", - state: executionState{ - steps: map[string]*stepState{}, + state: store.WorkflowExecution{ + Steps: map[string]*store.WorkflowExecutionStep{}, }, errMsg: "must have at least two parts", }, { name: "second part isn't `inputs` or `outputs`", key: "evm_median.foo", - state: executionState{ - steps: map[string]*stepState{ + state: store.WorkflowExecution{ + Steps: map[string]*store.WorkflowExecutionStep{ "evm_median": { - outputs: &stepOutput{ - value: values.NewString(""), + Outputs: &store.StepOutput{ + Value: values.NewString(""), }, }, }, @@ -78,11 +79,11 @@ func TestInterpolateKey(t *testing.T) { { name: "outputs has errored", key: "evm_median.outputs", - state: executionState{ - steps: map[string]*stepState{ + state: store.WorkflowExecution{ + 
Steps: map[string]*store.WorkflowExecutionStep{ "evm_median": { - outputs: &stepOutput{ - err: errors.New("catastrophic error"), + Outputs: &store.StepOutput{ + Err: errors.New("catastrophic error"), }, }, }, @@ -92,11 +93,11 @@ func TestInterpolateKey(t *testing.T) { { name: "digging into a recursive map", key: "evm_median.outputs.reports.inner", - state: executionState{ - steps: map[string]*stepState{ + state: store.WorkflowExecution{ + Steps: map[string]*store.WorkflowExecutionStep{ "evm_median": { - outputs: &stepOutput{ - value: val, + Outputs: &store.StepOutput{ + Value: val, }, }, }, @@ -106,11 +107,11 @@ func TestInterpolateKey(t *testing.T) { { name: "missing key in map", key: "evm_median.outputs.reports.missing", - state: executionState{ - steps: map[string]*stepState{ + state: store.WorkflowExecution{ + Steps: map[string]*store.WorkflowExecutionStep{ "evm_median": { - outputs: &stepOutput{ - value: val, + Outputs: &store.StepOutput{ + Value: val, }, }, }, @@ -120,11 +121,11 @@ func TestInterpolateKey(t *testing.T) { { name: "digging into an array", key: "evm_median.outputs.reportsList.0", - state: executionState{ - steps: map[string]*stepState{ + state: store.WorkflowExecution{ + Steps: map[string]*store.WorkflowExecutionStep{ "evm_median": { - outputs: &stepOutput{ - value: val, + Outputs: &store.StepOutput{ + Value: val, }, }, }, @@ -134,11 +135,11 @@ func TestInterpolateKey(t *testing.T) { { name: "digging into an array that's too small", key: "evm_median.outputs.reportsList.2", - state: executionState{ - steps: map[string]*stepState{ + state: store.WorkflowExecution{ + Steps: map[string]*store.WorkflowExecutionStep{ "evm_median": { - outputs: &stepOutput{ - value: val, + Outputs: &store.StepOutput{ + Value: val, }, }, }, @@ -148,11 +149,11 @@ func TestInterpolateKey(t *testing.T) { { name: "digging into an array with a string key", key: "evm_median.outputs.reportsList.notAString", - state: executionState{ - steps: map[string]*stepState{ + state: store.WorkflowExecution{ + Steps: map[string]*store.WorkflowExecutionStep{ "evm_median": { - outputs: &stepOutput{ - value: val, + Outputs: &store.StepOutput{ + Value: val, }, }, }, @@ -162,11 +163,11 @@ func TestInterpolateKey(t *testing.T) { { name: "digging into an array with a negative index", key: "evm_median.outputs.reportsList.-1", - state: executionState{ - steps: map[string]*stepState{ + state: store.WorkflowExecution{ + Steps: map[string]*store.WorkflowExecutionStep{ "evm_median": { - outputs: &stepOutput{ - value: val, + Outputs: &store.StepOutput{ + Value: val, }, }, }, @@ -176,11 +177,11 @@ func TestInterpolateKey(t *testing.T) { { name: "empty element", key: "evm_median.outputs..notAString", - state: executionState{ - steps: map[string]*stepState{ + state: store.WorkflowExecution{ + Steps: map[string]*store.WorkflowExecutionStep{ "evm_median": { - outputs: &stepOutput{ - value: val, + Outputs: &store.StepOutput{ + Value: val, }, }, }, @@ -207,7 +208,7 @@ func TestInterpolateInputsFromState(t *testing.T) { testCases := []struct { name string inputs map[string]any - state executionState + state store.WorkflowExecution expected any errMsg string }{ @@ -218,11 +219,11 @@ func TestInterpolateInputsFromState(t *testing.T) { "shouldinterpolate": "$(evm_median.outputs)", }, }, - state: executionState{ - steps: map[string]*stepState{ + state: store.WorkflowExecution{ + Steps: map[string]*store.WorkflowExecutionStep{ "evm_median": { - outputs: &stepOutput{ - value: values.NewString(""), + Outputs: &store.StepOutput{ + Value: 
values.NewString(""), }, }, }, @@ -238,11 +239,11 @@ func TestInterpolateInputsFromState(t *testing.T) { inputs: map[string]any{ "foo": "bar", }, - state: executionState{ - steps: map[string]*stepState{ + state: store.WorkflowExecution{ + Steps: map[string]*store.WorkflowExecutionStep{ "evm_median": { - outputs: &stepOutput{ - value: values.NewString(""), + Outputs: &store.StepOutput{ + Value: values.NewString(""), }, }, }, diff --git a/core/services/workflows/store.go b/core/services/workflows/store.go deleted file mode 100644 index d6ef72d39b9..00000000000 --- a/core/services/workflows/store.go +++ /dev/null @@ -1,70 +0,0 @@ -package workflows - -import ( - "context" - "fmt" - "sync" -) - -// `inMemoryStore` is a temporary in-memory -// equivalent of the database table that should persist -// workflow progress. -type inMemoryStore struct { - idToState map[string]*executionState - mu sync.RWMutex -} - -func newInMemoryStore() *inMemoryStore { - return &inMemoryStore{idToState: map[string]*executionState{}} -} - -// add adds a new execution state under the given executionID -func (s *inMemoryStore) add(ctx context.Context, state *executionState) error { - s.mu.Lock() - defer s.mu.Unlock() - _, ok := s.idToState[state.executionID] - if ok { - return fmt.Errorf("execution ID %s already exists in store", state.executionID) - } - - s.idToState[state.executionID] = state - return nil -} - -// updateStep updates a step for the given executionID -func (s *inMemoryStore) updateStep(ctx context.Context, step *stepState) (executionState, error) { - s.mu.Lock() - defer s.mu.Unlock() - state, ok := s.idToState[step.executionID] - if !ok { - return executionState{}, fmt.Errorf("could not find execution %s", step.executionID) - } - - state.steps[step.ref] = step - return *state, nil -} - -// updateStatus updates the status for the given executionID -func (s *inMemoryStore) updateStatus(ctx context.Context, executionID string, status string) error { - s.mu.Lock() - defer s.mu.Unlock() - state, ok := s.idToState[executionID] - if !ok { - return fmt.Errorf("could not find execution %s", executionID) - } - - state.status = status - return nil -} - -// get gets the state for the given executionID -func (s *inMemoryStore) get(ctx context.Context, executionID string) (executionState, error) { - s.mu.RLock() - defer s.mu.RUnlock() - state, ok := s.idToState[executionID] - if !ok { - return executionState{}, fmt.Errorf("could not find execution %s", executionID) - } - - return *state, nil -} diff --git a/core/services/workflows/store/models.go b/core/services/workflows/store/models.go new file mode 100644 index 00000000000..29a1df154de --- /dev/null +++ b/core/services/workflows/store/models.go @@ -0,0 +1,41 @@ +package store + +import ( + "time" + + "github.com/smartcontractkit/chainlink-common/pkg/values" +) + +const ( + StatusStarted = "started" + StatusErrored = "errored" + StatusTimeout = "timeout" + StatusCompleted = "completed" +) + +type StepOutput struct { + Err error + Value values.Value +} + +type WorkflowExecutionStep struct { + ExecutionID string + Ref string + Status string + + Inputs *values.Map + Outputs *StepOutput + + UpdatedAt *time.Time +} + +type WorkflowExecution struct { + Steps map[string]*WorkflowExecutionStep + ExecutionID string + WorkflowID string + + Status string + CreatedAt *time.Time + UpdatedAt *time.Time + FinishedAt *time.Time +} diff --git a/core/services/workflows/store/store.go b/core/services/workflows/store/store.go new file mode 100644 index 00000000000..e77050617ab --- 
/dev/null +++ b/core/services/workflows/store/store.go @@ -0,0 +1,16 @@ +package store + +import ( + "context" +) + +type Store interface { + Add(ctx context.Context, state *WorkflowExecution) error + UpsertStep(ctx context.Context, step *WorkflowExecutionStep) (WorkflowExecution, error) + UpdateStatus(ctx context.Context, executionID string, status string) error + Get(ctx context.Context, executionID string) (WorkflowExecution, error) + GetUnfinished(ctx context.Context, offset, limit int) ([]WorkflowExecution, error) +} + +var _ Store = (*InMemoryStore)(nil) +var _ Store = (*DBStore)(nil) diff --git a/core/services/workflows/store/store_db.go b/core/services/workflows/store/store_db.go new file mode 100644 index 00000000000..73acece5b18 --- /dev/null +++ b/core/services/workflows/store/store_db.go @@ -0,0 +1,382 @@ +package store + +import ( + "context" + "errors" + "fmt" + "time" + + "google.golang.org/protobuf/proto" + + "github.com/jmoiron/sqlx" + "github.com/jonboulle/clockwork" + + "github.com/smartcontractkit/chainlink-common/pkg/sqlutil" + "github.com/smartcontractkit/chainlink-common/pkg/values" + valuespb "github.com/smartcontractkit/chainlink-common/pkg/values/pb" +) + +// `DBStore` is a postgres-backed +// data store that persists workflow progress. +type DBStore struct { + db sqlutil.DataSource + clock clockwork.Clock +} + +// `workflowExecutionRow` describes a row +// of the `workflow_executions` table +type workflowExecutionRow struct { + ID string + WorkflowID *string + Status string + CreatedAt *time.Time + UpdatedAt *time.Time + FinishedAt *time.Time +} + +// `workflowStepRow` describes a row +// of the `workflow_steps` table +type workflowStepRow struct { + ID uint + WorkflowExecutionID string `db:"workflow_execution_id"` + Ref string + Status string + Inputs []byte + OutputErr *string `db:"output_err"` + OutputValue []byte `db:"output_value"` + UpdatedAt *time.Time `db:"updated_at"` +} + +// `UpdateStatus` updates the status of the given workflow execution +func (d *DBStore) UpdateStatus(ctx context.Context, executionID string, status string) error { + sql := `UPDATE workflow_executions SET status = $1, updated_at = $2 WHERE id = $3` + + // If we're completing the workflow execution, let's also set a finished_at timestamp. + if status != StatusStarted { + sql = "UPDATE workflow_executions SET status = $1, updated_at = $2, finished_at = $2 WHERE id = $3" + } + _, err := d.db.ExecContext(ctx, sql, status, d.clock.Now(), executionID) + return err +} + +// `UpsertStep` updates the given step. This will correspond to an insert, or an update +// depending on whether a step with the ref already exists. +func (d *DBStore) UpsertStep(ctx context.Context, stepState *WorkflowExecutionStep) (WorkflowExecution, error) { + step, err := stateToStep(stepState) + if err != nil { + return WorkflowExecution{}, err + } + + err = d.upsertSteps(ctx, []workflowStepRow{step}) + if err != nil { + return WorkflowExecution{}, err + } + + return d.Get(ctx, step.WorkflowExecutionID) +} + +// `Get` fetches the ExecutionState from the database. 
+func (d *DBStore) Get(ctx context.Context, executionID string) (WorkflowExecution, error) { + wex := &workflowExecutionRow{} + err := d.db.GetContext(ctx, wex, `SELECT * FROM workflow_executions WHERE id = $1`, executionID) + if err != nil { + return WorkflowExecution{}, err + } + + ws := []workflowStepRow{} + err = d.db.SelectContext(ctx, &ws, `SELECT * FROM workflow_steps WHERE workflow_execution_id = $1`, wex.ID) + if err != nil { + return WorkflowExecution{}, err + } + + refToStep := map[string]*WorkflowExecutionStep{} + for _, s := range ws { + ss, err := stepToState(s) + if err != nil { + return WorkflowExecution{}, err + } + + refToStep[s.Ref] = ss + } + + var workflowID string + if wex.WorkflowID != nil { + workflowID = *wex.WorkflowID + } + + es := WorkflowExecution{ + ExecutionID: wex.ID, + WorkflowID: workflowID, + Status: wex.Status, + Steps: refToStep, + CreatedAt: wex.CreatedAt, + UpdatedAt: wex.UpdatedAt, + FinishedAt: wex.FinishedAt, + } + return es, nil +} + +func stepToState(step workflowStepRow) (*WorkflowExecutionStep, error) { + var inputs *values.Map + if len(step.Inputs) > 0 { + vmProto := &valuespb.Map{} + err := proto.Unmarshal(step.Inputs, vmProto) + if err != nil { + return nil, err + } + + inputs = values.FromMapValueProto(vmProto) + } + + var ( + outputErr error + outputs values.Value + ) + + if step.OutputErr != nil { + outputErr = errors.New(*step.OutputErr) + } + + if len(step.OutputValue) != 0 { + vProto := &valuespb.Value{} + err := proto.Unmarshal(step.OutputValue, vProto) + if err != nil { + return nil, err + } + + outputs = values.FromProto(vProto) + } + + var so *StepOutput + if outputErr != nil || outputs != nil { + so = &StepOutput{ + Err: outputErr, + Value: outputs, + } + } + + return &WorkflowExecutionStep{ + ExecutionID: step.WorkflowExecutionID, + Ref: step.Ref, + Status: step.Status, + Inputs: inputs, + Outputs: so, + }, nil +} + +func stateToStep(state *WorkflowExecutionStep) (workflowStepRow, error) { + var inpb []byte + if state.Inputs != nil { + p := values.Proto(state.Inputs).GetMapValue() + ib, err := proto.Marshal(p) + if err != nil { + return workflowStepRow{}, err + } + inpb = ib + } + + wsr := workflowStepRow{ + WorkflowExecutionID: state.ExecutionID, + Ref: state.Ref, + Status: state.Status, + Inputs: inpb, + } + + if state.Outputs == nil { + return wsr, nil + } + + if state.Outputs.Value != nil { + p := values.Proto(state.Outputs.Value) + ob, err := proto.Marshal(p) + if err != nil { + return workflowStepRow{}, err + } + + wsr.OutputValue = ob + } + + if state.Outputs.Err != nil { + errs := state.Outputs.Err.Error() + wsr.OutputErr = &errs + } + return wsr, nil +} + +// `Add` creates the relevant workflow_execution and workflow_step entries +// to persist the passed in ExecutionState. 
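
A usage sketch of the `Store` lifecycle introduced in this change (Add, then UpsertStep, then UpdateStatus, then Get), written against the in-memory implementation referenced in `engine.go`. It only compiles inside this repository, error handling is elided, and the IDs and step refs are made up for illustration; `DBStore` satisfies the same interface, so the same calls work against Postgres.

```go
package main

import (
	"context"
	"fmt"

	"github.com/smartcontractkit/chainlink/v2/core/services/workflows/store"
)

func main() {
	ctx := context.Background()
	s := store.NewInMemoryStore()

	// Persist a new execution with its completed trigger step.
	ex := &store.WorkflowExecution{
		ExecutionID: "exec-1", // made-up ID
		WorkflowID:  "wf-1",
		Status:      store.StatusStarted,
		Steps: map[string]*store.WorkflowExecutionStep{
			"trigger": {ExecutionID: "exec-1", Ref: "trigger", Status: store.StatusCompleted},
		},
	}
	_ = s.Add(ctx, ex)

	// Record progress on an individual step...
	_, _ = s.UpsertStep(ctx, &store.WorkflowExecutionStep{
		ExecutionID: "exec-1",
		Ref:         "evm_median",
		Status:      store.StatusCompleted,
	})

	// ...and mark the execution finished.
	_ = s.UpdateStatus(ctx, "exec-1", store.StatusCompleted)

	got, _ := s.Get(ctx, "exec-1")
	fmt.Println(got.Status, len(got.Steps))
}
```
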
+func (d *DBStore) Add(ctx context.Context, state *WorkflowExecution) error { + return d.transact(ctx, func(db *DBStore) error { + var wid *string + if state.WorkflowID != "" { + wid = &state.WorkflowID + } + wex := &workflowExecutionRow{ + ID: state.ExecutionID, + WorkflowID: wid, + Status: state.Status, + } + err := db.insertWorkflowExecution(ctx, wex) + if err != nil { + return fmt.Errorf("could not insert workflow execution %s: %w", state.ExecutionID, err) + } + + ws := []workflowStepRow{} + for _, step := range state.Steps { + step, err := stateToStep(step) + if err != nil { + return err + } + ws = append(ws, step) + } + if len(ws) > 0 { + return db.upsertSteps(ctx, ws) + } + return nil + }) +} + +func (d *DBStore) upsertSteps(ctx context.Context, steps []workflowStepRow) error { + for _, s := range steps { + now := d.clock.Now() + s.UpdatedAt = &now + } + + sql := ` + INSERT INTO + workflow_steps(workflow_execution_id, ref, status, inputs, output_err, output_value, updated_at) + VALUES (:workflow_execution_id, :ref, :status, :inputs, :output_err, :output_value, :updated_at) + ON CONFLICT ON CONSTRAINT uniq_workflow_execution_id_ref + DO UPDATE SET + workflow_execution_id = EXCLUDED.workflow_execution_id, + ref = EXCLUDED.ref, + status = EXCLUDED.status, + inputs = EXCLUDED.inputs, + output_err = EXCLUDED.output_err, + output_value = EXCLUDED.output_value, + updated_at = EXCLUDED.updated_at; + ` + stmt, args, err := sqlx.Named(sql, steps) + if err != nil { + return err + } + stmt = d.db.Rebind(stmt) + _, err = d.db.ExecContext(ctx, stmt, args...) + return err +} + +func (d *DBStore) insertWorkflowExecution(ctx context.Context, execution *workflowExecutionRow) error { + sql := ` + INSERT INTO + workflow_executions(id, workflow_id, status, created_at) + VALUES ($1, $2, $3, $4) + ` + _, err := d.db.ExecContext(ctx, sql, execution.ID, execution.WorkflowID, execution.Status, d.clock.Now()) + return err +} + +func (d *DBStore) transact(ctx context.Context, fn func(*DBStore) error) error { + return sqlutil.Transact( + ctx, + func(ds sqlutil.DataSource) *DBStore { + return &DBStore{db: ds, clock: d.clock} + }, + d.db, + nil, + fn, + ) +} + +func (d *DBStore) GetUnfinished(ctx context.Context, offset, limit int) ([]WorkflowExecution, error) { + sql := ` + SELECT + workflow_steps.workflow_execution_id AS ws_workflow_execution_id, + workflow_steps.ref AS ws_ref, + workflow_steps.status AS ws_status, + workflow_steps.inputs AS ws_inputs, + workflow_steps.output_err AS ws_output_err, + workflow_steps.output_value AS ws_output_value, + workflow_steps.updated_at AS ws_updated_at, + workflow_executions.id AS we_id, + workflow_executions.workflow_id AS we_workflow_id, + workflow_executions.status AS we_status, + workflow_executions.created_at AS we_created_at, + workflow_executions.updated_at AS we_updated_at, + workflow_executions.finished_at AS we_finished_at + FROM workflow_executions + JOIN workflow_steps + ON workflow_steps.workflow_execution_id = workflow_executions.id + WHERE workflow_executions.status = $1 + ORDER BY workflow_executions.created_at DESC + LIMIT $2 + OFFSET $3 + ` + joinRecords := []struct { + // WorkflowExecutionStep fields + WSWorkflowExecutionID string `db:"ws_workflow_execution_id"` + WSRef string `db:"ws_ref"` + WSStatus string `db:"ws_status"` + WSInputs []byte `db:"ws_inputs"` + WSOutputErr *string `db:"ws_output_err"` + WSOutputValue []byte `db:"ws_output_value"` + WSUpdatedAt *time.Time `db:"ws_updated_at"` + + // WorkflowExecution fields + WEID string `db:"we_id"` + 
WEWorkflowID *string `db:"we_workflow_id"` + WEStatus string `db:"we_status"` + WECreatedAt *time.Time `db:"we_created_at"` + WEUpdatedAt *time.Time `db:"we_updated_at"` + WEFinishedAt *time.Time `db:"we_finished_at"` + }{} + err := d.db.SelectContext(ctx, &joinRecords, sql, StatusStarted, limit, offset) + if err != nil { + return []WorkflowExecution{}, err + } + + idToExecutionState := map[string]*WorkflowExecution{} + for _, jr := range joinRecords { + var wid string + if jr.WEWorkflowID != nil { + wid = *jr.WEWorkflowID + } + if _, ok := idToExecutionState[jr.WEID]; !ok { + idToExecutionState[jr.WEID] = &WorkflowExecution{ + ExecutionID: jr.WEID, + WorkflowID: wid, + Status: jr.WEStatus, + Steps: map[string]*WorkflowExecutionStep{}, + CreatedAt: jr.WECreatedAt, + UpdatedAt: jr.WEUpdatedAt, + FinishedAt: jr.WEFinishedAt, + } + } + + state, err := stepToState(workflowStepRow{ + WorkflowExecutionID: jr.WSWorkflowExecutionID, + Ref: jr.WSRef, + OutputErr: jr.WSOutputErr, + OutputValue: jr.WSOutputValue, + Inputs: jr.WSInputs, + Status: jr.WSStatus, + UpdatedAt: jr.WSUpdatedAt, + }) + if err != nil { + return nil, err + } + + es := idToExecutionState[jr.WEID] + es.Steps[state.Ref] = state + } + + states := []WorkflowExecution{} + for _, s := range idToExecutionState { + states = append(states, *s) + } + + return states, nil +} + +func NewDBStore(ds sqlutil.DataSource, clock clockwork.Clock) *DBStore { + return &DBStore{db: ds, clock: clock} +} diff --git a/core/services/workflows/store/store_db_test.go b/core/services/workflows/store/store_db_test.go new file mode 100644 index 00000000000..e41f4857363 --- /dev/null +++ b/core/services/workflows/store/store_db_test.go @@ -0,0 +1,215 @@ +package store + +import ( + "crypto/rand" + "encoding/hex" + "errors" + "testing" + + "github.com/jonboulle/clockwork" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/smartcontractkit/chainlink-common/pkg/utils/tests" + "github.com/smartcontractkit/chainlink-common/pkg/values" + "github.com/smartcontractkit/chainlink/v2/core/internal/testutils/pgtest" +) + +func randomID() string { + b := make([]byte, 32) + _, err := rand.Read(b) + if err != nil { + panic(err) + } + return hex.EncodeToString(b) +} + +func Test_StoreDB(t *testing.T) { + db := pgtest.NewSqlxDB(t) + store := &DBStore{db: db, clock: clockwork.NewFakeClock()} + + id := randomID() + es := WorkflowExecution{ + Steps: map[string]*WorkflowExecutionStep{ + "step1": { + ExecutionID: id, + Ref: "step1", + Status: "completed", + }, + "step2": { + ExecutionID: id, + Ref: "step2", + Status: "started", + }, + }, + ExecutionID: id, + Status: "started", + } + + err := store.Add(tests.Context(t), &es) + require.NoError(t, err) + + gotEs, err := store.Get(tests.Context(t), es.ExecutionID) + // Zero out the created at timestamp; this isn't present on `es` + // but is added by the db store. 
+ gotEs.CreatedAt = nil + require.NoError(t, err) + assert.Equal(t, es, gotEs) +} + +func Test_StoreDB_DuplicateEntry(t *testing.T) { + db := pgtest.NewSqlxDB(t) + store := &DBStore{db: db, clock: clockwork.NewFakeClock()} + + id := randomID() + es := WorkflowExecution{ + Steps: map[string]*WorkflowExecutionStep{ + "step1": { + ExecutionID: id, + Ref: "step1", + Status: "completed", + }, + "step2": { + ExecutionID: id, + Ref: "step2", + Status: "started", + }, + }, + ExecutionID: id, + Status: "started", + } + + err := store.Add(tests.Context(t), &es) + require.NoError(t, err) + + err = store.Add(tests.Context(t), &es) + assert.ErrorContains(t, err, "duplicate key value violates") +} + +func Test_StoreDB_UpdateStatus(t *testing.T) { + db := pgtest.NewSqlxDB(t) + store := &DBStore{db: db, clock: clockwork.NewFakeClock()} + + id := randomID() + es := WorkflowExecution{ + Steps: map[string]*WorkflowExecutionStep{ + "step1": { + ExecutionID: id, + Ref: "step1", + Status: "completed", + }, + "step2": { + ExecutionID: id, + Ref: "step2", + Status: "started", + }, + }, + ExecutionID: id, + Status: "started", + } + + err := store.Add(tests.Context(t), &es) + require.NoError(t, err) + + completedStatus := "completed" + err = store.UpdateStatus(tests.Context(t), es.ExecutionID, "completed") + require.NoError(t, err) + + gotEs, err := store.Get(tests.Context(t), es.ExecutionID) + require.NoError(t, err) + + assert.Equal(t, gotEs.Status, completedStatus) +} + +func Test_StoreDB_UpdateStep(t *testing.T) { + db := pgtest.NewSqlxDB(t) + store := &DBStore{db: db, clock: clockwork.NewFakeClock()} + + id := randomID() + stepOne := &WorkflowExecutionStep{ + ExecutionID: id, + Ref: "step1", + Status: "completed", + } + stepTwo := &WorkflowExecutionStep{ + ExecutionID: id, + Ref: "step2", + Status: "started", + } + es := WorkflowExecution{ + Steps: map[string]*WorkflowExecutionStep{ + "step1": stepOne, + "step2": stepTwo, + }, + ExecutionID: id, + Status: "started", + } + + err := store.Add(tests.Context(t), &es) + require.NoError(t, err) + + stepOne.Status = "completed" + nm, err := values.NewMap(map[string]any{"hello": "world"}) + require.NoError(t, err) + + stepOne.Inputs = nm + stepOne.Outputs = &StepOutput{Err: errors.New("some error")} + + es, err = store.UpsertStep(tests.Context(t), stepOne) + require.NoError(t, err) + + gotStep := es.Steps[stepOne.Ref] + assert.Equal(t, stepOne, gotStep) + + stepTwo.Outputs = &StepOutput{Value: nm} + es, err = store.UpsertStep(tests.Context(t), stepTwo) + require.NoError(t, err) + + gotStep = es.Steps[stepTwo.Ref] + assert.Equal(t, stepTwo, gotStep) +} + +func Test_StoreDB_GetUnfinishedSteps(t *testing.T) { + db := pgtest.NewSqlxDB(t) + store := &DBStore{db: db, clock: clockwork.NewFakeClock()} + + id := randomID() + stepOne := &WorkflowExecutionStep{ + ExecutionID: id, + Ref: "step1", + Status: "completed", + } + stepTwo := &WorkflowExecutionStep{ + ExecutionID: id, + Ref: "step2", + Status: "started", + } + es := WorkflowExecution{ + Steps: map[string]*WorkflowExecutionStep{ + "step1": stepOne, + "step2": stepTwo, + }, + ExecutionID: id, + Status: "started", + } + + err := store.Add(tests.Context(t), &es) + require.NoError(t, err) + + id = randomID() + esTwo := WorkflowExecution{ + ExecutionID: id, + Status: "completed", + Steps: map[string]*WorkflowExecutionStep{}, + } + err = store.Add(tests.Context(t), &esTwo) + require.NoError(t, err) + + states, err := store.GetUnfinished(tests.Context(t), 0, 100) + require.NoError(t, err) + + assert.Len(t, states, 1) + // Zero 
out the created at timestamp, which is set by the db store but not on `es`
+	states[0].CreatedAt = nil
+	assert.Equal(t, es, states[0])
+}
diff --git a/core/services/workflows/store/store_memory.go b/core/services/workflows/store/store_memory.go
new file mode 100644
index 00000000000..7c8226c5d9c
--- /dev/null
+++ b/core/services/workflows/store/store_memory.go
@@ -0,0 +1,86 @@
+package store
+
+import (
+	"context"
+	"fmt"
+	"sync"
+)
+
+// `InMemoryStore` is a temporary in-memory
+// equivalent of the database table that should persist
+// workflow progress.
+type InMemoryStore struct {
+	idToState map[string]*WorkflowExecution
+	mu        sync.RWMutex
+}
+
+func NewInMemoryStore() *InMemoryStore {
+	return &InMemoryStore{idToState: map[string]*WorkflowExecution{}}
+}
+
+// Add adds a new execution state under the given executionID
+func (s *InMemoryStore) Add(ctx context.Context, state *WorkflowExecution) error {
+	s.mu.Lock()
+	defer s.mu.Unlock()
+	_, ok := s.idToState[state.ExecutionID]
+	if ok {
+		return fmt.Errorf("execution ID %s already exists in store", state.ExecutionID)
+	}
+
+	s.idToState[state.ExecutionID] = state
+	return nil
+}
+
+// UpsertStep inserts or updates a step for the given executionID
+func (s *InMemoryStore) UpsertStep(ctx context.Context, step *WorkflowExecutionStep) (WorkflowExecution, error) {
+	s.mu.Lock()
+	defer s.mu.Unlock()
+	state, ok := s.idToState[step.ExecutionID]
+	if !ok {
+		return WorkflowExecution{}, fmt.Errorf("could not find execution %s", step.ExecutionID)
+	}
+
+	state.Steps[step.Ref] = step
+	return *state, nil
+}
+
+// UpdateStatus updates the status for the given executionID
+func (s *InMemoryStore) UpdateStatus(ctx context.Context, executionID string, status string) error {
+	s.mu.Lock()
+	defer s.mu.Unlock()
+	state, ok := s.idToState[executionID]
+	if !ok {
+		return fmt.Errorf("could not find execution %s", executionID)
+	}
+
+	state.Status = status
+	return nil
+}
+
+// Get gets the state for the given executionID
+func (s *InMemoryStore) Get(ctx context.Context, executionID string) (WorkflowExecution, error) {
+	s.mu.RLock()
+	defer s.mu.RUnlock()
+	state, ok := s.idToState[executionID]
+	if !ok {
+		return WorkflowExecution{}, fmt.Errorf("could not find execution %s", executionID)
+	}
+
+	return *state, nil
+}
+
+// GetUnfinished gets the states for executions that are in the started state.
+// Offset and limit are ignored for the in-memory store.
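+// Note: unlike the DB-backed store, results are returned in map-iteration order, i.e. effectively unordered.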
+func (s *InMemoryStore) GetUnfinished(ctx context.Context, offset, limit int) ([]WorkflowExecution, error) { + s.mu.RLock() + defer s.mu.RUnlock() + + states := []WorkflowExecution{} + for _, s := range s.idToState { + if s.Status == StatusStarted { + states = append(states, *s) + } + } + + return states, nil +} diff --git a/core/services/workflows/testdata/fixtures/workflows/workflow_schema.json b/core/services/workflows/testdata/fixtures/workflows/workflow_schema.json index 7f257f7798d..f9f9fd88646 100644 --- a/core/services/workflows/testdata/fixtures/workflows/workflow_schema.json +++ b/core/services/workflows/testdata/fixtures/workflows/workflow_schema.json @@ -48,7 +48,7 @@ }, "ref": { "type": "string", - "pattern": "^[a-z0-9_]+$" + "pattern": "^[a-z0-9_-]+$" }, "inputs": { "$ref": "#/$defs/mapping" diff --git a/core/store/migrate/migrations/0235_add_workflow_models.sql b/core/store/migrate/migrations/0235_add_workflow_models.sql new file mode 100644 index 00000000000..bd159b3a9d2 --- /dev/null +++ b/core/store/migrate/migrations/0235_add_workflow_models.sql @@ -0,0 +1,47 @@ +-- +goose Up +-- +goose StatementBegin +CREATE TYPE workflow_status AS ENUM ( + 'started', + 'errored', + 'timeout', + 'completed' +); + +ALTER TABLE workflow_specs + ADD CONSTRAINT fk_unique_workflow_id unique(workflow_id); + +CREATE TABLE workflow_executions ( + id varchar(64) PRIMARY KEY, + workflow_id varchar(64) references workflow_specs(workflow_id), + status workflow_status NOT NULL, + created_at timestamp with time zone, + updated_at timestamp with time zone, + finished_at timestamp with time zone +); + +CREATE TABLE workflow_steps ( + id SERIAL PRIMARY KEY, + workflow_execution_id varchar(64) references workflow_executions(id) NOT NULL, + ref text NOT NULL, + status workflow_status NOT NULL, + inputs bytea, + output_err text, + output_value bytea, + updated_at timestamp with time zone +); + +ALTER TABLE workflow_steps + ADD CONSTRAINT uniq_workflow_execution_id_ref unique(workflow_execution_id, ref); + +-- +goose StatementEnd + +-- +goose Down +-- +goose StatementBegin +ALTER TABLE workflow_steps + DROP CONSTRAINT uniq_workflow_execution_id_ref; +DROP TABLE workflow_steps; +DROP TABLE workflow_executions; +ALTER TABLE workflow_specs + DROP CONSTRAINT fk_unique_workflow_id; +DROP TYPE workflow_status; +-- +goose StatementEnd diff --git a/core/store/migrate/migrations/0236_ccip_prices_cache.sql b/core/store/migrate/migrations/0236_ccip_prices_cache.sql new file mode 100644 index 00000000000..e88b68e5575 --- /dev/null +++ b/core/store/migrate/migrations/0236_ccip_prices_cache.sql @@ -0,0 +1,36 @@ +-- +goose Up +-- +goose StatementBegin +CREATE SCHEMA ccip; + +CREATE TABLE ccip.observed_gas_prices( + chain_selector NUMERIC(20,0) NOT NULL, + job_id INTEGER NOT NULL, + source_chain_selector NUMERIC(20,0) NOT NULL, + gas_price NUMERIC(78,0) NOT NULL, + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW() +); + +CREATE TABLE ccip.observed_token_prices( + chain_selector NUMERIC(20,0) NOT NULL, + job_id INTEGER NOT NULL, + token_addr BYTEA NOT NULL, + token_price NUMERIC(78,0) NOT NULL, + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW() +); + +CREATE INDEX idx_ccip_gas_prices_chain_gas_price_timestamp ON ccip.observed_gas_prices (chain_selector, source_chain_selector, created_at DESC); +CREATE INDEX idx_ccip_token_prices_token_price_timestamp ON ccip.observed_token_prices (chain_selector, token_addr, created_at DESC); + +-- +goose StatementEnd + + +-- +goose Down +-- +goose StatementBegin +DROP INDEX IF EXISTS 
idx_ccip_token_prices_token_value; +DROP INDEX IF EXISTS idx_ccip_gas_prices_chain_value; + +DROP TABLE ccip.observed_token_prices; +DROP TABLE ccip.observed_gas_prices; + +DROP SCHEMA ccip; +-- +goose StatementEnd diff --git a/integration-tests/actions/private_network.go b/integration-tests/actions/private_network.go index 01a084b66d8..70239a60060 100644 --- a/integration-tests/actions/private_network.go +++ b/integration-tests/actions/private_network.go @@ -3,17 +3,17 @@ package actions import ( "github.com/rs/zerolog" + ctf_config "github.com/smartcontractkit/chainlink-testing-framework/config" ctf_test_env "github.com/smartcontractkit/chainlink-testing-framework/docker/test_env" - tc "github.com/smartcontractkit/chainlink/integration-tests/testconfig" ) -func EthereumNetworkConfigFromConfig(l zerolog.Logger, config tc.GlobalTestConfig) (network ctf_test_env.EthereumNetwork, err error) { +func EthereumNetworkConfigFromConfig(l zerolog.Logger, config ctf_config.GlobalTestConfig) (network ctf_test_env.EthereumNetwork, err error) { if config.GetPrivateEthereumNetworkConfig() == nil { l.Warn().Msg("No TOML private ethereum network config found, will use old geth") ethBuilder := ctf_test_env.NewEthereumNetworkBuilder() network, err = ethBuilder. - WithEthereumVersion(ctf_test_env.EthereumVersion_Eth1). - WithExecutionLayer(ctf_test_env.ExecutionLayer_Geth). + WithEthereumVersion(ctf_config.EthereumVersion_Eth1). + WithExecutionLayer(ctf_config.ExecutionLayer_Geth). Build() return diff --git a/integration-tests/actions/seth/actions.go b/integration-tests/actions/seth/actions.go index d805ce8c460..12743ce5284 100644 --- a/integration-tests/actions/seth/actions.go +++ b/integration-tests/actions/seth/actions.go @@ -25,6 +25,7 @@ import ( "go.uber.org/zap/zapcore" "github.com/smartcontractkit/chainlink-testing-framework/blockchain" + ctf_config "github.com/smartcontractkit/chainlink-testing-framework/config" "github.com/smartcontractkit/chainlink-testing-framework/k8s/environment" "github.com/smartcontractkit/chainlink-testing-framework/logging" "github.com/smartcontractkit/chainlink-testing-framework/testreporters" @@ -36,7 +37,6 @@ import ( "github.com/smartcontractkit/chainlink/integration-tests/client" "github.com/smartcontractkit/chainlink/integration-tests/contracts" "github.com/smartcontractkit/chainlink/integration-tests/contracts/ethereum" - tc "github.com/smartcontractkit/chainlink/integration-tests/testconfig" "github.com/smartcontractkit/chainlink/integration-tests/utils" ) @@ -954,12 +954,12 @@ var OneEphemeralKeysLiveTestnetAutoFixFn = func(sethCfg *seth.Config) error { } // GetChainClient returns a seth client for the given network after validating the config -func GetChainClient(config tc.SethConfig, network blockchain.EVMNetwork) (*seth.Client, error) { +func GetChainClient(config ctf_config.SethConfig, network blockchain.EVMNetwork) (*seth.Client, error) { return GetChainClientWithConfigFunction(config, network, noOpSethConfigFn) } // GetChainClientWithConfigFunction returns a seth client for the given network after validating the config and applying the config function -func GetChainClientWithConfigFunction(config tc.SethConfig, network blockchain.EVMNetwork, configFn SethConfigFunction) (*seth.Client, error) { +func GetChainClientWithConfigFunction(config ctf_config.SethConfig, network blockchain.EVMNetwork, configFn SethConfigFunction) (*seth.Client, error) { readSethCfg := config.GetSethConfig() if readSethCfg == nil { return nil, fmt.Errorf("Seth config not found") diff 
--git a/integration-tests/actions/vrf/vrfv2/setup_steps.go b/integration-tests/actions/vrf/vrfv2/setup_steps.go index bd41fb33e4e..ca85bdb5f19 100644 --- a/integration-tests/actions/vrf/vrfv2/setup_steps.go +++ b/integration-tests/actions/vrf/vrfv2/setup_steps.go @@ -359,7 +359,7 @@ func SetupVRFV2ForNewEnv( env, err := test_env.NewCLTestEnvBuilder(). WithTestInstance(t). WithTestConfig(&testConfig). - WithPrivateEthereumNetwork(network). + WithPrivateEthereumNetwork(network.EthereumNetworkConfig). WithCLNodes(len(newEnvConfig.NodesToCreate)). WithFunding(big.NewFloat(*testConfig.Common.ChainlinkNodeFunding)). WithCustomCleanup(cleanupFn). diff --git a/integration-tests/actions/vrf/vrfv2plus/setup_steps.go b/integration-tests/actions/vrf/vrfv2plus/setup_steps.go index 0b7be600cc2..ed81935fa2b 100644 --- a/integration-tests/actions/vrf/vrfv2plus/setup_steps.go +++ b/integration-tests/actions/vrf/vrfv2plus/setup_steps.go @@ -402,7 +402,7 @@ func SetupVRFV2PlusForNewEnv( env, err := test_env.NewCLTestEnvBuilder(). WithTestInstance(t). WithTestConfig(&testConfig). - WithPrivateEthereumNetwork(network). + WithPrivateEthereumNetwork(network.EthereumNetworkConfig). WithCLNodes(len(newEnvConfig.NodesToCreate)). WithFunding(big.NewFloat(*testConfig.Common.ChainlinkNodeFunding)). WithCustomCleanup(cleanupFn). diff --git a/integration-tests/contracts/ethereum_contracts_automation_seth.go b/integration-tests/contracts/ethereum_contracts_automation_seth.go index 062586dd918..be980a4c4b3 100644 --- a/integration-tests/contracts/ethereum_contracts_automation_seth.go +++ b/integration-tests/contracts/ethereum_contracts_automation_seth.go @@ -287,6 +287,22 @@ func (v *EthereumKeeperRegistry) SetConfig(config KeeperRegistrySettings, ocrCon } } +func (v *EthereumKeeperRegistry) SetUpkeepOffchainConfig(id *big.Int, offchainConfig []byte) error { + switch v.version { + case ethereum.RegistryVersion_2_0: + _, err := v.client.Decode(v.registry2_0.SetUpkeepOffchainConfig(v.client.NewTXOpts(), id, offchainConfig)) + return err + case ethereum.RegistryVersion_2_1: + _, err := v.client.Decode(v.registry2_1.SetUpkeepOffchainConfig(v.client.NewTXOpts(), id, offchainConfig)) + return err + case ethereum.RegistryVersion_2_2: + _, err := v.client.Decode(v.registry2_2.SetUpkeepOffchainConfig(v.client.NewTXOpts(), id, offchainConfig)) + return err + default: + return fmt.Errorf("SetUpkeepOffchainConfig is not supported by keeper registry version %d", v.version) + } +} + // Pause pauses the registry. 
func (v *EthereumKeeperRegistry) Pause() error { txOpts := v.client.NewTXOpts() diff --git a/integration-tests/contracts/ethereum_keeper_contracts.go b/integration-tests/contracts/ethereum_keeper_contracts.go index 5da5444679b..31c3cb32dc7 100644 --- a/integration-tests/contracts/ethereum_keeper_contracts.go +++ b/integration-tests/contracts/ethereum_keeper_contracts.go @@ -88,6 +88,7 @@ type KeeperRegistry interface { UpdateCheckData(id *big.Int, newCheckData []byte) error SetUpkeepTriggerConfig(id *big.Int, triggerConfig []byte) error SetUpkeepPrivilegeConfig(id *big.Int, privilegeConfig []byte) error + SetUpkeepOffchainConfig(id *big.Int, offchainConfig []byte) error RegistryOwnerAddress() common.Address ChainModuleAddress() common.Address ReorgProtectionEnabled() bool @@ -1231,6 +1232,46 @@ func (v *LegacyEthereumKeeperRegistry) UnpauseUpkeep(id *big.Int) error { } } +func (v *LegacyEthereumKeeperRegistry) SetUpkeepOffchainConfig(id *big.Int, offchainConfig []byte) error { + switch v.version { + case ethereum.RegistryVersion_2_0: + opts, err := v.client.TransactionOpts(v.client.GetDefaultWallet()) + if err != nil { + return err + } + + tx, err := v.registry2_0.SetUpkeepOffchainConfig(opts, id, offchainConfig) + if err != nil { + return err + } + return v.client.ProcessTransaction(tx) + case ethereum.RegistryVersion_2_1: + opts, err := v.client.TransactionOpts(v.client.GetDefaultWallet()) + if err != nil { + return err + } + + tx, err := v.registry2_1.SetUpkeepOffchainConfig(opts, id, offchainConfig) + if err != nil { + return err + } + return v.client.ProcessTransaction(tx) + case ethereum.RegistryVersion_2_2: + opts, err := v.client.TransactionOpts(v.client.GetDefaultWallet()) + if err != nil { + return err + } + + tx, err := v.registry2_2.SetUpkeepOffchainConfig(opts, id, offchainConfig) + if err != nil { + return err + } + return v.client.ProcessTransaction(tx) + default: + return fmt.Errorf("SetUpkeepOffchainConfig is not supported by keeper registry version %d", v.version) + } +} + // Parses upkeep performed log func (v *LegacyEthereumKeeperRegistry) ParseUpkeepPerformedLog(log *types.Log) (*UpkeepPerformedLog, error) { switch v.version { diff --git a/integration-tests/docker/cmd/internal/commands.go b/integration-tests/docker/cmd/internal/commands.go index 074cfb8083d..e05e5d89fac 100644 --- a/integration-tests/docker/cmd/internal/commands.go +++ b/integration-tests/docker/cmd/internal/commands.go @@ -43,7 +43,7 @@ var StartNodesCmd = &cobra.Command{ _, err = test_env.NewCLTestEnvBuilder(). WithTestConfig(&config). - WithPrivateEthereumNetwork(network). + WithPrivateEthereumNetwork(network.EthereumNetworkConfig). WithMockAdapter(). WithCLNodes(nodeCount). WithoutCleanup(). 
diff --git a/integration-tests/docker/test_env/test_env.go b/integration-tests/docker/test_env/test_env.go index 6020cb4a352..7cb618b8dae 100644 --- a/integration-tests/docker/test_env/test_env.go +++ b/integration-tests/docker/test_env/test_env.go @@ -18,6 +18,7 @@ import ( tc "github.com/testcontainers/testcontainers-go" "github.com/smartcontractkit/chainlink-testing-framework/blockchain" + ctf_config "github.com/smartcontractkit/chainlink-testing-framework/config" "github.com/smartcontractkit/chainlink-testing-framework/docker" "github.com/smartcontractkit/chainlink-testing-framework/docker/test_env" "github.com/smartcontractkit/chainlink-testing-framework/logging" @@ -29,7 +30,6 @@ import ( "github.com/smartcontractkit/chainlink/integration-tests/client" "github.com/smartcontractkit/chainlink/integration-tests/contracts" d "github.com/smartcontractkit/chainlink/integration-tests/docker" - core_testconfig "github.com/smartcontractkit/chainlink/integration-tests/testconfig" ) var ( @@ -40,7 +40,7 @@ type CLClusterTestEnv struct { Cfg *TestEnvConfig DockerNetwork *tc.DockerNetwork LogStream *logstream.LogStream - TestConfig core_testconfig.GlobalTestConfig + TestConfig ctf_config.GlobalTestConfig /* components */ ClCluster *ClCluster @@ -49,7 +49,7 @@ type CLClusterTestEnv struct { sethClients map[int64]*seth.Client ContractDeployer contracts.ContractDeployer ContractLoader contracts.ContractLoader - PrivateEthereumConfigs []*test_env.EthereumNetwork // new approach to private chains, supporting eth1 and eth2 + PrivateEthereumConfigs []*ctf_config.EthereumNetworkConfig EVMNetworks []*blockchain.EVMNetwork rpcProviders map[int64]*test_env.RpcProvider l zerolog.Logger @@ -95,18 +95,11 @@ func (te *CLClusterTestEnv) ParallelTransactions(enabled bool) { } } -func (te *CLClusterTestEnv) StartEthereumNetwork(cfg *test_env.EthereumNetwork) (blockchain.EVMNetwork, test_env.RpcProvider, error) { +func (te *CLClusterTestEnv) StartEthereumNetwork(cfg *ctf_config.EthereumNetworkConfig) (blockchain.EVMNetwork, test_env.RpcProvider, error) { // if environment is being restored from a previous state, use the existing config // this might fail terribly if temporary folders with chain data on the host machine were removed - if te.Cfg != nil && te.Cfg.EthereumNetwork != nil { - builder := test_env.NewEthereumNetworkBuilder() - c, err := builder.WithExistingConfig(*te.Cfg.EthereumNetwork). - WithTest(te.t). - Build() - if err != nil { - return blockchain.EVMNetwork{}, test_env.RpcProvider{}, err - } - cfg = &c + if te.Cfg != nil && te.Cfg.EthereumNetworkConfig != nil { + cfg = te.Cfg.EthereumNetworkConfig } te.l.Info(). @@ -115,7 +108,15 @@ func (te *CLClusterTestEnv) StartEthereumNetwork(cfg *test_env.EthereumNetwork) Str("Custom Docker Images", fmt.Sprintf("%v", cfg.CustomDockerImages)). Msg("Starting Ethereum network") - n, rpc, err := cfg.Start() + builder := test_env.NewEthereumNetworkBuilder() + c, err := builder.WithExistingConfig(*cfg). + WithTest(te.t). 
+ Build() + if err != nil { + return blockchain.EVMNetwork{}, test_env.RpcProvider{}, err + } + + n, rpc, err := c.Start() if err != nil { return blockchain.EVMNetwork{}, test_env.RpcProvider{}, err @@ -129,7 +130,7 @@ func (te *CLClusterTestEnv) StartMockAdapter() error { } // pass config here -func (te *CLClusterTestEnv) StartClCluster(nodeConfig *chainlink.Config, count int, secretsConfig string, testconfig core_testconfig.GlobalTestConfig, opts ...ClNodeOption) error { +func (te *CLClusterTestEnv) StartClCluster(nodeConfig *chainlink.Config, count int, secretsConfig string, testconfig ctf_config.GlobalTestConfig, opts ...ClNodeOption) error { if te.Cfg != nil && te.Cfg.ClCluster != nil { te.ClCluster = te.Cfg.ClCluster } else { diff --git a/integration-tests/docker/test_env/test_env_builder.go b/integration-tests/docker/test_env/test_env_builder.go index cea3146588d..19617b0c5f7 100644 --- a/integration-tests/docker/test_env/test_env_builder.go +++ b/integration-tests/docker/test_env/test_env_builder.go @@ -12,6 +12,7 @@ import ( "github.com/smartcontractkit/seth" "github.com/smartcontractkit/chainlink-testing-framework/blockchain" + ctf_config "github.com/smartcontractkit/chainlink-testing-framework/config" "github.com/smartcontractkit/chainlink-testing-framework/docker/test_env" "github.com/smartcontractkit/chainlink-testing-framework/logging" "github.com/smartcontractkit/chainlink-testing-framework/logstream" @@ -23,7 +24,6 @@ import ( actions_seth "github.com/smartcontractkit/chainlink/integration-tests/actions/seth" "github.com/smartcontractkit/chainlink/integration-tests/contracts" - tc "github.com/smartcontractkit/chainlink/integration-tests/testconfig" "github.com/smartcontractkit/chainlink/integration-tests/types/config/node" ) @@ -55,8 +55,8 @@ type CLTestEnvBuilder struct { cleanUpCustomFn func() chainOptionsFn []ChainOption evmNetworkOption []EVMNetworkOption - privateEthereumNetworks []*test_env.EthereumNetwork - testConfig tc.GlobalTestConfig + privateEthereumNetworks []*ctf_config.EthereumNetworkConfig + testConfig ctf_config.GlobalTestConfig /* funding */ ETHFunds *big.Float @@ -119,7 +119,7 @@ func (b *CLTestEnvBuilder) WithCLNodes(clNodesCount int) *CLTestEnvBuilder { return b } -func (b *CLTestEnvBuilder) WithTestConfig(cfg tc.GlobalTestConfig) *CLTestEnvBuilder { +func (b *CLTestEnvBuilder) WithTestConfig(cfg ctf_config.GlobalTestConfig) *CLTestEnvBuilder { b.testConfig = cfg return b } @@ -145,12 +145,12 @@ func (b *CLTestEnvBuilder) WithSeth() *CLTestEnvBuilder { return b } -func (b *CLTestEnvBuilder) WithPrivateEthereumNetwork(en test_env.EthereumNetwork) *CLTestEnvBuilder { +func (b *CLTestEnvBuilder) WithPrivateEthereumNetwork(en ctf_config.EthereumNetworkConfig) *CLTestEnvBuilder { b.privateEthereumNetworks = append(b.privateEthereumNetworks, &en) return b } -func (b *CLTestEnvBuilder) WithPrivateEthereumNetworks(ens []*test_env.EthereumNetwork) *CLTestEnvBuilder { +func (b *CLTestEnvBuilder) WithPrivateEthereumNetworks(ens []*ctf_config.EthereumNetworkConfig) *CLTestEnvBuilder { b.privateEthereumNetworks = ens return b } @@ -299,7 +299,7 @@ func (b *CLTestEnvBuilder) Build() (*CLClusterTestEnv, error) { if err != nil { return nil, err } - b.privateEthereumNetworks[i] = &netWithLs + b.privateEthereumNetworks[i] = &netWithLs.EthereumNetworkConfig } } @@ -460,7 +460,8 @@ func (b *CLTestEnvBuilder) Build() (*CLClusterTestEnv, error) { if b.chainOptionsFn != nil && len(b.chainOptionsFn) > 0 { for _, fn := range b.chainOptionsFn { for _, evmCfg := range cfg.EVM { - 
fn(&evmCfg.Chain) + chainCfg := evmCfg.Chain + fn(&chainCfg) } } } diff --git a/integration-tests/docker/test_env/test_env_config.go b/integration-tests/docker/test_env/test_env_config.go index 0902deb0c2d..9aefa9615c9 100644 --- a/integration-tests/docker/test_env/test_env_config.go +++ b/integration-tests/docker/test_env/test_env_config.go @@ -3,16 +3,16 @@ package test_env import ( "encoding/json" - cte "github.com/smartcontractkit/chainlink-testing-framework/docker/test_env" + ctf_config "github.com/smartcontractkit/chainlink-testing-framework/config" env "github.com/smartcontractkit/chainlink/integration-tests/types/envcommon" ) type TestEnvConfig struct { - Networks []string `json:"networks"` - Geth GethConfig `json:"geth"` - MockAdapter MockAdapterConfig `json:"mock_adapter"` - ClCluster *ClCluster `json:"clCluster"` - EthereumNetwork *cte.EthereumNetwork `json:"private_ethereum_config"` + Networks []string `json:"networks"` + Geth GethConfig `json:"geth"` + MockAdapter MockAdapterConfig `json:"mock_adapter"` + ClCluster *ClCluster `json:"clCluster"` + EthereumNetworkConfig *ctf_config.EthereumNetworkConfig `json:"private_ethereum_config"` } type MockAdapterConfig struct { diff --git a/integration-tests/go.mod b/integration-tests/go.mod index 8cd8ae9a021..ec2ded62c93 100644 --- a/integration-tests/go.mod +++ b/integration-tests/go.mod @@ -10,6 +10,7 @@ require ( github.com/barkimedes/go-deepcopy v0.0.0-20220514131651-17c30cfc62df github.com/cli/go-gh/v2 v2.0.0 github.com/ethereum/go-ethereum v1.13.8 + github.com/fxamacker/cbor/v2 v2.5.0 github.com/go-resty/resty/v2 v2.11.0 github.com/google/go-cmp v0.6.0 github.com/google/uuid v1.6.0 @@ -27,7 +28,7 @@ require ( github.com/slack-go/slack v0.12.2 github.com/smartcontractkit/chainlink-automation v1.0.3 github.com/smartcontractkit/chainlink-common v0.1.7-0.20240429120925-907b29311feb - github.com/smartcontractkit/chainlink-testing-framework v1.28.7 + github.com/smartcontractkit/chainlink-testing-framework v1.28.8 github.com/smartcontractkit/chainlink-vrf v0.0.0-20231120191722-fef03814f868 github.com/smartcontractkit/chainlink/v2 v2.0.0-00010101000000-000000000000 github.com/smartcontractkit/libocr v0.0.0-20240419185742-fd3cab206b2c @@ -171,7 +172,6 @@ require ( github.com/felixge/httpsnoop v1.0.3 // indirect github.com/fsnotify/fsnotify v1.6.0 // indirect github.com/fvbommel/sortorder v1.0.2 // indirect - github.com/fxamacker/cbor/v2 v2.5.0 // indirect github.com/gabriel-vasile/mimetype v1.4.2 // indirect github.com/gagliardetto/binary v0.7.7 // indirect github.com/gagliardetto/solana-go v1.8.4 // indirect diff --git a/integration-tests/go.sum b/integration-tests/go.sum index b1f77bcec35..7e57e196d7e 100644 --- a/integration-tests/go.sum +++ b/integration-tests/go.sum @@ -1523,8 +1523,8 @@ github.com/smartcontractkit/chainlink-solana v1.0.3-0.20240422172640-59d47c73ba5 github.com/smartcontractkit/chainlink-solana v1.0.3-0.20240422172640-59d47c73ba58/go.mod h1:oV5gIuSKrPEcjQ6uB6smBsm5kXHxyydVLNyAs4V9CoQ= github.com/smartcontractkit/chainlink-starknet/relayer v0.0.1-beta-test.0.20240325075535-0f7eb05ee595 h1:y6ks0HsSOhPUueOmTcoxDQ50RCS1XINlRDTemZyHjFw= github.com/smartcontractkit/chainlink-starknet/relayer v0.0.1-beta-test.0.20240325075535-0f7eb05ee595/go.mod h1:vV6WfnVIbK5Q1JsIru4YcTG0T1uRpLJm6t2BgCnCSsg= -github.com/smartcontractkit/chainlink-testing-framework v1.28.7 h1:Yr93tBl5jVx1cfKywt0C0cbuObDPJ6JIU4FIsZ6bZlM= -github.com/smartcontractkit/chainlink-testing-framework v1.28.7/go.mod h1:x1zDOz8zcLjEvs9fNA9y/DMguLam/2+CJdpxX0+rM8A= 
+github.com/smartcontractkit/chainlink-testing-framework v1.28.8 h1:EaxNwB/16wpISzaUn2WJ4bE3TawD3joEekIlQuWNRGo= +github.com/smartcontractkit/chainlink-testing-framework v1.28.8/go.mod h1:x1zDOz8zcLjEvs9fNA9y/DMguLam/2+CJdpxX0+rM8A= github.com/smartcontractkit/chainlink-testing-framework/grafana v0.0.0-20240328204215-ac91f55f1449 h1:fX/xmGm1GBsD1ZZnooNT+eWA0hiTAqFlHzOC5CY4dy8= github.com/smartcontractkit/chainlink-testing-framework/grafana v0.0.0-20240328204215-ac91f55f1449/go.mod h1:DC8sQMyTlI/44UCTL8QWFwb0bYNoXCfjwCv2hMivYZU= github.com/smartcontractkit/chainlink-vrf v0.0.0-20231120191722-fef03814f868 h1:FFdvEzlYwcuVHkdZ8YnZR/XomeMGbz5E2F2HZI3I3w8= diff --git a/integration-tests/k8s/connect.go b/integration-tests/k8s/connect.go index d9a4223c077..be1f9a25f9d 100644 --- a/integration-tests/k8s/connect.go +++ b/integration-tests/k8s/connect.go @@ -73,6 +73,12 @@ func ConnectRemote() (*blockchain.EVMNetwork, *client2.MockserverClient, *client LocalURL: cfg.MockServerURL, ClusterURL: cfg.MockServerURL, }) + + if len(clClients) < 2 { + return &blockchain.EVMNetwork{}, nil, nil, nil, fmt.Errorf("not enough chainlink nodes, need at least 2, got %d", len(clClients)) + } + + //nolint:gosec // G602 - how is this potentially causing slice out of bounds is beyond me return net, msClient, clClients[0], clClients[1:], nil } diff --git a/integration-tests/load/functions/onchain_monitoring.go b/integration-tests/load/functions/onchain_monitoring.go index 12a10ce0042..31ca8752dd3 100644 --- a/integration-tests/load/functions/onchain_monitoring.go +++ b/integration-tests/load/functions/onchain_monitoring.go @@ -7,7 +7,7 @@ import ( "github.com/rs/zerolog/log" "github.com/smartcontractkit/wasp" - tc "github.com/smartcontractkit/chainlink/integration-tests/testconfig" + ctf_config "github.com/smartcontractkit/chainlink-testing-framework/config" ) /* Monitors on-chain stats of LoadConsumer and pushes them to Loki every second */ @@ -25,7 +25,7 @@ type LoadStats struct { Empty uint32 } -func MonitorLoadStats(t *testing.T, ft *FunctionsTest, labels map[string]string, config tc.GlobalTestConfig) { +func MonitorLoadStats(t *testing.T, ft *FunctionsTest, labels map[string]string, config ctf_config.GlobalTestConfig) { go func() { updatedLabels := make(map[string]string) for k, v := range labels { diff --git a/integration-tests/load/functions/setup.go b/integration-tests/load/functions/setup.go index 47d9859d5f1..e6f96ccbdf2 100644 --- a/integration-tests/load/functions/setup.go +++ b/integration-tests/load/functions/setup.go @@ -14,13 +14,14 @@ import ( "github.com/smartcontractkit/seth" "github.com/smartcontractkit/tdh2/go/tdh2/tdh2easy" + chainlinkutils "github.com/smartcontractkit/chainlink/v2/core/chains/evm/utils" + + ctf_config "github.com/smartcontractkit/chainlink-testing-framework/config" "github.com/smartcontractkit/chainlink-testing-framework/networks" actions_seth "github.com/smartcontractkit/chainlink/integration-tests/actions/seth" "github.com/smartcontractkit/chainlink/integration-tests/contracts" - tc "github.com/smartcontractkit/chainlink/integration-tests/testconfig" "github.com/smartcontractkit/chainlink/integration-tests/types" - chainlinkutils "github.com/smartcontractkit/chainlink/v2/core/chains/evm/utils" ) type FunctionsTest struct { @@ -50,7 +51,7 @@ type S4SecretsCfg struct { S4SetPayload string } -func SetupLocalLoadTestEnv(globalConfig tc.GlobalTestConfig, functionsConfig types.FunctionsTestConfig) (*FunctionsTest, error) { +func SetupLocalLoadTestEnv(globalConfig 
ctf_config.GlobalTestConfig, functionsConfig types.FunctionsTestConfig) (*FunctionsTest, error) { selectedNetwork := networks.MustGetSelectedNetworkConfig(globalConfig.GetNetworkConfig())[0] seth, err := actions_seth.GetChainClient(globalConfig, selectedNetwork) if err != nil { diff --git a/integration-tests/load/go.mod b/integration-tests/load/go.mod index 20fee3939c0..775fffd97f9 100644 --- a/integration-tests/load/go.mod +++ b/integration-tests/load/go.mod @@ -17,7 +17,7 @@ require ( github.com/slack-go/slack v0.12.2 github.com/smartcontractkit/chainlink-automation v1.0.3 github.com/smartcontractkit/chainlink-common v0.1.7-0.20240429120925-907b29311feb - github.com/smartcontractkit/chainlink-testing-framework v1.28.7 + github.com/smartcontractkit/chainlink-testing-framework v1.28.8 github.com/smartcontractkit/chainlink/integration-tests v0.0.0-20240214231432-4ad5eb95178c github.com/smartcontractkit/chainlink/v2 v2.9.0-beta0.0.20240216210048-da02459ddad8 github.com/smartcontractkit/libocr v0.0.0-20240419185742-fd3cab206b2c diff --git a/integration-tests/load/go.sum b/integration-tests/load/go.sum index 2f19dcb4f17..7f61214c669 100644 --- a/integration-tests/load/go.sum +++ b/integration-tests/load/go.sum @@ -1514,8 +1514,8 @@ github.com/smartcontractkit/chainlink-solana v1.0.3-0.20240422172640-59d47c73ba5 github.com/smartcontractkit/chainlink-solana v1.0.3-0.20240422172640-59d47c73ba58/go.mod h1:oV5gIuSKrPEcjQ6uB6smBsm5kXHxyydVLNyAs4V9CoQ= github.com/smartcontractkit/chainlink-starknet/relayer v0.0.1-beta-test.0.20240325075535-0f7eb05ee595 h1:y6ks0HsSOhPUueOmTcoxDQ50RCS1XINlRDTemZyHjFw= github.com/smartcontractkit/chainlink-starknet/relayer v0.0.1-beta-test.0.20240325075535-0f7eb05ee595/go.mod h1:vV6WfnVIbK5Q1JsIru4YcTG0T1uRpLJm6t2BgCnCSsg= -github.com/smartcontractkit/chainlink-testing-framework v1.28.7 h1:Yr93tBl5jVx1cfKywt0C0cbuObDPJ6JIU4FIsZ6bZlM= -github.com/smartcontractkit/chainlink-testing-framework v1.28.7/go.mod h1:x1zDOz8zcLjEvs9fNA9y/DMguLam/2+CJdpxX0+rM8A= +github.com/smartcontractkit/chainlink-testing-framework v1.28.8 h1:EaxNwB/16wpISzaUn2WJ4bE3TawD3joEekIlQuWNRGo= +github.com/smartcontractkit/chainlink-testing-framework v1.28.8/go.mod h1:x1zDOz8zcLjEvs9fNA9y/DMguLam/2+CJdpxX0+rM8A= github.com/smartcontractkit/chainlink-testing-framework/grafana v0.0.0-20240328204215-ac91f55f1449 h1:fX/xmGm1GBsD1ZZnooNT+eWA0hiTAqFlHzOC5CY4dy8= github.com/smartcontractkit/chainlink-testing-framework/grafana v0.0.0-20240328204215-ac91f55f1449/go.mod h1:DC8sQMyTlI/44UCTL8QWFwb0bYNoXCfjwCv2hMivYZU= github.com/smartcontractkit/chainlink-vrf v0.0.0-20231120191722-fef03814f868 h1:FFdvEzlYwcuVHkdZ8YnZR/XomeMGbz5E2F2HZI3I3w8= diff --git a/integration-tests/migration/upgrade_version_test.go b/integration-tests/migration/upgrade_version_test.go index 47761c09e50..f89644eb815 100644 --- a/integration-tests/migration/upgrade_version_test.go +++ b/integration-tests/migration/upgrade_version_test.go @@ -30,7 +30,7 @@ func TestVersionUpgrade(t *testing.T) { WithTestConfig(&config). WithTestInstance(t). WithStandardCleanup(). - WithPrivateEthereumNetwork(privateNetwork). + WithPrivateEthereumNetwork(privateNetwork.EthereumNetworkConfig). WithCLNodes(1). WithStandardCleanup(). WithSeth(). 
diff --git a/integration-tests/smoke/automation_test.go b/integration-tests/smoke/automation_test.go index f165f6ce3dc..7eb80ee52d9 100644 --- a/integration-tests/smoke/automation_test.go +++ b/integration-tests/smoke/automation_test.go @@ -12,6 +12,8 @@ import ( "time" "github.com/ethereum/go-ethereum/common" + "github.com/ethereum/go-ethereum/common/hexutil" + "github.com/fxamacker/cbor/v2" "github.com/onsi/gomega" "github.com/stretchr/testify/require" @@ -35,6 +37,7 @@ import ( "github.com/smartcontractkit/chainlink/integration-tests/types/config/node" ac "github.com/smartcontractkit/chainlink/v2/core/gethwrappers/generated/automation_compatible_utils" "github.com/smartcontractkit/chainlink/v2/core/services/ocr2/plugins/ocr2keeper/evmregistry/v21/core" + "github.com/smartcontractkit/chainlink/v2/core/services/ocr2/plugins/ocr2keeper/evmregistry/v21/gasprice" "github.com/smartcontractkit/chainlink/v2/core/services/ocr2/plugins/ocr2keeper/evmregistry/v21/mercury/streams" actions_seth "github.com/smartcontractkit/chainlink/integration-tests/actions/seth" @@ -215,7 +218,7 @@ func SetupAutomationBasic(t *testing.T, nodeUpgrade bool) { for i := 0; i < len(upkeepIDs); i++ { counter, err := consumers[i].Counter(testcontext.Get(t)) require.NoError(t, err, "Failed to retrieve consumer counter for upkeep at index %d", i) - l.Info().Int64("Upkeeps Performed", counter.Int64()).Int("Upkeep ID", i).Msg("Number of upkeeps performed") + l.Info().Int64("Upkeeps Performed", counter.Int64()).Int("Upkeep index", i).Msg("Number of upkeeps performed") g.Expect(counter.Int64()).Should(gomega.BeNumerically(">=", int64(expect)), "Expected consumer counter to be greater than %d, but got %d", expect, counter.Int64()) } @@ -707,7 +710,7 @@ func TestAutomationRegisterUpkeep(t *testing.T) { "Expected consumer counter to be greater than 0, but got %d", counter.Int64()) l.Info(). Int64("Upkeep counter", counter.Int64()). - Int64("Upkeep ID", int64(i)). + Int64("Upkeep index", int64(i)). Msg("Number of upkeeps performed") } }, "4m", "1s").Should(gomega.Succeed()) // ~1m for cluster setup, ~1m for performing each upkeep once, ~2m buffer @@ -733,7 +736,7 @@ func TestAutomationRegisterUpkeep(t *testing.T) { g.Expect(err).ShouldNot(gomega.HaveOccurred(), "Calling consumer's counter shouldn't fail") l.Info(). - Int64("Upkeep ID", int64(i)). + Int64("Upkeep index", int64(i)). Int64("Upkeep counter", currentCounter.Int64()). Int64("initial counter", initialCounters[i].Int64()). 
Msg("Number of upkeeps performed") @@ -1294,6 +1297,114 @@ func TestUpdateCheckData(t *testing.T) { } } +func TestSetOffchainConfigWithMaxGasPrice(t *testing.T) { + t.Parallel() + registryVersions := map[string]ethereum.KeeperRegistryVersion{ + // registry20 also has upkeep offchain config but the max gas price check is not implemented + "registry_2_1": ethereum.RegistryVersion_2_1, + "registry_2_2": ethereum.RegistryVersion_2_2, + } + + for n, rv := range registryVersions { + name := n + registryVersion := rv + t.Run(name, func(t *testing.T) { + t.Parallel() + l := logging.GetTestLogger(t) + config, err := tc.GetConfig("Smoke", tc.Automation) + if err != nil { + t.Fatal(err) + } + a := setupAutomationTestDocker( + t, registryVersion, automationDefaultRegistryConfig(config), false, false, &config, + ) + + consumers, upkeepIDs := actions_seth.DeployConsumers( + t, + a.ChainClient, + a.Registry, + a.Registrar, + a.LinkToken, + defaultAmountOfUpkeeps, + big.NewInt(automationDefaultLinkFunds), + automationDefaultUpkeepGasLimit, + false, + false, + ) + gom := gomega.NewGomegaWithT(t) + + l.Info().Msg("waiting for all upkeeps to be performed at least once") + gom.Eventually(func(g gomega.Gomega) { + for i := 0; i < len(upkeepIDs); i++ { + counter, err := consumers[i].Counter(testcontext.Get(t)) + g.Expect(err).ShouldNot(gomega.HaveOccurred(), "Failed to retrieve consumer counter for upkeep at index %d", i) + g.Expect(counter.Int64()).Should(gomega.BeNumerically(">", int64(0)), + "Expected consumer counter to be greater than 0, but got %d") + } + }, "3m", "1s").Should(gomega.Succeed()) // ~1m for cluster setup, ~1m for performing each upkeep once, ~2m buffer + + // set the maxGasPrice to 1 wei + uoc, _ := cbor.Marshal(gasprice.UpkeepOffchainConfig{MaxGasPrice: big.NewInt(1)}) + l.Info().Msgf("setting all upkeeps' offchain config to %s, which means maxGasPrice is 1 wei", hexutil.Encode(uoc)) + for _, uid := range upkeepIDs { + err = a.Registry.SetUpkeepOffchainConfig(uid, uoc) + require.NoError(t, err, "Error setting upkeep offchain config") + } + + // Store how many times each upkeep performed once their offchain config is set with maxGasPrice = 1 wei + var countersAfterSettingLowMaxGasPrice = make([]*big.Int, len(upkeepIDs)) + for i := 0; i < len(upkeepIDs); i++ { + countersAfterSettingLowMaxGasPrice[i], err = consumers[i].Counter(testcontext.Get(t)) + require.NoError(t, err, "Failed to retrieve consumer counter for upkeep at index %d", i) + l.Info().Int64("Upkeep Performed times", countersAfterSettingLowMaxGasPrice[i].Int64()).Int("Upkeep index", i).Msg("Number of upkeeps performed") + } + + var latestCounter *big.Int + // the counters of all the upkeeps should stay constant because they are no longer getting serviced + gom.Consistently(func(g gomega.Gomega) { + for i := 0; i < len(upkeepIDs); i++ { + latestCounter, err = consumers[i].Counter(testcontext.Get(t)) + g.Expect(err).ShouldNot(gomega.HaveOccurred(), "Failed to retrieve consumer counter for upkeep at index %d", i) + g.Expect(latestCounter.Int64()).Should(gomega.Equal(countersAfterSettingLowMaxGasPrice[i].Int64()), + "Expected consumer counter to remain constant at %d, but got %d", + countersAfterSettingLowMaxGasPrice[i].Int64(), latestCounter.Int64()) + } + }, "2m", "1s").Should(gomega.Succeed()) + l.Info().Msg("no upkeeps is performed because their max gas price is only 1 wei") + + // setting offchain config with a high max gas price for the first upkeep, it should perform again while + // other upkeeps should not perform + // set 
the maxGasPrice to 500 gwei for the first upkeep + uoc, _ = cbor.Marshal(gasprice.UpkeepOffchainConfig{MaxGasPrice: big.NewInt(500_000_000_000)}) + l.Info().Msgf("setting the first upkeeps' offchain config to %s, which means maxGasPrice is 500 gwei", hexutil.Encode(uoc)) + err = a.Registry.SetUpkeepOffchainConfig(upkeepIDs[0], uoc) + require.NoError(t, err, "Error setting upkeep offchain config") + + // the counters of all other upkeeps should stay constant because their max gas price remains very low + gom.Consistently(func(g gomega.Gomega) { + for i := 1; i < len(upkeepIDs); i++ { + latestCounter, err = consumers[i].Counter(testcontext.Get(t)) + g.Expect(err).ShouldNot(gomega.HaveOccurred(), "Failed to retrieve consumer counter for upkeep at index %d", i) + g.Expect(latestCounter.Int64()).Should(gomega.Equal(countersAfterSettingLowMaxGasPrice[i].Int64()), + "Expected consumer counter to remain constant at %d, but got %d", + countersAfterSettingLowMaxGasPrice[i].Int64(), latestCounter.Int64()) + } + }, "2m", "1s").Should(gomega.Succeed()) + l.Info().Msg("all the rest upkeeps did not perform again because their max gas price remains 1 wei") + + // the first upkeep should start performing again + gom.Eventually(func(g gomega.Gomega) { + latestCounter, err = consumers[0].Counter(testcontext.Get(t)) + g.Expect(err).ShouldNot(gomega.HaveOccurred(), "Failed to retrieve consumer counter for upkeep at index 0") + g.Expect(latestCounter.Int64()).Should(gomega.BeNumerically(">", countersAfterSettingLowMaxGasPrice[0].Int64()), + "Expected consumer counter to be greater than %d, but got %d", + countersAfterSettingLowMaxGasPrice[0].Int64(), latestCounter.Int64()) + }, "2m", "1s").Should(gomega.Succeed()) // ~1m for cluster setup, ~1m for performing each upkeep once, ~2m buffer + l.Info().Int64("Upkeep Performed times", latestCounter.Int64()).Msg("the first upkeep performed again") + }) + } +} + func setupAutomationTestDocker( t *testing.T, registryVersion ethereum.KeeperRegistryVersion, @@ -1334,7 +1445,7 @@ func setupAutomationTestDocker( env, err = test_env.NewCLTestEnvBuilder(). WithTestInstance(t). WithTestConfig(automationTestConfig). - WithPrivateEthereumNetwork(privateNetwork). + WithPrivateEthereumNetwork(privateNetwork.EthereumNetworkConfig). WithMockAdapter(). WithFunding(big.NewFloat(*automationTestConfig.GetCommonConfig().ChainlinkNodeFunding)). WithStandardCleanup(). @@ -1374,7 +1485,7 @@ func setupAutomationTestDocker( env, err = test_env.NewCLTestEnvBuilder(). WithTestInstance(t). WithTestConfig(automationTestConfig). - WithPrivateEthereumNetwork(privateNetwork). + WithPrivateEthereumNetwork(privateNetwork.EthereumNetworkConfig). WithMockAdapter(). WithCLNodes(clNodesCount). WithCLNodeConfig(clNodeConfig). diff --git a/integration-tests/smoke/automation_test.go_test_list.json b/integration-tests/smoke/automation_test.go_test_list.json index 03029c9018b..e8f0f838dfd 100644 --- a/integration-tests/smoke/automation_test.go_test_list.json +++ b/integration-tests/smoke/automation_test.go_test_list.json @@ -70,6 +70,10 @@ { "name": "TestUpdateCheckData", "nodes": 3 + }, + { + "name": "TestSetOffchainConfigWithMaxGasPrice", + "nodes": 2 } ] } \ No newline at end of file diff --git a/integration-tests/smoke/cron_test.go b/integration-tests/smoke/cron_test.go index 218727b7d66..e281824f0bb 100644 --- a/integration-tests/smoke/cron_test.go +++ b/integration-tests/smoke/cron_test.go @@ -32,7 +32,7 @@ func TestCronBasic(t *testing.T) { env, err := test_env.NewCLTestEnvBuilder(). 
WithTestInstance(t). WithTestConfig(&config). - WithPrivateEthereumNetwork(privateNetwork). + WithPrivateEthereumNetwork(privateNetwork.EthereumNetworkConfig). WithMockAdapter(). WithCLNodes(1). WithStandardCleanup(). @@ -88,7 +88,7 @@ func TestCronJobReplacement(t *testing.T) { env, err := test_env.NewCLTestEnvBuilder(). WithTestInstance(t). WithTestConfig(&config). - WithPrivateEthereumNetwork(privateNetwork). + WithPrivateEthereumNetwork(privateNetwork.EthereumNetworkConfig). WithMockAdapter(). WithCLNodes(1). WithStandardCleanup(). diff --git a/integration-tests/smoke/flux_test.go b/integration-tests/smoke/flux_test.go index 023dd9dae89..4165e9b79b7 100644 --- a/integration-tests/smoke/flux_test.go +++ b/integration-tests/smoke/flux_test.go @@ -39,7 +39,7 @@ func TestFluxBasic(t *testing.T) { env, err := test_env.NewCLTestEnvBuilder(). WithTestInstance(t). WithTestConfig(&config). - WithPrivateEthereumNetwork(privateNetwork). + WithPrivateEthereumNetwork(privateNetwork.EthereumNetworkConfig). WithMockAdapter(). WithCLNodes(3). WithStandardCleanup(). diff --git a/integration-tests/smoke/forwarder_ocr_test.go b/integration-tests/smoke/forwarder_ocr_test.go index db7ce9ac9ae..0446254362a 100644 --- a/integration-tests/smoke/forwarder_ocr_test.go +++ b/integration-tests/smoke/forwarder_ocr_test.go @@ -35,7 +35,7 @@ func TestForwarderOCRBasic(t *testing.T) { env, err := test_env.NewCLTestEnvBuilder(). WithTestInstance(t). WithTestConfig(&config). - WithPrivateEthereumNetwork(privateNetwork). + WithPrivateEthereumNetwork(privateNetwork.EthereumNetworkConfig). WithMockAdapter(). WithForwarders(). WithCLNodes(6). diff --git a/integration-tests/smoke/forwarders_ocr2_test.go b/integration-tests/smoke/forwarders_ocr2_test.go index 3ccd99f61e6..9dd5d5c39a4 100644 --- a/integration-tests/smoke/forwarders_ocr2_test.go +++ b/integration-tests/smoke/forwarders_ocr2_test.go @@ -38,7 +38,7 @@ func TestForwarderOCR2Basic(t *testing.T) { env, err := test_env.NewCLTestEnvBuilder(). WithTestInstance(t). WithTestConfig(&config). - WithPrivateEthereumNetwork(privateNetwork). + WithPrivateEthereumNetwork(privateNetwork.EthereumNetworkConfig). WithMockAdapter(). WithCLNodeConfig(node.NewConfig(node.NewBaseConfig(), node.WithOCR2(), diff --git a/integration-tests/smoke/keeper_test.go b/integration-tests/smoke/keeper_test.go index f9c02e5f627..8b39924f6a3 100644 --- a/integration-tests/smoke/keeper_test.go +++ b/integration-tests/smoke/keeper_test.go @@ -1377,7 +1377,7 @@ func setupKeeperTest(l zerolog.Logger, t *testing.T, config *tc.TestConfig) ( env, err := test_env.NewCLTestEnvBuilder(). WithTestInstance(t). WithTestConfig(config). - WithPrivateEthereumNetwork(privateNetwork). + WithPrivateEthereumNetwork(privateNetwork.EthereumNetworkConfig). WithCLNodes(5). WithCLNodeConfig(clNodeConfig). WithFunding(big.NewFloat(.5)). diff --git a/integration-tests/smoke/ocr2_test.go b/integration-tests/smoke/ocr2_test.go index d4f7d1e7ffd..d2df0c858c0 100644 --- a/integration-tests/smoke/ocr2_test.go +++ b/integration-tests/smoke/ocr2_test.go @@ -138,7 +138,7 @@ func prepareORCv2SmokeTestEnv(t *testing.T, l zerolog.Logger, firstRoundResult i env, err := test_env.NewCLTestEnvBuilder(). WithTestInstance(t). WithTestConfig(&config). - WithPrivateEthereumNetwork(privateNetwork). + WithPrivateEthereumNetwork(privateNetwork.EthereumNetworkConfig). WithMockAdapter(). 
WithCLNodeConfig(node.NewConfig(node.NewBaseConfig(), node.WithOCR2(), diff --git a/integration-tests/smoke/ocr_test.go b/integration-tests/smoke/ocr_test.go index 29e633beb15..bef08493962 100644 --- a/integration-tests/smoke/ocr_test.go +++ b/integration-tests/smoke/ocr_test.go @@ -91,7 +91,7 @@ func prepareORCv1SmokeTestEnv(t *testing.T, l zerolog.Logger, firstRoundResult i env, err := test_env.NewCLTestEnvBuilder(). WithTestInstance(t). WithTestConfig(&config). - WithPrivateEthereumNetwork(network). + WithPrivateEthereumNetwork(network.EthereumNetworkConfig). WithMockAdapter(). WithCLNodes(6). WithFunding(big.NewFloat(.5)). diff --git a/integration-tests/smoke/runlog_test.go b/integration-tests/smoke/runlog_test.go index d255fe07235..b01c5a019b1 100644 --- a/integration-tests/smoke/runlog_test.go +++ b/integration-tests/smoke/runlog_test.go @@ -36,7 +36,7 @@ func TestRunLogBasic(t *testing.T) { env, err := test_env.NewCLTestEnvBuilder(). WithTestInstance(t). WithTestConfig(&config). - WithPrivateEthereumNetwork(privateNetwork). + WithPrivateEthereumNetwork(privateNetwork.EthereumNetworkConfig). WithMockAdapter(). WithCLNodes(1). WithFunding(big.NewFloat(.1)). diff --git a/integration-tests/smoke/vrf_test.go b/integration-tests/smoke/vrf_test.go index 3a28c14be00..70911e8de66 100644 --- a/integration-tests/smoke/vrf_test.go +++ b/integration-tests/smoke/vrf_test.go @@ -65,6 +65,7 @@ func TestVRFBasic(t *testing.T) { encodedProvingKeys := make([][2]*big.Int, 0) encodedProvingKeys = append(encodedProvingKeys, provingKey) + //nolint:gosec // G602 requestHash, err := contracts.Coordinator.HashOfKey(testcontext.Get(t), encodedProvingKeys[0]) require.NoError(t, err, "Getting Hash of encoded proving keys shouldn't fail") err = contracts.Consumer.RequestRandomness(requestHash, big.NewInt(1)) @@ -133,6 +134,7 @@ func TestVRFJobReplacement(t *testing.T) { encodedProvingKeys := make([][2]*big.Int, 0) encodedProvingKeys = append(encodedProvingKeys, provingKey) + //nolint:gosec // G602 requestHash, err := contracts.Coordinator.HashOfKey(testcontext.Get(t), encodedProvingKeys[0]) require.NoError(t, err, "Getting Hash of encoded proving keys shouldn't fail") err = contracts.Consumer.RequestRandomness(requestHash, big.NewInt(1)) @@ -192,7 +194,7 @@ func prepareVRFtestEnv(t *testing.T, l zerolog.Logger) (*test_env.CLClusterTestE env, err := test_env.NewCLTestEnvBuilder(). WithTestInstance(t). WithTestConfig(&config). - WithPrivateEthereumNetwork(privateNetwork). + WithPrivateEthereumNetwork(privateNetwork.EthereumNetworkConfig). WithCLNodes(1). WithFunding(big.NewFloat(.1)). WithStandardCleanup(). 
diff --git a/integration-tests/testconfig/testconfig.go b/integration-tests/testconfig/testconfig.go index fb031958fa5..abeca8e6eb2 100644 --- a/integration-tests/testconfig/testconfig.go +++ b/integration-tests/testconfig/testconfig.go @@ -20,8 +20,6 @@ import ( "github.com/smartcontractkit/seth" ctf_config "github.com/smartcontractkit/chainlink-testing-framework/config" - "github.com/smartcontractkit/chainlink-testing-framework/docker/test_env" - ctf_test_env "github.com/smartcontractkit/chainlink-testing-framework/docker/test_env" k8s_config "github.com/smartcontractkit/chainlink-testing-framework/k8s/config" "github.com/smartcontractkit/chainlink-testing-framework/logging" "github.com/smartcontractkit/chainlink-testing-framework/utils/conversions" @@ -37,15 +35,6 @@ import ( vrfv2plus_config "github.com/smartcontractkit/chainlink/integration-tests/testconfig/vrfv2plus" ) -type GlobalTestConfig interface { - GetChainlinkImageConfig() *ctf_config.ChainlinkImageConfig - GetLoggingConfig() *ctf_config.LoggingConfig - GetNetworkConfig() *ctf_config.NetworkConfig - GetPrivateEthereumNetworkConfig() *test_env.EthereumNetwork - GetPyroscopeConfig() *ctf_config.PyroscopeConfig - SethConfig -} - type UpgradeableChainlinkTestConfig interface { GetChainlinkUpgradeImageConfig() *ctf_config.ChainlinkImageConfig } @@ -82,24 +71,8 @@ type Ocr2TestConfig interface { GetOCR2Config() *ocr2_config.Config } -type NamedConfiguration interface { - GetConfigurationName() string -} - -type SethConfig interface { - GetSethConfig() *seth.Config -} - type TestConfig struct { - ChainlinkImage *ctf_config.ChainlinkImageConfig `toml:"ChainlinkImage"` - ChainlinkUpgradeImage *ctf_config.ChainlinkImageConfig `toml:"ChainlinkUpgradeImage"` - Logging *ctf_config.LoggingConfig `toml:"Logging"` - Network *ctf_config.NetworkConfig `toml:"Network"` - Pyroscope *ctf_config.PyroscopeConfig `toml:"Pyroscope"` - PrivateEthereumNetwork *ctf_test_env.EthereumNetwork `toml:"PrivateEthereumNetwork"` - WaspConfig *ctf_config.WaspAutoBuildConfig `toml:"WaspAutoBuild"` - - Seth *seth.Config `toml:"Seth"` + ctf_config.TestConfig Common *Common `toml:"Common"` Automation *a_config.Config `toml:"Automation"` @@ -184,7 +157,7 @@ func (c TestConfig) GetChainlinkImageConfig() *ctf_config.ChainlinkImageConfig { return c.ChainlinkImage } -func (c TestConfig) GetPrivateEthereumNetworkConfig() *ctf_test_env.EthereumNetwork { +func (c TestConfig) GetPrivateEthereumNetworkConfig() *ctf_config.EthereumNetworkConfig { return c.PrivateEthereumNetwork } diff --git a/integration-tests/testconfig/testconfig_test.go b/integration-tests/testconfig/testconfig_test.go index 4a9dbdaade3..fd5230dac2d 100644 --- a/integration-tests/testconfig/testconfig_test.go +++ b/integration-tests/testconfig/testconfig_test.go @@ -57,13 +57,15 @@ func TestBase64ConfigRead(t *testing.T) { }, }, }, - Network: &ctf_config.NetworkConfig{ - SelectedNetworks: []string{"OPTIMISM_GOERLI"}, - RpcHttpUrls: map[string][]string{ - "OPTIMISM_GOERLI": {"http://localhost:8545"}, - }, - WalletKeys: map[string][]string{ - "OPTIMISM_GOERLI": {"0x3333333333333333333333333333333333333333"}, + TestConfig: ctf_config.TestConfig{ + Network: &ctf_config.NetworkConfig{ + SelectedNetworks: []string{"OPTIMISM_GOERLI"}, + RpcHttpUrls: map[string][]string{ + "OPTIMISM_GOERLI": {"http://localhost:8545"}, + }, + WalletKeys: map[string][]string{ + "OPTIMISM_GOERLI": {"0x3333333333333333333333333333333333333333"}, + }, }, }, } diff --git a/integration-tests/types/testconfigs.go 
b/integration-tests/types/testconfigs.go index cfebf0a3c7a..58eb1a7c8cf 100644 --- a/integration-tests/types/testconfigs.go +++ b/integration-tests/types/testconfigs.go @@ -1,52 +1,53 @@ package types import ( + ctf_config "github.com/smartcontractkit/chainlink-testing-framework/config" "github.com/smartcontractkit/chainlink-testing-framework/testreporters" tc "github.com/smartcontractkit/chainlink/integration-tests/testconfig" ) type VRFv2TestConfig interface { tc.CommonTestConfig - tc.GlobalTestConfig + ctf_config.GlobalTestConfig tc.VRFv2TestConfig } type VRFv2PlusTestConfig interface { tc.CommonTestConfig - tc.GlobalTestConfig + ctf_config.GlobalTestConfig tc.VRFv2PlusTestConfig } type FunctionsTestConfig interface { tc.CommonTestConfig - tc.GlobalTestConfig + ctf_config.GlobalTestConfig tc.FunctionsTestConfig } type AutomationTestConfig interface { - tc.GlobalTestConfig + ctf_config.GlobalTestConfig tc.CommonTestConfig tc.UpgradeableChainlinkTestConfig tc.AutomationTestConfig } type KeeperBenchmarkTestConfig interface { - tc.GlobalTestConfig + ctf_config.GlobalTestConfig tc.CommonTestConfig tc.KeeperTestConfig - tc.NamedConfiguration + ctf_config.NamedConfiguration testreporters.GrafanaURLProvider } type OcrTestConfig interface { - tc.GlobalTestConfig + ctf_config.GlobalTestConfig tc.CommonTestConfig tc.OcrTestConfig - tc.SethConfig + ctf_config.SethConfig } type Ocr2TestConfig interface { - tc.GlobalTestConfig + ctf_config.GlobalTestConfig tc.CommonTestConfig tc.Ocr2TestConfig } diff --git a/integration-tests/universal/log_poller/helpers.go b/integration-tests/universal/log_poller/helpers.go index 0da2c3eb49f..e2866bc3073 100644 --- a/integration-tests/universal/log_poller/helpers.go +++ b/integration-tests/universal/log_poller/helpers.go @@ -1173,7 +1173,7 @@ func SetupLogPollerTestDocker( env, err = test_env.NewCLTestEnvBuilder(). WithTestConfig(testConfig). WithTestInstance(t). - WithPrivateEthereumNetwork(privateNetwork). + WithPrivateEthereumNetwork(privateNetwork.EthereumNetworkConfig). WithCLNodes(clNodesCount). WithCLNodeConfig(clNodeConfig). WithFunding(big.NewFloat(chainlinkNodeFunding)).