diff --git a/README.md b/README.md
index f00ee92..15af30d 100644
--- a/README.md
+++ b/README.md
@@ -10,6 +10,10 @@
 This repository contains supporting modules for operating Llama instances.
 For the core contracts, see the [Llama](https://github.com/llamaxyz/llama) repository.
 
+## Modules
+
+- **Token Voting:** smart contract policies that allow `ERC20Votes` or `ERC721Votes` tokenholders to create actions, enforced by delegated token thresholds, or to collectively approve or disapprove an action through token voting.
+
 ## Prerequisites
 
 [Foundry](https://github.com/foundry-rs/foundry) must be installed.
diff --git a/script/DeployLlamaTokenVotingFactory.s.sol b/script/DeployLlamaTokenVotingFactory.s.sol
new file mode 100644
index 0000000..907cfaa
--- /dev/null
+++ b/script/DeployLlamaTokenVotingFactory.s.sol
@@ -0,0 +1,19 @@
+// SPDX-License-Identifier: MIT
+pragma solidity 0.8.23;
+
+import {Script} from "forge-std/Script.sol";
+
+import {DeployUtils} from "script/DeployUtils.sol";
+import {LlamaTokenVotingFactory} from "src/token-voting/LlamaTokenVotingFactory.sol";
+
+contract DeployLlamaTokenVotingFactory is Script {
+  LlamaTokenVotingFactory tokenVotingFactory;
+
+  function run() public {
+    DeployUtils.print(string.concat("Deploying Llama token voting factory to chain: ", vm.toString(block.chainid)));
+
+    vm.broadcast();
+    tokenVotingFactory = new LlamaTokenVotingFactory();
+    DeployUtils.print(string.concat("  LlamaTokenVotingFactory: ", vm.toString(address(tokenVotingFactory))));
+  }
+}
diff --git a/script/DeployUtils.sol b/script/DeployUtils.sol
new file mode 100644
index 0000000..9a85f35
--- /dev/null
+++ b/script/DeployUtils.sol
@@ -0,0 +1,13 @@
+// SPDX-License-Identifier: MIT
+pragma solidity 0.8.23;
+
+import {console2} from "forge-std/Script.sol";
+
+library DeployUtils {
+  function print(string memory message) internal view {
+    // Avoid getting flooded with logs during tests. Note that fork tests will show logs with this
+    // approach, because there's currently no way to tell which environment we're in, e.g. script
+    // or test. This is being tracked in https://github.com/foundry-rs/foundry/issues/2900.
+    if (block.chainid != 31_337) console2.log(message);
+  }
+}
diff --git a/script/README.md b/script/README.md
new file mode 100644
index 0000000..4112946
--- /dev/null
+++ b/script/README.md
@@ -0,0 +1,35 @@
+# Llama Periphery Scripts
+
+The current Llama periphery scripts are:
+* `DeployLlamaTokenVotingFactory.s.sol`, which deploys the `LlamaTokenVotingFactory` to new chains.
+
+## DeployLlamaTokenVotingFactory
+
+To perform a dry run of the `DeployLlamaTokenVotingFactory` script on a network, first set the
+`SCRIPT_RPC_URL` variable in your `.env` file to a local node, e.g. anvil.
+
+To start anvil:
+
+```shell
+# Start anvil, forking from the desired network.
+anvil --fork-url $OPTIMISM_RPC_URL
+```
+Next, set `SCRIPT_PRIVATE_KEY` in your `.env` file. For a dry run, you can use one
+of the pre-provisioned private keys that anvil provides on startup.
+
+Then, to execute the call:
+
+```shell
+just dry-run-deploy
+```
+
+If that looks good, try broadcasting the script transactions to the local node.
+With the local node URL still set as `SCRIPT_RPC_URL` in your `.env` file:
+
+```shell
+just deploy
+```
+
+When you are ready to deploy to a live network, follow the same steps
+with `SCRIPT_RPC_URL` pointing to the appropriate node and
+`SCRIPT_PRIVATE_KEY` set to the deployer private key.
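For reference, the `.env` described in `script/README.md` only needs the two `SCRIPT_*` variables; a minimal sketch for a local dry run, with placeholder values (only the variable names come from the docs above, everything else is illustrative), might look like:

```shell
# .env — placeholder values for a dry run against the anvil fork started above.
# anvil's default listen address:
SCRIPT_RPC_URL=http://127.0.0.1:8545
# one of the pre-provisioned private keys anvil prints on startup:
SCRIPT_PRIVATE_KEY=<placeholder-anvil-private-key>
```

For a live deployment, the same two variables would instead point at the production RPC endpoint and the deployer key.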
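The `just dry-run-deploy` and `just deploy` recipes live in the repository's `justfile`, which is not part of this diff, so the commands below are only an assumed sketch of what they expand to, using standard `forge script` flags and assuming the `.env` values are loaded (for example via `set dotenv-load` in the `justfile` or `source .env`):

```shell
# Dry run: simulate DeployLlamaTokenVotingFactory against SCRIPT_RPC_URL without sending transactions.
forge script script/DeployLlamaTokenVotingFactory.s.sol:DeployLlamaTokenVotingFactory \
  --rpc-url $SCRIPT_RPC_URL --private-key $SCRIPT_PRIVATE_KEY -vvvv

# Broadcast: the same invocation with --broadcast, which actually sends the deployment transaction.
forge script script/DeployLlamaTokenVotingFactory.s.sol:DeployLlamaTokenVotingFactory \
  --rpc-url $SCRIPT_RPC_URL --private-key $SCRIPT_PRIVATE_KEY --broadcast -vvvv
```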