Merge remote-tracking branch 'origin/master' into HEAD
robin-near committed Nov 6, 2023
2 parents f7d7877 + 2615bb6 commit f225a58
Showing 15 changed files with 445 additions and 149 deletions.
167 changes: 108 additions & 59 deletions .github/workflows/ci.yml
@@ -9,25 +9,23 @@ on:
merge_group:

jobs:
backward_compat:
name: "Backward Compatibility"
build_binary:
name: "Build neard"
runs-on: ubuntu-22.04-16core
defaults:
run:
working-directory: ./pytest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v4
with:
python-version: 3.11
cache: pip
- uses: Swatinem/rust-cache@a95ba195448af2da9b00fb742d14ffaaf3c21f43
with:
prefix-key: "0" # change this to invalidate CI cache
shared-key: "cargo_nextest"
save-if: "false" # use the cache from nextest, but don’t double-save
# TODO: the python script should use the quick-release cargo profile, but currently does not
- run: pip3 install --user -r requirements.txt
- run: python3 tests/sanity/backward_compatible.py
- run: cargo build --locked --profile quick-release -p neard --bin neard
- uses: actions/upload-artifact@v3
with:
name: neard
path: target/quick-release/neard
if-no-files-found: error
retention-days: 1

cargo_nextest:
name: "Cargo Nextest (${{matrix.name}})"
@@ -48,7 +46,6 @@ jobs:
# them at earliest convenience :)
flags: "--exclude integration-tests --exclude node-runtime --exclude runtime-params-estimator --exclude near-network --exclude estimator-warehouse"
timeout-minutes: 90

steps:
# Some of the tests allocate really sparse maps, so heuristic-based overcommit limits are not
# appropriate here.
@@ -68,9 +65,20 @@
env:
RUST_BACKTRACE: short

db_migration:
name: "Database Migration"
runs-on: ubuntu-22.04-16core
protobuf_backward_compat:
name: "Protobuf Backward Compatibility"
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: bufbuild/buf-setup-action@1158f4fa81bc02e1ff62abcca6d516c9e24c77da
- uses: bufbuild/buf-breaking-action@a074e988ee34efcd4927079e79c611f428354c01
with:
against: "https://github.com/near/nearcore.git#${{github.event.pull_request.base.sha && format('ref={0}', github.event.pull_request.base.sha) || 'branch=master' }}"

py_backward_compat:
name: "Backward Compatibility"
needs: build_binary
runs-on: ubuntu-22.04
defaults:
run:
working-directory: ./pytest
@@ -80,31 +88,44 @@ jobs:
with:
python-version: 3.11
cache: pip
- uses: Swatinem/rust-cache@a95ba195448af2da9b00fb742d14ffaaf3c21f43
- uses: actions/download-artifact@v3
with:
prefix-key: "0" # change this to invalidate CI cache
save-if: "false" # use the cache from nextest, but don’t double-save
name: neard
path: pytest # NB: this does not account for defaults.run.working-directory
- run: echo "CURRENT_NEARD=$PWD/neard" >> "$GITHUB_ENV"
- run: chmod +x "$CURRENT_NEARD"
- run: pip3 install --user -r requirements.txt
- run: python3 tests/sanity/db_migration.py

protobuf_backward_compat:
name: "Protobuf Backward Compatibility"
runs-on: ubuntu-latest
- run: python3 tests/sanity/backward_compatible.py

py_db_migration:
name: "Database Migration"
needs: build_binary
runs-on: ubuntu-22.04
defaults:
run:
working-directory: ./pytest
steps:
- uses: actions/checkout@v4
- uses: bufbuild/buf-setup-action@1158f4fa81bc02e1ff62abcca6d516c9e24c77da
- uses: bufbuild/buf-breaking-action@a074e988ee34efcd4927079e79c611f428354c01
- uses: actions/setup-python@v4
with:
against: "https://github.com/near/nearcore.git#${{github.event.pull_request.base.sha && format('ref={0}', github.event.pull_request.base.sha) || 'branch=master' }}"
python-version: 3.11
cache: pip
- uses: actions/download-artifact@v3
with:
name: neard
path: pytest # NB: this does not account for defaults.run.working-directory
- run: echo "CURRENT_NEARD=$PWD/neard" >> "$GITHUB_ENV"
- run: echo "NEAR_ROOT=$PWD" >> "$GITHUB_ENV"
- run: chmod +x "$CURRENT_NEARD"
- run: pip3 install --user -r requirements.txt
- run: python3 tests/sanity/db_migration.py

sanity_checks:
py_sanity_checks:
name: "Sanity Checks"
runs-on: ubuntu-22.04-16core
strategy:
fail-fast: false
timeout-minutes: 90

steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v4
@@ -114,44 +135,57 @@ jobs:
- uses: Swatinem/rust-cache@a95ba195448af2da9b00fb742d14ffaaf3c21f43
with:
prefix-key: "0" # change this to invalidate CI cache
shared-key: "cargo_nextest"
save-if: "false" # use the cache from nextest, but don’t double-save
- run: cargo build -p neard --bin neard --features nightly
- name: run spin_up_cluster.py
# Note: We're not running spin_up_cluster.py for non-nightly
# because spinning up non-nightly clusters is already covered
# by other steps in the CI, e.g. upgradable.
run: |
cd pytest
python3 -m pip install --progress-bar off --user -r requirements.txt
python3 tests/sanity/spin_up_cluster.py
- run: pip3 install --user -r pytest/requirements.txt
# This is the only job that uses `--features nightly` so we build this in-line instead of a
# separate job like done with the regular neard.
- run: cargo build --profile quick-release -p neard --bin neard --features nightly
# Note: We're not running spin_up_cluster.py for non-nightly
# because spinning up non-nightly clusters is already covered
# by other steps in the CI, e.g. upgradable.
- run: python3 pytest/tests/sanity/spin_up_cluster.py
env:
NEAR_ROOT: "target/quick-release"

- run: cargo build --profile quick-release -p neard --bin neard
py_genesis_check:
name: "Genesis Changes"
needs: build_binary
runs-on: ubuntu-22.04
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v4
with:
python-version: 3.11
cache: pip
- uses: actions/download-artifact@v3
with:
name: neard
path: target/quick-release
- run: echo "CURRENT_NEARD=$PWD/target/quick-release/neard" >> "$GITHUB_ENV"
- run: chmod +x "$CURRENT_NEARD"
- run: pip3 install --user -r pytest/requirements.txt
- run: python3 scripts/state/update_res.py check

py_style_check:
name: "Style"
runs-on: ubuntu-22.04
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v4
with:
python-version: 3.11
cache: pip
- run: pip3 install --user -r pytest/requirements.txt
- run: python3 scripts/check_nightly.py
- run: python3 scripts/check_pytests.py

- run: python3 scripts/fix_nightly_feature_flags.py

# TODO: this should probably be a separate job
- run: ./scripts/formatting --check

# TODO: this should probably be a separate job
- name: check rpc_errors_schema.json
# TODO: the command ran and suggested are different. why? unify.
run: |
rm -f target/rpc_errors_schema.json
cargo check -p near-jsonrpc --features dump_errors_schema
if ! git --no-pager diff --no-index chain/jsonrpc/res/rpc_errors_schema.json target/rpc_errors_schema.json; then
set +x
echo 'The RPC errors schema reflects outdated typing structure; please run'
echo ' ./chain/jsonrpc/build_errors_schema.sh'
exit 1
fi >&2
upgradability:
py_upgradability:
name: "Upgradability"
runs-on: ubuntu-22.04-16core
needs: build_binary
runs-on: ubuntu-22.04
defaults:
run:
working-directory: ./pytest
@@ -161,9 +195,24 @@ jobs:
with:
python-version: 3.11
cache: pip
- uses: actions/download-artifact@v3
with:
name: neard
path: pytest # NB: this does not account for defaults.run.working-directory
- run: echo "CURRENT_NEARD=$PWD/neard" >> "$GITHUB_ENV"
- run: chmod +x "$CURRENT_NEARD"
- run: pip3 install --user -r requirements.txt
- run: python3 tests/sanity/upgradable.py

rpc_error_schema:
name: "RPC Schema"
runs-on: ubuntu-22.04-8core
steps:
- uses: actions/checkout@v4
- uses: Swatinem/rust-cache@a95ba195448af2da9b00fb742d14ffaaf3c21f43
with:
prefix-key: "0" # change this to invalidate CI cache
shared-key: "cargo_nextest"
save-if: "false" # use the cache from nextest, but don’t double-save
- run: pip3 install --user -r requirements.txt
- run: python3 tests/sanity/upgradable.py
- run: ./chain/jsonrpc/build_errors_schema.sh
- run: git diff --quiet ./chain/jsonrpc/res/rpc_errors_schema.json || exit 1
2 changes: 1 addition & 1 deletion chain/jsonrpc/build_errors_schema.sh
@@ -1,5 +1,5 @@
#!/usr/bin/env bash
cd "${0%/*}/../.." # ensure we're in the workspace directory
rm -f target/rpc_errors_schema.json
cargo build -p near-jsonrpc --features dump_errors_schema
cargo check -p near-jsonrpc --features dump_errors_schema
cp target/rpc_errors_schema.json chain/jsonrpc/res/rpc_errors_schema.json
13 changes: 8 additions & 5 deletions core/store/src/trie/mem/loading.rs
@@ -187,7 +188,8 @@ mod tests {
};
use crate::trie::mem::loading::load_trie_from_flat_state;
use crate::trie::mem::lookup::memtrie_lookup;
use crate::{DBCol, NibbleSlice, ShardTries, Store, Trie, TrieUpdate};
use crate::trie::OptimizedValueRef;
use crate::{DBCol, KeyLookupMode, NibbleSlice, ShardTries, Store, Trie, TrieUpdate};
use near_primitives::hash::CryptoHash;
use near_primitives::shard_layout::{get_block_shard_uid, ShardUId};
use near_primitives::state::FlatStateValue;
@@ -241,14 +242,16 @@ mod tests {
// real trie. Check non-existent keys too.
for key in keys.iter().chain([b"not in trie".to_vec()].iter()) {
let mut nodes_accessed = Vec::new();
let actual_value = memtrie_lookup(root, key, Some(&mut nodes_accessed));
let expected_value = trie.get_flat_value(key).unwrap();
assert_eq!(actual_value, expected_value, "{:?}", NibbleSlice::new(key));
let actual_value_ref = memtrie_lookup(root, key, Some(&mut nodes_accessed))
.map(OptimizedValueRef::from_flat_value);
let expected_value_ref =
trie.get_optimized_ref(key, KeyLookupMode::FlatStorage).unwrap();
assert_eq!(actual_value_ref, expected_value_ref, "{:?}", NibbleSlice::new(key));

// Do another access with the trie to see how many nodes we're supposed to
// have accessed.
let temp_trie = shard_tries.get_trie_for_shard(shard_uid, state_root);
temp_trie.get_ref(key, crate::KeyLookupMode::Trie).unwrap();
temp_trie.get_optimized_ref(key, crate::KeyLookupMode::Trie).unwrap();
assert_eq!(
temp_trie.get_trie_nodes_count().db_reads,
nodes_accessed.len() as u64,
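The hunk above replaces the old `get_flat_value`/`get_ref` comparison with `get_optimized_ref`, so the memtrie lookup result has to be lifted into the same `OptimizedValueRef` representation before the `assert_eq!`. Below is a minimal, self-contained sketch of that normalization step, using toy stand-in types rather than the real nearcore definitions (which are not part of this diff):

```rust
// Toy stand-ins for FlatStateValue / OptimizedValueRef, just to show why both
// lookup paths are converted to one representation before the equality check:
// the memtrie yields a flat-state value, the disk trie an optimized reference,
// and they only compare cleanly in a shared form.
#[derive(Debug, PartialEq)]
enum FlatStateValue {
    Inlined(Vec<u8>),
    Ref { hash: [u8; 32], length: u32 },
}

#[derive(Debug, PartialEq)]
enum OptimizedValueRef {
    AvailableValue(Vec<u8>),
    Ref { hash: [u8; 32], length: u32 },
}

impl OptimizedValueRef {
    fn from_flat_value(value: FlatStateValue) -> Self {
        match value {
            FlatStateValue::Inlined(bytes) => OptimizedValueRef::AvailableValue(bytes),
            FlatStateValue::Ref { hash, length } => OptimizedValueRef::Ref { hash, length },
        }
    }
}

fn main() {
    // Pretend these came from memtrie_lookup and get_optimized_ref respectively.
    let memtrie_result = Some(FlatStateValue::Inlined(b"value".to_vec()));
    let disk_result = Some(OptimizedValueRef::AvailableValue(b"value".to_vec()));
    assert_eq!(memtrie_result.map(OptimizedValueRef::from_flat_value), disk_result);

    // Larger values are represented by (hash, length) references on both sides.
    let hash = [0u8; 32];
    assert_eq!(
        OptimizedValueRef::from_flat_value(FlatStateValue::Ref { hash, length: 5 }),
        OptimizedValueRef::Ref { hash, length: 5 },
    );
}
```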
2 changes: 0 additions & 2 deletions core/store/src/trie/mem/node/mod.rs
@@ -1,5 +1,3 @@
#![allow(dead_code)] // still being implemented

mod encoding;
mod mutation;
#[cfg(test)]
14 changes: 8 additions & 6 deletions core/store/src/trie/mem/updating.rs
@@ -992,15 +992,17 @@ mod tests {
self.disk.get_trie_for_shard(ShardUId::single_shard(), self.state_root);
let memtrie_result =
memtrie_root.and_then(|memtrie_root| memtrie_lookup(memtrie_root, key, None));
let disk_result = disk_trie.get_ref(key, KeyLookupMode::Trie).unwrap();
let disk_result = disk_trie.get_optimized_ref(key, KeyLookupMode::Trie).unwrap();
if let Some(value_ref) = value_ref {
let memtrie_value_ref = memtrie_result
.expect(&format!("Key {} is in truth but not in memtrie", hex::encode(key)))
.to_value_ref();
let disk_value_ref = disk_result.expect(&format!(
"Key {} is in truth but not in disk trie",
hex::encode(key)
));
let disk_value_ref = disk_result
.expect(&format!(
"Key {} is in truth but not in disk trie",
hex::encode(key)
))
.into_value_ref();
assert_eq!(
memtrie_value_ref,
*value_ref,
@@ -1239,7 +1241,7 @@ mod tests {
let num_insertions =
rand::thread_rng().gen_range(0..=(MAX_KEYS - existing_keys.len()) / SLOWDOWN);
let num_deletions =
rand::thread_rng().gen_range(0..=existing_keys.len() / SLOWDOWN + 1);
rand::thread_rng().gen_range(0..=(existing_keys.len() + SLOWDOWN - 1) / SLOWDOWN);
let mut changes = Vec::new();
for _ in 0..num_insertions {
let key_length = rand::thread_rng().gen_range(0..=10);
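The `num_deletions` bound in the hunk above changes from `existing_keys.len() / SLOWDOWN + 1` to `(existing_keys.len() + SLOWDOWN - 1) / SLOWDOWN`, i.e. floor-plus-one is replaced by a ceiling division. A small standalone sketch of the difference (the values below are made up, not taken from the test):

```rust
// Ceiling division versus "floor + 1": the two bounds agree except when n is
// an exact multiple of d, where floor(n / d) + 1 overshoots by one.
fn ceil_div(n: usize, d: usize) -> usize {
    (n + d - 1) / d
}

fn main() {
    let slowdown = 5; // hypothetical SLOWDOWN value
    for n in [0usize, 4, 5, 6, 10] {
        let old_bound = n / slowdown + 1;      // bound before this commit
        let new_bound = ceil_div(n, slowdown); // bound after this commit
        println!("n = {n:2}: old = {old_bound}, new = {new_bound}");
    }
    // With 10 existing keys and SLOWDOWN = 5, the old bound allowed up to 3
    // deletions per round, while the ceiling bound caps it at 2.
    assert_eq!(10 / 5 + 1, 3);
    assert_eq!(ceil_div(10, 5), 2);
}
```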