Skip to content

Commit

Permalink
chore: fix formatting
Browse files Browse the repository at this point in the history
  • Loading branch information
chris13524 committed Apr 5, 2024
1 parent f71876c commit 95416ae
Show file tree
Hide file tree
Showing 35 changed files with 356 additions and 410 deletions.
43 changes: 23 additions & 20 deletions justfile
Original file line number Diff line number Diff line change
Expand Up @@ -39,21 +39,13 @@ alias clean := cargo-clean
alias check := cargo-check
alias clippy := cargo-clippy
alias udeps := cargo-udeps
alias checkfmt := cargo-checkfmt

alias tfsec := tf-tfsec
alias tflint := tf-tflint

################################################################################
# Meta recipes

# Format the project code
fmt target='all': (_check-string-in-set target "all,rust,tf")
#!/bin/bash
set -euo pipefail
[[ '{{ target }}' == 'all' || '{{ target }}' == 'rust' ]] && { just cargo-fmt; }
[[ '{{ target }}' == 'all' || '{{ target }}' == 'tf' ]] && { just tf-fmt; }

# Update project documentation
docs target='all': (_check-string-in-set target "all,rust,tf")
#!/bin/bash
Expand All @@ -62,7 +54,7 @@ docs target='all': (_check-string-in-set target "all,rust,tf")
[[ '{{ target }}' == 'all' || '{{ target }}' == 'tf' ]] && { just tf-docs; }

# Run linting and tests
devloop: lint cargo-test-all
devloop: lint cargo-test-all fmt-imports

################################################################################
# Linting recipes
Expand All @@ -76,7 +68,7 @@ lint target='all': (_check-string-in-set target "all,rust,tf")


# Lint the rust project for any quality issues
lint-rust: cargo-check cargo-clippy cargo-udeps cargo-checkfmt
lint-rust: cargo-clippy fmt cargo-udeps

# Lint the terraform project for any quality issues
lint-tf: tf-checkfmt tf-validate tf-tfsec tf-tflint
Expand Down Expand Up @@ -116,7 +108,6 @@ cargo target='' sub-target='': (_check-string-in-set target "open-docs,build-doc
[[ '{{ target }}' == 'check' ]] && { just cargo-check; }
[[ '{{ target }}' == 'clippy' ]] && { just cargo-clippy; }
[[ '{{ target }}' == 'udeps' ]] && { just cargo-udeps; }
[[ '{{ target }}' == 'checkfmt' ]] && { just cargo-checkfmt; }

# Open rust project documentation in your local browser
cargo-open-docs: (_cargo-build-docs "open" "nodeps")
Expand All @@ -129,10 +120,27 @@ cargo-build-docs: (_cargo-build-docs "" "nodeps")
echo "==> Building project documentation @$JUST_ROOT/target/doc"
cargo doc --all-features --document-private-items ${nodeps:+--no-deps} ${open:+--open}

# Format the application code
@cargo-fmt: _check-cmd-cargo-fmt
printf '==> Running {{ color-cmd }}rustfmt{{ nocolor }}\n'
cargo +nightly fmt
# Format the rust code with rustfmt (no-op when rustfmt is not installed)
fmt:
    #!/bin/bash
    set -euo pipefail

    # Guard clause: bail out early (successfully) when the rustfmt
    # cargo subcommand is not available on this machine.
    if ! command -v cargo-fmt >/dev/null; then
        echo '==> rustfmt not found in PATH, skipping'
        exit 0
    fi

    echo '==> Running rustfmt'
    cargo fmt

# Group and granularize imports with nightly rustfmt (no-op when rustfmt is not installed)
fmt-imports:
    #!/bin/bash
    set -euo pipefail

    if command -v cargo-fmt >/dev/null; then
        # Distinct message from the plain `fmt` recipe so logs show which
        # formatting pass actually ran (previously both printed the same line).
        echo '==> Running rustfmt (import grouping)'
        # NOTE(review): this only checks that cargo-fmt exists; `cargo +nightly fmt`
        # will still fail if the nightly toolchain itself is missing — confirm CI
        # installs nightly, or add a `rustup toolchain list` check here.
        cargo +nightly fmt -- --config group_imports=StdExternalCrate,imports_granularity=One
    else
        echo '==> rustfmt not found in PATH, skipping'
    fi

# Build service for development
cargo-build: _check-cmd-cargo
Expand Down Expand Up @@ -191,11 +199,6 @@ cargo-udeps: _check-cmd-cargo-udeps
@printf '==> Running {{ color-cmd }}udeps{{ nocolor }}\n'
cargo +nightly udeps

# Check the rust code formatting
cargo-checkfmt: _check-cmd-cargo-fmt
@printf '==> Running {{ color-cmd }}rustfmt{{ nocolor }} --check\n'
cargo +nightly fmt --check

################################################################################
# Terraform recipes

Expand Down
28 changes: 16 additions & 12 deletions rustfmt.toml
Original file line number Diff line number Diff line change
@@ -1,18 +1,22 @@
edition = "2021"
fn_single_line = false
format_code_in_doc_comments = true
format_strings = true
imports_layout = "HorizontalVertical"
imports_granularity = "One"
normalize_comments = true
normalize_doc_attributes = true

reorder_imports = true
reorder_impl_items = true
group_imports = "StdExternalCrate"
use_try_shorthand = true
wrap_comments = true
overflow_delimited_expr = true
remove_nested_parens = true
reorder_modules = true
unstable_features = true
use_field_init_shorthand = true

## We only use settings available in the stable channel

#fn_single_line = false
#format_code_in_doc_comments = true
#format_strings = true
#imports_layout = "HorizontalVertical"
#imports_granularity = "One"
#normalize_comments = true
#normalize_doc_attributes = true
#reorder_impl_items = true
#group_imports = "StdExternalCrate"
#wrap_comments = true
#overflow_delimited_expr = true
#unstable_features = true
46 changes: 19 additions & 27 deletions src/analytics/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,28 +5,17 @@ use {
tracing::info,
wc::{
analytics::{
self,
AnalyticsExt,
ArcCollector,
AwsConfig,
AwsExporter,
BatchCollector,
BatchObserver,
CollectionObserver,
Collector,
CollectorConfig,
ExportObserver,
self, AnalyticsExt, ArcCollector, AwsConfig, AwsExporter, BatchCollector,
BatchObserver, CollectionObserver, Collector, CollectorConfig, ExportObserver,
ParquetBatchFactory,
},
geoip::{self, MaxMindResolver, Resolver},
metrics::otel,
},
};
pub use {
config::Config,
history_lookup_info::HistoryLookupInfo,
identity_lookup_info::IdentityLookupInfo,
message_info::MessageInfo,
config::Config, history_lookup_info::HistoryLookupInfo,
identity_lookup_info::IdentityLookupInfo, message_info::MessageInfo,
onramp_history_lookup_info::OnrampHistoryLookupInfo,
};

Expand Down Expand Up @@ -79,10 +68,11 @@ where
let size = res.as_deref().map(|data| data.len()).unwrap_or(0);
let elapsed = elapsed.as_millis() as u64;

wc::metrics::counter!("analytics_batches_finished", 1, &[
self.0.as_kv(),
success_kv(res.is_ok())
]);
wc::metrics::counter!(
"analytics_batches_finished",
1,
&[self.0.as_kv(), success_kv(res.is_ok())]
);

if let Err(err) = res {
tracing::warn!(
Expand All @@ -106,10 +96,11 @@ where
E: std::error::Error,
{
fn observe_collection(&self, res: &Result<(), E>) {
wc::metrics::counter!("analytics_records_collected", 1, &[
self.0.as_kv(),
success_kv(res.is_ok())
]);
wc::metrics::counter!(
"analytics_records_collected",
1,
&[self.0.as_kv(), success_kv(res.is_ok())]
);

if let Err(err) = res {
tracing::warn!(
Expand All @@ -126,10 +117,11 @@ where
E: std::error::Error,
{
fn observe_export(&self, elapsed: Duration, res: &Result<(), E>) {
wc::metrics::counter!("analytics_batches_exported", 1, &[
self.0.as_kv(),
success_kv(res.is_ok())
]);
wc::metrics::counter!(
"analytics_batches_exported",
1,
&[self.0.as_kv(), success_kv(res.is_ok())]
);

let elapsed = elapsed.as_millis() as u64;

Expand Down
152 changes: 73 additions & 79 deletions src/env/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -12,19 +12,8 @@ use {
std::{collections::HashMap, fmt::Display},
};
pub use {
aurora::*,
base::*,
binance::*,
getblock::*,
infura::*,
mantle::*,
near::*,
pokt::*,
publicnode::*,
quicknode::*,
server::*,
zksync::*,
zora::*,
aurora::*, base::*, binance::*, getblock::*, infura::*, mantle::*, near::*, pokt::*,
publicnode::*, quicknode::*, server::*, zksync::*, zora::*,
};
mod aurora;
mod base;
Expand Down Expand Up @@ -182,72 +171,77 @@ mod test {

values.iter().for_each(set_env_var);

assert_eq!(Config::from_env().unwrap(), Config {
server: ServerConfig {
host: "1.2.3.4".to_owned(),
port: 123,
prometheus_port: 234,
log_level: "TRACE".to_owned(),
external_ip: Some(Ipv4Addr::new(2, 3, 4, 5).into()),
blocked_countries: vec![
"KP".to_owned(),
"IR".to_owned(),
"CU".to_owned(),
"SY".to_owned(),
],
s3_endpoint: None,
geoip_db_bucket: Some("GEOIP_DB_BUCKET".to_owned()),
geoip_db_key: Some("GEOIP_DB_KEY".to_owned()),
testing_project_id: Some("TESTING_PROJECT_ID".to_owned()),
validate_project_id: true,
},
registry: project::Config {
api_url: Some("API_URL".to_owned()),
api_auth_token: Some("API_AUTH_TOKEN".to_owned()),
project_data_cache_ttl: 345,
},
storage: project::storage::Config {
redis_max_connections: 456,
project_data_redis_addr_read: Some("redis://127.0.0.1/data/read".to_owned()),
project_data_redis_addr_write: Some("redis://127.0.0.1/data/write".to_owned()),
identity_cache_redis_addr_read: Some("redis://127.0.0.1/identity/read".to_owned()),
identity_cache_redis_addr_write: Some(
"redis://127.0.0.1/identity/write".to_owned()
),
rate_limiting_cache_redis_addr_read: Some(
"redis://127.0.0.1/rate_limit/read".to_owned()
),
rate_limiting_cache_redis_addr_write: Some(
"redis://127.0.0.1/rate_limit/write".to_owned()
),
},
postgres: PostgresConfig {
uri: "postgres://postgres@localhost:5432/postgres".to_owned(),
max_connections: 32,
},
analytics: analytics::Config {
s3_endpoint: Some("s3://127.0.0.1".to_owned()),
export_bucket: Some("EXPORT_BUCKET".to_owned()),
},
profiler: ProfilerConfig {},
providers: ProvidersConfig {
prometheus_query_url: Some("PROMETHEUS_QUERY_URL".to_owned()),
prometheus_workspace_header: Some("PROMETHEUS_WORKSPACE_HEADER".to_owned()),
infura_project_id: "INFURA_PROJECT_ID".to_string(),
pokt_project_id: "POKT_PROJECT_ID".to_string(),
quicknode_api_token: "QUICKNODE_API_TOKEN".to_string(),
zerion_api_key: Some("ZERION_API_KEY".to_owned()),
coinbase_api_key: Some("COINBASE_API_KEY".to_owned()),
coinbase_app_id: Some("COINBASE_APP_ID".to_owned()),
one_inch_api_key: Some("ONE_INCH_API_KEY".to_owned()),
getblock_access_tokens: Some("{}".to_owned()),
},
rate_limiting: RateLimitingConfig {
max_tokens: Some(100),
refill_interval_sec: Some(1),
refill_rate: Some(10),
},
});
assert_eq!(
Config::from_env().unwrap(),
Config {
server: ServerConfig {
host: "1.2.3.4".to_owned(),
port: 123,
prometheus_port: 234,
log_level: "TRACE".to_owned(),
external_ip: Some(Ipv4Addr::new(2, 3, 4, 5).into()),
blocked_countries: vec![
"KP".to_owned(),
"IR".to_owned(),
"CU".to_owned(),
"SY".to_owned(),
],
s3_endpoint: None,
geoip_db_bucket: Some("GEOIP_DB_BUCKET".to_owned()),
geoip_db_key: Some("GEOIP_DB_KEY".to_owned()),
testing_project_id: Some("TESTING_PROJECT_ID".to_owned()),
validate_project_id: true,
},
registry: project::Config {
api_url: Some("API_URL".to_owned()),
api_auth_token: Some("API_AUTH_TOKEN".to_owned()),
project_data_cache_ttl: 345,
},
storage: project::storage::Config {
redis_max_connections: 456,
project_data_redis_addr_read: Some("redis://127.0.0.1/data/read".to_owned()),
project_data_redis_addr_write: Some("redis://127.0.0.1/data/write".to_owned()),
identity_cache_redis_addr_read: Some(
"redis://127.0.0.1/identity/read".to_owned()
),
identity_cache_redis_addr_write: Some(
"redis://127.0.0.1/identity/write".to_owned()
),
rate_limiting_cache_redis_addr_read: Some(
"redis://127.0.0.1/rate_limit/read".to_owned()
),
rate_limiting_cache_redis_addr_write: Some(
"redis://127.0.0.1/rate_limit/write".to_owned()
),
},
postgres: PostgresConfig {
uri: "postgres://postgres@localhost:5432/postgres".to_owned(),
max_connections: 32,
},
analytics: analytics::Config {
s3_endpoint: Some("s3://127.0.0.1".to_owned()),
export_bucket: Some("EXPORT_BUCKET".to_owned()),
},
profiler: ProfilerConfig {},
providers: ProvidersConfig {
prometheus_query_url: Some("PROMETHEUS_QUERY_URL".to_owned()),
prometheus_workspace_header: Some("PROMETHEUS_WORKSPACE_HEADER".to_owned()),
infura_project_id: "INFURA_PROJECT_ID".to_string(),
pokt_project_id: "POKT_PROJECT_ID".to_string(),
quicknode_api_token: "QUICKNODE_API_TOKEN".to_string(),
zerion_api_key: Some("ZERION_API_KEY".to_owned()),
coinbase_api_key: Some("COINBASE_API_KEY".to_owned()),
coinbase_app_id: Some("COINBASE_APP_ID".to_owned()),
one_inch_api_key: Some("ONE_INCH_API_KEY".to_owned()),
getblock_access_tokens: Some("{}".to_owned()),
},
rate_limiting: RateLimitingConfig {
max_tokens: Some(100),
refill_interval_sec: Some(1),
refill_rate: Some(10),
},
}
);

values.iter().for_each(reset_env_var);
}
Expand Down
4 changes: 1 addition & 3 deletions src/error.rs
Original file line number Diff line number Diff line change
@@ -1,8 +1,6 @@
use {
crate::{
project::ProjectDataError,
storage::error::StorageError,
utils::crypto::CryptoUitlsError,
project::ProjectDataError, storage::error::StorageError, utils::crypto::CryptoUitlsError,
},
axum::{response::IntoResponse, Json},
cerberus::registry::RegistryError,
Expand Down
Loading

0 comments on commit 95416ae

Please sign in to comment.