diff --git a/Cargo.lock b/Cargo.lock
index e297faa4b..7c6639934 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -3064,6 +3064,40 @@ dependencies = [
  "windows-sys 0.48.0",
 ]
 
+[[package]]
+name = "rust-embed"
+version = "8.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a82c0bbc10308ed323529fd3c1dce8badda635aa319a5ff0e6466f33b8101e3f"
+dependencies = [
+ "rust-embed-impl",
+ "rust-embed-utils",
+ "walkdir",
+]
+
+[[package]]
+name = "rust-embed-impl"
+version = "8.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6227c01b1783cdfee1bcf844eb44594cd16ec71c35305bf1c9fb5aade2735e16"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "rust-embed-utils",
+ "syn 2.0.39",
+ "walkdir",
+]
+
+[[package]]
+name = "rust-embed-utils"
+version = "8.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8cb0a25bfbb2d4b4402179c2cf030387d9990857ce08a32592c6238db9fa8665"
+dependencies = [
+ "sha2 0.10.8",
+ "walkdir",
+]
+
 [[package]]
 name = "rustc-demangle"
 version = "0.1.23"
@@ -3512,6 +3546,7 @@ dependencies = [
  "rand",
  "regex",
  "rpassword",
+ "rust-embed",
  "sep5",
  "serde",
  "serde-aux",
diff --git a/cmd/soroban-cli/Cargo.toml b/cmd/soroban-cli/Cargo.toml
index 759777d2b..49043e544 100644
--- a/cmd/soroban-cli/Cargo.toml
+++ b/cmd/soroban-cli/Cargo.toml
@@ -105,6 +105,7 @@ ureq = {version = "2.9.1", features = ["json"]}
 tempfile = "3.8.1"
 toml_edit = "0.21.0"
+rust-embed = { version = "8.2.0", features = ["debug-embed"] }
 
 # For hyper-tls
 [target.'cfg(unix)'.dependencies]
 openssl = { version = "0.10.55", features = ["vendored"] }
diff --git a/cmd/soroban-cli/src/commands/contract/init.rs b/cmd/soroban-cli/src/commands/contract/init.rs
index 5db62f81b..fe88dbb68 100644
--- a/cmd/soroban-cli/src/commands/contract/init.rs
+++ b/cmd/soroban-cli/src/commands/contract/init.rs
@@ -1,13 +1,23 @@
-use std::fs::read_to_string;
-use std::path::Path;
-use std::{env, fs, io};
-
-use clap::builder::{PossibleValue, PossibleValuesParser, ValueParser};
-use clap::{Parser, ValueEnum};
+use std::{
+    ffi::OsStr,
+    fs::{copy, create_dir_all, metadata, read_dir, read_to_string, write},
+    io,
+    num::NonZeroU32,
+    path::Path,
+    str,
+    sync::atomic::AtomicBool,
+};
+
+use clap::{
+    builder::{PossibleValue, PossibleValuesParser, ValueParser},
+    Parser, ValueEnum,
+};
+use gix::{clone, create, open, progress, remote};
+use rust_embed::RustEmbed;
 use serde::Deserialize;
-use std::num::NonZeroU32;
-use std::sync::atomic::AtomicBool;
-use toml_edit::{Document, Formatted, InlineTable, TomlError, Value};
+use serde_json::{from_str, json, Error as JsonError, Value as JsonValue};
+use toml_edit::{Document, Formatted, InlineTable, Item, TomlError, Value as TomlValue};
+use ureq::{get, Error as UreqError};
 
 const SOROBAN_EXAMPLES_URL: &str = "https://github.com/stellar/soroban-examples.git";
 const GITHUB_URL: &str = "https://github.com";
@@ -69,9 +79,12 @@ struct ReqBody {
 }
 
 fn get_valid_examples() -> Result<Vec<String>, Error> {
-    let body: ReqBody = ureq::get(GITHUB_API_URL)
+    let body: ReqBody = get(GITHUB_API_URL)
         .call()
-        .map_err(Box::new)?
+        .map_err(|e| {
+            eprintln!("Error fetching example contracts from soroban-examples repo");
+            Box::new(e)
+        })?
         .into_json()?;
     let mut valid_examples = Vec::new();
     for item in body.tree {
@@ -92,27 +105,30 @@ fn get_valid_examples() -> Result<Vec<String>, Error> {
 #[derive(thiserror::Error, Debug)]
 pub enum Error {
     #[error("Io error: {0}")]
-    CreateDirError(#[from] io::Error),
+    IoError(#[from] io::Error),
 
     // the gix::clone::Error is too large to include in the error enum as is, so we wrap it in a Box
-    #[error("Failed to clone the template repository")]
-    CloneError(#[from] Box<gix::clone::Error>),
+    #[error("Failed to clone repository: {0}")]
+    CloneError(#[from] Box<clone::Error>),
 
     // the gix::clone::fetch::Error is too large to include in the error enum as is, so we wrap it in a Box
-    #[error("Failed to fetch the template repository: {0}")]
-    FetchError(#[from] Box<gix::clone::fetch::Error>),
+    #[error("Failed to fetch repository: {0}")]
+    FetchError(#[from] Box<clone::fetch::Error>),
 
-    #[error("Failed to checkout the template repository: {0}")]
-    CheckoutError(#[from] gix::clone::checkout::main_worktree::Error),
+    #[error("Failed to checkout repository worktree: {0}")]
+    CheckoutError(#[from] clone::checkout::main_worktree::Error),
 
-    #[error("Failed to parse Cargo.toml: {0}")]
+    #[error("Failed to parse toml file: {0}")]
     TomlParseError(#[from] TomlError),
 
-    #[error("Failed to fetch example contracts")]
-    ExampleContractFetchError(#[from] Box<ureq::Error>),
+    #[error("Failed to complete get request")]
+    UreqError(#[from] Box<UreqError>),
 
     #[error("Failed to parse package.json file: {0}")]
-    JsonParseError(#[from] serde_json::Error),
+    JsonParseError(#[from] JsonError),
+
+    #[error("Failed to convert bytes to string: {0}")]
+    ConverBytesToStringErr(#[from] str::Utf8Error),
 }
 
 impl Cmd {
@@ -127,20 +143,21 @@ impl Cmd {
     }
 }
 
+#[derive(RustEmbed)]
+#[folder = "src/utils/contract-init-template"]
+struct TemplateFiles;
+
 fn init(
     project_path: &Path,
     frontend_template: &String,
     with_examples: &[String],
 ) -> Result<(), Error> {
-    let cli_cmd_root = env!("CARGO_MANIFEST_DIR");
-    let template_dir_path = Path::new(cli_cmd_root)
-        .join("src")
-        .join("utils")
-        .join("contract-init-template");
-
     // create a project dir, and copy the contents of the base template (contract-init-template) into it
-    std::fs::create_dir_all(project_path)?;
-    copy_contents(template_dir_path.as_path(), project_path)?;
+    create_dir_all(project_path).map_err(|e| {
+        eprintln!("Error creating new project directory: {project_path:?}");
+        e
+    })?;
+    copy_template_files(project_path)?;
 
     if !check_internet_connection() {
         println!("⚠️ It doesn't look like you're connected to the internet. We're still able to initialize a new project, but additional examples and the frontend template will not be included.");
@@ -149,19 +166,25 @@ fn init(
 
     if !frontend_template.is_empty() {
         // create a temp dir for the template repo
-        let fe_template_dir = tempfile::tempdir()?;
+        let fe_template_dir = tempfile::tempdir().map_err(|e| {
+            eprintln!("Error creating temp dir for frontend template");
+            e
+        })?;
 
         // clone the template repo into the temp dir
        clone_repo(frontend_template, fe_template_dir.path())?;
 
         // copy the frontend template files into the project
-        copy_frontend_files(fe_template_dir.path(), project_path);
+        copy_frontend_files(fe_template_dir.path(), project_path)?;
     }
 
     // if there are --with-example flags, include the example contracts
     if include_example_contracts(with_examples) {
         // create an examples temp dir
-        let examples_dir = tempfile::tempdir()?;
+        let examples_dir = tempfile::tempdir().map_err(|e| {
+            eprintln!("Error creating temp dir for soroban-examples");
+            e
+        })?;
 
         // clone the soroban-examples repo into the temp dir
         clone_repo(SOROBAN_EXAMPLES_URL, examples_dir.path())?;
@@ -173,17 +196,53 @@ fn init(
     Ok(())
 }
 
+fn copy_template_files(project_path: &Path) -> Result<(), Error> {
+    for item in TemplateFiles::iter() {
+        let to = project_path.join(item.as_ref());
+        if file_exists(&to.to_string_lossy()) {
+            println!(
+                "ℹ️ Skipped creating {} as it already exists",
+                &to.to_string_lossy()
+            );
+            continue;
+        }
+        create_dir_all(to.parent().unwrap()).map_err(|e| {
+            eprintln!("Error creating directory path for: {to:?}");
+            e
+        })?;
+
+        let Some(file) = TemplateFiles::get(item.as_ref()) else {
+            println!("⚠️ Failed to read file: {}", item.as_ref());
+            continue;
+        };
+
+        let file_contents = std::str::from_utf8(file.data.as_ref()).map_err(|e| {
+            eprintln!(
+                "Error converting file contents in {:?} to string",
+                item.as_ref()
+            );
+            e
+        })?;
+
+        println!("➕ Writing {}", &to.to_string_lossy());
+        write(&to, file_contents).map_err(|e| {
+            eprintln!("Error writing file: {to:?}");
+            e
+        })?;
+    }
+    Ok(())
+}
+
 fn copy_contents(from: &Path, to: &Path) -> Result<(), Error> {
-    let contents_to_exclude_from_copy = [
-        ".git",
-        ".github",
-        "Makefile",
-        "Cargo.lock",
-        ".vscode",
-        "target",
-    ];
-    for entry in fs::read_dir(from)? {
-        let entry = entry?;
+    let contents_to_exclude_from_copy = [".git", ".github", "Makefile", ".vscode", "target"];
+    for entry in read_dir(from).map_err(|e| {
+        eprintln!("Error reading directory: {from:?}");
+        e
+    })? {
+        let entry = entry.map_err(|e| {
+            eprintln!("Error reading entry in directory: {from:?}");
+            e
+        })?;
         let path = entry.path();
         let entry_name = entry.file_name().to_string_lossy().to_string();
         let new_path = to.join(&entry_name);
@@ -193,13 +252,24 @@ fn copy_contents(from: &Path, to: &Path) -> Result<(), Error> {
         }
 
         if path.is_dir() {
-            std::fs::create_dir_all(&new_path)?;
+            create_dir_all(&new_path).map_err(|e| {
+                eprintln!("Error creating directory: {new_path:?}");
+                e
+            })?;
             copy_contents(&path, &new_path)?;
         } else {
             if file_exists(&new_path.to_string_lossy()) {
                 //if file is .gitignore, overwrite the file with a new .gitignore file
                 if path.to_string_lossy().contains(".gitignore") {
-                    std::fs::copy(&path, &new_path)?;
+                    copy(&path, &new_path).map_err(|e| {
+                        eprintln!(
+                            "Error copying from {:?} to {:?}",
+                            path.to_string_lossy(),
+                            new_path
+                        );
+
+                        e
+                    })?;
                     continue;
                 }
 
@@ -211,7 +281,14 @@ fn copy_contents(from: &Path, to: &Path) -> Result<(), Error> {
             }
 
             println!("➕ Writing {}", &new_path.to_string_lossy());
-            std::fs::copy(&path, &new_path)?;
+            copy(&path, &new_path).map_err(|e| {
+                eprintln!(
+                    "Error copying from {:?} to {:?}",
+                    path.to_string_lossy(),
+                    new_path
+                );
+                e
+            })?;
         }
     }
 
@@ -219,7 +296,7 @@ fn copy_contents(from: &Path, to: &Path) -> Result<(), Error> {
 }
 
 fn file_exists(file_path: &str) -> bool {
-    if let Ok(metadata) = fs::metadata(file_path) {
+    if let Ok(metadata) = metadata(file_path) {
         metadata.is_file()
     } else {
         false
@@ -231,27 +308,37 @@ fn include_example_contracts(contracts: &[String]) -> bool {
 }
 
 fn clone_repo(from_url: &str, to_path: &Path) -> Result<(), Error> {
-    let mut prepare = gix::clone::PrepareFetch::new(
+    let mut prepare = clone::PrepareFetch::new(
         from_url,
         to_path,
-        gix::create::Kind::WithWorktree,
-        gix::create::Options {
+        create::Kind::WithWorktree,
+        create::Options {
            destination_must_be_empty: false,
            fs_capabilities: None,
         },
-        gix::open::Options::isolated(),
+        open::Options::isolated(),
     )
-    .map_err(Box::new)?
-    .with_shallow(gix::remote::fetch::Shallow::DepthAtRemote(
+    .map_err(|e| {
+        eprintln!("Error preparing fetch for {from_url:?}");
+        Box::new(e)
+    })?
+    .with_shallow(remote::fetch::Shallow::DepthAtRemote(
         NonZeroU32::new(1).unwrap(),
     ));
 
     let (mut checkout, _outcome) = prepare
-        .fetch_then_checkout(gix::progress::Discard, &AtomicBool::new(false))
-        .map_err(Box::new)?;
-
-    let (_repo, _outcome) =
-        checkout.main_worktree(gix::progress::Discard, &AtomicBool::new(false))?;
+        .fetch_then_checkout(progress::Discard, &AtomicBool::new(false))
+        .map_err(|e| {
+            eprintln!("Error calling fetch_then_checkout with {from_url:?}");
+            Box::new(e)
+        })?;
+
+    let (_repo, _outcome) = checkout
+        .main_worktree(progress::Discard, &AtomicBool::new(false))
+        .map_err(|e| {
+            eprintln!("Error calling main_worktree for {from_url:?}");
+            e
+        })?;
 
     Ok(())
 }
@@ -264,7 +351,10 @@ fn copy_example_contracts(from: &Path, to: &Path, contracts: &[String]) -> Resul
         let contract_path = Path::new(&contract_as_string);
         let from_contract_path = from.join(contract_path);
         let to_contract_path = project_contracts_path.join(contract_path);
-        std::fs::create_dir_all(&to_contract_path)?;
+        create_dir_all(&to_contract_path).map_err(|e| {
+            eprintln!("Error creating directory: {contract_path:?}");
+            e
+        })?;
         copy_contents(&from_contract_path, &to_contract_path)?;
         edit_contract_cargo_file(&to_contract_path)?;
 
@@ -275,55 +365,69 @@ fn copy_example_contracts(from: &Path, to: &Path, contracts: &[String]) -> Resul
 
 fn edit_contract_cargo_file(contract_path: &Path) -> Result<(), Error> {
     let cargo_path = contract_path.join("Cargo.toml");
-    let cargo_toml_str = read_to_string(&cargo_path)?;
-    let mut doc = cargo_toml_str.parse::<Document>().unwrap();
+    let cargo_toml_str = read_to_string(&cargo_path).map_err(|e| {
+        eprint!("Error reading Cargo.toml file in: {contract_path:?}");
+        e
+    })?;
+    let mut doc = cargo_toml_str.parse::<Document>().map_err(|e| {
+        eprintln!("Error parsing Cargo.toml file in: {contract_path:?}");
+        e
+    })?;
 
     let mut workspace_table = InlineTable::new();
-    workspace_table.insert("workspace", Value::Boolean(Formatted::new(true)));
+    workspace_table.insert("workspace", TomlValue::Boolean(Formatted::new(true)));
 
     doc["dependencies"]["soroban-sdk"] =
-        toml_edit::Item::Value(Value::InlineTable(workspace_table.clone()));
-    doc["dev_dependencies"]["soroban-sdk"] =
-        toml_edit::Item::Value(Value::InlineTable(workspace_table));
+        Item::Value(TomlValue::InlineTable(workspace_table.clone()));
+    doc["dev_dependencies"]["soroban-sdk"] = Item::Value(TomlValue::InlineTable(workspace_table));
 
     doc.remove("profile");
 
-    std::fs::write(&cargo_path, doc.to_string())?;
+    write(&cargo_path, doc.to_string()).map_err(|e| {
+        eprintln!("Error writing to Cargo.toml file in: {contract_path:?}");
+        e
+    })?;
 
     Ok(())
 }
 
-fn copy_frontend_files(from: &Path, to: &Path) {
+fn copy_frontend_files(from: &Path, to: &Path) -> Result<(), Error> {
     println!("ℹ️ Initializing with frontend template");
-    let _ = copy_contents(from, to);
-    let _ = edit_package_json_files(to);
+    copy_contents(from, to)?;
+    edit_package_json_files(to)
 }
 
 fn edit_package_json_files(project_path: &Path) -> Result<(), Error> {
     let package_name = project_path.file_name().unwrap();
-    edit_package_name(project_path, package_name, "package.json")?;
+    edit_package_name(project_path, package_name, "package.json").map_err(|e| {
+        eprintln!("Error editing package.json file in: {project_path:?}");
+        e
+    })?;
     edit_package_name(project_path, package_name, "package-lock.json")
 }
 
 fn edit_package_name(
     project_path: &Path,
-    package_name: &std::ffi::OsStr,
+    package_name: &OsStr,
     file_name: &str,
 ) -> Result<(), Error> {
     let file_path = project_path.join(file_name);
 
     let file_contents = read_to_string(&file_path)?;
-    let mut doc: serde_json::Value = serde_json::from_str(&file_contents)?;
+    let mut doc: JsonValue = from_str(&file_contents).map_err(|e| {
+        eprintln!("Error parsing package.json file in: {project_path:?}");
+        e
+    })?;
 
-    doc["name"] = serde_json::json!(package_name.to_string_lossy());
+    doc["name"] = json!(package_name.to_string_lossy());
 
-    std::fs::write(&file_path, doc.to_string())?;
+    write(&file_path, doc.to_string())?;
 
     Ok(())
 }
 
 fn check_internet_connection() -> bool {
-    if let Ok(_req) = ureq::get(GITHUB_URL).call() {
+    if let Ok(_req) = get(GITHUB_URL).call() {
         return true;
     }
 
@@ -475,18 +579,10 @@ mod tests {
     ) {
         let contract_dir = project_dir.join("contracts").join(contract_name);
         assert!(!contract_dir.as_path().join("Makefile").exists());
-        assert!(!contract_dir.as_path().join("Cargo.lock").exists());
     }
 
     fn assert_base_excluded_paths_do_not_exist(project_dir: &Path) {
-        let excluded_paths = [
-            ".git",
-            ".github",
-            "Makefile",
-            "Cargo.lock",
-            ".vscode",
-            "target",
-        ];
+        let excluded_paths = [".git", ".github", "Makefile", ".vscode", "target"];
         for path in &excluded_paths {
             assert!(!project_dir.join(path).exists());
         }
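
Note on the rust-embed change above: with the `debug-embed` feature enabled in Cargo.toml, the contents of `src/utils/contract-init-template` are compiled into the binary even in debug builds, and `copy_template_files` writes them back out at init time. Below is a minimal, standalone sketch of that pattern; the `templates/` folder name, the `Assets` type, and the `unpack_to` helper are illustrative only, not part of this diff.

use rust_embed::RustEmbed;

// Embed every file under templates/ (relative to Cargo.toml) at compile time.
#[derive(RustEmbed)]
#[folder = "templates/"]
struct Assets;

fn unpack_to(dest: &std::path::Path) -> std::io::Result<()> {
    // `iter` yields the relative path of each embedded file;
    // `get` returns its bytes as an EmbeddedFile.
    for rel in Assets::iter() {
        if let Some(file) = Assets::get(rel.as_ref()) {
            let out = dest.join(rel.as_ref());
            if let Some(parent) = out.parent() {
                std::fs::create_dir_all(parent)?;
            }
            std::fs::write(&out, file.data.as_ref())?;
        }
    }
    Ok(())
}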