diff --git a/implementations/rust/ockam/ockam_api/src/authority_node/authority.rs b/implementations/rust/ockam/ockam_api/src/authority_node/authority.rs index 22c2e431625..a3fff4652c1 100644 --- a/implementations/rust/ockam/ockam_api/src/authority_node/authority.rs +++ b/implementations/rust/ockam/ockam_api/src/authority_node/authority.rs @@ -484,6 +484,7 @@ mod tests { /// - A port for the TCP listener (must not clash with another authority port) /// - An identity that should be trusted as an enroller fn create_configuration( + database_configuration: DatabaseConfiguration, authority: &Identifier, port: u16, trusted: &[Identifier], @@ -501,9 +502,10 @@ mod tests { PreTrustedIdentity::new(attributes, TimestampInSeconds(0), None, authority.clone()), ); } + Ok(Configuration { identifier: authority.clone(), - database_configuration: DatabaseConfiguration::postgres()?.unwrap(), + database_configuration, project_identifier: "123456".to_string(), tcp_listener_address: InternetAddress::new(&format!("127.0.0.1:{}", port)).unwrap(), secure_channel_listener_name: None, @@ -554,13 +556,13 @@ mod tests { let identities = identities::create(db.clone(), node_name); let authority = identities.identities_creation().create_identity().await?; - let configuration = create_configuration(&authority, port, trusted)?; + let configuration = + create_configuration(db.configuration.clone(), &authority, port, trusted)?; let authority = Authority::create(&configuration, Some(db.clone())).await?; authority_node::start_node(ctx, &configuration, authority.clone()).await?; Ok(authority) } - /// Add a member /// Add a member async fn add_member( ctx: &Context, diff --git a/implementations/rust/ockam/ockam_api/src/cli_state/cli_state.rs b/implementations/rust/ockam/ockam_api/src/cli_state/cli_state.rs index cd42fb753c2..bd96eec0e9b 100644 --- a/implementations/rust/ockam/ockam_api/src/cli_state/cli_state.rs +++ b/implementations/rust/ockam/ockam_api/src/cli_state/cli_state.rs @@ -273,17 +273,21 @@ 
impl CliState { } /// If the postgres database is configured, return the postgres configuration + /// pub(super) fn make_database_configuration( mode: &CliStateMode, ) -> Result { - match DatabaseConfiguration::postgres()? { - Some(configuration) => Ok(configuration), - None => match mode { - CliStateMode::Persistent(root_path) => Ok(DatabaseConfiguration::sqlite( - root_path.join("database.sqlite3"), - )), - CliStateMode::InMemory => Ok(DatabaseConfiguration::sqlite_in_memory()), - }, + match mode { + CliStateMode::Persistent(root_path) => { + let sqlite_path = root_path.join("database.sqlite3"); + match DatabaseConfiguration::postgres_with_legacy_sqlite_path(Some( + sqlite_path.clone(), + ))? { + Some(configuration) => Ok(configuration), + None => Ok(DatabaseConfiguration::sqlite(sqlite_path)), + } + } + CliStateMode::InMemory => Ok(DatabaseConfiguration::sqlite_in_memory()), } } diff --git a/implementations/rust/ockam/ockam_node/src/storage/database/database_configuration.rs b/implementations/rust/ockam/ockam_node/src/storage/database/database_configuration.rs index 758760ecb83..c1783eee5ae 100644 --- a/implementations/rust/ockam/ockam_node/src/storage/database/database_configuration.rs +++ b/implementations/rust/ockam/ockam_node/src/storage/database/database_configuration.rs @@ -30,6 +30,8 @@ pub enum DatabaseConfiguration { Postgres { /// Connection string of the form postgres://[{user}:{password}@]{host}:{port}/{database_name} connection_string: String, + /// Path to a SQLite database that needs to be migrated to the Postgres database. + legacy_sqlite_path: Option, }, } @@ -72,10 +74,19 @@ impl DatabaseUser { impl DatabaseConfiguration { /// Create a postgres database configuration from an environment variable. pub fn postgres() -> Result> { + Self::postgres_with_legacy_sqlite_path(None) + } + + /// Create a postgres database configuration from an environment variable. 
+ /// An optional legacy sqlite path can be provided to migrate the sqlite database to postgres. + pub fn postgres_with_legacy_sqlite_path( + sqlite_path: Option, + ) -> Result> { if let Some(connection_string) = get_env::(OCKAM_DATABASE_CONNECTION_URL)? { check_connection_string_format(&connection_string)?; Ok(Some(DatabaseConfiguration::Postgres { connection_string: connection_string.to_owned(), + legacy_sqlite_path: sqlite_path, })) } else { Ok(None) @@ -122,6 +133,17 @@ impl DatabaseConfiguration { } } + /// Return the legacy sqlite path if any + pub fn legacy_sqlite_path(&self) -> Option { + match self { + DatabaseConfiguration::SqliteInMemory { .. } => None, + DatabaseConfiguration::SqlitePersistent { .. } => None, + DatabaseConfiguration::Postgres { + legacy_sqlite_path, .. + } => legacy_sqlite_path.clone(), + } + } + /// Return the type of database that has been configured pub fn connection_string(&self) -> String { match self { @@ -131,7 +153,9 @@ impl DatabaseConfiguration { DatabaseConfiguration::SqlitePersistent { path, .. } => { Self::create_sqlite_on_disk_connection_string(path) } - DatabaseConfiguration::Postgres { connection_string } => connection_string.clone(), + DatabaseConfiguration::Postgres { + connection_string, .. 
+ } => connection_string.clone(), } } diff --git a/implementations/rust/ockam/ockam_node/src/storage/database/migrations/migration_support/migrator.rs b/implementations/rust/ockam/ockam_node/src/storage/database/migrations/migration_support/migrator.rs index 359b2b90347..4ab2d381829 100644 --- a/implementations/rust/ockam/ockam_node/src/storage/database/migrations/migration_support/migrator.rs +++ b/implementations/rust/ockam/ockam_node/src/storage/database/migrations/migration_support/migrator.rs @@ -13,6 +13,7 @@ use sqlx::migrate::{AppliedMigration, Migrate, Migration as SqlxMigration}; use sqlx::{query, Any, AnyConnection, Pool, Row}; use sqlx_core::executor::Executor; use std::cmp::Ordering; +use std::path::PathBuf; use time::OffsetDateTime; /// Migrator is responsible for running Sql and Rust migrations side by side in the correct order, @@ -22,6 +23,8 @@ pub struct Migrator { rust_migrations: Vec>, // Unsorted, no duplicates sql_migrator: sqlx::migrate::Migrator, + // The path to a legacy sqlite database to potentially import to postgres + legacy_sqlite_path: Option, } impl Migrator { @@ -33,6 +36,7 @@ impl Migrator { Ok(Self { rust_migrations: vec![], sql_migrator, + legacy_sqlite_path: None, }) } @@ -66,6 +70,12 @@ impl Migrator { Ok(()) } + + /// Set the path to the legacy sqlite database to import into postgres + pub fn set_legacy_sqlite_path(&mut self, legacy_sqlite_path: Option) -> Result<()> { + self.legacy_sqlite_path = legacy_sqlite_path; + Ok(()) + } } enum Mode { @@ -160,15 +170,11 @@ impl Migrator { for migration in migrations.into_iter() { let needs_migration = match migration { NextMigration::Sql(sql_migration) => { - NextMigration::needs_sql_migration( - sql_migration, - connection, - &applied_migrations, - ) - .await? + self.needs_sql_migration(sql_migration, connection, &applied_migrations) + .await?
} NextMigration::Rust(rust_migration) => { - NextMigration::needs_rust_migration( + self.needs_rust_migration( rust_migration, connection, &applied_migrations, @@ -192,12 +198,9 @@ impl Migrator { for migration in migrations.into_iter() { match migration { NextMigration::Sql(sql_migration) => { - match NextMigration::apply_sql_migration( - sql_migration, - connection, - &applied_migrations, - ) - .await? + match self + .apply_sql_migration(sql_migration, connection, &applied_migrations) + .await? { MigrationSuccess => (), MigrationResult::MigrationFailure(failure) => { @@ -209,7 +212,8 @@ impl Migrator { } } NextMigration::Rust(rust_migration) => { - match NextMigration::apply_rust_migration(rust_migration, connection) + match self + .apply_rust_migration(rust_migration, connection) .await? { MigrationSuccess => (), @@ -228,6 +232,104 @@ impl Migrator { } } } + + async fn needs_sql_migration<'a>( + &self, + migration: &'a SqlxMigration, + _connection: &mut AnyConnection, + applied_migrations: &[AppliedMigration], + ) -> Result { + if migration.migration_type.is_down_migration() { + return Ok(false); + } + match applied_migrations + .iter() + .find(|m| m.version == migration.version) + { + Some(applied_migration) => { + if migration.checksum != applied_migration.checksum { + return Err(ockam_core::Error::new( + Origin::Node, + Kind::Conflict, + format!( + "Checksum mismatch for sql migration '{}' for version {}", + migration.description, migration.version, + ), + )); + } + Ok(false) + } + None => Ok(true), + } + } + + async fn apply_sql_migration<'a>( + &self, + migration: &'a SqlxMigration, + connection: &mut AnyConnection, + applied_migrations: &[AppliedMigration], + ) -> Result { + if migration.migration_type.is_down_migration() { + return Ok(MigrationResult::down_migration()); + } + match applied_migrations + .iter() + .find(|m| m.version == migration.version) + { + Some(applied_migration) => { + if migration.checksum != applied_migration.checksum { + 
Ok(MigrationResult::incorrect_checksum( + migration.description.to_string(), + migration.sql.to_string(), + String::from_utf8(migration.checksum.to_vec()) + .unwrap_or("actual migration checksum cannot be displayed".to_string()), + String::from_utf8(migration.checksum.to_vec()).unwrap_or( + "expected migration checksum cannot be displayed".to_string(), + ), + )) + } else { + Ok(MigrationSuccess) + } + } + None => match connection.apply(migration).await.into_core() { + Ok(_) => Ok(MigrationSuccess), + Err(e) => Err(ockam_core::Error::new( + Origin::Node, + Kind::Conflict, + format!( + "Failed to run the migration {}: {e:?}", + migration.description + ), + )), + }, + } + } + + async fn needs_rust_migration<'a>( + &self, + migration: &'a dyn RustMigration, + connection: &mut AnyConnection, + _applied_migrations: &[AppliedMigration], + ) -> Result { + Ok(!Migrator::has_migrated(connection, migration.name()).await?) + } + + async fn apply_rust_migration( + &self, + migration: &dyn RustMigration, + connection: &mut AnyConnection, + ) -> Result { + if Migrator::has_migrated(connection, migration.name()).await? { + return Ok(MigrationResult::success()); + } + if migration + .migrate(self.legacy_sqlite_path.clone(), connection) + .await? 
+ { + Migrator::mark_as_migrated(connection, migration.name()).await?; + } + Ok(MigrationSuccess) + } } impl Migrator { @@ -371,97 +473,6 @@ impl NextMigration<'_> { Self::Rust(m) => m.version(), } } - - async fn needs_sql_migration<'a>( - migration: &'a SqlxMigration, - _connection: &mut AnyConnection, - applied_migrations: &[AppliedMigration], - ) -> Result { - if migration.migration_type.is_down_migration() { - return Ok(false); - } - match applied_migrations - .iter() - .find(|m| m.version == migration.version) - { - Some(applied_migration) => { - if migration.checksum != applied_migration.checksum { - return Err(ockam_core::Error::new( - Origin::Node, - Kind::Conflict, - format!( - "Checksum mismatch for sql migration '{}' for version {}", - migration.description, migration.version, - ), - )); - } - Ok(false) - } - None => Ok(true), - } - } - - async fn apply_sql_migration<'a>( - migration: &'a SqlxMigration, - connection: &mut AnyConnection, - applied_migrations: &[AppliedMigration], - ) -> Result { - if migration.migration_type.is_down_migration() { - return Ok(MigrationResult::down_migration()); - } - match applied_migrations - .iter() - .find(|m| m.version == migration.version) - { - Some(applied_migration) => { - if migration.checksum != applied_migration.checksum { - Ok(MigrationResult::incorrect_checksum( - migration.description.to_string(), - migration.sql.to_string(), - String::from_utf8(migration.checksum.to_vec()) - .unwrap_or("actual migration checksum cannot be displayed".to_string()), - String::from_utf8(migration.checksum.to_vec()).unwrap_or( - "expected migration checksum cannot be displayed".to_string(), - ), - )) - } else { - Ok(MigrationSuccess) - } - } - None => match connection.apply(migration).await.into_core() { - Ok(_) => Ok(MigrationSuccess), - Err(e) => Err(ockam_core::Error::new( - Origin::Node, - Kind::Conflict, - format!( - "Failed to run the migration {}: {e:?}", - migration.description - ), - )), - }, - } - } - - async fn 
needs_rust_migration<'a>( - migration: &'a dyn RustMigration, - connection: &mut AnyConnection, - _applied_migrations: &[AppliedMigration], - ) -> Result { - Ok(!Migrator::has_migrated(connection, migration.name()).await?) - } - - async fn apply_rust_migration( - migration: &dyn RustMigration, - connection: &mut AnyConnection, - ) -> Result { - if Migrator::has_migrated(connection, migration.name()).await? { - return Ok(MigrationResult::success()); - } - if migration.migrate(connection).await? { - Migrator::mark_as_migrated(connection, migration.name()).await?; - } - Ok(MigrationSuccess) - } } impl Eq for NextMigration<'_> {} @@ -566,7 +577,11 @@ mod tests { self.version } - async fn migrate(&self, _connection: &mut AnyConnection) -> Result { + async fn migrate( + &self, + _legacy_sqlite_path: Option, + _connection: &mut AnyConnection, + ) -> Result { Ok(true) } } diff --git a/implementations/rust/ockam/ockam_node/src/storage/database/migrations/migration_support/rust_migration.rs b/implementations/rust/ockam/ockam_node/src/storage/database/migrations/migration_support/rust_migration.rs index f9617dc7dd2..44136b01df4 100644 --- a/implementations/rust/ockam/ockam_node/src/storage/database/migrations/migration_support/rust_migration.rs +++ b/implementations/rust/ockam/ockam_node/src/storage/database/migrations/migration_support/rust_migration.rs @@ -1,5 +1,6 @@ use core::fmt::Debug; use sqlx::AnyConnection; +use std::path::PathBuf; use crate::database::Version; use ockam_core::{async_trait, Result}; @@ -14,5 +15,9 @@ pub trait RustMigration: Debug + Send + Sync { fn version(&self) -> Version; /// Execute the migration - async fn migrate(&self, connection: &mut AnyConnection) -> Result; + async fn migrate( + &self, + legacy_sqlite_path: Option, + connection: &mut AnyConnection, + ) -> Result; } diff --git a/implementations/rust/ockam/ockam_node/src/storage/database/migrations/node_migrations/node_migration_set.rs 
b/implementations/rust/ockam/ockam_node/src/storage/database/migrations/node_migrations/node_migration_set.rs index 0f9e29abd03..3a6aa08dd5f 100644 --- a/implementations/rust/ockam/ockam_node/src/storage/database/migrations/node_migrations/node_migration_set.rs +++ b/implementations/rust/ockam/ockam_node/src/storage/database/migrations/node_migrations/node_migration_set.rs @@ -1,26 +1,38 @@ +use crate::database::migrations::migration_set::MigrationSet; use crate::database::migrations::sqlite::migration_20231231100000_node_name_identity_attributes::NodeNameIdentityAttributes; use crate::database::migrations::sqlite::migration_20240111100001_add_authority_tables::AuthorityAttributes; use crate::database::migrations::sqlite::migration_20240111100002_delete_trust_context::PolicyTrustContextId; use crate::database::migrations::sqlite::migration_20240212100000_split_policies::SplitPolicies; use crate::database::migrations::sqlite::migration_20240313100000_remove_orphan_resources::RemoveOrphanResources; use crate::database::migrations::sqlite::migration_20240503100000_update_policy_expressions::UpdatePolicyExpressions; -use crate::database::DatabaseType; -use ockam_core::Result; - -use crate::database::migrations::migration_set::MigrationSet; use crate::database::migrations::{Migrator, RustMigration}; +use crate::database::postgres::migration_20250116100000_sqlite_initialization::InitializeFromSqlite; use crate::database::sqlite::migration_20250114100000_members_authority_id::SetAuthorityId; +use crate::database::{DatabaseConfiguration, DatabaseType}; use crate::migrate; +use ockam_core::Result; +use std::path::PathBuf; /// This struct defines the migration to apply to the nodes database pub struct NodeMigrationSet { database_type: DatabaseType, + legacy_sqlite_path: Option, } impl NodeMigrationSet { /// Create a new migration set for a node pub fn new(database_type: DatabaseType) -> Self { - Self { database_type } + Self { + database_type, + legacy_sqlite_path: None, + } 
+ } + /// Create a new migration set for a node from a database configuration + pub fn from_configuration(database_configuration: DatabaseConfiguration) -> Self { + Self { + database_type: database_configuration.database_type(), + legacy_sqlite_path: database_configuration.legacy_sqlite_path(), + } } } @@ -36,7 +48,7 @@ impl MigrationSet for NodeMigrationSet { Box::new(UpdatePolicyExpressions), Box::new(SetAuthorityId), ], - DatabaseType::Postgres => vec![], + DatabaseType::Postgres => vec![Box::new(InitializeFromSqlite)], }; let mut migrator = match self.database_type { DatabaseType::Sqlite => { @@ -47,6 +59,7 @@ } }; migrator.set_rust_migrations(rust_migrations)?; + migrator.set_legacy_sqlite_path(self.legacy_sqlite_path.clone())?; Ok(migrator) } diff --git a/implementations/rust/ockam/ockam_node/src/storage/database/migrations/node_migrations/rust.rs b/implementations/rust/ockam/ockam_node/src/storage/database/migrations/node_migrations/rust.rs index 24ee27f6b5a..4c21915685c 100644 --- a/implementations/rust/ockam/ockam_node/src/storage/database/migrations/node_migrations/rust.rs +++ b/implementations/rust/ockam/ockam_node/src/storage/database/migrations/node_migrations/rust.rs @@ -1,2 +1,5 @@ /// SQLite rust migrations pub mod sqlite; + +/// Postgres rust migrations +pub mod postgres; diff --git a/implementations/rust/ockam/ockam_node/src/storage/database/migrations/node_migrations/rust/postgres/migration_20250116100000_sqlite_initialization.rs b/implementations/rust/ockam/ockam_node/src/storage/database/migrations/node_migrations/rust/postgres/migration_20250116100000_sqlite_initialization.rs new file mode 100644 index 00000000000..377147e3468 --- /dev/null +++ b/implementations/rust/ockam/ockam_node/src/storage/database/migrations/node_migrations/rust/postgres/migration_20250116100000_sqlite_initialization.rs @@ -0,0 +1,781 @@ +use crate::database::{Boolean, FromSqlxError, RustMigration, SqlxDatabase, ToVoid, Version}; +use
ockam_core::{async_trait, Result}; +use sqlx::*; +use std::path::PathBuf; + +/// This struct initialize the Postgres database with local data found in a SQLite instance. +#[derive(Debug)] +pub struct InitializeFromSqlite; + +#[async_trait] +impl RustMigration for InitializeFromSqlite { + fn name(&self) -> &str { + Self::name() + } + + fn version(&self) -> Version { + Self::version() + } + + async fn migrate( + &self, + legacy_sqlite_path: Option, + connection: &mut AnyConnection, + ) -> Result { + if let Some(path) = legacy_sqlite_path { + Self::initialize_postgres(path, connection).await + } else { + Ok(true) + } + } +} + +impl InitializeFromSqlite { + /// Migration version + pub fn version() -> Version { + Version(20250116100000) + } + + /// Migration name + pub fn name() -> &'static str { + "migration_20250116100000_sqlite_initialization" + } + + pub(crate) async fn initialize_postgres( + legacy_sqlite_path: PathBuf, + connection: &mut AnyConnection, + ) -> Result { + let sqlite_database = SqlxDatabase::create_sqlite(legacy_sqlite_path).await?; + + Self::migrate_aead_secrets(sqlite_database.clone(), connection).await?; + Self::migrate_authority_enrollment_tokens(sqlite_database.clone(), connection).await?; + Self::migrate_credentials(sqlite_database.clone(), connection).await?; + Self::migrate_identities(sqlite_database.clone(), connection).await?; + Self::migrate_identity_attributes(sqlite_database.clone(), connection).await?; + Self::migrate_members(sqlite_database.clone(), connection).await?; + Self::migrate_named_identities(sqlite_database.clone(), connection).await?; + Self::migrate_purpose_keys(sqlite_database.clone(), connection).await?; + Self::migrate_signing_secrets(sqlite_database.clone(), connection).await?; + Self::migrate_x25519_secrets(sqlite_database.clone(), connection).await?; + + Ok(true) + } + + async fn migrate_aead_secrets( + sqlite_database: SqlxDatabase, + connection: &mut AnyConnection, + ) -> Result<()> { + let get_secrets = 
query_as("SELECT handle, type as secret_type, secret FROM aead_secret"); + + let secrets: Vec = get_secrets + .fetch_all(&*sqlite_database.pool) + .await + .into_core()?; + + let mut transaction = Connection::begin(&mut *connection).await.into_core()?; + for secret in secrets { + query("INSERT INTO aead_secret (handle, type, secret) VALUES ($1, $2, $3)") + .bind(secret.handle) + .bind(secret.secret_type) + .bind(secret.secret) + .execute(&mut *transaction) + .await + .void()?; + } + transaction.commit().await.void()?; + Ok(()) + } + + async fn migrate_authority_enrollment_tokens( + sqlite_database: SqlxDatabase, + connection: &mut AnyConnection, + ) -> Result<()> { + let get_tokens = query_as("SELECT one_time_code, reference, issued_by, created_at, expires_at, ttl_count, attributes FROM authority_enrollment_token"); + + let tokens: Vec = get_tokens + .fetch_all(&*sqlite_database.pool) + .await + .into_core()?; + + let mut transaction = Connection::begin(&mut *connection).await.into_core()?; + for token in tokens { + query("INSERT INTO authority_enrollment_token (one_time_code, reference, issued_by, created_at, expires_at, ttl_count, attributes) VALUES ($1, $2, $3, $4, $5, $6, $7)") + .bind(token.one_time_code) + .bind(token.reference) + .bind(token.issued_by) + .bind(token.created_at) + .bind(token.expires_at) + .bind(token.ttl_count) + .bind(token.attributes) + .execute(&mut *transaction) + .await + .void()?; + } + transaction.commit().await.void()?; + Ok(()) + } + + async fn migrate_credentials( + sqlite_database: SqlxDatabase, + connection: &mut AnyConnection, + ) -> Result<()> { + let get_credentials = query_as("SELECT subject_identifier, issuer_identifier, scope, credential, expires_at, node_name FROM credential"); + + let credentials: Vec = get_credentials + .fetch_all(&*sqlite_database.pool) + .await + .into_core()?; + + let mut transaction = Connection::begin(&mut *connection).await.into_core()?; + for credential in credentials { + query("INSERT INTO 
credential (subject_identifier, issuer_identifier, scope, credential, expires_at, node_name) VALUES ($1, $2, $3, $4, $5, $6)") + .bind(credential.subject_identifier) + .bind(credential.issuer_identifier) + .bind(credential.scope) + .bind(credential.credential) + .bind(credential.expires_at) + .bind(credential.node_name) + .execute(&mut *transaction) + .await + .void()?; + } + transaction.commit().await.void()?; + Ok(()) + } + + async fn migrate_identities( + sqlite_database: SqlxDatabase, + connection: &mut AnyConnection, + ) -> Result<()> { + let get_identities = query_as("SELECT identifier, change_history FROM identity"); + + let identities: Vec = get_identities + .fetch_all(&*sqlite_database.pool) + .await + .into_core()?; + + let mut transaction = Connection::begin(&mut *connection).await.into_core()?; + for identity in identities { + query("INSERT INTO identity (identifier, change_history) VALUES ($1, $2)") + .bind(identity.identifier) + .bind(identity.change_history) + .execute(&mut *transaction) + .await + .void()?; + } + transaction.commit().await.void()?; + Ok(()) + } + + async fn migrate_identity_attributes( + sqlite_database: SqlxDatabase, + connection: &mut AnyConnection, + ) -> Result<()> { + let get_attributes = query_as("SELECT identifier, attributes, added, expires, attested_by, node_name FROM identity_attributes"); + + let attributes_rows: Vec = get_attributes + .fetch_all(&*sqlite_database.pool) + .await + .into_core()?; + + let mut transaction = Connection::begin(&mut *connection).await.into_core()?; + for attributes in attributes_rows { + query("INSERT INTO identity_attributes (identifier, attributes, added, expires, attested_by, node_name) VALUES ($1, $2, $3, $4, $5, $6)") + .bind(attributes.identifier) + .bind(attributes.attributes) + .bind(attributes.added) + .bind(attributes.expires) + .bind(attributes.attested_by) + .bind(attributes.node_name) + .execute(&mut *transaction) + .await + .void()?; + } + transaction.commit().await.void()?; + 
Ok(()) + } + + async fn migrate_members( + sqlite_database: SqlxDatabase, + connection: &mut AnyConnection, + ) -> Result<()> { + let get_members = query_as("SELECT identifier, added_by, added_at, is_pre_trusted, attributes, authority_id FROM authority_member"); + + let members: Vec = get_members + .fetch_all(&*sqlite_database.pool) + .await + .into_core()?; + + let mut transaction = Connection::begin(&mut *connection).await.into_core()?; + for member in members { + query(r#"INSERT INTO authority_member (identifier, added_by, added_at, is_pre_trusted, attributes, authority_id) + VALUES ($1, $2, $3, $4, $5, $6) + "#) + .bind(member.identifier) + .bind(member.added_by) + .bind(member.added_at) + .bind(member.is_pre_trusted.to_bool()) + .bind(member.attributes) + .bind(member.authority_id).execute(&mut *transaction).await.void()?; + } + transaction.commit().await.void()?; + Ok(()) + } + + /// We don't migrate the vault name, everything goes to the default vault, and we don't + /// consider any identity to be the default identity. 
+ async fn migrate_named_identities( + sqlite_database: SqlxDatabase, + connection: &mut AnyConnection, + ) -> Result<()> { + let get_identities = + query_as("SELECT identifier, name, vault_name, is_default FROM named_identity"); + + let identities: Vec = get_identities + .fetch_all(&*sqlite_database.pool) + .await + .into_core()?; + + let mut transaction = Connection::begin(&mut *connection).await.into_core()?; + for identity in identities { + query("INSERT INTO named_identity (identifier, name, vault_name, is_default) VALUES ($1, $2, $3, $4)") + .bind(identity.identifier) + .bind(identity.name) + .bind("default") + .bind(false) + .execute(&mut *transaction) + .await + .void()?; + } + transaction.commit().await.void()?; + Ok(()) + } + + async fn migrate_purpose_keys( + sqlite_database: SqlxDatabase, + connection: &mut AnyConnection, + ) -> Result<()> { + let get_purpose_keys = + query_as("SELECT identifier, purpose, purpose_key_attestation FROM purpose_key"); + + let purpose_keys: Vec = get_purpose_keys + .fetch_all(&*sqlite_database.pool) + .await + .into_core()?; + + let mut transaction = Connection::begin(&mut *connection).await.into_core()?; + for purpose_key in purpose_keys { + query("INSERT INTO purpose_key (identifier, purpose, purpose_key_attestation) VALUES ($1, $2, $3)") + .bind(purpose_key.identifier) + .bind(purpose_key.purpose) + .bind(purpose_key.purpose_key_attestation) + .execute(&mut *transaction) + .await + .void()?; + } + transaction.commit().await.void()?; + Ok(()) + } + + async fn migrate_signing_secrets( + sqlite_database: SqlxDatabase, + connection: &mut AnyConnection, + ) -> Result<()> { + let get_secrets = query_as("SELECT handle, secret_type, secret FROM signing_secret"); + + let secrets: Vec = get_secrets + .fetch_all(&*sqlite_database.pool) + .await + .into_core()?; + + let mut transaction = Connection::begin(&mut *connection).await.into_core()?; + for secret in secrets { + query("INSERT INTO signing_secret (handle, secret_type, secret) 
VALUES ($1, $2, $3)") + .bind(secret.handle) + .bind(secret.secret_type) + .bind(secret.secret) + .execute(&mut *transaction) + .await + .void()?; + } + transaction.commit().await.void()?; + Ok(()) + } + + async fn migrate_x25519_secrets( + sqlite_database: SqlxDatabase, + connection: &mut AnyConnection, + ) -> Result<()> { + let get_secrets = query_as("SELECT handle, secret FROM x25519_secret"); + + let secrets: Vec = get_secrets + .fetch_all(&*sqlite_database.pool) + .await + .into_core()?; + + let mut transaction = Connection::begin(&mut *connection).await.into_core()?; + for secret in secrets { + query("INSERT INTO x25519_secret (handle, secret) VALUES ($1, $2)") + .bind(secret.handle) + .bind(secret.secret) + .execute(&mut *transaction) + .await + .void()?; + } + transaction.commit().await.void()?; + Ok(()) + } +} + +#[cfg(test)] +mod test { + use super::*; + use crate::database::node_migration_set::NodeMigrationSet; + use crate::database::{MigrationSet, SqlxDatabase}; + use std::collections::BTreeMap; + use tempfile::NamedTempFile; + + #[tokio::test] + async fn test_migration() -> Result<()> { + // create a sqlite database and insert some data + let db_file = NamedTempFile::new().unwrap(); + let db_file = db_file.path(); + let sqlite_database = SqlxDatabase::create_sqlite(db_file).await?; + insert_aead_secrets(sqlite_database.clone()).await?; + insert_authority_enrollment_tokens(sqlite_database.clone()).await?; + insert_credentials(sqlite_database.clone()).await?; + insert_identities(sqlite_database.clone()).await?; + insert_identity_attributes(sqlite_database.clone()).await?; + insert_members(sqlite_database.clone()).await?; + insert_named_identities(sqlite_database.clone()).await?; + insert_purpose_keys(sqlite_database.clone()).await?; + insert_signing_secrets(sqlite_database.clone()).await?; + insert_x25519_secrets(sqlite_database.clone()).await?; + + // create a fresh postgres database, set-up to migrate legacy sqlite data + let postgres_database = 
SqlxDatabase::create_postgres(Some(db_file.to_path_buf())).await?; + postgres_database.drop_all_postgres_tables().await?; + + // run the migration + let migration_set = + NodeMigrationSet::from_configuration(postgres_database.configuration.clone()); + let migrator = migration_set.create_migrator()?; + migrator.migrate(&postgres_database.pool).await?; + + check_aead_secrets(postgres_database.clone()).await?; + check_authority_enrollment_tokens(postgres_database.clone()).await?; + check_credentials(postgres_database.clone()).await?; + check_identities(postgres_database.clone()).await?; + check_identity_attributes(postgres_database.clone()).await?; + check_members(postgres_database.clone()).await?; + check_named_identities(postgres_database.clone()).await?; + check_purpose_keys(postgres_database.clone()).await?; + check_signing_secrets(postgres_database.clone()).await?; + check_x25519_secrets(postgres_database.clone()).await?; + + Ok(()) + } + + /// HELPERS + async fn insert_aead_secrets(sqlite_database: SqlxDatabase) -> Result<()> { + for index in &["1", "2"] { + let q = format!( + "INSERT INTO aead_secret (handle, type, secret) VALUES ($1, 'secret_{index}', $2)" + ); + let handle = format!("handle_{index}"); + let secret = format!("secret_{index}"); + let query = query(&q).bind(handle.as_bytes()).bind(secret.as_bytes()); + query.execute(&*sqlite_database.pool).await.void()?; + } + Ok(()) + } + + async fn check_aead_secrets(postgres_database: SqlxDatabase) -> Result<()> { + let query = query_as("SELECT handle, type as secret_type, secret FROM aead_secret"); + let secrets: Vec = query + .fetch_all(&*postgres_database.pool) + .await + .into_core()?; + + assert_eq!( + secrets + .iter() + .map(|m| m.secret_type.clone()) + .collect::>(), + vec!["secret_1", "secret_2"] + ); + Ok(()) + } + + async fn insert_authority_enrollment_tokens(sqlite_database: SqlxDatabase) -> Result<()> { + for index in &["1", "2"] { + let q = format!( + r#"INSERT INTO authority_enrollment_token 
(one_time_code, reference, issued_by, created_at, expires_at, ttl_count, attributes) + VALUES ('code_{index}', 'reference_{index}', 'issued_by_{index}', 10, 20, 20, $1)"# + ); + let attributes = format!("attributes_{index}"); + let query = query(&q).bind(attributes.as_bytes()); + query.execute(&*sqlite_database.pool).await.void()?; + } + Ok(()) + } + + async fn check_authority_enrollment_tokens(postgres_database: SqlxDatabase) -> Result<()> { + let query = query_as("SELECT one_time_code, reference, issued_by, created_at, expires_at, ttl_count, attributes FROM authority_enrollment_token"); + let tickets: Vec = query + .fetch_all(&*postgres_database.pool) + .await + .into_core()?; + + assert_eq!( + tickets + .iter() + .map(|m| m.one_time_code.clone()) + .collect::>(), + vec!["code_1", "code_2"] + ); + Ok(()) + } + + async fn insert_credentials(sqlite_database: SqlxDatabase) -> Result<()> { + for index in &["1", "2"] { + let q = format!( + r#"INSERT INTO credential (subject_identifier, issuer_identifier, scope, credential, expires_at, node_name) + VALUES ('subject_identifier_{index}', 'issuer_identifier_{index}', 'scope_{index}', 'credential_{index}', 0, 'node_name_{index}')"# + ); + query(&q).execute(&*sqlite_database.pool).await.void()?; + } + Ok(()) + } + + async fn check_credentials(postgres_database: SqlxDatabase) -> Result<()> { + let query = query_as("SELECT subject_identifier, issuer_identifier, scope, credential, expires_at, node_name FROM credential"); + let credentials: Vec = query + .fetch_all(&*postgres_database.pool) + .await + .into_core()?; + + assert_eq!( + credentials + .iter() + .map(|m| m.subject_identifier.clone()) + .collect::>(), + vec!["subject_identifier_1", "subject_identifier_2"] + ); + Ok(()) + } + + async fn insert_identities(sqlite_database: SqlxDatabase) -> Result<()> { + for index in &["1", "2"] { + let query = + query(r#"INSERT INTO identity (identifier, change_history) VALUES ($1, $2)"#) + .bind(format!("identity_{index}")) + 
.bind(format!("change_history_{index}")); + query.execute(&*sqlite_database.pool).await.void()?; + } + Ok(()) + } + + async fn check_identities(postgres_database: SqlxDatabase) -> Result<()> { + let query = query_as("SELECT identifier, change_history FROM identity"); + let identities: Vec = query + .fetch_all(&*postgres_database.pool) + .await + .into_core()?; + + assert_eq!( + identities + .iter() + .map(|m| m.identifier.clone()) + .collect::>(), + // The controller identity is hard-coded in the database. + vec![ + "I84502ce0d9a0a91bae29026b84e19be69fb4203a6bdd1424c85a43c812772a00", + "identity_1", + "identity_2" + ] + ); + Ok(()) + } + + async fn insert_identity_attributes(sqlite_database: SqlxDatabase) -> Result<()> { + for index in &["1", "2"] { + let q = format!( + r#"INSERT INTO identity_attributes (identifier, attributes, added, expires, attested_by, node_name) + VALUES ('identity_{index}', $1, 0, 10, 'attested_by_{index}', 'node_name_{index}')"# + ); + query(&q) + .bind(format!("attributes_{index}").as_bytes()) + .execute(&*sqlite_database.pool) + .await + .void()?; + } + Ok(()) + } + + async fn check_identity_attributes(postgres_database: SqlxDatabase) -> Result<()> { + let query = query_as("SELECT identifier, attributes, added, expires, attested_by, node_name FROM identity_attributes"); + let attributes: Vec = query + .fetch_all(&*postgres_database.pool) + .await + .into_core()?; + + assert_eq!( + attributes + .iter() + .map(|m| m.identifier.clone()) + .collect::>(), + // The controller identity is hard-coded in the database. 
+ vec!["identity_1", "identity_2"] + ); + Ok(()) + } + + async fn insert_members(sqlite_database: SqlxDatabase) -> Result<()> { + let mut attributes = BTreeMap::new(); + attributes.insert("key", "value"); + for index in &["1", "2"] { + let query = query(r#"INSERT INTO authority_member (identifier, added_by, added_at, is_pre_trusted, attributes, authority_id) + VALUES ($1, $2, $3, $4, $5, $6)"#) + .bind(format!("member_{index}")) + .bind(format!("issuer_{index}")) + .bind(0) + .bind(true) + .bind(ockam_core::cbor_encode_preallocate(attributes.clone())?) + .bind("authority"); + query.execute(&*sqlite_database.pool).await.void()?; + } + Ok(()) + } + + async fn check_members(postgres_database: SqlxDatabase) -> Result<()> { + let query = query_as("SELECT identifier, added_by, added_at, is_pre_trusted, attributes, authority_id FROM authority_member"); + let members: Vec = query + .fetch_all(&*postgres_database.pool) + .await + .into_core()?; + + assert_eq!( + members + .iter() + .map(|m| m.identifier.clone()) + .collect::>(), + vec!["member_1", "member_2"] + ); + Ok(()) + } + + async fn insert_named_identities(sqlite_database: SqlxDatabase) -> Result<()> { + for index in &["1", "2"] { + let query = + query(r#"INSERT INTO named_identity (identifier, name, vault_name, is_default) VALUES ($1, $2, $3, $4)"#) + .bind(format!("identity_{index}")) + .bind(format!("name_{index}")) + .bind("default") + .bind(false); + query.execute(&*sqlite_database.pool).await.void()?; + } + Ok(()) + } + + async fn check_named_identities(postgres_database: SqlxDatabase) -> Result<()> { + let query = query_as("SELECT identifier, name, vault_name, is_default FROM named_identity"); + let identities: Vec = query + .fetch_all(&*postgres_database.pool) + .await + .into_core()?; + + assert_eq!( + identities + .iter() + .map(|m| ( + m.identifier.clone(), + m.vault_name.clone(), + m.is_default.to_bool() + )) + .collect::>(), + vec![ + ("identity_1".to_string(), "default".to_string(), false), + 
("identity_2".to_string(), "default".to_string(), false) + ] + ); + Ok(()) + } + + async fn insert_purpose_keys(sqlite_database: SqlxDatabase) -> Result<()> { + for index in &["1", "2"] { + let q = format!( + r#"INSERT INTO purpose_key (identifier, purpose, purpose_key_attestation) + VALUES ('identifier_{index}', 'purpose_{index}', $1)"# + ); + query(&q) + .bind(format!("purpose_key_attestation_{index}").as_bytes()) + .execute(&*sqlite_database.pool) + .await + .void()?; + } + Ok(()) + } + + async fn check_purpose_keys(postgres_database: SqlxDatabase) -> Result<()> { + let query = + query_as("SELECT identifier, purpose, purpose_key_attestation FROM purpose_key"); + let keys: Vec = query + .fetch_all(&*postgres_database.pool) + .await + .into_core()?; + + assert_eq!( + keys.iter() + .map(|m| m.identifier.clone()) + .collect::>(), + vec!["identifier_1", "identifier_2"] + ); + Ok(()) + } + + async fn insert_signing_secrets(sqlite_database: SqlxDatabase) -> Result<()> { + for index in &["1", "2"] { + let q = format!( + "INSERT INTO signing_secret (handle, secret_type, secret) VALUES ($1, 'secret_{index}', $2)" + ); + let handle = format!("handle_{index}"); + let secret = format!("secret_{index}"); + let query = query(&q).bind(handle.as_bytes()).bind(secret.as_bytes()); + query.execute(&*sqlite_database.pool).await.void()?; + } + Ok(()) + } + + async fn check_signing_secrets(postgres_database: SqlxDatabase) -> Result<()> { + let query = query_as("SELECT handle, secret_type, secret FROM signing_secret"); + let secrets: Vec = query + .fetch_all(&*postgres_database.pool) + .await + .into_core()?; + + assert_eq!( + secrets + .iter() + .map(|m| m.secret_type.clone()) + .collect::>(), + vec!["secret_1", "secret_2"] + ); + Ok(()) + } + + async fn insert_x25519_secrets(sqlite_database: SqlxDatabase) -> Result<()> { + for index in &["1", "2"] { + let q = "INSERT INTO x25519_secret (handle, secret) VALUES ($1, $2)"; + let handle = format!("handle_{index}"); + let secret = 
format!("secret_{index}"); + let query = query(q).bind(handle.as_bytes()).bind(secret.as_bytes()); + query.execute(&*sqlite_database.pool).await.void()?; + } + Ok(()) + } + + async fn check_x25519_secrets(postgres_database: SqlxDatabase) -> Result<()> { + let query = query_as("SELECT handle, secret FROM x25519_secret"); + let secrets: Vec = query + .fetch_all(&*postgres_database.pool) + .await + .into_core()?; + + assert_eq!( + secrets + .iter() + .map(|m| m.handle.clone()) + .collect::>>(), + vec![ + "handle_1".as_bytes().to_vec(), + "handle_2".as_bytes().to_vec() + ] + ); + Ok(()) + } +} + +#[derive(sqlx::FromRow)] +pub(crate) struct AeadSecretRow { + handle: Vec, + secret_type: String, + secret: Vec, +} + +#[derive(sqlx::FromRow)] +pub(crate) struct AuthorityEnrollmentTicketRow { + one_time_code: String, + reference: Option, + issued_by: String, + created_at: i64, + expires_at: i64, + ttl_count: i64, + attributes: Vec, +} + +#[derive(sqlx::FromRow)] +pub(crate) struct AuthorityMemberRow { + identifier: String, + added_by: String, + added_at: i64, + is_pre_trusted: Boolean, + attributes: Vec, + authority_id: String, +} + +#[derive(sqlx::FromRow)] +pub(crate) struct CredentialRow { + subject_identifier: String, + issuer_identifier: String, + scope: String, + credential: String, + expires_at: i64, + node_name: String, +} + +#[derive(sqlx::FromRow)] +pub(crate) struct IdentityRow { + identifier: String, + change_history: String, +} + +#[derive(sqlx::FromRow)] +pub(crate) struct IdentityAttributesRow { + identifier: String, + attributes: Vec, + added: i64, + expires: i64, + attested_by: String, + node_name: String, +} + +/// Clippy warns about dead code here but it shouldn't +#[allow(dead_code)] +#[derive(sqlx::FromRow)] +pub(crate) struct NamedIdentityRow { + identifier: String, + name: String, + vault_name: String, + is_default: Boolean, +} + +#[derive(sqlx::FromRow)] +pub(crate) struct PurposeKeyRow { + identifier: String, + purpose: String, + 
purpose_key_attestation: Vec, +} + +#[derive(sqlx::FromRow)] +pub(crate) struct SigningSecretRow { + handle: Vec, + secret_type: String, + secret: Vec, +} + +#[derive(sqlx::FromRow)] +pub(crate) struct X25519SecretRow { + handle: Vec, + secret: Vec, +} diff --git a/implementations/rust/ockam/ockam_node/src/storage/database/migrations/node_migrations/rust/postgres/mod.rs b/implementations/rust/ockam/ockam_node/src/storage/database/migrations/node_migrations/rust/postgres/mod.rs new file mode 100644 index 00000000000..cf77fce469b --- /dev/null +++ b/implementations/rust/ockam/ockam_node/src/storage/database/migrations/node_migrations/rust/postgres/mod.rs @@ -0,0 +1,3 @@ +/// This migration reads the data contained in an existing sqlite database and inserts that data +/// inside the shared Postgres database. +pub mod migration_20250116100000_sqlite_initialization; diff --git a/implementations/rust/ockam/ockam_node/src/storage/database/migrations/node_migrations/rust/sqlite/migration_20231231100000_node_name_identity_attributes.rs b/implementations/rust/ockam/ockam_node/src/storage/database/migrations/node_migrations/rust/sqlite/migration_20231231100000_node_name_identity_attributes.rs index ef5d2c75daa..e9ab43e0d16 100644 --- a/implementations/rust/ockam/ockam_node/src/storage/database/migrations/node_migrations/rust/sqlite/migration_20231231100000_node_name_identity_attributes.rs +++ b/implementations/rust/ockam/ockam_node/src/storage/database/migrations/node_migrations/rust/sqlite/migration_20231231100000_node_name_identity_attributes.rs @@ -2,6 +2,7 @@ use crate::database::{Boolean, FromSqlxError, Nullable, RustMigration, ToVoid, V use ockam_core::{async_trait, Result}; use sqlx::any::AnyRow; use sqlx::*; +use std::path::PathBuf; /// This struct adds a node name column to the identity attributes table #[derive(Debug)] @@ -17,7 +18,11 @@ impl RustMigration for NodeNameIdentityAttributes { Self::version() } - async fn migrate(&self, connection: &mut AnyConnection) -> 
Result { + async fn migrate( + &self, + _legacy_sqlite_path: Option, + connection: &mut AnyConnection, + ) -> Result { Self::migrate_attributes_node_name(connection).await } } diff --git a/implementations/rust/ockam/ockam_node/src/storage/database/migrations/node_migrations/rust/sqlite/migration_20240111100001_add_authority_tables.rs b/implementations/rust/ockam/ockam_node/src/storage/database/migrations/node_migrations/rust/sqlite/migration_20240111100001_add_authority_tables.rs index c408f1337a7..3ab83d7a4ae 100644 --- a/implementations/rust/ockam/ockam_node/src/storage/database/migrations/node_migrations/rust/sqlite/migration_20240111100001_add_authority_tables.rs +++ b/implementations/rust/ockam/ockam_node/src/storage/database/migrations/node_migrations/rust/sqlite/migration_20240111100001_add_authority_tables.rs @@ -2,6 +2,7 @@ use crate::database::migrations::RustMigration; use crate::database::{Boolean, FromSqlxError, Nullable, ToVoid, Version}; use ockam_core::{async_trait, Result}; use sqlx::*; +use std::path::PathBuf; /// This migration moves attributes from identity_attributes to the authority_member table for authority nodes #[derive(Debug)] @@ -17,7 +18,11 @@ impl RustMigration for AuthorityAttributes { Self::version() } - async fn migrate(&self, connection: &mut AnyConnection) -> Result { + async fn migrate( + &self, + _legacy_sqlite_path: Option, + connection: &mut AnyConnection, + ) -> Result { Self::migrate_authority_attributes_to_members(connection).await } } diff --git a/implementations/rust/ockam/ockam_node/src/storage/database/migrations/node_migrations/rust/sqlite/migration_20240111100002_delete_trust_context.rs b/implementations/rust/ockam/ockam_node/src/storage/database/migrations/node_migrations/rust/sqlite/migration_20240111100002_delete_trust_context.rs index a6ee124686a..6134ffd1526 100644 --- 
a/implementations/rust/ockam/ockam_node/src/storage/database/migrations/node_migrations/rust/sqlite/migration_20240111100002_delete_trust_context.rs +++ b/implementations/rust/ockam/ockam_node/src/storage/database/migrations/node_migrations/rust/sqlite/migration_20240111100002_delete_trust_context.rs @@ -5,6 +5,7 @@ use minicbor::{CborLen, Decode, Encode}; use ockam_core::{async_trait, Result}; use regex::Regex; use sqlx::*; +use std::path::PathBuf; /// This migration updates policies to not rely on trust_context_id, /// also introduces `node_name` and replicates policy for each existing node @@ -21,7 +22,11 @@ impl RustMigration for PolicyTrustContextId { Self::version() } - async fn migrate(&self, connection: &mut AnyConnection) -> Result { + async fn migrate( + &self, + _legacy_sqlite_path: Option, + connection: &mut AnyConnection, + ) -> Result { Self::migrate_update_policies(connection).await } } diff --git a/implementations/rust/ockam/ockam_node/src/storage/database/migrations/node_migrations/rust/sqlite/migration_20240212100000_split_policies.rs b/implementations/rust/ockam/ockam_node/src/storage/database/migrations/node_migrations/rust/sqlite/migration_20240212100000_split_policies.rs index f18c6ed632e..a2a7e9bf662 100644 --- a/implementations/rust/ockam/ockam_node/src/storage/database/migrations/node_migrations/rust/sqlite/migration_20240212100000_split_policies.rs +++ b/implementations/rust/ockam/ockam_node/src/storage/database/migrations/node_migrations/rust/sqlite/migration_20240212100000_split_policies.rs @@ -2,6 +2,7 @@ use crate::database::migrations::RustMigration; use crate::database::{FromSqlxError, ToVoid, Version}; use ockam_core::{async_trait, Result}; use sqlx::*; +use std::path::PathBuf; /// This migration moves policies attached to resource types from /// table "resource_policy" to "resource_type_policy" @@ -18,7 +19,11 @@ impl RustMigration for SplitPolicies { Self::version() } - async fn migrate(&self, connection: &mut AnyConnection) -> 
Result { + async fn migrate( + &self, + _legacy_sqlite_path: Option, + connection: &mut AnyConnection, + ) -> Result { Self::migrate_policies(connection).await } } diff --git a/implementations/rust/ockam/ockam_node/src/storage/database/migrations/node_migrations/rust/sqlite/migration_20240313100000_remove_orphan_resources.rs b/implementations/rust/ockam/ockam_node/src/storage/database/migrations/node_migrations/rust/sqlite/migration_20240313100000_remove_orphan_resources.rs index 56e061c92b5..d0d4bb4b8bc 100644 --- a/implementations/rust/ockam/ockam_node/src/storage/database/migrations/node_migrations/rust/sqlite/migration_20240313100000_remove_orphan_resources.rs +++ b/implementations/rust/ockam/ockam_node/src/storage/database/migrations/node_migrations/rust/sqlite/migration_20240313100000_remove_orphan_resources.rs @@ -2,6 +2,7 @@ use crate::database::migrations::RustMigration; use crate::database::{FromSqlxError, ToVoid, Version}; use ockam_core::{async_trait, Result}; use sqlx::*; +use std::path::PathBuf; /// This migration removes orphan resources from the resource table #[derive(Debug)] @@ -17,7 +18,11 @@ impl RustMigration for RemoveOrphanResources { Self::version() } - async fn migrate(&self, connection: &mut AnyConnection) -> Result { + async fn migrate( + &self, + _legacy_sqlite_path: Option, + connection: &mut AnyConnection, + ) -> Result { Self::migrate(connection).await } } diff --git a/implementations/rust/ockam/ockam_node/src/storage/database/migrations/node_migrations/rust/sqlite/migration_20240503100000_update_policy_expressions.rs b/implementations/rust/ockam/ockam_node/src/storage/database/migrations/node_migrations/rust/sqlite/migration_20240503100000_update_policy_expressions.rs index 5a51edf76d3..4ef60b5e140 100644 --- a/implementations/rust/ockam/ockam_node/src/storage/database/migrations/node_migrations/rust/sqlite/migration_20240503100000_update_policy_expressions.rs +++ 
b/implementations/rust/ockam/ockam_node/src/storage/database/migrations/node_migrations/rust/sqlite/migration_20240503100000_update_policy_expressions.rs @@ -2,6 +2,7 @@ use crate::database::migrations::RustMigration; use crate::database::{FromSqlxError, ToVoid, Version}; use ockam_core::{async_trait, Result}; use sqlx::*; +use std::path::PathBuf; /// This migration makes sure that policy expressions that were created as subject.has_credential /// now start with an operator: (= subject.has_credential "true") @@ -18,7 +19,11 @@ impl RustMigration for UpdatePolicyExpressions { Self::version() } - async fn migrate(&self, connection: &mut AnyConnection) -> Result { + async fn migrate( + &self, + _legacy_sqlite_path: Option, + connection: &mut AnyConnection, + ) -> Result { Self::migrate_policy_expressions(connection).await } } diff --git a/implementations/rust/ockam/ockam_node/src/storage/database/migrations/node_migrations/rust/sqlite/migration_20250114100000_members_authority_id.rs b/implementations/rust/ockam/ockam_node/src/storage/database/migrations/node_migrations/rust/sqlite/migration_20250114100000_members_authority_id.rs index 1af451123c5..74407a2b902 100644 --- a/implementations/rust/ockam/ockam_node/src/storage/database/migrations/node_migrations/rust/sqlite/migration_20250114100000_members_authority_id.rs +++ b/implementations/rust/ockam/ockam_node/src/storage/database/migrations/node_migrations/rust/sqlite/migration_20250114100000_members_authority_id.rs @@ -2,6 +2,7 @@ use crate::database::migrations::RustMigration; use crate::database::{FromSqlxError, ToVoid, Version}; use ockam_core::{async_trait, Result}; use sqlx::*; +use std::path::PathBuf; /// This migration sets the authority id for existing members #[derive(Debug)] @@ -17,7 +18,11 @@ impl RustMigration for SetAuthorityId { Self::version() } - async fn migrate(&self, connection: &mut AnyConnection) -> Result { + async fn migrate( + &self, + _legacy_sqlite_path: Option, + connection: &mut 
AnyConnection, + ) -> Result { Self::set_authority_id(connection).await } } diff --git a/implementations/rust/ockam/ockam_node/src/storage/database/migrations/node_migrations/sql/postgres/20250115100000_create_database.sql b/implementations/rust/ockam/ockam_node/src/storage/database/migrations/node_migrations/sql/postgres/20250115100000_create_database.sql index 09b3369ffa4..f2249cae367 100644 --- a/implementations/rust/ockam/ockam_node/src/storage/database/migrations/node_migrations/sql/postgres/20250115100000_create_database.sql +++ b/implementations/rust/ockam/ockam_node/src/storage/database/migrations/node_migrations/sql/postgres/20250115100000_create_database.sql @@ -5,8 +5,8 @@ -- Create a table to support rust migrations CREATE TABLE IF NOT EXISTS _rust_migrations ( - name TEXT NOT NULL, - run_on TIMESTAMP NOT NULL + name TEXT NOT NULL, + run_on INTEGER NOT NULL ); CREATE UNIQUE INDEX IF NOT EXISTS name_index ON _rust_migrations (name); @@ -58,12 +58,12 @@ CREATE INDEX identity_node_name_index ON identity_attributes (node_name); -- This table stores credentials as received by the application CREATE TABLE credential ( - subject_identifier TEXT NOT NULL, - issuer_identifier TEXT NOT NULL, - scope TEXT NOT NULL, - credential BYTEA NOT NULL, + subject_identifier TEXT NOT NULL, + issuer_identifier TEXT NOT NULL, + scope TEXT NOT NULL, + credential TEXT NOT NULL, expires_at INTEGER, - node_name TEXT NOT NULL -- node name to isolate credential that each node has + node_name TEXT NOT NULL -- node name to isolate credential that each node has ); CREATE UNIQUE INDEX credential_issuer_subject_scope_index ON credential (issuer_identifier, subject_identifier, scope); diff --git a/implementations/rust/ockam/ockam_node/src/storage/database/sqlx_database.rs b/implementations/rust/ockam/ockam_node/src/storage/database/sqlx_database.rs index 460c62a84c9..c5b35e71fa3 100644 --- a/implementations/rust/ockam/ockam_node/src/storage/database/sqlx_database.rs +++ 
b/implementations/rust/ockam/ockam_node/src/storage/database/sqlx_database.rs @@ -86,6 +86,14 @@ impl SqlxDatabase { Self::create_application_database(&DatabaseConfiguration::sqlite(path)).await } + /// Constructor for a postgres database + pub async fn create_postgres(legacy_sqlite_path: Option) -> Result { + match DatabaseConfiguration::postgres_with_legacy_sqlite_path(legacy_sqlite_path)? { + Some(configuration) => SqlxDatabase::create(&configuration).await, + None => Err(Error::new(Origin::Core, Kind::NotFound, "There is no postgres database configuration, or it is incomplete. Please run ockam environment to check the database environment variables".to_string())), + } + } + /// Constructor for a local postgres database with no data pub async fn create_new_postgres() -> Result { match DatabaseConfiguration::postgres()? {