diff --git a/zk_toolbox/Cargo.lock b/zk_toolbox/Cargo.lock index 1469b183152b..1401ca022904 100644 --- a/zk_toolbox/Cargo.lock +++ b/zk_toolbox/Cargo.lock @@ -531,6 +531,7 @@ dependencies = [ "serde_yaml", "sqlx", "strum_macros 0.26.2", + "tokio", "toml", "url", "xshell", @@ -4533,6 +4534,7 @@ dependencies = [ "console", "ethers", "human-panic", + "lazy_static", "serde", "serde_json", "serde_yaml", @@ -4550,7 +4552,16 @@ dependencies = [ name = "zk_supervisor" version = "0.1.0" dependencies = [ + "anyhow", + "clap", + "common", + "config", "human-panic", + "strum 0.26.2", + "strum_macros 0.26.2", + "tokio", + "url", + "xshell", ] [[package]] diff --git a/zk_toolbox/Cargo.toml b/zk_toolbox/Cargo.toml index 539c656292a4..ae4b40fa435e 100644 --- a/zk_toolbox/Cargo.toml +++ b/zk_toolbox/Cargo.toml @@ -32,7 +32,9 @@ clap = { version = "4.4", features = ["derive", "wrap_help"] } cliclack = "0.2.5" console = "0.15.8" ethers = "2.0" +futures = "0.3.30" human-panic = "2.0" +lazy_static = "1.4.0" once_cell = "1.19.0" rand = "0.8.5" serde = { version = "1.0", features = ["derive"] } @@ -41,9 +43,8 @@ serde_yaml = "0.9" sqlx = { version = "0.7.4", features = ["runtime-tokio", "migrate", "postgres"] } strum = "0.26.2" strum_macros = "0.26.2" +thiserror = "1.0.57" tokio = { version = "1.37", features = ["full"] } toml = "0.8.12" url = { version = "2.5.0", features = ["serde"] } xshell = "0.2.6" -futures = "0.3.30" -thiserror = "1.0.57" diff --git a/zk_toolbox/crates/common/Cargo.toml b/zk_toolbox/crates/common/Cargo.toml index efdde1cdfc18..00c3b7775112 100644 --- a/zk_toolbox/crates/common/Cargo.toml +++ b/zk_toolbox/crates/common/Cargo.toml @@ -16,13 +16,14 @@ clap.workspace = true cliclack.workspace = true console.workspace = true ethers.workspace = true +futures.workspace = true once_cell.workspace = true serde.workspace = true serde_json.workspace = true serde_yaml.workspace = true sqlx.workspace = true strum_macros.workspace = true +tokio.workspace = true toml.workspace = 
true url.workspace = true xshell.workspace = true -futures.workspace = true diff --git a/zk_toolbox/crates/common/src/cmd.rs b/zk_toolbox/crates/common/src/cmd.rs index 8b18c7733059..e39f1e18972c 100644 --- a/zk_toolbox/crates/common/src/cmd.rs +++ b/zk_toolbox/crates/common/src/cmd.rs @@ -1,3 +1,5 @@ +use std::process::Output; + use anyhow::bail; use console::style; @@ -31,13 +33,6 @@ impl<'a> Cmd<'a> { /// Run the command without capturing its output. pub fn run(&mut self) -> anyhow::Result<()> { - self.run_cmd()?; - Ok(()) - } - - /// Run the command and capture its output, logging the command - /// and its output if verbose selected. - fn run_cmd(&mut self) -> anyhow::Result<()> { if global_config().verbose || self.force_run { logger::debug(format!("Running: {}", self.inner)); logger::new_empty_line(); @@ -60,6 +55,25 @@ impl<'a> Cmd<'a> { Ok(()) } + /// Run the command and return its output. + pub fn run_with_output(&mut self) -> anyhow::Result { + if global_config().verbose || self.force_run { + logger::debug(format!("Running: {}", self.inner)); + logger::new_empty_line(); + } + + self.inner.set_ignore_status(true); + let output = self.inner.output()?; + + if global_config().verbose || self.force_run { + logger::raw(log_output(&output)); + logger::new_empty_line(); + logger::new_line(); + } + + Ok(output) + } + fn check_output_status(&self, output: &std::process::Output) -> anyhow::Result<()> { if !output.status.success() { logger::new_line(); diff --git a/zk_toolbox/crates/common/src/db.rs b/zk_toolbox/crates/common/src/db.rs index 887880b2c55c..c0a681bc74c0 100644 --- a/zk_toolbox/crates/common/src/db.rs +++ b/zk_toolbox/crates/common/src/db.rs @@ -1,5 +1,7 @@ use std::{collections::HashMap, path::PathBuf}; +use anyhow::anyhow; +use serde::{Deserialize, Serialize}; use sqlx::{ migrate::{Migrate, MigrateError, Migrator}, Connection, PgConnection, @@ -9,22 +11,63 @@ use xshell::Shell; use crate::{config::global_config, logger}; -pub async fn init_db(db_url: 
&Url, name: &str) -> anyhow::Result<()> { +/// Database configuration. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct DatabaseConfig { + /// Database URL. + pub url: Url, + /// Database name. + pub name: String, +} + +impl DatabaseConfig { + /// Create a new `Db` instance. + pub fn new(url: Url, name: String) -> Self { + Self { url, name } + } + + /// Create a new `Db` instance from a URL. + pub fn from_url(url: Url) -> anyhow::Result { + let name = url + .path_segments() + .ok_or(anyhow!("Failed to parse database name from URL"))? + .last() + .ok_or(anyhow!("Failed to parse database name from URL"))?; + let url_without_db_name = { + let mut url = url.clone(); + url.set_path(""); + url + }; + Ok(Self { + url: url_without_db_name, + name: name.to_string(), + }) + } + + /// Get the full URL of the database. + pub fn full_url(&self) -> Url { + let mut url = self.url.clone(); + url.set_path(&self.name); + url + } +} + +pub async fn init_db(db: &DatabaseConfig) -> anyhow::Result<()> { // Connect to the database. - let mut connection = PgConnection::connect(db_url.as_ref()).await?; + let mut connection = PgConnection::connect(db.url.as_str()).await?; - let query = format!("CREATE DATABASE {}", name); + let query = format!("CREATE DATABASE {}", db.name); // Create DB. sqlx::query(&query).execute(&mut connection).await?; Ok(()) } -pub async fn drop_db_if_exists(db_url: &Url, name: &str) -> anyhow::Result<()> { +pub async fn drop_db_if_exists(db: &DatabaseConfig) -> anyhow::Result<()> { // Connect to the database. - let mut connection = PgConnection::connect(db_url.as_ref()).await?; + let mut connection = PgConnection::connect(db.url.as_str()).await?; - let query = format!("DROP DATABASE IF EXISTS {}", name); + let query = format!("DROP DATABASE IF EXISTS {}", db.name); // DROP DB. 
sqlx::query(&query).execute(&mut connection).await?; @@ -34,7 +77,7 @@ pub async fn drop_db_if_exists(db_url: &Url, name: &str) -> anyhow::Result<()> { pub async fn migrate_db( shell: &Shell, migrations_folder: PathBuf, - db_url: &str, + db_url: &Url, ) -> anyhow::Result<()> { // Most of this file is copy-pasted from SQLx CLI: // https://github.com/launchbadge/sqlx/blob/main/sqlx-cli/src/migrate.rs @@ -45,7 +88,7 @@ pub async fn migrate_db( } let migrator = Migrator::new(migrations_folder).await?; - let mut conn = PgConnection::connect(db_url).await?; + let mut conn = PgConnection::connect(db_url.as_str()).await?; conn.ensure_migrations_table().await?; let version = conn.dirty_version().await?; @@ -83,7 +126,7 @@ pub async fn migrate_db( let text = if skip { "Skipped" } else { "Applied" }; if global_config().verbose { - logger::raw(&format!( + logger::step(&format!( " {} {}/{} {} ({elapsed:?})", text, migration.version, @@ -104,3 +147,15 @@ pub async fn migrate_db( Ok(()) } + +pub async fn wait_for_db(db_url: &Url, tries: u32) -> anyhow::Result<()> { + for i in 0..tries { + if PgConnection::connect(db_url.as_str()).await.is_ok() { + return Ok(()); + } + if i < tries - 1 { + tokio::time::sleep(std::time::Duration::from_secs(1)).await; + } + } + anyhow::bail!("Unable to connect to Postgres, connection cannot be established"); +} diff --git a/zk_toolbox/crates/common/src/term/logger.rs b/zk_toolbox/crates/common/src/term/logger.rs index 9e13c2958078..b505123114be 100644 --- a/zk_toolbox/crates/common/src/term/logger.rs +++ b/zk_toolbox/crates/common/src/term/logger.rs @@ -43,10 +43,14 @@ pub fn success(msg: impl Display) { log::success(msg).unwrap(); } -pub fn raw(msg: impl Display) { +pub fn step(msg: impl Display) { log::step(msg).unwrap(); } +pub fn raw(msg: impl Display) { + term_write(msg); +} + pub fn note(msg: impl Display, content: impl Display) { cliclack::note(msg, content).unwrap(); } diff --git a/zk_toolbox/crates/common/src/term/spinner.rs 
b/zk_toolbox/crates/common/src/term/spinner.rs index 3e9322ba636c..dcfaaf44d44d 100644 --- a/zk_toolbox/crates/common/src/term/spinner.rs +++ b/zk_toolbox/crates/common/src/term/spinner.rs @@ -34,4 +34,13 @@ impl Spinner { self.time.elapsed().as_secs_f64() )); } + + /// Interrupt the spinner with a failed message. + pub fn fail(self) { + self.pb.error(format!( + "{} failed in {} secs", + self.msg, + self.time.elapsed().as_secs_f64() + )); + } } diff --git a/zk_toolbox/crates/config/src/secrets.rs b/zk_toolbox/crates/config/src/secrets.rs index 829d903adb66..ebacc5d437cb 100644 --- a/zk_toolbox/crates/config/src/secrets.rs +++ b/zk_toolbox/crates/config/src/secrets.rs @@ -5,8 +5,8 @@ use crate::{consts::SECRETS_FILE, traits::FileConfigWithDefaultName}; #[derive(Debug, Clone, Serialize, Deserialize)] pub struct DatabaseSecrets { - pub server_url: String, - pub prover_url: String, + pub server_url: Url, + pub prover_url: Url, #[serde(flatten)] pub other: serde_json::Value, } @@ -29,28 +29,3 @@ pub struct SecretsConfig { impl FileConfigWithDefaultName for SecretsConfig { const FILE_NAME: &'static str = SECRETS_FILE; } - -#[derive(Debug, Serialize)] -pub struct DatabaseConfig { - pub base_url: Url, - pub database_name: String, -} - -impl DatabaseConfig { - pub fn new(base_url: Url, database_name: String) -> Self { - Self { - base_url, - database_name, - } - } - - pub fn full_url(&self) -> String { - format!("{}/{}", self.base_url, self.database_name) - } -} - -#[derive(Debug, Serialize)] -pub struct DatabasesConfig { - pub server: DatabaseConfig, - pub prover: DatabaseConfig, -} diff --git a/zk_toolbox/crates/zk_inception/Cargo.toml b/zk_toolbox/crates/zk_inception/Cargo.toml index 8123746f1abf..ff22e982e3cc 100644 --- a/zk_toolbox/crates/zk_inception/Cargo.toml +++ b/zk_toolbox/crates/zk_inception/Cargo.toml @@ -17,6 +17,7 @@ cliclack.workspace = true config.workspace = true console.workspace = true human-panic.workspace = true +lazy_static.workspace = true 
serde_yaml.workspace = true serde.workspace = true serde_json.workspace = true diff --git a/zk_toolbox/crates/zk_inception/src/commands/chain/args/genesis.rs b/zk_toolbox/crates/zk_inception/src/commands/chain/args/genesis.rs index c8229066a2eb..c5a761fb74b4 100644 --- a/zk_toolbox/crates/zk_inception/src/commands/chain/args/genesis.rs +++ b/zk_toolbox/crates/zk_inception/src/commands/chain/args/genesis.rs @@ -1,6 +1,6 @@ use clap::Parser; -use common::{slugify, Prompt}; -use config::{ChainConfig, DatabaseConfig, DatabasesConfig}; +use common::{db::DatabaseConfig, slugify, Prompt}; +use config::ChainConfig; use serde::{Deserialize, Serialize}; use url::Url; @@ -9,11 +9,11 @@ use crate::defaults::{generate_db_names, DBNames, DATABASE_PROVER_URL, DATABASE_ #[derive(Debug, Clone, Serialize, Deserialize, Parser, Default)] pub struct GenesisArgs { #[clap(long, help = "Server database url without database name")] - pub server_db_url: Option, + pub server_db_url: Option, #[clap(long, help = "Server database name")] pub server_db_name: Option, #[clap(long, help = "Prover database url without database name")] - pub prover_db_url: Option, + pub prover_db_url: Option, #[clap(long, help = "Prover database name")] pub prover_db_name: Option, #[clap(long, short, help = "Use default database urls and names")] @@ -31,10 +31,8 @@ impl GenesisArgs { let chain_name = config.name.clone(); if self.use_default { GenesisArgsFinal { - server_db_url: DATABASE_SERVER_URL.to_string(), - server_db_name: server_name, - prover_db_url: DATABASE_PROVER_URL.to_string(), - prover_db_name: prover_name, + server_db: DatabaseConfig::new(DATABASE_SERVER_URL.clone(), server_name), + prover_db: DatabaseConfig::new(DATABASE_PROVER_URL.clone(), prover_name), dont_drop: self.dont_drop, } } else { @@ -42,7 +40,7 @@ impl GenesisArgs { Prompt::new(&format!( "Please provide server database url for chain {chain_name}" )) - .default(DATABASE_SERVER_URL) + .default(DATABASE_SERVER_URL.as_str()) .ask() }); let 
server_db_name = slugify(&self.server_db_name.unwrap_or_else(|| { @@ -56,7 +54,7 @@ impl GenesisArgs { Prompt::new(&format!( "Please provide prover database url for chain {chain_name}" )) - .default(DATABASE_PROVER_URL) + .default(DATABASE_PROVER_URL.as_str()) .ask() }); let prover_db_name = slugify(&self.prover_db_name.unwrap_or_else(|| { @@ -67,10 +65,8 @@ impl GenesisArgs { .ask() })); GenesisArgsFinal { - server_db_url, - server_db_name, - prover_db_url, - prover_db_name, + server_db: DatabaseConfig::new(server_db_url, server_db_name), + prover_db: DatabaseConfig::new(prover_db_url, prover_db_name), dont_drop: self.dont_drop, } } @@ -79,21 +75,7 @@ impl GenesisArgs { #[derive(Debug, Clone, Serialize, Deserialize)] pub struct GenesisArgsFinal { - pub server_db_url: String, - pub server_db_name: String, - pub prover_db_url: String, - pub prover_db_name: String, + pub server_db: DatabaseConfig, + pub prover_db: DatabaseConfig, pub dont_drop: bool, } - -impl GenesisArgsFinal { - pub fn databases_config(&self) -> anyhow::Result { - let server_url = Url::parse(&self.server_db_url)?; - let prover_url = Url::parse(&self.prover_db_url)?; - - Ok(DatabasesConfig { - server: DatabaseConfig::new(server_url, self.server_db_name.clone()), - prover: DatabaseConfig::new(prover_url, self.prover_db_name.clone()), - }) - } -} diff --git a/zk_toolbox/crates/zk_inception/src/commands/chain/genesis.rs b/zk_toolbox/crates/zk_inception/src/commands/chain/genesis.rs index 1bc9d8dd0c36..0909187abe17 100644 --- a/zk_toolbox/crates/zk_inception/src/commands/chain/genesis.rs +++ b/zk_toolbox/crates/zk_inception/src/commands/chain/genesis.rs @@ -3,11 +3,11 @@ use std::path::PathBuf; use anyhow::Context; use common::{ config::global_config, - db::{drop_db_if_exists, init_db, migrate_db}, + db::{drop_db_if_exists, init_db, migrate_db, DatabaseConfig}, logger, spinner::Spinner, }; -use config::{ChainConfig, DatabasesConfig, EcosystemConfig}; +use config::{ChainConfig, EcosystemConfig}; use 
xshell::Shell; use super::args::genesis::GenesisArgsFinal; @@ -43,17 +43,15 @@ pub async fn genesis( shell.remove_path(&config.rocks_db_path)?; shell.create_dir(&config.rocks_db_path)?; - let db_config = args - .databases_config() - .context("Database config was not fully generated")?; update_general_config(shell, config)?; - update_database_secrets(shell, config, &db_config)?; + update_database_secrets(shell, config, &args.server_db, &args.prover_db)?; logger::note( "Selected config:", logger::object_to_string(serde_json::json!({ "chain_config": config, - "db_config": db_config, + "server_db_config": args.server_db, + "prover_db_config": args.prover_db, })), ); logger::info("Starting genesis process"); @@ -61,7 +59,8 @@ pub async fn genesis( let spinner = Spinner::new("Initializing databases..."); initialize_databases( shell, - db_config, + &args.server_db, + &args.prover_db, config.link_to_code.clone(), args.dont_drop, ) @@ -79,7 +78,8 @@ pub async fn genesis( async fn initialize_databases( shell: &Shell, - db_config: DatabasesConfig, + server_db_config: &DatabaseConfig, + prover_db_config: &DatabaseConfig, link_to_code: PathBuf, dont_drop: bool, ) -> anyhow::Result<()> { @@ -89,15 +89,15 @@ async fn initialize_databases( logger::debug("Initializing server database") } if !dont_drop { - drop_db_if_exists(&db_config.server.base_url, &db_config.server.database_name) + drop_db_if_exists(server_db_config) .await .context("Failed to drop server database")?; - init_db(&db_config.server.base_url, &db_config.server.database_name).await?; + init_db(server_db_config).await?; } migrate_db( shell, path_to_server_migration, - &db_config.server.full_url(), + &server_db_config.full_url(), ) .await?; @@ -105,16 +105,16 @@ async fn initialize_databases( logger::debug("Initializing prover database") } if !dont_drop { - drop_db_if_exists(&db_config.prover.base_url, &db_config.prover.database_name) + drop_db_if_exists(prover_db_config) .await .context("Failed to drop prover 
database")?; - init_db(&db_config.prover.base_url, &db_config.prover.database_name).await?; + init_db(prover_db_config).await?; } let path_to_prover_migration = link_to_code.join(PROVER_MIGRATIONS); migrate_db( shell, path_to_prover_migration, - &db_config.prover.full_url(), + &prover_db_config.full_url(), ) .await?; diff --git a/zk_toolbox/crates/zk_inception/src/commands/ecosystem/mod.rs b/zk_toolbox/crates/zk_inception/src/commands/ecosystem/mod.rs index 1e232b5cf6c6..e2db65b213f8 100644 --- a/zk_toolbox/crates/zk_inception/src/commands/ecosystem/mod.rs +++ b/zk_toolbox/crates/zk_inception/src/commands/ecosystem/mod.rs @@ -12,6 +12,7 @@ pub mod create_configs; mod init; #[derive(Subcommand, Debug)] +#[allow(clippy::large_enum_variant)] pub enum EcosystemCommands { /// Create a new ecosystem and chain, /// setting necessary configurations for later initialization diff --git a/zk_toolbox/crates/zk_inception/src/config_manipulations.rs b/zk_toolbox/crates/zk_inception/src/config_manipulations.rs index a5edcb7bde4a..3c350fa8d894 100644 --- a/zk_toolbox/crates/zk_inception/src/config_manipulations.rs +++ b/zk_toolbox/crates/zk_inception/src/config_manipulations.rs @@ -1,10 +1,11 @@ +use common::db::DatabaseConfig; use config::{ forge_interface::{ initialize_bridges::output::InitializeBridgeOutput, paymaster::DeployPaymasterOutput, register_chain::output::RegisterChainOutput, }, traits::{ReadConfigWithBasePath, SaveConfigWithBasePath}, - ChainConfig, ContractsConfig, DatabasesConfig, GeneralConfig, GenesisConfig, SecretsConfig, + ChainConfig, ContractsConfig, GeneralConfig, GenesisConfig, SecretsConfig, }; use types::ProverMode; use xshell::Shell; @@ -25,11 +26,12 @@ pub(crate) fn update_genesis(shell: &Shell, config: &ChainConfig) -> anyhow::Res pub(crate) fn update_database_secrets( shell: &Shell, config: &ChainConfig, - db_config: &DatabasesConfig, + server_db_config: &DatabaseConfig, + prover_db_config: &DatabaseConfig, ) -> anyhow::Result<()> { let mut secrets = 
SecretsConfig::read_with_base_path(shell, &config.configs)?; - secrets.database.server_url = db_config.server.full_url(); - secrets.database.prover_url = db_config.prover.full_url(); + secrets.database.server_url = server_db_config.full_url(); + secrets.database.prover_url = prover_db_config.full_url(); secrets.save_with_base_path(shell, &config.configs)?; Ok(()) } diff --git a/zk_toolbox/crates/zk_inception/src/defaults.rs b/zk_toolbox/crates/zk_inception/src/defaults.rs index 4b768abe907d..04b735e02275 100644 --- a/zk_toolbox/crates/zk_inception/src/defaults.rs +++ b/zk_toolbox/crates/zk_inception/src/defaults.rs @@ -1,7 +1,13 @@ use config::ChainConfig; +use lazy_static::lazy_static; +use url::Url; -pub const DATABASE_SERVER_URL: &str = "postgres://postgres:notsecurepassword@localhost:5432"; -pub const DATABASE_PROVER_URL: &str = "postgres://postgres:notsecurepassword@localhost:5432"; +lazy_static! { + pub static ref DATABASE_SERVER_URL: Url = + Url::parse("postgres://postgres:notsecurepassword@localhost:5432").unwrap(); + pub static ref DATABASE_PROVER_URL: Url = + Url::parse("postgres://postgres:notsecurepassword@localhost:5432").unwrap(); +} pub const ROCKS_DB_STATE_KEEPER: &str = "main/state_keeper"; pub const ROCKS_DB_TREE: &str = "main/tree"; diff --git a/zk_toolbox/crates/zk_supervisor/Cargo.toml b/zk_toolbox/crates/zk_supervisor/Cargo.toml index 74e04fc68aac..79d2bac74905 100644 --- a/zk_toolbox/crates/zk_supervisor/Cargo.toml +++ b/zk_toolbox/crates/zk_supervisor/Cargo.toml @@ -11,4 +11,13 @@ description.workspace = true keywords.workspace = true [dependencies] +anyhow.workspace = true +clap.workspace = true +common.workspace = true +config.workspace = true human-panic.workspace = true +strum.workspace = true +strum_macros.workspace = true +tokio.workspace = true +url.workspace = true +xshell.workspace = true diff --git a/zk_toolbox/crates/zk_supervisor/src/commands/database/args/mod.rs b/zk_toolbox/crates/zk_supervisor/src/commands/database/args/mod.rs 
new file mode 100644 index 000000000000..e8171f2e11e8 --- /dev/null +++ b/zk_toolbox/crates/zk_supervisor/src/commands/database/args/mod.rs @@ -0,0 +1,36 @@ +use clap::Parser; + +use crate::dals::SelectedDals; + +pub mod new_migration; + +#[derive(Debug, Parser)] +pub struct DatabaseCommonArgs { + /// Prover + #[clap(short, long, default_missing_value = "true", num_args = 0..=1)] + pub prover: Option, + /// Core + #[clap(short, long, default_missing_value = "true", num_args = 0..=1)] + pub core: Option, +} + +impl DatabaseCommonArgs { + pub fn fill_values_with_prompt(self, verb: &str) -> DatabaseCommonArgsFinal { + let prover = self.prover.unwrap_or_else(|| { + common::PromptConfirm::new(format!("Do you want to {verb} the prover database?")).ask() + }); + + let core = self.core.unwrap_or_else(|| { + common::PromptConfirm::new(format!("Do you want to {verb} the core database?")).ask() + }); + + DatabaseCommonArgsFinal { + selected_dals: SelectedDals { prover, core }, + } + } +} + +#[derive(Debug)] +pub struct DatabaseCommonArgsFinal { + pub selected_dals: SelectedDals, +} diff --git a/zk_toolbox/crates/zk_supervisor/src/commands/database/args/new_migration.rs b/zk_toolbox/crates/zk_supervisor/src/commands/database/args/new_migration.rs new file mode 100644 index 000000000000..2364ed0b8cb7 --- /dev/null +++ b/zk_toolbox/crates/zk_supervisor/src/commands/database/args/new_migration.rs @@ -0,0 +1,46 @@ +use clap::{Parser, ValueEnum}; +use common::{Prompt, PromptSelect}; +use strum::IntoEnumIterator; +use strum_macros::{Display, EnumIter}; + +#[derive(Debug, Parser)] +pub struct DatabaseNewMigrationArgs { + /// Database to create new migration for + #[clap(long)] + pub database: Option, + /// Migration name + #[clap(long)] + pub name: Option, +} + +impl DatabaseNewMigrationArgs { + pub fn fill_values_with_prompt(self) -> DatabaseNewMigrationArgsFinal { + let selected_database = self.database.unwrap_or_else(|| { + PromptSelect::new( + "What database do you want to create 
a new migration for?", + SelectedDatabase::iter(), + ) + .ask() + }); + let name = self + .name + .unwrap_or_else(|| Prompt::new("How do you want to name the migration?").ask()); + + DatabaseNewMigrationArgsFinal { + selected_database, + name, + } + } +} + +#[derive(Debug)] +pub struct DatabaseNewMigrationArgsFinal { + pub selected_database: SelectedDatabase, + pub name: String, +} + +#[derive(Debug, Clone, ValueEnum, EnumIter, PartialEq, Eq, Display)] +pub enum SelectedDatabase { + Prover, + Core, +} diff --git a/zk_toolbox/crates/zk_supervisor/src/commands/database/check_sqlx_data.rs b/zk_toolbox/crates/zk_supervisor/src/commands/database/check_sqlx_data.rs new file mode 100644 index 000000000000..584098957774 --- /dev/null +++ b/zk_toolbox/crates/zk_supervisor/src/commands/database/check_sqlx_data.rs @@ -0,0 +1,49 @@ +use std::path::Path; + +use common::{cmd::Cmd, logger, spinner::Spinner}; +use config::EcosystemConfig; +use xshell::{cmd, Shell}; + +use super::args::DatabaseCommonArgs; +use crate::dals::{get_dals, Dal}; + +pub fn run(shell: &Shell, args: DatabaseCommonArgs) -> anyhow::Result<()> { + let args = args.fill_values_with_prompt("check sqlx data for"); + if args.selected_dals.none() { + logger::outro("No databases selected to check"); + return Ok(()); + } + + let ecosystem_config = EcosystemConfig::from_file(shell)?; + + logger::info("Checking sqlx data"); + + let dals = get_dals(shell, &args.selected_dals)?; + for dal in dals { + check_sqlx_data(shell, &ecosystem_config.link_to_code, dal)?; + } + + logger::outro("Databases sqlx data checked successfully"); + + Ok(()) +} + +pub fn check_sqlx_data( + shell: &Shell, + link_to_code: impl AsRef, + dal: Dal, +) -> anyhow::Result<()> { + let dir = link_to_code.as_ref().join(&dal.path); + let _dir_guard = shell.push_dir(dir); + let url = dal.url.as_str(); + + let spinner = Spinner::new(&format!("Checking sqlx data for dal {}...", dal.path)); + Cmd::new(cmd!( + shell, + "cargo sqlx prepare --check 
--database-url {url} -- --tests" + )) + .run()?; + spinner.finish(); + + Ok(()) +} diff --git a/zk_toolbox/crates/zk_supervisor/src/commands/database/drop.rs b/zk_toolbox/crates/zk_supervisor/src/commands/database/drop.rs new file mode 100644 index 000000000000..543a9d04eb0c --- /dev/null +++ b/zk_toolbox/crates/zk_supervisor/src/commands/database/drop.rs @@ -0,0 +1,36 @@ +use common::{ + db::{drop_db_if_exists, DatabaseConfig}, + logger, + spinner::Spinner, +}; +use xshell::Shell; + +use super::args::DatabaseCommonArgs; +use crate::dals::{get_dals, Dal}; + +pub async fn run(shell: &Shell, args: DatabaseCommonArgs) -> anyhow::Result<()> { + let args = args.fill_values_with_prompt("drop"); + if args.selected_dals.none() { + logger::outro("No databases selected to drop"); + return Ok(()); + } + + logger::info("Dropping databases"); + + let dals = get_dals(shell, &args.selected_dals)?; + for dal in dals { + drop_database(dal).await?; + } + + logger::outro("Databases dropped successfully"); + + Ok(()) +} + +pub async fn drop_database(dal: Dal) -> anyhow::Result<()> { + let spinner = Spinner::new(&format!("Dropping DB for dal {}...", dal.path)); + let db = DatabaseConfig::from_url(dal.url)?; + drop_db_if_exists(&db).await?; + spinner.finish(); + Ok(()) +} diff --git a/zk_toolbox/crates/zk_supervisor/src/commands/database/migrate.rs b/zk_toolbox/crates/zk_supervisor/src/commands/database/migrate.rs new file mode 100644 index 000000000000..c26f2771eb2b --- /dev/null +++ b/zk_toolbox/crates/zk_supervisor/src/commands/database/migrate.rs @@ -0,0 +1,45 @@ +use std::path::Path; + +use common::{cmd::Cmd, logger, spinner::Spinner}; +use config::EcosystemConfig; +use xshell::{cmd, Shell}; + +use super::args::DatabaseCommonArgs; +use crate::dals::{get_dals, Dal}; + +pub fn run(shell: &Shell, args: DatabaseCommonArgs) -> anyhow::Result<()> { + let args = args.fill_values_with_prompt("migrate"); + if args.selected_dals.none() { + logger::outro("No databases selected to migrate"); + 
return Ok(()); + } + + logger::info("Migrating databases"); + let ecosystem_config = EcosystemConfig::from_file(shell)?; + + let dals = get_dals(shell, &args.selected_dals)?; + for dal in dals { + migrate_database(shell, &ecosystem_config.link_to_code, dal)?; + } + + logger::outro("Databases migrated successfully"); + + Ok(()) +} + +fn migrate_database(shell: &Shell, link_to_code: impl AsRef, dal: Dal) -> anyhow::Result<()> { + let dir = link_to_code.as_ref().join(&dal.path); + let _dir_guard = shell.push_dir(dir); + let url = dal.url.as_str(); + + let spinner = Spinner::new(&format!("Migrating DB for dal {}...", dal.path)); + Cmd::new(cmd!( + shell, + "cargo sqlx database create --database-url {url}" + )) + .run()?; + Cmd::new(cmd!(shell, "cargo sqlx migrate run --database-url {url}")).run()?; + spinner.finish(); + + Ok(()) +} diff --git a/zk_toolbox/crates/zk_supervisor/src/commands/database/mod.rs b/zk_toolbox/crates/zk_supervisor/src/commands/database/mod.rs new file mode 100644 index 000000000000..eb090352c350 --- /dev/null +++ b/zk_toolbox/crates/zk_supervisor/src/commands/database/mod.rs @@ -0,0 +1,43 @@ +use clap::Subcommand; +use xshell::Shell; + +use self::args::{new_migration::DatabaseNewMigrationArgs, DatabaseCommonArgs}; + +mod args; +mod check_sqlx_data; +mod drop; +mod migrate; +mod new_migration; +mod prepare; +mod reset; +mod setup; + +#[derive(Subcommand, Debug)] +pub enum DatabaseCommands { + /// Check sqlx-data.json is up to date + CheckSqlxData(DatabaseCommonArgs), + /// Drop databases + Drop(DatabaseCommonArgs), + /// Migrate databases + Migrate(DatabaseCommonArgs), + /// Create new migration + NewMigration(DatabaseNewMigrationArgs), + /// Prepare sqlx-data.json + Prepare(DatabaseCommonArgs), + /// Reset databases + Reset(DatabaseCommonArgs), + /// Setup databases + Setup(DatabaseCommonArgs), +} + +pub async fn run(shell: &Shell, args: DatabaseCommands) -> anyhow::Result<()> { + match args { + DatabaseCommands::CheckSqlxData(args) => 
check_sqlx_data::run(shell, args), + DatabaseCommands::Drop(args) => drop::run(shell, args).await, + DatabaseCommands::Migrate(args) => migrate::run(shell, args), + DatabaseCommands::NewMigration(args) => new_migration::run(shell, args), + DatabaseCommands::Prepare(args) => prepare::run(shell, args), + DatabaseCommands::Reset(args) => reset::run(shell, args).await, + DatabaseCommands::Setup(args) => setup::run(shell, args), + } +} diff --git a/zk_toolbox/crates/zk_supervisor/src/commands/database/new_migration.rs b/zk_toolbox/crates/zk_supervisor/src/commands/database/new_migration.rs new file mode 100644 index 000000000000..5a8e42620154 --- /dev/null +++ b/zk_toolbox/crates/zk_supervisor/src/commands/database/new_migration.rs @@ -0,0 +1,41 @@ +use std::path::Path; + +use common::{cmd::Cmd, spinner::Spinner}; +use config::EcosystemConfig; +use xshell::{cmd, Shell}; + +use super::args::new_migration::{DatabaseNewMigrationArgs, SelectedDatabase}; +use crate::dals::{get_core_dal, get_prover_dal, Dal}; + +pub fn run(shell: &Shell, args: DatabaseNewMigrationArgs) -> anyhow::Result<()> { + let args = args.fill_values_with_prompt(); + + let dal = match args.selected_database { + SelectedDatabase::Core => get_core_dal(shell)?, + SelectedDatabase::Prover => get_prover_dal(shell)?, + }; + let ecosystem_config = EcosystemConfig::from_file(shell)?; + + generate_migration(shell, ecosystem_config.link_to_code, dal, args.name)?; + + Ok(()) +} + +fn generate_migration( + shell: &Shell, + link_to_code: impl AsRef, + dal: Dal, + name: String, +) -> anyhow::Result<()> { + let dir = link_to_code.as_ref().join(&dal.path); + let _dir_guard = shell.push_dir(dir); + + let spinner = Spinner::new(&format!( + "Creating new DB migration for dal {}...", + dal.path + )); + Cmd::new(cmd!(shell, "cargo sqlx migrate add -r {name}")).run()?; + spinner.finish(); + + Ok(()) +} diff --git a/zk_toolbox/crates/zk_supervisor/src/commands/database/prepare.rs 
b/zk_toolbox/crates/zk_supervisor/src/commands/database/prepare.rs new file mode 100644 index 000000000000..df38812a40af --- /dev/null +++ b/zk_toolbox/crates/zk_supervisor/src/commands/database/prepare.rs @@ -0,0 +1,49 @@ +use std::path::Path; + +use common::{cmd::Cmd, logger, spinner::Spinner}; +use config::EcosystemConfig; +use xshell::{cmd, Shell}; + +use super::args::DatabaseCommonArgs; +use crate::dals::{get_dals, Dal}; + +pub fn run(shell: &Shell, args: DatabaseCommonArgs) -> anyhow::Result<()> { + let args = args.fill_values_with_prompt("prepare"); + if args.selected_dals.none() { + logger::outro("No databases selected to prepare"); + return Ok(()); + } + + let ecosystem_config = EcosystemConfig::from_file(shell)?; + + logger::info("Preparing sqlx data"); + + let dals = get_dals(shell, &args.selected_dals)?; + for dal in dals { + prepare_sqlx_data(shell, &ecosystem_config.link_to_code, dal)?; + } + + logger::outro("Databases sqlx data prepared successfully"); + + Ok(()) +} + +pub fn prepare_sqlx_data( + shell: &Shell, + link_to_code: impl AsRef, + dal: Dal, +) -> anyhow::Result<()> { + let dir = link_to_code.as_ref().join(&dal.path); + let _dir_guard = shell.push_dir(dir); + let url = dal.url.as_str(); + + let spinner = Spinner::new(&format!("Preparing sqlx data for dal {}...", dal.path)); + Cmd::new(cmd!( + shell, + "cargo sqlx prepare --database-url {url} -- --tests" + )) + .run()?; + spinner.finish(); + + Ok(()) +} diff --git a/zk_toolbox/crates/zk_supervisor/src/commands/database/reset.rs b/zk_toolbox/crates/zk_supervisor/src/commands/database/reset.rs new file mode 100644 index 000000000000..00eab1a1b1c3 --- /dev/null +++ b/zk_toolbox/crates/zk_supervisor/src/commands/database/reset.rs @@ -0,0 +1,38 @@ +use std::path::Path; + +use common::logger; +use config::EcosystemConfig; +use xshell::Shell; + +use super::{args::DatabaseCommonArgs, drop::drop_database, setup::setup_database}; +use crate::dals::{get_dals, Dal}; + +pub async fn run(shell: &Shell, 
args: DatabaseCommonArgs) -> anyhow::Result<()> { + let args = args.fill_values_with_prompt("reset"); + if args.selected_dals.none() { + logger::outro("No databases selected"); + return Ok(()); + } + + let ecoseystem_config = EcosystemConfig::from_file(shell)?; + + let dals = get_dals(shell, &args.selected_dals)?; + for dal in dals { + logger::info(&format!("Resetting database {}", dal.path)); + reset_database(shell, ecoseystem_config.link_to_code.clone(), dal).await?; + } + + logger::outro("Databases resetted"); + + Ok(()) +} + +async fn reset_database( + shell: &Shell, + link_to_code: impl AsRef, + dal: Dal, +) -> anyhow::Result<()> { + drop_database(dal.clone()).await?; + setup_database(shell, link_to_code, dal)?; + Ok(()) +} diff --git a/zk_toolbox/crates/zk_supervisor/src/commands/database/setup.rs b/zk_toolbox/crates/zk_supervisor/src/commands/database/setup.rs new file mode 100644 index 000000000000..d645f0b16568 --- /dev/null +++ b/zk_toolbox/crates/zk_supervisor/src/commands/database/setup.rs @@ -0,0 +1,50 @@ +use std::path::Path; + +use common::{cmd::Cmd, logger, spinner::Spinner}; +use config::EcosystemConfig; +use xshell::{cmd, Shell}; + +use super::args::DatabaseCommonArgs; +use crate::dals::{get_dals, Dal}; + +pub fn run(shell: &Shell, args: DatabaseCommonArgs) -> anyhow::Result<()> { + let args = args.fill_values_with_prompt("setup"); + if args.selected_dals.none() { + logger::outro("No databases selected to setup"); + return Ok(()); + } + + let ecosystem_config = EcosystemConfig::from_file(shell)?; + + logger::info("Setting up databases"); + + let dals = get_dals(shell, &args.selected_dals)?; + for dal in dals { + setup_database(shell, &ecosystem_config.link_to_code, dal)?; + } + + logger::outro("Databases set up successfully"); + + Ok(()) +} + +pub fn setup_database( + shell: &Shell, + link_to_code: impl AsRef, + dal: Dal, +) -> anyhow::Result<()> { + let dir = link_to_code.as_ref().join(&dal.path); + let _dir_guard = shell.push_dir(dir); + let url 
= dal.url.as_str(); + + let spinner = Spinner::new(&format!("Setting up DB for dal {}...", dal.path)); + Cmd::new(cmd!( + shell, + "cargo sqlx database create --database-url {url}" + )) + .run()?; + Cmd::new(cmd!(shell, "cargo sqlx migrate run --database-url {url}")).run()?; + spinner.finish(); + + Ok(()) +} diff --git a/zk_toolbox/crates/zk_supervisor/src/commands/mod.rs b/zk_toolbox/crates/zk_supervisor/src/commands/mod.rs new file mode 100644 index 000000000000..8fd0a6be869b --- /dev/null +++ b/zk_toolbox/crates/zk_supervisor/src/commands/mod.rs @@ -0,0 +1 @@ +pub mod database; diff --git a/zk_toolbox/crates/zk_supervisor/src/dals.rs b/zk_toolbox/crates/zk_supervisor/src/dals.rs new file mode 100644 index 000000000000..2b5ae7fcc6ff --- /dev/null +++ b/zk_toolbox/crates/zk_supervisor/src/dals.rs @@ -0,0 +1,68 @@ +use anyhow::anyhow; +use common::config::global_config; +use config::{EcosystemConfig, SecretsConfig}; +use url::Url; +use xshell::Shell; + +const CORE_DAL_PATH: &str = "core/lib/dal"; +const PROVER_DAL_PATH: &str = "prover/prover_dal"; + +#[derive(Debug, Clone)] +pub struct SelectedDals { + pub prover: bool, + pub core: bool, +} + +impl SelectedDals { + /// Returns true if no databases are selected + pub fn none(&self) -> bool { + !self.prover && !self.core + } +} + +#[derive(Debug, Clone)] +pub struct Dal { + pub path: String, + pub url: Url, +} + +pub fn get_dals(shell: &Shell, selected_dals: &SelectedDals) -> anyhow::Result<Vec<Dal>> { + let mut dals = vec![]; + + if selected_dals.prover { + dals.push(get_prover_dal(shell)?); + } + if selected_dals.core { + dals.push(get_core_dal(shell)?); + } + + Ok(dals) +} + +pub fn get_prover_dal(shell: &Shell) -> anyhow::Result<Dal> { + let secrets = get_secrets(shell)?; + + Ok(Dal { + path: PROVER_DAL_PATH.to_string(), + url: secrets.database.prover_url.clone(), + }) +} + +pub fn get_core_dal(shell: &Shell) -> anyhow::Result<Dal> { + let secrets = get_secrets(shell)?; + + Ok(Dal { + path: CORE_DAL_PATH.to_string(), + url:
secrets.database.server_url.clone(), + }) +} + +fn get_secrets(shell: &Shell) -> anyhow::Result<SecretsConfig> { + let ecosystem_config = EcosystemConfig::from_file(shell)?; + let chain_config = ecosystem_config + .load_chain(global_config().chain_name.clone()) + .ok_or(anyhow!("Chain not found"))?; + let secrets = chain_config.get_secrets_config()?; + + Ok(secrets) +} diff --git a/zk_toolbox/crates/zk_supervisor/src/main.rs b/zk_toolbox/crates/zk_supervisor/src/main.rs index 9936141be106..a46733cab7d3 100644 --- a/zk_toolbox/crates/zk_supervisor/src/main.rs +++ b/zk_toolbox/crates/zk_supervisor/src/main.rs @@ -1,4 +1,114 @@ -fn main() { +use clap::{Parser, Subcommand}; +use commands::database::DatabaseCommands; +use common::{ + check_prerequisites, + config::{global_config, init_global_config, GlobalConfig}, + init_prompt_theme, logger, +}; +use config::EcosystemConfig; +use xshell::Shell; + +mod commands; +mod dals; + +#[derive(Parser, Debug)] +#[command(version, about)] +struct Supervisor { + #[command(subcommand)] + command: SupervisorSubcommands, + #[clap(flatten)] + global: SupervisorGlobalArgs, +} + +#[derive(Subcommand, Debug)] +enum SupervisorSubcommands { + /// Database related commands + #[command(subcommand)] + Database(DatabaseCommands), +} + +#[derive(Parser, Debug)] +#[clap(next_help_heading = "Global options")] +struct SupervisorGlobalArgs { + /// Verbose mode + #[clap(short, long, global = true)] + verbose: bool, + /// Chain to use + #[clap(long, global = true)] + chain: Option<String>, + /// Ignores prerequisites checks + #[clap(long, global = true)] + ignore_prerequisites: bool, +} + +#[tokio::main] +async fn main() -> anyhow::Result<()> { human_panic::setup_panic!(); - println!("Hello, world!"); + + init_prompt_theme(); + + logger::new_empty_line(); + logger::intro(); + + let shell = Shell::new().unwrap(); + let args = Supervisor::parse(); + + init_global_config_inner(&shell, &args.global)?; + + if !global_config().ignore_prerequisites { + check_prerequisites(&shell);
+ } + + match run_subcommand(args, &shell).await { + Ok(_) => {} + Err(e) => { + logger::error(e.to_string()); + + if e.chain().count() > 1 { + logger::error_note( + "Caused by:", + &e.chain() + .skip(1) + .enumerate() + .map(|(i, cause)| format!(" {i}: {}", cause)) + .collect::<Vec<_>>() + .join("\n"), + ); + } + + logger::outro("Failed"); + std::process::exit(1); + } + } + + Ok(()) +} + +async fn run_subcommand(args: Supervisor, shell: &Shell) -> anyhow::Result<()> { + match args.command { + SupervisorSubcommands::Database(command) => commands::database::run(shell, command).await?, + } + Ok(()) +} + +fn init_global_config_inner(shell: &Shell, args: &SupervisorGlobalArgs) -> anyhow::Result<()> { + if let Some(name) = &args.chain { + if let Ok(config) = EcosystemConfig::from_file(shell) { + let chains = config.list_of_chains(); + if !chains.contains(name) { + anyhow::bail!( + "Chain with name {} doesn't exist, please choose one of {:?}", + name, + &chains + ); + } + } + } + + init_global_config(GlobalConfig { + verbose: args.verbose, + chain_name: args.chain.clone(), + ignore_prerequisites: args.ignore_prerequisites, + }); + Ok(()) }