Skip to content

Commit

Permalink
wip
Browse files Browse the repository at this point in the history
  • Loading branch information
hansieodendaal committed Jul 12, 2024
1 parent 5f22335 commit 4160b9a
Show file tree
Hide file tree
Showing 11 changed files with 456 additions and 519 deletions.
28 changes: 28 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

1 change: 1 addition & 0 deletions applications/minotari_console_wallet/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -38,6 +38,7 @@ clap = { version = "3.2", features = ["derive", "env"] }
config = "0.14.0"
crossterm = { version = "0.25.0" }
digest = "0.10"
dirs = "5.0"
futures = { version = "^0.3.16", default-features = false, features = [
"alloc",
] }
Expand Down
542 changes: 243 additions & 299 deletions applications/minotari_console_wallet/src/automation/commands.rs

Large diffs are not rendered by default.

135 changes: 127 additions & 8 deletions applications/minotari_console_wallet/src/automation/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -25,20 +25,41 @@ pub mod error;
// removed temporarily add back in when used.
// mod prompt;

use serde::{Deserialize, Serialize};
use std::{
fs,
fs::{File, OpenOptions},
io::{BufRead, BufReader, Write},
path::{Path, PathBuf},
};

use serde::{de::DeserializeOwned, Deserialize, Serialize};
use tari_common_types::{
tari_address::TariAddress,
transaction::TxId,
types::{Commitment, PrivateKey, PublicKey, Signature},
types::{Commitment, HashOutput, PrivateKey, PublicKey, Signature},
};
use tari_core::transactions::{
key_manager::TariKeyId,
tari_amount::MicroMinotari,
transaction_components::{EncryptedData, OutputFeatures},
};
use tari_script::{CheckSigSchnorrSignature, ExecutionStack, TariScript};

use crate::automation::error::CommandError;

/// Session information captured in step 1 of the multi-party aggregate-UTXO
/// flow; serialized to/from a JSON session file.
/// NOTE(review): the original doc comment referenced `FaucetCreatePartyDetails`,
/// which looks copy-pasted from the structs below — confirm the intended command.
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq)]
struct Step1SessionInfo {
// Unique identifier for this session; also used as the per-session
// output directory name (see `out_dir`).
session_id: String,
// Fee per gram agreed for the transaction.
fee_per_gram: MicroMinotari,
// String form of the commitment to be spent — assumes hex encoding, TODO confirm.
commitment_to_spend: String,
// String form of the spent output's hash — assumes hex encoding, TODO confirm.
output_hash: String,
// Address of the final recipient of the transaction.
recipient_address: TariAddress,
}

/// Outputs for self with `FaucetCreatePartyDetails`
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq)]
struct Step1OutputsForSelf {
struct Step2OutputsForSelf {
wallet_spend_key_id: TariKeyId,
script_nonce_key_id: TariKeyId,
sender_offset_key_id: TariKeyId,
Expand All @@ -47,24 +68,24 @@ struct Step1OutputsForSelf {

/// Outputs for leader with `FaucetCreatePartyDetails`
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq)]
struct Step1OutputsForLeader {
struct Step2OutputsForLeader {
script_input_signature: CheckSigSchnorrSignature,
wallet_public_spend_key: PublicKey,
public_script_nonce_key: PublicKey,
public_sender_offset_key: PublicKey,
public_sender_offset_nonce_key: PublicKey,
shared_secret_public_key: PublicKey,
dh_shared_secret_public_key: PublicKey,
}

/// Outputs for self with `FaucetEncumberAggregateUtxo`
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq)]
struct Step2OutputsForSelf {
struct Step3OutputsForSelf {
tx_id: TxId,
}

/// Outputs for parties with `FaucetEncumberAggregateUtxo`
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq)]
struct Step2OutputsForParties {
struct Step3OutputsForParties {
input_commitment: Commitment,
input_stack: ExecutionStack,
input_script: TariScript,
Expand All @@ -81,8 +102,106 @@ struct Step2OutputsForParties {

/// Outputs for leader with `FaucetCreateScriptSig` and `FaucetCreateMetaSig`
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq)]
struct Step3OutputsForLeader {
struct Step4OutputsForLeader {
script_signature: Signature,
metadata_signature: Signature,
script_offset: PrivateKey,
}

/// Selects a window of lines when reading a multi-line JSON file with
/// [`json_from_file_single_object`].
///
/// Made `pub` because it appears in the signature of the public
/// `json_from_file_single_object`; a private type there triggers the
/// `private_interfaces` lint (private type in public interface).
#[derive(Debug)]
pub struct PartialRead {
    /// Number of lines to take from the start of the file.
    pub lines_to_read: usize,
    /// Number of the taken lines to then skip (applied after the take).
    pub lines_to_skip: usize,
}

/// Reads a JSON object from the file at `path`.
///
/// With `partial_read` supplied, only a window of the file's lines is parsed:
/// the first `lines_to_read` lines are taken, the first `lines_to_skip` of
/// those are then dropped, and the remainder is concatenated and parsed as a
/// single JSON value.
/// NOTE(review): `take` before `skip` means `lines_to_skip` is applied
/// *within* the first `lines_to_read` lines — confirm this ordering is
/// intended (the common pattern is skip-then-take).
///
/// Without `partial_read`, the whole file is deserialized directly.
///
/// # Errors
/// Returns [`CommandError::JsonFile`] if the file cannot be opened, a line
/// cannot be read, or the content is not valid JSON for `T`.
pub fn json_from_file_single_object<P: AsRef<Path>, T: DeserializeOwned>(
    path: P,
    partial_read: Option<PartialRead>,
) -> Result<T, CommandError> {
    let path = path.as_ref();
    match partial_read {
        Some(window) => {
            let file =
                File::open(path).map_err(|e| CommandError::JsonFile(format!("{e} '{}'", path.display())))?;
            let mut buffer = String::new();
            for line in BufReader::new(file)
                .lines()
                .take(window.lines_to_read)
                .skip(window.lines_to_skip)
            {
                let line = line.map_err(|e| CommandError::JsonFile(format!("{e} '{}'", path.display())))?;
                buffer.push_str(&line);
            }
            serde_json::from_str(&buffer).map_err(|e| CommandError::JsonFile(format!("{e} '{}'", path.display())))
        },
        None => {
            let file =
                File::open(path).map_err(|e| CommandError::JsonFile(format!("{e} '{}'", path.display())))?;
            serde_json::from_reader(BufReader::new(file))
                .map_err(|e| CommandError::JsonFile(format!("{e} '{}'", path.display())))
        },
    }
}

/// Appends `outputs` to `file` as a single compact JSON line, optionally
/// deleting any pre-existing file first.
///
/// Parent directories are created as needed. When `reset_file` is true and
/// the file already exists, it is removed before the append, so the file
/// ends up containing exactly one line.
///
/// # Errors
/// Returns [`CommandError::JsonFile`] on any directory, file, or
/// serialization failure.
pub fn write_json_object_to_file_as_line<T: Serialize>(
    file: &Path,
    reset_file: bool,
    outputs: T,
) -> Result<(), CommandError> {
    match file.parent() {
        Some(dir) if !dir.exists() => {
            fs::create_dir_all(dir).map_err(|e| CommandError::JsonFile(format!("{} ({})", e, file.display())))?;
        },
        _ => {},
    }
    if reset_file && file.exists() {
        fs::remove_file(file).map_err(|e| CommandError::JsonFile(e.to_string()))?;
    }
    append_json_line_to_file(file, outputs)
}

/// Serializes `output` as compact JSON and appends it to `file` as one
/// newline-terminated line (JSON-lines format).
///
/// Creates the file's parent directories and the file itself as needed.
///
/// # Errors
/// Returns [`CommandError::JsonFile`] if the directory or file cannot be
/// created/opened, serialization fails, or the write fails.
fn append_json_line_to_file<P: AsRef<Path>, T: Serialize>(file: P, output: T) -> Result<(), CommandError> {
    let file = file.as_ref();
    // Fix: the original called `.parent().unwrap()`, which panics for paths
    // with no parent (e.g. `/`); a missing parent simply needs no dir setup.
    if let Some(parent) = file.parent() {
        fs::create_dir_all(parent).map_err(|e| CommandError::JsonFile(e.to_string()))?;
    }
    let mut file_object = OpenOptions::new()
        .create(true)
        .append(true)
        .open(file)
        .map_err(|e| CommandError::JsonFile(e.to_string()))?;
    let json = serde_json::to_string(&output).map_err(|e| CommandError::JsonFile(e.to_string()))?;
    writeln!(file_object, "{json}").map_err(|e| CommandError::JsonFile(e.to_string()))?;
    Ok(())
}

/// Appends `data` to `file` as pretty-printed JSON, optionally deleting any
/// pre-existing file first.
///
/// Parent directories are created as needed. When `reset_file` is true and
/// the file already exists, it is removed before the append, so the file
/// ends up containing only the freshly written document.
///
/// # Errors
/// Returns [`CommandError::JsonFile`] on any directory, file, or
/// serialization failure.
pub fn write_to_json_file<T: Serialize>(file: &Path, reset_file: bool, data: T) -> Result<(), CommandError> {
    if let Some(dir) = file.parent().filter(|p| !p.exists()) {
        fs::create_dir_all(dir).map_err(|e| CommandError::JsonFile(format!("{} ({})", e, file.display())))?;
    }
    if reset_file && file.exists() {
        fs::remove_file(file).map_err(|e| CommandError::JsonFile(e.to_string()))?;
    }
    append_to_json_file(file, data)
}

/// Serializes `data` as pretty-printed JSON and appends it to `file`,
/// followed by a newline.
///
/// Creates the file's parent directories and the file itself as needed.
///
/// # Errors
/// Returns [`CommandError::JsonFile`] if the directory or file cannot be
/// created/opened, serialization fails, or the write fails.
fn append_to_json_file<P: AsRef<Path>, T: Serialize>(file: P, data: T) -> Result<(), CommandError> {
    let file = file.as_ref();
    // Fix: the original called `.parent().unwrap()`, which panics for paths
    // with no parent (e.g. `/`); a missing parent simply needs no dir setup.
    if let Some(parent) = file.parent() {
        fs::create_dir_all(parent).map_err(|e| CommandError::JsonFile(e.to_string()))?;
    }
    let mut file_object = OpenOptions::new()
        .create(true)
        .append(true)
        .open(file)
        .map_err(|e| CommandError::JsonFile(e.to_string()))?;
    let json = serde_json::to_string_pretty(&data).map_err(|e| CommandError::JsonFile(e.to_string()))?;
    writeln!(file_object, "{json}").map_err(|e| CommandError::JsonFile(e.to_string()))?;
    Ok(())
}

/// Returns the session's working directory: `<platform cache dir>/<session_id>`.
///
/// # Errors
/// Returns [`CommandError::InvalidArgument`] if the platform cache directory
/// cannot be determined.
fn out_dir(session_id: &str) -> Result<PathBuf, CommandError> {
    // `ok_or_else` avoids allocating the error string on the success path
    // (the original used `ok_or`, which evaluates its argument eagerly —
    // clippy `or_fun_call`).
    let base_dir = dirs::cache_dir()
        .ok_or_else(|| CommandError::InvalidArgument("Could not find cache directory".to_string()))?;
    Ok(base_dir.join(session_id))
}
Loading

0 comments on commit 4160b9a

Please sign in to comment.