From 95f7fad87c74d88e5f635d56606f357b36aa471e Mon Sep 17 00:00:00 2001 From: alvinosh <38663469+alvinosh@users.noreply.github.com> Date: Fri, 8 Mar 2024 22:10:12 +0100 Subject: [PATCH] 19: Print URLs (#27) --- Cargo.lock | 13 +- USAGE.md | 34 ++- src/commands/file.rs | 38 ++-- src/commands/link.rs | 31 +-- src/error.rs | 57 +++++ src/main.rs | 497 +++++++++++++++++++++++++++++------------- src/models/cli.rs | 3 + src/models/entry.rs | 4 +- src/models/format.rs | 8 +- src/models/storage.rs | 239 ++++++++++---------- src/util.rs | 104 +++++---- 11 files changed, 655 insertions(+), 373 deletions(-) create mode 100644 src/error.rs diff --git a/Cargo.lock b/Cargo.lock index 7d8fda3a..6a133ce4 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -92,6 +92,7 @@ version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0ae92a5119aa49cdbcf6b9f893fe4e1d98b04ccbf82ee0584ad948a44a734dea" dependencies = [ + "anyhow", "arklib", "chrono", "clap", @@ -3362,7 +3363,7 @@ checksum = "43576ca501357b9b071ac53cdc7da8ef0cbd9493d8df094cd821777ea6e894d3" dependencies = [ "proc-macro2", "quote", - "syn 2.0.42", + "syn 2.0.52", ] [[package]] @@ -3896,20 +3897,20 @@ checksum = "e51733f11c9c4f72aa0c160008246859e340b00807569a0da0e7a1079b27ba85" name = "threadpool" version = "1.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f11c217e1416d6f036b870f14e0413d480dbf28edbee1f877abaf0206af43bb7" +checksum = "1e45bcbe8ed29775f228095caf2cd67af7a4ccf756ebff23a306bf3e8b47b24b" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.51" +version = "1.0.57" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "01742297787513b79cf8e29d1056ede1313e2420b7b3b15d0a768b4921f549df" +checksum = "a953cb265bef375dae3de6663da4d3804eee9682ea80d8e2542529b73c531c81" dependencies = [ "proc-macro2", "quote", - "syn 2.0.42", + "syn 2.0.52", ] [[package]] @@ -3983,7 +3984,7 @@ checksum = "5b8a1e28f2deaa14e508979454cb3a223b10b938b45af148bc0986de36f1923b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.42", + "syn 2.0.52", ] [[package]] diff --git a/USAGE.md b/USAGE.md index 1e8d4e5e..19d79286 100644 --- a/USAGE.md +++ b/USAGE.md @@ -74,7 +74,7 @@ $ ark-cli list ~/Pictures/ ``` But it's a bit boring and doesn't really tell anything, right? 
Various flags should be used to gain more knowledge about your collections of resources:
-* `--entry=id|path|both` to show the path,the id or both of a resource
+* `--entry=id|path|both|link` to show the id, the path, both, or the link of a resource
* `--timestamp=true` to show or not the last modified timestamp of a resource
* `--tags=true` to show or not the tags for every resource
* `--scores=true` to show or not the scores for every resource
@@ -85,28 +85,40 @@ For instance, you can list files with their paths and attached tags:
```
$ ark-cli list -pt
-30-4257856154 with tags search
-18-1909444406 with tags hello
-22-207093268 with tags search,engine
-38-103010298 with tags NO_TAGS
+30-4257856154 search
+18-1909444406 hello
+22-207093268 search,engine
+38-103010298 NO_TAGS
+```
+
+You can list the links of the files:
+
+```
+$ ark-cli list -l
+
+https://google.com
+https://news.ycombinator.com
+https://youtube.com
+https://github.com
+
```
Or, sort by score:
```
$ ark-cli list -s --sort=asc
-30-4257856154 with score NO_SCORE
-18-1909444406 with score 2
-38-103010298 with score 10
-22-207093268 with score 15
+30-4257856154 NO_SCORE
+18-1909444406 2
+38-103010298 10
+22-207093268 15
```
Finally, you can filter resources using their tags:
```
$ /tmp/ark-cli list -t --filter=search
-30-4257856154 with tags search
-22-207093268 with tags search,engine
+30-4257856154 search
+22-207093268 search,engine
```

## :zap: Low-level utilities :zap:
diff --git a/src/commands/file.rs b/src/commands/file.rs
index 6b8e6190..a9149137 100644
--- a/src/commands/file.rs
+++ b/src/commands/file.rs
@@ -1,24 +1,22 @@
+use crate::error::AppError;
 use crate::models::{format, format::Format};
-use arklib::{modify, modify_json, AtomicFile};
+use arklib::{modify, modify_json, AtomicFile, Result as ArklibResult};

 pub fn file_append(
     atomic_file: &AtomicFile,
     content: &str,
     format: Format,
-) -> Result<(), String> {
+) -> Result<(), AppError> {
     match format {
-        Format::Raw => modify(&atomic_file, |current| {
+        Format::Raw => Ok(modify(atomic_file, |current| {
             let mut combined_vec: Vec<u8> = current.to_vec();
             combined_vec.extend_from_slice(content.as_bytes());
             combined_vec
-        })
-        .map_err(|_| "ERROR: Could not append string".to_string()),
+        })?),
         Format::KeyValue => {
-            let values = format::key_value_to_str(&content)
-                .map_err(|_| "ERROR: Could not parse json".to_string())?;
+            let values = format::key_value_to_str(content)?;

-            append_json(&atomic_file, values.to_vec())
-                .map_err(|_| "ERROR: Could not append json".to_string())
+            Ok(append_json(atomic_file, values.to_vec())?)
         }
     }
 }
@@ -27,16 +25,16 @@ pub fn file_insert(
     atomic_file: &AtomicFile,
     content: &str,
     format: Format,
-) -> Result<(), String> {
+) -> Result<(), AppError> {
     match format {
-        Format::Raw => modify(&atomic_file, |_| content.as_bytes().to_vec())
-            .map_err(|_| "ERROR: Could not insert string".to_string()),
+        Format::Raw => {
+            Ok(modify(atomic_file, |_| content.as_bytes().to_vec())?)
+ } Format::KeyValue => { - let values = format::key_value_to_str(&content) - .map_err(|_| "ERROR: Could not parse json".to_string())?; + let values = format::key_value_to_str(content)?; modify_json( - &atomic_file, + atomic_file, |current: &mut Option| { let mut new = serde_json::Map::new(); for (key, value) in &values { @@ -48,7 +46,7 @@ pub fn file_insert( *current = Some(serde_json::Value::Object(new)); }, ) - .map_err(|e| e.to_string()) + .map_err(|e| AppError::FileOperationError(e.to_string())) } } } @@ -56,8 +54,8 @@ pub fn file_insert( fn append_json( atomic_file: &AtomicFile, data: Vec<(String, String)>, -) -> arklib::Result<()> { - modify_json(&atomic_file, |current: &mut Option| { +) -> ArklibResult<()> { + modify_json(atomic_file, |current: &mut Option| { let current_data = match current { Some(current) => { if let Ok(value) = serde_json::to_value(current) { @@ -74,7 +72,7 @@ fn append_json( }; let mut new = serde_json::Map::new(); - if let None = current_data { + if current_data.is_none() { for (key, value) in &data { new.insert( key.clone(), @@ -128,7 +126,7 @@ pub fn format_file(file: &AtomicFile) -> Option { .expect("Not a file") .to_str() .unwrap() - .split("_"); + .split('_'); let name = split.next().unwrap(); diff --git a/src/commands/link.rs b/src/commands/link.rs index 182580fe..fee36cc5 100644 --- a/src/commands/link.rs +++ b/src/commands/link.rs @@ -1,51 +1,54 @@ -use std::path::PathBuf; - use arklib::{id::ResourceId, link::Link}; +use std::path::PathBuf; use url::Url; -use crate::util::provide_index; +use crate::error::AppError; +use crate::util::provide_index; // Import your custom AppError type pub async fn create_link( root: &PathBuf, url: &str, title: &str, desc: Option, -) -> Result<(), String> { - let url = Url::parse(url).map_err(|_| "Invalid url")?; +) -> Result<(), AppError> { + let url = Url::parse(url) + .map_err(|_| AppError::LinkCreationError("Invalid url".to_owned()))?; let link: Link = Link::new(url, title.to_owned(), desc.to_owned()); - link.save(&root, true) + link.save(root, true) .await - .map_err(|e| e.to_string()) + .map_err(|e| AppError::LinkCreationError(e.to_string())) } pub fn load_link( root: &PathBuf, file_path: &Option, id: &Option, -) -> Result { +) -> Result { let path_from_index = id.map(|id| { - let index = provide_index(&root); + let index = provide_index(root); index.id2path[&id].as_path().to_path_buf() }); let path_from_user = file_path; let path = match (path_from_user, path_from_index) { (Some(path), Some(path2)) => { - if path.canonicalize().unwrap() != path2 { - Err(format!( + if path.canonicalize()? != path2 { + Err(AppError::LinkLoadError(format!( "Path {:?} was requested. But id {} maps to path {:?}", path, id.unwrap(), path2, - )) + ))) } else { Ok(path.to_path_buf()) } } (Some(path), None) => Ok(path.to_path_buf()), (None, Some(path)) => Ok(path), - (None, None) => Err("Provide a path or id for request.".to_owned())?, + (None, None) => Err(AppError::LinkLoadError( + "Provide a path or id for request.".to_owned(), + ))?, }?; - arklib::link::Link::load(root, &path).map_err(|e| e.to_string()) + Ok(arklib::link::Link::load(root, &path)?) 
} diff --git a/src/error.rs b/src/error.rs new file mode 100644 index 00000000..59bd92d6 --- /dev/null +++ b/src/error.rs @@ -0,0 +1,57 @@ +use arklib::ArklibError; +use std::io; +use thiserror::Error; + +#[derive(Debug, Error)] +pub enum InlineJsonParseError { + #[error("Invalid JSON: entries must be key-value pairs seperated by ':'")] + InvalidKeyValPair, +} + +#[derive(Debug, Error)] +pub enum AppError { + #[error("Couldn't retrieve home directory!")] + HomeDirNotFound, + + #[error("Couldn't create .ark directory: {0}")] + ArkDirectoryCreationError(String), + + #[error("Couldn't load app id: {0}")] + AppIdLoadError(String), + + #[error("Could not provide/read index: {0}")] + IndexError(String), + + #[error("Could not create storage: {0}")] + StorageCreationError(String), + + #[error("Failed to create link: {0}")] + LinkCreationError(String), + + #[error("Could not load link: {0}")] + LinkLoadError(String), + + #[error("File operation error: {0}")] + FileOperationError(String), + + #[error("Failed to create backup: {0}")] + BackupCreationError(String), + + #[error("Unknown render option")] + InvalidRenderOption, + + #[error("Storage not found: {0}")] + StorageNotFound(String), + + #[error("Invalid entry option")] + InvalidEntryOption, + + #[error(transparent)] + IoError(#[from] io::Error), + + #[error(transparent)] + ArklibError(#[from] ArklibError), + + #[error(transparent)] + InlineJsonParseError(#[from] InlineJsonParseError), +} diff --git a/src/main.rs b/src/main.rs index 3375f309..51b441ea 100644 --- a/src/main.rs +++ b/src/main.rs @@ -1,5 +1,5 @@ use std::fs::{create_dir_all, File}; -use std::io::Write; +use std::io::{Read, Write}; use std::path::PathBuf; use std::str::FromStr; @@ -22,12 +22,15 @@ use crate::models::format::Format; use crate::models::sort::Sort; use crate::models::storage::{Storage, StorageType}; +use crate::error::AppError; + use util::{ discover_roots, monitor_index, provide_root, read_storage_value, storages_exists, timestamp, translate_storage, }; mod commands; +mod error; mod models; mod util; @@ -35,28 +38,41 @@ const ARK_CONFIG: &str = ".config/ark"; const ARK_BACKUPS_PATH: &str = ".ark-backups"; const ROOTS_CFG_FILENAME: &str = "roots"; +struct StorageEntry { + path: Option, + resource: Option, + content: Option, + tags: Option>, + scores: Option, + datetime: Option, +} + #[tokio::main] -async fn main() { +async fn main() -> anyhow::Result<()> { env_logger::init(); let args = models::cli::Cli::parse(); - let app_id_dir = home_dir().expect("Couldn't retrieve home directory!"); + let app_id_dir = home_dir().ok_or(AppError::HomeDirNotFound)?; + let ark_dir = app_id_dir.join(".ark"); + if !ark_dir.exists() { - std::fs::create_dir(&ark_dir).unwrap(); + std::fs::create_dir(&ark_dir) + .map_err(|e| AppError::ArkDirectoryCreationError(e.to_string()))?; } + println!("Loading app id at {}...", ark_dir.display()); - let _ = app_id::load(ark_dir).map_err(|e| { - println!("Couldn't load app id: {}", e); - std::process::exit(1); - }); + + let _ = app_id::load(ark_dir) + .map_err(|e| AppError::AppIdLoadError(e.to_string()))?; match &args.command { Command::List { entry, entry_id, entry_path, + entry_link, root_dir, modified, @@ -65,111 +81,293 @@ async fn main() { sort, filter, } => { - let root = provide_root(root_dir); - - let entry_output = match (entry, entry_id, entry_path) { - (Some(e), false, false) => e, - (None, true, false) => &EntryOutput::Id, - (None, false, true) => &EntryOutput::Path, - (None, true, true) => &EntryOutput::Both, - (None, false, false) => 
&EntryOutput::Id, // default mode - _ => panic!( - "incompatible entry output options, please choose only one" - ), - }; - - let index = provide_index(&root).expect("could not provide index"); - - let resource_index = index.read().expect("could not read index"); - - let mut resources = resource_index + let root = provide_root(root_dir)?; + + let entry_output = match (entry, entry_id, entry_path, entry_link) { + (Some(e), false, false, false) => Ok(*e), + (None, true, false, false) => Ok(EntryOutput::Id), + (None, false, true, false) => Ok(EntryOutput::Path), + (None, true, true, false) => Ok(EntryOutput::Both), + (None, false, false, false) => Ok(EntryOutput::Id), + (None, false, false, true) => Ok(EntryOutput::Link), + _ => Err(AppError::InvalidEntryOption), + }?; + + let mut storage_entries: Vec = provide_index(&root) + .map_err(|_| { + AppError::IndexError("Could not provide index".to_owned()) + })? + .read() + .map_err(|_| { + AppError::IndexError("Could not read index".to_owned()) + })? .path2id .iter() - .map(|(path, resource)| { - let tags_list = read_storage_value( - &root, - "tags", - &resource.id.to_string(), - &None, - ) - .unwrap_or("NO_TAGS".to_string()); - - let scores_list = read_storage_value( - &root, - "scores", - &resource.id.to_string(), - &None, - ) - .unwrap_or("NO_SCORE".to_string()); - - let datetime = DateTime::::from(resource.modified); - - (path, resource, tags_list, scores_list, datetime) + .filter_map(|(path, resource)| { + let tags = if *tags { + Some( + read_storage_value( + &root, + "tags", + &resource.id.to_string(), + &None, + ) + .map_or(vec![], |s| { + s.split(',') + .map(|s| s.trim().to_string()) + .collect::>() + }), + ) + } else { + None + }; + + let scores = if *scores { + Some( + read_storage_value( + &root, + "scores", + &resource.id.to_string(), + &None, + ) + .map_or(0, |s| s.parse::().unwrap_or(0)), + ) + } else { + None + }; + + let datetime = if *modified { + let format = "%b %e %H:%M %Y"; + Some( + DateTime::::from(resource.modified) + .format(format) + .to_string(), + ) + } else { + None + }; + + let (path, resource, content) = match entry_output { + EntryOutput::Both => ( + Some(path.to_owned().into_path_buf()), + Some(resource.id), + None, + ), + EntryOutput::Path => { + (Some(path.to_owned().into_path_buf()), None, None) + } + EntryOutput::Id => (None, Some(resource.id), None), + EntryOutput::Link => match File::open(&path) { + Ok(mut file) => { + let mut contents = String::new(); + match file.read_to_string(&mut contents) { + Ok(_) => (None, None, Some(contents)), + Err(_) => return None, + } + } + Err(_) => return None, + }, + }; + + Some(StorageEntry { + path, + resource, + content, + tags, + scores, + datetime, + }) }) .collect::>(); match sort { - Some(Sort::Asc) => resources - .sort_by(|(_, _, _, _, a), (_, _, _, _, b)| a.cmp(b)), + Some(Sort::Asc) => { + storage_entries.sort_by(|a, b| a.datetime.cmp(&b.datetime)) + } - Some(Sort::Desc) => resources - .sort_by(|(_, _, _, _, a), (_, _, _, _, b)| b.cmp(a)), + Some(Sort::Desc) => { + storage_entries.sort_by(|a, b| b.datetime.cmp(&a.datetime)) + } None => (), }; if let Some(filter) = filter { - resources = resources - .into_iter() - .filter(|(_, _, tags_list, _, _)| { - tags_list - .split(',') - .any(|tag| tag.trim() == filter) - }) - .collect(); + storage_entries.retain(|entry| { + entry + .tags + .as_ref() + .map(|tags| tags.contains(filter)) + .unwrap_or(false) + }); } - for (path, resource, tags_list, scores_list, datetime) in resources - { - let mut output = String::new(); + let 
no_tags = "NO_TAGS"; + let no_scores = "NO_SCORE"; + + let longest_path = storage_entries + .iter() + .map(|entry| { + if let Some(path) = entry.path.as_ref() { + path.display().to_string().len() + } else { + 0 + } + }) + .max_by(|a, b| a.cmp(b)) + .unwrap_or(0); + + let longest_id = storage_entries.iter().fold(0, |acc, entry| { + if let Some(resource) = &entry.resource { + let id_len = resource.to_string().len(); + if id_len > acc { + id_len + } else { + acc + } + } else { + acc + } + }); + + let longest_tags = storage_entries.iter().fold(0, |acc, entry| { + let tags_len = entry + .tags + .as_ref() + .map(|tags| { + if tags.is_empty() { + no_tags.len() + } else { + tags.join(", ").len() + } + }) + .unwrap_or(0); + if tags_len > acc { + tags_len + } else { + acc + } + }); + + let longest_scores = + storage_entries.iter().fold(0, |acc, entry| { + let scores_len = entry + .scores + .as_ref() + .map(|score| { + if *score == 0 { + no_scores.len() + } else { + score.to_string().len() + } + }) + .unwrap_or(0); + if scores_len > acc { + scores_len + } else { + acc + } + }); - let entry_str = match entry_output { - EntryOutput::Id => resource.id.to_string(), - EntryOutput::Path => path.display().to_string(), - EntryOutput::Both => { - format!("{}@{}", resource.id, path.display()) + let longest_datetime = + storage_entries.iter().fold(0, |acc, entry| { + let datetime_len = entry + .datetime + .as_ref() + .map(|datetime| datetime.len()) + .unwrap_or(0); + if datetime_len > acc { + datetime_len + } else { + acc } - }; + }); + + let longest_content = + storage_entries.iter().fold(0, |acc, entry| { + let content_len = entry + .content + .as_ref() + .map(|content| content.len()) + .unwrap_or(0); + if content_len > acc { + content_len + } else { + acc + } + }); + + for entry in &storage_entries { + let mut output = String::new(); - output.push_str(&entry_str); + if let Some(content) = &entry.content { + output.push_str(&format!( + "{:width$} ", + content, + width = longest_content + )); + } - if *modified { - let timestamp_str = datetime - .format("%Y-%m-%d %H:%M:%S.%f") - .to_string(); + if let Some(path) = &entry.path { output.push_str(&format!( - " last modified on {}", - timestamp_str + "{:width$} ", + path.display(), + width = longest_path )); } - if *tags { - output.push_str(&format!(" with tags {}", tags_list)); + if let Some(resource) = &entry.resource { + output.push_str(&format!( + "{:width$} ", + resource.to_string(), + width = longest_id + )); + } + + if let Some(tags) = &entry.tags { + let tags_out = if tags.is_empty() { + no_tags.to_owned() + } else { + tags.join(", ") + }; + + output.push_str(&format!( + "{:width$} ", + tags_out, + width = longest_tags + )); + } + + if let Some(scores) = &entry.scores { + let scores_out = if *scores == 0 { + no_scores.to_owned() + } else { + scores.to_string() + }; + + output.push_str(&format!( + "{:width$} ", + scores_out, + width = longest_scores + )); } - if *scores { - output.push_str(&format!(" with score {}", scores_list)); + if let Some(datetime) = &entry.datetime { + output.push_str(&format!( + "{:width$} ", + datetime, + width = longest_datetime + )); } println!("{}", output); } } - Command::Backup { roots_cfg } => { let timestamp = timestamp().as_secs(); let backup_dir = home_dir() - .expect("Couldn't retrieve home directory!") - .join(&ARK_BACKUPS_PATH) - .join(×tamp.to_string()); + .ok_or(AppError::HomeDirNotFound)? 
+ .join(ARK_BACKUPS_PATH) + .join(timestamp.to_string()); if backup_dir.is_dir() { println!("Wait at least 1 second, please!"); @@ -177,11 +375,11 @@ async fn main() { } println!("Preparing backup:"); - let roots = discover_roots(roots_cfg); + let roots = discover_roots(roots_cfg)?; let (valid, invalid): (Vec, Vec) = roots .into_iter() - .partition(|root| storages_exists(&root)); + .partition(|root| storages_exists(root)); if !invalid.is_empty() { println!("These folders don't contain any storages:"); @@ -195,16 +393,20 @@ async fn main() { std::process::exit(0) } - create_dir_all(&backup_dir) - .expect("Couldn't create backup directory!"); + create_dir_all(&backup_dir).map_err(|_| { + AppError::BackupCreationError( + "Couldn't create backup directory!".to_owned(), + ) + })?; let mut roots_cfg_backup = - File::create(&backup_dir.join(&ROOTS_CFG_FILENAME)) - .expect("Couldn't backup roots config!"); + File::create(backup_dir.join(ROOTS_CFG_FILENAME))?; valid.iter().for_each(|root| { - writeln!(roots_cfg_backup, "{}", root.display()) - .expect("Couldn't write to roots config backup!") + let res = writeln!(roots_cfg_backup, "{}", root.display()); + if let Err(e) = res { + println!("Failed to write root to backup file: {}", e); + } }); println!("Performing backups:"); @@ -213,14 +415,14 @@ async fn main() { .enumerate() .for_each(|(i, root)| { println!("\tRoot {}", root.display()); - let storage_backup = backup_dir.join(&i.to_string()); + let storage_backup = backup_dir.join(i.to_string()); let mut options = CopyOptions::new(); options.overwrite = true; options.copy_inside = true; let result = dir::copy( - root.join(&arklib::ARK_FOLDER), + root.join(arklib::ARK_FOLDER), storage_backup, &options, ); @@ -232,19 +434,19 @@ async fn main() { println!("Backup created:\n\t{}", backup_dir.display()); } - Command::Collisions { root_dir } => monitor_index(&root_dir, None), + Command::Collisions { root_dir } => monitor_index(root_dir, None)?, Command::Monitor { root_dir, interval } => { let millis = interval.unwrap_or(1000); - monitor_index(&root_dir, Some(millis)) + monitor_index(root_dir, Some(millis))? 
} Command::Render { path, quality } => { let filepath = path.to_owned().unwrap(); let quality = match quality.to_owned().unwrap().as_str() { - "high" => PDFQuality::High, - "medium" => PDFQuality::Medium, - "low" => PDFQuality::Low, - _ => panic!("unknown render option"), - }; + "high" => Ok(PDFQuality::High), + "medium" => Ok(PDFQuality::Medium), + "low" => Ok(PDFQuality::Low), + _ => Err(AppError::InvalidRenderOption), + }?; let buf = File::open(&filepath).unwrap(); let dest_path = filepath.with_file_name( filepath @@ -256,7 +458,7 @@ async fn main() { + ".png", ); let img = arklib::pdf::render_preview_page(buf, quality); - img.save(PathBuf::from(dest_path)).unwrap(); + img.save(dest_path).unwrap(); } Command::Link(link) => match &link { Link::Create { @@ -265,11 +467,17 @@ async fn main() { title, desc, } => { - let root = provide_root(root_dir); - let url = url.as_ref().expect("ERROR: Url was not provided"); - let title = title - .as_ref() - .expect("ERROR: Title was not provided"); + let root = provide_root(root_dir)?; + let url = url.as_ref().ok_or_else(|| { + AppError::LinkCreationError( + "Url was not provided".to_owned(), + ) + })?; + let title = title.as_ref().ok_or_else(|| { + AppError::LinkCreationError( + "Title was not provided".to_owned(), + ) + })?; println!("Saving link..."); @@ -284,7 +492,7 @@ async fn main() { Ok(_) => { println!("Link saved successfully!"); } - Err(e) => println!("ERROR: {}", e), + Err(e) => println!("{}", e), } } @@ -293,15 +501,9 @@ async fn main() { file_path, id, } => { - let root = provide_root(root_dir); - let link = commands::link::load_link(&root, file_path, id); - - match link { - Ok(link) => { - println!("Link data:\n{:?}", link); - } - Err(e) => println!("ERROR: {}", e), - } + let root = provide_root(root_dir)?; + let link = commands::link::load_link(&root, file_path, id)?; + println!("Link data:\n{:?}", link); } }, Command::File(file) => match &file { @@ -315,7 +517,7 @@ async fn main() { } => { let (file_path, storage_type) = translate_storage(&Some(root_dir.to_owned()), storage) - .expect("ERROR: Could not find storage folder"); + .ok_or(AppError::StorageNotFound(storage.to_owned()))?; let storage_type = storage_type.unwrap_or(match type_ { Some(t) => *t, @@ -324,15 +526,11 @@ async fn main() { let format = format.unwrap_or(Format::Raw); - let mut storage = Storage::new(file_path, storage_type) - .expect("ERROR: Could not create storage"); + let mut storage = Storage::new(file_path, storage_type)?; - let resource_id = ResourceId::from_str(id) - .expect("ERROR: Could not parse id"); + let resource_id = ResourceId::from_str(id)?; - storage - .append(resource_id, content, format) - .expect("ERROR: Could not append content to storage"); + storage.append(resource_id, content, format)?; } FileCommand::Insert { @@ -345,7 +543,7 @@ async fn main() { } => { let (file_path, storage_type) = translate_storage(&Some(root_dir.to_owned()), storage) - .expect("ERROR: Could not find storage folder"); + .ok_or(AppError::StorageNotFound(storage.to_owned()))?; let storage_type = storage_type.unwrap_or(match type_ { Some(t) => *t, @@ -354,15 +552,11 @@ async fn main() { let format = format.unwrap_or(Format::Raw); - let mut storage = Storage::new(file_path, storage_type) - .expect("ERROR: Could not create storage"); + let mut storage = Storage::new(file_path, storage_type)?; - let resource_id = ResourceId::from_str(id) - .expect("ERROR: Could not parse id"); + let resource_id = ResourceId::from_str(id)?; - storage - .insert(resource_id, content, format) - 
.expect("ERROR: Could not insert content to storage"); + storage.insert(resource_id, content, format)?; } FileCommand::Read { @@ -373,25 +567,20 @@ async fn main() { } => { let (file_path, storage_type) = translate_storage(&Some(root_dir.to_owned()), storage) - .expect("ERROR: Could not find storage folder"); + .ok_or(AppError::StorageNotFound(storage.to_owned()))?; let storage_type = storage_type.unwrap_or(match type_ { Some(t) => *t, None => StorageType::File, }); - let mut storage = Storage::new(file_path, storage_type) - .expect("ERROR: Could not create storage"); + let mut storage = Storage::new(file_path, storage_type)?; - let resource_id = ResourceId::from_str(id) - .expect("ERROR: Could not parse id"); + let resource_id = ResourceId::from_str(id)?; - let output = storage.read(resource_id); + let output = storage.read(resource_id)?; - match output { - Ok(output) => println!("{}", output), - Err(e) => println!("ERROR: {}", e), - } + println!("{}", output); } }, Command::Storage(cmd) => match &cmd { @@ -401,34 +590,34 @@ async fn main() { type_, versions, } => { - let storage = storage - .as_ref() - .expect("ERROR: Storage was not provided"); + let storage = + storage + .as_ref() + .ok_or(AppError::StorageCreationError( + "Storage was not provided".to_owned(), + ))?; let versions = versions.unwrap_or(false); let (file_path, storage_type) = translate_storage(root_dir, storage) - .expect("ERROR: Could not find storage folder"); + .ok_or(AppError::StorageNotFound(storage.to_owned()))?; let storage_type = storage_type.unwrap_or(match type_ { Some(t) => *t, None => StorageType::File, }); - let mut storage = Storage::new(file_path, storage_type) - .expect("ERROR: Could not create storage"); + let mut storage = Storage::new(file_path, storage_type)?; - storage - .load() - .expect("ERROR: Could not load storage"); + storage.load()?; - let output = storage - .list(versions) - .expect("ERROR: Could not list storage content"); + let output = storage.list(versions)?; println!("{}", output); } }, - } + }; + + Ok(()) } diff --git a/src/models/cli.rs b/src/models/cli.rs index 322b39d8..24f3d485 100644 --- a/src/models/cli.rs +++ b/src/models/cli.rs @@ -52,6 +52,9 @@ pub enum Command { #[clap(long, short = 'p', action)] entry_path: bool, + #[clap(long, short = 'l', action)] + entry_link: bool, + #[clap(long, short, action)] modified: bool, diff --git a/src/models/entry.rs b/src/models/entry.rs index 174d54a6..475909bf 100644 --- a/src/models/entry.rs +++ b/src/models/entry.rs @@ -1,7 +1,8 @@ use clap::Parser; -#[derive(Parser, Debug)] +#[derive(Parser, Debug, Clone, Copy, PartialEq, Eq)] pub enum EntryOutput { + Link, Id, Path, Both, @@ -15,6 +16,7 @@ impl std::str::FromStr for EntryOutput { "id" => Ok(EntryOutput::Id), "path" => Ok(EntryOutput::Path), "both" => Ok(EntryOutput::Both), + "link" => Ok(EntryOutput::Link), _ => Err("Entry output must be either 'id', 'path' or 'both'"), } } diff --git a/src/models/format.rs b/src/models/format.rs index 33f487eb..c8d6fb55 100644 --- a/src/models/format.rs +++ b/src/models/format.rs @@ -1,3 +1,5 @@ +use crate::error::InlineJsonParseError; + #[derive(Debug, Clone, Copy)] pub enum Format { KeyValue, @@ -16,7 +18,9 @@ impl std::str::FromStr for Format { } } -pub fn key_value_to_str(s: &str) -> Result, String> { +pub fn key_value_to_str( + s: &str, +) -> Result, InlineJsonParseError> { let pairs: Vec<&str> = s.split(',').collect(); let mut values = Vec::new(); @@ -28,7 +32,7 @@ pub fn key_value_to_str(s: &str) -> Result, String> { let value = 
key_value[1].trim().to_string(); values.push((key, value)); } else { - return Err("Invalid key-value pair format".to_owned()); + return Err(InlineJsonParseError::InvalidKeyValPair); } } diff --git a/src/models/storage.rs b/src/models/storage.rs index f9ed932b..6e49d32b 100644 --- a/src/models/storage.rs +++ b/src/models/storage.rs @@ -7,6 +7,7 @@ use crate::{ self, file::{format_file, format_line}, }, + error::AppError, models::format::Format, }; @@ -38,53 +39,42 @@ impl Storage { pub fn new>( path: P, storage_type: StorageType, - ) -> Result { + ) -> Result { let path = path.into(); if !path.exists() { std::fs::create_dir_all(&path).map_err(|e| { - format!( + AppError::StorageCreationError(format!( "Failed to create storage folder at {:?} with error: {:?}", path, e - ) + )) })?; } Ok(Self { - path: path.into(), + path, storage_type, files: Vec::new(), }) } #[allow(dead_code)] - pub fn load(&mut self) -> Result<(), String> { + pub fn load(&mut self) -> Result<(), AppError> { match self.storage_type { StorageType::File => { - let atomic_file = - AtomicFile::new(self.path.clone()).map_err(|e| { - format!( - "Failed to create atomic file at {:?} with error: {:?}", - self.path, e - ) - })?; + let atomic_file = AtomicFile::new(self.path.clone())?; - let atomic_file_data = atomic_file.load().map_err(|e| { - format!( - "Failed to load atomic file at {:?} with error: {:?}", - self.path, e - ) - })?; + let atomic_file_data = atomic_file.load()?; - let data = atomic_file_data.read_to_string().map_err(|_| { - "Could not read atomic file content.".to_string() - })?; + let data = atomic_file_data.read_to_string()?; for (i, line) in data.lines().enumerate() { let mut line = line.split(':'); let id = line.next().unwrap(); match id.parse::().map_err(|_| { - format!("Failed to parse ResourceId from line: {i}",) + AppError::IndexError(format!( + "Failed to parse ResourceId from line: {i}", + )) }) { Ok(id) => self.files.push(id), Err(e) => { @@ -96,20 +86,26 @@ impl Storage { StorageType::Folder => { let folder_entries = std::fs::read_dir(&self.path).map_err(|e| { - format!( + AppError::FileOperationError(format!( "Failed to read folder at {:?} with error: {:?}", self.path, e - ) + )) })?; for entry in folder_entries { let entry = entry.map_err(|e| { - format!("Error reading folder entry: {:?}", e) + AppError::FileOperationError(format!( + "Error reading folder entry: {:?}", + e + )) })?; if let Some(file_name) = entry.file_name().to_str() { let id = file_name.parse::().map_err(|_| { - format!("Failed to parse ResourceId from folder entry: {:?}", file_name) + AppError::IndexError(format!( + "Failed to parse ResourceId from folder entry: {:?}", + file_name + )) })?; self.files.push(id); } @@ -125,21 +121,23 @@ impl Storage { id: ResourceId, content: &str, format: Format, - ) -> Result<(), String> { + ) -> Result<(), AppError> { match self.storage_type { StorageType::File => { let atomic_file = AtomicFile::new(&self.path).map_err(|e| { - format!( + AppError::FileOperationError(format!( "Failed to create atomic file at {} with error: {:?}", self.path.display(), e - ) + )) })?; let content = match format { Format::KeyValue => return Err( - "Key value format is not supported for file storage" - .to_owned(), + AppError::StorageCreationError( + "Key value format is not supported for file storage" + .to_owned(), + ), ), Format::Raw => format!("{}:{}\n", id, content), }; @@ -149,81 +147,74 @@ impl Storage { &content, Format::Raw, ) { - Ok(_) => { - return Ok(()); - } - Err(e) => { - return Err(e); - } + Ok(_) => 
Ok(()), + Err(e) => Err(e), } } StorageType::Folder => { let folder_path = self.path.join(id.to_string()); if !folder_path.exists() { std::fs::create_dir_all(&folder_path).map_err(|e| { - format!( + AppError::StorageCreationError(format!( "Failed to create folder at {:?} with error: {:?}", folder_path, e - ) + )) })?; } let atomic_file = AtomicFile::new(&folder_path) .map_err(|e| { - format!( + AppError::FileOperationError(format!( "Failed to create atomic file at {} with error: {:?}", self.path.display(), e - ) + )) })?; - match commands::file::file_append( - &atomic_file, - &content, - format, - ) { - Ok(_) => { - return Ok(()); - } - Err(e) => { - return Err(e); - } + match commands::file::file_append(&atomic_file, content, format) + { + Ok(_) => Ok(()), + Err(e) => Err(e), } } - }; + } } - pub fn read(&mut self, id: ResourceId) -> Result { + pub fn read(&mut self, id: ResourceId) -> Result { match self.storage_type { StorageType::File => { let atomic_file = AtomicFile::new(&self.path).map_err(|e| { - format!( + AppError::FileOperationError(format!( "Failed to create atomic file at {} with error: {:?}", self.path.display(), e - ) + )) })?; let atomic_file_data = atomic_file.load().map_err(|e| { - format!( + AppError::FileOperationError(format!( "Failed to load atomic file at {:?} with error: {:?}", self.path, e - ) + )) })?; let data = atomic_file_data.read_to_string().map_err(|_| { - "Could not read atomic file content.".to_string() + AppError::FileOperationError( + "Could not read atomic file content.".to_string(), + ) })?; for (i, line) in data.lines().enumerate() { let mut line = line.split(':'); let line_id: &str = line.next().unwrap(); match line_id.parse::().map_err(|_| { - format!("Failed to parse ResourceId from line: {i}",) + AppError::IndexError(format!( + "Failed to parse ResourceId from line: {i}", + )) }) { Ok(line_id) => { if id == line_id { let data = line.next().unwrap(); - return Ok(format!("{}", data)); + return Ok(data.to_string()); } } Err(e) => { @@ -232,31 +223,39 @@ impl Storage { } } - Err(format!("Resource with id {} not found", id)) + Err(AppError::StorageNotFound(format!( + "Resource with id {} not found", + id + ))) } StorageType::Folder => { let folder_path = self.path.join(id.to_string()); if !folder_path.exists() { - return Err(format!("Resource with id {} not found", id)); + return Err(AppError::StorageNotFound(format!( + "Resource with id {} not found", + id + ))); } let atomic_file = AtomicFile::new(&folder_path) .map_err(|e| { - format!( + AppError::FileOperationError(format!( "Failed to create atomic file at {} with error: {:?}", self.path.display(), e - ) + )) })?; let atomic_file_data = atomic_file.load().map_err(|e| { - format!( + AppError::FileOperationError(format!( "Failed to load atomic file at {:?} with error: {:?}", self.path, e - ) + )) })?; let data = atomic_file_data.read_to_string().map_err(|_| { - "Could not read atomic file content.".to_string() + AppError::FileOperationError( + "Could not read atomic file content.".to_string(), + ) })?; Ok(data) @@ -269,21 +268,23 @@ impl Storage { id: ResourceId, content: &str, format: Format, - ) -> Result<(), String> { + ) -> Result<(), AppError> { match self.storage_type { StorageType::File => { let atomic_file = AtomicFile::new(&self.path).map_err(|e| { - format!( + AppError::FileOperationError(format!( "Failed to create atomic file at {} with error: {:?}", self.path.display(), e - ) + )) })?; let content = match format { Format::KeyValue => return Err( - "Key value format is not supported for 
file storage" - .to_owned(), + AppError::StorageCreationError( + "Key value format is not supported for file storage" + .to_owned(), + ), ), Format::Raw => format!("{}:{}\n", id, content), }; @@ -293,81 +294,79 @@ impl Storage { &content, Format::Raw, ) { - Ok(_) => { - return Ok(()); - } - Err(e) => { - return Err(e); - } + Ok(_) => Ok(()), + Err(e) => Err(e), } } StorageType::Folder => { let folder_path = self.path.join(id.to_string()); if !folder_path.exists() { std::fs::create_dir_all(&folder_path).map_err(|e| { - format!( + AppError::StorageCreationError(format!( "Failed to create folder at {:?} with error: {:?}", folder_path, e - ) + )) })?; } let atomic_file = AtomicFile::new(&folder_path) .map_err(|e| { - format!( + AppError::FileOperationError(format!( "Failed to create atomic file at {} with error: {:?}", self.path.display(), e - ) + )) })?; - match commands::file::file_insert( - &atomic_file, - &content, - format, - ) { - Ok(_) => { - return Ok(()); - } - Err(e) => { - return Err(e); - } + match commands::file::file_insert(&atomic_file, content, format) + { + Ok(_) => Ok(()), + Err(e) => Err(e), } } - }; + } } - pub fn list(&self, versions: bool) -> Result { + pub fn list(&self, versions: bool) -> Result { let mut output = String::new(); if !versions { for id in &self.files { - writeln!(output, "{}", id) - .map_err(|_| "Could not write to output".to_string())?; + writeln!(output, "{}", id).map_err(|_| { + AppError::FileOperationError( + "Could not write to output".to_string(), + ) + })?; } } else { match self.storage_type { StorageType::File => { let atomic_file = AtomicFile::new(&self.path) .map_err(|e| { - format!( + AppError::FileOperationError(format!( "Failed to create atomic file at {} with error: {:?}", self.path.display(), e - ) + )) })?; let atomic_file_data = atomic_file.load().map_err(|e| { - format!( + AppError::FileOperationError(format!( "Failed to load atomic file at {:?} with error: {:?}", self.path, e - ) + )) })?; - writeln!(output, "{: <16} {}", "id", "value") - .map_err(|_| "Could not write to output".to_string())?; + writeln!(output, "{: <16} value", "id").map_err(|_| { + AppError::FileOperationError( + "Could not write to output".to_string(), + ) + })?; let data = atomic_file_data.read_to_string().map_err(|_| { - "Could not read atomic file content.".to_string() + AppError::FileOperationError( + "Could not read atomic file content." + .to_string(), + ) })?; for line in data.lines() { @@ -375,24 +374,24 @@ impl Storage { let id = line.next(); let data = line.next(); - match (id, data) { - (Some(id), Some(data)) => { - writeln!(output, "{: <16} {}", id, data) - .map_err(|_| { - "Could not write to output".to_string() - })?; - } - _ => {} + if let (Some(id), Some(data)) = (id, data) { + writeln!(output, "{: <16} {}", id, data).map_err( + |_| { + AppError::FileOperationError( + "Could not write to output".to_string(), + ) + }, + )?; } } } StorageType::Folder => { let folder_entries = std::fs::read_dir(&self.path) .map_err(|e| { - format!( + AppError::FileOperationError(format!( "Failed to read folder at {:?} with error: {:?}", self.path, e - ) + )) })? 
.filter_map(|v| v.ok()) .filter(|e| { @@ -412,12 +411,18 @@ impl Storage { "{}", format_line("version", "name", "machine", "path"), ) - .map_err(|_| "Could not write to output".to_string())?; + .map_err(|_| { + AppError::FileOperationError( + "Could not write to output".to_string(), + ) + })?; for entry in folder_entries { if let Some(file) = format_file(&entry) { writeln!(output, "{}", file).map_err(|_| { - "Could not write to output".to_string() + AppError::FileOperationError( + "Could not write to output".to_string(), + ) })?; } } diff --git a/src/util.rs b/src/util.rs index 3d3ffa95..65905e76 100644 --- a/src/util.rs +++ b/src/util.rs @@ -15,58 +15,56 @@ use std::thread; use std::time::{Duration, Instant, SystemTime, UNIX_EPOCH}; use std::{fs::File, path::PathBuf}; +use crate::error::AppError; use crate::models::storage::{Storage, StorageType}; use crate::ARK_CONFIG; -pub fn discover_roots(roots_cfg: &Option) -> Vec { +pub fn discover_roots( + roots_cfg: &Option, +) -> Result, AppError> { if let Some(path) = roots_cfg { println!( "\tRoots config provided explicitly:\n\t\t{}", path.display() ); - let config = File::open(&path).expect("File doesn't exist!"); + let config = File::open(path)?; - parse_roots(config) + Ok(parse_roots(config)) + } else if let Ok(config) = File::open(ARK_CONFIG) { + println!( + "\tRoots config was found automatically:\n\t\t{}", + &ARK_CONFIG + ); + + Ok(parse_roots(config)) } else { - if let Ok(config) = File::open(&ARK_CONFIG) { - println!( - "\tRoots config was found automatically:\n\t\t{}", - &ARK_CONFIG - ); + println!("\tRoots config wasn't found."); + + println!("Looking for a folder containing tag storage:"); + let path = + canonicalize(current_dir().expect("Can't open current directory!")) + .expect("Couldn't canonicalize working directory!"); + + let result = path.ancestors().find(|path| { + println!("\t{}", path.display()); + storages_exists(path) + }); - parse_roots(config) + if let Some(root) = result { + println!("Root folder found:\n\t{}", root.display()); + Ok(vec![root.to_path_buf()]) } else { - println!("\tRoots config wasn't found."); - - println!("Looking for a folder containing tag storage:"); - let path = canonicalize( - current_dir().expect("Can't open current directory!"), - ) - .expect("Couldn't canonicalize working directory!"); - - let result = path.ancestors().find(|path| { - println!("\t{}", path.display()); - storages_exists(path) - }); - - if let Some(root) = result { - println!("Root folder found:\n\t{}", root.display()); - vec![root.to_path_buf()] - } else { - println!("Root folder wasn't found."); - vec![] - } + println!("Root folder wasn't found."); + Ok(vec![]) } } } -pub fn provide_root(root_dir: &Option) -> PathBuf { +pub fn provide_root(root_dir: &Option) -> Result { if let Some(path) = root_dir { - path.clone() + Ok(path.clone()) } else { - current_dir() - .expect("Can't open current directory!") - .clone() + Ok(current_dir()?) 
} } @@ -78,12 +76,15 @@ pub fn provide_index(root_dir: &PathBuf) -> ResourceIndex { index.clone() } -pub fn monitor_index(root_dir: &Option, interval: Option) { - let dir_path = provide_root(root_dir); +pub fn monitor_index( + root_dir: &Option, + interval: Option, +) -> Result<(), AppError> { + let dir_path = provide_root(root_dir)?; println!("Building index of folder {}", dir_path.display()); let start = Instant::now(); - let dir_path = provide_root(root_dir); + let result = arklib::provide_index(dir_path); let duration = start.elapsed(); @@ -126,10 +127,12 @@ pub fn monitor_index(root_dir: &Option, interval: Option) { } Err(err) => println!("Failure: {:?}", err), } + + Ok(()) } pub fn storages_exists(path: &Path) -> bool { - let meta = metadata(path.join(&arklib::ARK_FOLDER)); + let meta = metadata(path.join(arklib::ARK_FOLDER)); if let Ok(meta) = meta { return meta.is_dir(); } @@ -138,7 +141,7 @@ pub fn storages_exists(path: &Path) -> bool { } pub fn parse_roots(config: File) -> Vec { - return BufReader::new(config) + BufReader::new(config) .lines() .filter_map(|line| match line { Ok(path) => Some(PathBuf::from(path)), @@ -147,14 +150,14 @@ pub fn parse_roots(config: File) -> Vec { None } }) - .collect(); + .collect() } pub fn timestamp() -> Duration { let start = SystemTime::now(); - return start + start .duration_since(UNIX_EPOCH) - .expect("Time went backwards!"); + .expect("Time went backwards!") } pub fn translate_storage( @@ -170,42 +173,49 @@ pub fn translate_storage( match storage.to_lowercase().as_str() { "tags" => Some(( provide_root(root) + .ok()? .join(ARK_FOLDER) .join(TAG_STORAGE_FILE), Some(StorageType::File), )), "scores" => Some(( provide_root(root) + .ok()? .join(ARK_FOLDER) .join(SCORE_STORAGE_FILE), Some(StorageType::File), )), "stats" => Some(( provide_root(root) + .ok()? .join(ARK_FOLDER) .join(STATS_FOLDER), Some(StorageType::Folder), )), "properties" => Some(( provide_root(root) + .ok()? .join(ARK_FOLDER) .join(PROPERTIES_STORAGE_FOLDER), Some(StorageType::Folder), )), "metadata" => Some(( provide_root(root) + .ok()? .join(ARK_FOLDER) .join(METADATA_STORAGE_FOLDER), Some(StorageType::Folder), )), "previews" => Some(( provide_root(root) + .ok()? .join(ARK_FOLDER) .join(PREVIEWS_STORAGE_FOLDER), Some(StorageType::Folder), )), "thumbnails" => Some(( provide_root(root) + .ok()? .join(ARK_FOLDER) .join(THUMBNAILS_STORAGE_FOLDER), Some(StorageType::Folder), @@ -219,10 +229,10 @@ pub fn read_storage_value( storage: &str, id: &str, type_: &Option, -) -> Result { +) -> Result { let (file_path, storage_type) = translate_storage(&Some(root_dir.to_owned()), storage) - .expect("ERROR: Could not find storage folder"); + .ok_or(AppError::StorageNotFound(storage.to_owned()))?; let storage_type = storage_type.unwrap_or(match type_ { Some(type_) => match type_.to_lowercase().as_str() { @@ -233,11 +243,9 @@ pub fn read_storage_value( None => StorageType::File, }); - let mut storage = Storage::new(file_path, storage_type) - .expect("ERROR: Could not create storage"); + let mut storage = Storage::new(file_path, storage_type)?; - let resource_id = - ResourceId::from_str(id).expect("ERROR: Could not parse id"); + let resource_id = ResourceId::from_str(id)?; storage.read(resource_id) }
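
For reviewers who want the error-handling pattern of this patch in isolation: the sketch below is not part of the diff. It only illustrates how a `thiserror`-derived enum like the new `AppError` composes with `?` and an `anyhow::Result` entry point, which is the structure `src/error.rs` and `src/main.rs` adopt above. The helper name `read_index_stub` and the example path are hypothetical.

```rust
use std::{fs, io};

use thiserror::Error;

// Minimal stand-in for the `AppError` enum introduced in src/error.rs.
// `thiserror` derives `Display` from the `#[error]` attributes and a
// `From<io::Error>` impl from `#[from]`, so `?` converts errors for us.
#[derive(Debug, Error)]
enum AppError {
    #[error("Could not provide/read index: {0}")]
    IndexError(String),

    #[error(transparent)]
    IoError(#[from] io::Error),
}

// Hypothetical helper: any function returning `Result<_, AppError>` can
// use `?` directly on `io::Error`, mirroring how the patched commands
// bubble errors up instead of calling `.expect(...)` or building Strings.
fn read_index_stub(path: &str) -> Result<String, AppError> {
    let data = fs::read_to_string(path)?; // io::Error -> AppError via #[from]
    if data.is_empty() {
        return Err(AppError::IndexError(format!("{path} is empty")));
    }
    Ok(data)
}

// `main` returns `anyhow::Result<()>`, as in the patched src/main.rs:
// `anyhow::Error` wraps any `std::error::Error + Send + Sync + 'static`,
// so `AppError` flows through `?` and its `Display` message is printed
// if the program exits with an error.
fn main() -> anyhow::Result<()> {
    let contents = read_index_stub("/tmp/ark-example-index")?;
    println!("{contents}");
    Ok(())
}
```

The `#[error(transparent)]` plus `#[from]` variants are what allow the patch to replace the old `.map_err(|e| e.to_string())` chains with plain `?` throughout `src/commands` and `src/models/storage.rs`.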