
Commit

Merge pull request #768 from ReFirmLabs/json_stdout
Add ability to stream JSON results to stdout
devttys0 authored Nov 26, 2024
2 parents 97bb6ef + d31f336 commit 24ccbc4
Showing 3 changed files with 73 additions and 63 deletions.
2 changes: 1 addition & 1 deletion src/cliparser.rs
@@ -35,7 +35,7 @@ pub struct CliArgs {
#[arg(short = 'E', long, conflicts_with = "extract")]
pub entropy: bool,

/// Log JSON results to a file
/// Log JSON results to a file ('-' for stdout)
#[arg(short, long)]
pub log: Option<String>,

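Note: with this change, passing '-' as the log file name streams JSON results to standard output instead of writing them to a file. For example, an invocation along the lines of binwalk --log - <target> (the target name here is a placeholder) prints each result as it is produced; the short form -l works as well, since the argument is declared with both a short and a long flag.
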
122 changes: 65 additions & 57 deletions src/json.rs
@@ -8,66 +8,80 @@ use std::io::Write;
use crate::binwalk::AnalysisResults;
use crate::entropy::FileEntropy;

const STDOUT: &str = "-";
const JSON_LIST_START: &str = "[\n";
const JSON_LIST_END: &str = "\n]\n";
const JSON_LIST_SEP: &str = ",\n";

#[derive(Debug, Serialize, Deserialize)]
pub enum JSONType {
Entropy(FileEntropy),
Analysis(AnalysisResults),
}

/// If file does not exist, write "[\n<json_data>\n]".
/// Else, seek to EOF -1 and write ",\n<json_data>\n]".
pub fn log(json_file: &Option<String>, results: JSONType) {
const JSON_LIST_START: &str = "[\n";
const JSON_LIST_END: &str = "\n]";
const JSON_COMMA_SEPERATOR: &str = ",\n";
#[derive(Debug, Default, Clone)]
pub struct JsonLogger {
pub json_file: Option<String>,
pub json_file_initialized: bool,
}

match json_file {
None => (),
Some(file_name) => {
// Convert analysis results to JSON
match serde_json::to_string_pretty(&results) {
Err(e) => panic!("Failed to convert analysis results to JSON: {}", e),
Ok(json) => {
// Open file for reading and writing, create if does not already exist
match fs::OpenOptions::new()
.create(true)
.append(true)
.read(true)
.open(file_name)
{
Err(e) => {
error!("Failed to open JSON log file '{}': {}", file_name, e);
}
Ok(mut fp) => {
// Seek to the end of the file and get the cursor position
match fp.seek(io::SeekFrom::End(0)) {
Err(e) => {
error!("Failed to see to end of JSON file: {}", e);
}
Ok(pos) => {
if pos == 0 {
// If EOF is at offset 0, this file is empty and needs an opening JSON list character
write_to_json_file(&fp, JSON_LIST_START.to_string());
} else {
// If there is already data in the file we want to overwrite the last byte, which should be a closing JSON list character, with a comma
if let Err(e) = fp.seek(io::SeekFrom::Start(
pos - (JSON_LIST_END.len() as u64),
)) {
error!("Failed to seek to EOF-1 in JSON file: {}", e);
return;
} else {
write_to_json_file(
&fp,
JSON_COMMA_SEPERATOR.to_string(),
);
}
}
impl JsonLogger {
pub fn new(log_file: Option<String>) -> JsonLogger {
let mut new_instance = JsonLogger {
..Default::default()
};

if log_file.is_some() {
new_instance.json_file = Some(log_file.unwrap().clone());
}

// Write the JSON data to file
write_to_json_file(&fp, json);
new_instance
}

// Write a closing JSON list character to file
write_to_json_file(&fp, JSON_LIST_END.to_string());
pub fn close(&self) {
self.write_json(JSON_LIST_END.to_string());
}

pub fn log(&mut self, results: JSONType) {
// Convert analysis results to JSON
match serde_json::to_string_pretty(&results) {
Err(e) => error!("Failed to convert analysis results to JSON: {}", e),
Ok(json) => {
if !self.json_file_initialized {
self.write_json(JSON_LIST_START.to_string());
self.json_file_initialized = true;
} else {
self.write_json(JSON_LIST_SEP.to_string());
}
self.write_json(json);
}
}
}

fn write_json(&self, data: String) {
if let Some(log_file) = &self.json_file {
if log_file == STDOUT {
print!("{data}");
} else {
// Open file for reading and writing, create if does not already exist
match fs::OpenOptions::new()
.create(true)
.append(true)
.read(true)
.open(log_file)
{
Err(e) => {
error!("Failed to open JSON log file '{}': {}", log_file, e);
}
Ok(mut fp) => {
// Seek to the end of the file and get the cursor position
match fp.seek(io::SeekFrom::End(0)) {
Err(e) => {
error!("Failed to seek to end of JSON file: {}", e);
}
Ok(_) => {
if let Err(e) = fp.write_all(data.as_bytes()) {
error!("Failed to write to JSON log file: {}", e);
}
}
}
@@ -77,9 +91,3 @@ pub fn log(json_file: &Option<String>, results: JSONType) {
}
}
}

fn write_to_json_file(mut fp: &fs::File, data: String) {
if let Err(e) = fp.write_all(data.as_bytes()) {
error!("Failed to write to JSON log file: {}", e);
}
}
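
The rewritten json.rs builds its output incrementally: the JSON list is opened with "[\n" before the first entry, every later entry is prefixed with the ",\n" separator, and close() terminates the list with "\n]\n". That framing is what allows results to be streamed to stdout (or appended to a file) as they are found. Below is a minimal, self-contained sketch of the same technique; it is not code from this commit, it assumes serde (with the derive feature) and serde_json as dependencies, and the Entry struct is an illustrative stand-in for binwalk's real result types.

use std::io::Write;

// Illustrative stand-in for binwalk's result types (not part of this commit).
#[derive(serde::Serialize)]
struct Entry {
    offset: u64,
    description: String,
}

fn main() {
    let entries = vec![
        Entry { offset: 0, description: "first result".into() },
        Entry { offset: 512, description: "second result".into() },
    ];

    let mut out = std::io::stdout();

    // Open the JSON list before the first entry, as JsonLogger does on the first log() call.
    out.write_all(b"[\n").unwrap();

    for (i, entry) in entries.iter().enumerate() {
        // Every entry after the first is prefixed with a comma separator.
        if i > 0 {
            out.write_all(b",\n").unwrap();
        }
        let json = serde_json::to_string_pretty(entry).unwrap();
        out.write_all(json.as_bytes()).unwrap();
    }

    // Terminate the list once logging is done, as JsonLogger::close() does.
    out.write_all(b"\n]\n").unwrap();
}

In the commit itself this framing lives in JsonLogger::log() and JsonLogger::close(), with write_json() choosing between printing to stdout (when the log file name is "-") and appending to a regular file.
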
12 changes: 7 additions & 5 deletions src/main.rs
@@ -63,16 +63,16 @@ fn main() {
panic!("No target file name specified! Try --help.");
}

let mut json_logger = json::JsonLogger::new(cliargs.log);

// If entropy analysis was requested, generate the entropy graph and return
if cliargs.entropy {
display::print_plain(cliargs.quiet, "Calculating file entropy...");

if let Ok(entropy_results) = entropy::plot(cliargs.file_name.unwrap()) {
// Log entropy results to JSON file, if requested
json::log(
&cliargs.log,
json::JSONType::Entropy(entropy_results.clone()),
);
json_logger.log(json::JSONType::Entropy(entropy_results.clone()));
json_logger.close();

display::print_plain(cliargs.quiet, "entropy graph saved to: ");
display::println_plain(cliargs.quiet, &entropy_results.file);
@@ -190,7 +190,7 @@ fn main() {
file_count += 1;

// Log analysis results to JSON file
json::log(&cliargs.log, json::JSONType::Analysis(results.clone()));
json_logger.log(json::JSONType::Analysis(results.clone()));

// Nothing found? Nothing else to do for this file.
if results.file_map.is_empty() {
@@ -219,6 +219,8 @@
}
}

json_logger.close();

// If BINWALK_RM_SYMLINK env var was set, delete the base_target_file symlink
if (cliargs.carve || cliargs.extract) && std::env::var(BINWALK_RM_SYMLINK).is_ok() {
if let Err(e) = std::fs::remove_file(&binwalker.base_target_file) {

