Small various fixes #112

Merged
merged 11 commits on Dec 6, 2024
21 changes: 12 additions & 9 deletions src/bin/cargo-ziggy/coverage.rs
@@ -36,6 +36,10 @@ impl Cover {

let _ = process::Command::new(format!("./target/coverage/debug/{}", &self.target))
.arg(format!("{}", shared_corpus.display()))
.env(
"LLVM_PROFILE_FILE",
"target/coverage/debug/deps/coverage-%p-%m.profraw",
)
.spawn()
.unwrap()
.wait_with_output()
@@ -84,23 +88,23 @@ impl Cover {
let cargo = env::var("CARGO").unwrap_or_else(|_| String::from("cargo"));

let mut coverage_rustflags = env::var("COVERAGE_RUSTFLAGS")
.unwrap_or_else(|_| String::from("--cfg=coverage -Zprofile -Ccodegen-units=1 -Copt-level=0 -Clink-dead-code -Coverflow-checks=off -Zpanic_abort_tests -Cpanic=abort"));
.unwrap_or_else(|_| String::from("-Cinstrument-coverage"));
coverage_rustflags.push_str(&env::var("RUSTFLAGS").unwrap_or_default());

process::Command::new(cargo)
let build = process::Command::new(cargo)
.args([
"rustc",
"--target-dir=target/coverage",
"--features=ziggy/coverage",
])
.env("RUSTFLAGS", coverage_rustflags)
.env("RUSTDOCFLAGS", "-Cpanic=unwind")
.env("CARGO_INCREMENTAL", "0")
.env("RUSTC_BOOTSTRAP", "1") // Trick to avoid forcing user to use rust nightly
.spawn()
.context("⚠️ couldn't spawn rustc for coverage")?
.wait()
.context("⚠️ couldn't wait for the rustc during coverage")?;
if !build.success() {
return Err(anyhow!("⚠️ build failed"));
}
Ok(())
}
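
> Note on the change above: the old `-Zprofile` flags produced gcov-style `.gcda` files and relied on nightly-only behaviour, while `-Cinstrument-coverage` is LLVM source-based coverage and emits `.profraw` files at the path given by `LLVM_PROFILE_FILE` (where `%p` expands to the process ID and `%m` to the instrumentation module signature). A minimal sketch of the equivalent manual workflow is below; the target name, paths and the `grcov` invocation are illustrative assumptions, not taken from this diff.

```rust
use std::process::Command;

fn main() -> std::io::Result<()> {
    // 1. Build with LLVM source-based coverage instrumentation.
    Command::new("cargo")
        .args(["build", "--target-dir=target/coverage"])
        .env("RUSTFLAGS", "-Cinstrument-coverage")
        .status()?;

    // 2. Run the instrumented binary; each process writes its own .profraw
    //    file (%p = process id, %m = module signature).
    Command::new("./target/coverage/debug/my-fuzz-target") // hypothetical name
        .env(
            "LLVM_PROFILE_FILE",
            "target/coverage/debug/deps/coverage-%p-%m.profraw",
        )
        .status()?;

    // 3. Turn the .profraw files into an HTML report (illustrative grcov flags).
    Command::new("grcov")
        .args([
            ".",
            "--binary-path",
            "target/coverage/debug/",
            "-s",
            ".",
            "-t",
            "html",
            "-o",
            "target/coverage/html",
        ])
        .status()?;

    Ok(())
}
```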

@@ -129,11 +133,10 @@ impl Cover {
}

pub fn clean_old_cov() -> Result<(), anyhow::Error> {
if let Ok(gcda_files) = glob("target/coverage/debug/deps/*.gcda") {
for file in gcda_files.flatten() {
if let Ok(profile_files) = glob("target/coverage/debug/deps/*.profraw") {
for file in profile_files.flatten() {
let file_string = &file.display();
fs::remove_file(&file)
.context(format!("⚠️ couldn't find {} during coverage", file_string))?;
fs::remove_file(&file).context(format!("⚠️ couldn't remove {}", file_string))?;
}
}
Ok(())
90 changes: 57 additions & 33 deletions src/bin/cargo-ziggy/fuzz.rs
@@ -16,9 +16,7 @@ use std::{
use strip_ansi_escapes::strip_str;

/// Main logic for managing fuzzers and the fuzzing process in ziggy.

/// ## Initial minimization logic

/// When launching fuzzers, if initial corpora exist, they are merged together and we minimize it
/// with both AFL++ and Honggfuzz.
/// ```text
@@ -34,7 +32,6 @@ use strip_ansi_escapes::strip_str;
/// honggfuzz -i corpus -o corpus
/// ```
/// The `all_afl_corpora` directory corresponds to the `output/target_name/afl/**/queue/` directories.

impl Fuzz {
pub fn corpus(&self) -> String {
self.corpus
@@ -153,7 +150,7 @@ impl Fuzz {

self.start_time = Instant::now();

let mut last_synced_queue_id: u32 = 0;
let mut last_synced_created_time: Option<SystemTime> = None;
let mut last_sync_time = Instant::now();
let mut afl_output_ok = false;

@@ -303,24 +300,60 @@ impl Fuzz {
// If both fuzzers are running, we copy over AFL++'s queue for consumption by Honggfuzz.
// Otherwise, if only AFL++ is up we copy AFL++'s queue to the global corpus.
// We do this every 10 seconds
if self.afl() && last_sync_time.elapsed().as_secs() > 10 {
let afl_corpus = glob(&format!(
"{}/afl/mainaflfuzzer/queue/*",
self.output_target(),
))?
.flatten();
for file in afl_corpus {
if let Some((file_id, file_name)) = extract_file_id(&file) {
if file_id > last_synced_queue_id {
let copy_destination = match self.honggfuzz() {
true => format!("{}/queue/{file_name}", self.output_target()),
false => format!("{}/corpus/{file_name}", self.output_target()),
};
let _ = fs::copy(&file, copy_destination);
last_synced_queue_id = file_id;
if last_sync_time.elapsed().as_secs() > 10 {
let mut files = vec![];
if self.afl() {
files.append(
&mut glob(&format!(
"{}/afl/mainaflfuzzer/queue/*",
self.output_target(),
))?
.flatten()
.collect(),
);
}
if self.honggfuzz() {
files.append(
&mut glob(&format!("{}/honggfuzz/corpus/*", self.output_target(),))?
.flatten()
.collect(),
);
}
let mut newest_time = last_synced_created_time;
let valid_files = files.iter().filter(|file| {
if let Ok(metadata) = file.metadata() {
let created = metadata.created().unwrap();
if last_synced_created_time.is_none_or(|time| created > time) {
if newest_time.is_none_or(|time| created > time) {
newest_time = Some(created);
}
return true;
}
}
false
});
for file in valid_files {
if let Some(file_name) = file.file_name() {
if self.honggfuzz() {
let _ = fs::copy(
file,
format!("{}/queue/{:?}", self.output_target(), file_name),
);
}
// Hash the file to get its file name
let hasher = process::Command::new("md5sum").arg(file).output().unwrap();
let hash_vec = hasher.stdout.split(|&b| b == b' ').next().unwrap_or(&[]);
let hash = std::str::from_utf8(hash_vec).unwrap_or_default();
let _ = process::Command::new("cp")
.args([
format!("{}", file.display()),
format!("{}/corpus/{hash}", self.output_target()),
])
.output()
.unwrap();
}
}
last_synced_created_time = newest_time;
last_sync_time = Instant::now();
}
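
> The sync above replaces the old `id:NNNNNN` filename parsing (see the removal of `extract_file_id` further down) with a watermark based on file creation time: only entries created after the last sync are copied, and the newest creation time seen becomes the next watermark. The filter leans on `Option::is_none_or`, stable since Rust 1.82, so a `None` watermark ("never synced yet") lets every file through. A reduced, self-contained sketch of that predicate:

```rust
use std::time::SystemTime;

/// Returns true when `created` is strictly newer than the watermark.
/// A `None` watermark means nothing has been synced yet, so everything passes.
fn newer_than(created: SystemTime, watermark: Option<SystemTime>) -> bool {
    watermark.is_none_or(|t| created > t)
}

fn main() {
    let now = SystemTime::now();
    assert!(newer_than(now, None)); // first sync: copy everything
    assert!(!newer_than(now, Some(now))); // already seen: skip
}
```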

@@ -568,9 +601,9 @@ impl Fuzz {
.env(
"HFUZZ_RUN_ARGS",
format!(
"--input={} -o{} -n{honggfuzz_jobs} -F{} --dynamic_input={}/queue {timeout_option} {dictionary_option}",
&self.corpus(),
"--input={} -o{}/honggfuzz/corpus -n{honggfuzz_jobs} -F{} --dynamic_input={}/queue {timeout_option} {dictionary_option}",
&self.corpus(),
&self.output_target(),
self.max_length,
self.output_target(),
),
@@ -664,7 +697,9 @@ impl Fuzz {
.map_or(String::from("err"), |corpus| format!("{}", corpus.count()));

let engine = match (self.no_afl, self.no_honggfuzz, self.jobs) {
(false, _, _) => FuzzingEngines::AFLPlusPlus,
(false, false, 1) => FuzzingEngines::AFLPlusPlus,
(false, false, _) => FuzzingEngines::All,
(false, true, _) => FuzzingEngines::AFLPlusPlus,
(true, false, _) => FuzzingEngines::Honggfuzz,
(true, true, _) => return Err(anyhow!("Pick at least one fuzzer")),
};
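
> The widened `match` above derives the engine from the `no_afl`/`no_honggfuzz` flags and the job count: with both fuzzers enabled, a single job runs AFL++ alone, while two or more jobs select the new `All` engine. A standalone illustration of that mapping (the enum variants mirror the diff; the helper and `main` are hypothetical):

```rust
#[derive(Debug, PartialEq)]
enum FuzzingEngines {
    All,
    AFLPlusPlus,
    Honggfuzz,
}

/// Mirror of the engine-selection match on (no_afl, no_honggfuzz, jobs).
fn pick_engine(
    no_afl: bool,
    no_honggfuzz: bool,
    jobs: u32,
) -> Result<FuzzingEngines, &'static str> {
    match (no_afl, no_honggfuzz, jobs) {
        (false, false, 1) => Ok(FuzzingEngines::AFLPlusPlus),
        (false, false, _) => Ok(FuzzingEngines::All),
        (false, true, _) => Ok(FuzzingEngines::AFLPlusPlus),
        (true, false, _) => Ok(FuzzingEngines::Honggfuzz),
        (true, true, _) => Err("Pick at least one fuzzer"),
    }
}

fn main() {
    assert_eq!(pick_engine(false, false, 1), Ok(FuzzingEngines::AFLPlusPlus));
    assert_eq!(pick_engine(false, false, 4), Ok(FuzzingEngines::All));
    assert_eq!(pick_engine(true, false, 4), Ok(FuzzingEngines::Honggfuzz));
    assert!(pick_engine(true, true, 1).is_err());
}
```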
@@ -960,14 +995,3 @@ pub fn stop_fuzzers(processes: &mut Vec<process::Child>) -> Result<(), Error> {
}
Ok(())
}

pub fn extract_file_id(file: &Path) -> Option<(u32, String)> {
let file_name = file.file_name()?.to_str()?;
if file_name.len() < 9 {
return None;
}
let (id_part, _) = file_name.split_at(9);
let str_id = id_part.strip_prefix("id:")?;
let file_id = str_id.parse::<u32>().ok()?;
Some((file_id, String::from(file_name)))
}
3 changes: 2 additions & 1 deletion src/bin/cargo-ziggy/main.rs
Expand Up @@ -23,6 +23,7 @@ pub const DEFAULT_UNMODIFIED_TARGET: &str = "automatically guessed";

#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, ValueEnum)]
pub enum FuzzingEngines {
All,
AFLPlusPlus,
Honggfuzz,
}
@@ -242,7 +243,7 @@ pub struct Minimize {
#[clap(short, long, value_name = "NUM", default_value_t = 1)]
jobs: u32,

#[clap(short, long, value_enum, default_value_t = FuzzingEngines::AFLPlusPlus)]
#[clap(short, long, value_enum, default_value_t = FuzzingEngines::All)]
engine: FuzzingEngines,
}

41 changes: 38 additions & 3 deletions src/bin/cargo-ziggy/minimize.rs
@@ -1,9 +1,10 @@
use crate::{find_target, Build, FuzzingEngines, Minimize};
use anyhow::{Context, Result};
use anyhow::{anyhow, Context, Result};
use std::{
env,
fs::{self, File},
process,
process, thread,
time::Duration,
};

impl Minimize {
@@ -19,11 +20,33 @@ impl Minimize {
self.target =
find_target(&self.target).context("⚠️ couldn't find target when minimizing")?;

if fs::read_dir(self.output_corpus()).is_ok() {
return Err(anyhow!(
"Directory {} exists, please move it before running minimization",
self.output_corpus()
));
}

let entries = fs::read_dir(self.input_corpus())?;
let original_count = entries.filter_map(|entry| entry.ok()).count();
println!("Running minimization on a corpus of {original_count} files");

match self.engine {
FuzzingEngines::All => {
let min_afl = self.clone();
let handle_afl = thread::spawn(move || {
min_afl.minimize_afl().unwrap();
});
thread::sleep(Duration::from_millis(1000));

let min_honggfuzz = self.clone();
let handle_honggfuzz = thread::spawn(move || {
min_honggfuzz.minimize_honggfuzz().unwrap();
});

handle_afl.join().unwrap();
handle_honggfuzz.join().unwrap();
}
FuzzingEngines::AFLPlusPlus => {
self.minimize_afl()?;
}
Expand All @@ -32,8 +55,20 @@ impl Minimize {
}
}

// We rename every file to its md5 hash
let min_entries = fs::read_dir(self.output_corpus())?;
let minimized_count = min_entries.filter_map(|entry| entry.ok()).count();
for file in min_entries.flatten() {
let hasher = process::Command::new("md5sum")
.arg(file.path())
.output()
.unwrap();
let hash_vec = hasher.stdout.split(|&b| b == b' ').next().unwrap_or(&[]);
let hash = std::str::from_utf8(hash_vec).unwrap_or_default();
let _ = fs::rename(file.path(), format!("{}/{hash}", self.output_corpus()));
}

let min_entries_hashed = fs::read_dir(self.output_corpus())?;
let minimized_count = min_entries_hashed.filter_map(|entry| entry.ok()).count();
println!("Minimized corpus contains {minimized_count} files");

Ok(())
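
> Both here and in the fuzz sync loop, files are renamed to the output of `md5sum` so that inputs kept by both AFL++ and Honggfuzz collapse onto a single, content-derived file name. A dependency-free sketch of the same idea, hashing in-process with std's `DefaultHasher` instead of shelling out (an alternative, not what the PR does):

```rust
use std::{
    collections::hash_map::DefaultHasher,
    fs,
    hash::{Hash, Hasher},
    io,
    path::Path,
};

/// Rename `file` (inside `dir`) to a hash of its contents, so duplicate
/// inputs produced by different minimizers end up as a single entry.
fn rename_to_content_hash(dir: &Path, file: &Path) -> io::Result<()> {
    let bytes = fs::read(file)?;
    let mut hasher = DefaultHasher::new();
    bytes.hash(&mut hasher);
    // 16 hex chars from a 64-bit hash; md5sum gives 32, but the role is the
    // same: a stable, content-derived name.
    let name = format!("{:016x}", hasher.finish());
    fs::rename(file, dir.join(name))
}
```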
1 change: 1 addition & 0 deletions tests/arbitrary_fuzz.rs
@@ -28,6 +28,7 @@ fn kill_subprocesses_recursively(pid: &str) {
}
}

#[allow(clippy::zombie_processes)]
#[test]
fn integration() {
let unix_time = format!(
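
> The `#[allow(clippy::zombie_processes)]` added to each integration test silences a lint that fires when a `Child` returned by `Command::spawn` can be dropped without `wait()` being called; these tests deliberately kill the spawned fuzzer tree out-of-band instead of waiting on it. A minimal, illustrative example of the pattern the lint objects to:

```rust
use std::process::Command;

#[allow(clippy::zombie_processes)]
fn main() {
    // The Child is never wait()ed on; on Unix it can linger as a zombie
    // until the parent exits, which is what clippy::zombie_processes flags.
    let _child = Command::new("sleep").arg("60").spawn().unwrap();
    // ... the test later kills the whole process tree externally.
}
```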
1 change: 1 addition & 0 deletions tests/asan_fuzz.rs
@@ -28,6 +28,7 @@ fn kill_subprocesses_recursively(pid: &str) {
}
}

#[allow(clippy::zombie_processes)]
#[test]
fn asan_crashes() {
// Not optimal but seems to work fine
7 changes: 4 additions & 3 deletions tests/url_fuzz.rs
@@ -1,5 +1,5 @@
use std::{
env,
env, fs,
path::PathBuf,
process, thread,
time::{Duration, SystemTime, UNIX_EPOCH},
@@ -28,6 +28,7 @@ fn kill_subprocesses_recursively(pid: &str) {
}
}

#[allow(clippy::zombie_processes)]
#[test]
fn integration() {
let unix_time = format!(
@@ -119,6 +120,8 @@ fn integration() {
.join("minimization_afl.log")
.is_file());

fs::remove_dir_all(temp_dir_path.join("url-fuzz").join("corpus_minimized")).unwrap();

// cargo ziggy minimize -e honggfuzz
let minimization = process::Command::new(&cargo_ziggy)
.arg("ziggy")
@@ -136,7 +139,6 @@ fn integration() {
.join("minimization_honggfuzz.log")
.is_file());

/* Removed until https://github.com/mozilla/grcov/issues/1240 has a fix
// cargo ziggy cover
let coverage = process::Command::new(&cargo_ziggy)
.arg("ziggy")
@@ -152,7 +154,6 @@ fn integration() {
.join("coverage")
.join("index.html")
.is_file());
*/

// cargo ziggy plot
let plot = process::Command::new(&cargo_ziggy)