Skip to content

Commit

Permalink
✨ Extend use statements and Data Accounts paths for fuzz snapshots
Browse files Browse the repository at this point in the history
  • Loading branch information
lukacan committed Mar 10, 2024
1 parent 36ed252 commit 23206f1
Show file tree
Hide file tree
Showing 14 changed files with 281 additions and 162 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@ incremented upon a breaking change and the patch version will be incremented for

## [Unreleased]
### Added
- feat/support of automatically obtaining fully qualified paths of Data Accounts Custom types for `accounts_snapshots.rs` ([#141](https://github.com/Ackee-Blockchain/trdelnik/pull/141))
- feat/support of non-corresponding instruction and context names ([#130](https://github.com/Ackee-Blockchain/trdelnik/pull/130))
- feat/refactored and improved program flow during init and build, added activity indicator ([#129](https://github.com/Ackee-Blockchain/trdelnik/pull/129))
- feat/allow solana versions up to v1.17.* and pin Rust 1.77 nightly compiler ([#128](https://github.com/Ackee-Blockchain/trdelnik/pull/128))
Expand Down
1 change: 1 addition & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

1 change: 1 addition & 0 deletions crates/client/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -70,3 +70,4 @@ shellexpand = { workspace = true }
pathdiff = "0.2.1"
solana-banks-client = "<1.18"
indicatif = "0.17.8"
regex = "1.10.3"
2 changes: 1 addition & 1 deletion crates/client/src/cleaner.rs
Original file line number Diff line number Diff line change
Expand Up @@ -54,7 +54,7 @@ impl Cleaner {
fs::remove_dir_all(hfuzz_target_path).await?;
} else {
println!(
"skipping {}/{}/{}/{} directory: not found",
"{SKIP} [{}/{}/{}/{}] directory not found",
TESTS_WORKSPACE_DIRECTORY, FUZZ_TEST_DIRECTORY, FUZZING, HFUZZ_TARGET
)
}
Expand Down
44 changes: 15 additions & 29 deletions crates/client/src/commander.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
use crate::config::Config;
use crate::idl::IdlProgram;
use crate::{
idl::{self, Idl},
idl::{self},
Client,
};
use fehler::{throw, throws};
Expand All @@ -17,6 +18,8 @@ use tokio::{
signal,
};

use crate::constants::*;

#[derive(Error, Debug)]
pub enum Error {
#[error("{0:?}")]
Expand Down Expand Up @@ -173,7 +176,7 @@ impl Commander {

if let Ok(crash_files) = get_crash_files(&crash_dir, &ext) {
if !crash_files.is_empty() {
println!("Error: The crash directory {} already contains crash files from previous runs. \n\nTo run Trdelnik fuzzer with exit code, you must either (backup and) remove the old crash files or alternatively change the crash folder using for example the --crashdir option and the HFUZZ_RUN_ARGS env variable such as:\nHFUZZ_RUN_ARGS=\"--crashdir ./new_crash_dir\"", crash_dir.to_string_lossy());
println!("{ERROR} The crash directory {} already contains crash files from previous runs. \n\nTo run Trdelnik fuzzer with exit code, you must either (backup and) remove the old crash files or alternatively change the crash folder using for example the --crashdir option and the HFUZZ_RUN_ARGS env variable such as:\nHFUZZ_RUN_ARGS=\"--crashdir ./new_crash_dir\"", crash_dir.to_string_lossy());
process::exit(1);
}
}
Expand Down Expand Up @@ -256,7 +259,7 @@ impl Commander {
let crash_file = std::path::Path::new(&self.root as &str).join(crash_file_path);

if !crash_file.try_exists()? {
println!("The crash file {:?} not found!", crash_file);
println!("{ERROR} The crash file [{:?}] not found", crash_file);
throw!(Error::CrashFileNotFound);
}

Expand Down Expand Up @@ -321,7 +324,7 @@ impl Commander {
.unwrap(),
);

let msg = format!("\x1b[92mExpanding\x1b[0m: {package_name}... this may take a while");
let msg = format!("{EXPANDING_PROGRESS_BAR} [{package_name}] ... this may take a while");
progress_bar.set_message(msg);
while mutex.load(std::sync::atomic::Ordering::SeqCst) {
progress_bar.inc(1);
Expand Down Expand Up @@ -354,9 +357,8 @@ impl Commander {
#[throws]
pub async fn expand_program_packages(
packages: &[cargo_metadata::Package],
) -> (Idl, Vec<(String, cargo_metadata::camino::Utf8PathBuf)>) {
let shared_mutex = std::sync::Arc::new(std::sync::Mutex::new(Vec::new()));
let shared_mutex_fuzzer = std::sync::Arc::new(std::sync::Mutex::new(Vec::new()));
) -> Vec<(String, cargo_metadata::camino::Utf8PathBuf, IdlProgram)> {
let shared_mutex_data = std::sync::Arc::new(std::sync::Mutex::new(Vec::new()));

for package in packages.iter() {
let mutex = std::sync::Arc::new(std::sync::atomic::AtomicBool::new(true));
Expand All @@ -373,8 +375,7 @@ impl Commander {
.src_path
.clone();

let c_shared_mutex = std::sync::Arc::clone(&shared_mutex);
let c_shared_mutex_fuzzer = std::sync::Arc::clone(&shared_mutex_fuzzer);
let c_shared_mutex_data = std::sync::Arc::clone(&shared_mutex_data);

let cargo_thread = std::thread::spawn(move || -> Result<(), Error> {
let output = Self::expand_package(&name);
Expand All @@ -388,15 +389,11 @@ impl Commander {
let code = String::from_utf8(output.stdout).expect("Reading stdout failed");

let idl_program = idl::parse_to_idl_program(name, &code)?;
let mut vec = c_shared_mutex
.lock()
.expect("Acquire IdlProgram lock failed");
let mut vec_fuzzer = c_shared_mutex_fuzzer
let mut program_data = c_shared_mutex_data
.lock()
.expect("Acquire Fuzzer data lock failed");
.expect("Acquire Programs Data lock failed");

vec.push(idl_program);
vec_fuzzer.push((code, lib_path));
program_data.push((code, lib_path, idl_program));

Ok(())
} else {
Expand All @@ -409,19 +406,8 @@ impl Commander {
Self::expand_progress_bar(&package.name, &mutex);
cargo_thread.join().unwrap()?;
}
let idl_programs = shared_mutex.lock().unwrap().to_vec();
let codes_libs_pairs = shared_mutex_fuzzer.lock().unwrap().to_vec();

if idl_programs.is_empty() {
throw!(Error::NoProgramsFound);
} else {
(
Idl {
programs: idl_programs,
},
codes_libs_pairs,
)
}
let programs_data = shared_mutex_data.lock().unwrap().to_vec();
programs_data
}
/// Executes a cargo command to expand the Rust source code of a specified package.
///
Expand Down
10 changes: 5 additions & 5 deletions crates/client/src/fuzzer/fuzzer_generator.rs
Original file line number Diff line number Diff line change
@@ -1,16 +1,16 @@
use std::collections::HashMap;

use crate::idl::Idl;
use crate::idl::IdlProgram;
use cargo_metadata::camino::Utf8PathBuf;
use proc_macro2::Ident;
use quote::{format_ident, ToTokens};
use syn::{parse_quote, parse_str};

/// Generates `fuzz_instructions.rs` from [Idl] created from Anchor programs.
pub fn generate_source_code(idl: &Idl) -> String {
let code = idl
.programs
pub fn generate_source_code(code_path: &[(String, Utf8PathBuf, IdlProgram)]) -> String {
let code = code_path
.iter()
.map(|idl_program| {
.map(|(_, _, idl_program)| {
let program_name = &idl_program.name.snake_case;
let fuzz_instructions_module_name = format_ident!("{}_fuzz_instructions", program_name);
let module_name: syn::Ident = parse_str(program_name).unwrap();
Expand Down
140 changes: 128 additions & 12 deletions crates/client/src/fuzzer/snapshot_generator.rs
Original file line number Diff line number Diff line change
Expand Up @@ -16,8 +16,16 @@ use syn::{parse_quote, Attribute, Fields, GenericArgument, Item, ItemStruct, Pat

use anchor_lang::anchor_syn::parser::accounts::parse_account_field;

pub fn generate_snapshots_code(code_path: &[(String, Utf8PathBuf)]) -> Result<String, String> {
let code = code_path.iter().map(|(code, path)| {
use regex::Regex;

use crate::idl::{find_item_path, IdlProgram};

use crate::constants::*;

pub fn generate_snapshots_code(
code_path: &[(String, Utf8PathBuf, IdlProgram)],
) -> Result<String, String> {
let code = code_path.iter().map(|(code, path, idlprogram)| {
let mut mod_program = None::<syn::ItemMod>;
let mut file = File::open(path).map_err(|e| e.to_string())?;
let mut content = String::new();
Expand All @@ -44,11 +52,13 @@ pub fn generate_snapshots_code(code_path: &[(String, Utf8PathBuf)]) -> Result<St

let ix_ctx_pairs = get_ix_ctx_pairs(&items)?;

let (structs, impls, type_aliases) = get_snapshot_structs_and_impls(code, &ix_ctx_pairs)?;
let (structs, impls, type_aliases) =
get_snapshot_structs_and_impls(code, &ix_ctx_pairs, &idlprogram.name.snake_case)?;

let use_statements = quote! {
use trdelnik_client::anchor_lang::{prelude::*, self};
use trdelnik_client::fuzzing::FuzzingError;
use crate::PROGRAM_ID;
}
.into_token_stream();
Ok(format!(
Expand All @@ -65,6 +75,7 @@ pub fn generate_snapshots_code(code_path: &[(String, Utf8PathBuf)]) -> Result<St
fn get_snapshot_structs_and_impls(
code: &str,
ix_ctx_pairs: &[(Ident, GenericArgument)],
name: &String,
) -> Result<(String, String, String), String> {
let mut structs = String::new();
let mut impls = String::new();
Expand Down Expand Up @@ -104,12 +115,21 @@ fn get_snapshot_structs_and_impls(
.map_err(|e| e.to_string())?;

let ix_snapshot_name = format_ident!("{}Snapshot", ix_name);
let wrapped_struct =
create_snapshot_struct(&ix_snapshot_name, ctx_struct_item, &fields_parsed)
.unwrap();
let deser_code =
deserialize_ctx_struct_anchor(&ix_snapshot_name, &fields_parsed)
.map_err(|e| e.to_string())?;
let wrapped_struct = create_snapshot_struct(
&ix_snapshot_name,
ctx_struct_item,
&fields_parsed,
&parse_result,
name,
)
.unwrap();
let deser_code = deserialize_ctx_struct_anchor(
&ix_snapshot_name,
&fields_parsed,
&parse_result,
name,
)
.map_err(|e| e.to_string())?;
structs = format!("{}{}", structs, wrapped_struct.into_token_stream());
impls = format!("{}{}", impls, deser_code.into_token_stream());
unique_ctxs.insert(ctx.clone(), ix_snapshot_name);
Expand Down Expand Up @@ -210,6 +230,8 @@ fn create_snapshot_struct(
snapshot_name: &Ident,
orig_struct: &ItemStruct,
parsed_fields: &[AccountField],
parse_result: &syn::File,
name: &String,
) -> Result<TokenStream, Box<dyn Error>> {
let wrapped_fields = match orig_struct.fields.clone() {
Fields::Named(named) => {
Expand Down Expand Up @@ -243,7 +265,7 @@ fn create_snapshot_struct(
.starts_with("AccountInfo<");
}
else {
println!("\x1b[1;93mWarning\x1b[0m: The context `{}` has a field named `{}` of composite type `{}`. \
println!("{WARNING} The context `{}` has a field named `{}` of composite type `{}`. \
The automatic deserialization of composite types is currently not supported. You will have \
to implement it manually in the generated `accounts_snapshots.rs` file. The field deserialization \
was replaced by a `todo!()` macro. Also, you might want to adapt the corresponding FuzzInstruction \
Expand All @@ -256,9 +278,15 @@ fn create_snapshot_struct(
(true, true) => {
Ok(quote! {pub #field_name: Option<&'info #field_type>,})
}
(true, _) => Ok(quote! {pub #field_name: Option<#field_type>,}),
(true, _) => {
let field_type = construct_full_path(&field_type.to_token_stream(),parse_result,name).unwrap_or(field_type.clone());
Ok(quote! {pub #field_name: Option<#field_type>,})
},
(_, true) => Ok(quote! {pub #field_name: &'info #field_type,}),
_ => Ok(quote! {pub #field_name: #field_type,}),
_ => {
let field_type = construct_full_path(&field_type.to_token_stream(),parse_result,name).unwrap_or(field_type.clone());
Ok(quote! {pub #field_name: #field_type,})
},
}
});

Expand Down Expand Up @@ -302,6 +330,8 @@ fn extract_inner_type(field_type: &Type) -> Option<&Type> {
fn deserialize_ctx_struct_anchor(
snapshot_name: &Ident,
parsed_fields: &[AccountField],
parse_result: &syn::File,
program_name: &String,
) -> Result<TokenStream, Box<dyn Error>> {
let names_deser_pairs: Vec<(TokenStream, TokenStream)> = parsed_fields
.iter()
Expand All @@ -315,6 +345,8 @@ fn deserialize_ctx_struct_anchor(
is_optional,
return_type,
deser_method,
parse_result,
program_name,
),
None if matches!(&f.ty, Ty::UncheckedAccount) => {
acc_unchecked_tokens(&field_name, is_optional)
Expand Down Expand Up @@ -443,7 +475,16 @@ fn deserialize_account_tokens(
is_optional: bool,
return_type: TokenStream,
deser_method: TokenStream,
parse_result: &syn::File,
program_name: &String,
) -> TokenStream {
let return_type = if let Some(with_full_path) =
construct_full_path(&return_type, parse_result, program_name)
{
with_full_path.to_token_stream()
} else {
return_type
};
if is_optional {
let name_str = name.to_string();
// TODO make this more idiomatic
Expand Down Expand Up @@ -538,3 +579,78 @@ fn has_program_attribute(attrs: &Vec<Attribute>) -> bool {
}
false
}

/// Resolves the inner data-account type of an `Account`/`AccountLoader` field
/// to a fully qualified path rooted at the program crate name.
///
/// Two syntactic shapes are recognized for each wrapper type: the source-level
/// struct form (`Account<'info, T>`) and the expanded fully-qualified form
/// (`anchor_lang::accounts::account::Account<T>`). When the inner type `T` is
/// found inside `parse_result`, the returned type has `T` replaced by
/// `{name}{path-to-T}`.
///
/// # Arguments
///
/// * `field_type` - Token stream of the field's type.
/// * `parse_result` - Parsed syntax tree (`syn::File`) of the program source.
/// * `name` - The program (crate) name used as the path root.
///
/// # Returns
///
/// * `Some(Type)` with the fully qualified inner type on success.
/// * `None` when no pattern matches, the item cannot be located, or the
///   rewritten string fails to parse as a `Type`.
///
/// NOTE(review): the rewrite uses plain substring replacement of the inner
/// type name, so an inner name that also occurs as a substring elsewhere in
/// the type string would be replaced there too — presumably never the case
/// for `Account<'info, T>` shapes, but worth confirming.
fn construct_full_path(
    field_type: &TokenStream,
    parse_result: &syn::File,
    name: &String,
) -> Option<Type> {
    // Pattern pairs: (struct-literal syntax, fully-qualified fn syntax) for
    // Account and AccountLoader. Extend this table to support further types.
    let regex_patterns = [
        (
            r"^Account<'info,\s*(.*?)>$",
            r"anchor_lang::accounts::account::Account<([^>]+)>",
        ),
        (
            r"^AccountLoader<'info,\s*(.*?)>$",
            r"anchor_lang::accounts::account_loader::AccountLoader<([^>]+)>",
        ),
    ];

    // Normalize the type expression: drop all spaces so the regexes can
    // match regardless of token-stream formatting.
    let type_as_string = field_type.to_token_stream().to_string().replace(' ', "");

    for (struct_pattern, fn_pattern) in regex_patterns.iter() {
        let struct_re = Regex::new(struct_pattern).unwrap();
        let fn_re = Regex::new(fn_pattern).unwrap();

        // Try the struct-literal form first, then the fully-qualified form.
        let captures = struct_re
            .captures(&type_as_string)
            .or_else(|| fn_re.captures(&type_as_string));

        let caps = match captures {
            Some(caps) => caps,
            // Neither form matched — move on to the next pattern pair.
            None => continue,
        };

        let data_account = caps[1].to_string();
        // The inner type may already be path-qualified (e.g. crate::abcd::XYZ);
        // keep only the last segment, falling back to the whole string.
        let data_account = data_account.split("::").last().unwrap_or(&data_account);

        if let Some(full_path) = find_item_path(data_account, parse_result) {
            let full_final_path = format!("{name}{full_path}");
            let type_with_full_path = type_as_string.replace(data_account, &full_final_path);
            // A parse failure here yields None — matching the original
            // short-circuit once a located item's rewrite fails to parse.
            return syn::parse_str::<Type>(&type_with_full_path).ok();
        }
        // Pattern matched but the item was not found in the tree: fall
        // through and give the remaining pattern pairs a chance.
    }
    None
}
Loading

0 comments on commit 23206f1

Please sign in to comment.