Fix source map integration tests (#220)
* add combined JSON compilation flag for integration tests

* gitignore compiled combined.json

* remove some unwraps that caused crashes

* add exit code for integration test failures

* add differentiation between fuzzing failures and panics

* fix local file path concatenation
publicqi authored Oct 2, 2023
1 parent 6c66a07 commit e265585
Showing 3 changed files with 50 additions and 10 deletions.
1 change: 1 addition & 0 deletions .gitignore
@@ -16,3 +16,4 @@ work_dir
.z3-trace
*.code-workspace
*.txt
combined.json
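
For context on what the compiled combined.json contains: the new --combined-json bin-runtime,srcmap-runtime flag added to the solc call in integration_test.py below makes solc write a single combined.json into the -o output directory, keyed under "contracts" by "source_path:ContractName" with one field per requested artifact, and that generated file is what this .gitignore entry excludes. A minimal Python sketch of inspecting such a file; the path and the printed summary are illustrative assumptions, not part of this commit:

import json

# Illustrative location; each test project is compiled into its own directory via solc -o.
combined_path = "tests/evm/multi-contract/combined.json"

with open(combined_path) as f:
    combined = json.load(f)

# "contracts" maps "source_path:ContractName" to the fields requested with
# --combined-json, here "bin-runtime" and "srcmap-runtime".
for name, artifact in combined["contracts"].items():
    runtime_bin = artifact.get("bin-runtime", "")
    srcmap = artifact.get("srcmap-runtime", "")
    # srcmap-runtime is a ";"-separated list of "offset:length:file:jump" entries,
    # with empty fields repeating the previous entry's value.
    print(f"{name}: {len(runtime_bin) // 2} runtime bytes, {len(srcmap.split(';'))} source map entries")
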
22 changes: 20 additions & 2 deletions integration_test.py
@@ -7,6 +7,8 @@

TIMEOUT_BIN = "timeout" if os.name == "posix" else "gtimeout"

crashed_any = False

def read_onchain_tests():
tests = ""
with open("onchain_tests.txt", "r") as file:
@@ -17,17 +19,19 @@ def read_onchain_tests():
return tests

def test_one(path):
global crashed_any
# cleanup
os.system(f"rm -rf {path}/build")

# compile with solc
p = subprocess.run(
" ".join(["solc", f"{path}/*.sol", "-o", f"{path}/",
- "--bin", "--abi", "--overwrite", "--base-path", "."]),
+ "--bin", "--abi", "--overwrite", "--base-path", ".", "--combined-json", "bin-runtime,srcmap-runtime"]),
shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)

if b"Error" in p.stderr or b"Error" in p.stdout:
print(f"Error compiling {path}")
crashed_any = True
return

# run fuzzer and check whether the stdout has string success
@@ -54,6 +58,8 @@ def test_one(path):
print("================ STDOUT =================")
print(p.stdout.decode("utf-8"))
print(f"=== Failed to fuzz {path}")
if b"panicked" in p.stderr or b"panicked" in p.stdout:
crashed_any = True
else:
print(f"=== Success: {path}, Finished in {time.time() - start_time}s")

@@ -63,9 +69,10 @@ def test_one(path):


def test_onchain(test):

global crashed_any
if len(test) != 4:
print(f"=== Invalid test: {test}")
crashed_any = True
return

# randomly sleep for 0 - 30s to avoid peak traffic
@@ -75,11 +82,13 @@

if chain not in ["eth", "bsc", "polygon"]:
print(f"=== Unsupported chain: {chain}")
crashed_any = True
return

etherscan_key = os.getenv(f"{chain.upper()}_ETHERSCAN_API_KEY")
if etherscan_key is None:
print(f"=== No etherscan api key for {chain}")
crashed_any = True
return
my_env = os.environ.copy()
my_env["ETH_RPC_URL"] = os.getenv(f"{chain.upper()}_RPC_URL")
@@ -112,6 +121,12 @@ def test_onchain(test):
print(f"=== Success: Tested onchain for contracts: {name}, Finished in {time.time() - start_time}s")
open(f"res_{name}.txt", "w+").write(p.stderr.decode("utf-8") + " ".join(cmd) + "\n" + p.stdout.decode("utf-8"))
return
if b"panicked" in p.stderr or b"panicked" in p.stdout:
crashed_any = True
print("================ STDERR =================")
print(p.stderr.decode("utf-8"))
print("================ STDOUT =================")
print(p.stdout.decode("utf-8"))
time.sleep(30)


@@ -172,3 +187,6 @@ def build_flash_loan_v2_fuzzer():
tests = read_onchain_tests()
with multiprocessing.Pool(10) as p:
p.map(test_onchain, tests)

if crashed_any:
exit(1)
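
The changes above separate two failure modes: a run where the fuzzer simply did not report success, and a run where the fuzzer itself panicked (or compilation and setup failed); only the latter kind sets crashed_any and therefore the new non-zero exit code. A condensed sketch of that classification; the success marker is left to the caller because the string test_one actually checks lives in a part of the script not shown in this diff:

import subprocess

def classify(p: subprocess.CompletedProcess, success_marker: bytes) -> str:
    # success_marker is an assumption here; the real check sits in the
    # elided portion of test_one.
    out = p.stdout + p.stderr
    if success_marker in out:
        return "success"
    if b"panicked" in out:
        return "panic"           # counts toward crashed_any, so the script exits 1
    return "fuzz-failure"        # reported, but does not fail the run on its own

With the new check at the bottom of the script, a CI job wrapping integration_test.py fails on panics and setup errors without treating every unsuccessful fuzz attempt as fatal.
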
37 changes: 29 additions & 8 deletions src/evm/contract_utils.rs
@@ -700,7 +700,10 @@ pub fn save_builder_addr_source_code(build_job_result: &BuildJobResult, addr: &E
let mut files_downloaded = HashSet::<String>::new();

let addr_dir = format!("{}/sources/{:?}", work_dir, addr);
- std::fs::create_dir_all(addr_dir.clone()).unwrap();
+ let path = Path::new(addr_dir.as_str());
+ if !path.exists() {
+     std::fs::create_dir_all(path).unwrap();
+ }

for (_pc, loc) in src_map {
match loc.file.clone() {
@@ -709,7 +712,11 @@ pub fn save_builder_addr_source_code(build_job_result: &BuildJobResult, addr: &E
if file.contains("/") {
// we make the parent directory
let parent_dir = format!("{}/{}", addr_dir, file.split("/").take(file.split("/").count() - 1).collect::<Vec<&str>>().join("/"));
- std::fs::create_dir_all(parent_dir).unwrap();
+ let path = Path::new(parent_dir.as_str());
+ // same as above, it's okay to skip
+ if !path.exists() {
+     std::fs::create_dir_all(path).unwrap();
+ }
}
let file_path = format!("{}/{}", addr_dir, file);
println!("Downloading {} to {}", &file, &file_path);
@@ -733,27 +740,41 @@ pub fn save_builder_addr_source_code(build_job_result: &BuildJobResult, addr: &E
pub fn copy_local_source_code(source_dir_pattern: &String, work_dir: &String, addr_map: &ProjectSourceMapTy, base_path: &String) {
for (addr, src_map) in addr_map.clone() {
// each addr has its own source map
if src_map.is_none() {
continue;
}
let src_map = src_map.unwrap();
let mut files_copied = HashSet::<String>::new();
// mkdir -p work_dir/addr
let addr_dir = format!("{}/sources/{:?}", work_dir, addr);
- std::fs::create_dir_all(addr_dir.clone()).unwrap();
+ let path = Path::new(addr_dir.as_str());
+ if !path.exists() {
+     std::fs::create_dir_all(path).unwrap();
+ }

for (_pc, loc) in src_map {
match loc.file {
Some(file) => {
// copy file to work_dir/sources/addr/file
if !files_copied.contains(&file) {
- let file_path = if base_path.len() > 0 {
-     format!("{}{}/{}", source_dir_pattern.replace("*", ""), base_path, file)
- } else {
-     format!("{}/{}", source_dir_pattern.replace("*", ""), file)
+ let file_path = match Path::new(file.as_str()).exists() {
+     true => file.to_string(),
+     false => {
+         if base_path.len() > 0 {
+             format!("{}{}/{}", source_dir_pattern.replace("*", ""), base_path, file)
+         } else {
+             format!("{}/{}", source_dir_pattern.replace("*", ""), file)
+         }
+     }
};

if Path::new(&file_path).exists() {
// NOTICE, HERE PATH TRAVERSAL IS POSSIBLE
println!("Copying {} to {}", &file_path, &addr_dir);
- std::fs::create_dir_all(addr_dir.clone()).unwrap();
+ let path = Path::new(&addr_dir);
+ if !path.exists() {
+     std::fs::create_dir_all(path).unwrap();
+ }
if file.contains("/") {
// we make the parent directory
let parent_dir = format!("{}/{}", addr_dir, file.split("/").take(file.split("/").count() - 1).collect::<Vec<&str>>().join("/"));
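
The contract_utils.rs side of the fix comes down to two patterns: create a directory only when it does not already exist, and use a source-map file path directly when it already resolves on disk, otherwise rebuilding it from source_dir_pattern plus the optional base_path. A rough Python rendering of that resolution rule for comparison with the Rust above; the helper is illustrative and not part of the commit:

import os

def resolve_source_path(file: str, source_dir_pattern: str, base_path: str) -> str:
    # Use the path recorded in the source map as-is when it already exists.
    if os.path.exists(file):
        return file
    # Otherwise rebuild it the way copy_local_source_code now does.
    root = source_dir_pattern.replace("*", "")
    if base_path:
        return f"{root}{base_path}/{file}"
    return f"{root}/{file}"

# os.makedirs(..., exist_ok=True) plays the role of the guarded create_dir_all
# calls added above; the address below is a placeholder.
os.makedirs("work_dir/sources/0x0000000000000000000000000000000000000000", exist_ok=True)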
