Skip to content
This repository has been archived by the owner on Oct 19, 2024. It is now read-only.

feat(solc): compiler pipeline improvements #866

Merged
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
18 changes: 17 additions & 1 deletion ethers-solc/src/artifacts.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ use colored::Colorize;
use md5::Digest;
use semver::Version;
use std::{
collections::BTreeMap,
collections::{BTreeMap, HashSet},
convert::TryFrom,
fmt, fs,
path::{Path, PathBuf},
Expand Down Expand Up @@ -595,6 +595,22 @@ impl CompilerOutput {
pub fn split(self) -> (SourceFiles, OutputContracts) {
(SourceFiles(self.sources), OutputContracts(self.contracts))
}

/// Retains only those files the given iterator yields
///
/// In other words, removes all contracts for files not included in the iterator
pub fn retain_files<'a, I>(&mut self, files: I)
where
    I: IntoIterator<Item = &'a str>,
{
    // Collect once into a set so each membership check below is O(1).
    let keep: HashSet<_> = files.into_iter().collect();

    self.contracts.retain(|path, _| keep.contains(path.as_str()));
    self.sources.retain(|path, _| keep.contains(path.as_str()));
    // Errors without a source location are always kept, since they cannot be
    // attributed to any single file.
    self.errors.retain(|err| match err.source_location.as_ref() {
        Some(location) => keep.contains(location.file.as_str()),
        None => true,
    });
}
}

/// A wrapper helper type for the `Contracts` type alias
Expand Down
46 changes: 28 additions & 18 deletions ethers-solc/src/cache.rs
Original file line number Diff line number Diff line change
Expand Up @@ -47,10 +47,21 @@ impl SolFilesCache {
self.files.is_empty()
}

/// How many entries the cache contains, where each entry represents a source file
pub fn len(&self) -> usize {
    self.files.len()
}

/// How many `Artifacts` this cache references, where a source file can have multiple artifacts
pub fn artifacts_len(&self) -> usize {
    // Accumulate the artifact count of every cache entry.
    self.entries().fold(0, |total, entry| total + entry.artifacts().count())
}

/// Returns an iterator over all `CacheEntry` this cache contains
///
/// Yields the values of the internal file -> entry map.
pub fn entries(&self) -> impl Iterator<Item = &CacheEntry> {
    self.files.values()
}

/// Returns the corresponding `CacheEntry` for the file if it exists
pub fn entry(&self, file: impl AsRef<Path>) -> Option<&CacheEntry> {
self.files.get(file.as_ref())
Expand Down Expand Up @@ -117,7 +128,7 @@ impl SolFilesCache {
let file = fs::File::create(path).map_err(|err| SolcError::io(err, path))?;
tracing::trace!(
"writing cache with {} entries to json file: \"{}\"",
self.files.len(),
self.len(),
path.display()
);
serde_json::to_writer_pretty(file, self)?;
Expand Down Expand Up @@ -528,7 +539,7 @@ pub(crate) struct ArtifactsCacheInner<'a, T: ArtifactOutput> {
/// [`crate::ArtifactOutput::on_output()`] all artifacts, their disk paths, are determined and
/// can be populated before the updated [`crate::SolFilesCache`] is finally written to disk,
/// see [`Cache::finish()`]
pub dirty_entries: HashMap<PathBuf, (CacheEntry, HashSet<Version>)>,
pub dirty_source_files: HashMap<PathBuf, (CacheEntry, HashSet<Version>)>,
/// the file hashes
pub content_hashes: HashMap<PathBuf, String>,
}
Expand Down Expand Up @@ -562,11 +573,11 @@ impl<'a, T: ArtifactOutput> ArtifactsCacheInner<'a, T> {
///
/// If there is already an entry available for the file the given version is added to the set
fn insert_new_cache_entry(&mut self, file: &Path, source: &Source, version: Version) {
if let Some((_, versions)) = self.dirty_entries.get_mut(file) {
if let Some((_, versions)) = self.dirty_source_files.get_mut(file) {
versions.insert(version);
} else {
let entry = self.create_cache_entry(file, source);
self.dirty_entries.insert(file.to_path_buf(), (entry, HashSet::from([version])));
self.dirty_source_files.insert(file.to_path_buf(), (entry, HashSet::from([version])));
}
}

Expand Down Expand Up @@ -619,22 +630,20 @@ impl<'a, T: ArtifactOutput> ArtifactsCacheInner<'a, T> {
if let Some(hash) = self.content_hashes.get(file) {
if let Some(entry) = self.cache.entry(&file) {
if entry.content_hash.as_bytes() != hash.as_bytes() {
tracing::trace!(
"changed content hash for cached artifact \"{}\"",
file.display()
);
tracing::trace!("changed content hash for source file \"{}\"", file.display());
return true
}
if self.project.solc_config != entry.solc_config {
tracing::trace!(
"changed solc config for cached artifact \"{}\"",
file.display()
);
tracing::trace!("changed solc config for source file \"{}\"", file.display());
return true
}

if !entry.contains_version(version) {
tracing::trace!("missing linked artifacts for version \"{}\"", version);
tracing::trace!(
"missing linked artifacts for source file `{}` for version \"{}\"",
file.display(),
version
);
return true
}

Expand Down Expand Up @@ -689,7 +698,7 @@ impl<'a, T: ArtifactOutput> ArtifactsCache<'a, T> {

// read all artifacts
let cached_artifacts = if project.paths.artifacts.exists() {
tracing::trace!("reading artifacts from cache..");
tracing::trace!("reading artifacts from cache...");
// if we failed to read the whole set of artifacts we use an empty set
let artifacts = cache.read_artifacts::<T::Artifact>().unwrap_or_default();
tracing::trace!("read {} artifacts from cache", artifacts.artifact_files().count());
Expand All @@ -704,7 +713,7 @@ impl<'a, T: ArtifactOutput> ArtifactsCache<'a, T> {
edges,
project,
filtered: Default::default(),
dirty_entries: Default::default(),
dirty_source_files: Default::default(),
content_hashes: Default::default(),
};

Expand Down Expand Up @@ -755,7 +764,7 @@ impl<'a, T: ArtifactOutput> ArtifactsCache<'a, T> {
let ArtifactsCacheInner {
mut cache,
mut cached_artifacts,
mut dirty_entries,
mut dirty_source_files,
filtered,
project,
..
Expand All @@ -771,7 +780,7 @@ impl<'a, T: ArtifactOutput> ArtifactsCache<'a, T> {
// the versions, so we add the artifacts on a file by file basis
for (file, artifacts) in written_artifacts.as_ref() {
let file_path = Path::new(&file);
if let Some((entry, versions)) = dirty_entries.get_mut(file_path) {
if let Some((entry, versions)) = dirty_source_files.get_mut(file_path) {
entry.insert_artifacts(artifacts.iter().map(|(name, artifacts)| {
let artifacts = artifacts
.iter()
Expand Down Expand Up @@ -800,7 +809,8 @@ impl<'a, T: ArtifactOutput> ArtifactsCache<'a, T> {
}

// add the new cache entries to the cache file
cache.extend(dirty_entries.into_iter().map(|(file, (entry, _))| (file, entry)));
cache
.extend(dirty_source_files.into_iter().map(|(file, (entry, _))| (file, entry)));

cache.strip_artifact_files_prefixes(project.artifacts_path());
// write to disk
Expand Down
11 changes: 11 additions & 0 deletions ethers-solc/src/compile/contracts.rs
Original file line number Diff line number Diff line change
Expand Up @@ -96,6 +96,17 @@ impl VersionedContracts {
})
}

/// Returns an iterator over (`file`, `name`, `Contract`, `Version`)
pub fn contracts_with_files_and_version(
    &self,
) -> impl Iterator<Item = (&String, &String, &Contract, &Version)> {
    // Flatten the file -> name -> versioned-contracts nesting into flat tuples.
    self.0.iter().flat_map(|(file, contracts)| {
        contracts.iter().flat_map(move |(name, versioned)| {
            versioned
                .iter()
                .map(move |vc| (file, name, &vc.contract, &vc.version))
        })
    })
}

/// Returns an iterator over all contracts and their source names.
///
/// ```
Expand Down
40 changes: 36 additions & 4 deletions ethers-solc/src/compile/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,11 +4,10 @@ use crate::{
utils, CompilerInput, CompilerOutput,
};
use semver::{Version, VersionReq};
use serde::{de::DeserializeOwned, Serialize};
use serde::{de::DeserializeOwned, Deserialize, Serialize};

use std::{
fmt,
fmt::Formatter,
io::BufRead,
path::{Path, PathBuf},
process::{Command, Output, Stdio},
Expand Down Expand Up @@ -173,7 +172,7 @@ impl From<SolcVersion> for Version {
}

impl fmt::Display for SolcVersion {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
// Renders the version via its `AsRef` target's `Display` impl.
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
    let version = self.as_ref();
    write!(f, "{}", version)
}
}
Expand All @@ -186,7 +185,7 @@ impl fmt::Display for SolcVersion {
/// 1. `SOLC_PATH` environment variable
/// 2. [svm](https://github.com/roynalnaruto/svm-rs)'s `global_version` (set via `svm use <version>`), stored at `<svm_home>/.global_version`
/// 3. `solc` otherwise
#[derive(Debug, Clone, Eq, PartialEq, PartialOrd, Ord)]
#[derive(Debug, Clone, Eq, PartialEq, PartialOrd, Ord, Serialize, Deserialize)]
pub struct Solc {
/// Path to the `solc` executable
pub solc: PathBuf,
Expand All @@ -213,6 +212,16 @@ impl Default for Solc {
}
}

impl fmt::Display for Solc {
    /// Formats as the executable path, followed by any extra args separated by spaces.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self.args.as_slice() {
            [] => write!(f, "{}", self.solc.display()),
            args => write!(f, "{} {}", self.solc.display(), args.join(" ")),
        }
    }
}

impl Solc {
/// A new instance which points to `solc`
pub fn new(path: impl Into<PathBuf>) -> Self {
Expand Down Expand Up @@ -466,6 +475,29 @@ impl Solc {
self.compile(&CompilerInput::new(path)?)
}

/// Same as [`Self::compile()`], but only returns those files which are included in the
/// `CompilerInput`.
///
/// In other words, this removes those files from the `CompilerOutput` that are __not__ included
/// in the provided `CompilerInput`.
///
/// # Example
///
/// ```no_run
/// # fn main() -> Result<(), Box<dyn std::error::Error>> {
/// use ethers_solc::{CompilerInput, Solc};
/// let solc = Solc::default();
/// let input = CompilerInput::new("./contracts")?;
/// let output = solc.compile_exact(&input)?;
/// # Ok(())
/// # }
/// ```
pub fn compile_exact(&self, input: &CompilerInput) -> Result<CompilerOutput> {
    let mut output = self.compile(input)?;
    // Keep only the files that were actually part of the input; the raw output
    // can reference more files than were provided.
    let requested = input.sources.keys().filter_map(|path| path.to_str());
    output.retain_files(requested);
    Ok(output)
}

/// Run `solc --stand-json` and return the `solc`'s output as
/// `CompilerOutput`
///
Expand Down
36 changes: 33 additions & 3 deletions ethers-solc/src/compile/project.rs
Original file line number Diff line number Diff line change
Expand Up @@ -301,6 +301,15 @@ impl CompilerSources {
CompilerSources::Parallel(input, j) => compile_parallel(input, j, settings, paths),
}
}

#[cfg(test)]
#[allow(unused)]
fn sources(&self) -> &VersionedSources {
    // Both variants carry the versioned sources as their first field.
    match self {
        Self::Sequential(sources) | Self::Parallel(sources, _) => sources,
    }
}
}

/// Compiles the input set sequentially and returns an aggregated set of the solc `CompilerOutput`s
Expand Down Expand Up @@ -350,7 +359,11 @@ fn compile_parallel(
paths: &ProjectPathsConfig,
) -> Result<AggregatedCompilerOutput> {
debug_assert!(num_jobs > 1);
tracing::trace!("compile sources in parallel using up to {} solc jobs", num_jobs);
tracing::trace!(
"compile {} sources in parallel using up to {} solc jobs",
input.len(),
num_jobs
);

let mut jobs = Vec::with_capacity(input.len());
for (solc, (version, sources)) in input {
Expand Down Expand Up @@ -384,6 +397,8 @@ fn compile_parallel(
.collect::<Result<Vec<_>>>()
})?;

// TODO need to do post filtering as the output can contain more files than provided in the
Copy link
Owner

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Wdym post filtering?

Copy link
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

oh this should have been removed...no longer relevant

// input
let mut aggregated = AggregatedCompilerOutput::default();
aggregated.extend_all(outputs);

Expand All @@ -395,6 +410,7 @@ fn compile_parallel(
mod tests {
use super::*;
use crate::{project_util::TempProject, MinimalCombinedArtifacts};

use std::path::PathBuf;

#[allow(unused)]
Expand All @@ -415,7 +431,7 @@ mod tests {
let prep = compiler.preprocess().unwrap();
let cache = prep.cache.as_cached().unwrap();
// 3 contracts
assert_eq!(cache.dirty_entries.len(), 3);
assert_eq!(cache.dirty_source_files.len(), 3);
assert!(cache.filtered.is_empty());
assert!(cache.cache.is_empty());

Expand All @@ -435,6 +451,20 @@ mod tests {
let inner = project.project();
let compiler = ProjectCompiler::new(inner).unwrap();
let prep = compiler.preprocess().unwrap();
assert!(prep.cache.as_cached().unwrap().dirty_entries.is_empty())
assert!(prep.cache.as_cached().unwrap().dirty_source_files.is_empty())
}

// Smoke test that compiles a full real-world project end to end.
// NOTE(review): depends on an external checkout outside this repository
// (`foundry-integration-tests`), hence `#[ignore]` — run manually with
// `cargo test -- --ignored` when the testdata is present.
#[test]
#[ignore]
fn can_compile_real_project() {
    init_tracing();
    let paths = ProjectPathsConfig::builder()
        .root("../../foundry-integration-tests/testdata/solmate")
        .build()
        .unwrap();
    let project = Project::builder().paths(paths).build().unwrap();
    let compiler = ProjectCompiler::new(&project).unwrap();
    let out = compiler.compile().unwrap();
    println!("{}", out);
}
}
Loading