Skip to content

attempt to fix caching #25

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 11 commits into from
May 23, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
31 changes: 20 additions & 11 deletions crates/compilers/src/buildinfo.rs
Original file line number Diff line number Diff line change
Expand Up @@ -22,8 +22,9 @@ pub struct BuildInfo<I, O> {
pub id: String,
#[serde(rename = "_format")]
pub format: String,
pub solc_version: Version,
pub solc_long_version: Version,
pub input_version: Version,
pub input_version_long: Version,
pub compiler_version: Version,
pub input: I,
pub output: O,
}
Expand Down Expand Up @@ -92,24 +93,27 @@ impl<L: Language> RawBuildInfo<L> {
pub fn new<I: CompilerInput<Language = L>, E: CompilationError, C: CompilerContract>(
input: &I,
output: &CompilerOutput<E, C>,
compiler_version: &semver::Version,
full_build_info: bool,
) -> Result<Self> {
let version = input.version().clone();
let input_version = input.version().clone();
let build_context = BuildContext::new(input, output)?;

let mut hasher = md5::Md5::new();

hasher.update(ETHERS_FORMAT_VERSION);

let solc_short = format!("{}.{}.{}", version.major, version.minor, version.patch);
hasher.update(&solc_short);
hasher.update(version.to_string());
let input_version_short =
format!("{}.{}.{}", input_version.major, input_version.minor, input_version.patch);
hasher.update(&input_version_short);
hasher.update(compiler_version.to_string());
hasher.update(input_version.to_string());

let input = serde_json::to_value(input)?;

hasher.update(&serde_json::to_string(&input)?);
hasher.update(&serde_json::to_string(&output)?);

// create the hash for `{_format,solcVersion,solcLongVersion,input}`
// create the hash for `{_format,compilerVersion,inputVersion,inputLongVersion,input}`
// N.B. this is not exactly the same as hashing the json representation of these values but
the most efficient one
let result = hasher.finalize();
Expand All @@ -119,8 +123,12 @@ impl<L: Language> RawBuildInfo<L> {

if full_build_info {
build_info.insert("_format".to_string(), serde_json::to_value(ETHERS_FORMAT_VERSION)?);
build_info.insert("solcVersion".to_string(), serde_json::to_value(&solc_short)?);
build_info.insert("solcLongVersion".to_string(), serde_json::to_value(&version)?);
build_info
.insert("compilerVersion".to_string(), serde_json::to_value(compiler_version)?);
build_info
.insert("inputVersion".to_string(), serde_json::to_value(&input_version_short)?);
build_info
.insert("inputVersionLong".to_string(), serde_json::to_value(&input_version)?);
build_info.insert("input".to_string(), input);
build_info.insert("output".to_string(), serde_json::to_value(output)?);
}
Expand All @@ -146,7 +154,8 @@ mod tests {
v,
);
let output = CompilerOutput::<Error, Contract>::default();
let raw_info = RawBuildInfo::new(&input, &output, true).unwrap();
let raw_info =
RawBuildInfo::new(&input, &output, &semver::Version::new(0, 0, 0), true).unwrap();
let _info: BuildInfo<SolcVersionedInput, CompilerOutput<Error, Contract>> =
serde_json::from_str(&serde_json::to_string(&raw_info).unwrap()).unwrap();
}
Expand Down
11 changes: 9 additions & 2 deletions crates/compilers/src/cache.rs
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@ use semver::Version;
use serde::{de::DeserializeOwned, Deserialize, Serialize};
use std::{
collections::{btree_map::BTreeMap, hash_map, BTreeSet, HashMap, HashSet},
fs,
fs::{self},
path::{Path, PathBuf},
time::{Duration, UNIX_EPOCH},
};
Expand Down Expand Up @@ -1051,7 +1051,14 @@ impl<'a, T: ArtifactOutput<CompilerContract = C::CompilerContract>, C: Compiler>
if let Ok(cache) = CompilerCache::read_joined(&project.paths) {
if cache.paths == paths && preprocessed == cache.preprocessed {
// unchanged project paths and same preprocess cache option
return cache;
if cache.builds.iter().all(|x| {
project.paths.build_infos.join(x).with_extension("json").exists()
}) {
return cache;
} else {
// clear all artifacts
let _ = std::fs::remove_dir_all(&project.paths.artifacts);
}
}
}
}
Expand Down
57 changes: 42 additions & 15 deletions crates/compilers/src/compile/project.rs
Original file line number Diff line number Diff line change
Expand Up @@ -438,16 +438,22 @@ impl<L: Language, S: CompilerSettings> CompilerSources<'_, L, S> {
/// Filters out all sources that don't need to be compiled, see [`ArtifactsCache::filter`]
fn filter<
T: ArtifactOutput<CompilerContract = C::CompilerContract>,
C: Compiler<Language = L>,
C: Compiler<Language = L, Settings = S>,
>(
&mut self,
cache: &mut ArtifactsCache<'_, T, C>,
) {
cache.remove_dirty_sources();
for versioned_sources in self.sources.values_mut() {
for (version, sources, (profile, _)) in versioned_sources {
for (language, versioned_sources) in self.sources.iter_mut() {
for (version, sources, (profile, settings)) in versioned_sources {
let input =
C::Input::build(sources.clone(), settings.clone(), *language, version.clone());
let version = compound_version(
cache.project().compiler.compiler_version(&input),
input.version(),
);
trace!("Filtering {} sources for {}", sources.len(), version);
cache.filter(sources, version, profile);
cache.filter(sources, &version, profile);
trace!(
"Detected {} sources to compile {:?}",
sources.dirty().count(),
Expand Down Expand Up @@ -540,14 +546,16 @@ impl<L: Language, S: CompilerSettings> CompilerSources<'_, L, S> {
let mut aggregated = AggregatedCompilerOutput::default();

for (input, mut output, profile, actually_dirty) in results {
let version = input.version();

let version = compound_version(
project.compiler.compiler_version(&input).clone(),
input.version(),
);
// Mark all files as seen by the compiler
for file in &actually_dirty {
cache.compiler_seen(file);
}

let build_info = RawBuildInfo::new(&input, &output, project.build_info)?;
let build_info = RawBuildInfo::new(&input, &output, &version, project.build_info)?;

output.retain_files(
actually_dirty
Expand Down Expand Up @@ -583,15 +591,11 @@ fn compile_sequential<'a, C: Compiler>(
};
report::compiler_spawn(
&compiler.compiler_name(&input),
versions.as_ref(),
&versions,
actually_dirty.as_slice(),
);
let output = compiler.compile(&input)?;
report::compiler_success(
&compiler.compiler_name(&input),
versions.as_ref(),
&start.elapsed(),
);
report::compiler_success(&compiler.compiler_name(&input), &versions, &start.elapsed());

Ok((input, output, profile, actually_dirty))
})
Expand Down Expand Up @@ -628,13 +632,13 @@ fn compile_parallel<'a, C: Compiler>(
let start = Instant::now();
report::compiler_spawn(
&compiler.compiler_name(&input),
versions.as_ref(),
&versions,
actually_dirty.as_slice(),
);
compiler.compile(&input).map(move |output| {
report::compiler_success(
&compiler.compiler_name(&input),
versions.as_ref(),
&versions,
&start.elapsed(),
);
(input, output, profile, actually_dirty)
Expand All @@ -644,6 +648,29 @@ fn compile_parallel<'a, C: Compiler>(
})
}

/// Combines a compiler version with the version of its input into one [`Version`].
///
/// When the two differ (e.g. `Resolc` driving a `Solc` input), the input's
/// `major.minor.patch` triple is appended to the compiler version's build
/// metadata so downstream consumers (build-info hashing, cache keys) see both
/// toolchain components in a single version. When the versions are equal, the
/// compiler version is returned unchanged.
fn compound_version(mut compiler_version: Version, input_version: &Version) -> Version {
    if compiler_version != *input_version {
        let suffix = format!(
            "{}.{}.{}",
            input_version.major, input_version.minor, input_version.patch
        );
        // Preserve any pre-existing build metadata, separating it from the
        // appended input version with `-` (a valid build-identifier character).
        let raw = if compiler_version.build.is_empty() {
            suffix
        } else {
            format!("{}-{}", compiler_version.build.as_str(), suffix)
        };
        compiler_version.build = semver::BuildMetadata::new(&raw)
            .expect("metadata contains only digits, dots and hyphens, which are valid");
    }
    compiler_version
}

#[cfg(test)]
#[cfg(all(feature = "project-util", feature = "svm-solc"))]
mod tests {
Expand Down
7 changes: 4 additions & 3 deletions crates/compilers/src/compilers/resolc/compiler.rs
Original file line number Diff line number Diff line change
Expand Up @@ -75,7 +75,8 @@ impl Compiler for Resolc {

impl SimpleCompilerName for Resolc {
fn compiler_name_default() -> std::borrow::Cow<'static, str> {
"Resolc and Solc".into()
// Single `Resolc` is sufficient because we now add `Solc` to `compiler_version` buildMeta.
"Resolc".into()
}
}

Expand Down Expand Up @@ -242,13 +243,13 @@ impl Resolc {
rvm::VersionManager::new(true).map_err(|e| SolcError::Message(e.to_string()))?;
let binary = if let Some(resolc_version) = _resolc_version {
if version_manager.is_installed(resolc_version) {
version_manager.get(resolc_version, _solc_version.clone()).ok()
version_manager.get(resolc_version, _solc_version).ok()
} else {
None
}
} else {
let versions: Vec<Binary> = version_manager
.list_available(_solc_version.clone())
.list_available(_solc_version)
.map_err(|e| SolcError::Message(e.to_string()))?
.into_iter()
.collect();
Expand Down
2 changes: 1 addition & 1 deletion crates/compilers/src/compilers/solc/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -489,7 +489,7 @@ mod tests {
SolcLanguage::Solidity,
v.clone(),
);
let build_info = RawBuildInfo::new(&input, &out_converted, true).unwrap();
let build_info = RawBuildInfo::new(&input, &out_converted, &v, true).unwrap();
let mut aggregated = AggregatedCompilerOutput::<SolcCompiler>::default();
aggregated.extend(v, build_info, "default", out_converted);
assert!(!aggregated.is_unchanged());
Expand Down
20 changes: 19 additions & 1 deletion crates/compilers/tests/project.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4532,17 +4532,35 @@ fn test_output_hash_cache_invalidation() {
let root = Path::new(env!("CARGO_MANIFEST_DIR")).join("../../test-data/dapp-sample");
let paths = ProjectPathsConfig::builder().sources(root.join("src")).lib(root.join("lib"));
let mut project = TempProject::<MultiCompiler, ConfigurableArtifacts>::new(paths).unwrap();
project.project_mut().compiler = resolc();
project.project_mut().build_info = true;

// First compilation - should compile everything since cache is empty.
let compiled = project.compile().unwrap();
compiled.assert_success();
assert!(!compiled.is_unchanged(), "First compilation should not be cached");

project.project_mut().compiler = MultiCompiler {
solidity: SolidityCompiler::Resolc(
Resolc::find_or_install(
&semver::Version::parse("0.1.0-dev.13").unwrap(),
SolcCompiler::default(),
)
.unwrap(),
),
..Default::default()
};

// Second compilation - should recompile since the compiler version changed.
let compiled = project.compile().unwrap();
compiled.assert_success();
assert!(compiled.is_unchanged(), "Second compilation should use cache");
assert!(!compiled.is_unchanged(), "Second compilation should use cache");

// Third compilation - should use cache since nothing changed.
let compiled = project.compile().unwrap();
compiled.assert_success();

assert!(compiled.is_unchanged(), "Third compilation should use cache");

// Adding a file to output directory should NOT invalidate cache
let artifacts_path = project.project().artifacts_path();
Expand Down