diff --git a/crates/compilers/src/buildinfo.rs b/crates/compilers/src/buildinfo.rs index d71363111..f14564f58 100644 --- a/crates/compilers/src/buildinfo.rs +++ b/crates/compilers/src/buildinfo.rs @@ -22,8 +22,9 @@ pub struct BuildInfo { pub id: String, #[serde(rename = "_format")] pub format: String, - pub solc_version: Version, - pub solc_long_version: Version, + pub input_version: Version, + pub input_version_long: Version, + pub compiler_version: Version, pub input: I, pub output: O, } @@ -92,24 +93,27 @@ impl RawBuildInfo { pub fn new, E: CompilationError, C: CompilerContract>( input: &I, output: &CompilerOutput, + compiler_version: &semver::Version, full_build_info: bool, ) -> Result { - let version = input.version().clone(); + let input_version = input.version().clone(); let build_context = BuildContext::new(input, output)?; let mut hasher = md5::Md5::new(); hasher.update(ETHERS_FORMAT_VERSION); - let solc_short = format!("{}.{}.{}", version.major, version.minor, version.patch); - hasher.update(&solc_short); - hasher.update(version.to_string()); + let input_version_short = + format!("{}.{}.{}", input_version.major, input_version.minor, input_version.patch); + hasher.update(&input_version_short); + hasher.update(compiler_version.to_string()); + hasher.update(input_version.to_string()); let input = serde_json::to_value(input)?; + hasher.update(&serde_json::to_string(&input)?); - hasher.update(&serde_json::to_string(&output)?); - // create the hash for `{_format,solcVersion,solcLongVersion,input}` + // create the hash for `{_format,compilerVersion,inputVersion,inputVersionLong,input}` // N.B. 
this is not exactly the same as hashing the json representation of these values but // the must efficient one let result = hasher.finalize(); @@ -119,8 +123,12 @@ impl RawBuildInfo { if full_build_info { build_info.insert("_format".to_string(), serde_json::to_value(ETHERS_FORMAT_VERSION)?); - build_info.insert("solcVersion".to_string(), serde_json::to_value(&solc_short)?); - build_info.insert("solcLongVersion".to_string(), serde_json::to_value(&version)?); + build_info + .insert("compilerVersion".to_string(), serde_json::to_value(compiler_version)?); + build_info + .insert("inputVersion".to_string(), serde_json::to_value(&input_version_short)?); + build_info + .insert("inputVersionLong".to_string(), serde_json::to_value(&input_version)?); build_info.insert("input".to_string(), input); build_info.insert("output".to_string(), serde_json::to_value(output)?); } @@ -146,7 +154,8 @@ mod tests { v, ); let output = CompilerOutput::::default(); - let raw_info = RawBuildInfo::new(&input, &output, true).unwrap(); + let raw_info = + RawBuildInfo::new(&input, &output, &semver::Version::new(0, 0, 0), true).unwrap(); let _info: BuildInfo> = serde_json::from_str(&serde_json::to_string(&raw_info).unwrap()).unwrap(); } diff --git a/crates/compilers/src/cache.rs b/crates/compilers/src/cache.rs index 8b5696de2..2282da138 100644 --- a/crates/compilers/src/cache.rs +++ b/crates/compilers/src/cache.rs @@ -20,7 +20,7 @@ use semver::Version; use serde::{de::DeserializeOwned, Deserialize, Serialize}; use std::{ collections::{btree_map::BTreeMap, hash_map, BTreeSet, HashMap, HashSet}, - fs, + fs::{self}, path::{Path, PathBuf}, time::{Duration, UNIX_EPOCH}, }; @@ -1051,7 +1051,14 @@ impl<'a, T: ArtifactOutput, C: Compiler> if let Ok(cache) = CompilerCache::read_joined(&project.paths) { if cache.paths == paths && preprocessed == cache.preprocessed { // unchanged project paths and same preprocess cache option - return cache; + if cache.builds.iter().all(|x| { + 
project.paths.build_infos.join(x).with_extension("json").exists() + }) { + return cache; + } else { + // clear all artifacts + let _ = std::fs::remove_dir_all(&project.paths.artifacts); + } } } } diff --git a/crates/compilers/src/compile/project.rs b/crates/compilers/src/compile/project.rs index 7b57f39c8..15d11f878 100644 --- a/crates/compilers/src/compile/project.rs +++ b/crates/compilers/src/compile/project.rs @@ -438,16 +438,22 @@ impl CompilerSources<'_, L, S> { /// Filters out all sources that don't need to be compiled, see [`ArtifactsCache::filter`] fn filter< T: ArtifactOutput, - C: Compiler, + C: Compiler, >( &mut self, cache: &mut ArtifactsCache<'_, T, C>, ) { cache.remove_dirty_sources(); - for versioned_sources in self.sources.values_mut() { - for (version, sources, (profile, _)) in versioned_sources { + for (language, versioned_sources) in self.sources.iter_mut() { + for (version, sources, (profile, settings)) in versioned_sources { + let input = + C::Input::build(sources.clone(), settings.clone(), *language, version.clone()); + let version = compound_version( + cache.project().compiler.compiler_version(&input), + input.version(), + ); trace!("Filtering {} sources for {}", sources.len(), version); - cache.filter(sources, version, profile); + cache.filter(sources, &version, profile); trace!( "Detected {} sources to compile {:?}", sources.dirty().count(), @@ -540,14 +546,16 @@ impl CompilerSources<'_, L, S> { let mut aggregated = AggregatedCompilerOutput::default(); for (input, mut output, profile, actually_dirty) in results { - let version = input.version(); - + let version = compound_version( + project.compiler.compiler_version(&input).clone(), + input.version(), + ); // Mark all files as seen by the compiler for file in &actually_dirty { cache.compiler_seen(file); } - let build_info = RawBuildInfo::new(&input, &output, project.build_info)?; + let build_info = RawBuildInfo::new(&input, &output, &version, project.build_info)?; output.retain_files( 
actually_dirty @@ -583,15 +591,11 @@ fn compile_sequential<'a, C: Compiler>( }; report::compiler_spawn( &compiler.compiler_name(&input), - versions.as_ref(), + &versions, actually_dirty.as_slice(), ); let output = compiler.compile(&input)?; - report::compiler_success( - &compiler.compiler_name(&input), - versions.as_ref(), - &start.elapsed(), - ); + report::compiler_success(&compiler.compiler_name(&input), &versions, &start.elapsed()); Ok((input, output, profile, actually_dirty)) }) @@ -628,13 +632,13 @@ fn compile_parallel<'a, C: Compiler>( let start = Instant::now(); report::compiler_spawn( &compiler.compiler_name(&input), - versions.as_ref(), + &versions, actually_dirty.as_slice(), ); compiler.compile(&input).map(move |output| { report::compiler_success( &compiler.compiler_name(&input), - versions.as_ref(), + &versions, &start.elapsed(), ); (input, output, profile, actually_dirty) @@ -644,6 +648,29 @@ fn compile_parallel<'a, C: Compiler>( }) } +fn compound_version(mut compiler_version: Version, input_version: &Version) -> Version { + if compiler_version != *input_version { + let build = if compiler_version.build.is_empty() { + semver::BuildMetadata::new(&format!( + "{}.{}.{}", + input_version.major, input_version.minor, input_version.patch, + )) + .expect("can't fail due to parsing") + } else { + semver::BuildMetadata::new(&format!( + "{}-{}.{}.{}", + compiler_version.build.as_str(), + input_version.major, + input_version.minor, + input_version.patch, + )) + .expect("can't fail due to parsing") + }; + compiler_version.build = build; + }; + compiler_version +} + #[cfg(test)] #[cfg(all(feature = "project-util", feature = "svm-solc"))] mod tests { diff --git a/crates/compilers/src/compilers/resolc/compiler.rs b/crates/compilers/src/compilers/resolc/compiler.rs index 27fdc9b49..fbe4e08a4 100644 --- a/crates/compilers/src/compilers/resolc/compiler.rs +++ b/crates/compilers/src/compilers/resolc/compiler.rs @@ -75,7 +75,8 @@ impl Compiler for Resolc { impl 
SimpleCompilerName for Resolc { fn compiler_name_default() -> std::borrow::Cow<'static, str> { - "Resolc and Solc".into() + // Single `Resolc` is sufficient because we now add `Solc` to `compiler_version` buildMeta. + "Resolc".into() } } @@ -242,13 +243,13 @@ impl Resolc { rvm::VersionManager::new(true).map_err(|e| SolcError::Message(e.to_string()))?; let binary = if let Some(resolc_version) = _resolc_version { if version_manager.is_installed(resolc_version) { - version_manager.get(resolc_version, _solc_version.clone()).ok() + version_manager.get(resolc_version, _solc_version).ok() } else { None } } else { let versions: Vec = version_manager - .list_available(_solc_version.clone()) + .list_available(_solc_version) .map_err(|e| SolcError::Message(e.to_string()))? .into_iter() .collect(); diff --git a/crates/compilers/src/compilers/solc/mod.rs b/crates/compilers/src/compilers/solc/mod.rs index 1ec43f229..beaa2f174 100644 --- a/crates/compilers/src/compilers/solc/mod.rs +++ b/crates/compilers/src/compilers/solc/mod.rs @@ -489,7 +489,7 @@ mod tests { SolcLanguage::Solidity, v.clone(), ); - let build_info = RawBuildInfo::new(&input, &out_converted, true).unwrap(); + let build_info = RawBuildInfo::new(&input, &out_converted, &v, true).unwrap(); let mut aggregated = AggregatedCompilerOutput::::default(); aggregated.extend(v, build_info, "default", out_converted); assert!(!aggregated.is_unchanged()); diff --git a/crates/compilers/tests/project.rs b/crates/compilers/tests/project.rs index 9e02fdde3..e9f44d832 100644 --- a/crates/compilers/tests/project.rs +++ b/crates/compilers/tests/project.rs @@ -4532,6 +4532,7 @@ fn test_output_hash_cache_invalidation() { let root = Path::new(env!("CARGO_MANIFEST_DIR")).join("../../test-data/dapp-sample"); let paths = ProjectPathsConfig::builder().sources(root.join("src")).lib(root.join("lib")); let mut project = TempProject::::new(paths).unwrap(); + project.project_mut().compiler = resolc(); project.project_mut().build_info = true; // 
First compilation - should compile everything since cache is empty. @@ -4539,10 +4540,27 @@ compiled.assert_success(); assert!(!compiled.is_unchanged(), "First compilation should not be cached"); + project.project_mut().compiler = MultiCompiler { + solidity: SolidityCompiler::Resolc( + Resolc::find_or_install( + &semver::Version::parse("0.1.0-dev.13").unwrap(), + SolcCompiler::default(), + ) + .unwrap(), + ), + ..Default::default() + }; + // Second compilation - should use cache since nothing changed. let compiled = project.compile().unwrap(); compiled.assert_success(); - assert!(compiled.is_unchanged(), "Second compilation should use cache"); + assert!(!compiled.is_unchanged(), "Second compilation should recompile after the compiler change"); + + // Third compilation - should use cache since nothing changed. + let compiled = project.compile().unwrap(); + compiled.assert_success(); + + assert!(compiled.is_unchanged(), "Third compilation should use cache"); // Adding a file to output directory should NOT invalidate cache let artifacts_path = project.project().artifacts_path();