diff --git a/src/compiler/c.rs b/src/compiler/c.rs index 9476178a5..efeee6438 100644 --- a/src/compiler/c.rs +++ b/src/compiler/c.rs @@ -134,6 +134,7 @@ impl ParsedArguments { /// A generic implementation of the `Compilation` trait for C/C++ compilers. struct CCompilation { parsed_args: ParsedArguments, + is_locally_preprocessed: bool, #[cfg(feature = "dist-client")] preprocessed_input: Vec, executable: PathBuf, @@ -365,7 +366,7 @@ where I: CCompilerImpl, { async fn generate_hash_key( - self: Box, + &mut self, creator: &T, cwd: PathBuf, env_vars: Vec<(OsString, OsString)>, @@ -376,33 +377,29 @@ where cache_control: CacheControl, ) -> Result> { let start_of_compilation = std::time::SystemTime::now(); - let CCompilerHasher { - parsed_args, - executable, - executable_digest, - compiler, - } = *self; - let extra_hashes = hash_all(&parsed_args.extra_hash_files, &pool.clone()).await?; + let extra_hashes = hash_all(&self.parsed_args.extra_hash_files, &pool.clone()).await?; // Create an argument vector containing both preprocessor and arch args, to // use in creating a hash key - let mut preprocessor_and_arch_args = parsed_args.preprocessor_args.clone(); - preprocessor_and_arch_args.extend(parsed_args.arch_args.to_vec()); + let mut preprocessor_and_arch_args = self.parsed_args.preprocessor_args.clone(); + preprocessor_and_arch_args.extend(self.parsed_args.arch_args.to_vec()); // common_args is used in preprocessing too - preprocessor_and_arch_args.extend(parsed_args.common_args.to_vec()); + preprocessor_and_arch_args.extend(self.parsed_args.common_args.to_vec()); - let absolute_input_path: Cow<'_, _> = if parsed_args.input.is_absolute() { - Cow::Borrowed(&parsed_args.input) + let absolute_input_path: Cow<'_, _> = if self.parsed_args.input.is_absolute() { + Cow::Borrowed(&self.parsed_args.input) } else { - Cow::Owned(cwd.join(&parsed_args.input)) + Cow::Owned(cwd.join(&self.parsed_args.input)) }; // Try to look for a cached preprocessing step for this compilation // request. let preprocessor_cache_mode_config = storage.preprocessor_cache_mode_config(); - let too_hard_for_preprocessor_cache_mode = - parsed_args.too_hard_for_preprocessor_cache_mode.is_some(); - if let Some(arg) = &parsed_args.too_hard_for_preprocessor_cache_mode { + let too_hard_for_preprocessor_cache_mode = self + .parsed_args + .too_hard_for_preprocessor_cache_mode + .is_some(); + if let Some(arg) = &self.parsed_args.too_hard_for_preprocessor_cache_mode { debug!( "parse_arguments: Cannot use preprocessor cache because of {:?}", arg @@ -410,9 +407,8 @@ where } let use_preprocessor_cache_mode = { - // Disable preprocessor cache when doing distributed compilation (+ other conditions) - let can_use_preprocessor_cache_mode = !may_dist - && preprocessor_cache_mode_config.use_preprocessor_cache_mode + let can_use_preprocessor_cache_mode = preprocessor_cache_mode_config + .use_preprocessor_cache_mode && !too_hard_for_preprocessor_cache_mode; let mut use_preprocessor_cache_mode = can_use_preprocessor_cache_mode; @@ -441,13 +437,13 @@ where let mut preprocessor_key = if use_preprocessor_cache_mode { preprocessor_cache_entry_hash_key( - &executable_digest, - parsed_args.language, + &self.executable_digest, + self.parsed_args.language, &preprocessor_and_arch_args, &extra_hashes, &env_vars, &absolute_input_path, - compiler.plusplus(), + self.compiler.plusplus(), preprocessor_cache_mode_config, )? } else { @@ -494,17 +490,21 @@ where // the toolchain will not contain the correct path // to invoke the compiler! 
Add the compiler // executable path to try and prevent this - let weak_toolchain_key = - format!("{}-{}", executable.to_string_lossy(), executable_digest); + let weak_toolchain_key = format!( + "{}-{}", + self.executable.to_string_lossy(), + self.executable_digest + ); return Ok(HashResult { key, compilation: Box::new(CCompilation { - parsed_args: parsed_args.to_owned(), + parsed_args: self.parsed_args.to_owned(), + is_locally_preprocessed: false, #[cfg(feature = "dist-client")] - // TODO or is it never relevant since dist? - preprocessed_input: vec![], - executable: executable.to_owned(), - compiler: compiler.to_owned(), + preprocessed_input: PREPROCESSING_SKIPPED_COMPILE_POISON + .to_vec(), + executable: self.executable.to_owned(), + compiler: self.compiler.to_owned(), cwd: cwd.to_owned(), env_vars: env_vars.to_owned(), }), @@ -518,11 +518,12 @@ where } } - let result = compiler + let result = self + .compiler .preprocess( creator, - &executable, - &parsed_args, + &self.executable, + &self.parsed_args, &cwd, &env_vars, may_dist, @@ -530,13 +531,13 @@ where use_preprocessor_cache_mode, ) .await; - let out_pretty = parsed_args.output_pretty().into_owned(); + let out_pretty = self.parsed_args.output_pretty().into_owned(); let result = result.map_err(|e| { debug!("[{}]: preprocessor failed: {:?}", out_pretty, e); e }); - let outputs = parsed_args.outputs.clone(); + let outputs = self.parsed_args.outputs.clone(); let args_cwd = cwd.clone(); let mut preprocessor_result = result.or_else(move |err| { @@ -595,24 +596,24 @@ where trace!( "[{}]: Preprocessor output is {} bytes", - parsed_args.output_pretty(), + self.parsed_args.output_pretty(), preprocessor_result.stdout.len() ); // Create an argument vector containing both common and arch args, to // use in creating a hash key - let mut common_and_arch_args = parsed_args.common_args.clone(); - common_and_arch_args.extend(parsed_args.arch_args.to_vec()); + let mut common_and_arch_args = self.parsed_args.common_args.clone(); + common_and_arch_args.extend(self.parsed_args.arch_args.to_vec()); let key = { hash_key( - &executable_digest, - parsed_args.language, + &self.executable_digest, + self.parsed_args.language, &common_and_arch_args, &extra_hashes, &env_vars, &preprocessor_result.stdout, - compiler.plusplus(), + self.compiler.plusplus(), ) }; @@ -639,15 +640,20 @@ where // A compiler binary may be a symlink to another and so has the same digest, but that means // the toolchain will not contain the correct path to invoke the compiler! Add the compiler // executable path to try and prevent this - let weak_toolchain_key = format!("{}-{}", executable.to_string_lossy(), executable_digest); + let weak_toolchain_key = format!( + "{}-{}", + self.executable.to_string_lossy(), + self.executable_digest + ); Ok(HashResult { key, compilation: Box::new(CCompilation { - parsed_args, + parsed_args: self.parsed_args.clone(), + is_locally_preprocessed: true, #[cfg(feature = "dist-client")] preprocessed_input: preprocessor_result.stdout, - executable, - compiler, + executable: self.executable.clone(), + compiler: self.compiler.clone(), cwd, env_vars, }), @@ -1152,6 +1158,14 @@ fn include_is_too_new( false } +// Used as "preprocessed code" when no preprocessing was done so that compilation fails. That should never +// happen though because, where necessary, the situation is detected and preprocessing is *then* done to +// salvage the situation. 
Previously, an empty u8 vector was used, which is unfortunately a valid C and C++ +// compilation unit and caused errors that only surfaced when linking: the symbols expected from the +// compilation unit were of course not produced. +#[cfg(feature = "dist-client")] +const PREPROCESSING_SKIPPED_COMPILE_POISON: &[u8] = b"([{SCCACHE -*-* INVALID_C_CPP_CODE([{\""; + impl Compilation for CCompilation { fn generate_compile_commands( &self, @@ -1162,21 +1176,12 @@ impl Compilation for CCompilation Option, Cacheable, )> { - let CCompilation { - ref parsed_args, - ref executable, - ref compiler, - ref cwd, - ref env_vars, - .. - } = *self; - - compiler.generate_compile_commands( + self.compiler.generate_compile_commands( path_transformer, - executable, - parsed_args, - cwd, - env_vars, + &self.executable, + &self.parsed_args, + &self.cwd, + &self.env_vars, rewrite_includes_only, ) } @@ -1212,6 +1217,10 @@ impl Compilation for CCompilation Ok((inputs_packager, toolchain_packager, outputs_rewriter)) } + fn is_locally_preprocessed(&self) -> bool { + self.is_locally_preprocessed + } + fn outputs<'a>(&'a self) -> Box + 'a> { Box::new( self.parsed_args diff --git a/src/compiler/compiler.rs b/src/compiler/compiler.rs index 2ae421679..19e879909 100644 --- a/src/compiler/compiler.rs +++ b/src/compiler/compiler.rs @@ -393,7 +393,7 @@ where /// information that can be reused for compilation if necessary. #[allow(clippy::too_many_arguments)] async fn generate_hash_key( - self: Box, + &mut self, creator: &T, cwd: PathBuf, env_vars: Vec<(OsString, OsString)>, @@ -411,7 +411,7 @@ where /// compile and store the result. #[allow(clippy::too_many_arguments)] async fn get_cached_or_compile( - self: Box, + &mut self, service: &server::SccacheService, dist_client: Option>, creator: T, @@ -434,7 +434,7 @@ where .generate_hash_key( &creator, cwd.clone(), - env_vars, + env_vars.clone(), may_dist, &pool, rewrite_includes_only, @@ -498,15 +498,35 @@ where out_pretty, fmt_duration_as_secs(&duration) ); - let stdout = entry.get_stdout(); - let stderr = entry.get_stderr(); let output = process::Output { status: exit_status(0), - stdout, - stderr, + stdout: entry.get_stdout(), + stderr: entry.get_stderr(), }; + + let filtered_outputs = if compilation.is_locally_preprocessed() { + // In this mode, cache entries are exclusively distinguished by their preprocessed + // source contents. But two files may differ in their names and/or the names of + // included files while still producing the same preprocessed output, so they get the + // same cache entry. That entry will have wrong dependency information (file names) in + // the dependency file, except for the compilation unit that originally produced it. + // Since we did local preprocessing, that should already have produced the dependency + // file - just leave that one alone and don't overwrite it from the cache. + outputs + .iter() + .filter(|fobj_source| fobj_source.key != "d") + .cloned() + .collect() + } else { + // In this mode, no local preprocessing was done, so the dependency file (if any) + // has not been created. But in this mode, the cache key also includes a lot of + // information about filenames (and less relevant here, file hashes), so it *is* safe + // to restore the dependency file from the cache.
+ outputs.clone() + }; + let hit = CompileResult::CacheHit(duration); - match entry.extract_objects(outputs.clone(), &pool).await { + match entry.extract_objects(filtered_outputs, &pool).await { Ok(()) => Ok(CacheLookupResult::Success(hit, output)), Err(e) => { if e.downcast_ref::().is_some() { @@ -569,6 +589,29 @@ where // Cache miss, so compile it. let start = Instant::now(); + #[cfg(feature = "dist-client")] + if may_dist + && !compilation.is_locally_preprocessed() + && cache_control == CacheControl::Default + { + // This compilation only had enough information to find and use a cache entry (or to + // run a local compile, which doesn't need locally preprocessed code). + // For distributed compilation, the local preprocessing step still needs to be done. + return self + .get_cached_or_compile( + service, + dist_client, + creator, + storage, + arguments, + cwd, + env_vars, + CacheControl::ForceRecache, + pool, + ) + .await; + } + let (cacheable, dist_type, compiler_result) = dist_or_local_compile( service, dist_client, @@ -934,6 +977,10 @@ where _path_transformer: dist::PathTransformer, ) -> Result; + fn is_locally_preprocessed(&self) -> bool { + true + } + /// Returns an iterator over the results of this compilation. /// /// Each item is a descriptive (and unique) name of the output paired with @@ -2108,7 +2155,7 @@ LLVM version: 6.0", &creator, Ok(MockChild::new(exit_status(0), "preprocessor output", "")), ); - let hasher = match c.parse_arguments(&arguments, ".".as_ref(), &[]) { + let mut hasher = match c.parse_arguments(&arguments, ".".as_ref(), &[]) { CompilerArguments::Ok(h) => h, o => panic!("Bad result from parse_arguments: {:?}", o), }; @@ -2176,7 +2223,7 @@ LLVM version: 6.0", &creator, Ok(MockChild::new(exit_status(0), "preprocessor output", "")), ); - let hasher = match c.parse_arguments(argument, ".".as_ref(), &[]) { + let mut hasher = match c.parse_arguments(argument, ".".as_ref(), &[]) { CompilerArguments::Ok(h) => h, o => panic!("Bad result from parse_arguments: {:?}", o), }; @@ -2285,11 +2332,10 @@ LLVM version: 6.0", }); let cwd = f.tempdir.path(); let arguments = ovec!["-c", "foo.c", "-o", "foo.o"]; - let hasher = match c.parse_arguments(&arguments, ".".as_ref(), &[]) { + let mut hasher = match c.parse_arguments(&arguments, ".".as_ref(), &[]) { CompilerArguments::Ok(h) => h, o => panic!("Bad result from parse_arguments: {:?}", o), }; - let hasher2 = hasher.clone(); let (cached, res) = runtime .block_on(async { hasher @@ -2329,7 +2375,7 @@ LLVM version: 6.0", // There should be no actual compiler invocation. let (cached, res) = runtime .block_on(async { - hasher2 + hasher .get_cached_or_compile( &service, None, @@ -2415,11 +2461,10 @@ LLVM version: 6.0", let cwd = f.tempdir.path(); let arguments = ovec!["-c", "foo.c", "-o", "foo.o"]; - let hasher = match c.parse_arguments(&arguments, ".".as_ref(), &[]) { + let mut hasher = match c.parse_arguments(&arguments, ".".as_ref(), &[]) { CompilerArguments::Ok(h) => h, o => panic!("Bad result from parse_arguments: {:?}", o), }; - let hasher2 = hasher.clone(); let (cached, res) = runtime .block_on(async { hasher @@ -2459,7 +2504,7 @@ LLVM version: 6.0", // There should be no actual compiler invocation. 
let (cached, res) = runtime .block_on(async { - hasher2 + hasher .get_cached_or_compile( &service, Some(dist_client.clone()), @@ -2538,7 +2583,7 @@ LLVM version: 6.0", }); let cwd = f.tempdir.path(); let arguments = ovec!["-c", "foo.c", "-o", "foo.o"]; - let hasher = match c.parse_arguments(&arguments, ".".as_ref(), &[]) { + let mut hasher = match c.parse_arguments(&arguments, ".".as_ref(), &[]) { CompilerArguments::Ok(h) => h, o => panic!("Bad result from parse_arguments: {:?}", o), }; @@ -2631,7 +2676,7 @@ LLVM version: 6.0", let cwd = f.tempdir.path(); let arguments = ovec!["-c", "foo.c", "-o", "foo.o"]; - let hasher = match c.parse_arguments(&arguments, ".".as_ref(), &[]) { + let mut hasher = match c.parse_arguments(&arguments, ".".as_ref(), &[]) { CompilerArguments::Ok(h) => h, o => panic!("Bad result from parse_arguments: {:?}", o), }; @@ -2723,11 +2768,10 @@ LLVM version: 6.0", } let cwd = f.tempdir.path(); let arguments = ovec!["-c", "foo.c", "-o", "foo.o"]; - let hasher = match c.parse_arguments(&arguments, ".".as_ref(), &[]) { + let mut hasher = match c.parse_arguments(&arguments, ".".as_ref(), &[]) { CompilerArguments::Ok(h) => h, o => panic!("Bad result from parse_arguments: {:?}", o), }; - let hasher2 = hasher.clone(); let (cached, res) = runtime .block_on(async { hasher @@ -2759,7 +2803,7 @@ LLVM version: 6.0", assert_eq!(COMPILER_STDERR, res.stderr.as_slice()); // Now compile again, but force recaching. fs::remove_file(&obj).unwrap(); - let (cached, res) = hasher2 + let (cached, res) = hasher .get_cached_or_compile( &service, None, @@ -2846,7 +2890,7 @@ LLVM version: 6.0", ); let cwd = f.tempdir.path(); let arguments = ovec!["-c", "foo.c", "-o", "foo.o"]; - let hasher = match c.parse_arguments(&arguments, ".".as_ref(), &[]) { + let mut hasher = match c.parse_arguments(&arguments, ".".as_ref(), &[]) { CompilerArguments::Ok(h) => h, o => panic!("Bad result from parse_arguments: {:?}", o), }; @@ -2963,7 +3007,7 @@ LLVM version: 6.0", if obj.is_file() { fs::remove_file(&obj).unwrap(); } - let hasher = hasher.clone(); + let mut hasher = hasher.clone(); let (cached, res) = hasher .get_cached_or_compile( &service, diff --git a/src/compiler/gcc.rs b/src/compiler/gcc.rs index aefd7cd54..d295a6ead 100644 --- a/src/compiler/gcc.rs +++ b/src/compiler/gcc.rs @@ -290,6 +290,7 @@ where let mut language_extensions = true; // by default, GCC allows extensions let mut split_dwarf = false; let mut need_explicit_dep_target = false; + let mut dep_path = None; enum DepArgumentRequirePath { NotNeeded, Missing, @@ -371,7 +372,6 @@ where } Some(Output(p)) => output_arg = Some(p.clone()), Some(NeedDepTarget) => { - too_hard_for_preprocessor_cache_mode = Some(arg.to_os_string()); need_explicit_dep_target = true; if let DepArgumentRequirePath::NotNeeded = need_explicit_dep_argument_path { need_explicit_dep_argument_path = DepArgumentRequirePath::Missing; @@ -381,8 +381,9 @@ where dep_flag = OsString::from(arg.flag_str().expect("Dep target flag expected")); dep_target = Some(s.clone()); } - Some(DepArgumentPath(_)) => { + Some(DepArgumentPath(path)) => { need_explicit_dep_argument_path = DepArgumentRequirePath::Provided; + dep_path = Some(path.clone()); } Some(SerializeDiagnostics(path)) => { serialize_diagnostics = Some(path.clone()); @@ -642,6 +643,16 @@ where dependency_args.push(Path::new(&output).with_extension("d").into_os_string()); } + if let Some(path) = dep_path { + outputs.insert( + "d", + ArtifactDescriptor { + path: path.clone(), + optional: false, + }, + ); + } + if let Some(path) = 
serialize_diagnostics { outputs.insert( "dia", @@ -1450,6 +1461,13 @@ mod test { path: "foo.o".into(), optional: false } + ), + ( + "d", + ArtifactDescriptor { + path: "foo.o.d".into(), + optional: false + } ) ); assert_eq!(ovec!["-MF", "foo.o.d"], dependency_args); @@ -1543,6 +1561,13 @@ mod test { path: "foo.o".into(), optional: false } + ), + ( + "d", + ArtifactDescriptor { + path: "foo.o.d".into(), + optional: false + } ) ); assert_eq!(ovec!["-MF", "foo.o.d"], dependency_args); @@ -1578,6 +1603,13 @@ mod test { path: "foo.o".into(), optional: false } + ), + ( + "d", + ArtifactDescriptor { + path: "foo.o.d".into(), + optional: false + } ) ); assert_eq!( @@ -1617,6 +1649,13 @@ mod test { path: "foo.o".into(), optional: false } + ), + ( + "d", + ArtifactDescriptor { + path: "foo.o.d".into(), + optional: false + } ) ); assert_eq!( @@ -1857,6 +1896,13 @@ mod test { path: "foo.o".into(), optional: false } + ), + ( + "d", + ArtifactDescriptor { + path: "foo.o.d".into(), + optional: false + } ) ); assert_eq!( diff --git a/src/compiler/nvcc.rs b/src/compiler/nvcc.rs index c9d4ecd59..8659086ea 100644 --- a/src/compiler/nvcc.rs +++ b/src/compiler/nvcc.rs @@ -1809,6 +1809,13 @@ mod test { path: "foo.o".into(), optional: false } + ), + ( + "d", + ArtifactDescriptor { + path: "foo.o.d".into(), + optional: false + } ) ); assert_eq!( diff --git a/src/compiler/nvhpc.rs b/src/compiler/nvhpc.rs index 0eb2c924a..507e5d1a3 100644 --- a/src/compiler/nvhpc.rs +++ b/src/compiler/nvhpc.rs @@ -352,6 +352,13 @@ mod test { path: "foo.o".into(), optional: false } + ), + ( + "d", + ArtifactDescriptor { + path: "foo.o.d".into(), + optional: false + } ) ); assert_eq!( diff --git a/src/compiler/rust.rs b/src/compiler/rust.rs index 771896ff1..03730f834 100644 --- a/src/compiler/rust.rs +++ b/src/compiler/rust.rs @@ -1311,7 +1311,7 @@ where T: CommandCreatorSync, { async fn generate_hash_key( - self: Box, + &mut self, creator: &T, cwd: PathBuf, env_vars: Vec<(OsString, OsString)>, @@ -1321,35 +1321,11 @@ where _storage: Arc, _cache_control: CacheControl, ) -> Result> { - let RustHasher { - executable, - host, - version, - sysroot, - compiler_shlibs_digests, - #[cfg(feature = "dist-client")] - rlib_dep_reader, - parsed_args: - ParsedArguments { - arguments, - output_dir, - externs, - crate_link_paths, - staticlibs, - crate_name, - crate_types, - dep_info, - emit, - has_json, - profile, - gcno, - target_json, - .. - }, - } = *self; - trace!("[{}]: generate_hash_key", crate_name); + trace!("[{}]: generate_hash_key", self.parsed_args.crate_name); // TODO: this doesn't produce correct arguments if they should be concatenated - should use iter_os_strings - let os_string_arguments: Vec<(OsString, Option)> = arguments + let os_string_arguments: Vec<(OsString, Option)> = self + .parsed_args + .arguments .iter() .map(|arg| { ( @@ -1378,8 +1354,8 @@ where let source_files_and_hashes_and_env_deps = async { let (source_files, env_deps) = get_source_files_and_env_deps( creator, - &crate_name, - &executable, + &self.parsed_args.crate_name, + &self.executable, &filtered_arguments, &cwd, &env_vars, @@ -1391,20 +1367,38 @@ where }; // Hash the contents of the externs listed on the commandline. 
- trace!("[{}]: hashing {} externs", crate_name, externs.len()); - let abs_externs = externs.iter().map(|e| cwd.join(e)).collect::>(); + trace!( + "[{}]: hashing {} externs", + self.parsed_args.crate_name, + self.parsed_args.externs.len() + ); + let abs_externs = self + .parsed_args + .externs + .iter() + .map(|e| cwd.join(e)) + .collect::>(); let extern_hashes = hash_all(&abs_externs, pool); // Hash the contents of the staticlibs listed on the commandline. - trace!("[{}]: hashing {} staticlibs", crate_name, staticlibs.len()); - let abs_staticlibs = staticlibs.iter().map(|s| cwd.join(s)).collect::>(); + trace!( + "[{}]: hashing {} staticlibs", + self.parsed_args.crate_name, + self.parsed_args.staticlibs.len() + ); + let abs_staticlibs = self + .parsed_args + .staticlibs + .iter() + .map(|s| cwd.join(s)) + .collect::>(); let staticlib_hashes = hash_all_archives(&abs_staticlibs, pool); // Hash the content of the specified target json file, if any. let mut target_json_files = Vec::new(); - if let Some(path) = &target_json { + if let Some(path) = &self.parsed_args.target_json { trace!( "[{}]: hashing target json file {}", - crate_name, + self.parsed_args.crate_name, path.display() ); let abs_target_json = cwd.join(path); @@ -1432,7 +1426,7 @@ where // 1. A version m.update(CACHE_VERSION); // 2. compiler_shlibs_digests - for d in compiler_shlibs_digests { + for d in &self.compiler_shlibs_digests { m.update(d.as_bytes()); } let weak_toolchain_key = m.clone().finish(); @@ -1454,7 +1448,7 @@ where // We also exclude `--target` if it specifies a path to a .json file. The file content // is used as hash input below. // If `--target` specifies a string, it continues to be hashed as part of the arguments. - .filter(|&(arg, _)| target_json.is_none() || arg != "--target") + .filter(|&(arg, _)| self.parsed_args.target_json.is_none() || arg != "--target") // A few argument types were not passed in a deterministic order // by older versions of cargo: --extern, -L, --cfg. We'll filter the rest of those // out, sort them, and append them to the rest of the arguments. @@ -1518,7 +1512,7 @@ where // 9. The cwd of the compile. This will wind up in the rlib. cwd.hash(&mut HashToDigest { digest: &mut m }); // 10. The version of the compiler. - version.hash(&mut HashToDigest { digest: &mut m }); + self.version.hash(&mut HashToDigest { digest: &mut m }); // Turn arguments into a simple Vec to calculate outputs. let flat_os_string_arguments: Vec = os_string_arguments @@ -1528,7 +1522,7 @@ where let mut outputs = get_compiler_outputs( creator, - &executable, + &self.executable, flat_os_string_arguments, &cwd, &env_vars, @@ -1546,14 +1540,18 @@ where // rmeta. // // This can go away once the above rustc PR makes it in. - let emit_generates_only_metadata = - !emit.is_empty() && emit.iter().all(|e| e == "metadata" || e == "dep-info"); + let emit_generates_only_metadata = !self.parsed_args.emit.is_empty() + && self + .parsed_args + .emit + .iter() + .all(|e| e == "metadata" || e == "dep-info"); if emit_generates_only_metadata { outputs.retain(|o| o.ends_with(".rlib") || o.ends_with(".rmeta")); } - if emit.contains("metadata") { + if self.parsed_args.emit.contains("metadata") { // rustc currently does not report rmeta outputs with --print file-names // --emit metadata the rlib is printed, and with --emit metadata,link // only the rlib is printed. 
@@ -1569,7 +1567,7 @@ where if !outputs.contains(&rmeta) { outputs.push(rmeta); } - if !emit.contains("link") { + if !self.parsed_args.emit.contains("link") { outputs.retain(|p| *p != lib); } } @@ -1581,7 +1579,7 @@ where let mut outputs = outputs .into_iter() .map(|o| { - let p = output_dir.join(&o); + let p = self.parsed_args.output_dir.join(&o); ( o, ArtifactDescriptor { @@ -1591,8 +1589,8 @@ where ) }) .collect::>(); - let dep_info = if let Some(dep_info) = dep_info { - let p = output_dir.join(&dep_info); + let dep_info = if let Some(dep_info) = &self.parsed_args.dep_info { + let p = self.parsed_args.output_dir.join(dep_info); outputs.insert( dep_info.to_string_lossy().into_owned(), ArtifactDescriptor { @@ -1604,8 +1602,8 @@ where } else { None }; - if let Some(profile) = profile { - let p = output_dir.join(&profile); + if let Some(profile) = &self.parsed_args.profile { + let p = self.parsed_args.output_dir.join(profile); outputs.insert( profile.to_string_lossy().into_owned(), ArtifactDescriptor { @@ -1614,8 +1612,8 @@ where }, ); } - if let Some(gcno) = gcno { - let p = output_dir.join(&gcno); + if let Some(gcno) = &self.parsed_args.gcno { + let p = self.parsed_args.output_dir.join(gcno); outputs.insert( gcno.to_string_lossy().into_owned(), ArtifactDescriptor { @@ -1624,9 +1622,9 @@ where }, ); } - let mut arguments = arguments; + let mut arguments = self.parsed_args.arguments.clone(); // Request color output unless json was requested. The client will strip colors if needed. - if !has_json { + if !self.parsed_args.has_json { arguments.push(Argument::WithValue( "--color", ArgData::Color("always".into()), @@ -1643,20 +1641,20 @@ where Ok(HashResult { key: m.finish(), compilation: Box::new(RustCompilation { - executable, - host, - sysroot, + executable: self.executable.clone(), + host: self.host.clone(), + sysroot: self.sysroot.clone(), arguments, inputs, outputs, - crate_link_paths, - crate_name, - crate_types, + crate_link_paths: self.parsed_args.crate_link_paths.clone(), + crate_name: self.parsed_args.crate_name.clone(), + crate_types: self.parsed_args.crate_types.clone(), dep_info, cwd, env_vars, #[cfg(feature = "dist-client")] - rlib_dep_reader, + rlib_dep_reader: self.rlib_dep_reader.clone(), }), weak_toolchain_key, }) @@ -3386,7 +3384,7 @@ proc_macro false let mut emit = HashSet::new(); emit.insert("link".to_string()); emit.insert("metadata".to_string()); - let hasher = Box::new(RustHasher { + let mut hasher = Box::new(RustHasher { executable: "rustc".into(), host: "x86-64-unknown-unknown-unknown".to_owned(), version: TEST_RUSTC_VERSION.to_string(), @@ -3516,7 +3514,7 @@ proc_macro false f.touch(e.to_str().unwrap()).expect(&s); } pre_func(f.tempdir.path()).expect("Failed to execute pre_func"); - let hasher = Box::new(RustHasher { + let mut hasher = Box::new(RustHasher { executable: "rustc".into(), host: "x86-64-unknown-unknown-unknown".to_owned(), version: TEST_RUSTC_VERSION.to_string(), diff --git a/src/server.rs b/src/server.rs index 76a63ed27..9483ef4f9 100644 --- a/src/server.rs +++ b/src/server.rs @@ -1288,7 +1288,7 @@ where pub async fn start_compile_task( self, compiler: Box>, - hasher: Box>, + mut hasher: Box>, arguments: Vec, cwd: PathBuf, env_vars: Vec<(OsString, OsString)>,
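Reviewer note on the cache-hit path in src/compiler/compiler.rs above: below is a minimal, self-contained sketch of the artifact-filtering rule that the new is_locally_preprocessed() flag drives. It is illustrative only; Artifact and restorable_outputs are hypothetical names, not items from the sccache codebase. The idea is that a hash key derived from preprocessed text does not encode file names, so a cached dependency (.d) file may describe a different translation unit and must not clobber the one the local preprocessor just wrote; a key built without local preprocessing already encodes file names, so the cached .d file can be restored safely.

```rust
/// Illustrative sketch only: `Artifact` and `restorable_outputs` are made-up names,
/// not part of sccache. They model the filtering done on cache hits in the diff above.
#[derive(Clone, Debug, PartialEq)]
struct Artifact {
    key: &'static str, // e.g. "obj" for the object file, "d" for the dependency file
    path: String,
}

/// Pick which cached artifacts may be copied back into the build directory.
fn restorable_outputs(outputs: &[Artifact], is_locally_preprocessed: bool) -> Vec<Artifact> {
    if is_locally_preprocessed {
        // The cache key only hashed preprocessed source, so the cached ".d" file may
        // name a different input file; keep the one local preprocessing produced.
        outputs.iter().filter(|o| o.key != "d").cloned().collect()
    } else {
        // The cache key already covers file names, so restoring the ".d" file is safe.
        outputs.to_vec()
    }
}

fn main() {
    let outputs = vec![
        Artifact { key: "obj", path: "foo.o".into() },
        Artifact { key: "d", path: "foo.o.d".into() },
    ];
    // Locally preprocessed: the dependency file is left alone.
    assert_eq!(restorable_outputs(&outputs, true).len(), 1);
    // Not locally preprocessed: everything, including the ".d" file, is restored.
    assert_eq!(restorable_outputs(&outputs, false).len(), 2);
}
```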