Auto merge of #154724 - Daniel-B-Smith:smithdb3/fix-94878, r=<try>

Compute `crate_hash` from the metadata encoding instead of from the HIR (implements #94878) (draft)
This commit is contained in:
bors
2026-04-27 21:48:56 +00:00
11 changed files with 362 additions and 188 deletions
+5 -2
View File
@@ -562,8 +562,11 @@ pub fn lower_to_hir(tcx: TyCtxt<'_>, (): ()) -> mid_hir::Crate<'_> {
}
// Don't hash unless necessary, because it's expensive.
let opt_hir_hash =
if tcx.needs_crate_hash() { Some(compute_hir_hash(tcx, &owners)) } else { None };
let opt_hir_hash = if tcx.needs_crate_hash() && !tcx.needs_metadata() {
Some(compute_hir_hash(tcx, &owners))
} else {
None
};
let delayed_resolver = Steal::new((resolver, krate));
mid_hir::Crate::new(owners, delayed_ids, delayed_resolver, opt_hir_hash)
+4 -4
View File
@@ -324,10 +324,6 @@ pub fn run_compiler(at_args: &[String], callbacks: &mut (dyn Callbacks + Send))
tcx.ensure_ok().analysis(());
if let Some(metrics_dir) = &sess.opts.unstable_opts.metrics_dir {
dump_feature_usage_metrics(tcx, metrics_dir);
}
if callbacks.after_analysis(compiler, tcx) == Compilation::Stop {
return early_exit();
}
@@ -340,6 +336,10 @@ pub fn run_compiler(at_args: &[String], callbacks: &mut (dyn Callbacks + Send))
let linker = Linker::codegen_and_build_linker(tcx, &*compiler.codegen_backend);
if let Some(metrics_dir) = &sess.opts.unstable_opts.metrics_dir {
dump_feature_usage_metrics(tcx, metrics_dir);
}
tcx.report_unused_features();
Some(linker)
+7 -2
View File
@@ -948,8 +948,13 @@ pub fn create_and_enter_global_ctxt<T, F: for<'tcx> FnOnce(TyCtxt<'tcx>) -> T>(
let definitions = FreezeLock::new(Definitions::new(stable_crate_id));
let stable_crate_ids = FreezeLock::new(StableCrateIdMap::default());
let untracked =
Untracked { cstore, source_span: AppendOnlyIndexVec::new(), definitions, stable_crate_ids };
let untracked = Untracked {
cstore,
source_span: AppendOnlyIndexVec::new(),
definitions,
stable_crate_ids,
local_crate_hash: OnceLock::new(),
};
// We're constructing the HIR here; we don't care what we will
// read, since we haven't even constructed the *input* to
@@ -2,6 +2,9 @@
use std::mem;
use std::sync::Arc;
use rustc_data_structures::fingerprint::Fingerprint;
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_data_structures::svh::Svh;
use rustc_hir::attrs::Deprecation;
use rustc_hir::def::{CtorKind, DefKind};
use rustc_hir::def_id::{CrateNum, DefId, DefIdMap, LOCAL_CRATE};
@@ -9,6 +12,7 @@
use rustc_middle::arena::ArenaAllocatable;
use rustc_middle::bug;
use rustc_middle::metadata::{AmbigModChild, ModChild};
use rustc_middle::middle::debugger_visualizer::DebuggerVisualizerFile;
use rustc_middle::middle::exported_symbols::ExportedSymbol;
use rustc_middle::middle::stability::DeprecationEntry;
use rustc_middle::queries::ExternProviders;
@@ -20,7 +24,7 @@
use rustc_session::StableCrateId;
use rustc_session::cstore::{CrateStore, ExternCrate};
use rustc_span::hygiene::ExpnId;
use rustc_span::{Span, Symbol, kw};
use rustc_span::{Span, Symbol, kw, with_metavar_spans};
use super::{Decodable, DecodeIterator};
use crate::creader::{CStore, LoadedMacro};
@@ -750,4 +754,104 @@ fn provide_cstore_hooks(providers: &mut Providers) {
cdata.imported_source_file(tcx, file_index as u32);
}
};
providers.queries.crate_hash = |tcx: TyCtxt<'_>, _: LocalCrate| {
if tcx.needs_metadata() {
*tcx.untracked()
.local_crate_hash
.get()
.expect("crate_hash(LOCAL_CRATE) called before metadata encoding")
} else {
crate_hash(tcx)
}
};
}
/// Computes the SVH (crate hash) of the local crate from the HIR hash plus
/// every out-of-band input that affects the emitted metadata but is not part
/// of the HIR itself: upstream crate hashes, remapped source file names,
/// debugger visualizer contents, dep-tracking options, the stable crate id,
/// visibilities, and metavar spans.
///
/// NOTE(review): per the `crate_hash` provider override above, this fallback
/// is only used when `!tcx.needs_metadata()`; otherwise the hash captured
/// during metadata encoding is returned instead.
pub(super) fn crate_hash(tcx: TyCtxt<'_>) -> Svh {
    let krate = tcx.hir_crate(());
    // `opt_hir_hash` is only populated when lowering decided a crate hash is
    // needed; hitting this `expect` means the lowering-side condition and
    // this query's condition have drifted apart.
    let hir_body_hash = krate.opt_hir_hash.expect("HIR hash missing while computing crate hash");
    let upstream_crates = upstream_crates(tcx);
    let resolutions = tcx.resolutions(());
    // We hash the final, remapped names of all local source files so we
    // don't have to include the path prefix remapping commandline args.
    // If we included the full mapping in the SVH, we could only have
    // reproducible builds by compiling from the same directory. So we just
    // hash the result of the mapping instead of the mapping itself.
    let mut source_file_names: Vec<_> = tcx
        .sess
        .source_map()
        .files()
        .iter()
        .filter(|source_file| source_file.cnum == LOCAL_CRATE)
        .map(|source_file| source_file.stable_id)
        .collect();
    // Sort so the hash does not depend on source-map insertion order.
    source_file_names.sort_unstable();
    // We have to take care of debugger visualizers explicitly. The HIR (and
    // thus `hir_body_hash`) contains the #[debugger_visualizer] attributes but
    // these attributes only store the file path to the visualizer file, not
    // their content. Yet that content is exported into crate metadata, so any
    // changes to it need to be reflected in the crate hash.
    let debugger_visualizers: Vec<_> = tcx
        .debugger_visualizers(LOCAL_CRATE)
        .iter()
        // We ignore the path to the visualizer file since it's not going to be
        // encoded in crate metadata and we already hash the full contents of
        // the file.
        .map(DebuggerVisualizerFile::path_erased)
        .collect();
    // NOTE: the order of the `hash_stable` calls below is significant —
    // changing it changes the resulting SVH.
    let crate_hash: Fingerprint = tcx.with_stable_hashing_context(|mut hcx| {
        let mut stable_hasher = StableHasher::new();
        hir_body_hash.hash_stable(&mut hcx, &mut stable_hasher);
        upstream_crates.hash_stable(&mut hcx, &mut stable_hasher);
        source_file_names.hash_stable(&mut hcx, &mut stable_hasher);
        debugger_visualizers.hash_stable(&mut hcx, &mut stable_hasher);
        // Spans only affect incremental compilation, so only mix them in when
        // incremental is enabled.
        if tcx.sess.opts.incremental.is_some() {
            let definitions = tcx.untracked().definitions.freeze();
            let mut owner_spans: Vec<_> = tcx
                .hir_crate_items(())
                .definitions()
                .map(|def_id| {
                    let def_path_hash = definitions.def_path_hash(def_id);
                    let span = tcx.source_span(def_id);
                    debug_assert_eq!(span.parent(), None);
                    (def_path_hash, span)
                })
                .collect();
            // Sort by def path hash for a deterministic traversal order.
            owner_spans.sort_unstable_by_key(|bn| bn.0);
            owner_spans.hash_stable(&mut hcx, &mut stable_hasher);
        }
        tcx.sess.opts.dep_tracking_hash(true).hash_stable(&mut hcx, &mut stable_hasher);
        tcx.stable_crate_id(LOCAL_CRATE).hash_stable(&mut hcx, &mut stable_hasher);
        // Hash visibility information since it does not appear in HIR.
        // FIXME: Figure out how to remove `visibilities_for_hashing` by hashing visibilities on
        // the fly in the resolver, storing only their accumulated hash in `ResolverGlobalCtxt`,
        // and combining it with other hashes here.
        resolutions.visibilities_for_hashing.hash_stable(&mut hcx, &mut stable_hasher);
        with_metavar_spans(|mspans| {
            mspans.freeze_and_get_read_spans().hash_stable(&mut hcx, &mut stable_hasher);
        });
        stable_hasher.finish()
    });
    Svh::new(crate_hash)
}
/// Collects the `(StableCrateId, Svh)` pair of every upstream crate, sorted
/// by stable crate id so the result is deterministic regardless of crate
/// numbering — a requirement for feeding it into the crate hash.
fn upstream_crates(tcx: TyCtxt<'_>) -> Vec<(StableCrateId, Svh)> {
    let mut result: Vec<(StableCrateId, Svh)> = Vec::new();
    for &cnum in tcx.crates(()) {
        result.push((tcx.stable_crate_id(cnum), tcx.crate_hash(cnum)));
    }
    // `CrateNum`s are not stable across compilations; order by the stable id.
    result.sort_unstable_by_key(|&(id, _)| id);
    result
}
+229 -74
View File
@@ -2,11 +2,16 @@
use std::collections::hash_map::Entry;
use std::fs::File;
use std::io::{Read, Seek, Write};
use std::ops::Deref;
use std::path::{Path, PathBuf};
use std::sync::Arc;
//use rustc_data_structures::fingerprint::Fingerprint;
//use rustc_data_structures::Svh;
use rustc_data_structures::fx::{FxIndexMap, FxIndexSet};
use rustc_data_structures::memmap::{Mmap, MmapMut};
use rustc_data_structures::owned_slice::slice_owned;
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_data_structures::sync::{par_for_each_in, par_join};
use rustc_data_structures::temp_dir::MaybeTempDir;
use rustc_data_structures::thousands::usize_with_underscores;
@@ -17,6 +22,7 @@
use rustc_hir::find_attr;
use rustc_hir_pretty::id_to_string;
use rustc_middle::dep_graph::WorkProductId;
use rustc_middle::ich::StableHashingContext;
use rustc_middle::middle::dependency_format::Linkage;
use rustc_middle::mir::interpret;
use rustc_middle::query::Providers;
@@ -39,8 +45,14 @@
use crate::errors::{FailCreateFileEncoder, FailWriteFile};
use crate::rmeta::*;
pub(super) struct EncodeContext<'a, 'tcx> {
// Struct to enable split borrows.
/// Encoder that mirrors every byte it writes into a running stable hash, so
/// the crate hash can be derived from the metadata encoding itself.
pub(super) struct ContextEncoder<'a> {
    /// Destination for the encoded bytes (the metadata file being written).
    opaque: opaque::FileEncoder,
    /// Running stable hash of everything written through this encoder; its
    /// final value becomes the crate hash (SVH).
    stable_hasher: StableHasher,
    /// Hashing context required by the `HashStable` impls used above.
    hcx: StableHashingContext<'a>,
}
pub(super) struct EncodeContext<'a, 'tcx> {
tcx: TyCtxt<'tcx>,
feat: &'tcx rustc_feature::Features,
tables: TableBuilders,
@@ -67,6 +79,8 @@ pub(super) struct EncodeContext<'a, 'tcx> {
hygiene_ctxt: &'a HygieneEncodeContext,
// Used for both `Symbol`s and `ByteSymbol`s.
symbol_index_table: FxHashMap<u32, usize>,
encoder: ContextEncoder<'a>,
}
/// If the current crate is a proc-macro, returns early with `LazyArray::default()`.
@@ -80,10 +94,56 @@ macro_rules! empty_proc_macro {
};
}
/// Generates forwarding implementations of the primitive `Encoder` methods
/// for `ContextEncoder`: each value is folded into the running stable hash
/// (which ultimately becomes the crate hash) and then written to the
/// underlying `FileEncoder`.
macro_rules! context_encoder_methods {
    ($($name:ident($ty:ty);)*) => {
        $(
            // `#[inline]` must be inside the repetition so that *every*
            // generated method is marked, not just the first expansion.
            #[inline]
            fn $name(&mut self, value: $ty) {
                value.hash_stable(&mut self.hcx, &mut self.stable_hasher);
                self.opaque.$name(value)
            }
        )*
    }
}
// Forward all primitive-emit methods through `context_encoder_methods!`,
// which hashes each value into `stable_hasher` before writing it to the
// file. The emitters not listed here (e.g. `emit_i8`, `emit_bool`,
// `emit_str`) presumably come from the trait's default implementations,
// which bottom out in the methods below so every byte still flows through
// the hash — TODO(review): confirm against `rustc_serialize::Encoder`.
impl<'a> Encoder for ContextEncoder<'a> {
    context_encoder_methods! {
        emit_usize(usize);
        emit_u128(u128);
        emit_u64(u64);
        emit_u32(u32);
        emit_u16(u16);
        emit_u8(u8);
        emit_isize(isize);
        emit_i128(i128);
        emit_i64(i64);
        emit_i32(i32);
        emit_i16(i16);
        emit_raw_bytes(&[u8]);
    }
}
impl<'a> ContextEncoder<'a> {
    /// Current write position in the underlying file encoder.
    #[inline]
    pub(super) fn position(&self) -> usize {
        self.opaque.position()
    }

    /// Writes the first `m` bytes of the fixed-size array `b` to the output,
    /// hashing exactly the prefix that is written. `write_with` copies the
    /// whole array into the buffer but only advances the position by `m`, so
    /// the trailing `N - m` bytes never become part of the metadata.
    #[inline]
    pub(super) fn write_m_with<const N: usize>(&mut self, b: &[u8; N], m: usize) {
        // Hash only the bytes that will actually land in the metadata.
        (b[..m]).hash_stable(&mut self.hcx, &mut self.stable_hasher);
        self.opaque.write_with(|dest| {
            *dest = *b;
            m
        });
    }
}
macro_rules! encoder_methods {
($($name:ident($ty:ty);)*) => {
#[inline]
$(fn $name(&mut self, value: $ty) {
self.opaque.$name(value)
self.encoder.$name(value)
})*
}
}
@@ -177,25 +237,19 @@ fn encode_span(&mut self, span: Span) {
let last_location = *o.get();
// This cannot underflow. Metadata is written with increasing position(), so any
// previously saved offset must be smaller than the current position.
let offset = self.opaque.position() - last_location;
let offset = self.encoder.position() - last_location;
if offset < last_location {
let needed = bytes_needed(offset);
SpanTag::indirect(true, needed as u8).encode(self);
self.opaque.write_with(|dest| {
*dest = offset.to_le_bytes();
needed
});
self.encoder.write_m_with(&offset.to_le_bytes(), needed);
} else {
let needed = bytes_needed(last_location);
SpanTag::indirect(false, needed as u8).encode(self);
self.opaque.write_with(|dest| {
*dest = last_location.to_le_bytes();
needed
});
self.encoder.write_m_with(&last_location.to_le_bytes(), needed);
}
}
Entry::Vacant(v) => {
let position = self.opaque.position();
let position = self.encoder.position();
v.insert(position);
// Data is encoded with a SpanTag prefix (see below).
span.data().encode(self);
@@ -372,7 +426,7 @@ impl<'a, 'tcx> TyEncoder<'tcx> for EncodeContext<'a, 'tcx> {
const CLEAR_CROSS_CRATE: bool = true;
fn position(&self) -> usize {
self.opaque.position()
self.encoder.position()
}
fn type_shorthands(&mut self) -> &mut FxHashMap<Ty<'tcx>, usize> {
@@ -489,21 +543,21 @@ fn encode_symbol_or_byte_symbol(
) {
// if symbol/byte symbol is predefined, emit tag and symbol index
if Symbol::is_predefined(index) {
self.opaque.emit_u8(SYMBOL_PREDEFINED);
self.opaque.emit_u32(index);
self.encoder.emit_u8(SYMBOL_PREDEFINED);
self.encoder.emit_u32(index);
} else {
// otherwise write it as string or as offset to it
match self.symbol_index_table.entry(index) {
Entry::Vacant(o) => {
self.opaque.emit_u8(SYMBOL_STR);
let pos = self.opaque.position();
self.encoder.emit_u8(SYMBOL_STR);
let pos = self.encoder.position();
o.insert(pos);
emit_str_or_byte_str(self);
}
Entry::Occupied(o) => {
let x = *o.get();
self.emit_u8(SYMBOL_OFFSET);
self.emit_usize(x);
self.encoder.emit_u8(SYMBOL_OFFSET);
self.encoder.emit_usize(x);
}
}
}
@@ -598,7 +652,7 @@ fn encode_source_map(&mut self) -> LazyTable<u32, Option<LazyValue<rustc_span::S
adapted.set_some(on_disk_index, self.lazy(adapted_source_file));
}
adapted.encode(&mut self.opaque)
adapted.encode(&mut self.encoder)
}
fn encode_crate_root(&mut self) -> LazyValue<CrateRoot> {
@@ -682,7 +736,7 @@ macro_rules! stat {
// encode_def_path_table.
let proc_macro_data = stat!("proc-macro-data", || self.encode_proc_macros());
let tables = stat!("tables", || self.tables.encode(&mut self.opaque));
let tables = stat!("tables", || self.tables.encode(&mut self.encoder));
let debugger_visualizers =
stat!("debugger-visualizers", || self.encode_debugger_visualizers());
@@ -720,11 +774,26 @@ macro_rules! stat {
let root = stat!("final", || {
let attrs = tcx.hir_krate_attrs();
let new_hash = Svh::new(self.encoder.stable_hasher.clone().finish());
/*eprintln!("crate: {:?}", tcx.crate_name(LOCAL_CRATE));
eprintln!("crate HASH: {:?}", new_hash);
if let Some(hash) = tcx.untracked().local_crate_hash.get() {
eprintln!("resetting hash: {:?}", hash);
}*/
tcx.untracked().local_crate_hash.set(new_hash).expect("local_crate_hash set twice");
/*let old_hash = tcx.crate_hash(new_hash);
eprintln!("OLD HASH: {:?}", old_hash);
eprintln!("NEW HASH: {:?}", new_hash);
assert_eq!(old_hash, new_hash, "Hash mismatch!");*/
self.lazy(CrateRoot {
header: CrateHeader {
name: tcx.crate_name(LOCAL_CRATE),
triple: tcx.sess.opts.target_triple.clone(),
hash: tcx.crate_hash(LOCAL_CRATE),
hash: new_hash,
is_proc_macro_crate: proc_macro_data.is_some(),
is_stub: false,
},
@@ -786,19 +855,20 @@ macro_rules! stat {
if tcx.sess.opts.unstable_opts.meta_stats {
use std::fmt::Write;
self.opaque.flush();
let opaque = &mut self.encoder.opaque;
opaque.flush();
// Rewind and re-read all the metadata to count the zero bytes we wrote.
let pos_before_rewind = self.opaque.file().stream_position().unwrap();
let pos_before_rewind = opaque.file().stream_position().unwrap();
let mut zero_bytes = 0;
self.opaque.file().rewind().unwrap();
let file = std::io::BufReader::new(self.opaque.file());
opaque.file().rewind().unwrap();
let file = std::io::BufReader::new(opaque.file());
for e in file.bytes() {
if e.unwrap() == 0 {
zero_bytes += 1;
}
}
assert_eq!(self.opaque.file().stream_position().unwrap(), pos_before_rewind);
assert_eq!(opaque.file().stream_position().unwrap(), pos_before_rewind);
stats.sort_by_key(|&(_, usize)| usize);
stats.reverse(); // bigger items first
@@ -1970,9 +2040,9 @@ fn encode_hygiene(&mut self) -> (SyntaxContextTable, ExpnDataTable, ExpnHashTabl
);
(
syntax_contexts.encode(&mut self.opaque),
expn_data_table.encode(&mut self.opaque),
expn_hash_table.encode(&mut self.opaque),
syntax_contexts.encode(&mut self.encoder),
expn_data_table.encode(&mut self.encoder),
expn_hash_table.encode(&mut self.encoder),
)
}
@@ -2429,20 +2499,35 @@ pub fn encode_metadata(tcx: TyCtxt<'_>, path: &Path, ref_path: Option<&Path>) {
tcx.dep_graph.assert_ignored();
// Generate the metadata stub manually, as that is a small file compared to full metadata.
if let Some(ref_path) = ref_path {
/*if let Some(ref_path) = ref_path {
let _prof_timer = tcx.prof.verbose_generic_activity("generate_crate_metadata_stub");
with_encode_metadata_header(tcx, ref_path, |ecx| {
//let new_hash = Svh::new(ecx.encoder.stable_hasher.clone().finish());
/*eprintln!("crate: {:?}", tcx.crate_name(LOCAL_CRATE));
eprintln!("encoding hash HASH: {:?}", new_hash);
if let Some(hash) = tcx.untracked().local_crate_hash.get() {
eprintln!("resetting hash: {:?}", hash);
}
tcx.untracked().local_crate_hash.set(new_hash).expect("local_crate_hash set twice");*/
let header: LazyValue<CrateHeader> = ecx.lazy(CrateHeader {
name: tcx.crate_name(LOCAL_CRATE),
triple: tcx.sess.opts.target_triple.clone(),
hash: tcx.crate_hash(LOCAL_CRATE),
hash: tcx.crate_hash(LOCAL_CRATE), /*tcx
.untracked()
.local_crate_hash
.get()
.expect("The hash should have been calculated during metadata encoding"),*/
is_proc_macro_crate: false,
is_stub: true,
});
header.position.get()
})
}
}*/
let _prof_timer = tcx.prof.verbose_generic_activity("generate_crate_metadata");
@@ -2462,6 +2547,44 @@ pub fn encode_metadata(tcx: TyCtxt<'_>, path: &Path, ref_path: Option<&Path>) {
Ok(_) => {}
Err(err) => tcx.dcx().emit_fatal(FailCreateFileEncoder { err }),
};
let file = std::fs::File::open(&source_file).unwrap();
let mmap = unsafe { Mmap::map(file) }.unwrap();
let owned = slice_owned(mmap, Deref::deref);
let blob = MetadataBlob::new(owned);
let header = blob.expect("file already created").get_header();
tcx.untracked().local_crate_hash.set(header.hash).expect("local_crate_hash set twice");
if let Some(ref_path) = ref_path {
let _prof_timer = tcx.prof.verbose_generic_activity("generate_crate_metadata_stub");
with_encode_metadata_header(tcx, ref_path, |ecx| {
//let new_hash = Svh::new(ecx.encoder.stable_hasher.clone().finish());
/*eprintln!("crate: {:?}", tcx.crate_name(LOCAL_CRATE));
eprintln!("encoding hash HASH: {:?}", new_hash);
if let Some(hash) = tcx.untracked().local_crate_hash.get() {
eprintln!("resetting hash: {:?}", hash);
}
tcx.untracked().local_crate_hash.set(new_hash).expect("local_crate_hash set twice");*/
let header: LazyValue<CrateHeader> = ecx.lazy(CrateHeader {
name: tcx.crate_name(LOCAL_CRATE),
triple: tcx.sess.opts.target_triple.clone(),
hash: tcx.crate_hash(LOCAL_CRATE), /*tcx
.untracked()
.local_crate_hash
.get()
.expect("The hash should have been calculated during metadata encoding"),*/
is_proc_macro_crate: false,
is_stub: true,
});
header.position.get()
})
}
return;
};
@@ -2490,12 +2613,12 @@ pub fn encode_metadata(tcx: TyCtxt<'_>, path: &Path, ref_path: Option<&Path>) {
let root = ecx.encode_crate_root();
// Flush buffer to ensure backing file has the correct size.
ecx.opaque.flush();
ecx.encoder.opaque.flush();
// Record metadata size for self-profiling
tcx.prof.artifact_size(
"crate_metadata",
"crate_metadata",
ecx.opaque.file().metadata().unwrap().len(),
ecx.encoder.opaque.file().metadata().unwrap().len(),
);
root.position.get()
@@ -2503,6 +2626,36 @@ pub fn encode_metadata(tcx: TyCtxt<'_>, path: &Path, ref_path: Option<&Path>) {
},
None,
);
if let Some(ref_path) = ref_path {
let _prof_timer = tcx.prof.verbose_generic_activity("generate_crate_metadata_stub");
with_encode_metadata_header(tcx, ref_path, |ecx| {
//let new_hash = Svh::new(ecx.encoder.stable_hasher.clone().finish());
/*eprintln!("crate: {:?}", tcx.crate_name(LOCAL_CRATE));
eprintln!("encoding hash HASH: {:?}", new_hash);
if let Some(hash) = tcx.untracked().local_crate_hash.get() {
eprintln!("resetting hash: {:?}", hash);
}
tcx.untracked().local_crate_hash.set(new_hash).expect("local_crate_hash set twice");*/
let header: LazyValue<CrateHeader> = ecx.lazy(CrateHeader {
name: tcx.crate_name(LOCAL_CRATE),
triple: tcx.sess.opts.target_triple.clone(),
hash: tcx.crate_hash(LOCAL_CRATE), /*tcx
.untracked()
.local_crate_hash
.get()
.expect("The hash should have been calculated during metadata encoding"),*/
is_proc_macro_crate: false,
is_stub: true,
});
header.position.get()
})
}
}
fn with_encode_metadata_header(
@@ -2510,53 +2663,55 @@ fn with_encode_metadata_header(
path: &Path,
f: impl FnOnce(&mut EncodeContext<'_, '_>) -> usize,
) {
let mut encoder = opaque::FileEncoder::new(path)
.unwrap_or_else(|err| tcx.dcx().emit_fatal(FailCreateFileEncoder { err }));
encoder.emit_raw_bytes(METADATA_HEADER);
tcx.with_stable_hashing_context(|hcx| {
let mut encoder = opaque::FileEncoder::new(path)
.unwrap_or_else(|err| tcx.dcx().emit_fatal(FailCreateFileEncoder { err }));
encoder.emit_raw_bytes(METADATA_HEADER);
// Will be filled with the root position after encoding everything.
encoder.emit_raw_bytes(&0u64.to_le_bytes());
// Will be filled with the root position after encoding everything.
encoder.emit_raw_bytes(&0u64.to_le_bytes());
let source_map_files = tcx.sess.source_map().files();
let source_file_cache = (Arc::clone(&source_map_files[0]), 0);
let required_source_files = Some(FxIndexSet::default());
drop(source_map_files);
let source_map_files = tcx.sess.source_map().files();
let source_file_cache = (Arc::clone(&source_map_files[0]), 0);
let required_source_files = Some(FxIndexSet::default());
drop(source_map_files);
let hygiene_ctxt = HygieneEncodeContext::default();
let hygiene_ctxt = HygieneEncodeContext::default();
let mut ecx = EncodeContext {
opaque: encoder,
tcx,
feat: tcx.features(),
tables: Default::default(),
lazy_state: LazyState::NoNode,
span_shorthands: Default::default(),
type_shorthands: Default::default(),
predicate_shorthands: Default::default(),
source_file_cache,
interpret_allocs: Default::default(),
required_source_files,
is_proc_macro: tcx.crate_types().contains(&CrateType::ProcMacro),
hygiene_ctxt: &hygiene_ctxt,
symbol_index_table: Default::default(),
};
let mut ecx = EncodeContext {
tcx,
feat: tcx.features(),
tables: Default::default(),
lazy_state: LazyState::NoNode,
span_shorthands: Default::default(),
type_shorthands: Default::default(),
predicate_shorthands: Default::default(),
source_file_cache,
interpret_allocs: Default::default(),
required_source_files,
is_proc_macro: tcx.crate_types().contains(&CrateType::ProcMacro),
hygiene_ctxt: &hygiene_ctxt,
symbol_index_table: Default::default(),
encoder: ContextEncoder { opaque: encoder, stable_hasher: StableHasher::new(), hcx },
};
// Encode the rustc version string in a predictable location.
rustc_version(tcx.sess.cfg_version).encode(&mut ecx);
// Encode the rustc version string in a predictable location.
rustc_version(tcx.sess.cfg_version).encode(&mut ecx);
let root_position = f(&mut ecx);
let root_position = f(&mut ecx);
// Make sure we report any errors from writing to the file.
// If we forget this, compilation can succeed with an incomplete rmeta file,
// causing an ICE when the rmeta file is read by another compilation.
if let Err((path, err)) = ecx.opaque.finish() {
tcx.dcx().emit_fatal(FailWriteFile { path: &path, err });
}
// Make sure we report any errors from writing to the file.
// If we forget this, compilation can succeed with an incomplete rmeta file,
// causing an ICE when the rmeta file is read by another compilation.
if let Err((path, err)) = ecx.encoder.opaque.finish() {
tcx.dcx().emit_fatal(FailWriteFile { path: &path, err });
}
let file = ecx.opaque.file();
if let Err(err) = encode_root_position(file, root_position) {
tcx.dcx().emit_fatal(FailWriteFile { path: ecx.opaque.path(), err });
}
let file = ecx.encoder.opaque.file();
if let Err(err) = encode_root_position(file, root_position) {
tcx.dcx().emit_fatal(FailWriteFile { path: ecx.encoder.opaque.path(), err });
}
})
}
fn encode_root_position(mut file: &File, pos: usize) -> Result<(), std::io::Error> {
+2 -2
View File
@@ -35,7 +35,6 @@
use rustc_middle::ty::fast_reject::SimplifiedType;
use rustc_middle::ty::{self, Ty, TyCtxt};
use rustc_middle::util::Providers;
use rustc_serialize::opaque::FileEncoder;
use rustc_session::config::mitigation_coverage::DeniedPartialMitigation;
use rustc_session::config::{SymbolManglingVersion, TargetModifier};
use rustc_session::cstore::{CrateDepKind, ForeignModule, LinkagePreference, NativeLib};
@@ -46,6 +45,7 @@
use table::TableBuilder;
use crate::eii::EiiMapEncodedKeyValue;
use crate::rmeta::encoder::ContextEncoder;
mod decoder;
mod def_path_hash_map;
@@ -364,7 +364,7 @@ struct TableBuilders {
}
impl TableBuilders {
fn encode(&self, buf: &mut FileEncoder) -> LazyTables {
fn encode(&self, buf: &mut ContextEncoder<'_>) -> LazyTables {
LazyTables {
$($name1: self.$name1.encode(buf),)+
$($name2: self.$name2.encode(buf),)+
+3 -5
View File
@@ -2,6 +2,7 @@
use rustc_index::Idx;
use crate::rmeta::decoder::MetaBlob;
use crate::rmeta::encoder::ContextEncoder;
use crate::rmeta::*;
pub(super) trait IsDefault: Default {
@@ -486,15 +487,12 @@ pub(crate) fn set(&mut self, i: I, value: T) {
}
}
pub(crate) fn encode(&self, buf: &mut FileEncoder) -> LazyTable<I, T> {
pub(crate) fn encode(&self, buf: &mut ContextEncoder<'_>) -> LazyTable<I, T> {
let pos = buf.position();
let width = self.width;
for block in &self.blocks {
buf.write_with(|dest| {
*dest = *block;
width
});
buf.write_m_with(block, width);
}
LazyTable::from_position_and_encoded_size(
+2 -97
View File
@@ -4,22 +4,16 @@
use rustc_abi::ExternAbi;
use rustc_ast::visit::{VisitorResult, walk_list};
use rustc_data_structures::fingerprint::Fingerprint;
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_data_structures::svh::Svh;
use rustc_data_structures::sync::{DynSend, DynSync, par_for_each_in, spawn, try_par_for_each_in};
use rustc_hir::def::{DefKind, Res};
use rustc_hir::def_id::{DefId, LOCAL_CRATE, LocalDefId, LocalModDefId};
use rustc_hir::def_id::{DefId, LocalDefId, LocalModDefId};
use rustc_hir::definitions::{DefKey, DefPath, DefPathHash};
use rustc_hir::intravisit::Visitor;
use rustc_hir::*;
use rustc_hir_pretty as pprust_hir;
use rustc_span::def_id::StableCrateId;
use rustc_span::{ErrorGuaranteed, Ident, Span, Symbol, kw, with_metavar_spans};
use rustc_span::{ErrorGuaranteed, Ident, Span, Symbol, kw};
use crate::hir::{ModuleItems, nested_filter};
use crate::middle::debugger_visualizer::DebuggerVisualizerFile;
use crate::query::LocalCrate;
use crate::ty::TyCtxt;
/// An iterator that walks up the ancestor tree of a given `HirId`.
@@ -1123,95 +1117,6 @@ fn nested(&self, state: &mut pprust_hir::State<'_>, nested: pprust_hir::Nested)
}
}
/// (Pre-change version, deleted by this diff.) Computes the crate SVH from
/// the HIR hash plus the non-HIR inputs that affect the emitted metadata:
/// upstream crate hashes, remapped source file names, debugger visualizer
/// contents, dep-tracking options, the stable crate id, visibilities, and
/// metavar spans.
pub(super) fn crate_hash(tcx: TyCtxt<'_>, _: LocalCrate) -> Svh {
    let krate = tcx.hir_crate(());
    // `opt_hir_hash` is only populated when a crate hash was requested.
    let hir_body_hash = krate.opt_hir_hash.expect("HIR hash missing while computing crate hash");
    let upstream_crates = upstream_crates(tcx);
    let resolutions = tcx.resolutions(());
    // We hash the final, remapped names of all local source files so we
    // don't have to include the path prefix remapping commandline args.
    // If we included the full mapping in the SVH, we could only have
    // reproducible builds by compiling from the same directory. So we just
    // hash the result of the mapping instead of the mapping itself.
    let mut source_file_names: Vec<_> = tcx
        .sess
        .source_map()
        .files()
        .iter()
        .filter(|source_file| source_file.cnum == LOCAL_CRATE)
        .map(|source_file| source_file.stable_id)
        .collect();
    // Sort so the hash does not depend on source-map insertion order.
    source_file_names.sort_unstable();
    // We have to take care of debugger visualizers explicitly. The HIR (and
    // thus `hir_body_hash`) contains the #[debugger_visualizer] attributes but
    // these attributes only store the file path to the visualizer file, not
    // their content. Yet that content is exported into crate metadata, so any
    // changes to it need to be reflected in the crate hash.
    let debugger_visualizers: Vec<_> = tcx
        .debugger_visualizers(LOCAL_CRATE)
        .iter()
        // We ignore the path to the visualizer file since it's not going to be
        // encoded in crate metadata and we already hash the full contents of
        // the file.
        .map(DebuggerVisualizerFile::path_erased)
        .collect();
    // NOTE: the order of the `hash_stable` calls below is significant —
    // changing it changes the resulting SVH.
    let crate_hash: Fingerprint = tcx.with_stable_hashing_context(|mut hcx| {
        let mut stable_hasher = StableHasher::new();
        hir_body_hash.hash_stable(&mut hcx, &mut stable_hasher);
        upstream_crates.hash_stable(&mut hcx, &mut stable_hasher);
        source_file_names.hash_stable(&mut hcx, &mut stable_hasher);
        debugger_visualizers.hash_stable(&mut hcx, &mut stable_hasher);
        // Spans only matter for incremental compilation.
        if tcx.sess.opts.incremental.is_some() {
            let definitions = tcx.untracked().definitions.freeze();
            let mut owner_spans: Vec<_> = tcx
                .hir_crate_items(())
                .definitions()
                .map(|def_id| {
                    let def_path_hash = definitions.def_path_hash(def_id);
                    let span = tcx.source_span(def_id);
                    debug_assert_eq!(span.parent(), None);
                    (def_path_hash, span)
                })
                .collect();
            // Sort by def path hash for a deterministic traversal order.
            owner_spans.sort_unstable_by_key(|bn| bn.0);
            owner_spans.hash_stable(&mut hcx, &mut stable_hasher);
        }
        tcx.sess.opts.dep_tracking_hash(true).hash_stable(&mut hcx, &mut stable_hasher);
        tcx.stable_crate_id(LOCAL_CRATE).hash_stable(&mut hcx, &mut stable_hasher);
        // Hash visibility information since it does not appear in HIR.
        // FIXME: Figure out how to remove `visibilities_for_hashing` by hashing visibilities on
        // the fly in the resolver, storing only their accumulated hash in `ResolverGlobalCtxt`,
        // and combining it with other hashes here.
        resolutions.visibilities_for_hashing.hash_stable(&mut hcx, &mut stable_hasher);
        with_metavar_spans(|mspans| {
            mspans.freeze_and_get_read_spans().hash_stable(&mut hcx, &mut stable_hasher);
        });
        stable_hasher.finish()
    });
    Svh::new(crate_hash)
}
/// (Pre-change version, deleted by this diff.) Collects the
/// `(StableCrateId, Svh)` pair of every upstream crate, sorted by stable
/// crate id so the output is deterministic regardless of crate numbering.
fn upstream_crates(tcx: TyCtxt<'_>) -> Vec<(StableCrateId, Svh)> {
    let mut upstream_crates: Vec<_> = tcx
        .crates(())
        .iter()
        .map(|&cnum| {
            let stable_crate_id = tcx.stable_crate_id(cnum);
            let hash = tcx.crate_hash(cnum);
            (stable_crate_id, hash)
        })
        .collect();
    // `CrateNum`s are not stable across compilations; order by the stable id.
    upstream_crates.sort_unstable_by_key(|&(stable_crate_id, _)| stable_crate_id);
    upstream_crates
}
pub(super) fn hir_module_items(tcx: TyCtxt<'_>, module_id: LocalModDefId) -> ModuleItems {
let mut collector = ItemCollector::new(tcx, false);
-1
View File
@@ -454,7 +454,6 @@ pub struct Hashes {
pub fn provide(providers: &mut Providers) {
providers.hir_crate_items = map::hir_crate_items;
providers.crate_hash = map::crate_hash;
providers.hir_module_items = map::hir_module_items;
providers.local_def_id_to_hir_id = |tcx, def_id| match tcx.hir_crate(()).owner(tcx, def_id) {
MaybeOwner::Owner(_) => HirId::make_owner(def_id),
+1
View File
@@ -208,6 +208,7 @@
query hir_crate(key: ()) -> &'tcx Crate<'tcx> {
arena_cache
eval_always
no_hash
desc { "getting the crate HIR" }
}
+4
View File
@@ -4,8 +4,10 @@
use std::any::Any;
use std::path::PathBuf;
use std::sync::OnceLock;
use rustc_abi::ExternAbi;
use rustc_data_structures::svh::Svh;
use rustc_data_structures::sync::{self, AppendOnlyIndexVec, FreezeLock};
use rustc_hir::attrs::{CfgEntry, NativeLibKind, PeImportNameType};
use rustc_hir::def_id::{
@@ -223,4 +225,6 @@ pub struct Untracked {
pub definitions: FreezeLock<Definitions>,
/// The interned [StableCrateId]s.
pub stable_crate_ids: FreezeLock<StableCrateIdMap>,
/// The hash of the local crate as computed in metadata encoding.
pub local_crate_hash: OnceLock<Svh>,
}