
Commit e8ec9f4

Auto merge of #120743 - Nadrieril:rollup-h2mxjc2, r=Nadrieril
Rollup of 13 pull requests

Successful merges:

- #110482 (Add armv8r-none-eabihf target for the Cortex-R52.)
- #119162 (Add unstable `-Z direct-access-external-data` cmdline flag for `rustc`)
- #120302 (various const interning cleanups)
- #120455 (Add FileCheck annotations to MIR-opt SROA tests)
- #120470 (Mark "unused binding" suggestion as maybe incorrect)
- #120479 (Suggest turning `if let` into irrefutable `let` if appropriate)
- #120564 (coverage: Split out counter increment sites from BCB node/edge counters)
- #120633 (pattern_analysis: gather up place-relevant info)
- #120664 (Add parallel rustc ui tests)
- #120721 (fix `llvm_out` to use the correct LLVM root)
- #120726 (Don't use bashism in checktools.sh)
- #120733 (MirPass: make name more const)
- #120735 (Remove some `unchecked_claim_error_was_emitted` calls)

Failed merges:

- #120727 (exhaustiveness: Prefer "`0..MAX` not covered" to "`_` not covered")

r? `@ghost`
`@rustbot` modify labels: rollup
2 parents d6c46a2 + 600d3bb commit e8ec9f4

File tree: 53 files changed (+933, -394 lines); only a subset of the changed files is shown below.


compiler/rustc_codegen_llvm/src/mono_item.rs

+15 -10

@@ -123,25 +123,30 @@ impl CodegenCx<'_, '_> {
             return false;
         }
 
+        // Match clang by only supporting COFF and ELF for now.
+        if self.tcx.sess.target.is_like_osx {
+            return false;
+        }
+
+        // With pie relocation model calls of functions defined in the translation
+        // unit can use copy relocations.
+        if self.tcx.sess.relocation_model() == RelocModel::Pie && !is_declaration {
+            return true;
+        }
+
         // Thread-local variables generally don't support copy relocations.
         let is_thread_local_var = llvm::LLVMIsAGlobalVariable(llval)
             .is_some_and(|v| llvm::LLVMIsThreadLocal(v) == llvm::True);
         if is_thread_local_var {
             return false;
         }
 
-        // Match clang by only supporting COFF and ELF for now.
-        if self.tcx.sess.target.is_like_osx {
-            return false;
+        // Respect the direct-access-external-data to override default behavior if present.
+        if let Some(direct) = self.tcx.sess.direct_access_external_data() {
+            return direct;
         }
 
         // Static relocation model should force copy relocations everywhere.
-        if self.tcx.sess.relocation_model() == RelocModel::Static {
-            return true;
-        }
-
-        // With pie relocation model calls of functions defined in the translation
-        // unit can use copy relocations.
-        self.tcx.sess.relocation_model() == RelocModel::Pie && !is_declaration
+        self.tcx.sess.relocation_model() == RelocModel::Static
     }
 }
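Taken together, the reordered checks form a fixed decision cascade, with the new `-Z direct-access-external-data` override (from #119162) consulted just before the static-relocation-model default. Below is a minimal sketch of that cascade; the free-standing `Ctx` type and helper function are illustrative stand-ins, not rustc's actual `Session` or LLVM bindings:

```rust
// Illustrative stand-ins for rustc's session/target state; the real logic
// lives on `CodegenCx` in rustc_codegen_llvm.
#[derive(PartialEq)]
#[allow(dead_code)]
enum RelocModel {
    Static,
    Pie,
    Pic,
}

struct Ctx {
    is_like_osx: bool,
    reloc_model: RelocModel,
    // Set by the unstable `-Z direct-access-external-data` flag, if given.
    direct_access_external_data: Option<bool>,
}

// Mirrors the post-patch check order from the diff above.
fn use_copy_relocations(ctx: &Ctx, is_declaration: bool, is_thread_local: bool) -> bool {
    // Match clang by only supporting COFF and ELF for now.
    if ctx.is_like_osx {
        return false;
    }
    // Under PIE, calls to functions defined in this translation unit
    // can use copy relocations.
    if ctx.reloc_model == RelocModel::Pie && !is_declaration {
        return true;
    }
    // Thread-local variables generally don't support copy relocations.
    if is_thread_local {
        return false;
    }
    // An explicit flag overrides the default below.
    if let Some(direct) = ctx.direct_access_external_data {
        return direct;
    }
    // The static relocation model forces copy relocations everywhere.
    ctx.reloc_model == RelocModel::Static
}

fn main() {
    let ctx = Ctx {
        is_like_osx: false,
        reloc_model: RelocModel::Static,
        direct_access_external_data: Some(false), // flag beats the static default
    };
    assert!(!use_copy_relocations(&ctx, true, false));
}
```

Note that the PIE check also moved ahead of the thread-local check, so a function defined in the current translation unit now short-circuits to `true` under PIE (after the Mach-O bail-out).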

compiler/rustc_const_eval/src/const_eval/eval_queries.rs

+2 -6

@@ -1,5 +1,3 @@
-use std::mem;
-
 use either::{Left, Right};
 
 use rustc_hir::def::DefKind;
@@ -24,12 +22,13 @@ use crate::interpret::{
 };
 
 // Returns a pointer to where the result lives
+#[instrument(level = "trace", skip(ecx, body), ret)]
 fn eval_body_using_ecx<'mir, 'tcx>(
     ecx: &mut CompileTimeEvalContext<'mir, 'tcx>,
     cid: GlobalId<'tcx>,
     body: &'mir mir::Body<'tcx>,
 ) -> InterpResult<'tcx, MPlaceTy<'tcx>> {
-    debug!("eval_body_using_ecx: {:?}, {:?}", cid, ecx.param_env);
+    trace!(?ecx.param_env);
     let tcx = *ecx.tcx;
     assert!(
         cid.promoted.is_some()
@@ -75,11 +74,8 @@ fn eval_body_using_ecx<'mir, 'tcx>(
             None => InternKind::Constant,
         }
     };
-    let check_alignment = mem::replace(&mut ecx.machine.check_alignment, CheckAlignment::No); // interning doesn't need to respect alignment
     intern_const_alloc_recursive(ecx, intern_kind, &ret)?;
-    ecx.machine.check_alignment = check_alignment;
 
-    debug!("eval_body_using_ecx done: {:?}", ret);
     Ok(ret)
 }
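The `#[instrument]` attribute comes from the `tracing` ecosystem (rustc re-exports it): it opens a span named after the function, records the non-skipped arguments, and with `ret` also records the return value, which is what lets the hand-written entry and exit `debug!` lines be deleted. A small self-contained sketch of the pattern; the function, fields, and `tracing-subscriber` setup below are illustrative, not rustc's own code:

```rust
// Sketch of the `#[instrument]` pattern; assumes the `tracing` and
// `tracing-subscriber` crates. `eval_body` is a toy stand-in for
// rustc's `eval_body_using_ecx`.
use tracing::{instrument, trace};

#[instrument(level = "trace", skip(body), ret)]
fn eval_body(cid: u32, body: &str) -> usize {
    // Extra fields can still be logged inside the span; `?field` records
    // the `Debug` form, as in `trace!(?ecx.param_env)` above.
    trace!(?cid, "evaluating");
    body.len()
}

fn main() {
    tracing_subscriber::fmt()
        .with_max_level(tracing::Level::TRACE)
        .init();
    // Emits an `eval_body` span that captures `cid` and the return value.
    eval_body(7, "example");
}
```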

compiler/rustc_const_eval/src/interpret/intern.rs

+59 -57

@@ -41,13 +41,12 @@ pub trait CompileTimeMachine<'mir, 'tcx: 'mir, T> = Machine<
 /// allocation is interned immutably; if it is `Mutability::Mut`, then the allocation *must be*
 /// already mutable (as a sanity check).
 ///
-/// `recursive_alloc` is called for all recursively encountered allocations.
+/// Returns an iterator over all relocations referred to by this allocation.
 fn intern_shallow<'rt, 'mir, 'tcx, T, M: CompileTimeMachine<'mir, 'tcx, T>>(
     ecx: &'rt mut InterpCx<'mir, 'tcx, M>,
     alloc_id: AllocId,
     mutability: Mutability,
-    mut recursive_alloc: impl FnMut(&InterpCx<'mir, 'tcx, M>, CtfeProvenance),
-) -> Result<(), ()> {
+) -> Result<impl Iterator<Item = CtfeProvenance> + 'tcx, ()> {
     trace!("intern_shallow {:?}", alloc_id);
     // remove allocation
     let Some((_kind, mut alloc)) = ecx.memory.alloc_map.remove(&alloc_id) else {
@@ -65,14 +64,10 @@ fn intern_shallow<'rt, 'mir, 'tcx, T, M: CompileTimeMachine<'mir, 'tcx, T>>(
             assert_eq!(alloc.mutability, Mutability::Mut);
         }
     }
-    // record child allocations
-    for &(_, prov) in alloc.provenance().ptrs().iter() {
-        recursive_alloc(ecx, prov);
-    }
     // link the alloc id to the actual allocation
     let alloc = ecx.tcx.mk_const_alloc(alloc);
     ecx.tcx.set_alloc_id_memory(alloc_id, alloc);
-    Ok(())
+    Ok(alloc.0.0.provenance().ptrs().iter().map(|&(_, prov)| prov))
 }
 
 /// How a constant value should be interned.
@@ -128,12 +123,16 @@ pub fn intern_const_alloc_recursive<
         }
     };
 
-    // Initialize recursive interning.
+    // Intern the base allocation, and initialize todo list for recursive interning.
     let base_alloc_id = ret.ptr().provenance.unwrap().alloc_id();
-    let mut todo = vec![(base_alloc_id, base_mutability)];
+    // First we intern the base allocation, as it requires a different mutability.
+    // This gives us the initial set of nested allocations, which will then all be processed
+    // recursively in the loop below.
+    let mut todo: Vec<_> =
+        intern_shallow(ecx, base_alloc_id, base_mutability).unwrap().map(|prov| prov).collect();
     // We need to distinguish "has just been interned" from "was already in `tcx`",
     // so we track this in a separate set.
-    let mut just_interned = FxHashSet::default();
+    let mut just_interned: FxHashSet<_> = std::iter::once(base_alloc_id).collect();
     // Whether we encountered a bad mutable pointer.
     // We want to first report "dangling" and then "mutable", so we need to delay reporting these
     // errors.
@@ -147,52 +146,56 @@ pub fn intern_const_alloc_recursive<
     // raw pointers, so we cannot rely on validation to catch them -- and since interning runs
     // before validation, and interning doesn't know the type of anything, this means we can't show
     // better errors. Maybe we should consider doing validation before interning in the future.
-    while let Some((alloc_id, mutability)) = todo.pop() {
+    while let Some(prov) = todo.pop() {
+        let alloc_id = prov.alloc_id();
+        // Crucially, we check this *before* checking whether the `alloc_id`
+        // has already been interned. The point of this check is to ensure that when
+        // there are multiple pointers to the same allocation, they are *all* immutable.
+        // Therefore it would be bad if we only checked the first pointer to any given
+        // allocation.
+        // (It is likely not possible to actually have multiple pointers to the same allocation,
+        // so alternatively we could also check that and ICE if there are multiple such pointers.)
+        if intern_kind != InternKind::Promoted
+            && inner_mutability == Mutability::Not
+            && !prov.immutable()
+        {
+            if ecx.tcx.try_get_global_alloc(alloc_id).is_some()
+                && !just_interned.contains(&alloc_id)
+            {
+                // This is a pointer to some memory from another constant. We encounter mutable
+                // pointers to such memory since we do not always track immutability through
+                // these "global" pointers. Allowing them is harmless; the point of these checks
+                // during interning is to justify why we intern the *new* allocations immutably,
+                // so we can completely ignore existing allocations. We also don't need to add
+                // this to the todo list, since after all it is already interned.
+                continue;
+            }
+            // Found a mutable pointer inside a const where inner allocations should be
+            // immutable. We exclude promoteds from this, since things like `&mut []` and
+            // `&None::<Cell<i32>>` lead to promotion that can produce mutable pointers. We rely
+            // on the promotion analysis not screwing up to ensure that it is sound to intern
+            // promoteds as immutable.
+            found_bad_mutable_pointer = true;
+        }
         if ecx.tcx.try_get_global_alloc(alloc_id).is_some() {
             // Already interned.
             debug_assert!(!ecx.memory.alloc_map.contains_key(&alloc_id));
             continue;
         }
         just_interned.insert(alloc_id);
-        intern_shallow(ecx, alloc_id, mutability, |ecx, prov| {
-            let alloc_id = prov.alloc_id();
-            if intern_kind != InternKind::Promoted
-                && inner_mutability == Mutability::Not
-                && !prov.immutable()
-            {
-                if ecx.tcx.try_get_global_alloc(alloc_id).is_some()
-                    && !just_interned.contains(&alloc_id)
-                {
-                    // This is a pointer to some memory from another constant. We encounter mutable
-                    // pointers to such memory since we do not always track immutability through
-                    // these "global" pointers. Allowing them is harmless; the point of these checks
-                    // during interning is to justify why we intern the *new* allocations immutably,
-                    // so we can completely ignore existing allocations. We also don't need to add
-                    // this to the todo list, since after all it is already interned.
-                    return;
-                }
-                // Found a mutable pointer inside a const where inner allocations should be
-                // immutable. We exclude promoteds from this, since things like `&mut []` and
-                // `&None::<Cell<i32>>` lead to promotion that can produce mutable pointers. We rely
-                // on the promotion analysis not screwing up to ensure that it is sound to intern
-                // promoteds as immutable.
-                found_bad_mutable_pointer = true;
-            }
-            // We always intern with `inner_mutability`, and furthermore we ensured above that if
-            // that is "immutable", then there are *no* mutable pointers anywhere in the newly
-            // interned memory -- justifying that we can indeed intern immutably. However this also
-            // means we can *not* easily intern immutably here if `prov.immutable()` is true and
-            // `inner_mutability` is `Mut`: there might be other pointers to that allocation, and
-            // we'd have to somehow check that they are *all* immutable before deciding that this
-            // allocation can be made immutable. In the future we could consider analyzing all
-            // pointers before deciding which allocations can be made immutable; but for now we are
-            // okay with losing some potential for immutability here. This can anyway only affect
-            // `static mut`.
-            todo.push((alloc_id, inner_mutability));
-        })
-        .map_err(|()| {
+        // We always intern with `inner_mutability`, and furthermore we ensured above that if
+        // that is "immutable", then there are *no* mutable pointers anywhere in the newly
+        // interned memory -- justifying that we can indeed intern immutably. However this also
+        // means we can *not* easily intern immutably here if `prov.immutable()` is true and
+        // `inner_mutability` is `Mut`: there might be other pointers to that allocation, and
+        // we'd have to somehow check that they are *all* immutable before deciding that this
+        // allocation can be made immutable. In the future we could consider analyzing all
+        // pointers before deciding which allocations can be made immutable; but for now we are
+        // okay with losing some potential for immutability here. This can anyway only affect
        // `static mut`.
+        todo.extend(intern_shallow(ecx, alloc_id, inner_mutability).map_err(|()| {
             ecx.tcx.dcx().emit_err(DanglingPtrInFinal { span: ecx.tcx.span, kind: intern_kind })
-        })?;
+        })?);
     }
     if found_bad_mutable_pointer {
         return Err(ecx
@@ -220,13 +223,13 @@ pub fn intern_const_alloc_for_constprop<
         return Ok(());
     }
     // Move allocation to `tcx`.
-    intern_shallow(ecx, alloc_id, Mutability::Not, |_ecx, _| {
+    for _ in intern_shallow(ecx, alloc_id, Mutability::Not).map_err(|()| err_ub!(DeadLocal))? {
         // We are not doing recursive interning, so we don't currently support provenance.
         // (If this assertion ever triggers, we should just implement a
         // proper recursive interning loop -- or just call `intern_const_alloc_recursive`.
         panic!("`intern_const_alloc_for_constprop` called on allocation with nested provenance")
-    })
-    .map_err(|()| err_ub!(DeadLocal).into())
+    }
+    Ok(())
 }
 
 impl<'mir, 'tcx: 'mir, M: super::intern::CompileTimeMachine<'mir, 'tcx, !>>
@@ -247,15 +250,14 @@ impl<'mir, 'tcx: 'mir, M: super::intern::CompileTimeMachine<'mir, 'tcx, !>>
         let dest = self.allocate(layout, MemoryKind::Stack)?;
         f(self, &dest.clone().into())?;
         let alloc_id = dest.ptr().provenance.unwrap().alloc_id(); // this was just allocated, it must have provenance
-        intern_shallow(self, alloc_id, Mutability::Not, |ecx, prov| {
+        for prov in intern_shallow(self, alloc_id, Mutability::Not).unwrap() {
             // We are not doing recursive interning, so we don't currently support provenance.
             // (If this assertion ever triggers, we should just implement a
             // proper recursive interning loop -- or just call `intern_const_alloc_recursive`.
-            if !ecx.tcx.try_get_global_alloc(prov.alloc_id()).is_some() {
+            if !self.tcx.try_get_global_alloc(prov.alloc_id()).is_some() {
                 panic!("`intern_with_temp_alloc` with nested allocations");
             }
-        })
-        .unwrap();
+        }
         Ok(alloc_id)
     }
 }
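The core of this refactor is the change to `intern_shallow`: instead of invoking a callback for every nested allocation, it now returns an iterator of their provenances, and `intern_const_alloc_recursive` drives the traversal with an explicit worklist. A toy sketch of that callback-to-worklist pattern, with hypothetical `AllocId`/`Memory` types standing in for the interpreter's machinery:

```rust
// Toy sketch of the callback-to-iterator refactor: `intern_shallow` hands
// back the nested allocation ids and the caller owns the worklist.
use std::collections::{HashMap, HashSet};

type AllocId = u32;

struct Memory {
    // Live allocations, each listing the allocations it points to.
    alloc_map: HashMap<AllocId, Vec<AllocId>>,
    interned: HashSet<AllocId>,
}

// Before: `fn intern_shallow(.., mut recurse: impl FnMut(AllocId))` invoked
// the callback per child. After: return the children as an iterator.
fn intern_shallow(mem: &mut Memory, id: AllocId) -> Result<impl Iterator<Item = AllocId>, ()> {
    let children = mem.alloc_map.remove(&id).ok_or(())?; // dangling if absent
    mem.interned.insert(id);
    Ok(children.into_iter())
}

fn intern_recursive(mem: &mut Memory, root: AllocId) -> Result<(), ()> {
    // Intern the base allocation first; its children seed the todo list.
    let mut todo: Vec<AllocId> = intern_shallow(mem, root)?.collect();
    while let Some(id) = todo.pop() {
        if mem.interned.contains(&id) {
            continue; // already interned, e.g. reachable via two pointers
        }
        todo.extend(intern_shallow(mem, id)?);
    }
    Ok(())
}

fn main() {
    let mut mem = Memory {
        alloc_map: HashMap::from([(0, vec![1, 2]), (1, vec![2]), (2, vec![])]),
        interned: HashSet::new(),
    };
    intern_recursive(&mut mem, 0).unwrap();
    assert_eq!(mem.interned.len(), 3);
}
```

In rustc itself the returned iterator can outlive `ecx` because it borrows from the freshly `tcx`-interned allocation (hence the `+ 'tcx` bound), which is what lets the loop keep mutating the interpreter state while extending `todo`.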

compiler/rustc_driver_impl/src/lib.rs

+8 -7

@@ -24,7 +24,9 @@ use rustc_data_structures::profiling::{
     get_resident_set_size, print_time_passes_entry, TimePassesFormat,
 };
 use rustc_errors::registry::Registry;
-use rustc_errors::{markdown, ColorConfig, DiagCtxt, ErrCode, ErrorGuaranteed, PResult};
+use rustc_errors::{
+    markdown, ColorConfig, DiagCtxt, ErrCode, ErrorGuaranteed, FatalError, PResult,
+};
 use rustc_feature::find_gated_cfg;
 use rustc_interface::util::{self, collect_crate_types, get_codegen_backend};
 use rustc_interface::{interface, Queries};
@@ -1231,11 +1233,10 @@ fn parse_crate_attrs<'a>(sess: &'a Session) -> PResult<'a, ast::AttrVec> {
 /// The compiler currently unwinds with a special sentinel value to abort
 /// compilation on fatal errors. This function catches that sentinel and turns
 /// the panic into a `Result` instead.
-pub fn catch_fatal_errors<F: FnOnce() -> R, R>(f: F) -> Result<R, ErrorGuaranteed> {
+pub fn catch_fatal_errors<F: FnOnce() -> R, R>(f: F) -> Result<R, FatalError> {
     catch_unwind(panic::AssertUnwindSafe(f)).map_err(|value| {
         if value.is::<rustc_errors::FatalErrorMarker>() {
-            #[allow(deprecated)]
-            ErrorGuaranteed::unchecked_claim_error_was_emitted()
+            FatalError
         } else {
             panic::resume_unwind(value);
         }
@@ -1245,9 +1246,9 @@ pub fn catch_fatal_errors<F: FnOnce() -> R, R>(f: F) -> Result<R, ErrorGuarantee
 /// Variant of `catch_fatal_errors` for the `interface::Result` return type
 /// that also computes the exit code.
 pub fn catch_with_exit_code(f: impl FnOnce() -> interface::Result<()>) -> i32 {
-    match catch_fatal_errors(f).flatten() {
-        Ok(()) => EXIT_SUCCESS,
-        Err(_) => EXIT_FAILURE,
+    match catch_fatal_errors(f) {
+        Ok(Ok(())) => EXIT_SUCCESS,
+        _ => EXIT_FAILURE,
     }
 }
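`catch_fatal_errors` is built on `std::panic::catch_unwind` plus a sentinel payload type: a fatal error unwinds with a recognizable marker, and any other panic is a genuine ICE and is re-thrown. A self-contained sketch of that mechanism; `FatalErrorMarker` and `FatalError` here are local stand-ins for the `rustc_errors` types:

```rust
// Sketch of the sentinel-panic mechanism; marker and error types are toys.
use std::panic::{self, catch_unwind, AssertUnwindSafe};

struct FatalErrorMarker;

#[derive(Debug)]
struct FatalError;

// Catch the sentinel panic and turn it into `Err(FatalError)`;
// let any other panic (a genuine ICE) keep unwinding.
fn catch_fatal_errors<R>(f: impl FnOnce() -> R) -> Result<R, FatalError> {
    catch_unwind(AssertUnwindSafe(f)).map_err(|payload| {
        if payload.is::<FatalErrorMarker>() {
            FatalError
        } else {
            panic::resume_unwind(payload)
        }
    })
}

fn main() {
    // A fatal error aborts via the sentinel panic...
    let fatal: Result<(), _> = catch_fatal_errors(|| panic::panic_any(FatalErrorMarker));
    assert!(fatal.is_err());
    // ...while a normal return passes through untouched.
    assert_eq!(catch_fatal_errors(|| 42).unwrap(), 42);
}
```

Since the outer error type is now `FatalError` while the inner `interface::Result` keeps `ErrorGuaranteed`, the two `Result` layers no longer share an error type, which is why `catch_with_exit_code` switches from `.flatten()` to an explicit nested `match`.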

compiler/rustc_errors/src/diagnostic_builder.rs

+10 -6

@@ -99,16 +99,20 @@ impl<'a, G: EmissionGuarantee> DiagnosticBuilder<'a, G> {
     }
 
     /// `ErrorGuaranteed::emit_producing_guarantee` uses this.
-    // FIXME(eddyb) make `ErrorGuaranteed` impossible to create outside `.emit()`.
     fn emit_producing_error_guaranteed(mut self) -> ErrorGuaranteed {
         let diag = self.take_diag();
 
-        // Only allow a guarantee if the `level` wasn't switched to a
-        // non-error. The field isn't `pub`, but the whole `Diagnostic` can be
-        // overwritten with a new one, thanks to `DerefMut`.
+        // The only error levels that produce `ErrorGuaranteed` are
+        // `Error` and `DelayedBug`. But `DelayedBug` should never occur here
+        // because delayed bugs have their level changed to `Bug` when they are
+        // actually printed, so they produce an ICE.
+        //
+        // (Also, even though `level` isn't `pub`, the whole `Diagnostic` could
+        // be overwritten with a new one thanks to `DerefMut`. So this assert
+        // protects against that, too.)
         assert!(
-            diag.is_error(),
-            "emitted non-error ({:?}) diagnostic from `DiagnosticBuilder<ErrorGuaranteed>`",
+            matches!(diag.level, Level::Error | Level::DelayedBug),
+            "invalid diagnostic level ({:?})",
             diag.level,
         );
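The tightened assertion replaces the broad `is_error()` predicate with an explicit `matches!` allowlist of the two levels that may produce an `ErrorGuaranteed`. A tiny sketch of the pattern with a toy `Level` enum (not rustc_errors' real one):

```rust
// Toy sketch of the allowlist assertion; `Level` is a stand-in enum.
#[derive(Debug)]
#[allow(dead_code)]
enum Level {
    Bug,
    DelayedBug,
    Error,
    Warning,
}

fn assert_can_produce_guarantee(level: &Level) {
    assert!(
        matches!(level, Level::Error | Level::DelayedBug),
        "invalid diagnostic level ({:?})",
        level,
    );
}

fn main() {
    assert_can_produce_guarantee(&Level::Error); // fine
    assert_can_produce_guarantee(&Level::DelayedBug); // also allowed
    // assert_can_produce_guarantee(&Level::Warning); // would panic
}
```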
