
Commit 0e81fd6
Auto merge of rust-lang#134153 - workingjubilee:denied-insurance-claims, r=<try>
compiler: Stop manually extending the stack

This rips out almost all calls to `ensure_sufficient_stack`, and disables the actual function itself for the handful of calls that proved too annoying to immediately rip out, for rebasing or type inference reasons.

r? `@ghost`
2 parents 33c245b + 8636992 commit 0e81fd6
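
To see the shape of the change at a glance before the per-file diffs: the commit turns wrapped recursive calls back into plain recursive calls. The sketch below is schematic only; `Node` and the `walk_*` functions are placeholder names, and the local `ensure_sufficient_stack` stub stands in for the real helper in `rustc_data_structures` (whose diff appears further down).

```rust
// Schematic sketch of the call-site pattern this commit removes.
// `Node` and the walk functions are placeholders, not rustc items.

struct Node {
    children: Vec<Node>,
}

// Stand-in for rustc_data_structures::stack::ensure_sufficient_stack, which
// grew the stack on demand via the `stacker` crate before this commit.
fn ensure_sufficient_stack<R>(f: impl FnOnce() -> R) -> R {
    f()
}

// Before: each recursive step is wrapped so the stack can grow if it runs low.
fn walk_before(node: &Node) -> usize {
    ensure_sufficient_stack(|| 1 + node.children.iter().map(walk_before).sum::<usize>())
}

// After: the wrapper is gone and recursion depth is bounded by the plain call stack.
fn walk_after(node: &Node) -> usize {
    1 + node.children.iter().map(walk_after).sum::<usize>()
}
```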

31 files changed: +736, −871 lines

compiler/rustc_ast/src/ast.rs
Lines changed: 1 addition & 7 deletions

@@ -26,7 +26,6 @@ pub use UnsafeSource::*;
 pub use rustc_ast_ir::{Movability, Mutability, Pinnedness};
 use rustc_data_structures::packed::Pu128;
 use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
-use rustc_data_structures::stack::ensure_sufficient_stack;
 use rustc_data_structures::sync::Lrc;
 use rustc_macros::{Decodable, Encodable, HashStable_Generic};
 pub use rustc_span::AttrId;
@@ -2192,12 +2191,7 @@ pub struct Ty {

 impl Clone for Ty {
     fn clone(&self) -> Self {
-        ensure_sufficient_stack(|| Self {
-            id: self.id,
-            kind: self.kind.clone(),
-            span: self.span,
-            tokens: self.tokens.clone(),
-        })
+        Self { id: self.id, kind: self.kind.clone(), span: self.span, tokens: self.tokens.clone() }
     }
 }

compiler/rustc_ast/src/mut_visit.rs
Lines changed: 1 addition & 2 deletions

@@ -11,7 +11,6 @@ use std::ops::DerefMut;
 use std::panic;

 use rustc_data_structures::flat_map_in_place::FlatMapInPlace;
-use rustc_data_structures::stack::ensure_sufficient_stack;
 use rustc_data_structures::sync::Lrc;
 use rustc_span::Span;
 use rustc_span::source_map::Spanned;
@@ -1653,7 +1652,7 @@ pub fn walk_expr<T: MutVisitor>(vis: &mut T, Expr { kind, id, span, attrs, token
         ExprKind::If(cond, tr, fl) => {
             vis.visit_expr(cond);
             vis.visit_block(tr);
-            visit_opt(fl, |fl| ensure_sufficient_stack(|| vis.visit_expr(fl)));
+            visit_opt(fl, |fl| vis.visit_expr(fl));
         }
         ExprKind::While(cond, body, label) => {
             visit_opt(label, |label| vis.visit_label(label));

compiler/rustc_ast_lowering/src/pat.rs
Lines changed: 105 additions & 108 deletions

@@ -1,6 +1,5 @@
 use rustc_ast::ptr::P;
 use rustc_ast::*;
-use rustc_data_structures::stack::ensure_sufficient_stack;
 use rustc_hir as hir;
 use rustc_hir::def::Res;
 use rustc_span::Span;
@@ -19,117 +18,115 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
     }

     fn lower_pat_mut(&mut self, mut pattern: &Pat) -> hir::Pat<'hir> {
-        ensure_sufficient_stack(|| {
-            // loop here to avoid recursion
-            [... the removed lines are the same body as the added lines below, one indent level deeper inside the closure ...]
-            self.pat_with_node_id_of(pattern, node, pat_hir_id)
-        })
+        // loop here to avoid recursion
+        let pat_hir_id = self.lower_node_id(pattern.id);
+        let node = loop {
+            match &pattern.kind {
+                PatKind::Wild => break hir::PatKind::Wild,
+                PatKind::Never => break hir::PatKind::Never,
+                PatKind::Ident(binding_mode, ident, sub) => {
+                    let lower_sub = |this: &mut Self| sub.as_ref().map(|s| this.lower_pat(s));
+                    break self.lower_pat_ident(
+                        pattern,
+                        *binding_mode,
+                        *ident,
+                        pat_hir_id,
+                        lower_sub,
+                    );
+                }
+                PatKind::Lit(e) => {
+                    break hir::PatKind::Lit(self.lower_expr_within_pat(e, false));
+                }
+                PatKind::TupleStruct(qself, path, pats) => {
+                    let qpath = self.lower_qpath(
+                        pattern.id,
+                        qself,
+                        path,
+                        ParamMode::Optional,
+                        AllowReturnTypeNotation::No,
+                        ImplTraitContext::Disallowed(ImplTraitPosition::Path),
+                        None,
+                    );
+                    let (pats, ddpos) = self.lower_pat_tuple(pats, "tuple struct");
+                    break hir::PatKind::TupleStruct(qpath, pats, ddpos);
+                }
+                PatKind::Or(pats) => {
+                    break hir::PatKind::Or(
+                        self.arena.alloc_from_iter(pats.iter().map(|x| self.lower_pat_mut(x))),
+                    );
+                }
+                PatKind::Path(qself, path) => {
+                    let qpath = self.lower_qpath(
+                        pattern.id,
+                        qself,
+                        path,
+                        ParamMode::Optional,
+                        AllowReturnTypeNotation::No,
+                        ImplTraitContext::Disallowed(ImplTraitPosition::Path),
+                        None,
+                    );
+                    break hir::PatKind::Path(qpath);
+                }
+                PatKind::Struct(qself, path, fields, etc) => {
+                    let qpath = self.lower_qpath(
+                        pattern.id,
+                        qself,
+                        path,
+                        ParamMode::Optional,
+                        AllowReturnTypeNotation::No,
+                        ImplTraitContext::Disallowed(ImplTraitPosition::Path),
+                        None,
+                    );
+
+                    let fs = self.arena.alloc_from_iter(fields.iter().map(|f| {
+                        let hir_id = self.lower_node_id(f.id);
+                        self.lower_attrs(hir_id, &f.attrs);
+
+                        hir::PatField {
+                            hir_id,
+                            ident: self.lower_ident(f.ident),
+                            pat: self.lower_pat(&f.pat),
+                            is_shorthand: f.is_shorthand,
+                            span: self.lower_span(f.span),
+                        }
+                    }));
+                    break hir::PatKind::Struct(qpath, fs, *etc == ast::PatFieldsRest::Rest);
+                }
+                PatKind::Tuple(pats) => {
+                    let (pats, ddpos) = self.lower_pat_tuple(pats, "tuple");
+                    break hir::PatKind::Tuple(pats, ddpos);
+                }
+                PatKind::Box(inner) => {
+                    break hir::PatKind::Box(self.lower_pat(inner));
+                }
+                PatKind::Deref(inner) => {
+                    break hir::PatKind::Deref(self.lower_pat(inner));
+                }
+                PatKind::Ref(inner, mutbl) => {
+                    break hir::PatKind::Ref(self.lower_pat(inner), *mutbl);
+                }
+                PatKind::Range(e1, e2, Spanned { node: end, .. }) => {
+                    break hir::PatKind::Range(
+                        e1.as_deref().map(|e| self.lower_expr_within_pat(e, true)),
+                        e2.as_deref().map(|e| self.lower_expr_within_pat(e, true)),
+                        self.lower_range_end(end, e2.is_some()),
+                    );
+                }
+                // FIXME(guard_patterns): lower pattern guards to HIR
+                PatKind::Guard(inner, _) => pattern = inner,
+                PatKind::Slice(pats) => break self.lower_pat_slice(pats),
+                PatKind::Rest => {
+                    // If we reach here the `..` pattern is not semantically allowed.
+                    break self.ban_illegal_rest_pat(pattern.span);
+                }
+                // return inner to be processed in next loop
+                PatKind::Paren(inner) => pattern = inner,
+                PatKind::MacCall(_) => panic!("{:?} shouldn't exist here", pattern.span),
+                PatKind::Err(guar) => break hir::PatKind::Err(*guar),
+            }
+        };
+
+        self.pat_with_node_id_of(pattern, node, pat_hir_id)
     }

     fn lower_pat_tuple(

compiler/rustc_const_eval/src/const_eval/valtrees.rs
Lines changed: 1 addition & 2 deletions

@@ -1,5 +1,4 @@
 use rustc_abi::{BackendRepr, VariantIdx};
-use rustc_data_structures::stack::ensure_sufficient_stack;
 use rustc_middle::mir::interpret::{EvalToValTreeResult, GlobalId, ReportedErrorInfo};
 use rustc_middle::ty::layout::{LayoutCx, LayoutOf, TyAndLayout};
 use rustc_middle::ty::{self, ScalarInt, Ty, TyCtxt};
@@ -105,7 +104,7 @@ fn const_to_valtree_inner<'tcx>(
             // Since the returned valtree does not contain the type or layout, we can just
             // switch to the base type.
             place.layout = ecx.layout_of(*base).unwrap();
-            ensure_sufficient_stack(|| const_to_valtree_inner(ecx, &place, num_nodes))
+            const_to_valtree_inner(ecx, &place, num_nodes)
         },

compiler/rustc_data_structures/src/stack.rs
Lines changed: 1 addition & 10 deletions

@@ -1,15 +1,6 @@
 // This is the amount of bytes that need to be left on the stack before increasing the size.
 // It must be at least as large as the stack required by any code that does not call
 // `ensure_sufficient_stack`.
-const RED_ZONE: usize = 100 * 1024; // 100k
-
-// Only the first stack that is pushed, grows exponentially (2^n * STACK_PER_RECURSION) from then
-// on. This flag has performance relevant characteristics. Don't set it too high.
-#[cfg(not(target_os = "aix"))]
-const STACK_PER_RECURSION: usize = 1024 * 1024; // 1MB
-// LLVM for AIX doesn't feature TCO, increase recursion size for workaround.
-#[cfg(target_os = "aix")]
-const STACK_PER_RECURSION: usize = 16 * 1024 * 1024; // 16MB

 /// Grows the stack on demand to prevent stack overflow. Call this in strategic locations
 /// to "break up" recursive calls. E.g. almost any call to `visit_expr` or equivalent can benefit
@@ -18,5 +9,5 @@ const STACK_PER_RECURSION: usize = 16 * 1024 * 1024; // 16MB
 /// Should not be sprinkled around carelessly, as it causes a little bit of overhead.
 #[inline]
 pub fn ensure_sufficient_stack<R>(f: impl FnOnce() -> R) -> R {
-    stacker::maybe_grow(RED_ZONE, STACK_PER_RECURSION, f)
+    f()
 }
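
For context, a minimal self-contained sketch (not rustc code) of how callers used this helper before the change: the recursive step is wrapped so that, when fewer than `RED_ZONE` bytes of stack remain, the `stacker` crate allocates a fresh segment of `STACK_PER_RECURSION` bytes before continuing. The constants mirror the deleted ones above; `count_down` is a made-up stand-in for recursive compiler visitors such as `visit_expr`, `lower_pat_mut`, or `relate`.

```rust
// Illustrative sketch of the removed pattern; depends on the `stacker` crate.

const RED_ZONE: usize = 100 * 1024; // grow once less than 100k of stack remains
const STACK_PER_RECURSION: usize = 1024 * 1024; // 1MB per extra segment

fn ensure_sufficient_stack<R>(f: impl FnOnce() -> R) -> R {
    // If we are inside the red zone, allocate a new stack segment and run `f`
    // on it; otherwise just call `f` on the current stack.
    stacker::maybe_grow(RED_ZONE, STACK_PER_RECURSION, f)
}

// A deliberately deep, non-tail recursion standing in for visit_expr and friends.
fn count_down(n: u64) -> u64 {
    if n == 0 {
        0
    } else {
        // Wrapping the recursive step lets pathologically deep inputs keep going;
        // without the wrapper, depth is limited by the thread's stack size.
        ensure_sufficient_stack(|| 1 + count_down(n - 1))
    }
}

fn main() {
    // Deep enough that an unwrapped version would overflow a typical 8MB stack.
    println!("{}", count_down(5_000_000));
}
```

After this commit the helper is a pass-through, so recursion at the former call sites relies on the thread's ordinary stack.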

compiler/rustc_hir_typeck/src/expr.rs
Lines changed: 2 additions & 3 deletions

@@ -7,7 +7,6 @@

 use rustc_abi::{FIRST_VARIANT, FieldIdx};
 use rustc_data_structures::fx::{FxHashMap, FxHashSet};
-use rustc_data_structures::stack::ensure_sufficient_stack;
 use rustc_data_structures::unord::UnordMap;
 use rustc_errors::codes::*;
 use rustc_errors::{
@@ -246,13 +245,13 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
             self.diverges.set(self.function_diverges_because_of_empty_arguments.get())
         };

-        let ty = ensure_sufficient_stack(|| match &expr.kind {
+        let ty = match &expr.kind {
             // Intercept the callee path expr and give it better spans.
             hir::ExprKind::Path(
                 qpath @ (hir::QPath::Resolved(..) | hir::QPath::TypeRelative(..)),
             ) => self.check_expr_path(qpath, expr, call_expr_and_args),
             _ => self.check_expr_kind(expr, expected),
-        });
+        };
         let ty = self.resolve_vars_if_possible(ty);

         // Warn for non-block expressions with diverging children.

compiler/rustc_infer/src/infer/relate/generalize.rs
Lines changed: 1 addition & 2 deletions

@@ -1,7 +1,6 @@
 use std::mem;

 use rustc_data_structures::sso::SsoHashMap;
-use rustc_data_structures::stack::ensure_sufficient_stack;
 use rustc_hir::def_id::DefId;
 use rustc_middle::bug;
 use rustc_middle::infer::unify_key::ConstVariableValue;
@@ -445,7 +444,7 @@ impl<'tcx> TypeRelation<TyCtxt<'tcx>> for Generalizer<'_, 'tcx> {
         debug!(?self.ambient_variance, "new ambient variance");
         // Recursive calls to `relate` can overflow the stack. For example a deeper version of
         // `ui/associated-consts/issue-93775.rs`.
-        let r = ensure_sufficient_stack(|| self.relate(a, b));
+        let r = self.relate(a, b);
         self.ambient_variance = old_ambient_variance;
         r
     }

compiler/rustc_lint/src/early.rs
Lines changed: 1 addition & 2 deletions

@@ -7,7 +7,6 @@
 use rustc_ast::ptr::P;
 use rustc_ast::visit::{self as ast_visit, Visitor, walk_list};
 use rustc_ast::{self as ast, HasAttrs};
-use rustc_data_structures::stack::ensure_sufficient_stack;
 use rustc_feature::Features;
 use rustc_middle::ty::RegisteredTools;
 use rustc_session::Session;
@@ -60,7 +59,7 @@ impl<'a, T: EarlyLintPass> EarlyContextAndPass<'a, T> {
         self.inlined_check_id(id);
         debug!("early context: enter_attrs({:?})", attrs);
         lint_callback!(self, check_attributes, attrs);
-        ensure_sufficient_stack(|| f(self));
+        f(self);
         debug!("early context: exit_attrs({:?})", attrs);
         lint_callback!(self, check_attributes_post, attrs);
         self.context.builder.pop(push);
