compiler: Stop manually extending the stack #134153

Draft: wants to merge 1 commit into master

8 changes: 1 addition & 7 deletions compiler/rustc_ast/src/ast.rs
@@ -26,7 +26,6 @@ pub use UnsafeSource::*;
 pub use rustc_ast_ir::{Movability, Mutability, Pinnedness};
 use rustc_data_structures::packed::Pu128;
 use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
-use rustc_data_structures::stack::ensure_sufficient_stack;
 use rustc_data_structures::sync::Lrc;
 use rustc_macros::{Decodable, Encodable, HashStable_Generic};
 pub use rustc_span::AttrId;
@@ -2192,12 +2191,7 @@ pub struct Ty {

 impl Clone for Ty {
     fn clone(&self) -> Self {
-        ensure_sufficient_stack(|| Self {
-            id: self.id,
-            kind: self.kind.clone(),
-            span: self.span,
-            tokens: self.tokens.clone(),
-        })
+        Self { id: self.id, kind: self.kind.clone(), span: self.span, tokens: self.tokens.clone() }
     }
 }
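For context on why a plain `Clone` impl needed stack protection at all: `Ty` is recursive through its `kind`, so cloning recurses once per level of type nesting. A minimal self-contained sketch of that failure mode, using hypothetical stand-in types rather than rustc's:

```rust
// Hypothetical stand-in for a recursive AST node such as `ast::Ty`.
#[derive(Clone)]
enum Ty {
    Unit,
    Ref(Box<Ty>), // e.g. `&T` wraps another `Ty`
}

fn main() {
    // The moral equivalent of `&&& ... &()` with 20_000 references.
    let mut ty = Ty::Unit;
    for _ in 0..20_000 {
        ty = Ty::Ref(Box::new(ty));
    }
    // The derived `Clone` descends one stack frame per `Ref` layer, so deep
    // nesting converts input size directly into stack depth.
    let _copy = ty.clone();
}
```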
3 changes: 1 addition & 2 deletions compiler/rustc_ast/src/mut_visit.rs
@@ -11,7 +11,6 @@ use std::ops::DerefMut;
 use std::panic;

 use rustc_data_structures::flat_map_in_place::FlatMapInPlace;
-use rustc_data_structures::stack::ensure_sufficient_stack;
 use rustc_data_structures::sync::Lrc;
 use rustc_span::Span;
 use rustc_span::source_map::Spanned;
@@ -1653,7 +1652,7 @@ pub fn walk_expr<T: MutVisitor>(vis: &mut T, Expr { kind, id, span, attrs, tokens }: &mut Expr) {
         ExprKind::If(cond, tr, fl) => {
             vis.visit_expr(cond);
             vis.visit_block(tr);
-            visit_opt(fl, |fl| ensure_sufficient_stack(|| vis.visit_expr(fl)));
+            visit_opt(fl, |fl| vis.visit_expr(fl));
         }
         ExprKind::While(cond, body, label) => {
             visit_opt(label, |label| vis.visit_label(label));
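The single guard in `walk_expr` sat on the `else` branch because a long `else if` chain parses as `ExprKind::If` nodes nested in each predecessor's `else` slot, so the visitor recurses once per link. A hedged sketch of generating such an input; the file name and chain length are illustrative:

```rust
use std::fmt::Write;

fn main() {
    let links = 50_000;
    let mut src = String::from("fn main() {\n    let x = 0u32;\n    ");
    for i in 0..links {
        // Each `else if` becomes an `If` expression inside the previous
        // `if`'s `else` slot, nesting the AST one level deeper per link.
        write!(src, "if x == {i} {{}} else ").unwrap();
    }
    src.push_str("{}\n}\n");
    // Compiling this file drives one recursive `visit_expr` per `else if`.
    std::fs::write("deep_else_if.rs", src).unwrap();
}
```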
213 changes: 105 additions & 108 deletions compiler/rustc_ast_lowering/src/pat.rs
@@ -1,6 +1,5 @@
 use rustc_ast::ptr::P;
 use rustc_ast::*;
-use rustc_data_structures::stack::ensure_sufficient_stack;
 use rustc_hir as hir;
 use rustc_hir::def::Res;
 use rustc_span::Span;
@@ -19,117 +18,115 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
     }

     fn lower_pat_mut(&mut self, mut pattern: &Pat) -> hir::Pat<'hir> {
-        ensure_sufficient_stack(|| {
         // loop here to avoid recursion
         let pat_hir_id = self.lower_node_id(pattern.id);
         let node = loop {
             match &pattern.kind {
                 PatKind::Wild => break hir::PatKind::Wild,
                 PatKind::Never => break hir::PatKind::Never,
                 PatKind::Ident(binding_mode, ident, sub) => {
                     let lower_sub = |this: &mut Self| sub.as_ref().map(|s| this.lower_pat(s));
                     break self.lower_pat_ident(
                         pattern,
                         *binding_mode,
                         *ident,
                         pat_hir_id,
                         lower_sub,
                     );
                 }
                 PatKind::Lit(e) => {
                     break hir::PatKind::Lit(self.lower_expr_within_pat(e, false));
                 }
                 PatKind::TupleStruct(qself, path, pats) => {
                     let qpath = self.lower_qpath(
                         pattern.id,
                         qself,
                         path,
                         ParamMode::Optional,
                         AllowReturnTypeNotation::No,
                         ImplTraitContext::Disallowed(ImplTraitPosition::Path),
                         None,
                     );
                     let (pats, ddpos) = self.lower_pat_tuple(pats, "tuple struct");
                     break hir::PatKind::TupleStruct(qpath, pats, ddpos);
                 }
                 PatKind::Or(pats) => {
                     break hir::PatKind::Or(
                         self.arena.alloc_from_iter(pats.iter().map(|x| self.lower_pat_mut(x))),
                     );
                 }
                 PatKind::Path(qself, path) => {
                     let qpath = self.lower_qpath(
                         pattern.id,
                         qself,
                         path,
                         ParamMode::Optional,
                         AllowReturnTypeNotation::No,
                         ImplTraitContext::Disallowed(ImplTraitPosition::Path),
                         None,
                     );
                     break hir::PatKind::Path(qpath);
                 }
                 PatKind::Struct(qself, path, fields, etc) => {
                     let qpath = self.lower_qpath(
                         pattern.id,
                         qself,
                         path,
                         ParamMode::Optional,
                         AllowReturnTypeNotation::No,
                         ImplTraitContext::Disallowed(ImplTraitPosition::Path),
                         None,
                     );

                     let fs = self.arena.alloc_from_iter(fields.iter().map(|f| {
                         let hir_id = self.lower_node_id(f.id);
                         self.lower_attrs(hir_id, &f.attrs);

                         hir::PatField {
                             hir_id,
                             ident: self.lower_ident(f.ident),
                             pat: self.lower_pat(&f.pat),
                             is_shorthand: f.is_shorthand,
                             span: self.lower_span(f.span),
                         }
                     }));
                     break hir::PatKind::Struct(qpath, fs, *etc == ast::PatFieldsRest::Rest);
                 }
                 PatKind::Tuple(pats) => {
                     let (pats, ddpos) = self.lower_pat_tuple(pats, "tuple");
                     break hir::PatKind::Tuple(pats, ddpos);
                 }
                 PatKind::Box(inner) => {
                     break hir::PatKind::Box(self.lower_pat(inner));
                 }
                 PatKind::Deref(inner) => {
                     break hir::PatKind::Deref(self.lower_pat(inner));
                 }
                 PatKind::Ref(inner, mutbl) => {
                     break hir::PatKind::Ref(self.lower_pat(inner), *mutbl);
                 }
                 PatKind::Range(e1, e2, Spanned { node: end, .. }) => {
                     break hir::PatKind::Range(
                         e1.as_deref().map(|e| self.lower_expr_within_pat(e, true)),
                         e2.as_deref().map(|e| self.lower_expr_within_pat(e, true)),
                         self.lower_range_end(end, e2.is_some()),
                     );
                 }
                 // FIXME(guard_patterns): lower pattern guards to HIR
                 PatKind::Guard(inner, _) => pattern = inner,
                 PatKind::Slice(pats) => break self.lower_pat_slice(pats),
                 PatKind::Rest => {
                     // If we reach here the `..` pattern is not semantically allowed.
                     break self.ban_illegal_rest_pat(pattern.span);
                 }
                 // return inner to be processed in next loop
                 PatKind::Paren(inner) => pattern = inner,
                 PatKind::MacCall(_) => panic!("{:?} shouldn't exist here", pattern.span),
                 PatKind::Err(guar) => break hir::PatKind::Err(*guar),
             }
         };

         self.pat_with_node_id_of(pattern, node, pat_hir_id)
-        })
     }

fn lower_pat_tuple(
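Note that `lower_pat_mut` already avoids recursion for single-child wrappers: the `Paren` and `Guard` arms reassign `pattern` and continue the loop rather than calling back into the function. A self-contained sketch of that iteration-over-recursion pattern, with hypothetical stand-in types rather than rustc's:

```rust
enum Pat {
    Wild,
    Paren(Box<Pat>),
}

fn lower(mut pattern: &Pat) -> &'static str {
    // loop here to avoid recursion, as in `lower_pat_mut`.
    loop {
        match pattern {
            Pat::Wild => break "wild",
            // No recursive call: continue the loop on the inner pattern,
            // so `((((_))))` of any depth lowers in constant stack space.
            Pat::Paren(inner) => pattern = &**inner,
        }
    }
}

fn main() {
    let mut p = Pat::Wild;
    for _ in 0..1_000_000 {
        p = Pat::Paren(Box::new(p));
    }
    println!("{}", lower(&p));
    // `Drop` for a chain this deep would itself recurse; leak it to keep
    // the sketch focused on the lowering loop.
    std::mem::forget(p);
}
```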
3 changes: 1 addition & 2 deletions compiler/rustc_const_eval/src/const_eval/valtrees.rs
@@ -1,5 +1,4 @@
 use rustc_abi::{BackendRepr, VariantIdx};
-use rustc_data_structures::stack::ensure_sufficient_stack;
 use rustc_middle::mir::interpret::{EvalToValTreeResult, GlobalId, ReportedErrorInfo};
 use rustc_middle::ty::layout::{LayoutCx, LayoutOf, TyAndLayout};
 use rustc_middle::ty::{self, ScalarInt, Ty, TyCtxt};
@@ -105,7 +104,7 @@ fn const_to_valtree_inner<'tcx>(
             // Since the returned valtree does not contain the type or layout, we can just
             // switch to the base type.
             place.layout = ecx.layout_of(*base).unwrap();
-            ensure_sufficient_stack(|| const_to_valtree_inner(ecx, &place, num_nodes))
+            const_to_valtree_inner(ecx, &place, num_nodes)
         },
11 changes: 1 addition & 10 deletions compiler/rustc_data_structures/src/stack.rs
@@ -1,15 +1,6 @@
-// This is the amount of bytes that need to be left on the stack before increasing the size.
-// It must be at least as large as the stack required by any code that does not call
-// `ensure_sufficient_stack`.
-const RED_ZONE: usize = 100 * 1024; // 100k
-
-// Only the first stack that is pushed, grows exponentially (2^n * STACK_PER_RECURSION) from then
-// on. This flag has performance relevant characteristics. Don't set it too high.
-#[cfg(not(target_os = "aix"))]
-const STACK_PER_RECURSION: usize = 1024 * 1024; // 1MB
-// LLVM for AIX doesn't feature TCO, increase recursion size for workaround.
-#[cfg(target_os = "aix")]
-const STACK_PER_RECURSION: usize = 16 * 1024 * 1024; // 16MB

 /// Grows the stack on demand to prevent stack overflow. Call this in strategic locations
 /// to "break up" recursive calls. E.g. almost any call to `visit_expr` or equivalent can benefit
@@ -18,5 +9,5 @@ const STACK_PER_RECURSION: usize = 16 * 1024 * 1024; // 16MB
 /// Should not be sprinkled around carelessly, as it causes a little bit of overhead.
 #[inline]
 pub fn ensure_sufficient_stack<R>(f: impl FnOnce() -> R) -> R {
-    stacker::maybe_grow(RED_ZONE, STACK_PER_RECURSION, f)
+    f()
 }
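For reference, the deleted body delegated to the `stacker` crate: `stacker::maybe_grow(red_zone, stack_size, f)` calls `f` directly while at least `red_zone` bytes of stack remain, and otherwise runs it on a freshly allocated segment of `stack_size` bytes. A minimal sketch of the removed mechanism, assuming `stacker` as a dependency; the recursive function is illustrative:

```rust
fn depth(n: u64) -> u64 {
    // Mirrors the deleted constants: a 100 KiB red zone, 1 MiB segments.
    stacker::maybe_grow(100 * 1024, 1024 * 1024, || {
        if n == 0 { 0 } else { 1 + depth(n - 1) }
    })
}

fn main() {
    // Recursion this deep would overflow a default thread stack without
    // on-demand growth; with it, each exhausted segment is replaced.
    println!("{}", depth(2_000_000));
}
```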
5 changes: 2 additions & 3 deletions compiler/rustc_hir_typeck/src/expr.rs
@@ -7,7 +7,6 @@

 use rustc_abi::{FIRST_VARIANT, FieldIdx};
 use rustc_data_structures::fx::{FxHashMap, FxHashSet};
-use rustc_data_structures::stack::ensure_sufficient_stack;
 use rustc_data_structures::unord::UnordMap;
 use rustc_errors::codes::*;
 use rustc_errors::{
@@ -246,13 +245,13 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
             self.diverges.set(self.function_diverges_because_of_empty_arguments.get())
         };

-        let ty = ensure_sufficient_stack(|| match &expr.kind {
+        let ty = match &expr.kind {
             // Intercept the callee path expr and give it better spans.
             hir::ExprKind::Path(
                 qpath @ (hir::QPath::Resolved(..) | hir::QPath::TypeRelative(..)),
             ) => self.check_expr_path(qpath, expr, call_expr_and_args),
             _ => self.check_expr_kind(expr, expected),
-        });
+        };
         let ty = self.resolve_vars_if_possible(ty);

         // Warn for non-block expressions with diverging children.
3 changes: 1 addition & 2 deletions compiler/rustc_infer/src/infer/relate/generalize.rs
@@ -1,7 +1,6 @@
 use std::mem;

 use rustc_data_structures::sso::SsoHashMap;
-use rustc_data_structures::stack::ensure_sufficient_stack;
 use rustc_hir::def_id::DefId;
 use rustc_middle::bug;
 use rustc_middle::infer::unify_key::ConstVariableValue;
@@ -445,7 +444,7 @@ impl<'tcx> TypeRelation<TyCtxt<'tcx>> for Generalizer<'_, 'tcx> {
         debug!(?self.ambient_variance, "new ambient variance");
         // Recursive calls to `relate` can overflow the stack. For example a deeper version of
         // `ui/associated-consts/issue-93775.rs`.
-        let r = ensure_sufficient_stack(|| self.relate(a, b));
+        let r = self.relate(a, b);
         self.ambient_variance = old_ambient_variance;
         r
     }
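The deleted guard's comment points at the real stressor here: `relate` walks two types structurally, so nesting depth translates directly into recursion depth. A hedged sketch of generating an input in the spirit of `issue-93775.rs`; the depth and file name are illustrative:

```rust
fn main() {
    // A deeply nested reference type `&&&&...&u8`: relating each layer of
    // the two sides costs one recursive `relate` call per `&`.
    let depth = 30_000;
    let ty = format!("{}u8", "&".repeat(depth));
    let src = format!("fn f(_: {ty}) {{}}\nfn main() {{}}\n");
    std::fs::write("deep_relate.rs", src).unwrap();
}
```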
3 changes: 1 addition & 2 deletions compiler/rustc_lint/src/early.rs
@@ -7,7 +7,6 @@
 use rustc_ast::ptr::P;
 use rustc_ast::visit::{self as ast_visit, Visitor, walk_list};
 use rustc_ast::{self as ast, HasAttrs};
-use rustc_data_structures::stack::ensure_sufficient_stack;
 use rustc_feature::Features;
 use rustc_middle::ty::RegisteredTools;
 use rustc_session::Session;
@@ -60,7 +59,7 @@ impl<'a, T: EarlyLintPass> EarlyContextAndPass<'a, T> {
         self.inlined_check_id(id);
         debug!("early context: enter_attrs({:?})", attrs);
         lint_callback!(self, check_attributes, attrs);
-        ensure_sufficient_stack(|| f(self));
+        f(self);
         debug!("early context: exit_attrs({:?})", attrs);
         lint_callback!(self, check_attributes_post, attrs);
         self.context.builder.pop(push);