Prevent stack overflow for deeply recursive code

Oliver Scherer 2018-11-02 16:14:24 +01:00 committed by Mateusz Mikuła
parent 7184d137f6
commit 26edcee093
14 changed files with 448 additions and 354 deletions

@@ -2630,6 +2630,15 @@ dependencies = [
"core",
]
[[package]]
name = "psm"
version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b14fc68b454f875abc8354c2555e1d56596f74833ddc0f77f87f4871ed6a30e0"
dependencies = [
"cc",
]
[[package]]
name = "publicsuffix"
version = "1.5.3"
@@ -3152,6 +3161,7 @@ checksum = "81dfcfbb0ddfd533abf8c076e3b49d1e5042d1962526a12ce2c66d514b24cca3"
dependencies = [
"rustc-ap-rustc_data_structures",
"smallvec 1.0.0",
"stacker",
]
[[package]]
@@ -4657,6 +4667,19 @@ version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ffbc596e092fe5f598b12ef46cc03754085ac2f4d8c739ad61c4ae266cc3b3fa"
[[package]]
name = "stacker"
version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d96fc4f13a0ac088e9a3cd9af1cc8c5cc1ab5deb2145cef661267dfc9c542f8a"
dependencies = [
"cc",
"cfg-if",
"libc",
"psm",
"winapi 0.3.8",
]
[[package]]
name = "std"
version = "0.0.0"

@@ -7,6 +7,7 @@ use rustc_data_structures::thin_vec::ThinVec;
use rustc_errors::struct_span_err;
use rustc_hir as hir;
use rustc_hir::def::Res;
use rustc_middle::limits::ensure_sufficient_stack;
use rustc_span::source_map::{respan, DesugaringKind, Span, Spanned};
use rustc_span::symbol::{sym, Symbol};
@@ -20,192 +21,206 @@ impl<'hir> LoweringContext<'_, 'hir> {
}
pub(super) fn lower_expr_mut(&mut self, e: &Expr) -> hir::Expr<'hir> {
let kind = match e.kind {
ExprKind::Box(ref inner) => hir::ExprKind::Box(self.lower_expr(inner)),
ExprKind::Array(ref exprs) => hir::ExprKind::Array(self.lower_exprs(exprs)),
ExprKind::Repeat(ref expr, ref count) => {
let expr = self.lower_expr(expr);
let count = self.lower_anon_const(count);
hir::ExprKind::Repeat(expr, count)
}
ExprKind::Tup(ref elts) => hir::ExprKind::Tup(self.lower_exprs(elts)),
ExprKind::Call(ref f, ref args) => {
let f = self.lower_expr(f);
hir::ExprKind::Call(f, self.lower_exprs(args))
}
ExprKind::MethodCall(ref seg, ref args) => {
let hir_seg = self.arena.alloc(self.lower_path_segment(
e.span,
seg,
ParamMode::Optional,
0,
ParenthesizedGenericArgs::Err,
ImplTraitContext::disallowed(),
None,
));
let args = self.lower_exprs(args);
hir::ExprKind::MethodCall(hir_seg, seg.ident.span, args)
}
ExprKind::Binary(binop, ref lhs, ref rhs) => {
let binop = self.lower_binop(binop);
let lhs = self.lower_expr(lhs);
let rhs = self.lower_expr(rhs);
hir::ExprKind::Binary(binop, lhs, rhs)
}
ExprKind::Unary(op, ref ohs) => {
let op = self.lower_unop(op);
let ohs = self.lower_expr(ohs);
hir::ExprKind::Unary(op, ohs)
}
ExprKind::Lit(ref l) => hir::ExprKind::Lit(respan(l.span, l.kind.clone())),
ExprKind::Cast(ref expr, ref ty) => {
let expr = self.lower_expr(expr);
let ty = self.lower_ty(ty, ImplTraitContext::disallowed());
hir::ExprKind::Cast(expr, ty)
}
ExprKind::Type(ref expr, ref ty) => {
let expr = self.lower_expr(expr);
let ty = self.lower_ty(ty, ImplTraitContext::disallowed());
hir::ExprKind::Type(expr, ty)
}
ExprKind::AddrOf(k, m, ref ohs) => {
let ohs = self.lower_expr(ohs);
hir::ExprKind::AddrOf(k, m, ohs)
}
ExprKind::Let(ref pat, ref scrutinee) => self.lower_expr_let(e.span, pat, scrutinee),
ExprKind::If(ref cond, ref then, ref else_opt) => {
self.lower_expr_if(e.span, cond, then, else_opt.as_deref())
}
ExprKind::While(ref cond, ref body, opt_label) => self.with_loop_scope(e.id, |this| {
this.lower_expr_while_in_loop_scope(e.span, cond, body, opt_label)
}),
ExprKind::Loop(ref body, opt_label) => self.with_loop_scope(e.id, |this| {
hir::ExprKind::Loop(this.lower_block(body, false), opt_label, hir::LoopSource::Loop)
}),
ExprKind::TryBlock(ref body) => self.lower_expr_try_block(body),
ExprKind::Match(ref expr, ref arms) => hir::ExprKind::Match(
self.lower_expr(expr),
self.arena.alloc_from_iter(arms.iter().map(|x| self.lower_arm(x))),
hir::MatchSource::Normal,
),
ExprKind::Async(capture_clause, closure_node_id, ref block) => self.make_async_expr(
capture_clause,
closure_node_id,
None,
block.span,
hir::AsyncGeneratorKind::Block,
|this| this.with_new_scopes(|this| this.lower_block_expr(block)),
),
ExprKind::Await(ref expr) => self.lower_expr_await(e.span, expr),
ExprKind::Closure(
capture_clause,
asyncness,
movability,
ref decl,
ref body,
fn_decl_span,
) => {
if let Async::Yes { closure_id, .. } = asyncness {
self.lower_expr_async_closure(
capture_clause,
closure_id,
decl,
body,
fn_decl_span,
)
} else {
self.lower_expr_closure(capture_clause, movability, decl, body, fn_decl_span)
ensure_sufficient_stack(|| {
let kind = match e.kind {
ExprKind::Box(ref inner) => hir::ExprKind::Box(self.lower_expr(inner)),
ExprKind::Array(ref exprs) => hir::ExprKind::Array(self.lower_exprs(exprs)),
ExprKind::Repeat(ref expr, ref count) => {
let expr = self.lower_expr(expr);
let count = self.lower_anon_const(count);
hir::ExprKind::Repeat(expr, count)
}
}
ExprKind::Block(ref blk, opt_label) => {
hir::ExprKind::Block(self.lower_block(blk, opt_label.is_some()), opt_label)
}
ExprKind::Assign(ref el, ref er, span) => {
hir::ExprKind::Assign(self.lower_expr(el), self.lower_expr(er), span)
}
ExprKind::AssignOp(op, ref el, ref er) => hir::ExprKind::AssignOp(
self.lower_binop(op),
self.lower_expr(el),
self.lower_expr(er),
),
ExprKind::Field(ref el, ident) => hir::ExprKind::Field(self.lower_expr(el), ident),
ExprKind::Index(ref el, ref er) => {
hir::ExprKind::Index(self.lower_expr(el), self.lower_expr(er))
}
ExprKind::Range(Some(ref e1), Some(ref e2), RangeLimits::Closed) => {
self.lower_expr_range_closed(e.span, e1, e2)
}
ExprKind::Range(ref e1, ref e2, lims) => {
self.lower_expr_range(e.span, e1.as_deref(), e2.as_deref(), lims)
}
ExprKind::Path(ref qself, ref path) => {
let qpath = self.lower_qpath(
e.id,
qself,
path,
ParamMode::Optional,
ImplTraitContext::disallowed(),
);
hir::ExprKind::Path(qpath)
}
ExprKind::Break(opt_label, ref opt_expr) => {
let opt_expr = opt_expr.as_ref().map(|x| self.lower_expr(x));
hir::ExprKind::Break(self.lower_jump_destination(e.id, opt_label), opt_expr)
}
ExprKind::Continue(opt_label) => {
hir::ExprKind::Continue(self.lower_jump_destination(e.id, opt_label))
}
ExprKind::Ret(ref e) => {
let e = e.as_ref().map(|x| self.lower_expr(x));
hir::ExprKind::Ret(e)
}
ExprKind::LlvmInlineAsm(ref asm) => self.lower_expr_asm(asm),
ExprKind::Struct(ref path, ref fields, ref maybe_expr) => {
let maybe_expr = maybe_expr.as_ref().map(|x| self.lower_expr(x));
hir::ExprKind::Struct(
self.arena.alloc(self.lower_qpath(
ExprKind::Tup(ref elts) => hir::ExprKind::Tup(self.lower_exprs(elts)),
ExprKind::Call(ref f, ref args) => {
let f = self.lower_expr(f);
hir::ExprKind::Call(f, self.lower_exprs(args))
}
ExprKind::MethodCall(ref seg, ref args) => {
let hir_seg = self.arena.alloc(self.lower_path_segment(
e.span,
seg,
ParamMode::Optional,
0,
ParenthesizedGenericArgs::Err,
ImplTraitContext::disallowed(),
None,
));
let args = self.lower_exprs(args);
hir::ExprKind::MethodCall(hir_seg, seg.ident.span, args)
}
ExprKind::Binary(binop, ref lhs, ref rhs) => {
let binop = self.lower_binop(binop);
let lhs = self.lower_expr(lhs);
let rhs = self.lower_expr(rhs);
hir::ExprKind::Binary(binop, lhs, rhs)
}
ExprKind::Unary(op, ref ohs) => {
let op = self.lower_unop(op);
let ohs = self.lower_expr(ohs);
hir::ExprKind::Unary(op, ohs)
}
ExprKind::Lit(ref l) => hir::ExprKind::Lit(respan(l.span, l.kind.clone())),
ExprKind::Cast(ref expr, ref ty) => {
let expr = self.lower_expr(expr);
let ty = self.lower_ty(ty, ImplTraitContext::disallowed());
hir::ExprKind::Cast(expr, ty)
}
ExprKind::Type(ref expr, ref ty) => {
let expr = self.lower_expr(expr);
let ty = self.lower_ty(ty, ImplTraitContext::disallowed());
hir::ExprKind::Type(expr, ty)
}
ExprKind::AddrOf(k, m, ref ohs) => {
let ohs = self.lower_expr(ohs);
hir::ExprKind::AddrOf(k, m, ohs)
}
ExprKind::Let(ref pat, ref scrutinee) => {
self.lower_expr_let(e.span, pat, scrutinee)
}
ExprKind::If(ref cond, ref then, ref else_opt) => {
self.lower_expr_if(e.span, cond, then, else_opt.as_deref())
}
ExprKind::While(ref cond, ref body, opt_label) => self
.with_loop_scope(e.id, |this| {
this.lower_expr_while_in_loop_scope(e.span, cond, body, opt_label)
}),
ExprKind::Loop(ref body, opt_label) => self.with_loop_scope(e.id, |this| {
hir::ExprKind::Loop(
this.lower_block(body, false),
opt_label,
hir::LoopSource::Loop,
)
}),
ExprKind::TryBlock(ref body) => self.lower_expr_try_block(body),
ExprKind::Match(ref expr, ref arms) => hir::ExprKind::Match(
self.lower_expr(expr),
self.arena.alloc_from_iter(arms.iter().map(|x| self.lower_arm(x))),
hir::MatchSource::Normal,
),
ExprKind::Async(capture_clause, closure_node_id, ref block) => self
.make_async_expr(
capture_clause,
closure_node_id,
None,
block.span,
hir::AsyncGeneratorKind::Block,
|this| this.with_new_scopes(|this| this.lower_block_expr(block)),
),
ExprKind::Await(ref expr) => self.lower_expr_await(e.span, expr),
ExprKind::Closure(
capture_clause,
asyncness,
movability,
ref decl,
ref body,
fn_decl_span,
) => {
if let Async::Yes { closure_id, .. } = asyncness {
self.lower_expr_async_closure(
capture_clause,
closure_id,
decl,
body,
fn_decl_span,
)
} else {
self.lower_expr_closure(
capture_clause,
movability,
decl,
body,
fn_decl_span,
)
}
}
ExprKind::Block(ref blk, opt_label) => {
hir::ExprKind::Block(self.lower_block(blk, opt_label.is_some()), opt_label)
}
ExprKind::Assign(ref el, ref er, span) => {
hir::ExprKind::Assign(self.lower_expr(el), self.lower_expr(er), span)
}
ExprKind::AssignOp(op, ref el, ref er) => hir::ExprKind::AssignOp(
self.lower_binop(op),
self.lower_expr(el),
self.lower_expr(er),
),
ExprKind::Field(ref el, ident) => hir::ExprKind::Field(self.lower_expr(el), ident),
ExprKind::Index(ref el, ref er) => {
hir::ExprKind::Index(self.lower_expr(el), self.lower_expr(er))
}
ExprKind::Range(Some(ref e1), Some(ref e2), RangeLimits::Closed) => {
self.lower_expr_range_closed(e.span, e1, e2)
}
ExprKind::Range(ref e1, ref e2, lims) => {
self.lower_expr_range(e.span, e1.as_deref(), e2.as_deref(), lims)
}
ExprKind::Path(ref qself, ref path) => {
let qpath = self.lower_qpath(
e.id,
&None,
qself,
path,
ParamMode::Optional,
ImplTraitContext::disallowed(),
)),
self.arena.alloc_from_iter(fields.iter().map(|x| self.lower_field(x))),
maybe_expr,
)
}
ExprKind::Paren(ref ex) => {
let mut ex = self.lower_expr_mut(ex);
// Include parens in span, but only if it is a super-span.
if e.span.contains(ex.span) {
ex.span = e.span;
);
hir::ExprKind::Path(qpath)
}
// Merge attributes into the inner expression.
let mut attrs = e.attrs.clone();
attrs.extend::<Vec<_>>(ex.attrs.into());
ex.attrs = attrs;
return ex;
ExprKind::Break(opt_label, ref opt_expr) => {
let opt_expr = opt_expr.as_ref().map(|x| self.lower_expr(x));
hir::ExprKind::Break(self.lower_jump_destination(e.id, opt_label), opt_expr)
}
ExprKind::Continue(opt_label) => {
hir::ExprKind::Continue(self.lower_jump_destination(e.id, opt_label))
}
ExprKind::Ret(ref e) => {
let e = e.as_ref().map(|x| self.lower_expr(x));
hir::ExprKind::Ret(e)
}
ExprKind::LlvmInlineAsm(ref asm) => self.lower_expr_asm(asm),
ExprKind::Struct(ref path, ref fields, ref maybe_expr) => {
let maybe_expr = maybe_expr.as_ref().map(|x| self.lower_expr(x));
hir::ExprKind::Struct(
self.arena.alloc(self.lower_qpath(
e.id,
&None,
path,
ParamMode::Optional,
ImplTraitContext::disallowed(),
)),
self.arena.alloc_from_iter(fields.iter().map(|x| self.lower_field(x))),
maybe_expr,
)
}
ExprKind::Yield(ref opt_expr) => self.lower_expr_yield(e.span, opt_expr.as_deref()),
ExprKind::Err => hir::ExprKind::Err,
ExprKind::Try(ref sub_expr) => self.lower_expr_try(e.span, sub_expr),
ExprKind::Paren(ref ex) => {
let mut ex = self.lower_expr_mut(ex);
// Include parens in span, but only if it is a super-span.
if e.span.contains(ex.span) {
ex.span = e.span;
}
// Merge attributes into the inner expression.
let mut attrs = e.attrs.clone();
attrs.extend::<Vec<_>>(ex.attrs.into());
ex.attrs = attrs;
return ex;
}
// Desugar `ExprForLoop`
// from: `[opt_ident]: for <pat> in <head> <body>`
ExprKind::ForLoop(ref pat, ref head, ref body, opt_label) => {
return self.lower_expr_for(e, pat, head, body, opt_label);
}
ExprKind::MacCall(_) => panic!("{:?} shouldn't exist here", e.span),
};
hir::Expr {
hir_id: self.lower_node_id(e.id),
kind,
span: e.span,
attrs: e.attrs.iter().map(|a| self.lower_attr(a)).collect::<Vec<_>>().into(),
}
ExprKind::Yield(ref opt_expr) => self.lower_expr_yield(e.span, opt_expr.as_deref()),
ExprKind::Err => hir::ExprKind::Err,
// Desugar `ExprForLoop`
// from: `[opt_ident]: for <pat> in <head> <body>`
ExprKind::ForLoop(ref pat, ref head, ref body, opt_label) => {
return self.lower_expr_for(e, pat, head, body, opt_label);
}
ExprKind::Try(ref sub_expr) => self.lower_expr_try(e.span, sub_expr),
ExprKind::MacCall(_) => panic!("Shouldn't exist here"),
};
hir::Expr {
hir_id: self.lower_node_id(e.id),
kind,
span: e.span,
attrs: e.attrs.iter().map(|a| self.lower_attr(a)).collect::<Vec<_>>().into(),
}
})
}
fn lower_unop(&mut self, u: UnOp) -> hir::UnOp {

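The pattern above, repeated in `lower_pat` below, wraps the entire recursive match in a closure handed to `ensure_sufficient_stack`, so every level of AST recursion gets a chance to grow the stack. A minimal sketch of that shape, using a made-up `Expr` type and the `stacker` crate directly rather than rustc's own types:

// Illustrative stand-in AST, not rustc's: an expression that can nest deeply.
enum Expr {
    Lit(i64),
    Neg(Box<Expr>),
}

// Same shape as the helper this commit adds in limits.rs further down:
// grow the stack when fewer than 100KB remain.
fn ensure_sufficient_stack<R, F: FnOnce() -> R>(f: F) -> R {
    stacker::maybe_grow(100 * 1024, 1024 * 1024, f)
}

// The whole recursive match is wrapped, mirroring lower_expr_mut above.
fn lower(e: &Expr) -> i64 {
    ensure_sufficient_stack(|| match e {
        Expr::Lit(v) => *v,
        Expr::Neg(inner) => -lower(inner),
    })
}

fn main() {
    // Build a 500_000-deep chain of negations iteratively, then lower it.
    let mut e = Expr::Lit(1);
    for _ in 0..500_000 {
        e = Expr::Neg(Box::new(e));
    }
    assert_eq!(lower(&e), 1); // an even number of negations
    // Leak the chain: the default recursive Drop would itself overflow.
    std::mem::forget(e);
}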
@@ -4,81 +4,87 @@ use rustc_ast::ast::*;
use rustc_ast::ptr::P;
use rustc_hir as hir;
use rustc_hir::def::Res;
use rustc_middle::limits::ensure_sufficient_stack;
use rustc_span::{source_map::Spanned, Span};
impl<'a, 'hir> LoweringContext<'a, 'hir> {
crate fn lower_pat(&mut self, p: &Pat) -> &'hir hir::Pat<'hir> {
let node = match p.kind {
PatKind::Wild => hir::PatKind::Wild,
PatKind::Ident(ref binding_mode, ident, ref sub) => {
let lower_sub = |this: &mut Self| sub.as_ref().map(|s| this.lower_pat(&*s));
let node = self.lower_pat_ident(p, binding_mode, ident, lower_sub);
node
}
PatKind::Lit(ref e) => hir::PatKind::Lit(self.lower_expr(e)),
PatKind::TupleStruct(ref path, ref pats) => {
let qpath = self.lower_qpath(
p.id,
&None,
path,
ParamMode::Optional,
ImplTraitContext::disallowed(),
);
let (pats, ddpos) = self.lower_pat_tuple(pats, "tuple struct");
hir::PatKind::TupleStruct(qpath, pats, ddpos)
}
PatKind::Or(ref pats) => {
hir::PatKind::Or(self.arena.alloc_from_iter(pats.iter().map(|x| self.lower_pat(x))))
}
PatKind::Path(ref qself, ref path) => {
let qpath = self.lower_qpath(
p.id,
qself,
path,
ParamMode::Optional,
ImplTraitContext::disallowed(),
);
hir::PatKind::Path(qpath)
}
PatKind::Struct(ref path, ref fields, etc) => {
let qpath = self.lower_qpath(
p.id,
&None,
path,
ParamMode::Optional,
ImplTraitContext::disallowed(),
);
ensure_sufficient_stack(|| {
let node = match p.kind {
PatKind::Wild => hir::PatKind::Wild,
PatKind::Ident(ref binding_mode, ident, ref sub) => {
let lower_sub = |this: &mut Self| sub.as_ref().map(|s| this.lower_pat(&*s));
let node = self.lower_pat_ident(p, binding_mode, ident, lower_sub);
node
}
PatKind::Lit(ref e) => hir::PatKind::Lit(self.lower_expr(e)),
PatKind::TupleStruct(ref path, ref pats) => {
let qpath = self.lower_qpath(
p.id,
&None,
path,
ParamMode::Optional,
ImplTraitContext::disallowed(),
);
let (pats, ddpos) = self.lower_pat_tuple(pats, "tuple struct");
hir::PatKind::TupleStruct(qpath, pats, ddpos)
}
PatKind::Or(ref pats) => hir::PatKind::Or(
self.arena.alloc_from_iter(pats.iter().map(|x| self.lower_pat(x))),
),
PatKind::Path(ref qself, ref path) => {
let qpath = self.lower_qpath(
p.id,
qself,
path,
ParamMode::Optional,
ImplTraitContext::disallowed(),
);
hir::PatKind::Path(qpath)
}
PatKind::Struct(ref path, ref fields, etc) => {
let qpath = self.lower_qpath(
p.id,
&None,
path,
ParamMode::Optional,
ImplTraitContext::disallowed(),
);
let fs = self.arena.alloc_from_iter(fields.iter().map(|f| hir::FieldPat {
hir_id: self.next_id(),
ident: f.ident,
pat: self.lower_pat(&f.pat),
is_shorthand: f.is_shorthand,
span: f.span,
}));
hir::PatKind::Struct(qpath, fs, etc)
}
PatKind::Tuple(ref pats) => {
let (pats, ddpos) = self.lower_pat_tuple(pats, "tuple");
hir::PatKind::Tuple(pats, ddpos)
}
PatKind::Box(ref inner) => hir::PatKind::Box(self.lower_pat(inner)),
PatKind::Ref(ref inner, mutbl) => hir::PatKind::Ref(self.lower_pat(inner), mutbl),
PatKind::Range(ref e1, ref e2, Spanned { node: ref end, .. }) => hir::PatKind::Range(
e1.as_deref().map(|e| self.lower_expr(e)),
e2.as_deref().map(|e| self.lower_expr(e)),
self.lower_range_end(end, e2.is_some()),
),
PatKind::Slice(ref pats) => self.lower_pat_slice(pats),
PatKind::Rest => {
// If we reach here the `..` pattern is not semantically allowed.
self.ban_illegal_rest_pat(p.span)
}
PatKind::Paren(ref inner) => return self.lower_pat(inner),
PatKind::MacCall(_) => panic!("Shouldn't exist here"),
};
let fs = self.arena.alloc_from_iter(fields.iter().map(|f| hir::FieldPat {
hir_id: self.next_id(),
ident: f.ident,
pat: self.lower_pat(&f.pat),
is_shorthand: f.is_shorthand,
span: f.span,
}));
hir::PatKind::Struct(qpath, fs, etc)
}
PatKind::Tuple(ref pats) => {
let (pats, ddpos) = self.lower_pat_tuple(pats, "tuple");
hir::PatKind::Tuple(pats, ddpos)
}
PatKind::Box(ref inner) => hir::PatKind::Box(self.lower_pat(inner)),
PatKind::Ref(ref inner, mutbl) => hir::PatKind::Ref(self.lower_pat(inner), mutbl),
PatKind::Range(ref e1, ref e2, Spanned { node: ref end, .. }) => {
hir::PatKind::Range(
e1.as_deref().map(|e| self.lower_expr(e)),
e2.as_deref().map(|e| self.lower_expr(e)),
self.lower_range_end(end, e2.is_some()),
)
}
PatKind::Slice(ref pats) => self.lower_pat_slice(pats),
PatKind::Rest => {
// If we reach here the `..` pattern is not semantically allowed.
self.ban_illegal_rest_pat(p.span)
}
// FIXME: consider not using recursion to lower this.
PatKind::Paren(ref inner) => return self.lower_pat(inner),
PatKind::MacCall(_) => panic!("{:?} shouldn't exist here", p.span),
};
self.pat_with_node_id_of(p, node)
self.pat_with_node_id_of(p, node)
})
}
fn lower_pat_tuple(

@@ -80,14 +80,7 @@ pub fn create_session(
(Lrc::new(sess), Lrc::new(codegen_backend), source_map)
}
// Temporarily have stack size set to 32MB to deal with various crates with long method
// chains or deep syntax trees, except when on Haiku.
// FIXME(oli-obk): get https://github.com/rust-lang/rust/pull/55617 over the finish line
#[cfg(not(target_os = "haiku"))]
const STACK_SIZE: usize = 32 * 1024 * 1024;
#[cfg(target_os = "haiku")]
const STACK_SIZE: usize = 16 * 1024 * 1024;
const STACK_SIZE: usize = 2 * 1024 * 1024;
fn get_stack_size() -> Option<usize> {
// FIXME: Hacks on hacks. If the env is trying to override the stack size

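For context, this constant is the stack size rustc requests for the thread it runs on; with `ensure_sufficient_stack` in place, the default can shrink from 32MB back to 2MB. A hedged sketch of that setup with std's thread builder — the function name and wiring are illustrative, not the actual rustc_interface code:

use std::thread;

const STACK_SIZE: usize = 2 * 1024 * 1024; // 2MB, matching the new default above

// Hypothetical helper: run a closure on a thread with an explicit stack size.
fn run_in_thread<F, R>(f: F) -> R
where
    F: FnOnce() -> R + Send + 'static,
    R: Send + 'static,
{
    thread::Builder::new()
        .stack_size(STACK_SIZE)
        .spawn(f)
        .unwrap()
        .join()
        .unwrap()
}

fn main() {
    let answer = run_in_thread(|| 6 * 7);
    assert_eq!(answer, 42);
}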
@@ -34,3 +34,4 @@ byteorder = { version = "1.3" }
smallvec = { version = "1.0", features = ["union", "may_dangle"] }
measureme = "0.7.1"
rustc_session = { path = "../librustc_session" }
stacker = "0.1.6"

@@ -13,6 +13,24 @@ use rustc_span::symbol::{sym, Symbol};
use std::num::IntErrorKind;
// This is the number of bytes that need to be left on the stack before increasing the size.
// It must be at least as large as the stack required by any code that does not call
// `ensure_sufficient_stack`.
const RED_ZONE: usize = 100 * 1024; // 100k
// Only the first stack that is pushed grows exponentially (2^n * STACK_PER_RECURSION) from then
// on. This flag has performance-relevant characteristics. Don't set it too high.
const STACK_PER_RECURSION: usize = 1 * 1024 * 1024; // 1MB
/// Grows the stack on demand to prevent stack overflow. Call this in strategic locations
/// to "break up" recursive calls. E.g. almost any call to `visit_expr` or equivalent can benefit
/// from this.
///
/// Should not be sprinkled around carelessly, as it causes a little bit of overhead.
pub fn ensure_sufficient_stack<R, F: FnOnce() -> R>(f: F) -> R {
stacker::maybe_grow(RED_ZONE, STACK_PER_RECURSION, f)
}
pub fn update_limits(sess: &Session, krate: &ast::Crate) {
update_limit(sess, krate, &sess.recursion_limit, sym::recursion_limit, 128);
update_limit(sess, krate, &sess.type_length_limit, sym::type_length_limit, 1048576);

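A self-contained way to see what the helper buys, assuming the `stacker` dependency added to Cargo.toml above; `deep` is a made-up function, and the constants mirror RED_ZONE and STACK_PER_RECURSION:

// A depth that would overflow the 2MB main-thread stack without the guard.
fn deep(n: u64) -> u64 {
    // 100KB red zone, at least 1MB per new segment, as in the constants above.
    stacker::maybe_grow(100 * 1024, 1024 * 1024, || {
        if n == 0 { 0 } else { 1 + deep(n - 1) }
    })
}

fn main() {
    assert_eq!(deep(1_000_000), 1_000_000);
}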
@@ -1,5 +1,6 @@
pub use self::def_id_forest::DefIdForest;
use crate::middle::limits::ensure_sufficient_stack;
use crate::ty;
use crate::ty::context::TyCtxt;
use crate::ty::TyKind::*;
@@ -196,7 +197,9 @@ impl<'tcx> TyS<'tcx> {
/// Calculates the forest of `DefId`s from which this type is visibly uninhabited.
fn uninhabited_from(&self, tcx: TyCtxt<'tcx>, param_env: ty::ParamEnv<'tcx>) -> DefIdForest {
match self.kind {
Adt(def, substs) => def.uninhabited_from(tcx, substs, param_env),
Adt(def, substs) => {
ensure_sufficient_stack(|| def.uninhabited_from(tcx, substs, param_env))
}
Never => DefIdForest::full(tcx),

@@ -68,7 +68,9 @@ impl QueryContext for TyCtxt<'tcx> {
};
// Use the `ImplicitCtxt` while we execute the query.
tls::enter_context(&new_icx, |_| compute(*self))
tls::enter_context(&new_icx, |_| {
crate::middle::limits::ensure_sufficient_stack(|| compute(*self))
})
})
}
}

@@ -369,7 +369,9 @@ fn collect_items_rec<'tcx>(
recursion_depth_reset = Some(check_recursion_limit(tcx, instance, recursion_depths));
check_type_length_limit(tcx, instance);
collect_neighbours(tcx, instance, &mut neighbors);
rustc::middle::limits::ensure_sufficient_stack(|| {
collect_neighbours(tcx, instance, &mut neighbors);
});
}
MonoItem::GlobalAsm(..) => {
recursion_depth_reset = None;
@@ -1146,7 +1148,9 @@ fn collect_miri<'tcx>(tcx: TyCtxt<'tcx>, alloc_id: AllocId, output: &mut Vec<Mon
Some(GlobalAlloc::Memory(alloc)) => {
trace!("collecting {:?} with {:#?}", alloc_id, alloc);
for &((), inner) in alloc.relocations().values() {
collect_miri(tcx, inner, output);
rustc_middle::limits::ensure_sufficient_stack(|| {
collect_miri(tcx, inner, output);
});
}
}
Some(GlobalAlloc::Function(fn_instance)) => {

@@ -4,6 +4,7 @@ use crate::build::scope::DropKind;
use crate::build::{BlockAnd, BlockAndExtension, Builder};
use crate::hair::*;
use rustc_hir as hir;
use rustc_middle::limits::ensure_sufficient_stack;
use rustc_middle::middle::region;
use rustc_middle::mir::*;
@@ -21,7 +22,11 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
M: Mirror<'tcx, Output = Expr<'tcx>>,
{
let expr = self.hir.mirror(expr);
self.expr_as_temp(block, temp_lifetime, expr, mutability)
//
// this is the only place in mir building where we truly need to worry about
// infinite recursion. Everything else does recurse, too, but it always gets broken up
// at some point by inserting an intermediate temporary
ensure_sufficient_stack(|| self.expr_as_temp(block, temp_lifetime, expr, mutability))
}
fn expr_as_temp(

@@ -20,6 +20,7 @@ use crate::traits::error_reporting::InferCtxtExt;
use rustc_ast::ast::Ident;
use rustc_errors::ErrorReported;
use rustc_hir::def_id::DefId;
use rustc_middle::limits::ensure_sufficient_stack;
use rustc_middle::ty::fold::{TypeFoldable, TypeFolder};
use rustc_middle::ty::subst::{InternalSubsts, Subst};
use rustc_middle::ty::{self, ToPolyTraitRef, ToPredicate, Ty, TyCtxt, WithConstness};
@@ -261,7 +262,7 @@ where
{
debug!("normalize_with_depth(depth={}, value={:?})", depth, value);
let mut normalizer = AssocTypeNormalizer::new(selcx, param_env, cause, depth, obligations);
let result = normalizer.fold(value);
let result = ensure_sufficient_stack(|| normalizer.fold(value));
debug!(
"normalize_with_depth: depth={} result={:?} with {} obligations",
depth,

@@ -8,6 +8,7 @@ use crate::infer::{InferCtxt, InferOk};
use crate::traits::error_reporting::InferCtxtExt;
use crate::traits::{Obligation, ObligationCause, PredicateObligation, Reveal};
use rustc_infer::traits::Normalized;
use rustc_middle::limits::ensure_sufficient_stack;
use rustc_middle::ty::fold::{TypeFoldable, TypeFolder};
use rustc_middle::ty::subst::Subst;
use rustc_middle::ty::{self, Ty, TyCtxt};
@@ -131,7 +132,7 @@ impl<'cx, 'tcx> TypeFolder<'tcx> for QueryNormalizer<'cx, 'tcx> {
ty
);
}
let folded_ty = self.fold_ty(concrete_ty);
let folded_ty = ensure_sufficient_stack(|| self.fold_ty(concrete_ty));
self.anon_depth -= 1;
folded_ty
}

@@ -42,6 +42,7 @@ use rustc_hir::def_id::DefId;
use rustc_hir::lang_items;
use rustc_index::bit_set::GrowableBitSet;
use rustc_middle::dep_graph::{DepKind, DepNodeIndex};
use rustc_middle::limits::ensure_sufficient_stack;
use rustc_middle::ty::fast_reject;
use rustc_middle::ty::relate::TypeRelation;
use rustc_middle::ty::subst::{GenericArg, GenericArgKind, Subst, SubstsRef};
@@ -2365,13 +2366,15 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
self.infcx.commit_unconditionally(|_| {
let (skol_ty, _) = self.infcx.replace_bound_vars_with_placeholders(&ty);
let Normalized { value: normalized_ty, mut obligations } =
project::normalize_with_depth(
self,
param_env,
cause.clone(),
recursion_depth,
&skol_ty,
);
ensure_sufficient_stack(|| {
project::normalize_with_depth(
self,
param_env,
cause.clone(),
recursion_depth,
&skol_ty,
)
});
let skol_obligation = predicate_for_trait_def(
self.tcx(),
param_env,
@@ -2525,13 +2528,15 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
};
let cause = obligation.derived_cause(BuiltinDerivedObligation);
self.collect_predicates_for_types(
obligation.param_env,
cause,
obligation.recursion_depth + 1,
trait_def,
nested,
)
ensure_sufficient_stack(|| {
self.collect_predicates_for_types(
obligation.param_env,
cause,
obligation.recursion_depth + 1,
trait_def,
nested,
)
})
} else {
vec![]
};
@@ -2568,38 +2573,39 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
nested: ty::Binder<Vec<Ty<'tcx>>>,
) -> VtableAutoImplData<PredicateObligation<'tcx>> {
debug!("vtable_auto_impl: nested={:?}", nested);
ensure_sufficient_stack(|| {
let cause = obligation.derived_cause(BuiltinDerivedObligation);
let mut obligations = self.collect_predicates_for_types(
obligation.param_env,
cause,
obligation.recursion_depth + 1,
trait_def_id,
nested,
);
let cause = obligation.derived_cause(BuiltinDerivedObligation);
let mut obligations = self.collect_predicates_for_types(
obligation.param_env,
cause,
obligation.recursion_depth + 1,
trait_def_id,
nested,
);
let trait_obligations: Vec<PredicateObligation<'_>> =
self.infcx.commit_unconditionally(|_| {
let poly_trait_ref = obligation.predicate.to_poly_trait_ref();
let (trait_ref, _) =
self.infcx.replace_bound_vars_with_placeholders(&poly_trait_ref);
let cause = obligation.derived_cause(ImplDerivedObligation);
self.impl_or_trait_obligations(
cause,
obligation.recursion_depth + 1,
obligation.param_env,
trait_def_id,
&trait_ref.substs,
)
});
let trait_obligations: Vec<PredicateObligation<'_>> =
self.infcx.commit_unconditionally(|_| {
let poly_trait_ref = obligation.predicate.to_poly_trait_ref();
let (trait_ref, _) =
self.infcx.replace_bound_vars_with_placeholders(&poly_trait_ref);
let cause = obligation.derived_cause(ImplDerivedObligation);
self.impl_or_trait_obligations(
cause,
obligation.recursion_depth + 1,
obligation.param_env,
trait_def_id,
&trait_ref.substs,
)
});
// Adds the predicates from the trait. Note that this contains a `Self: Trait`
// predicate as usual. It won't have any effect since auto traits are coinductive.
obligations.extend(trait_obligations);
// Adds the predicates from the trait. Note that this contains a `Self: Trait`
// predicate as usual. It won't have any effect since auto traits are coinductive.
obligations.extend(trait_obligations);
debug!("vtable_auto_impl: obligations={:?}", obligations);
debug!("vtable_auto_impl: obligations={:?}", obligations);
VtableAutoImplData { trait_def_id, nested: obligations }
VtableAutoImplData { trait_def_id, nested: obligations }
})
}
fn confirm_impl_candidate(
@@ -2615,13 +2621,15 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
let substs = self.rematch_impl(impl_def_id, obligation, snapshot);
debug!("confirm_impl_candidate: substs={:?}", substs);
let cause = obligation.derived_cause(ImplDerivedObligation);
self.vtable_impl(
impl_def_id,
substs,
cause,
obligation.recursion_depth + 1,
obligation.param_env,
)
ensure_sufficient_stack(|| {
self.vtable_impl(
impl_def_id,
substs,
cause,
obligation.recursion_depth + 1,
obligation.param_env,
)
})
})
}
@@ -2734,13 +2742,15 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
)
.map_bound(|(trait_ref, _)| trait_ref);
let Normalized { value: trait_ref, obligations } = project::normalize_with_depth(
self,
obligation.param_env,
obligation.cause.clone(),
obligation.recursion_depth + 1,
&trait_ref,
);
let Normalized { value: trait_ref, obligations } = ensure_sufficient_stack(|| {
project::normalize_with_depth(
self,
obligation.param_env,
obligation.cause.clone(),
obligation.recursion_depth + 1,
&trait_ref,
)
});
self.confirm_poly_trait_refs(
obligation.cause.clone(),
@@ -2798,13 +2808,15 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
debug!("confirm_generator_candidate({:?},{:?},{:?})", obligation, generator_def_id, substs);
let trait_ref = self.generator_trait_ref_unnormalized(obligation, substs);
let Normalized { value: trait_ref, mut obligations } = normalize_with_depth(
self,
obligation.param_env,
obligation.cause.clone(),
obligation.recursion_depth + 1,
&trait_ref,
);
let Normalized { value: trait_ref, mut obligations } = ensure_sufficient_stack(|| {
normalize_with_depth(
self,
obligation.param_env,
obligation.cause.clone(),
obligation.recursion_depth + 1,
&trait_ref,
)
});
debug!(
"confirm_generator_candidate(generator_def_id={:?}, \
@@ -2843,13 +2855,15 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
};
let trait_ref = self.closure_trait_ref_unnormalized(obligation, substs);
let Normalized { value: trait_ref, mut obligations } = normalize_with_depth(
self,
obligation.param_env,
obligation.cause.clone(),
obligation.recursion_depth + 1,
&trait_ref,
);
let Normalized { value: trait_ref, mut obligations } = ensure_sufficient_stack(|| {
normalize_with_depth(
self,
obligation.param_env,
obligation.cause.clone(),
obligation.recursion_depth + 1,
&trait_ref,
)
});
debug!(
"confirm_closure_candidate(closure_def_id={:?}, trait_ref={:?}, obligations={:?})",
@@ -3139,15 +3153,17 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
nested.extend(obligations);
// Construct the nested `T: Unsize<U>` predicate.
nested.push(predicate_for_trait_def(
tcx,
obligation.param_env,
obligation.cause.clone(),
obligation.predicate.def_id(),
obligation.recursion_depth + 1,
a_last.expect_ty(),
&[b_last],
));
nested.push(ensure_sufficient_stack(|| {
predicate_for_trait_def(
tcx,
obligation.param_env,
obligation.cause.clone(),
obligation.predicate.def_id(),
obligation.recursion_depth + 1,
a_last.expect_ty(),
&[b_last],
)
}));
}
_ => bug!(),
@@ -3208,13 +3224,15 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
let impl_trait_ref = impl_trait_ref.subst(self.tcx(), impl_substs);
let Normalized { value: impl_trait_ref, obligations: mut nested_obligations } =
project::normalize_with_depth(
self,
obligation.param_env,
obligation.cause.clone(),
obligation.recursion_depth + 1,
&impl_trait_ref,
);
ensure_sufficient_stack(|| {
project::normalize_with_depth(
self,
obligation.param_env,
obligation.cause.clone(),
obligation.recursion_depth + 1,
&impl_trait_ref,
)
});
debug!(
"match_impl(impl_def_id={:?}, obligation={:?}, \

@@ -191,10 +191,12 @@ fn dtorck_constraint_for_ty<'tcx>(
ty::Array(ety, _) | ty::Slice(ety) => {
// single-element containers, behave like their element
dtorck_constraint_for_ty(tcx, span, for_ty, depth + 1, ety, constraints)?;
rustc_middle::limits::ensure_sufficient_stack(|| {
dtorck_constraint_for_ty(tcx, span, for_ty, depth + 1, ety, constraints)
})?;
}
ty::Tuple(tys) => {
ty::Tuple(tys) => rustc_middle::limits::ensure_sufficient_stack(|| {
for ty in tys.iter() {
dtorck_constraint_for_ty(
tcx,
@ -205,13 +207,15 @@ fn dtorck_constraint_for_ty<'tcx>(
constraints,
)?;
}
}
Ok::<_, NoSolution>(())
})?,
ty::Closure(_, substs) => {
ty::Closure(_, substs) => rustc_middle::limits::ensure_sufficient_stack(|| {
for ty in substs.as_closure().upvar_tys() {
dtorck_constraint_for_ty(tcx, span, for_ty, depth + 1, ty, constraints)?;
}
}
Ok::<_, NoSolution>(())
})?,
ty::Generator(_, substs, _movability) => {
// rust-lang/rust#49918: types can be constructed, stored