diff --git a/src/librustc/middle/region.rs b/src/librustc/middle/region.rs index f18c0b1615e..f08cbb41438 100644 --- a/src/librustc/middle/region.rs +++ b/src/librustc/middle/region.rs @@ -329,6 +329,9 @@ impl RegionMaps { pub fn item_extent(&self, n: ast::NodeId) -> CodeExtent { self.lookup_code_extent(CodeExtentData::DestructionScope(n)) } + pub fn opt_destruction_extent(&self, n: ast::NodeId) -> Option { + self.code_extent_interner.borrow().get(&CodeExtentData::DestructionScope(n)).cloned() + } pub fn intern_code_extent(&self, e: CodeExtentData, parent: CodeExtent) -> CodeExtent { diff --git a/src/librustc/middle/ty.rs b/src/librustc/middle/ty.rs index 98422d1dffe..9f2c87b1a0a 100644 --- a/src/librustc/middle/ty.rs +++ b/src/librustc/middle/ty.rs @@ -3475,6 +3475,13 @@ impl<'tcx, 'container> AdtDefData<'tcx, 'container> { .expect("variant_with_id: unknown variant") } + pub fn variant_index_with_id(&self, vid: DefId) -> usize { + self.variants + .iter() + .position(|v| v.did == vid) + .expect("variant_index_with_id: unknown variant") + } + pub fn variant_of_def(&self, def: def::Def) -> &VariantDefData<'tcx, 'container> { match def { def::DefVariant(_, vid, _) => self.variant_with_id(vid), @@ -5223,14 +5230,11 @@ impl<'tcx> TyS<'tcx> { { let method_call = MethodCall::autoderef(expr_id, autoderef); let mut adjusted_ty = self; - match method_type(method_call) { - Some(method_ty) => { - // Method calls always have all late-bound regions - // fully instantiated. - let fn_ret = cx.no_late_bound_regions(&method_ty.fn_ret()).unwrap(); - adjusted_ty = fn_ret.unwrap(); - } - None => {} + if let Some(method_ty) = method_type(method_call) { + // Method calls always have all late-bound regions + // fully instantiated. + let fn_ret = cx.no_late_bound_regions(&method_ty.fn_ret()).unwrap(); + adjusted_ty = fn_ret.unwrap(); } match adjusted_ty.builtin_deref(true, NoPreference) { Some(mt) => mt.ty, diff --git a/src/librustc_mir/build/block.rs b/src/librustc_mir/build/block.rs new file mode 100644 index 00000000000..5604c78e098 --- /dev/null +++ b/src/librustc_mir/build/block.rs @@ -0,0 +1,28 @@ +// Copyright 2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +use hair::*; +use repr::*; +use build::{BlockAnd, Builder}; + +impl Builder { + pub fn ast_block(&mut self, + destination: &Lvalue, + mut block: BasicBlock, + ast_block: H::Block) + -> BlockAnd<()> { + let this = self; + let Block { extent, span: _, stmts, expr } = this.hir.mirror(ast_block); + this.in_scope(extent, block, |this| { + unpack!(block = this.stmts(block, stmts)); + this.into(destination, block, expr) + }) + } +} diff --git a/src/librustc_mir/build/cfg.rs b/src/librustc_mir/build/cfg.rs new file mode 100644 index 00000000000..955e1b7146a --- /dev/null +++ b/src/librustc_mir/build/cfg.rs @@ -0,0 +1,88 @@ +// Copyright 2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + + + + +//! Routines for manipulating the control-flow graph. 
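Both `variant_index_with_id` and the `if let` cleanup in ty.rs are standard idioms; here is a minimal, self-contained sketch using toy `DefId`/`Variant` types (not the real rustc definitions) that shows the `Iterator::position` lookup and the `match`-to-`if let` simplification side by side.

    #[derive(Clone, Copy, PartialEq, Eq, Debug)]
    struct DefId(u32);

    struct Variant { did: DefId }

    struct AdtDef { variants: Vec<Variant> }

    impl AdtDef {
        // Same shape as `variant_index_with_id`: linear scan, panic on an unknown id.
        fn variant_index_with_id(&self, vid: DefId) -> usize {
            self.variants
                .iter()
                .position(|v| v.did == vid)
                .expect("variant_index_with_id: unknown variant")
        }
    }

    fn main() {
        let adt = AdtDef {
            variants: vec![Variant { did: DefId(10) }, Variant { did: DefId(11) }],
        };
        assert_eq!(adt.variant_index_with_id(DefId(11)), 1);

        // The `match ... { Some(..) => ..., None => {} }` block rewritten in ty.rs
        // reads the same as this `if let`, with no change in behavior:
        let method_ret: Option<&str> = Some("i32");
        let mut adjusted_ty = "Self";
        if let Some(ret) = method_ret {
            adjusted_ty = ret;
        }
        assert_eq!(adjusted_ty, "i32");
    }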
+ +use build::CFG; +use hair::*; +use repr::*; + +impl CFG { + pub fn block_data(&self, blk: BasicBlock) -> &BasicBlockData { + &self.basic_blocks[blk.index()] + } + + pub fn block_data_mut(&mut self, blk: BasicBlock) -> &mut BasicBlockData { + &mut self.basic_blocks[blk.index()] + } + + pub fn end_point(&self, block: BasicBlock) -> ExecutionPoint { + ExecutionPoint { + block: block, + statement: self.block_data(block).statements.len() as u32 + } + } + + pub fn start_new_block(&mut self) -> BasicBlock { + let node_index = self.basic_blocks.len(); + self.basic_blocks.push(BasicBlockData::new(Terminator::Diverge)); + BasicBlock::new(node_index) + } + + pub fn push(&mut self, block: BasicBlock, statement: Statement) { + debug!("push({:?}, {:?})", block, statement); + self.block_data_mut(block).statements.push(statement); + } + + pub fn push_assign_constant(&mut self, + block: BasicBlock, + span: H::Span, + temp: &Lvalue, + constant: Constant) { + self.push_assign(block, span, temp, Rvalue::Use(Operand::Constant(constant))); + } + + pub fn push_drop(&mut self, block: BasicBlock, span: H::Span, + kind: DropKind, lvalue: &Lvalue) { + self.push(block, Statement { + span: span, + kind: StatementKind::Drop(kind, lvalue.clone()) + }); + } + + pub fn push_assign(&mut self, + block: BasicBlock, + span: H::Span, + lvalue: &Lvalue, + rvalue: Rvalue) { + self.push(block, Statement { + span: span, + kind: StatementKind::Assign(lvalue.clone(), rvalue) + }); + } + + pub fn terminate(&mut self, + block: BasicBlock, + terminator: Terminator) { + // Check whether this block has already been terminated. For + // this, we rely on the fact that the initial state is to have + // a Diverge terminator and an empty list of targets (which + // is not a valid state). + debug_assert!(match self.block_data(block).terminator { Terminator::Diverge => true, + _ => false }, + "terminate: block {:?} already has a terminator set", block); + + self.block_data_mut(block).terminator = terminator; + } +} + diff --git a/src/librustc_mir/build/expr/as_constant.rs b/src/librustc_mir/build/expr/as_constant.rs new file mode 100644 index 00000000000..a6d06c447a4 --- /dev/null +++ b/src/librustc_mir/build/expr/as_constant.rs @@ -0,0 +1,123 @@ +// Copyright 2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! See docs in build/expr/mod.rs + +use rustc_data_structures::fnv::FnvHashMap; + +use build::{Builder}; +use hair::*; +use repr::*; + +impl Builder { + /// Compile `expr`, yielding a compile-time constant. Assumes that + /// `expr` is a valid compile-time constant! 
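The `CFG` helpers above encode two invariants worth calling out: a freshly started block carries a placeholder `Diverge` terminator, and `terminate` may be called only once per block. A stripped-down, runnable model of that bookkeeping (toy statement and terminator types, not the rustc_mir ones):

    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    struct BasicBlock(usize);

    #[derive(Debug, PartialEq)]
    enum Terminator {
        Diverge, // placeholder meaning "not yet terminated"
        Goto { target: BasicBlock },
    }

    #[derive(Debug)]
    struct BasicBlockData {
        statements: Vec<String>,
        terminator: Terminator,
    }

    #[derive(Default, Debug)]
    struct Cfg {
        basic_blocks: Vec<BasicBlockData>,
    }

    impl Cfg {
        fn start_new_block(&mut self) -> BasicBlock {
            let index = self.basic_blocks.len();
            self.basic_blocks.push(BasicBlockData {
                statements: vec![],
                terminator: Terminator::Diverge,
            });
            BasicBlock(index)
        }

        fn push(&mut self, block: BasicBlock, statement: String) {
            self.basic_blocks[block.0].statements.push(statement);
        }

        fn terminate(&mut self, block: BasicBlock, terminator: Terminator) {
            // Mirrors the debug_assert! above: a block is terminated exactly once.
            assert_eq!(self.basic_blocks[block.0].terminator, Terminator::Diverge,
                       "block {:?} already has a terminator", block);
            self.basic_blocks[block.0].terminator = terminator;
        }
    }

    fn main() {
        let mut cfg = Cfg::default();
        let entry = cfg.start_new_block();
        let exit = cfg.start_new_block();
        cfg.push(entry, "tmp0 = const 1".to_string());
        cfg.terminate(entry, Terminator::Goto { target: exit });
        assert_eq!(cfg.basic_blocks.len(), 2);
    }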
+ pub fn as_constant(&mut self, expr: M) -> Constant + where M: Mirror> + { + let expr = self.hir.mirror(expr); + self.expr_as_constant(expr) + } + + fn expr_as_constant(&mut self, expr: Expr) -> Constant { + let this = self; + let Expr { ty: _, temp_lifetime: _, span, kind } = expr; + let kind = match kind { + ExprKind::Scope { extent: _, value } => { + return this.as_constant(value); + } + ExprKind::Paren { arg } => { + return this.as_constant(arg); + } + ExprKind::Literal { literal } => { + ConstantKind::Literal(literal) + } + ExprKind::Vec { fields } => { + let fields = this.as_constants(fields); + ConstantKind::Aggregate(AggregateKind::Vec, fields) + } + ExprKind::Tuple { fields } => { + let fields = this.as_constants(fields); + ConstantKind::Aggregate(AggregateKind::Tuple, fields) + } + ExprKind::Adt { adt_def, variant_index, substs, fields, base: None } => { + let field_names = this.hir.fields(adt_def, variant_index); + let fields = this.named_field_constants(field_names, fields); + ConstantKind::Aggregate(AggregateKind::Adt(adt_def, variant_index, substs), fields) + } + ExprKind::Repeat { value, count } => { + let value = Box::new(this.as_constant(value)); + let count = Box::new(this.as_constant(count)); + ConstantKind::Repeat(value, count) + } + ExprKind::Binary { op, lhs, rhs } => { + let lhs = Box::new(this.as_constant(lhs)); + let rhs = Box::new(this.as_constant(rhs)); + ConstantKind::BinaryOp(op, lhs, rhs) + } + ExprKind::Unary { op, arg } => { + let arg = Box::new(this.as_constant(arg)); + ConstantKind::UnaryOp(op, arg) + } + ExprKind::Field { lhs, name } => { + let lhs = this.as_constant(lhs); + ConstantKind::Projection( + Box::new(ConstantProjection { + base: lhs, + elem: ProjectionElem::Field(name), + })) + } + ExprKind::Deref { arg } => { + let arg = this.as_constant(arg); + ConstantKind::Projection( + Box::new(ConstantProjection { + base: arg, + elem: ProjectionElem::Deref, + })) + } + ExprKind::Call { fun, args } => { + let fun = this.as_constant(fun); + let args = this.as_constants(args); + ConstantKind::Call(Box::new(fun), args) + } + _ => { + this.hir.span_bug( + span, + &format!("expression is not a valid constant {:?}", kind)); + } + }; + Constant { span: span, kind: kind } + } + + fn as_constants(&mut self, + exprs: Vec>) + -> Vec> + { + exprs.into_iter().map(|expr| self.as_constant(expr)).collect() + } + + fn named_field_constants(&mut self, + field_names: Vec>, + field_exprs: Vec>) + -> Vec> + { + let fields_map: FnvHashMap<_, _> = + field_exprs.into_iter() + .map(|f| (f.name, self.as_constant(f.expr))) + .collect(); + + let fields: Vec<_> = + field_names.into_iter() + .map(|n| fields_map[&n].clone()) + .collect(); + + fields + } +} diff --git a/src/librustc_mir/build/expr/as_lvalue.rs b/src/librustc_mir/build/expr/as_lvalue.rs new file mode 100644 index 00000000000..0ceafcc9a62 --- /dev/null +++ b/src/librustc_mir/build/expr/as_lvalue.rs @@ -0,0 +1,131 @@ +// Copyright 2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! See docs in build/expr/mod.rs + +use build::{BlockAnd, Builder}; +use build::expr::category::Category; +use hair::*; +use repr::*; + +impl Builder { + /// Compile `expr`, yielding an lvalue that we can move from etc. 
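`named_field_constants` above follows a reusable pattern: collect the user-written field expressions into a name-keyed map, then emit values in the order the type declares its fields. A small sketch with `std::collections::HashMap` standing in for `FnvHashMap` and plain strings standing in for HIR field names:

    use std::collections::HashMap;

    fn ordered_field_values(
        declared_order: &[&'static str],
        written: Vec<(&'static str, i32)>, // (field name, lowered constant)
    ) -> Vec<i32> {
        let map: HashMap<&'static str, i32> = written.into_iter().collect();
        declared_order.iter().map(|name| map[name]).collect()
    }

    fn main() {
        // `Point { y: 2, x: 1 }` must still emit its fields in declaration order.
        let values = ordered_field_values(&["x", "y"], vec![("y", 2), ("x", 1)]);
        assert_eq!(values, vec![1, 2]);
    }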
+ pub fn as_lvalue(&mut self, + block: BasicBlock, + expr: M) + -> BlockAnd> + where M: Mirror> + { + let expr = self.hir.mirror(expr); + self.expr_as_lvalue(block, expr) + } + + fn expr_as_lvalue(&mut self, + mut block: BasicBlock, + expr: Expr) + -> BlockAnd> + { + debug!("expr_as_lvalue(block={:?}, expr={:?})", + block, expr); + + let this = self; + let expr_span = expr.span; + match expr.kind { + ExprKind::Scope { extent, value } => { + this.in_scope(extent, block, |this| { + this.as_lvalue(block, value) + }) + } + ExprKind::Paren { arg } => { + this.as_lvalue(block, arg) + } + ExprKind::Field { lhs, name } => { + let lvalue = unpack!(block = this.as_lvalue(block, lhs)); + let lvalue = lvalue.field(name); + block.and(lvalue) + } + ExprKind::Deref { arg } => { + let lvalue = unpack!(block = this.as_lvalue(block, arg)); + let lvalue = lvalue.deref(); + block.and(lvalue) + } + ExprKind::Index { lhs, index } => { + let (usize_ty, bool_ty) = (this.hir.usize_ty(), this.hir.bool_ty()); + + let slice = unpack!(block = this.as_lvalue(block, lhs)); + + let idx = unpack!(block = this.as_operand(block, index)); + + // bounds check: + let (len, lt) = (this.temp(usize_ty.clone()), this.temp(bool_ty)); + this.cfg.push_assign(block, expr_span, // len = len(slice) + &len, Rvalue::Len(slice.clone())); + this.cfg.push_assign(block, expr_span, // lt = idx < len + <, Rvalue::BinaryOp(BinOp::Lt, + idx.clone(), + Operand::Consume(len))); + + let (success, failure) = (this.cfg.start_new_block(), + this.cfg.start_new_block()); + this.cfg.terminate(block, + Terminator::If { + cond: Operand::Consume(lt), + targets: [success, failure] + }); + this.panic(failure); + success.and(slice.index(idx)) + } + ExprKind::SelfRef => { + block.and(Lvalue::Arg(0)) + } + ExprKind::VarRef { id } => { + let index = this.var_indices[&id]; + block.and(Lvalue::Var(index)) + } + ExprKind::StaticRef { id } => { + block.and(Lvalue::Static(id)) + } + + ExprKind::Vec { .. } | + ExprKind::Tuple { .. } | + ExprKind::Adt { .. } | + ExprKind::Closure { .. } | + ExprKind::Unary { .. } | + ExprKind::Binary { .. } | + ExprKind::LogicalOp { .. } | + ExprKind::Box { .. } | + ExprKind::Cast { .. } | + ExprKind::ReifyFnPointer { .. } | + ExprKind::UnsafeFnPointer { .. } | + ExprKind::Unsize { .. } | + ExprKind::Repeat { .. } | + ExprKind::Borrow { .. } | + ExprKind::If { .. } | + ExprKind::Match { .. } | + ExprKind::Loop { .. } | + ExprKind::Block { .. } | + ExprKind::Assign { .. } | + ExprKind::AssignOp { .. } | + ExprKind::Break { .. } | + ExprKind::Continue { .. } | + ExprKind::Return { .. } | + ExprKind::Literal { .. } | + ExprKind::InlineAsm { .. } | + ExprKind::Call { .. } => { + // these are not lvalues, so we need to make a temporary. + debug_assert!(match Category::of(&expr.kind) { + Some(Category::Lvalue) => false, + _ => true, + }); + this.as_temp(block, expr) + } + } + } +} diff --git a/src/librustc_mir/build/expr/as_operand.rs b/src/librustc_mir/build/expr/as_operand.rs new file mode 100644 index 00000000000..ee090571b7c --- /dev/null +++ b/src/librustc_mir/build/expr/as_operand.rs @@ -0,0 +1,68 @@ +// Copyright 2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! 
See docs in build/expr/mod.rs + +use build::{BlockAnd, Builder}; +use build::expr::category::Category; +use hair::*; +use repr::*; + +impl Builder { + /// Compile `expr` into a value that can be used as an operand. + /// If `expr` is an lvalue like `x`, this will introduce a + /// temporary `tmp = x`, so that we capture the value of `x` at + /// this time. + pub fn as_operand(&mut self, + block: BasicBlock, + expr: M) + -> BlockAnd> + where M: Mirror> + { + let expr = self.hir.mirror(expr); + self.expr_as_operand(block, expr) + } + + fn expr_as_operand(&mut self, + mut block: BasicBlock, + expr: Expr) + -> BlockAnd> + { + debug!("expr_as_operand(block={:?}, expr={:?})", + block, expr); + let this = self; + + match expr.kind { + ExprKind::Scope { extent, value } => { + return this.in_scope(extent, block, |this| { + this.as_operand(block, value) + }); + } + ExprKind::Paren { arg } => { + return this.as_operand(block, arg); + } + _ => { } + } + + let category = Category::of(&expr.kind).unwrap(); + debug!("expr_as_operand: category={:?} for={:?}", category, expr.kind); + match category { + Category::Constant => { + let constant = this.as_constant(expr); + block.and(Operand::Constant(constant)) + } + Category::Lvalue | + Category::Rvalue(..) => { + let operand = unpack!(block = this.as_temp(block, expr)); + block.and(Operand::Consume(operand)) + } + } + } +} diff --git a/src/librustc_mir/build/expr/as_rvalue.rs b/src/librustc_mir/build/expr/as_rvalue.rs new file mode 100644 index 00000000000..e4d3ad21503 --- /dev/null +++ b/src/librustc_mir/build/expr/as_rvalue.rs @@ -0,0 +1,220 @@ +// Copyright 2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! See docs in build/expr/mod.rs + +use rustc_data_structures::fnv::FnvHashMap; + +use build::{BlockAnd, Builder}; +use build::expr::category::{Category, RvalueFunc}; +use hair::*; +use repr::*; + +impl Builder { + /// Compile `expr`, yielding an rvalue. 
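`as_operand`, `as_lvalue`, and the other lowering entry points all thread the "current block may have changed" result through `unpack!(block = ...)`. `BlockAnd` and `unpack!` are defined elsewhere in this PR (not shown here), so the definitions below are only a guessed-at sketch of their shape, included to show how the idiom keeps the updated block and the produced value together:

    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    struct BasicBlock(usize);

    // A value of type T, plus the basic block execution ends up in
    // after computing it.
    struct BlockAnd<T>(BasicBlock, T);

    // `unpack!(block = expr)` stores the resulting block back into `block`
    // and evaluates to the carried value.
    macro_rules! unpack {
        ($block:ident = $expr:expr) => {{
            let BlockAnd(b, v) = $expr;
            $block = b;
            v
        }};
    }

    fn lower_operand(block: BasicBlock) -> BlockAnd<String> {
        // pretend lowering the operand added one block
        BlockAnd(BasicBlock(block.0 + 1), "tmp0".to_string())
    }

    fn main() {
        let mut block = BasicBlock(0);
        let operand = unpack!(block = lower_operand(block));
        // `block` now points at the block where control flow continues.
        assert_eq!(block, BasicBlock(1));
        assert_eq!(operand, "tmp0");
    }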
+ pub fn as_rvalue(&mut self, + block: BasicBlock, + expr: M) + -> BlockAnd> + where M: Mirror> + { + let expr = self.hir.mirror(expr); + self.expr_as_rvalue(block, expr) + } + + fn expr_as_rvalue(&mut self, + mut block: BasicBlock, + expr: Expr) + -> BlockAnd> + { + debug!("expr_as_rvalue(block={:?}, expr={:?})", + block, expr); + + let this = self; + let expr_span = expr.span; + + match expr.kind { + ExprKind::Scope { extent, value } => { + this.in_scope(extent, block, |this| { + this.as_rvalue(block, value) + }) + } + ExprKind::Paren { arg } => { + this.as_rvalue(block, arg) + } + ExprKind::InlineAsm { asm } => { + block.and(Rvalue::InlineAsm(asm)) + } + ExprKind::Repeat { value, count } => { + let value_operand = unpack!(block = this.as_operand(block, value)); + let count_operand = unpack!(block = this.as_operand(block, count)); + block.and(Rvalue::Repeat(value_operand, count_operand)) + } + ExprKind::Borrow { region, borrow_kind, arg } => { + let arg_lvalue = unpack!(block = this.as_lvalue(block, arg)); + block.and(Rvalue::Ref(region, borrow_kind, arg_lvalue)) + } + ExprKind::Binary { op, lhs, rhs } => { + let lhs = unpack!(block = this.as_operand(block, lhs)); + let rhs = unpack!(block = this.as_operand(block, rhs)); + block.and(Rvalue::BinaryOp(op, lhs, rhs)) + } + ExprKind::Unary { op, arg } => { + let arg = unpack!(block = this.as_operand(block, arg)); + block.and(Rvalue::UnaryOp(op, arg)) + } + ExprKind::Box { place: _, value } => { + let value = this.hir.mirror(value); + let value_ty = value.ty.clone(); + let result = this.temp(value_ty.clone()); + + // to start, malloc some memory of suitable type (thus far, uninitialized): + let rvalue = Rvalue::Box(value.ty.clone()); + this.cfg.push_assign(block, expr_span, &result, rvalue); + + // schedule a shallow free of that memory, lest we unwind: + let extent = this.extent_of_innermost_scope().unwrap(); + this.schedule_drop(expr_span, extent, DropKind::Shallow, &result, value_ty); + + // initialize the box contents: + let contents = result.clone().deref(); + unpack!(block = this.into(&contents, block, value)); + + // now that the result is fully initialized, cancel the drop + // by "using" the result (which is linear): + block.and(Rvalue::Use(Operand::Consume(result))) + } + ExprKind::Cast { source } => { + let source = unpack!(block = this.as_operand(block, source)); + block.and(Rvalue::Cast(CastKind::Misc, source, expr.ty)) + } + ExprKind::ReifyFnPointer { source } => { + let source = unpack!(block = this.as_operand(block, source)); + block.and(Rvalue::Cast(CastKind::ReifyFnPointer, source, expr.ty)) + } + ExprKind::UnsafeFnPointer { source } => { + let source = unpack!(block = this.as_operand(block, source)); + block.and(Rvalue::Cast(CastKind::UnsafeFnPointer, source, expr.ty)) + } + ExprKind::Unsize { source } => { + let source = unpack!(block = this.as_operand(block, source)); + block.and(Rvalue::Cast(CastKind::Unsize, source, expr.ty)) + } + ExprKind::Vec { fields } => { + // (*) We would (maybe) be closer to trans if we + // handled this and other aggregate cases via + // `into()`, not `as_rvalue` -- in that case, instead + // of generating + // + // let tmp1 = ...1; + // let tmp2 = ...2; + // dest = Rvalue::Aggregate(Foo, [tmp1, tmp2]) + // + // we could just generate + // + // dest.f = ...1; + // dest.g = ...2; + // + // The problem is that then we would need to: + // + // (a) have a more complex mechanism for handling + // partial cleanup; + // (b) distinguish the case where the type `Foo` has a + // destructor, in which case 
creating an instance + // as a whole "arms" the destructor, and you can't + // write individual fields; and, + // (c) handle the case where the type Foo has no + // fields. We don't want `let x: ();` to compile + // to the same MIR as `let x = ();`. + + // first process the set of fields + let fields: Vec<_> = + fields.into_iter() + .map(|f| unpack!(block = this.as_operand(block, f))) + .collect(); + + block.and(Rvalue::Aggregate(AggregateKind::Vec, fields)) + } + ExprKind::Tuple { fields } => { // see (*) above + // first process the set of fields + let fields: Vec<_> = + fields.into_iter() + .map(|f| unpack!(block = this.as_operand(block, f))) + .collect(); + + block.and(Rvalue::Aggregate(AggregateKind::Tuple, fields)) + } + ExprKind::Closure { closure_id, substs, upvars } => { // see (*) above + let upvars = + upvars.into_iter() + .map(|upvar| unpack!(block = this.as_operand(block, upvar))) + .collect(); + block.and(Rvalue::Aggregate(AggregateKind::Closure(closure_id, substs), upvars)) + } + ExprKind::Adt { adt_def, variant_index, substs, fields, base } => { // see (*) above + // first process the set of fields + let fields_map: FnvHashMap<_, _> = + fields.into_iter() + .map(|f| (f.name, unpack!(block = this.as_operand(block, f.expr)))) + .collect(); + + let field_names = + this.hir.fields(adt_def, variant_index); + + let base = + base.map(|base| unpack!(block = this.as_lvalue(block, base))); + + // for the actual values we use, take either the + // expr the user specified or, if they didn't + // specify something for this field name, create a + // path relative to the base (which must have been + // supplied, or the IR is internally + // inconsistent). + let fields: Vec<_> = + field_names.into_iter() + .map(|n| match fields_map.get(&n) { + Some(v) => v.clone(), + None => Operand::Consume(base.clone().unwrap().field(n)), + }) + .collect(); + + block.and(Rvalue::Aggregate(AggregateKind::Adt(adt_def, variant_index, substs), + fields)) + } + ExprKind::Literal { .. } | + ExprKind::Block { .. } | + ExprKind::Match { .. } | + ExprKind::If { .. } | + ExprKind::Loop { .. } | + ExprKind::LogicalOp { .. } | + ExprKind::Call { .. } | + ExprKind::Field { .. } | + ExprKind::Deref { .. } | + ExprKind::Index { .. } | + ExprKind::VarRef { .. } | + ExprKind::SelfRef | + ExprKind::Assign { .. } | + ExprKind::AssignOp { .. } | + ExprKind::Break { .. } | + ExprKind::Continue { .. } | + ExprKind::Return { .. } | + ExprKind::StaticRef { .. } => { + // these do not have corresponding `Rvalue` variants, + // so make an operand and then return that + debug_assert!(match Category::of(&expr.kind) { + Some(Category::Rvalue(RvalueFunc::AsRvalue)) => false, + _ => true, + }); + let operand = unpack!(block = this.as_operand(block, expr)); + block.and(Rvalue::Use(operand)) + } + } + } +} diff --git a/src/librustc_mir/build/expr/as_temp.rs b/src/librustc_mir/build/expr/as_temp.rs new file mode 100644 index 00000000000..50f04e0177b --- /dev/null +++ b/src/librustc_mir/build/expr/as_temp.rs @@ -0,0 +1,85 @@ +// Copyright 2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! 
See docs in build/expr/mod.rs + +use build::{BlockAnd, Builder}; +use build::expr::category::Category; +use hair::*; +use repr::*; + +impl Builder { + /// Compile `expr` into a fresh temporary. This is used when building + /// up rvalues so as to freeze the value that will be consumed. + pub fn as_temp(&mut self, + block: BasicBlock, + expr: M) + -> BlockAnd> + where M: Mirror> + { + let expr = self.hir.mirror(expr); + self.expr_as_temp(block, expr) + } + + fn expr_as_temp(&mut self, + mut block: BasicBlock, + expr: Expr) + -> BlockAnd> + { + debug!("expr_as_temp(block={:?}, expr={:?})", + block, expr); + let this = self; + + match expr.kind { + ExprKind::Scope { extent, value } => { + return this.in_scope(extent, block, |this| { + this.as_temp(block, value) + }); + } + ExprKind::Paren { arg } => { + return this.as_temp(block, arg); + } + _ => { } + } + + let expr_ty = expr.ty.clone(); + let temp = this.temp(expr_ty.clone()); + let temp_lifetime = match expr.temp_lifetime { + Some(t) => t, + None => { + this.hir.span_bug( + expr.span, + &format!("no temp_lifetime for expr")); + } + }; + this.schedule_drop(expr.span, temp_lifetime, DropKind::Deep, &temp, expr_ty); + + // Careful here not to cause an infinite cycle. If we always + // called `into`, then for lvalues like `x.f`, it would + // eventually fallback to us, and we'd loop. There's a reason + // for this: `as_temp` is the point where we bridge the "by + // reference" semantics of `as_lvalue` with the "by value" + // semantics of `into`, `as_operand`, `as_rvalue`, and (of + // course) `as_temp`. + match Category::of(&expr.kind).unwrap() { + Category::Lvalue => { + let expr_span = expr.span; + let lvalue = unpack!(block = this.as_lvalue(block, expr)); + let rvalue = Rvalue::Use(Operand::Consume(lvalue)); + this.cfg.push_assign(block, expr_span, &temp, rvalue); + } + _ => { + unpack!(block = this.into(&temp, block, expr)); + } + } + + block.and(temp) + } +} diff --git a/src/librustc_mir/build/expr/category.rs b/src/librustc_mir/build/expr/category.rs new file mode 100644 index 00000000000..8bfd0a1c7d2 --- /dev/null +++ b/src/librustc_mir/build/expr/category.rs @@ -0,0 +1,93 @@ +// Copyright 2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +use hair::*; + +#[derive(Debug, PartialEq)] +pub enum Category { + // An assignable memory location like `x`, `x.f`, `foo()[3]`, that + // sort of thing. Something that could appear on the LHS of an `=` + // sign. + Lvalue, + + // A literal like `23` or `"foo"`. Does not include constant + // expressions like `3 + 5`. + Constant, + + // Something that generates a new value at runtime, like `x + y` + // or `foo()`. + Rvalue(RvalueFunc), +} + +// Rvalues fall into different "styles" that will determine which fn +// is best suited to generate them. +#[derive(Debug, PartialEq)] +pub enum RvalueFunc { + // Best generated by `into`. This is generally exprs that + // cause branching, like `match`, but also includes calls. + Into, + + // Best generated by `as_rvalue`. This is usually the case. + AsRvalue, +} + +/// Determines the category for a given expression. Note that scope +/// and paren expressions have no category. +impl Category { + pub fn of(ek: &ExprKind) -> Option { + match *ek { + ExprKind::Scope { .. 
} | + ExprKind::Paren { .. } => + None, + + ExprKind::Field { .. } | + ExprKind::Deref { .. } | + ExprKind::Index { .. } | + ExprKind::SelfRef | + ExprKind::VarRef { .. } | + ExprKind::StaticRef { .. } => + Some(Category::Lvalue), + + ExprKind::LogicalOp { .. } | + ExprKind::If { .. } | + ExprKind::Match { .. } | + ExprKind::Call { .. } => + Some(Category::Rvalue(RvalueFunc::Into)), + + ExprKind::Vec { .. } | + ExprKind::Tuple { .. } | + ExprKind::Adt { .. } | + ExprKind::Closure { .. } | + ExprKind::Unary { .. } | + ExprKind::Binary { .. } | + ExprKind::Box { .. } | + ExprKind::Cast { .. } | + ExprKind::ReifyFnPointer { .. } | + ExprKind::UnsafeFnPointer { .. } | + ExprKind::Unsize { .. } | + ExprKind::Repeat { .. } | + ExprKind::Borrow { .. } | + ExprKind::Assign { .. } | + ExprKind::AssignOp { .. } | + ExprKind::InlineAsm { .. } => + Some(Category::Rvalue(RvalueFunc::AsRvalue)), + + ExprKind::Literal { .. } => + Some(Category::Constant), + + ExprKind::Loop { .. } | + ExprKind::Block { .. } | + ExprKind::Break { .. } | + ExprKind::Continue { .. } | + ExprKind::Return { .. } => + Some(Category::Rvalue(RvalueFunc::Into)), // TODO + } + } +} diff --git a/src/librustc_mir/build/expr/into.rs b/src/librustc_mir/build/expr/into.rs new file mode 100644 index 00000000000..b409903ad7e --- /dev/null +++ b/src/librustc_mir/build/expr/into.rs @@ -0,0 +1,282 @@ +// Copyright 2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! See docs in build/expr/mod.rs + +use build::{BlockAnd, Builder}; +use build::expr::category::{Category, RvalueFunc}; +use build::scope::LoopScope; +use hair::*; +use repr::*; + +impl Builder { + /// Compile `expr`, storing the result into `destination`, which + /// is assumed to be uninitialized. 
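`Category::of` is what lets each lowering routine assert it was handed the right kind of expression and otherwise defer to a sibling. A minimal sketch of that dispatch over a toy expression type (not the HIR `ExprKind`), mirroring how `expr_as_operand` embeds constants directly and routes everything else through a temporary:

    #[derive(Debug, PartialEq)]
    enum Category { Lvalue, Constant, Rvalue }

    #[derive(Debug)]
    enum Expr {
        Literal(i64),
        Var(&'static str),
        Add(Box<Expr>, Box<Expr>),
    }

    fn category(e: &Expr) -> Category {
        match *e {
            Expr::Literal(..) => Category::Constant,
            Expr::Var(..) => Category::Lvalue,
            Expr::Add(..) => Category::Rvalue,
        }
    }

    // Constants are embedded directly; lvalues and rvalues go through a temporary.
    fn as_operand(e: &Expr) -> String {
        match category(e) {
            Category::Constant => format!("const {:?}", e),
            Category::Lvalue | Category::Rvalue => format!("consume(temp({:?}))", e),
        }
    }

    fn main() {
        assert_eq!(category(&Expr::Literal(3)), Category::Constant);
        println!("{}", as_operand(&Expr::Add(
            Box::new(Expr::Var("x")),
            Box::new(Expr::Literal(1)),
        )));
    }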
+ pub fn into_expr(&mut self, + destination: &Lvalue, + mut block: BasicBlock, + expr: Expr) + -> BlockAnd<()> + { + debug!("into_expr(destination={:?}, block={:?}, expr={:?})", + destination, block, expr); + + // since we frequently have to reference `self` from within a + // closure, where `self` would be shadowed, it's easier to + // just use the name `this` uniformly + let this = self; + let expr_span = expr.span; + + match expr.kind { + ExprKind::Scope { extent, value } => { + this.in_scope(extent, block, |this| { + this.into(destination, block, value) + }) + } + ExprKind::Paren { arg } => { + this.into(destination, block, arg) + } + ExprKind::Block { body: ast_block } => { + this.ast_block(destination, block, ast_block) + } + ExprKind::Match { discriminant, arms } => { + this.match_expr(destination, expr_span, block, discriminant, arms) + } + ExprKind::If { condition: cond_expr, then: then_expr, otherwise: else_expr } => { + let operand = unpack!(block = this.as_operand(block, cond_expr)); + + let mut then_block = this.cfg.start_new_block(); + let mut else_block = this.cfg.start_new_block(); + this.cfg.terminate(block, Terminator::If { + cond: operand, + targets: [then_block, else_block] + }); + + unpack!(then_block = this.into(destination, then_block, then_expr)); + unpack!(else_block = this.into(destination, else_block, else_expr)); + + let join_block = this.cfg.start_new_block(); + this.cfg.terminate(then_block, Terminator::Goto { target: join_block }); + this.cfg.terminate(else_block, Terminator::Goto { target: join_block }); + + join_block.unit() + } + ExprKind::LogicalOp { op, lhs, rhs } => { + // And: + // + // [block: If(lhs)] -true-> [else_block: If(rhs)] -true-> [true_block] + // | | (false) + // +----------false-----------+------------------> [false_block] + // + // Or: + // + // [block: If(lhs)] -false-> [else_block: If(rhs)] -true-> [true_block] + // | | (false) + // +----------true------------+-------------------> [false_block] + + let (true_block, false_block, mut else_block, join_block) = + (this.cfg.start_new_block(), this.cfg.start_new_block(), + this.cfg.start_new_block(), this.cfg.start_new_block()); + + let lhs = unpack!(block = this.as_operand(block, lhs)); + let blocks = match op { + LogicalOp::And => [else_block, false_block], + LogicalOp::Or => [true_block, else_block], + }; + this.cfg.terminate(block, Terminator::If { cond: lhs, targets: blocks }); + + let rhs = unpack!(else_block = this.as_operand(else_block, rhs)); + this.cfg.terminate(else_block, Terminator::If { + cond: rhs, + targets: [true_block, false_block] + }); + + this.cfg.push_assign_constant( + true_block, expr_span, destination, + Constant { + span: expr_span, + kind: ConstantKind::Literal(Literal::Bool { value: true }), + }); + + this.cfg.push_assign_constant( + false_block, expr_span, destination, + Constant { + span: expr_span, + kind: ConstantKind::Literal(Literal::Bool { value: false }), + }); + + this.cfg.terminate(true_block, Terminator::Goto { target: join_block }); + this.cfg.terminate(false_block, Terminator::Goto { target: join_block }); + + join_block.unit() + } + ExprKind::Loop { condition: opt_cond_expr, body } => { + // [block] --> [loop_block] ~~> [loop_block_end] -1-> [exit_block] + // ^ | + // | 0 + // | | + // | v + // [body_block_end] <~~~ [body_block] + // + // If `opt_cond_expr` is `None`, then the graph is somewhat simplified: + // + // [block] --> [loop_block / body_block ] ~~> [body_block_end] [exit_block] + // ^ | + // | | + // +--------------------------+ + // + + let 
loop_block = this.cfg.start_new_block(); + let exit_block = this.cfg.start_new_block(); + + // start the loop + this.cfg.terminate(block, Terminator::Goto { target: loop_block }); + + this.in_loop_scope(loop_block, exit_block, |this| { + // conduct the test, if necessary + let body_block; + let opt_cond_expr = opt_cond_expr; // FIXME rustc bug + if let Some(cond_expr) = opt_cond_expr { + let loop_block_end; + let cond = unpack!(loop_block_end = this.as_operand(loop_block, cond_expr)); + body_block = this.cfg.start_new_block(); + this.cfg.terminate(loop_block_end, + Terminator::If { + cond: cond, + targets: [body_block, exit_block] + }); + } else { + body_block = loop_block; + } + + // execute the body, branching back to the test + let unit_temp = this.unit_temp.clone(); + let body_block_end = unpack!(this.into(&unit_temp, body_block, body)); + this.cfg.terminate(body_block_end, Terminator::Goto { target: loop_block }); + + // final point is exit_block + exit_block.unit() + }) + } + ExprKind::Assign { lhs, rhs } => { + // Note: we evaluate assignments right-to-left. This + // is better for borrowck interaction with overloaded + // operators like x[j] = x[i]. + let rhs = unpack!(block = this.as_operand(block, rhs)); + let lhs = unpack!(block = this.as_lvalue(block, lhs)); + this.cfg.push_drop(block, expr_span, DropKind::Deep, &lhs); + this.cfg.push_assign(block, expr_span, &lhs, Rvalue::Use(rhs)); + block.unit() + } + ExprKind::AssignOp { op, lhs, rhs } => { + // FIXME(#28160) there is an interesting semantics + // question raised here -- should we "freeze" the + // value of the lhs here? I'm inclined to think not, + // since it seems closer to the semantics of the + // overloaded version, which takes `&mut self`. This + // only affects weird things like `x += {x += 1; x}` + // -- is that equal to `x + (x + 1)` or `2*(x+1)`? + + // As above, RTL. + let rhs = unpack!(block = this.as_operand(block, rhs)); + let lhs = unpack!(block = this.as_lvalue(block, lhs)); + + // we don't have to drop prior contents or anything + // because AssignOp is only legal for Copy types + // (overloaded ops should be desugared into a call). + this.cfg.push_assign(block, expr_span, &lhs, + Rvalue::BinaryOp(op, + Operand::Consume(lhs.clone()), + rhs)); + + block.unit() + } + ExprKind::Continue { label } => { + this.break_or_continue(expr_span, label, block, + |loop_scope| loop_scope.continue_block) + } + ExprKind::Break { label } => { + this.break_or_continue(expr_span, label, block, + |loop_scope| loop_scope.break_block) + } + ExprKind::Return { value } => { + unpack!(block = this.into(&Lvalue::ReturnPointer, block, value)); + let extent = this.extent_of_outermost_scope().unwrap(); + this.exit_scope(expr_span, extent, block, END_BLOCK); + this.cfg.start_new_block().unit() + } + ExprKind::Call { fun, args } => { + let fun = unpack!(block = this.as_lvalue(block, fun)); + let args: Vec<_> = + args.into_iter() + .map(|arg| unpack!(block = this.as_lvalue(block, arg))) + .collect(); + let success = this.cfg.start_new_block(); + let panic = this.diverge_cleanup(); + this.cfg.terminate(block, + Terminator::Call { + data: CallData { + destination: destination.clone(), + func: fun, + args: args + }, + targets: [success, panic] + }); + success.unit() + } + + // these are the cases that are more naturally handled by some other mode + ExprKind::Unary { .. } | + ExprKind::Binary { .. } | + ExprKind::Box { .. } | + ExprKind::Cast { .. } | + ExprKind::ReifyFnPointer { .. } | + ExprKind::UnsafeFnPointer { .. 
} | + ExprKind::Unsize { .. } | + ExprKind::Repeat { .. } | + ExprKind::Borrow { .. } | + ExprKind::VarRef { .. } | + ExprKind::SelfRef | + ExprKind::StaticRef { .. } | + ExprKind::Vec { .. } | + ExprKind::Tuple { .. } | + ExprKind::Adt { .. } | + ExprKind::Closure { .. } | + ExprKind::Index { .. } | + ExprKind::Deref { .. } | + ExprKind::Literal { .. } | + ExprKind::InlineAsm { .. } | + ExprKind::Field { .. } => { + debug_assert!(match Category::of(&expr.kind).unwrap() { + Category::Rvalue(RvalueFunc::Into) => false, + _ => true, + }); + + let rvalue = unpack!(block = this.as_rvalue(block, expr)); + this.cfg.push_assign(block, expr_span, destination, rvalue); + block.unit() + } + } + } + + fn break_or_continue(&mut self, + span: H::Span, + label: Option, + block: BasicBlock, + exit_selector: F) + -> BlockAnd<()> + where F: FnOnce(&LoopScope) -> BasicBlock + { + let loop_scope = self.find_loop_scope(span, label); + let exit_block = exit_selector(&loop_scope); + self.exit_scope(span, loop_scope.extent, block, exit_block); + self.cfg.start_new_block().unit() + } +} diff --git a/src/librustc_mir/build/expr/mod.rs b/src/librustc_mir/build/expr/mod.rs new file mode 100644 index 00000000000..0f168f307aa --- /dev/null +++ b/src/librustc_mir/build/expr/mod.rs @@ -0,0 +1,79 @@ +// Copyright 2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! Translates expressions into MIR. As a caller into this module, you +//! have many options, but the first thing you have to decide is +//! whether you are evaluating this expression for its *value*, its +//! *location*, or as a *constant*. +//! +//! Typically, you want the value: e.g., if you are doing `expr_a + +//! expr_b`, you want the values of those expressions. In that case, +//! you want one of the following functions. Note that if the expr has +//! a type that is not `Copy`, then using any of these functions will +//! "move" the value out of its current home (if any). +//! +//! - `into` -- writes the value into a specific location, which +//! should be uninitialized +//! - `as_operand` -- evaluates the value and yields an `Operand`, +//! suitable for use as an argument to an `Rvalue` +//! - `as_temp` -- evaluates into a temporary; this is similar to `as_operand` +//! except it always returns a fresh lvalue, even for constants +//! - `as_rvalue` -- yields an `Rvalue`, suitable for use in an assignment; +//! as of this writing, never needed outside of the `expr` module itself +//! +//! Sometimes though want the expression's *location*. An example +//! would be during a match statement, or the operand of the `&` +//! operator. In that case, you want `as_lvalue`. This will create a +//! temporary if necessary. +//! +//! Finally, if it's a constant you seek, then call +//! `as_constant`. This creates a `Constant`, but naturally it can +//! only be used on constant expressions and hence is needed only in +//! very limited contexts. +//! +//! ### Implementation notes +//! +//! For any given kind of expression, there is generally one way that +//! can be translated most naturally. This is specified by the +//! `Category::of` function in the `category` module. For example, a +//! struct expression (or other expression that creates a new value) +//! 
is typically easiest to write in terms of `as_rvalue` or `into`, +//! whereas a reference to a field is easiest to write in terms of +//! `as_lvalue`. (The exception to this is scope and paren +//! expressions, which have no category.) +//! +//! Therefore, the various functions above make use of one another in +//! a descending fashion. For any given expression, you should pick +//! the most suitable spot to implement it, and then just let the +//! other fns cycle around. The handoff works like this: +//! +//! - `into(lv)` -> fallback is to create a rvalue with `as_rvalue` and assign it to `lv` +//! - `as_rvalue` -> fallback is to create an Operand with `as_operand` and use `Rvalue::use` +//! - `as_operand` -> either invokes `as_constant` or `as_temp` +//! - `as_constant` -> (no fallback) +//! - `as_temp` -> creates a temporary and either calls `as_lvalue` or `into` +//! - `as_lvalue` -> for rvalues, falls back to `as_temp` and returns that +//! +//! As you can see, there is a cycle where `into` can (in theory) fallback to `as_temp` +//! which can fallback to `into`. So if one of the `ExprKind` variants is not, in fact, +//! implemented in the category where it is supposed to be, there will be a problem. +//! +//! Of those fallbacks, the most interesting one is `as_temp`, because +//! it discriminates based on the category of the expression. This is +//! basically the point where the "by value" operations are bridged +//! over to the "by reference" mode (`as_lvalue`). + +mod as_constant; +mod as_lvalue; +mod as_rvalue; +mod as_operand; +mod as_temp; +mod category; +mod into; diff --git a/src/librustc_mir/build/into.rs b/src/librustc_mir/build/into.rs new file mode 100644 index 00000000000..426e59f1c40 --- /dev/null +++ b/src/librustc_mir/build/into.rs @@ -0,0 +1,70 @@ +// Copyright 2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! In general, there are a number of things for which it's convenient +//! to just call `builder.into` and have it emit its result into a +//! given location. This is basically for expressions or things that can be +//! wrapped up as expressions (e.g. blocks). To make this ergonomic, we use this +//! latter `EvalInto` trait. 
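The `EvalInto` trait described here is essentially ad-hoc overloading: `Builder::into` accepts anything that knows how to emit itself into a destination, including an `Option` that emits nothing when absent. A simplified, self-contained model of the pattern with toy types (the real impls over `ExprRef`/`Expr` follow below):

    struct Builder { code: Vec<String> }

    trait EvalInto {
        fn eval_into(self, builder: &mut Builder, destination: &str);
    }

    impl Builder {
        fn into<E: EvalInto>(&mut self, destination: &str, expr: E) {
            expr.eval_into(self, destination)
        }
    }

    // A literal writes itself directly...
    impl EvalInto for i64 {
        fn eval_into(self, builder: &mut Builder, destination: &str) {
            builder.code.push(format!("{} = const {}", destination, self));
        }
    }

    // ...and an optional expression emits nothing when absent, mirroring the
    // `Option` impl in the real code.
    impl EvalInto for Option<i64> {
        fn eval_into(self, builder: &mut Builder, destination: &str) {
            if let Some(value) = self {
                builder.into(destination, value);
            }
        }
    }

    fn main() {
        let mut builder = Builder { code: vec![] };
        builder.into("dest", 7_i64);
        builder.into("dest", None::<i64>);
        assert_eq!(builder.code, vec!["dest = const 7".to_string()]);
    }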
+ +use build::{BlockAnd, Builder}; +use hair::*; +use repr::*; + +pub trait EvalInto { + fn eval_into(self, builder: &mut Builder, destination: &Lvalue, + block: BasicBlock) -> BlockAnd<()>; +} + +impl Builder { + pub fn into(&mut self, + destination: &Lvalue, + block: BasicBlock, + expr: E) + -> BlockAnd<()> + where E: EvalInto + { + expr.eval_into(self, destination, block) + } +} + +impl EvalInto for ExprRef { + fn eval_into(self, + builder: &mut Builder, + destination: &Lvalue, + block: BasicBlock) + -> BlockAnd<()> { + let expr = builder.hir.mirror(self); + builder.into_expr(destination, block, expr) + } +} + +impl EvalInto for Expr { + fn eval_into(self, + builder: &mut Builder, + destination: &Lvalue, + block: BasicBlock) + -> BlockAnd<()> { + builder.into_expr(destination, block, self) + } +} + +impl EvalInto for Option> { + fn eval_into(self, + builder: &mut Builder, + destination: &Lvalue, + block: BasicBlock) + -> BlockAnd<()> { + match self { + Some(expr) => builder.into(destination, block, expr), + None => block.unit() + } + } +} diff --git a/src/librustc_mir/build/matches/mod.rs b/src/librustc_mir/build/matches/mod.rs new file mode 100644 index 00000000000..ff6e4997b79 --- /dev/null +++ b/src/librustc_mir/build/matches/mod.rs @@ -0,0 +1,409 @@ +// Copyright 2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! Code related to match expresions. These are sufficiently complex +//! to warrant their own module and submodules. :) This main module +//! includes the high-level algorithm, the submodules contain the +//! details. + +use build::{BlockAnd, Builder}; +use repr::*; +use hair::*; + +// helper functions, broken out by category: +mod simplify; +mod test; +mod util; + +impl Builder { + pub fn match_expr(&mut self, + destination: &Lvalue, + span: H::Span, + mut block: BasicBlock, + discriminant: ExprRef, + arms: Vec>) + -> BlockAnd<()> + { + let discriminant_lvalue = + unpack!(block = self.as_lvalue(block, discriminant)); + + let arm_blocks: Vec = + arms.iter() + .map(|_| self.cfg.start_new_block()) + .collect(); + + let arm_bodies: Vec> = + arms.iter() + .map(|arm| arm.body.clone()) + .collect(); + + // assemble a list of candidates: there is one candidate per + // pattern, which means there may be more than one candidate + // *per arm*. These candidates are kept sorted such that the + // highest priority candidate comes last in the list. This the + // reverse of the order in which candidates are written in the + // source. 
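The candidate assembly described above reverses both the arm order and each arm's pattern list so the highest-priority candidate ends up at the back of the vector, where it can be `pop()`ed cheaply. A self-contained illustration of that iterator chain, with strings standing in for patterns:

    #[derive(Debug, PartialEq)]
    struct Candidate {
        arm_block: usize,
        pattern: &'static str,
    }

    fn assemble(arms: Vec<(usize, Vec<&'static str>)>) -> Vec<Candidate> {
        arms.into_iter()
            .rev() // highest priority comes last
            .flat_map(|(arm_block, patterns)| {
                patterns.into_iter()
                        .rev()
                        .map(move |pattern| Candidate { arm_block, pattern })
            })
            .collect()
    }

    fn main() {
        // match x { A | B => arm0, C => arm1 }
        let candidates = assemble(vec![(0, vec!["A", "B"]), (1, vec!["C"])]);
        // Popping from the back yields A, then B, then C: source order.
        assert_eq!(
            candidates,
            vec![
                Candidate { arm_block: 1, pattern: "C" },
                Candidate { arm_block: 0, pattern: "B" },
                Candidate { arm_block: 0, pattern: "A" },
            ]
        );
    }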
+ let candidates: Vec> = + arms.into_iter() + .zip(arm_blocks.iter()) + .rev() // highest priority comes last + .flat_map(|(arm, &arm_block)| { + let guard = arm.guard; + arm.patterns.into_iter() + .rev() + .map(move |pat| (arm_block, pat, guard.clone())) + }) + .map(|(arm_block, pattern, guard)| { + Candidate { + match_pairs: vec![self.match_pair(discriminant_lvalue.clone(), pattern)], + bindings: vec![], + guard: guard, + arm_block: arm_block, + } + }) + .collect(); + + // this will generate code to test discriminant_lvalue and + // branch to the appropriate arm block + let var_extent = self.extent_of_innermost_scope().unwrap(); + self.match_candidates(span, var_extent, candidates, block); + + // all the arm blocks will rejoin here + let end_block = self.cfg.start_new_block(); + + for (arm_body, &arm_block) in arm_bodies.into_iter().zip(arm_blocks.iter()) { + let mut arm_block = arm_block; + unpack!(arm_block = self.into(destination, arm_block, arm_body)); + self.cfg.terminate(arm_block, Terminator::Goto { target: end_block }); + } + + end_block.unit() + } + + pub fn expr_into_pattern(&mut self, + mut block: BasicBlock, + var_extent: H::CodeExtent, // lifetime of vars + irrefutable_pat: PatternRef, + initializer: ExprRef) + -> BlockAnd<()> + { + // optimize the case of `let x = ...` + let irrefutable_pat = self.hir.mirror(irrefutable_pat); + match irrefutable_pat.kind { + PatternKind::Binding { mutability, + name, + mode: BindingMode::ByValue, + var, + ty, + subpattern: None } => { + let index = self.declare_binding(var_extent, mutability, name, + var, ty, irrefutable_pat.span); + let lvalue = Lvalue::Var(index); + return self.into(&lvalue, block, initializer); + } + _ => { } + } + let lvalue = unpack!(block = self.as_lvalue(block, initializer)); + self.lvalue_into_pattern(block, var_extent, + PatternRef::Mirror(Box::new(irrefutable_pat)), &lvalue) + } + + pub fn lvalue_into_pattern(&mut self, + block: BasicBlock, + var_extent: H::CodeExtent, + irrefutable_pat: PatternRef, + initializer: &Lvalue) + -> BlockAnd<()> + { + // create a dummy candidate + let mut candidate = Candidate:: { + match_pairs: vec![self.match_pair(initializer.clone(), irrefutable_pat)], + bindings: vec![], + guard: None, + arm_block: block + }; + + // Simplify the candidate. Since the pattern is irrefutable, this should + // always convert all match-pairs into bindings. + self.simplify_candidate(&mut candidate); + + if !candidate.match_pairs.is_empty() { + self.hir.span_bug( + candidate.match_pairs[0].pattern.span, + &format!("match pairs {:?} remaining after simplifying irrefutable pattern", + candidate.match_pairs)); + } + + // now apply the bindings, which will also declare the variables + self.bind_matched_candidate(block, var_extent, candidate.bindings); + + block.unit() + } + + pub fn declare_uninitialized_variables(&mut self, + var_extent: H::CodeExtent, + pattern: PatternRef) + { + let pattern = self.hir.mirror(pattern); + match pattern.kind { + PatternKind::Binding { mutability, name, mode: _, var, ty, subpattern } => { + self.declare_binding(var_extent, mutability, name, var, ty, pattern.span); + if let Some(subpattern) = subpattern { + self.declare_uninitialized_variables(var_extent, subpattern); + } + } + PatternKind::Array { prefix, slice, suffix } | + PatternKind::Slice { prefix, slice, suffix } => { + for subpattern in prefix.into_iter().chain(slice).chain(suffix) { + self.declare_uninitialized_variables(var_extent, subpattern); + } + } + PatternKind::Constant { .. } | PatternKind::Range { .. 
} | PatternKind::Wild => { + } + PatternKind::Deref { subpattern } => { + self.declare_uninitialized_variables(var_extent, subpattern); + } + PatternKind::Leaf { subpatterns } | + PatternKind::Variant { subpatterns, .. } => { + for subpattern in subpatterns { + self.declare_uninitialized_variables(var_extent, subpattern.pattern); + } + } + } + } +} + +#[derive(Clone, Debug)] +struct Candidate { + // all of these must be satisfied... + match_pairs: Vec>, + + // ...these bindings established... + bindings: Vec>, + + // ...and the guard must be evaluated... + guard: Option>, + + // ...and then we branch here. + arm_block: BasicBlock, +} + +#[derive(Clone, Debug)] +struct Binding { + span: H::Span, + source: Lvalue, + name: H::Ident, + var_id: H::VarId, + var_ty: H::Ty, + mutability: Mutability, + binding_mode: BindingMode, +} + +#[derive(Clone, Debug)] +struct MatchPair { + // this lvalue... + lvalue: Lvalue, + + // ... must match this pattern. + pattern: Pattern, +} + +#[derive(Clone, Debug, PartialEq)] +enum TestKind { + // test the branches of enum + Switch { adt_def: H::AdtDef }, + + // test for equality + Eq { value: Constant, ty: H::Ty }, + + // test whether the value falls within an inclusive range + Range { lo: Constant, hi: Constant, ty: H::Ty }, + + // test length of the slice is equal to len + Len { len: usize, op: BinOp }, +} + +#[derive(Debug)] +struct Test { + span: H::Span, + + // the kind of test to be performed, + kind: TestKind, + + // the outcome we expect, + outcome: usize, + + // and the match pairs that will result + match_pairs: Vec> +} + +/////////////////////////////////////////////////////////////////////////// +// Main matching algorithm + +impl Builder { + fn match_candidates(&mut self, + span: H::Span, + var_extent: H::CodeExtent, + mut candidates: Vec>, + mut block: BasicBlock) + { + debug!("matched_candidate(span={:?}, var_extent={:?}, block={:?}, candidates={:?})", + span, var_extent, block, candidates); + + // Start by simplifying candidates. Once this process is + // complete, all the match pairs which remain require some + // form of test, whether it be a switch or pattern comparison. + for candidate in &mut candidates { + self.simplify_candidate(candidate); + } + + // The candidates are inversely sorted by priority. Check to + // see whether the candidates in the front of the queue (and + // hence back of the vec) have satisfied all their match + // pairs. + let fully_matched = + candidates.iter().rev().take_while(|c| c.match_pairs.is_empty()).count(); + debug!("match_candidates: {:?} candidates fully matched", fully_matched); + for _ in 0..fully_matched { + // If so, apply any bindings, test the guard (if any), and + // branch to the arm. + let candidate = candidates.pop().unwrap(); + match self.bind_and_guard_matched_candidate(block, var_extent, candidate) { + None => { return; } + Some(b) => { block = b; } + } + } + + // If there are no candidates that still need testing, we're done. + // Since all matches are exhaustive, execution should never reach this point. 
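Because candidates are stored in reverse priority order, "peel off every leading candidate whose match pairs are all satisfied" becomes a `rev().take_while(..)` count followed by `pop()`s, exactly as in `match_candidates`. A toy version of just that bookkeeping:

    struct Candidate {
        match_pairs: Vec<&'static str>, // unresolved tests; empty == fully matched
        arm: usize,
    }

    fn pop_fully_matched(candidates: &mut Vec<Candidate>) -> Vec<usize> {
        // Candidates are stored lowest-priority-first, so the front of the
        // "queue" is the back of the vector.
        let fully_matched = candidates.iter()
                                      .rev()
                                      .take_while(|c| c.match_pairs.is_empty())
                                      .count();
        (0..fully_matched).map(|_| candidates.pop().unwrap().arm).collect()
    }

    fn main() {
        let mut candidates = vec![
            Candidate { match_pairs: vec!["x.0 @ Ok(_)"], arm: 2 }, // still needs a test
            Candidate { match_pairs: vec![], arm: 1 },
            Candidate { match_pairs: vec![], arm: 0 },              // highest priority
        ];
        assert_eq!(pop_fully_matched(&mut candidates), vec![0, 1]);
        assert_eq!(candidates.len(), 1); // arm 2 still needs its switch test
    }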
+ if candidates.is_empty() { + return self.panic(block); + } + + // otherwise, extract the next match pair and construct tests + let match_pair = &candidates.last().unwrap().match_pairs[0]; + let test = self.test(match_pair); + debug!("match_candidates: test={:?} match_pair={:?}", test, match_pair); + let target_blocks = self.perform_test(block, &match_pair.lvalue, &test); + + for (outcome, target_block) in target_blocks.into_iter().enumerate() { + let applicable_candidates: Vec> = + candidates.iter() + .filter_map(|candidate| { + self.candidate_under_assumption(&match_pair.lvalue, + &test.kind, + outcome, + candidate) + }) + .collect(); + self.match_candidates(span, var_extent, applicable_candidates, target_block); + } + } + + /// Initializes each of the bindings from the candidate by + /// moving/copying/ref'ing the source as appropriate. Tests the + /// guard, if any, and then branches to the arm. Returns the block + /// for the case where the guard fails. + /// + /// Note: we check earlier that if there is a guard, there cannot + /// be move bindings. This isn't really important for the + /// self-consistency of this fn, but the reason for it should be + /// clear: after we've done the assignments, if there were move + /// bindings, further tests would be a use-after-move (which would + /// in turn be detected by the borrowck code that runs on the + /// MIR). + fn bind_and_guard_matched_candidate(&mut self, + mut block: BasicBlock, + var_extent: H::CodeExtent, + candidate: Candidate) + -> Option { + debug!("bind_and_guard_matched_candidate(block={:?}, var_extent={:?}, candidate={:?})", + block, var_extent, candidate); + + debug_assert!(candidate.match_pairs.is_empty()); + + self.bind_matched_candidate(block, var_extent, candidate.bindings); + + if let Some(guard) = candidate.guard { + // the block to branch to if the guard fails; if there is no + // guard, this block is simply unreachable + let cond = unpack!(block = self.as_operand(block, guard)); + let otherwise = self.cfg.start_new_block(); + self.cfg.terminate(block, Terminator::If { cond: cond, + targets: [candidate.arm_block, otherwise]}); + Some(otherwise) + } else { + self.cfg.terminate(block, Terminator::Goto { target: candidate.arm_block }); + None + } + } + + fn bind_matched_candidate(&mut self, + block: BasicBlock, + var_extent: H::CodeExtent, + bindings: Vec>) { + debug!("bind_matched_candidate(block={:?}, var_extent={:?}, bindings={:?})", + block, var_extent, bindings); + + // Assign each of the bindings. This may trigger moves out of the candidate. + for binding in bindings { + // Create a variable for the `var_id` being bound. In the + // case where there are multiple patterns for a single + // arm, it may already exist. 
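After a test is emitted, each outcome's target block only receives the candidates still possible under that outcome; `candidate_under_assumption` returns `None` to drop a candidate. A sketch of that `filter_map` step, with a plain `required_outcome` field standing in for the real `TestKind` reasoning (the names here are illustrative, not the rustc_mir API):

    #[derive(Clone, Debug)]
    struct Candidate {
        // Which outcome of the current test this candidate requires,
        // or None if it is compatible with any outcome.
        required_outcome: Option<usize>,
        arm: usize,
    }

    fn candidate_under_assumption(outcome: usize, candidate: &Candidate) -> Option<Candidate> {
        match candidate.required_outcome {
            Some(required) if required != outcome => None, // ruled out
            _ => Some(candidate.clone()),                  // still viable
        }
    }

    fn main() {
        let candidates = vec![
            Candidate { required_outcome: Some(0), arm: 0 }, // e.g. `Ok(..)`
            Candidate { required_outcome: Some(1), arm: 1 }, // e.g. `Err(..)`
            Candidate { required_outcome: None,    arm: 2 }, // e.g. `_`
        ];

        // One filtered list per test outcome, as in `match_candidates`.
        for outcome in 0..2 {
            let applicable: Vec<Candidate> =
                candidates.iter()
                          .filter_map(|c| candidate_under_assumption(outcome, c))
                          .collect();
            assert_eq!(applicable.len(), 2); // the matching arm plus the wildcard
        }
    }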
+ let var_index = if !self.var_indices.contains_key(&binding.var_id) { + self.declare_binding(var_extent, + binding.mutability, + binding.name, + binding.var_id, + binding.var_ty, + binding.span) + } else { + self.var_indices[&binding.var_id] + }; + + let rvalue = match binding.binding_mode { + BindingMode::ByValue => + Rvalue::Use(Operand::Consume(binding.source)), + BindingMode::ByRef(region, borrow_kind) => + Rvalue::Ref(region, borrow_kind, binding.source), + }; + + self.cfg.push_assign(block, binding.span, &Lvalue::Var(var_index), rvalue); + } + } + + fn declare_binding(&mut self, + var_extent: H::CodeExtent, + mutability: Mutability, + name: H::Ident, + var_id: H::VarId, + var_ty: H::Ty, + span: H::Span) + -> u32 + { + debug!("declare_binding(var_id={:?}, name={:?}, var_ty={:?}, var_extent={:?}, span={:?})", + var_id, name, var_ty, var_extent, span); + + let index = self.var_decls.len(); + self.var_decls.push(VarDecl:: { + mutability: mutability, + name: name, + ty: var_ty.clone(), + }); + let index = index as u32; + self.schedule_drop(span, var_extent, DropKind::Deep, &Lvalue::Var(index), var_ty); + self.var_indices.insert(var_id, index); + + debug!("declare_binding: index={:?}", index); + + index + } +} + diff --git a/src/librustc_mir/build/matches/simplify.rs b/src/librustc_mir/build/matches/simplify.rs new file mode 100644 index 00000000000..90d01b44345 --- /dev/null +++ b/src/librustc_mir/build/matches/simplify.rs @@ -0,0 +1,127 @@ +// Copyright 2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! Simplifying Candidates +//! +//! *Simplifying* a match pair `lvalue @ pattern` means breaking it down +//! into bindings or other, simpler match pairs. For example: +//! +//! - `lvalue @ (P1, P2)` can be simplified to `[lvalue.0 @ P1, lvalue.1 @ P2]` +//! - `lvalue @ x` can be simplified to `[]` by binding `x` to `lvalue` +//! +//! The `simplify_candidate` routine just repeatedly applies these +//! sort of simplifications until there is nothing left to +//! simplify. Match pairs cannot be simplified if they require some +//! sort of test: for example, testing which variant an enum is, or +//! testing a value against a constant. + +use build::Builder; +use build::matches::{Binding, MatchPair, Candidate}; +use hair::*; +use repr::*; + +use std::mem; + +impl Builder { + pub fn simplify_candidate(&mut self, + candidate: &mut Candidate) + { + // repeatedly simplify match pairs until fixed point is reached + loop { + let match_pairs = mem::replace(&mut candidate.match_pairs, vec![]); + let mut progress = match_pairs.len(); // count how many were simplified + for match_pair in match_pairs { + if let Err(match_pair) = self.simplify_match_pair(match_pair, candidate) { + candidate.match_pairs.push(match_pair); + progress -= 1; // this one was not simplified + } + } + if progress == 0 { + return; // if we were not able to simplify any, done. + } + } + } + + /// Tries to simplify `match_pair`, returning true if + /// successful. If successful, new match pairs and bindings will + /// have been pushed into the candidate. On failure (if false is + /// returned), no changes are made to candidate. 
+ fn simplify_match_pair(&mut self, + match_pair: MatchPair, + candidate: &mut Candidate) + -> Result<(), MatchPair> // returns Err() if cannot simplify + { + match match_pair.pattern.kind { + PatternKind::Wild(..) => { + // nothing left to do + Ok(()) + } + + PatternKind::Binding { name, mutability, mode, var, ty, subpattern } => { + candidate.bindings.push(Binding { + name: name, + mutability: mutability, + span: match_pair.pattern.span, + source: match_pair.lvalue.clone(), + var_id: var, + var_ty: ty, + binding_mode: mode, + }); + + if let Some(subpattern) = subpattern { + // this is the `x @ P` case; have to keep matching against `P` now + let subpattern = self.hir.mirror(subpattern); + candidate.match_pairs.push(MatchPair::new(match_pair.lvalue, subpattern)); + } + + Ok(()) + } + + PatternKind::Constant { .. } => { + // FIXME normalize patterns when possible + Err(match_pair) + } + + PatternKind::Array { prefix, slice: None, suffix } => { + self.append_prefix_suffix_pairs( + &mut candidate.match_pairs, match_pair.lvalue.clone(), prefix, suffix); + Ok(()) + } + + PatternKind::Array { prefix: _, slice: Some(_), suffix: _ } => { + self.hir.span_bug( + match_pair.pattern.span, + &format!("slice patterns not implemented in MIR")); + } + + PatternKind::Slice { .. } | + PatternKind::Range { .. } | + PatternKind::Variant { .. } => { + // cannot simplify, test is required + Err(match_pair) + } + + PatternKind::Leaf { subpatterns } => { + // tuple struct, match subpats (if any) + candidate.match_pairs.extend( + self.field_match_pairs(match_pair.lvalue, subpatterns)); + Ok(()) + } + + PatternKind::Deref { subpattern } => { + let lvalue = match_pair.lvalue.deref(); + let subpattern = self.hir.mirror(subpattern); + candidate.match_pairs.push(MatchPair::new(lvalue, subpattern)); + Ok(()) + } + } + } +} + diff --git a/src/librustc_mir/build/matches/test.rs b/src/librustc_mir/build/matches/test.rs new file mode 100644 index 00000000000..181c947038c --- /dev/null +++ b/src/librustc_mir/build/matches/test.rs @@ -0,0 +1,301 @@ +// Copyright 2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// Testing candidates +// +// After candidates have been simplified, the only match pairs that +// remain are those that require some sort of test. The functions here +// identify what tests are needed, perform the tests, and then filter +// the candidates based on the result. + +use build::Builder; +use build::matches::{Candidate, MatchPair, Test, TestKind}; +use hair::*; +use repr::*; + +impl Builder { + /// Identifies what test is needed to decide if `match_pair` is applicable. + /// + /// It is a bug to call this with a simplifyable pattern. 
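`simplify_candidate` drives `simplify_match_pair` to a fixed point: pairs that can be broken down are expanded in place, and pairs that need a runtime test are handed back via `Err` and retried unchanged until a pass makes no progress. A self-contained model of that loop over toy patterns:

    use std::mem;

    #[derive(Debug, PartialEq)]
    enum Pat {
        Wild,             // simplifies away entirely
        Tuple(Vec<Pat>),  // simplifies into its sub-patterns
        Constant(i64),    // needs a runtime test; cannot be simplified
    }

    // Returns Err(pair) when the pair requires a test, like the real routine.
    fn simplify_match_pair(pat: Pat, out: &mut Vec<Pat>) -> Result<(), Pat> {
        match pat {
            Pat::Wild => Ok(()),
            Pat::Tuple(subpats) => {
                out.extend(subpats);
                Ok(())
            }
            Pat::Constant(n) => Err(Pat::Constant(n)),
        }
    }

    fn simplify_candidate(match_pairs: &mut Vec<Pat>) {
        // repeatedly simplify match pairs until a fixed point is reached
        loop {
            let pairs = mem::replace(match_pairs, vec![]);
            let mut progress = pairs.len();
            for pair in pairs {
                if let Err(pair) = simplify_match_pair(pair, match_pairs) {
                    match_pairs.push(pair); // this one was not simplified
                    progress -= 1;
                }
            }
            if progress == 0 {
                return;
            }
        }
    }

    fn main() {
        let mut pairs = vec![Pat::Tuple(vec![Pat::Wild, Pat::Constant(3)])];
        simplify_candidate(&mut pairs);
        // Only the pair that needs an equality test survives simplification.
        assert_eq!(pairs, vec![Pat::Constant(3)]);
    }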
+ pub fn test(&mut self, match_pair: &MatchPair) -> Test { + match match_pair.pattern.kind.clone() { + PatternKind::Variant { adt_def, variant_index, subpatterns } => { + let elem = ProjectionElem::Downcast(adt_def, variant_index); + let downcast_lvalue = match_pair.lvalue.clone().elem(elem); + + let consequent_match_pairs = + subpatterns.into_iter() + .map(|subpattern| { + let lvalue = + downcast_lvalue.clone().field( + subpattern.field); + self.match_pair(lvalue, subpattern.pattern) + }) + .collect(); + + Test { + span: match_pair.pattern.span, + kind: TestKind::Switch { adt_def: adt_def }, + outcome: variant_index, + match_pairs: consequent_match_pairs, + } + } + + PatternKind::Constant { expr } => { + let expr = self.as_constant(expr); + Test { + span: match_pair.pattern.span, + kind: TestKind::Eq { value: expr, + ty: match_pair.pattern.ty.clone() }, + outcome: 0, // 0 == true, of course. :) + match_pairs: vec![] + } + } + + PatternKind::Range { lo, hi } => { + let lo = self.as_constant(lo); + let hi = self.as_constant(hi); + Test { + span: match_pair.pattern.span, + kind: TestKind::Range { lo: lo, + hi: hi, + ty: match_pair.pattern.ty.clone() }, + outcome: 0, // 0 == true, of course. :) + match_pairs: vec![] + } + } + + PatternKind::Slice { prefix, slice: None, suffix } => { + let len = prefix.len() + suffix.len(); + let mut consequent_match_pairs = vec![]; + self.append_prefix_suffix_pairs( + &mut consequent_match_pairs, match_pair.lvalue.clone(), prefix, suffix); + Test { + span: match_pair.pattern.span, + kind: TestKind::Len { len: len, op: BinOp::Eq }, + outcome: 0, // 0 == true, of course. :) + match_pairs: consequent_match_pairs + } + } + + PatternKind::Slice { prefix: _, slice: Some(_), suffix: _ } => { + self.hir.span_bug( + match_pair.pattern.span, + &format!("slice patterns not implemented in MIR")); + } + + PatternKind::Array { .. } | + PatternKind::Wild | + PatternKind::Binding { .. } | + PatternKind::Leaf { .. } | + PatternKind::Deref { .. } => { + self.error_simplifyable(match_pair) + } + } + } + + /// Generates the code to perform a test. + pub fn perform_test(&mut self, + block: BasicBlock, + lvalue: &Lvalue, + test: &Test) + -> Vec { + match test.kind.clone() { + TestKind::Switch { adt_def } => { + let num_enum_variants = self.hir.num_variants(adt_def); + let target_blocks: Vec<_> = + (0..num_enum_variants).map(|_| self.cfg.start_new_block()) + .collect(); + self.cfg.terminate(block, Terminator::Switch { + discr: lvalue.clone(), + targets: target_blocks.clone() + }); + target_blocks + } + + TestKind::Eq { value, ty } => { + // call PartialEq::eq(discrim, constant) + let constant = self.push_constant(block, test.span, ty.clone(), value); + let item_ref = self.hir.partial_eq(ty); + self.call_comparison_fn(block, test.span, item_ref, lvalue.clone(), constant) + } + + TestKind::Range { lo, hi, ty } => { + // Test `v` by computing `PartialOrd::le(lo, v) && PartialOrd::le(v, hi)`. 
+ let lo = self.push_constant(block, test.span, ty.clone(), lo); + let hi = self.push_constant(block, test.span, ty.clone(), hi); + let item_ref = self.hir.partial_le(ty); + + let lo_blocks = + self.call_comparison_fn(block, test.span, item_ref.clone(), lo, lvalue.clone()); + + let hi_blocks = + self.call_comparison_fn(lo_blocks[0], test.span, item_ref, lvalue.clone(), hi); + + let failure = self.cfg.start_new_block(); + self.cfg.terminate(lo_blocks[1], Terminator::Goto { target: failure }); + self.cfg.terminate(hi_blocks[1], Terminator::Goto { target: failure }); + + vec![hi_blocks[0], failure] + } + + TestKind::Len { len, op } => { + let (usize_ty, bool_ty) = (self.hir.usize_ty(), self.hir.bool_ty()); + let (actual, result) = (self.temp(usize_ty), self.temp(bool_ty)); + + // actual = len(lvalue) + self.cfg.push_assign( + block, test.span, + &actual, Rvalue::Len(lvalue.clone())); + + // expected = + let expected = + self.push_usize(block, test.span, len); + + // result = actual == expected OR result = actual < expected + self.cfg.push_assign( + block, test.span, + &result, Rvalue::BinaryOp(op, + Operand::Consume(actual), + Operand::Consume(expected))); + + // branch based on result + let target_blocks: Vec<_> = vec![self.cfg.start_new_block(), + self.cfg.start_new_block()]; + self.cfg.terminate(block, Terminator::If { + cond: Operand::Consume(result), + targets: [target_blocks[0], target_blocks[1]] + }); + + target_blocks + } + } + } + + fn call_comparison_fn(&mut self, + block: BasicBlock, + span: H::Span, + item_ref: ItemRef, + lvalue1: Lvalue, + lvalue2: Lvalue) + -> Vec { + let target_blocks = vec![self.cfg.start_new_block(), + self.cfg.start_new_block()]; + + let bool_ty = self.hir.bool_ty(); + let eq_result = self.temp(bool_ty); + let func = self.push_item_ref(block, span, item_ref); + let call_blocks = [self.cfg.start_new_block(), self.diverge_cleanup()]; + self.cfg.terminate(block, + Terminator::Call { + data: CallData { + destination: eq_result.clone(), + func: func, + args: vec![lvalue1, lvalue2], + }, + targets: call_blocks, + }); + + // check the result + self.cfg.terminate(call_blocks[0], + Terminator::If { + cond: Operand::Consume(eq_result), + targets: [target_blocks[0], target_blocks[1]] + }); + + target_blocks + } + + /// Given a candidate and the outcome of a test we have performed, + /// transforms the candidate into a new candidate that reflects + /// further tests still needed. Returns `None` if this candidate + /// has now been ruled out. + /// + /// For example, if a candidate included the patterns `[x.0 @ + /// Ok(P1), x.1 @ 22]`, and we did a switch test on `x.0` and + /// found the variant `Err` (as indicated by the `test_outcome` + /// parameter), we would return `None`. But if the test_outcome + /// were `Ok`, we would return `Some([x.0.downcast.0 @ P1, x.1 + /// @ 22])`. + pub fn candidate_under_assumption(&mut self, + test_lvalue: &Lvalue, + test_kind: &TestKind, + test_outcome: usize, + candidate: &Candidate) + -> Option> { + let candidate = candidate.clone(); + let match_pairs = candidate.match_pairs; + match self.match_pairs_under_assumption(test_lvalue, test_kind, test_outcome, match_pairs) { + Some(match_pairs) => Some(Candidate { match_pairs: match_pairs, ..candidate }), + None => None + } + } + + /// Helper for candidate_under_assumption that does the actual + /// work of transforming the list of match pairs. 
+ fn match_pairs_under_assumption(&mut self, + test_lvalue: &Lvalue, + test_kind: &TestKind, + test_outcome: usize, + match_pairs: Vec>) + -> Option>> { + let mut result = vec![]; + for match_pair in match_pairs { + // if the match pair is testing a different lvalue, it + // is unaffected by this test. + if match_pair.lvalue != *test_lvalue { + result.push(match_pair); + continue; + } + + let desired_test = self.test(&match_pair); + + if *test_kind != desired_test.kind { + // if the match pair wants to (e.g.) test for + // equality against some particular constant, but + // we did a switch, then we can't say whether it + // matches or not, so we still have to include it + // as a possibility. + // + // For example, we have a constant `FOO: + // Option = Some(22)`, and `match_pair` is `x + // @ FOO`, but we did a switch on the variant + // (`Some` vs `None`). (OK, in principle this + // could tell us something, but we're not that + // smart yet to actually dig into the constant + // itself) + result.push(match_pair); + continue; + } + + if test_outcome != desired_test.outcome { + // if we did the right kind of test, but it had the + // wrong outcome, then this *entire candidate* can no + // longer apply, huzzah! Therefore, we can stop this + // iteration and just return `None` to our caller. + return None; + } + + // otherwise, the test passed, so we now have to include the + // "unlocked" set of match pairs. For example, if we had `x @ + // Some(P1)`, and here we `test_kind==Switch` and + // `outcome=Some`, then we would return `x.downcast.0 @ + // P1`. + result.extend(desired_test.match_pairs); + } + Some(result) + } + + fn error_simplifyable(&mut self, match_pair: &MatchPair) -> ! { + self.hir.span_bug( + match_pair.pattern.span, + &format!("simplifyable pattern found: {:?}", match_pair.pattern)) + } +} diff --git a/src/librustc_mir/build/matches/util.rs b/src/librustc_mir/build/matches/util.rs new file mode 100644 index 00000000000..e0357196434 --- /dev/null +++ b/src/librustc_mir/build/matches/util.rs @@ -0,0 +1,82 @@ +// Copyright 2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
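The helpers in this new `matches/util.rs` file turn the prefix and suffix of a slice pattern into `ConstantIndex` projections: prefix elements are counted from the front, suffix elements from the back, and `min_length` records the shortest slice the pattern can match. As a rough, self-contained illustration of that indexing convention (the `constant_index` function and the sample data below are invented for this sketch, not part of the patch):

```
/// Interpret a `ConstantIndex`-style projection against an ordinary slice:
/// `offset` counts from the front when `from_end` is false and from the
/// back when it is true; `min_length` is the shortest slice the pattern
/// can match.
fn constant_index<T>(slice: &[T], offset: usize, min_length: usize, from_end: bool) -> Option<&T> {
    if slice.len() < min_length {
        return None; // the pattern could not have matched this slice at all
    }
    if from_end {
        slice.len().checked_sub(offset).and_then(|i| slice.get(i))
    } else {
        slice.get(offset)
    }
}

fn main() {
    // For a pattern shaped like `[a, b, .., y, z]`, the builder would emit
    // prefix projections { offset: 0, from_end: false } and { offset: 1,
    // from_end: false }, and suffix projections { offset: 2, from_end: true }
    // and { offset: 1, from_end: true }, all with min_length = 4.
    let data = [10, 20, 30, 40, 50, 60];
    assert_eq!(constant_index(&data, 0, 4, false), Some(&10)); // a
    assert_eq!(constant_index(&data, 1, 4, false), Some(&20)); // b
    assert_eq!(constant_index(&data, 2, 4, true), Some(&50));  // y
    assert_eq!(constant_index(&data, 1, 4, true), Some(&60));  // z
}
```

Counting the suffix from the end is what lets a pattern like `[a, b, .., y, z]` match slices of any length of at least four without knowing the length up front.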
+ +use build::Builder; +use build::matches::MatchPair; +use hair::*; +use repr::*; +use std::u32; + +impl Builder { + pub fn field_match_pairs(&mut self, + lvalue: Lvalue, + subpatterns: Vec>) + -> Vec> { + subpatterns.into_iter() + .map(|fieldpat| { + let lvalue = lvalue.clone().field(fieldpat.field); + self.match_pair(lvalue, fieldpat.pattern) + }) + .collect() + } + + pub fn match_pair(&mut self, lvalue: Lvalue, pattern: PatternRef) -> MatchPair { + let pattern = self.hir.mirror(pattern); + MatchPair::new(lvalue, pattern) + } + + pub fn append_prefix_suffix_pairs(&mut self, + match_pairs: &mut Vec>, + lvalue: Lvalue, + prefix: Vec>, + suffix: Vec>) + { + let min_length = prefix.len() + suffix.len(); + assert!(min_length < u32::MAX as usize); + let min_length = min_length as u32; + + let prefix_pairs: Vec<_> = + prefix.into_iter() + .enumerate() + .map(|(idx, subpattern)| { + let elem = ProjectionElem::ConstantIndex { + offset: idx as u32, + min_length: min_length, + from_end: false, + }; + let lvalue = lvalue.clone().elem(elem); + self.match_pair(lvalue, subpattern) + }) + .collect(); + + let suffix_pairs: Vec<_> = + suffix.into_iter() + .rev() + .enumerate() + .map(|(idx, subpattern)| { + let elem = ProjectionElem::ConstantIndex { + offset: (idx+1) as u32, + min_length: min_length, + from_end: true, + }; + let lvalue = lvalue.clone().elem(elem); + self.match_pair(lvalue, subpattern) + }) + .collect(); + + match_pairs.extend(prefix_pairs.into_iter().chain(suffix_pairs)); + } +} + +impl MatchPair { + pub fn new(lvalue: Lvalue, pattern: Pattern) -> MatchPair { + MatchPair { lvalue: lvalue, pattern: pattern } + } +} diff --git a/src/librustc_mir/build/misc.rs b/src/librustc_mir/build/misc.rs new file mode 100644 index 00000000000..1c44988e4b4 --- /dev/null +++ b/src/librustc_mir/build/misc.rs @@ -0,0 +1,78 @@ +// Copyright 2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! Miscellaneous builder routines that are not specific to building any particular +//! kind of thing. + +use build::Builder; +use hair::*; +use repr::*; + +use std::u32; + +impl Builder { + /// Add a new temporary value of type `ty` storing the result of + /// evaluating `expr`. + /// + /// NB: **No cleanup is scheduled for this temporary.** You should + /// call `schedule_drop` once the temporary is initialized. 
+ pub fn temp(&mut self, ty: H::Ty) -> Lvalue { + let index = self.temp_decls.len(); + self.temp_decls.push(TempDecl { ty: ty }); + assert!(index < (u32::MAX) as usize); + let lvalue = Lvalue::Temp(index as u32); + debug!("temp: created temp {:?} with type {:?}", + lvalue, self.temp_decls.last().unwrap().ty); + lvalue + } + + pub fn push_constant(&mut self, + block: BasicBlock, + span: H::Span, + ty: H::Ty, + constant: Constant) + -> Lvalue { + let temp = self.temp(ty); + self.cfg.push_assign_constant(block, span, &temp, constant); + temp + } + + pub fn push_usize(&mut self, + block: BasicBlock, + span: H::Span, + value: usize) + -> Lvalue { + let usize_ty = self.hir.usize_ty(); + let temp = self.temp(usize_ty); + self.cfg.push_assign_constant( + block, span, &temp, + Constant { + span: span, + kind: ConstantKind::Literal(Literal::Uint { bits: IntegralBits::BSize, + value: value as u64 }), + }); + temp + } + + pub fn push_item_ref(&mut self, + block: BasicBlock, + span: H::Span, + item_ref: ItemRef) + -> Lvalue { + let constant = Constant { + span: span, + kind: ConstantKind::Literal(Literal::Item { + def_id: item_ref.def_id, + substs: item_ref.substs + }) + }; + self.push_constant(block, span, item_ref.ty, constant) + } +} diff --git a/src/librustc_mir/build/mod.rs b/src/librustc_mir/build/mod.rs new file mode 100644 index 00000000000..9d00044c660 --- /dev/null +++ b/src/librustc_mir/build/mod.rs @@ -0,0 +1,174 @@ +// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +use hair::{self, Hair}; +use rustc_data_structures::fnv::FnvHashMap; +use repr::*; + +struct Builder { + hir: H, + extents: FnvHashMap>, + cfg: CFG, + scopes: Vec>, + loop_scopes: Vec>, + unit_temp: Lvalue, + var_decls: Vec>, + var_indices: FnvHashMap, + temp_decls: Vec>, +} + +struct CFG { + basic_blocks: Vec> +} + +/////////////////////////////////////////////////////////////////////////// +// The `BlockAnd` "monad" packages up the new basic block along with a +// produced value (sometimes just unit, of course). The `unpack!` +// macro (and methods below) makes working with `BlockAnd` much more +// convenient. + +#[must_use] // if you don't use one of these results, you're leaving a dangling edge +struct BlockAnd(BasicBlock, T); + +impl BasicBlock { + fn and(self, v: T) -> BlockAnd { + BlockAnd(self, v) + } + + fn unit(self) -> BlockAnd<()> { + BlockAnd(self, ()) + } +} + +/// Update a block pointer and return the value. +/// Use it like `let x = unpack!(block = self.foo(block, foo))`. +macro_rules! 
unpack { + ($x:ident = $c:expr) => { + { + let BlockAnd(b, v) = $c; + $x = b; + v + } + }; + + ($c:expr) => { + { + let BlockAnd(b, ()) = $c; + b + } + }; +} + +/////////////////////////////////////////////////////////////////////////// +// construct() -- the main entry point for building MIR for a function + +pub fn construct(mut hir: H, + _span: H::Span, + implicit_arguments: Vec, + explicit_arguments: Vec<(H::Ty, H::Pattern)>, + argument_extent: H::CodeExtent, + ast_block: H::Block) + -> Mir { + let cfg = CFG { basic_blocks: vec![] }; + + // it's handy to have a temporary of type `()` sometimes, so make + // one from the start and keep it available + let temp_decls = vec![TempDecl:: { ty: hir.unit_ty() }]; + let unit_temp = Lvalue::Temp(0); + + let mut builder = Builder { + hir: hir, + cfg: cfg, + extents: FnvHashMap(), + scopes: vec![], + loop_scopes: vec![], + temp_decls: temp_decls, + var_decls: vec![], + var_indices: FnvHashMap(), + unit_temp: unit_temp, + }; + + assert_eq!(builder.cfg.start_new_block(), START_BLOCK); + assert_eq!(builder.cfg.start_new_block(), END_BLOCK); + assert_eq!(builder.cfg.start_new_block(), DIVERGE_BLOCK); + + let mut block = START_BLOCK; + let arg_decls = unpack!(block = builder.args_and_body(block, + implicit_arguments, + explicit_arguments, + argument_extent, + ast_block)); + + builder.cfg.terminate(block, Terminator::Goto { target: END_BLOCK }); + builder.cfg.terminate(END_BLOCK, Terminator::Return); + + Mir { + basic_blocks: builder.cfg.basic_blocks, + extents: builder.extents, + var_decls: builder.var_decls, + arg_decls: arg_decls, + temp_decls: builder.temp_decls, + } +} + +impl Builder { + fn args_and_body(&mut self, + mut block: BasicBlock, + implicit_arguments: Vec, + explicit_arguments: Vec<(H::Ty, H::Pattern)>, + argument_extent: H::CodeExtent, + ast_block: H::Block) + -> BlockAnd>> + { + self.in_scope(argument_extent, block, |this| { + let arg_decls = { + let implicit_arg_decls = implicit_arguments.into_iter() + .map(|ty| ArgDecl { ty: ty }); + + // to start, translate the argument patterns and collect the + // argument types. + let explicit_arg_decls = + explicit_arguments + .into_iter() + .enumerate() + .map(|(index, (ty, pattern))| { + let lvalue = Lvalue::Arg(index as u32); + unpack!(block = this.lvalue_into_pattern(block, + argument_extent, + hair::PatternRef::Hair(pattern), + &lvalue)); + ArgDecl { ty: ty } + }); + + implicit_arg_decls.chain(explicit_arg_decls).collect() + }; + + // start the first basic block and translate the body + unpack!(block = this.ast_block(&Lvalue::ReturnPointer, block, ast_block)); + + block.and(arg_decls) + }) + } +} + +/////////////////////////////////////////////////////////////////////////// +// Builder methods are broken up into modules, depending on what kind +// of thing is being translated. Note that they use the `unpack` macro +// above extensively. + +mod block; +mod cfg; +mod expr; +mod into; +mod matches; +mod misc; +mod scope; +mod stmt; + diff --git a/src/librustc_mir/build/scope.rs b/src/librustc_mir/build/scope.rs new file mode 100644 index 00000000000..87a4731ac74 --- /dev/null +++ b/src/librustc_mir/build/scope.rs @@ -0,0 +1,304 @@ +// Copyright 2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +/*! 
+
Managing the scope stack. The scopes are tied to lexical scopes, so as
we descend the HAIR, we push a scope on the stack, translate its
contents, and then pop it off. Every scope is named by a
`H::CodeExtent`.

### SEME Regions

When pushing a new scope, we record the current point in the graph (a
basic block); this marks the entry to the scope. We then generate more
stuff in the control-flow graph. Whenever the scope is exited, either
via a `break` or `return` or just by fallthrough, that marks an exit
from the scope. Each lexical scope thus corresponds to a single-entry,
multiple-exit (SEME) region in the control-flow graph.

For now, we keep a mapping from each `H::CodeExtent` to its
corresponding SEME region for later reference (see caveat in next
paragraph). This is because region scopes are tied to
them. Eventually, when we shift to non-lexical lifetimes, there should
be no need to remember this mapping.

There is one additional wrinkle, actually, that I wanted to hide from
you but duty compels me to mention. In the course of translating
matches, it sometimes happens that certain code (namely guards) gets
executed multiple times. This means that a single lexical scope may
in fact correspond to multiple, disjoint SEME regions. So in fact our
mapping is from one scope to a vector of SEME regions.

### Drops

The primary purpose for scopes is to insert drops: while translating
the contents, we also accumulate lvalues that need to be dropped upon
exit from each scope. This is done by calling `schedule_drop`. Once a
drop is scheduled, whenever we branch out we will insert drops of all
those lvalues onto the outgoing edge. Note that we don't know the full
set of scheduled drops up front, and so whenever we exit from the
scope we only drop the values scheduled thus far. For example, consider
the scope S corresponding to this loop:

```
loop {
    let x = ...;
    if cond { break; }
    let y = ...;
}
```

When processing the `let x`, we will add one drop to the scope for
`x`. The break will then insert a drop for `x`. When we process `let
y`, we will add another drop (in fact, to a subscope, but let's ignore
that for now); any later exits would also drop `y`.

### Early exit

There are numerous "normal" ways to early exit a scope: `break`,
`continue`, `return` (panics are handled separately). Whenever an
early exit occurs, the method `exit_scope` is called. It is given the
current point in execution where the early exit occurs, as well as the
scope you want to branch to (note that all early exits go to some
other enclosing scope). `exit_scope` will record this exit point and
also add all of the scheduled drops.

Panics are handled in a similar fashion, except that a panic always
returns out to the `DIVERGE_BLOCK`. To trigger a panic, simply call
`panic(p)` with the current point `p`. Or else you can call
`diverge_cleanup`, which will produce a block that you can branch to
which does the appropriate cleanup and then diverges. `panic(p)`
simply calls `diverge_cleanup()` and adds an edge from `p` to the
result.

### Loop scopes

In addition to the normal scope stack, we track a loop scope stack
that contains only loops. It tracks where a `break` and `continue`
should go to.
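Tying drops, early exits, and loop scopes together, here is a small
hypothetical function (the names are made up for illustration)
annotated with where the builder would schedule and emit drops:

```
fn example(cond: bool) {
    let outer = String::from("outer"); // schedule_drop(outer) in the fn scope
    loop {
        let x = String::from("x");     // schedule_drop(x) in the loop body scope
        if cond {
            // `break` is an early exit: exit_scope emits drops for
            // everything scheduled so far in the scopes being left
            // (here just `x`) on the edge that leaves the loop.
            break;
        }
        let y = String::from("y");     // schedule_drop(y) in the loop body scope
        // the fallthrough edge to the next iteration drops `y`, then `x`
    }
    // the fn scope's fallthrough exit drops `outer`
}
```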
+ +*/ + +use build::{BlockAnd, Builder, CFG}; +use hair::Hair; +use repr::*; + +pub struct Scope { + extent: H::CodeExtent, + exits: Vec, + drops: Vec<(DropKind, H::Span, Lvalue)>, + cached_block: Option, +} + +#[derive(Clone, Debug)] +pub struct LoopScope { + pub extent: H::CodeExtent, // extent of the loop + pub continue_block: BasicBlock, // where to go on a `loop` + pub break_block: BasicBlock, // where to go on a `break +} + +impl Builder { + /// Start a loop scope, which tracks where `continue` and `break` + /// should branch to. See module comment for more details. + pub fn in_loop_scope(&mut self, + loop_block: BasicBlock, + break_block: BasicBlock, + f: F) + -> BlockAnd + where F: FnOnce(&mut Builder) -> BlockAnd + { + let extent = self.extent_of_innermost_scope().unwrap(); + let loop_scope = LoopScope:: { extent: extent.clone(), + continue_block: loop_block, + break_block: break_block }; + self.loop_scopes.push(loop_scope); + let r = f(self); + assert!(self.loop_scopes.pop().unwrap().extent == extent); + r + } + + /// Start a scope. The closure `f` should translate the contents + /// of the scope. See module comment for more details. + pub fn in_scope(&mut self, + extent: H::CodeExtent, + block: BasicBlock, + f: F) + -> BlockAnd + where F: FnOnce(&mut Builder) -> BlockAnd + { + debug!("in_scope(extent={:?}, block={:?})", extent, block); + + let start_point = self.cfg.end_point(block); + + // push scope, execute `f`, then pop scope again + self.scopes.push(Scope { + extent: extent.clone(), + drops: vec![], + exits: vec![], + cached_block: None, + }); + let BlockAnd(fallthrough_block, rv) = f(self); + let mut scope = self.scopes.pop().unwrap(); + + // add in any drops needed on the fallthrough path (any other + // exiting paths, such as those that arise from `break`, will + // have drops already) + for (kind, span, lvalue) in scope.drops { + self.cfg.push_drop(fallthrough_block, span, kind, &lvalue); + } + + // add the implicit fallthrough edge + scope.exits.push(self.cfg.end_point(fallthrough_block)); + + // compute the extent from start to finish and store it in the graph + let graph_extent = self.graph_extent(start_point, scope.exits); + self.extents.entry(extent) + .or_insert(vec![]) + .push(graph_extent); + + debug!("in_scope: exiting extent={:?} fallthrough_block={:?}", extent, fallthrough_block); + fallthrough_block.and(rv) + } + + /// Creates a graph extent (SEME region) from an entry point and + /// exit points. + fn graph_extent(&self, entry: ExecutionPoint, exits: Vec) -> GraphExtent { + if exits.len() == 1 && entry.block == exits[0].block { + GraphExtent { entry: entry, exit: GraphExtentExit::Statement(exits[0].statement) } + } else { + GraphExtent { entry: entry, exit: GraphExtentExit::Points(exits) } + } + } + + /// Finds the loop scope for a given label. This is used for + /// resolving `break` and `continue`. + pub fn find_loop_scope(&mut self, + span: H::Span, + label: Option) + -> LoopScope { + let loop_scope = + match label { + None => { + // no label? return the innermost loop scope + self.loop_scopes.iter() + .rev() + .next() + } + Some(label) => { + // otherwise, find the loop-scope with the correct id + self.loop_scopes.iter() + .rev() + .filter(|loop_scope| loop_scope.extent == label) + .next() + } + }; + + match loop_scope { + Some(loop_scope) => loop_scope.clone(), + None => self.hir.span_bug(span, "no enclosing loop scope found?") + } + } + + /// Branch out of `block` to `target`, exiting all scopes up to + /// and including `extent`. 
This will insert whatever drops are + /// needed, as well as tracking this exit for the SEME region. See + /// module comment for details. + pub fn exit_scope(&mut self, + span: H::Span, + extent: H::CodeExtent, + block: BasicBlock, + target: BasicBlock) { + let popped_scopes = + match self.scopes.iter().rev().position(|scope| scope.extent == extent) { + Some(p) => p + 1, + None => self.hir.span_bug(span, &format!("extent {:?} does not enclose", + extent)), + }; + + for scope in self.scopes.iter_mut().rev().take(popped_scopes) { + for &(kind, drop_span, ref lvalue) in &scope.drops { + self.cfg.push_drop(block, drop_span, kind, lvalue); + } + + scope.exits.push(self.cfg.end_point(block)); + } + + self.cfg.terminate(block, Terminator::Goto { target: target }); + } + + /// Creates a path that performs all required cleanup for + /// unwinding. This path terminates in DIVERGE. Returns the start + /// of the path. See module comment for more details. + pub fn diverge_cleanup(&mut self) -> BasicBlock { + diverge_cleanup_helper(&mut self.cfg, &mut self.scopes) + } + + /// Create diverge cleanup and branch to it from `block`. + pub fn panic(&mut self, block: BasicBlock) { + let cleanup = self.diverge_cleanup(); + self.cfg.terminate(block, Terminator::Panic { target: cleanup }); + } + + /// Indicates that `lvalue` should be dropped on exit from + /// `extent`. + pub fn schedule_drop(&mut self, + span: H::Span, + extent: H::CodeExtent, + kind: DropKind, + lvalue: &Lvalue, + lvalue_ty: H::Ty) + { + if self.hir.needs_drop(lvalue_ty, span) { + match self.scopes.iter_mut().rev().find(|s| s.extent == extent) { + Some(scope) => { + scope.drops.push((kind, span, lvalue.clone())); + scope.cached_block = None; + } + None => self.hir.span_bug(span, &format!("extent {:?} not in scope to drop {:?}", + extent, lvalue)), + } + } + } + + pub fn extent_of_innermost_scope(&self) -> Option { + self.scopes.last().map(|scope| scope.extent) + } + + pub fn extent_of_outermost_scope(&self) -> Option { + self.scopes.first().map(|scope| scope.extent) + } +} + +fn diverge_cleanup_helper(cfg: &mut CFG, + scopes: &mut [Scope]) + -> BasicBlock { + let len = scopes.len(); + + if len == 0 { + return DIVERGE_BLOCK; + } + + let (remaining, scope) = scopes.split_at_mut(len - 1); + let scope = &mut scope[0]; + + if let Some(b) = scope.cached_block { + return b; + } + + let block = cfg.start_new_block(); + for &(kind, span, ref lvalue) in &scope.drops { + cfg.push_drop(block, span, kind, lvalue); + } + scope.cached_block = Some(block); + + let remaining_cleanup_block = diverge_cleanup_helper(cfg, remaining); + cfg.terminate(block, Terminator::Goto { target: remaining_cleanup_block }); + block +} diff --git a/src/librustc_mir/build/stmt.rs b/src/librustc_mir/build/stmt.rs new file mode 100644 index 00000000000..9d5a83154d4 --- /dev/null +++ b/src/librustc_mir/build/stmt.rs @@ -0,0 +1,61 @@ +// Copyright 2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
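Before moving on to statements: the unwind path produced by `diverge_cleanup_helper` above is just a chain of per-scope cleanup blocks, each dropping its scope's scheduled values and jumping to the cleanup block of the enclosing scope, bottoming out at `DIVERGE_BLOCK`. A stripped-down, runnable model of that chaining, using plain `usize` block ids and `String` drop labels as stand-ins for the real `CFG` and `Lvalue` types (everything here is invented for illustration):

```
const DIVERGE_BLOCK: usize = 2;

struct Scope {
    drops: Vec<String>,          // values scheduled for drop in this scope
    cached_block: Option<usize>, // memoized cleanup block for unwinding
}

struct Cfg {
    // each block is (drop labels, optional goto target)
    blocks: Vec<(Vec<String>, Option<usize>)>,
}

impl Cfg {
    fn start_new_block(&mut self) -> usize {
        self.blocks.push((vec![], None));
        self.blocks.len() - 1
    }
}

// Mirrors the shape of `diverge_cleanup_helper`: the innermost scope gets a
// cleanup block that drops its values and then jumps to the cleanup block of
// the next scope out, ending at DIVERGE_BLOCK.
fn diverge_cleanup(cfg: &mut Cfg, scopes: &mut [Scope]) -> usize {
    let len = scopes.len();
    if len == 0 {
        return DIVERGE_BLOCK;
    }
    let (remaining, scope) = scopes.split_at_mut(len - 1);
    let scope = &mut scope[0];
    if let Some(b) = scope.cached_block {
        return b;
    }
    let block = cfg.start_new_block();
    cfg.blocks[block].0 = scope.drops.clone();
    scope.cached_block = Some(block);
    let target = diverge_cleanup(cfg, remaining);
    cfg.blocks[block].1 = Some(target);
    block
}

fn main() {
    // three pre-allocated blocks stand in for START, END, and DIVERGE
    let mut cfg = Cfg { blocks: vec![(vec![], None); 3] };
    let mut scopes = vec![
        Scope { drops: vec!["outer".to_string()], cached_block: None },
        Scope { drops: vec!["inner".to_string()], cached_block: None },
    ];
    let entry = diverge_cleanup(&mut cfg, &mut scopes);
    // `entry` drops "inner", then jumps to a block that drops "outer",
    // which in turn jumps to DIVERGE_BLOCK.
    println!("unwind path starts at block {}", entry);
}
```

Caching the block in `cached_block` means that many panic edges arising inside the same scope all funnel into one cleanup chain instead of duplicating it.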
+ +use build::{BlockAnd, Builder}; +use hair::*; +use repr::*; + +impl Builder { + pub fn stmts(&mut self, mut block: BasicBlock, stmts: Vec>) -> BlockAnd<()> { + for stmt in stmts { + unpack!(block = self.stmt(block, stmt)); + } + block.unit() + } + + pub fn stmt(&mut self, mut block: BasicBlock, stmt: StmtRef) -> BlockAnd<()> { + let this = self; + let Stmt { span, kind } = this.hir.mirror(stmt); + match kind { + StmtKind::Let { remainder_scope, + init_scope, + pattern, + initializer: Some(initializer), + stmts } => { + this.in_scope(remainder_scope, block, |this| { + unpack!(block = this.in_scope(init_scope, block, |this| { + this.expr_into_pattern(block, remainder_scope, pattern, initializer) + })); + this.stmts(block, stmts) + }) + } + + StmtKind::Let { remainder_scope, init_scope, pattern, initializer: None, stmts } => { + this.in_scope(remainder_scope, block, |this| { + unpack!(block = this.in_scope(init_scope, block, |this| { + this.declare_uninitialized_variables(remainder_scope, pattern); + block.unit() + })); + this.stmts(block, stmts) + }) + } + + StmtKind::Expr { scope, expr } => { + this.in_scope(scope, block, |this| { + let expr = this.hir.mirror(expr); + let temp = this.temp(expr.ty.clone()); + unpack!(block = this.into(&temp, block, expr)); + this.cfg.push_drop(block, span, DropKind::Deep, &temp); + block.unit() + }) + } + } + } +} diff --git a/src/librustc_mir/dump.rs b/src/librustc_mir/dump.rs new file mode 100644 index 00000000000..8e608de024e --- /dev/null +++ b/src/librustc_mir/dump.rs @@ -0,0 +1,225 @@ +// Copyright 2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! An experimental pass that scources for `#[rustc_mir]` attributes, +//! builds the resulting MIR, and dumps it out into a file for inspection. +//! +//! The attribute formats that are currently accepted are: +//! +//! - `#[rustc_mir(graphviz="file.gv")]` +//! 
- `#[rustc_mir(pretty="file.mir")]` + +extern crate syntax; +extern crate rustc; +extern crate rustc_front; + +use build; +use dot; +use repr::Mir; +use std::fs::File; +use tcx::{PatNode, Cx}; + +use self::rustc::middle::def_id::DefId; +use self::rustc::middle::infer; +use self::rustc::middle::region::CodeExtentData; +use self::rustc::middle::ty::{self, Ty}; +use self::rustc::util::common::ErrorReported; +use self::rustc_front::hir; +use self::rustc_front::attr::{AttrMetaMethods}; +use self::rustc_front::visit; +use self::syntax::ast; +use self::syntax::codemap::Span; + +pub fn dump_crate(tcx: &ty::ctxt) { + let mut dump = OuterDump { tcx: tcx }; + visit::walk_crate(&mut dump, tcx.map.krate()); +} + +/////////////////////////////////////////////////////////////////////////// +// OuterDump -- walks a crate, looking for fn items and methods to build MIR from + +struct OuterDump<'a,'tcx:'a> { + tcx: &'a ty::ctxt<'tcx>, +} + +impl<'a, 'tcx> OuterDump<'a, 'tcx> { + fn visit_mir(&self, attributes: &'tcx [hir::Attribute], mut walk_op: OP) + where OP: FnMut(&mut InnerDump<'a,'tcx>) + { + let mut built_mir = false; + + for attr in attributes { + if attr.check_name("rustc_mir") { + let mut closure_dump = InnerDump { tcx: self.tcx, attr: Some(attr) }; + walk_op(&mut closure_dump); + built_mir = true; + } + } + + let always_build_mir = self.tcx.sess.opts.always_build_mir; + if !built_mir && always_build_mir { + let mut closure_dump = InnerDump { tcx: self.tcx, attr: None }; + walk_op(&mut closure_dump); + } + } +} + + +impl<'a, 'tcx> visit::Visitor<'tcx> for OuterDump<'a, 'tcx> { + fn visit_item(&mut self, item: &'tcx hir::Item) { + self.visit_mir(&item.attrs, |c| visit::walk_item(c, item)); + visit::walk_item(self, item); + } + + fn visit_trait_item(&mut self, trait_item: &'tcx hir::TraitItem) { + match trait_item.node { + hir::MethodTraitItem(_, Some(_)) => { + self.visit_mir(&trait_item.attrs, |c| visit::walk_trait_item(c, trait_item)); + } + _ => { } + } + visit::walk_trait_item(self, trait_item); + } +} + +/////////////////////////////////////////////////////////////////////////// +// InnerDump -- dumps MIR for a single fn and its contained closures + +struct InnerDump<'a,'tcx:'a> { + tcx: &'a ty::ctxt<'tcx>, + attr: Option<&'a hir::Attribute>, +} + +impl<'a, 'tcx> visit::Visitor<'tcx> for InnerDump<'a,'tcx> { + fn visit_item(&mut self, _: &'tcx hir::Item) { + // ignore nested items; they need their own graphviz annotation + } + + fn visit_fn(&mut self, + fk: visit::FnKind<'tcx>, + decl: &'tcx hir::FnDecl, + body: &'tcx hir::Block, + span: Span, + id: ast::NodeId) { + let (prefix, implicit_arg_tys) = match fk { + visit::FnKind::Closure => + (format!("{}-", id), vec![closure_self_ty(&self.tcx, id, body.id)]), + _ => + (format!(""), vec![]), + }; + + let param_env = + ty::ParameterEnvironment::for_item(self.tcx, id); + + let infcx = + infer::new_infer_ctxt(self.tcx, + &self.tcx.tables, + Some(param_env), + true); + + match build_mir(Cx::new(&infcx), implicit_arg_tys, id, span, decl, body) { + Ok(mir) => { + let meta_item_list = + self.attr.iter() + .flat_map(|a| a.meta_item_list()) + .flat_map(|l| l.iter()); + for item in meta_item_list { + if item.check_name("graphviz") { + match item.value_str() { + Some(s) => { + match + File::create(format!("{}{}", prefix, s)) + .and_then(|ref mut output| dot::render(&mir, output)) + { + Ok(()) => { } + Err(e) => { + self.tcx.sess.span_fatal( + item.span, + &format!("Error writing graphviz \ + results to `{}`: {}", + s, e)); + } + } + } + None => { + 
self.tcx.sess.span_err( + item.span, + &format!("graphviz attribute requires a path")); + } + } + } + } + } + Err(ErrorReported) => { } + } + + visit::walk_fn(self, fk, decl, body, span); + } +} + +fn build_mir<'a,'tcx:'a>(cx: Cx<'a,'tcx>, + implicit_arg_tys: Vec>, + fn_id: ast::NodeId, + span: Span, + decl: &'tcx hir::FnDecl, + body: &'tcx hir::Block) + -> Result>, ErrorReported> { + let arguments = + decl.inputs + .iter() + .map(|arg| { + let ty = cx.tcx.node_id_to_type(arg.id); + (ty, PatNode::irrefutable(&arg.pat)) + }) + .collect(); + + let parameter_scope = + cx.tcx.region_maps.lookup_code_extent( + CodeExtentData::ParameterScope { fn_id: fn_id, body_id: body.id }); + Ok(build::construct(cx, + span, + implicit_arg_tys, + arguments, + parameter_scope, + body)) +} + +fn closure_self_ty<'a,'tcx>(tcx: &ty::ctxt<'tcx>, + closure_expr_id: ast::NodeId, + body_id: ast::NodeId) + -> Ty<'tcx> +{ + let closure_ty = tcx.node_id_to_type(closure_expr_id); + + // We're just hard-coding the idea that the signature will be + // &self or &mut self and hence will have a bound region with + // number 0, hokey. + let region = + ty::Region::ReFree( + ty::FreeRegion { + scope: tcx.region_maps.item_extent(body_id), + bound_region: ty::BoundRegion::BrAnon(0) + }); + let region = + tcx.mk_region(region); + + match tcx.closure_kind(DefId::local(closure_expr_id)) { + ty::ClosureKind::FnClosureKind => + tcx.mk_ref(region, + ty::TypeAndMut { ty: closure_ty, + mutbl: hir::MutImmutable }), + ty::ClosureKind::FnMutClosureKind => + tcx.mk_ref(region, + ty::TypeAndMut { ty: closure_ty, + mutbl: hir::MutMutable }), + ty::ClosureKind::FnOnceClosureKind => + closure_ty + } +} diff --git a/src/librustc_mir/graphviz/mod.rs b/src/librustc_mir/graphviz/mod.rs new file mode 100644 index 00000000000..01ccf20ae06 --- /dev/null +++ b/src/librustc_mir/graphviz/mod.rs @@ -0,0 +1,157 @@ +// Copyright 2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +use dot; +use hair::Hair; +use repr::*; +use std::borrow::IntoCow; + +#[derive(Copy, Clone, PartialEq, Eq)] +pub struct EdgeIndex { + source: BasicBlock, + target: BasicBlock, + index: usize, +} + +impl<'a,H:Hair> dot::Labeller<'a, BasicBlock, EdgeIndex> for Mir { + fn graph_id(&'a self) -> dot::Id<'a> { + dot::Id::new("Mir").unwrap() + } + + fn node_id(&'a self, n: &BasicBlock) -> dot::Id<'a> { + dot::Id::new(format!("BB{}", n.index())).unwrap() + } + + fn node_shape(&'a self, _: &BasicBlock) -> Option> { + Some(dot::LabelText::label("none")) + } + + fn node_label(&'a self, &n: &BasicBlock) -> dot::LabelText<'a> { + let mut buffer = String::new(); + buffer.push_str(""); + + buffer.push_str(""); + + let data = self.basic_block_data(n); + for statement in &data.statements { + buffer.push_str(""); + } + + buffer.push_str(""); + + buffer.push_str("
"); + buffer.push_str(&format!("{:?}", n)); + buffer.push_str("
"); + buffer.push_str(&escape(format!("{:?}", statement))); + buffer.push_str("
"); + buffer.push_str(&escape(format!("{:?}", &data.terminator))); + buffer.push_str("
"); + + dot::LabelText::html(buffer) + } + + fn edge_label(&'a self, edge: &EdgeIndex) -> dot::LabelText<'a> { + dot::LabelText::label(format!("{}", edge.index)) + } +} + +impl<'a,H:Hair> dot::GraphWalk<'a, BasicBlock, EdgeIndex> for Mir { + fn nodes(&'a self) -> dot::Nodes<'a, BasicBlock> { + self.all_basic_blocks().into_cow() + } + + fn edges(&'a self) -> dot::Edges<'a, EdgeIndex> { + self.all_basic_blocks() + .into_iter() + .flat_map(|source| { + self.basic_block_data(source).terminator + .successors() + .iter() + .enumerate() + .map(move |(index, &target)| { + EdgeIndex { source: source, + target: target, + index: index } + }) + }) + .collect::>() + .into_cow() + } + + fn source(&'a self, edge: &EdgeIndex) -> BasicBlock { + edge.source + } + + fn target(&'a self, edge: &EdgeIndex) -> BasicBlock { + edge.target + } +} + +fn escape(text: String) -> String { + let text = dot::escape_html(&text); + let text = all_to_subscript("Temp", text); + let text = all_to_subscript("Var", text); + let text = all_to_subscript("Arg", text); + let text = all_to_subscript("BB", text); + text +} + +/// A call like `all_to_subscript("Temp", "Temp(123)")` will convert +/// to `Temp₁₂₃`. +fn all_to_subscript(header: &str, mut text: String) -> String { + let mut offset = 0; + while offset < text.len() { + if let Some(text1) = to_subscript1(header, &text, &mut offset) { + text = text1; + } + } + return text; + + /// Looks for `Foo(\d*)` where `header=="Foo"` and replaces the `\d` with subscripts. + /// Updates `offset` to point to the next location where we might want to search. + /// Returns an updated string if changes were made, else None. + fn to_subscript1(header: &str, text: &str, offset: &mut usize) -> Option { + let a = match text[*offset..].find(header) { + None => { *offset = text.len(); return None; } + Some(a) => a + *offset, + }; + + // Example: + // + // header: "Foo" + // text: ....Foo(123)... + // ^ ^ + // a b + + let b = a + header.len(); + *offset = b; + + let mut chars = text[b..].chars(); + if Some('(') != chars.next() { + return None; + } + + let mut result = String::new(); + result.push_str(&text[..b]); + + while let Some(c) = chars.next() { + if c == ')' { break; } + if !c.is_digit(10) { return None; } + + // 0x208 is _0 in unicode, 0x209 is _1, etc + const SUBSCRIPTS: &'static str = "₀₁₂₃₄₅₆₇₈₉"; + let n = (c as usize) - ('0' as usize); + result.extend(SUBSCRIPTS.chars().skip(n).take(1)); + } + + result.extend(chars); + return Some(result); + } +} diff --git a/src/librustc_mir/hair.rs b/src/librustc_mir/hair.rs new file mode 100644 index 00000000000..c63a0348337 --- /dev/null +++ b/src/librustc_mir/hair.rs @@ -0,0 +1,382 @@ +// Copyright 2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! The MIR is translated from some high-level abstract IR +//! (HAIR). This section defines the HAIR along with a trait for +//! accessing it. The intention is to allow MIR construction to be +//! unit-tested and separated from the Rust source and compiler data +//! structures. 
+ +use repr::{BinOp, BorrowKind, Field, Literal, Mutability, UnOp}; +use std::fmt::Debug; +use std::hash::Hash; + +pub trait Hair: Sized+Debug+Clone+Eq+Hash { // (*) + + // (*) the `Sized` and Debug` bounds are the only ones that really + // make sense. The rest are just there so that we can + // `#[derive(Clone)]` on things that are parameterized over + // `H:HAIR`. It's kind of lame. + + type VarId: Copy+Debug+Eq+Hash; // e.g., NodeId for a variable + type DefId: Copy+Debug+Eq+Hash; // e.g., DefId + type AdtDef: Copy+Debug+Eq+Hash; // e.g., AdtDef<'tcx> + type Name: Copy+Debug+Eq+Hash; // e.g., ast::Name + type Ident: Copy+Debug+Eq+Hash; // e.g., ast::Ident + type InternedString: Clone+Debug+Eq+Hash; // e.g., InternedString + type Bytes: Clone+Debug+Eq+Hash; // e.g., Rc> + type Span: Copy+Debug+Eq; // e.g., syntax::codemap::Span + type Projection: Clone+Debug+Eq; // e.g., ty::ProjectionTy<'tcx> + type Substs: Clone+Debug+Eq; // e.g., substs::Substs<'tcx> + type ClosureSubsts: Clone+Debug+Eq; // e.g., ty::ClosureSubsts<'tcx> + type Ty: Clone+Debug+Eq; // e.g., ty::Ty<'tcx> + type Region: Copy+Debug; // e.g., ty::Region + type CodeExtent: Copy+Debug+Hash+Eq; // e.g., region::CodeExtent + type Pattern: Clone+Debug+Mirror>; // e.g., &P + type Expr: Clone+Debug+Mirror>; // e.g., &P + type Stmt: Clone+Debug+Mirror>; // e.g., &P + type Block: Clone+Debug+Mirror>; // e.g., &P + type InlineAsm: Clone+Debug+Eq+Hash; // e.g., ast::InlineAsm + + /// Normalizes `ast` into the appropriate `mirror` type. + fn mirror>(&mut self, ast: M) -> M::Output { + ast.make_mirror(self) + } + + /// Returns the unit type `()` + fn unit_ty(&mut self) -> Self::Ty; + + /// Returns the type `usize`. + fn usize_ty(&mut self) -> Self::Ty; + + /// Returns the type `bool`. + fn bool_ty(&mut self) -> Self::Ty; + + /// Returns a reference to `PartialEq::::eq` + fn partial_eq(&mut self, ty: Self::Ty) -> ItemRef; + + /// Returns a reference to `PartialOrd::::le` + fn partial_le(&mut self, ty: Self::Ty) -> ItemRef; + + /// Returns the number of variants for the given enum + fn num_variants(&mut self, adt: Self::AdtDef) -> usize; + + fn fields(&mut self, adt: Self::AdtDef, variant_index: usize) -> Vec>; + + /// true if a value of type `ty` (may) need to be dropped; this + /// may return false even for non-Copy types if there is no + /// destructor to execute. If correct result is not known, may be + /// approximated by returning `true`; this will result in more + /// drops but not incorrect code. + fn needs_drop(&mut self, ty: Self::Ty, span: Self::Span) -> bool; + + /// Report an internal inconsistency. 
+ fn span_bug(&mut self, span: Self::Span, message: &str) -> !; +} + +#[derive(Clone, Debug)] +pub struct ItemRef { + pub ty: H::Ty, + pub def_id: H::DefId, + pub substs: H::Substs, +} + +#[derive(Clone, Debug)] +pub struct Block { + pub extent: H::CodeExtent, + pub span: H::Span, + pub stmts: Vec>, + pub expr: Option>, +} + +#[derive(Clone, Debug)] +pub enum StmtRef { + Hair(H::Stmt), + Mirror(Box>), +} + +#[derive(Clone, Debug)] +pub struct Stmt { + pub span: H::Span, + pub kind: StmtKind, +} + +#[derive(Clone, Debug)] +pub enum StmtKind { + Expr { + /// scope for this statement; may be used as lifetime of temporaries + scope: H::CodeExtent, + + /// expression being evaluated in this statement + expr: ExprRef + }, + + Let { + /// scope for variables bound in this let; covers this and + /// remaining statements in block + remainder_scope: H::CodeExtent, + + /// scope for the initialization itself; might be used as + /// lifetime of temporaries + init_scope: H::CodeExtent, + + /// let = ... + pattern: PatternRef, + + /// let pat = ... + initializer: Option>, + + /// let pat = init; + stmts: Vec> + }, +} + +// The Hair trait implementor translates their expressions (`H::Expr`) +// into instances of this `Expr` enum. This translation can be done +// basically as lazilly or as eagerly as desired: every recursive +// reference to an expression in this enum is an `ExprRef`, which +// may in turn be another instance of this enum (boxed), or else an +// untranslated `H::Expr`. Note that instances of `Expr` are very +// shortlived. They are created by `Hair::to_expr`, analyzed and +// converted into MIR, and then discarded. +// +// If you compare `Expr` to the full compiler AST, you will see it is +// a good bit simpler. In fact, a number of the more straight-forward +// MIR simplifications are already done in the impl of `Hair`. For +// example, method calls and overloaded operators are absent: they are +// expected to be converted into `Expr::Call` instances. +#[derive(Clone, Debug)] +pub struct Expr { + // type of this expression + pub ty: H::Ty, + + // lifetime of this expression if it should be spilled into a + // temporary; should be None only if in a constant context + pub temp_lifetime: Option, + + // span of the expression in the source + pub span: H::Span, + + // kind of expression + pub kind: ExprKind, +} + +#[derive(Clone, Debug)] +pub enum ExprKind { + Scope { extent: H::CodeExtent, value: ExprRef }, + Paren { arg: ExprRef }, // ugh. should be able to remove this! + Box { place: Option>, value: ExprRef }, + Call { fun: ExprRef, args: Vec> }, + Deref { arg: ExprRef }, // NOT overloaded! + Binary { op: BinOp, lhs: ExprRef, rhs: ExprRef }, // NOT overloaded! + LogicalOp { op: LogicalOp, lhs: ExprRef, rhs: ExprRef }, + Unary { op: UnOp, arg: ExprRef }, // NOT overloaded! 
+ Cast { source: ExprRef }, + ReifyFnPointer { source: ExprRef }, + UnsafeFnPointer { source: ExprRef }, + Unsize { source: ExprRef }, + If { condition: ExprRef, then: ExprRef, otherwise: Option> }, + Loop { condition: Option>, body: ExprRef, }, + Match { discriminant: ExprRef, arms: Vec> }, + Block { body: H::Block }, + Assign { lhs: ExprRef, rhs: ExprRef }, + AssignOp { op: BinOp, lhs: ExprRef, rhs: ExprRef }, + Field { lhs: ExprRef, name: Field }, + Index { lhs: ExprRef, index: ExprRef }, + VarRef { id: H::VarId }, + SelfRef, // first argument, used for self in a closure + StaticRef { id: H::DefId }, + Borrow { region: H::Region, borrow_kind: BorrowKind, arg: ExprRef }, + Break { label: Option }, + Continue { label: Option }, + Return { value: Option> }, + Repeat { value: ExprRef, count: ExprRef }, + Vec { fields: Vec> }, + Tuple { fields: Vec> }, + Adt { adt_def: H::AdtDef, + variant_index: usize, + substs: H::Substs, + fields: Vec>, + base: Option> }, + Closure { closure_id: H::DefId, substs: H::ClosureSubsts, + upvars: Vec> }, + Literal { literal: Literal }, + InlineAsm { asm: H::InlineAsm }, +} + +#[derive(Clone, Debug)] +pub enum ExprRef { + Hair(H::Expr), + Mirror(Box>), +} + +#[derive(Clone, Debug)] +pub struct FieldExprRef { + pub name: Field, + pub expr: ExprRef, +} + +#[derive(Clone, Debug)] +pub struct Arm { + pub patterns: Vec>, + pub guard: Option>, + pub body: ExprRef, +} + +#[derive(Clone, Debug)] +pub struct Pattern { + pub ty: H::Ty, + pub span: H::Span, + pub kind: PatternKind, +} + +#[derive(Copy, Clone, Debug)] +pub enum LogicalOp { + And, + Or +} + +#[derive(Clone, Debug)] +pub enum PatternKind { + Wild, + + // x, ref x, x @ P, etc + Binding { mutability: Mutability, + name: H::Ident, + mode: BindingMode, + var: H::VarId, + ty: H::Ty, + subpattern: Option> }, + + // Foo(...) or Foo{...} or Foo, where `Foo` is a variant name from an adt with >1 variants + Variant { adt_def: H::AdtDef, variant_index: usize, subpatterns: Vec> }, + + // (...), Foo(...), Foo{...}, or Foo, where `Foo` is a variant name from an adt with 1 variant + Leaf { subpatterns: Vec> }, + + Deref { subpattern: PatternRef }, // box P, &P, &mut P, etc + + Constant { expr: ExprRef }, + + Range { lo: ExprRef, hi: ExprRef }, + + // matches against a slice, checking the length and extracting elements + Slice { prefix: Vec>, + slice: Option>, + suffix: Vec> }, + + // fixed match against an array, irrefutable + Array { prefix: Vec>, + slice: Option>, + suffix: Vec> }, +} + +#[derive(Copy, Clone, Debug)] +pub enum BindingMode { + ByValue, + ByRef(H::Region, BorrowKind), +} + +#[derive(Clone, Debug)] +pub enum PatternRef { + Hair(H::Pattern), + Mirror(Box>), +} + +#[derive(Clone, Debug)] +pub struct FieldPatternRef { + pub field: Field, + pub pattern: PatternRef, +} + +/////////////////////////////////////////////////////////////////////////// +// The Mirror trait + +/// "Mirroring" is the process of converting from a Hair type into one +/// of the types in this file. For example, the mirror of a `H::Expr` +/// is an `Expr`. Mirroring is the point at which the actual IR is +/// converting into the more idealized representation described in +/// this file. Mirroring is gradual: when you mirror an outer +/// expression like `e1 + e2`, the references to the inner expressions +/// `e1` and `e2` are `ExprRef` instances, and they may or may not +/// be eagerly mirrored. This allows a single AST node from the +/// compiler to expand into one or more Hair nodes, which lets the Hair +/// nodes be simpler. 
+pub trait Mirror { + type Output; + + fn make_mirror(self, hir: &mut H) -> Self::Output; +} + +impl Mirror for Expr { + type Output = Expr; + + fn make_mirror(self, _: &mut H) -> Expr { + self + } +} + +impl Mirror for ExprRef { + type Output = Expr; + + fn make_mirror(self, hir: &mut H) -> Expr { + match self { + ExprRef::Hair(h) => h.make_mirror(hir), + ExprRef::Mirror(m) => *m, + } + } +} + +impl Mirror for Stmt { + type Output = Stmt; + + fn make_mirror(self, _: &mut H) -> Stmt { + self + } +} + +impl Mirror for StmtRef { + type Output = Stmt; + + fn make_mirror(self, hir: &mut H) -> Stmt { + match self { + StmtRef::Hair(h) => h.make_mirror(hir), + StmtRef::Mirror(m) => *m, + } + } +} + +impl Mirror for Pattern { + type Output = Pattern; + + fn make_mirror(self, _: &mut H) -> Pattern { + self + } +} + +impl Mirror for PatternRef { + type Output = Pattern; + + fn make_mirror(self, hir: &mut H) -> Pattern { + match self { + PatternRef::Hair(h) => h.make_mirror(hir), + PatternRef::Mirror(m) => *m, + } + } +} + +impl Mirror for Block { + type Output = Block; + + fn make_mirror(self, _: &mut H) -> Block { + self + } +} + diff --git a/src/librustc_mir/lib.rs b/src/librustc_mir/lib.rs new file mode 100644 index 00000000000..ebec1609959 --- /dev/null +++ b/src/librustc_mir/lib.rs @@ -0,0 +1,34 @@ +// Copyright 2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +/*! + +Rust MIR: a lowered representation of Rust. Also: an experiment! + +*/ + +#![crate_name = "rustc_mir"] +#![crate_type = "rlib"] +#![crate_type = "dylib"] + +#![feature(ref_slice)] +#![feature(rustc_private)] +#![feature(into_cow)] + +#[macro_use] extern crate log; +extern crate graphviz as dot; +extern crate rustc_data_structures; + +pub mod build; +pub mod dump; +pub mod hair; +pub mod repr; +mod graphviz; +mod tcx; diff --git a/src/librustc_mir/repr.rs b/src/librustc_mir/repr.rs new file mode 100644 index 00000000000..ea62be26225 --- /dev/null +++ b/src/librustc_mir/repr.rs @@ -0,0 +1,676 @@ +// Copyright 2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +use hair::Hair; +use rustc_data_structures::fnv::FnvHashMap; +use std::fmt::{Debug, Formatter, Error}; +use std::slice; +use std::u32; + +/// Lowered representation of a single function. 
+pub struct Mir { + pub basic_blocks: Vec>, + + // for every node id + pub extents: FnvHashMap>, + + pub var_decls: Vec>, + pub arg_decls: Vec>, + pub temp_decls: Vec>, +} + +/// where execution begins +pub const START_BLOCK: BasicBlock = BasicBlock(0); + +/// where execution ends, on normal return +pub const END_BLOCK: BasicBlock = BasicBlock(1); + +/// where execution ends, on panic +pub const DIVERGE_BLOCK: BasicBlock = BasicBlock(2); + +impl Mir { + pub fn all_basic_blocks(&self) -> Vec { + (0..self.basic_blocks.len()) + .map(|i| BasicBlock::new(i)) + .collect() + } + + pub fn basic_block_data(&self, bb: BasicBlock) -> &BasicBlockData { + &self.basic_blocks[bb.index()] + } + + pub fn basic_block_data_mut(&mut self, bb: BasicBlock) -> &mut BasicBlockData { + &mut self.basic_blocks[bb.index()] + } +} + +/////////////////////////////////////////////////////////////////////////// +// Mutability and borrow kinds + +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub enum Mutability { + Mut, + Not, +} + +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub enum BorrowKind { + /// Data must be immutable and is aliasable. + Shared, + + /// Data must be immutable but not aliasable. This kind of borrow + /// cannot currently be expressed by the user and is used only in + /// implicit closure bindings. It is needed when you the closure + /// is borrowing or mutating a mutable referent, e.g.: + /// + /// let x: &mut isize = ...; + /// let y = || *x += 5; + /// + /// If we were to try to translate this closure into a more explicit + /// form, we'd encounter an error with the code as written: + /// + /// struct Env { x: & &mut isize } + /// let x: &mut isize = ...; + /// let y = (&mut Env { &x }, fn_ptr); // Closure is pair of env and fn + /// fn fn_ptr(env: &mut Env) { **env.x += 5; } + /// + /// This is then illegal because you cannot mutate a `&mut` found + /// in an aliasable location. To solve, you'd have to translate with + /// an `&mut` borrow: + /// + /// struct Env { x: & &mut isize } + /// let x: &mut isize = ...; + /// let y = (&mut Env { &mut x }, fn_ptr); // changed from &x to &mut x + /// fn fn_ptr(env: &mut Env) { **env.x += 5; } + /// + /// Now the assignment to `**env.x` is legal, but creating a + /// mutable pointer to `x` is not because `x` is not mutable. We + /// could fix this by declaring `x` as `let mut x`. This is ok in + /// user code, if awkward, but extra weird for closures, since the + /// borrow is hidden. + /// + /// So we introduce a "unique imm" borrow -- the referent is + /// immutable, but not aliasable. This solves the problem. For + /// simplicity, we don't give users the way to express this + /// borrow, it's just used when translating closures. + Unique, + + /// Data is mutable and not aliasable. + Mut +} + +/////////////////////////////////////////////////////////////////////////// +// Variables and temps + +// A "variable" is a binding declared by the user as part of the fn +// decl, a let, etc. +pub struct VarDecl { + pub mutability: Mutability, + pub name: H::Ident, + pub ty: H::Ty, +} + +// A "temp" is a temporary that we place on the stack. They are +// anonymous, always mutable, and have only a type. +pub struct TempDecl { + pub ty: H::Ty, +} + +// A "arg" is one of the function's formal arguments. These are +// anonymous and distinct from the bindings that the user declares. +// +// For example, in this function: +// +// ``` +// fn foo((x, y): (i32, u32)) { ... 
} +// ``` +// +// there is only one argument, of type `(i32, u32)`, but two bindings +// (`x` and `y`). +pub struct ArgDecl { + pub ty: H::Ty, +} + +/////////////////////////////////////////////////////////////////////////// +// Graph extents + +/// A moment in the flow of execution. It corresponds to a point in +/// between two statements: +/// +/// BB[block]: +/// <--- if statement == 0 +/// STMT[0] +/// <--- if statement == 1 +/// STMT[1] +/// ... +/// <--- if statement == n-1 +/// STMT[n-1] +/// <--- if statement == n +/// +/// where the block has `n` statements. +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub struct ExecutionPoint { + pub block: BasicBlock, + pub statement: u32, +} + +/// A single-entry-multiple-exit region in the graph. We build one of +/// these for every node-id during MIR construction. By construction +/// we are assured that the entry dominates all points within, and +/// that, for every interior point X, it is postdominated by some exit. +pub struct GraphExtent { + pub entry: ExecutionPoint, + pub exit: GraphExtentExit, +} + +pub enum GraphExtentExit { + /// `Statement(X)`: a very common special case covering a span + /// that is local to a single block. It starts at the entry point + /// and extends until the start of statement `X` (non-inclusive). + Statement(u32), + + /// The more general case where the exits are a set of points. + Points(Vec), +} + +/////////////////////////////////////////////////////////////////////////// +// BasicBlock + +/// The index of a particular basic block. The index is into the `basic_blocks` +/// list of the `Mir`. +/// +/// (We use a `u32` internally just to save memory.) +#[derive(Copy, Clone, PartialEq, Eq)] +pub struct BasicBlock(u32); + +impl BasicBlock { + pub fn new(index: usize) -> BasicBlock { + assert!(index < (u32::MAX as usize)); + BasicBlock(index as u32) + } + + /// Extract the index. + pub fn index(self) -> usize { + self.0 as usize + } +} + +impl Debug for BasicBlock { + fn fmt(&self, fmt: &mut Formatter) -> Result<(), Error> { + write!(fmt, "BB({})", self.0) + } +} + +/////////////////////////////////////////////////////////////////////////// +// BasicBlock and Terminator + +#[derive(Debug)] +pub struct BasicBlockData { + pub statements: Vec>, + pub terminator: Terminator, +} + +pub enum Terminator { + /// block should have one successor in the graph; we jump there + Goto { target: BasicBlock }, + + /// block should initiate unwinding; should be one successor + /// that does cleanup and branches to DIVERGE_BLOCK + Panic { target: BasicBlock }, + + /// jump to branch 0 if this lvalue evaluates to true + If { cond: Operand, targets: [BasicBlock; 2] }, + + /// lvalue evaluates to some enum; jump depending on the branch + Switch { discr: Lvalue, targets: Vec }, + + /// Indicates that the last statement in the block panics, aborts, + /// etc. No successors. This terminator appears on exactly one + /// basic block which we create in advance. However, during + /// construction, we use this value as a sentinel for "terminator + /// not yet assigned", and assert at the end that only the + /// well-known diverging block actually diverges. + Diverge, + + /// Indicates a normal return. The ReturnPointer lvalue should + /// have been filled in by now. This should only occur in the + /// `END_BLOCK`. + Return, + + /// block ends with a call; it should have two successors. The + /// first successor indicates normal return. The second indicates + /// unwinding. 
+ Call { data: CallData, targets: [BasicBlock; 2] }, +} + +impl Terminator { + pub fn successors(&self) -> &[BasicBlock] { + use self::Terminator::*; + match *self { + Goto { target: ref b } => slice::ref_slice(b), + Panic { target: ref b } => slice::ref_slice(b), + If { cond: _, targets: ref b } => b, + Switch { discr: _, targets: ref b } => b, + Diverge => &[], + Return => &[], + Call { data: _, targets: ref b } => b, + } + } +} + +#[derive(Debug)] +pub struct CallData { + /// where the return value is written to + pub destination: Lvalue, + + /// the fn being called + pub func: Lvalue, + + /// the arguments + pub args: Vec>, +} + +impl BasicBlockData { + pub fn new(terminator: Terminator) -> BasicBlockData { + BasicBlockData { + statements: vec![], + terminator: terminator, + } + } +} + +impl Debug for Terminator { + fn fmt(&self, fmt: &mut Formatter) -> Result<(), Error> { + use self::Terminator::*; + match *self { + Goto { target } => + write!(fmt, "goto -> {:?}", target), + Panic { target } => + write!(fmt, "panic -> {:?}", target), + If { cond: ref lv, ref targets } => + write!(fmt, "if({:?}) -> {:?}", lv, targets), + Switch { discr: ref lv, ref targets } => + write!(fmt, "switch({:?}) -> {:?}", lv, targets), + Diverge => + write!(fmt, "diverge"), + Return => + write!(fmt, "return"), + Call { data: ref c, targets } => { + try!(write!(fmt, "{:?} = {:?}(", c.destination, c.func)); + for (index, arg) in c.args.iter().enumerate() { + if index > 0 { try!(write!(fmt, ", ")); } + try!(write!(fmt, "{:?}", arg)); + } + write!(fmt, ") -> {:?}", targets) + } + } + } +} + + +/////////////////////////////////////////////////////////////////////////// +// Statements + +pub struct Statement { + pub span: H::Span, + pub kind: StatementKind, +} + +#[derive(Debug)] +pub enum StatementKind { + Assign(Lvalue, Rvalue), + Drop(DropKind, Lvalue), +} + +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub enum DropKind { + Shallow, + Deep +} + +impl Debug for Statement { + fn fmt(&self, fmt: &mut Formatter) -> Result<(), Error> { + use self::StatementKind::*; + match self.kind { + Assign(ref lv, ref rv) => write!(fmt, "{:?} = {:?}", lv, rv), + Drop(DropKind::Shallow, ref lv) => write!(fmt, "shallow_drop {:?}", lv), + Drop(DropKind::Deep, ref lv) => write!(fmt, "drop {:?}", lv), + } + } +} +/////////////////////////////////////////////////////////////////////////// +// Lvalues + +/// A path to a value; something that can be evaluated without +/// changing or disturbing program state. +#[derive(Clone, PartialEq)] +pub enum Lvalue { + /// local variable declared by the user + Var(u32), + + /// temporary introduced during lowering into MIR + Temp(u32), + + /// formal parameter of the function; note that these are NOT the + /// bindings that the user declares, which are vars + Arg(u32), + + /// static or static mut variable + Static(H::DefId), + + /// the return pointer of the fn + ReturnPointer, + + /// projection out of an lvalue (access a field, deref a pointer, etc) + Projection(Box>) +} + +/// The `Projection` data structure defines things of the form `B.x` +/// or `*B` or `B[index]`. Note that it is parameterized because it is +/// shared between `Constant` and `Lvalue`. See the aliases +/// `LvalueProjection` etc below. +#[derive(Clone, Debug, PartialEq)] +pub struct Projection { + pub base: B, + pub elem: ProjectionElem, +} + +#[derive(Clone, Debug, PartialEq)] +pub enum ProjectionElem { + Deref, + Field(Field), + Index(V), + + // These indices are generated by slice patterns. 
Easiest to explain + // by example: + // + // ``` + // [X, _, .._, _, _] => { offset: 0, min_length: 4, from_end: false }, + // [_, X, .._, _, _] => { offset: 1, min_length: 4, from_end: false }, + // [_, _, .._, X, _] => { offset: 2, min_length: 4, from_end: true }, + // [_, _, .._, _, X] => { offset: 1, min_length: 4, from_end: true }, + // ``` + ConstantIndex { + offset: u32, // index or -index (in Python terms), depending on from_end + min_length: u32, // thing being indexed must be at least this long + from_end: bool, // counting backwards from end? + }, + + // "Downcast" to a variant of an ADT. Currently, we only introduce + // this for ADTs with more than one variant. It may be better to + // just introduce it always, or always for enums. + Downcast(H::AdtDef, usize), +} + +/// Alias for projections as they appear in lvalues, where the base is an lvalue +/// and the index is an operand. +pub type LvalueProjection = + Projection,Operand>; + +/// Alias for projections as they appear in lvalues, where the base is an lvalue +/// and the index is an operand. +pub type LvalueElem = + ProjectionElem>; + +#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] +pub enum Field { + Named(H::Name), + Indexed(usize), +} + +impl Lvalue { + pub fn field(self, f: Field) -> Lvalue { + self.elem(ProjectionElem::Field(f)) + } + + pub fn deref(self) -> Lvalue { + self.elem(ProjectionElem::Deref) + } + + pub fn index(self, index: Operand) -> Lvalue { + self.elem(ProjectionElem::Index(index)) + } + + pub fn elem(self, elem: LvalueElem) -> Lvalue { + Lvalue::Projection(Box::new(LvalueProjection { base: self, elem: elem })) + } +} + +impl Debug for Lvalue { + fn fmt(&self, fmt: &mut Formatter) -> Result<(), Error> { + use self::Lvalue::*; + + match *self { + Var(id) => + write!(fmt,"Var({:?})", id), + Arg(id) => + write!(fmt,"Arg({:?})", id), + Temp(id) => + write!(fmt,"Temp({:?})", id), + Static(id) => + write!(fmt,"Static({:?})", id), + ReturnPointer => + write!(fmt,"ReturnPointer"), + Projection(ref data) => + match data.elem { + ProjectionElem::Downcast(_, variant_index) => + write!(fmt,"({:?} as {:?})", data.base, variant_index), + ProjectionElem::Deref => + write!(fmt,"(*{:?})", data.base), + ProjectionElem::Field(Field::Named(name)) => + write!(fmt,"{:?}.{:?}", data.base, name), + ProjectionElem::Field(Field::Indexed(index)) => + write!(fmt,"{:?}.{:?}", data.base, index), + ProjectionElem::Index(ref index) => + write!(fmt,"{:?}[{:?}]", data.base, index), + ProjectionElem::ConstantIndex { offset, min_length, from_end: false } => + write!(fmt,"{:?}[{:?}; {:?}]", data.base, offset, min_length), + ProjectionElem::ConstantIndex { offset, min_length, from_end: true } => + write!(fmt,"{:?}[-{:?}; {:?}]", data.base, offset, min_length), + }, + } + } +} + +/////////////////////////////////////////////////////////////////////////// +// Operands +// +// These are values that can appear inside an rvalue (or an index +// lvalue). They are intentionally limited to prevent rvalues from +// being nested in one another. 
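+//
+// For example (an illustrative sketch with hypothetical temporaries), an
+// expression such as `a + b[i]` is not represented as one nested rvalue;
+// lowering introduces a temporary instead, roughly:
+//
+//     tmp0 = b[i];          // Rvalue::Use of the indexed lvalue
+//     tmp1 = Add(a, tmp0);  // Rvalue::BinaryOp over two operands
+//
+// so every rvalue refers only to operands (lvalues or constants), never
+// to other rvalues.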
+ +#[derive(Clone, PartialEq)] +pub enum Operand { + Consume(Lvalue), + Constant(Constant), +} + +impl Debug for Operand { + fn fmt(&self, fmt: &mut Formatter) -> Result<(), Error> { + use self::Operand::*; + match *self { + Constant(ref a) => write!(fmt, "{:?}", a), + Consume(ref lv) => write!(fmt, "{:?}", lv), + } + } +} + +/////////////////////////////////////////////////////////////////////////// +// Rvalues + +#[derive(Clone)] +pub enum Rvalue { + // x (either a move or copy, depending on type of x) + Use(Operand), + + // [x; 32] + Repeat(Operand, Operand), + + // &x or &mut x + Ref(H::Region, BorrowKind, Lvalue), + + // length of a [X] or [X;n] value + Len(Lvalue), + + Cast(CastKind, Operand, H::Ty), + + BinaryOp(BinOp, Operand, Operand), + + UnaryOp(UnOp, Operand), + + // Creates an *uninitialized* Box + Box(H::Ty), + + // Create an aggregate value, like a tuple or struct. This is + // only needed because we want to distinguish `dest = Foo { x: + // ..., y: ... }` from `dest.x = ...; dest.y = ...;` in the case + // that `Foo` has a destructor. These rvalues can be optimized + // away after type-checking and before lowering. + Aggregate(AggregateKind, Vec>), + + InlineAsm(H::InlineAsm), +} + +#[derive(Clone, Debug, PartialEq, Eq)] +pub enum CastKind { + Misc, + + /// Convert unique, zero-sized type for a fn to fn() + ReifyFnPointer, + + /// Convert safe fn() to unsafe fn() + UnsafeFnPointer, + + /// "Unsize" -- convert a thin-or-fat pointer to a fat pointer. + /// trans must figure out the details once full monomorphization + /// is known. For example, this could be used to cast from a + /// `&[i32;N]` to a `&[i32]`, or a `Box` to a `Box` + /// (presuming `T: Trait`). + Unsize, +} + +#[derive(Clone, Debug, PartialEq, Eq)] +pub enum AggregateKind { + Vec, + Tuple, + Adt(H::AdtDef, usize, H::Substs), + Closure(H::DefId, H::ClosureSubsts), +} + +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub enum BinOp { + /// The `+` operator (addition) + Add, + /// The `-` operator (subtraction) + Sub, + /// The `*` operator (multiplication) + Mul, + /// The `/` operator (division) + Div, + /// The `%` operator (modulus) + Rem, + /// The `^` operator (bitwise xor) + BitXor, + /// The `&` operator (bitwise and) + BitAnd, + /// The `|` operator (bitwise or) + BitOr, + /// The `<<` operator (shift left) + Shl, + /// The `>>` operator (shift right) + Shr, + /// The `==` operator (equality) + Eq, + /// The `<` operator (less than) + Lt, + /// The `<=` operator (less than or equal to) + Le, + /// The `!=` operator (not equal to) + Ne, + /// The `>=` operator (greater than or equal to) + Ge, + /// The `>` operator (greater than) + Gt, +} + +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub enum UnOp { + /// The `!` operator for logical inversion + Not, + /// The `-` operator for negation + Neg +} + +impl Debug for Rvalue { + fn fmt(&self, fmt: &mut Formatter) -> Result<(), Error> { + use self::Rvalue::*; + + match *self { + Use(ref lvalue) => write!(fmt, "{:?}", lvalue), + Repeat(ref a, ref b) => write!(fmt, "[{:?}; {:?}]", a, b), + Ref(ref a, bk, ref b) => write!(fmt, "&{:?} {:?} {:?}", a, bk, b), + Len(ref a) => write!(fmt, "LEN({:?})", a), + Cast(ref kind, ref lv, ref ty) => write!(fmt, "{:?} as {:?} ({:?}", lv, ty, kind), + BinaryOp(ref op, ref a, ref b) => write!(fmt, "{:?}({:?},{:?})", op, a, b), + UnaryOp(ref op, ref a) => write!(fmt, "{:?}({:?})", op, a), + Box(ref t) => write!(fmt, "Box {:?}", t), + Aggregate(ref kind, ref lvs) => write!(fmt, "Aggregate<{:?}>({:?})", kind, lvs), + InlineAsm(ref 
asm) => write!(fmt, "InlineAsm({:?})", asm), + } + } +} + +/////////////////////////////////////////////////////////////////////////// +// Constants + +#[derive(Clone, Debug, PartialEq)] +pub struct Constant { + pub span: H::Span, + pub kind: ConstantKind +} + +#[derive(Clone, Debug, PartialEq)] +pub enum ConstantKind { + Literal(Literal), + Aggregate(AggregateKind, Vec>), + Call(Box>, Vec>), + Cast(Box>, H::Ty), + Repeat(Box>, Box>), + Ref(BorrowKind, Box>), + BinaryOp(BinOp, Box>, Box>), + UnaryOp(UnOp, Box>), + Projection(Box>) +} + +pub type ConstantProjection = + Projection,Constant>; + +#[derive(Clone, Debug, PartialEq)] +pub enum Literal { + Item { def_id: H::DefId, substs: H::Substs }, + Projection { projection: H::Projection }, + Int { bits: IntegralBits, value: i64 }, + Uint { bits: IntegralBits, value: u64 }, + Float { bits: FloatBits, value: f64 }, + Char { c: char }, + Bool { value: bool }, + Bytes { value: H::Bytes }, + String { value: H::InternedString }, +} + +#[derive(Copy, Clone, Debug, PartialEq, PartialOrd)] +pub enum IntegralBits { + B8, B16, B32, B64, BSize +} + +#[derive(Copy, Clone, Debug, PartialEq, PartialOrd)] +pub enum FloatBits { + F32, F64 +} diff --git a/src/librustc_mir/tcx/block.rs b/src/librustc_mir/tcx/block.rs new file mode 100644 index 00000000000..033e6ed2968 --- /dev/null +++ b/src/librustc_mir/tcx/block.rs @@ -0,0 +1,114 @@ +// Copyright 2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +use hair::*; + +use tcx::Cx; +use tcx::pattern::PatNode; +use tcx::rustc::middle::region::{BlockRemainder, CodeExtentData}; +use tcx::rustc_front::hir; +use tcx::syntax::ast; +use tcx::syntax::ptr::P; +use tcx::to_ref::ToRef; + +impl<'a,'tcx:'a> Mirror> for &'tcx hir::Block { + type Output = Block>; + + fn make_mirror(self, cx: &mut Cx<'a,'tcx>) -> Block> { + // We have to eagerly translate the "spine" of the statements + // in order to get the lexical scoping correctly. + let stmts = mirror_stmts(cx, self.id, self.stmts.iter().enumerate()); + Block { + extent: cx.tcx.region_maps.node_extent(self.id), + span: self.span, + stmts: stmts, + expr: self.expr.to_ref() + } + } +} + +impl<'a,'tcx:'a> Mirror> for &'tcx hir::Stmt { + type Output = Stmt>; + + fn make_mirror(self, _cx: &mut Cx<'a,'tcx>) -> Stmt> { + // In order to get the scoping correct, we eagerly mirror + // statements when we translate the enclosing block, so we + // should in fact never get to this point. + panic!("statements are eagerly mirrored"); + } +} + +fn mirror_stmts<'a,'tcx:'a,STMTS>(cx: &mut Cx<'a,'tcx>, + block_id: ast::NodeId, + mut stmts: STMTS) + -> Vec>> + where STMTS: Iterator)> +{ + let mut result = vec![]; + while let Some((index, stmt)) = stmts.next() { + match stmt.node { + hir::StmtExpr(ref expr, id) | hir::StmtSemi(ref expr, id) => + result.push( + StmtRef::Mirror( + Box::new(Stmt { span: stmt.span, + kind: StmtKind::Expr { + scope: cx.tcx.region_maps.node_extent(id), + expr: expr.to_ref() } }))), + + hir::StmtDecl(ref decl, id) => { + match decl.node { + hir::DeclItem(..) 
=> { /* ignore for purposes of the MIR */ } + hir::DeclLocal(ref local) => { + let remainder_extent = CodeExtentData::Remainder(BlockRemainder { + block: block_id, + first_statement_index: index as u32 + }); + let remainder_extent = + cx.tcx.region_maps.lookup_code_extent(remainder_extent); + + // pull in all following statements, since + // they are within the scope of this let: + let following_stmts = mirror_stmts(cx, block_id, stmts); + + result.push( + StmtRef::Mirror( + Box::new(Stmt { + span: stmt.span, + kind: StmtKind::Let { + remainder_scope: remainder_extent, + init_scope: cx.tcx.region_maps.node_extent(id), + pattern: PatNode::irrefutable(&local.pat).to_ref(), + initializer: local.init.to_ref(), + stmts: following_stmts + } + }))); + + return result; + } + } + } + } + } + return result; +} + +pub fn to_expr_ref<'a,'tcx:'a>(cx: &mut Cx<'a,'tcx>, + block: &'tcx hir::Block) + -> ExprRef> { + let block_ty = cx.tcx.node_id_to_type(block.id); + let temp_lifetime = cx.tcx.region_maps.temporary_scope(block.id); + let expr = Expr { + ty: block_ty, + temp_lifetime: temp_lifetime, + span: block.span, + kind: ExprKind::Block { body: block } + }; + expr.to_ref() +} diff --git a/src/librustc_mir/tcx/expr.rs b/src/librustc_mir/tcx/expr.rs new file mode 100644 index 00000000000..6352af39f14 --- /dev/null +++ b/src/librustc_mir/tcx/expr.rs @@ -0,0 +1,870 @@ +// Copyright 2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +use hair::*; +use repr::*; +use rustc_data_structures::fnv::FnvHashMap; +use std::rc::Rc; +use tcx::Cx; +use tcx::block; +use tcx::pattern::PatNode; +use tcx::rustc::front::map; +use tcx::rustc::middle::def; +use tcx::rustc::middle::def_id::DefId; +use tcx::rustc::middle::region::CodeExtent; +use tcx::rustc::middle::pat_util; +use tcx::rustc::middle::ty::{self, Ty}; +use tcx::rustc_front::hir; +use tcx::rustc_front::util as hir_util; +use tcx::syntax::codemap::Span; +use tcx::syntax::parse::token; +use tcx::syntax::ptr::P; +use tcx::to_ref::ToRef; + +impl<'a,'tcx:'a> Mirror> for &'tcx hir::Expr { + type Output = Expr>; + + fn make_mirror(self, cx: &mut Cx<'a,'tcx>) -> Expr> { + debug!("Expr::make_mirror(): id={}, span={:?}", self.id, self.span); + + let expr_ty = cx.tcx.expr_ty(self); // note: no adjustments (yet)! 
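+
+        // Overview of what follows: first build an `ExprKind` for this
+        // HIR node, then layer any adjustments recorded by typeck
+        // (autoderefs, autorefs, unsizing) on top of it, and finally
+        // wrap the result in its enclosing scope(s).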
+ + let kind = match self.node { + // Here comes the interesting stuff: + + hir::ExprMethodCall(_, _, ref args) => { + // Rewrite a.b(c) into UFCS form like Trait::b(a, c) + let expr = method_callee(cx, self, ty::MethodCall::expr(self.id)); + let args = args.iter() + .map(|e| e.to_ref()) + .collect(); + ExprKind::Call { + fun: expr.to_ref(), + args: args + } + } + + hir::ExprAddrOf(mutbl, ref expr) => { + let region = match expr_ty.sty { + ty::TyRef(r, _) => r, + _ => cx.tcx.sess.span_bug(expr.span, "type of & not region") + }; + ExprKind::Borrow { region: *region, + borrow_kind: to_borrow_kind(mutbl), + arg: expr.to_ref() } + } + + hir::ExprBlock(ref blk) => { + ExprKind::Block { + body: &**blk + } + } + + hir::ExprAssign(ref lhs, ref rhs) => { + ExprKind::Assign { + lhs: lhs.to_ref(), + rhs: rhs.to_ref(), + } + } + + hir::ExprAssignOp(op, ref lhs, ref rhs) => { + let op = bin_op(op.node); + ExprKind::AssignOp { + op: op, + lhs: lhs.to_ref(), + rhs: rhs.to_ref(), + } + } + + hir::ExprLit(ref lit) => { + let literal = convert_literal(cx, self.span, expr_ty, lit); + ExprKind::Literal { literal: literal } + } + + hir::ExprBinary(op, ref lhs, ref rhs) => { + if cx.tcx.is_method_call(self.id) { + let pass_args = if hir_util::is_by_value_binop(op.node) { + PassArgs::ByValue + } else { + PassArgs::ByRef + }; + overloaded_operator(cx, self, ty::MethodCall::expr(self.id), + pass_args, lhs.to_ref(), vec![rhs]) + } else { + // FIXME overflow + match op.node { + hir::BinOp_::BiAnd => { + ExprKind::LogicalOp { op: LogicalOp::And, + lhs: lhs.to_ref(), + rhs: rhs.to_ref() } + } + hir::BinOp_::BiOr => { + ExprKind::LogicalOp { op: LogicalOp::Or, + lhs: lhs.to_ref(), + rhs: rhs.to_ref() } + } + _ => { + let op = bin_op(op.node); + ExprKind::Binary { op: op, + lhs: lhs.to_ref(), + rhs: rhs.to_ref() } + } + } + } + } + + hir::ExprIndex(ref lhs, ref index) => { + if cx.tcx.is_method_call(self.id) { + overloaded_lvalue(cx, self, ty::MethodCall::expr(self.id), + PassArgs::ByValue, lhs.to_ref(), vec![index]) + } else { + ExprKind::Index { lhs: lhs.to_ref(), + index: index.to_ref() } + } + } + + hir::ExprUnary(hir::UnOp::UnDeref, ref arg) => { + if cx.tcx.is_method_call(self.id) { + overloaded_lvalue(cx, self, ty::MethodCall::expr(self.id), + PassArgs::ByValue, arg.to_ref(), vec![]) + } else { + ExprKind::Deref { arg: arg.to_ref() } + } + } + + hir::ExprUnary(hir::UnOp::UnUniq, ref arg) => { + assert!(!cx.tcx.is_method_call(self.id)); + ExprKind::Box { place: None, value: arg.to_ref() } + } + + hir::ExprUnary(op, ref arg) => { + if cx.tcx.is_method_call(self.id) { + overloaded_operator(cx, self, ty::MethodCall::expr(self.id), + PassArgs::ByValue, arg.to_ref(), vec![]) + } else { + // FIXME overflow + let op = match op { + hir::UnOp::UnNot => UnOp::Not, + hir::UnOp::UnNeg => UnOp::Neg, + hir::UnOp::UnUniq | hir::UnOp::UnDeref => { + cx.tcx.sess.span_bug( + self.span, + &format!("operator should have been handled elsewhere {:?}", op)); + } + }; + ExprKind::Unary { op: op, arg: arg.to_ref() } + } + } + + hir::ExprStruct(_, ref fields, ref base) => { + match expr_ty.sty { + ty::TyStruct(adt, substs) => { + ExprKind::Adt { + adt_def: adt, + variant_index: 0, + substs: substs, + fields: fields.to_ref(), + base: base.to_ref(), + } + } + ty::TyEnum(adt, substs) => { + match cx.tcx.def_map.borrow()[&self.id].full_def() { + def::DefVariant(enum_id, variant_id, true) => { + debug_assert!(adt.did == enum_id); + let index = adt.variant_index_with_id(variant_id); + ExprKind::Adt { + adt_def: adt, + variant_index: index, + 
substs: substs, + fields: fields.to_ref(), + base: base.to_ref(), + } + } + ref def => { + cx.tcx.sess.span_bug( + self.span, + &format!("unexpected def: {:?}", def)); + } + } + } + _ => { + cx.tcx.sess.span_bug( + self.span, + &format!("unexpected type for struct literal: {:?}", expr_ty)); + } + } + } + + hir::ExprClosure(..) => { + let closure_ty = cx.tcx.expr_ty(self); + let (def_id, substs) = match closure_ty.sty { + ty::TyClosure(def_id, ref substs) => (def_id, substs), + _ => { + cx.tcx.sess.span_bug(self.span, + &format!("closure expr w/o closure type: {:?}", + closure_ty)); + } + }; + let upvars = cx.tcx.with_freevars(self.id, |freevars| { + freevars.iter() + .enumerate() + .map(|(i, fv)| capture_freevar(cx, self, fv, substs.upvar_tys[i])) + .collect() + }); + ExprKind::Closure { + closure_id: def_id, + substs: &**substs, + upvars: upvars, + } + } + + hir::ExprRange(ref start, ref end) => { + let range_ty = cx.tcx.expr_ty(self); + let (adt_def, substs) = match range_ty.sty { + ty::TyStruct(adt_def, substs) => (adt_def, substs), + _ => { + cx.tcx.sess.span_bug( + self.span, + &format!("unexpanded ast")); + } + }; + + let field_expr_ref = |s: &'tcx P, nm: &str| { + FieldExprRef { name: Field::Named(token::intern(nm)), + expr: s.to_ref() } + }; + + let start_field = start.as_ref() + .into_iter() + .map(|s| field_expr_ref(s, "start")); + + let end_field = end.as_ref() + .into_iter() + .map(|e| field_expr_ref(e, "end")); + + ExprKind::Adt { adt_def: adt_def, + variant_index: 0, + substs: substs, + fields: start_field.chain(end_field).collect(), + base: None } + } + + hir::ExprPath(..) => { + convert_path_expr(cx, self) + } + + hir::ExprInlineAsm(ref asm) => { + ExprKind::InlineAsm { asm: asm } + } + + // Now comes the rote stuff: + + hir::ExprParen(ref p) => + ExprKind::Paren { arg: p.to_ref() }, + hir::ExprRepeat(ref v, ref c) => + ExprKind::Repeat { value: v.to_ref(), count: c.to_ref() }, + hir::ExprRet(ref v) => + ExprKind::Return { value: v.to_ref() }, + hir::ExprBreak(label) => + ExprKind::Break { label: label.map(|_| loop_label(cx, self)) }, + hir::ExprAgain(label) => + ExprKind::Continue { label: label.map(|_| loop_label(cx, self)) }, + hir::ExprMatch(ref discr, ref arms, _) => + ExprKind::Match { discriminant: discr.to_ref(), + arms: arms.iter().map(|a| convert_arm(cx, a)).collect() }, + hir::ExprIf(ref cond, ref then, ref otherwise) => + ExprKind::If { condition: cond.to_ref(), + then: block::to_expr_ref(cx, then), + otherwise: otherwise.to_ref() }, + hir::ExprWhile(ref cond, ref body, _) => + ExprKind::Loop { condition: Some(cond.to_ref()), + body: block::to_expr_ref(cx, body) }, + hir::ExprLoop(ref body, _) => + ExprKind::Loop { condition: None, + body: block::to_expr_ref(cx, body) }, + hir::ExprField(ref source, ident) => + ExprKind::Field { lhs: source.to_ref(), + name: Field::Named(ident.node.name) }, + hir::ExprTupField(ref source, ident) => + ExprKind::Field { lhs: source.to_ref(), + name: Field::Indexed(ident.node) }, + hir::ExprCast(ref source, _) => + ExprKind::Cast { source: source.to_ref() }, + hir::ExprBox(ref place, ref value) => + ExprKind::Box { place: place.to_ref(), value: value.to_ref() }, + hir::ExprVec(ref fields) => + ExprKind::Vec { fields: fields.to_ref() }, + hir::ExprTup(ref fields) => + ExprKind::Tuple { fields: fields.to_ref() }, + hir::ExprCall(ref fun, ref args) => + ExprKind::Call { fun: fun.to_ref(), args: args.to_ref() }, + }; + + let temp_lifetime = cx.tcx.region_maps.temporary_scope(self.id); + let expr_extent = 
cx.tcx.region_maps.node_extent(self.id); + + let mut expr = Expr { + temp_lifetime: temp_lifetime, + ty: expr_ty, + span: self.span, + kind: kind, + }; + + // Now apply adjustments, if any. + match cx.tcx.tables.borrow().adjustments.get(&self.id) { + None => { } + Some(&ty::AdjustReifyFnPointer) => { + let adjusted_ty = cx.tcx.expr_ty_adjusted(self); + expr = Expr { + temp_lifetime: temp_lifetime, + ty: adjusted_ty, + span: self.span, + kind: ExprKind::ReifyFnPointer { source: expr.to_ref() }, + }; + } + Some(&ty::AdjustUnsafeFnPointer) => { + let adjusted_ty = cx.tcx.expr_ty_adjusted(self); + expr = Expr { + temp_lifetime: temp_lifetime, + ty: adjusted_ty, + span: self.span, + kind: ExprKind::UnsafeFnPointer { source: expr.to_ref() }, + }; + } + Some(&ty::AdjustDerefRef(ref adj)) => { + for i in 0..adj.autoderefs { + let i = i as u32; + let adjusted_ty = + expr.ty.adjust_for_autoderef( + cx.tcx, + self.id, + self.span, + i, + |mc| cx.tcx.tables.borrow().method_map.get(&mc).map(|m| m.ty)); + let kind = if cx.tcx.is_overloaded_autoderef(self.id, i) { + overloaded_lvalue(cx, self, ty::MethodCall::autoderef(self.id, i), + PassArgs::ByValue, expr.to_ref(), vec![]) + } else { + ExprKind::Deref { arg: expr.to_ref() } + }; + expr = Expr { + temp_lifetime: temp_lifetime, + ty: adjusted_ty, + span: self.span, + kind: kind + }; + } + + if let Some(target) = adj.unsize { + expr = Expr { + temp_lifetime: temp_lifetime, + ty: target, + span: self.span, + kind: ExprKind::Unsize { source: expr.to_ref() } + }; + } else if let Some(autoref) = adj.autoref { + let adjusted_ty = expr.ty.adjust_for_autoref(cx.tcx, Some(autoref)); + match autoref { + ty::AutoPtr(r, m) => { + expr = Expr { + temp_lifetime: temp_lifetime, + ty: adjusted_ty, + span: self.span, + kind: ExprKind::Borrow { region: *r, + borrow_kind: to_borrow_kind(m), + arg: expr.to_ref() } + }; + } + ty::AutoUnsafe(m) => { + // Convert this to a suitable `&foo` and + // then an unsafe coercion. Limit the region to be just this + // expression. + let region = ty::ReScope(expr_extent); + let region = cx.tcx.mk_region(region); + expr = Expr { + temp_lifetime: temp_lifetime, + ty: cx.tcx.mk_ref(region, ty::TypeAndMut { ty: expr.ty, mutbl: m }), + span: self.span, + kind: ExprKind::Borrow { region: *region, + borrow_kind: to_borrow_kind(m), + arg: expr.to_ref() } + }; + expr = Expr { + temp_lifetime: temp_lifetime, + ty: adjusted_ty, + span: self.span, + kind: ExprKind::Cast { source: expr.to_ref() } + }; + } + } + } + } + } + + // Next, wrap this up in the expr's scope. + expr = Expr { + temp_lifetime: temp_lifetime, + ty: expr.ty, + span: self.span, + kind: ExprKind::Scope { extent: expr_extent, + value: expr.to_ref() } + }; + + // Finally, create a destruction scope, if any. + if let Some(extent) = cx.tcx.region_maps.opt_destruction_extent(self.id) { + expr = Expr { + temp_lifetime: temp_lifetime, + ty: expr.ty, + span: self.span, + kind: ExprKind::Scope { extent: extent, value: expr.to_ref() } + }; + } + + // OK, all done! 
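+        // From the outside in, the nesting built above is: the optional
+        // destruction scope, then the node's own scope, then any
+        // adjustment wrappers, then the `ExprKind` for the node itself.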
+ expr + } +} + +fn method_callee<'a,'tcx:'a>(cx: &mut Cx<'a,'tcx>, + expr: &hir::Expr, + method_call: ty::MethodCall) + -> Expr> { + let tables = cx.tcx.tables.borrow(); + let callee = &tables.method_map[&method_call]; + let temp_lifetime = cx.tcx.region_maps.temporary_scope(expr.id); + Expr { + temp_lifetime: temp_lifetime, + ty: callee.ty, + span: expr.span, + kind: ExprKind::Literal { + literal: Literal::Item { + def_id: callee.def_id, + substs: callee.substs, + } + } + } +} + +fn to_borrow_kind(m: hir::Mutability) -> BorrowKind { + match m { + hir::MutMutable => BorrowKind::Mut, + hir::MutImmutable => BorrowKind::Shared, + } +} + +fn convert_literal<'a,'tcx:'a>(cx: &mut Cx<'a,'tcx>, + expr_span: Span, + expr_ty: Ty<'tcx>, + literal: &hir::Lit) + -> Literal> +{ + use repr::IntegralBits::*; + match (&literal.node, &expr_ty.sty) { + (&hir::LitStr(ref text, _), _) => + Literal::String { value: text.clone() }, + (&hir::LitByteStr(ref bytes), _) => + Literal::Bytes { value: bytes.clone() }, + (&hir::LitByte(c), _) => + Literal::Uint { bits: B8, value: c as u64 }, + (&hir::LitChar(c), _) => + Literal::Char { c: c }, + (&hir::LitInt(v, _), &ty::TyUint(hir::TyU8)) => + Literal::Uint { bits: B8, value: v }, + (&hir::LitInt(v, _), &ty::TyUint(hir::TyU16)) => + Literal::Uint { bits: B16, value: v }, + (&hir::LitInt(v, _), &ty::TyUint(hir::TyU32)) => + Literal::Uint { bits: B32, value: v }, + (&hir::LitInt(v, _), &ty::TyUint(hir::TyU64)) => + Literal::Uint { bits: B64, value: v }, + (&hir::LitInt(v, _), &ty::TyUint(hir::TyUs)) => + Literal::Uint { bits: BSize, value: v }, + (&hir::LitInt(v, hir::SignedIntLit(_, hir::Sign::Minus)), &ty::TyInt(hir::TyI8)) => + Literal::Int { bits: B8, value: -(v as i64) }, + (&hir::LitInt(v, hir::SignedIntLit(_, hir::Sign::Minus)), &ty::TyInt(hir::TyI16)) => + Literal::Int { bits: B16, value: -(v as i64) }, + (&hir::LitInt(v, hir::SignedIntLit(_, hir::Sign::Minus)), &ty::TyInt(hir::TyI32)) => + Literal::Int { bits: B32, value: -(v as i64) }, + (&hir::LitInt(v, hir::SignedIntLit(_, hir::Sign::Minus)), &ty::TyInt(hir::TyI64)) => + Literal::Int { bits: B64, value: -(v as i64) }, + (&hir::LitInt(v, hir::SignedIntLit(_, hir::Sign::Minus)), &ty::TyInt(hir::TyIs)) => + Literal::Int { bits: BSize, value: -(v as i64) }, + (&hir::LitInt(v, _), &ty::TyInt(hir::TyI8)) => + Literal::Int { bits: B8, value: v as i64 }, + (&hir::LitInt(v, _), &ty::TyInt(hir::TyI16)) => + Literal::Int { bits: B16, value: v as i64 }, + (&hir::LitInt(v, _), &ty::TyInt(hir::TyI32)) => + Literal::Int { bits: B32, value: v as i64 }, + (&hir::LitInt(v, _), &ty::TyInt(hir::TyI64)) => + Literal::Int { bits: B64, value: v as i64 }, + (&hir::LitInt(v, _), &ty::TyInt(hir::TyIs)) => + Literal::Int { bits: BSize, value: v as i64 }, + (&hir::LitFloat(ref v, _), &ty::TyFloat(hir::TyF32)) | + (&hir::LitFloatUnsuffixed(ref v), &ty::TyFloat(hir::TyF32)) => + Literal::Float { bits: FloatBits::F32, value: v.parse::().unwrap() }, + (&hir::LitFloat(ref v, _), &ty::TyFloat(hir::TyF64)) | + (&hir::LitFloatUnsuffixed(ref v), &ty::TyFloat(hir::TyF64)) => + Literal::Float { bits: FloatBits::F64, value: v.parse::().unwrap() }, + (&hir::LitBool(v), _) => + Literal::Bool { value: v }, + (ref l, ref t) => + cx.tcx.sess.span_bug( + expr_span, + &format!("Invalid literal/type combination: {:?},{:?}", l, t)) + } +} + +fn convert_arm<'a,'tcx:'a>(cx: &Cx<'a,'tcx>, arm: &'tcx hir::Arm) -> Arm> { + let map = if arm.pats.len() == 1 { + None + } else { + let mut map = FnvHashMap(); + pat_util::pat_bindings(&cx.tcx.def_map, &arm.pats[0], 
|_, p_id, _, path| { + map.insert(path.node, p_id); + }); + Some(Rc::new(map)) + }; + + Arm { patterns: arm.pats.iter().map(|p| PatNode::new(p, map.clone()).to_ref()).collect(), + guard: arm.guard.to_ref(), + body: arm.body.to_ref() } +} + +fn convert_path_expr<'a,'tcx:'a>(cx: &mut Cx<'a,'tcx>, + expr: &'tcx hir::Expr) + -> ExprKind> +{ + let substs = cx.tcx.mk_substs(cx.tcx.node_id_item_substs(expr.id).substs); + match cx.tcx.def_map.borrow()[&expr.id].full_def() { + def::DefVariant(_, def_id, false) | + def::DefStruct(def_id) | + def::DefFn(def_id, _) | + def::DefConst(def_id) | + def::DefMethod(def_id) | + def::DefAssociatedConst(def_id) => + ExprKind::Literal { + literal: Literal::Item { def_id: def_id, substs: substs } + }, + + def::DefStatic(node_id, _) => + ExprKind::StaticRef { + id: node_id, + }, + + def @ def::DefLocal(..) | + def @ def::DefUpvar(..) => + convert_var(cx, expr, def), + + def => + cx.tcx.sess.span_bug( + expr.span, + &format!("def `{:?}` not yet implemented", def)), + } +} + +fn convert_var<'a,'tcx:'a>(cx: &mut Cx<'a,'tcx>, + expr: &'tcx hir::Expr, + def: def::Def) + -> ExprKind> +{ + let temp_lifetime = cx.tcx.region_maps.temporary_scope(expr.id); + + match def { + def::DefLocal(node_id) => { + ExprKind::VarRef { + id: node_id, + } + } + + def::DefUpvar(id_var, index, closure_expr_id) => { + debug!("convert_var(upvar({:?}, {:?}, {:?}))", id_var, index, closure_expr_id); + let var_ty = cx.tcx.node_id_to_type(id_var); + + let body_id = match cx.tcx.map.find(closure_expr_id) { + Some(map::NodeExpr(expr)) => { + match expr.node { + hir::ExprClosure(_, _, ref body) => body.id, + _ => { + cx.tcx.sess.span_bug(expr.span, + &format!("closure expr is not a closure expr")); + } + } + } + _ => { + cx.tcx.sess.span_bug(expr.span, + &format!("ast-map has garbage for closure expr")); + } + }; + + // FIXME free regions in closures are not right + let closure_ty = + cx.tcx.node_id_to_type(closure_expr_id); + + // FIXME we're just hard-coding the idea that the + // signature will be &self or &mut self and hence will + // have a bound region with number 0 + let region = + ty::Region::ReFree( + ty::FreeRegion { + scope: cx.tcx.region_maps.node_extent(body_id), + bound_region: ty::BoundRegion::BrAnon(0) + }); + let region = + cx.tcx.mk_region(region); + + let self_expr = match cx.tcx.closure_kind(DefId::local(closure_expr_id)) { + ty::ClosureKind::FnClosureKind => { + let ref_closure_ty = + cx.tcx.mk_ref(region, + ty::TypeAndMut { ty: closure_ty, + mutbl: hir::MutImmutable }); + Expr { + ty: closure_ty, + temp_lifetime: temp_lifetime, + span: expr.span, + kind: ExprKind::Deref { + arg: Expr { + ty: ref_closure_ty, + temp_lifetime: temp_lifetime, + span: expr.span, + kind: ExprKind::SelfRef + }.to_ref() + } + } + } + ty::ClosureKind::FnMutClosureKind => { + let ref_closure_ty = + cx.tcx.mk_ref(region, + ty::TypeAndMut { ty: closure_ty, + mutbl: hir::MutMutable }); + Expr { + ty: closure_ty, + temp_lifetime: temp_lifetime, + span: expr.span, + kind: ExprKind::Deref { + arg: Expr { + ty: ref_closure_ty, + temp_lifetime: temp_lifetime, + span: expr.span, + kind: ExprKind::SelfRef + }.to_ref() + } + } + } + ty::ClosureKind::FnOnceClosureKind => { + Expr { + ty: closure_ty, + temp_lifetime: temp_lifetime, + span: expr.span, + kind: ExprKind::SelfRef + } + } + }; + + // at this point we have `self.n`, which loads up the upvar + let field_kind = + ExprKind::Field { lhs: self_expr.to_ref(), + name: Field::Indexed(index) }; + + // ...but the upvar might be an `&T` or `&mut T` capture, at which 
+ // point we need an implicit deref + let upvar_id = ty::UpvarId { var_id: id_var, closure_expr_id: closure_expr_id }; + let upvar_capture = match cx.tcx.upvar_capture(upvar_id) { + Some(c) => c, + None => { + cx.tcx.sess.span_bug( + expr.span, + &format!("no upvar_capture for {:?}", upvar_id)); + } + }; + match upvar_capture { + ty::UpvarCapture::ByValue => field_kind, + ty::UpvarCapture::ByRef(_) => { + ExprKind::Deref { + arg: Expr { + temp_lifetime: temp_lifetime, + ty: var_ty, + span: expr.span, + kind: field_kind, + }.to_ref() + } + } + } + } + + _ => cx.tcx.sess.span_bug(expr.span, "type of & not region") + } +} + + +fn bin_op(op: hir::BinOp_) -> BinOp { + match op { + hir::BinOp_::BiAdd => BinOp::Add, + hir::BinOp_::BiSub => BinOp::Sub, + hir::BinOp_::BiMul => BinOp::Mul, + hir::BinOp_::BiDiv => BinOp::Div, + hir::BinOp_::BiRem => BinOp::Rem, + hir::BinOp_::BiBitXor => BinOp::BitXor, + hir::BinOp_::BiBitAnd => BinOp::BitAnd, + hir::BinOp_::BiBitOr => BinOp::BitOr, + hir::BinOp_::BiShl => BinOp::Shl, + hir::BinOp_::BiShr => BinOp::Shr, + hir::BinOp_::BiEq => BinOp::Eq, + hir::BinOp_::BiLt => BinOp::Lt, + hir::BinOp_::BiLe => BinOp::Le, + hir::BinOp_::BiNe => BinOp::Ne, + hir::BinOp_::BiGe => BinOp::Ge, + hir::BinOp_::BiGt => BinOp::Gt, + _ => panic!("no equivalent for ast binop {:?}", op) + } +} + +enum PassArgs { + ByValue, + ByRef +} + +fn overloaded_operator<'a,'tcx:'a>(cx: &mut Cx<'a,'tcx>, + expr: &'tcx hir::Expr, + method_call: ty::MethodCall, + pass_args: PassArgs, + receiver: ExprRef>, + args: Vec<&'tcx P>) + -> ExprKind> +{ + // the receiver has all the adjustments that are needed, so we can + // just push a reference to it + let mut argrefs = vec![receiver]; + + // the arguments, unfortunately, do not, so if this is a ByRef + // operator, we have to gin up the autorefs (but by value is easy) + match pass_args { + PassArgs::ByValue => { + argrefs.extend( + args.iter() + .map(|arg| arg.to_ref())) + } + + PassArgs::ByRef => { + let scope = cx.tcx.region_maps.node_extent(expr.id); + let region = cx.tcx.mk_region(ty::ReScope(scope)); + let temp_lifetime = cx.tcx.region_maps.temporary_scope(expr.id); + argrefs.extend( + args.iter() + .map(|arg| { + let arg_ty = cx.tcx.expr_ty_adjusted(arg); + let adjusted_ty = + cx.tcx.mk_ref(region, + ty::TypeAndMut { ty: arg_ty, + mutbl: hir::MutImmutable }); + Expr { + temp_lifetime: temp_lifetime, + ty: adjusted_ty, + span: expr.span, + kind: ExprKind::Borrow { region: *region, + borrow_kind: BorrowKind::Shared, + arg: arg.to_ref() } + }.to_ref() + })) + } + } + + // now create the call itself + let fun = method_callee(cx, expr, method_call); + ExprKind::Call { + fun: fun.to_ref(), + args: argrefs, + } +} + +fn overloaded_lvalue<'a,'tcx:'a>(cx: &mut Cx<'a,'tcx>, + expr: &'tcx hir::Expr, + method_call: ty::MethodCall, + pass_args: PassArgs, + receiver: ExprRef>, + args: Vec<&'tcx P>) + -> ExprKind> +{ + // For an overloaded *x or x[y] expression of type T, the method + // call returns an &T and we must add the deref so that the types + // line up (this is because `*x` and `x[y]` represent lvalues): + + // to find the type &T of the content returned by the method; + let tables = cx.tcx.tables.borrow(); + let callee = &tables.method_map[&method_call]; + let ref_ty = callee.ty.fn_ret(); + let ref_ty = cx.tcx.no_late_bound_regions(&ref_ty).unwrap().unwrap(); + // 1~~~~~ 2~~~~~ + // (1) callees always have all late-bound regions fully instantiated, + // (2) overloaded methods don't return `!` + + // construct the complete expression `foo()` for 
the overloaded call, + // which will yield the &T type + let temp_lifetime = cx.tcx.region_maps.temporary_scope(expr.id); + let ref_kind = overloaded_operator(cx, expr, method_call, pass_args, receiver, args); + let ref_expr = Expr { + temp_lifetime: temp_lifetime, + ty: ref_ty, + span: expr.span, + kind: ref_kind, + }; + + // construct and return a deref wrapper `*foo()` + ExprKind::Deref { arg: ref_expr.to_ref() } +} + +fn capture_freevar<'a,'tcx:'a>(cx: &mut Cx<'a,'tcx>, + closure_expr: &'tcx hir::Expr, + freevar: &ty::Freevar, + freevar_ty: Ty<'tcx>) + -> ExprRef> { + let id_var = freevar.def.def_id().node; + let upvar_id = ty::UpvarId { var_id: id_var, closure_expr_id: closure_expr.id }; + let upvar_capture = cx.tcx.upvar_capture(upvar_id).unwrap(); + let temp_lifetime = cx.tcx.region_maps.temporary_scope(closure_expr.id); + let var_ty = cx.tcx.node_id_to_type(id_var); + let captured_var = Expr { temp_lifetime: temp_lifetime, + ty: var_ty, + span: closure_expr.span, + kind: convert_var(cx, closure_expr, freevar.def) }; + match upvar_capture { + ty::UpvarCapture::ByValue => { + captured_var.to_ref() + } + ty::UpvarCapture::ByRef(upvar_borrow) => { + let borrow_kind = match upvar_borrow.kind { + ty::BorrowKind::ImmBorrow => BorrowKind::Shared, + ty::BorrowKind::UniqueImmBorrow => BorrowKind::Unique, + ty::BorrowKind::MutBorrow => BorrowKind::Mut, + }; + Expr { + temp_lifetime: temp_lifetime, + ty: freevar_ty, + span: closure_expr.span, + kind: ExprKind::Borrow { region: upvar_borrow.region, + borrow_kind: borrow_kind, + arg: captured_var.to_ref() } + }.to_ref() + } + } +} + +fn loop_label<'a,'tcx:'a>(cx: &mut Cx<'a,'tcx>, + expr: &'tcx hir::Expr) + -> CodeExtent +{ + match cx.tcx.def_map.borrow().get(&expr.id).map(|d| d.full_def()) { + Some(def::DefLabel(loop_id)) => cx.tcx.region_maps.node_extent(loop_id), + d => { + cx.tcx.sess.span_bug( + expr.span, + &format!("loop scope resolved to {:?}", d)); + } + } +} diff --git a/src/librustc_mir/tcx/mod.rs b/src/librustc_mir/tcx/mod.rs new file mode 100644 index 00000000000..35cfc772998 --- /dev/null +++ b/src/librustc_mir/tcx/mod.rs @@ -0,0 +1,184 @@ +// Copyright 2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
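+
+//! The `Cx` type defined below connects the abstract `Hair` trait to the
+//! actual compiler context: it bundles a `ty::ctxt` with an inference
+//! context and maps each `Hair` associated type (spans, types, def-ids,
+//! regions, and so on) onto its rustc counterpart.
+//!
+//! (As a rough usage sketch: a caller holding an `InferCtxt` builds one
+//! with `Cx::new(&infcx)` and passes it as the `cx` argument to the
+//! `Mirror::make_mirror` impls in the submodules.)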
+ +use hair::*; +use repr::*; +use std::fmt::{Debug, Formatter, Error}; +use std::hash::{Hash, Hasher}; +use std::rc::Rc; + +use self::rustc::middle::def_id::DefId; +use self::rustc::middle::infer::InferCtxt; +use self::rustc::middle::region::CodeExtent; +use self::rustc::middle::subst::{self, Subst, Substs}; +use self::rustc::middle::ty::{self, Ty}; +use self::rustc_front::hir; +use self::syntax::ast; +use self::syntax::codemap::Span; +use self::syntax::parse::token::{self, special_idents, InternedString}; + +extern crate rustc; +extern crate rustc_front; +extern crate syntax; + +#[derive(Copy, Clone)] +pub struct Cx<'a,'tcx:'a> { + pub tcx: &'a ty::ctxt<'tcx>, + pub infcx: &'a InferCtxt<'a,'tcx>, +} + +impl<'a,'tcx> Cx<'a,'tcx> { + pub fn new(infcx: &'a InferCtxt<'a,'tcx>) -> Cx<'a,'tcx> { + Cx { tcx: infcx.tcx, infcx: infcx } + } +} + +pub use self::pattern::PatNode; + +impl<'a,'tcx:'a> Hair for Cx<'a, 'tcx> { + type VarId = ast::NodeId; + type DefId = DefId; + type AdtDef = ty::AdtDef<'tcx>; + type Name = ast::Name; + type Ident = ast::Ident; + type InternedString = InternedString; + type Bytes = Rc>; + type Span = Span; + type Projection = ty::ProjectionTy<'tcx>; + type Substs = &'tcx subst::Substs<'tcx>; + type ClosureSubsts = &'tcx ty::ClosureSubsts<'tcx>; + type Ty = Ty<'tcx>; + type Region = ty::Region; + type CodeExtent = CodeExtent; + type Pattern = PatNode<'tcx>; + type Expr = &'tcx hir::Expr; + type Stmt = &'tcx hir::Stmt; + type Block = &'tcx hir::Block; + type InlineAsm = &'tcx hir::InlineAsm; + + fn unit_ty(&mut self) -> Ty<'tcx> { + self.tcx.mk_nil() + } + + fn usize_ty(&mut self) -> Ty<'tcx> { + self.tcx.types.usize + } + + fn bool_ty(&mut self) -> Ty<'tcx> { + self.tcx.types.bool + } + + fn partial_eq(&mut self, ty: Ty<'tcx>) -> ItemRef { + let eq_def_id = self.tcx.lang_items.eq_trait().unwrap(); + self.cmp_method_ref(eq_def_id, "eq", ty) + } + + fn partial_le(&mut self, ty: Ty<'tcx>) -> ItemRef { + let ord_def_id = self.tcx.lang_items.ord_trait().unwrap(); + self.cmp_method_ref(ord_def_id, "le", ty) + } + + fn num_variants(&mut self, adt_def: ty::AdtDef<'tcx>) -> usize { + adt_def.variants.len() + } + + fn fields(&mut self, adt_def: ty::AdtDef<'tcx>, variant_index: usize) -> Vec> { + adt_def.variants[variant_index] + .fields + .iter() + .enumerate() + .map(|(index, field)| { + if field.name == special_idents::unnamed_field.name { + Field::Indexed(index) + } else { + Field::Named(field.name) + } + }) + .collect() + } + + fn needs_drop(&mut self, ty: Ty<'tcx>, span: Self::Span) -> bool { + if self.infcx.type_moves_by_default(ty, span) { + // TODO we should do an add'l check here to determine if + // any dtor will execute, but the relevant fn + // (`type_needs_drop`) is currently factored into + // `librustc_trans`, so we can't easily do so. + true + } else { + // if type implements Copy, cannot require drop + false + } + } + + fn span_bug(&mut self, span: Self::Span, message: &str) -> ! 
{ + self.tcx.sess.span_bug(span, message) + } +} + +impl<'a,'tcx:'a> Cx<'a,'tcx> { + fn cmp_method_ref(&mut self, + trait_def_id: DefId, + method_name: &str, + arg_ty: Ty<'tcx>) + -> ItemRef> { + let method_name = token::intern(method_name); + let substs = Substs::new_trait(vec![arg_ty], vec![], arg_ty); + for trait_item in self.tcx.trait_items(trait_def_id).iter() { + match *trait_item { + ty::ImplOrTraitItem::MethodTraitItem(ref method) => { + if method.name == method_name { + let method_ty = self.tcx.lookup_item_type(method.def_id); + let method_ty = method_ty.ty.subst(self.tcx, &substs); + return ItemRef { + ty: method_ty, + def_id: method.def_id, + substs: self.tcx.mk_substs(substs), + }; + } + } + ty::ImplOrTraitItem::ConstTraitItem(..) | + ty::ImplOrTraitItem::TypeTraitItem(..) => { + } + } + } + + self.tcx.sess.bug( + &format!("found no method `{}` in `{:?}`", method_name, trait_def_id)); + } +} + +// We only need this impl so that we do deriving for things that are +// defined relative to the `Hair` trait. See `Hair` trait for more +// details. +impl<'a,'tcx> PartialEq for Cx<'a,'tcx> { + fn eq(&self, _: &Cx<'a,'tcx>) -> bool { + panic!("Cx should never ACTUALLY be compared for equality") + } +} + +impl<'a,'tcx> Eq for Cx<'a,'tcx> { } + +impl<'a,'tcx> Hash for Cx<'a,'tcx> { + fn hash(&self, _: &mut H) { + panic!("Cx should never ACTUALLY be hashed") + } +} + +impl<'a,'tcx> Debug for Cx<'a,'tcx> { + fn fmt(&self, fmt: &mut Formatter) -> Result<(), Error> { + write!(fmt, "Tcx") + } +} + +mod block; +mod expr; +mod pattern; +mod to_ref; + diff --git a/src/librustc_mir/tcx/pattern.rs b/src/librustc_mir/tcx/pattern.rs new file mode 100644 index 00000000000..eee0911f1cd --- /dev/null +++ b/src/librustc_mir/tcx/pattern.rs @@ -0,0 +1,291 @@ +// Copyright 2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +use hair::*; +use repr::*; + +use rustc_data_structures::fnv::FnvHashMap; +use std::rc::Rc; +use tcx::Cx; +use tcx::rustc::middle::const_eval::lookup_const_by_id; +use tcx::rustc::middle::def; +use tcx::rustc::middle::pat_util::{pat_is_resolved_const, pat_is_binding}; +use tcx::rustc::middle::ty::{self, Ty}; +use tcx::rustc_front::hir; +use tcx::syntax::ast; +use tcx::syntax::ptr::P; +use tcx::to_ref::ToRef; + +/// When there are multiple patterns in a single arm, each one has its +/// own node-ids for the bindings. References to the variables always +/// use the node-ids from the first pattern in the arm, so we just +/// remap the ids for all subsequent bindings to the first one. +/// +/// Example: +/// ``` +/// match foo { +/// Test1(flavor /* def 1 */) | +/// Test2(flavor /* def 2 */) if flavor /* ref 1 */.is_tasty() => { ... } +/// _ => { ... 
} +/// } +/// ``` +#[derive(Clone, Debug)] +pub struct PatNode<'tcx> { + pat: &'tcx hir::Pat, + binding_map: Option>> +} + +impl<'tcx> PatNode<'tcx> { + pub fn new(pat: &'tcx hir::Pat, + binding_map: Option>>) + -> PatNode<'tcx> { + PatNode { + pat: pat, + binding_map: binding_map, + } + } + + pub fn irrefutable(pat: &'tcx hir::Pat) + -> PatNode<'tcx> { + PatNode::new(pat, None) + } + + fn pat_ref<'a>(&self, pat: &'tcx hir::Pat) -> PatternRef> { + PatNode::new(pat, self.binding_map.clone()).to_ref() + } + + fn pat_refs<'a>(&self, pats: &'tcx Vec>) -> Vec>> { + pats.iter().map(|p| self.pat_ref(p)).collect() + } + + fn opt_pat_ref<'a>(&self, pat: &'tcx Option>) -> Option>> { + pat.as_ref().map(|p| self.pat_ref(p)) + } + + fn slice_or_array_pattern<'a>(&self, + cx: &mut Cx<'a, 'tcx>, + ty: Ty<'tcx>, + prefix: &'tcx Vec>, + slice: &'tcx Option>, + suffix: &'tcx Vec>) + -> PatternKind> + { + match ty.sty { + ty::TySlice(..) => + // matching a slice or fixed-length array + PatternKind::Slice { + prefix: self.pat_refs(prefix), + slice: self.opt_pat_ref(slice), + suffix: self.pat_refs(suffix), + }, + + ty::TyArray(_, len) => { + // fixed-length array + assert!(len >= prefix.len() + suffix.len()); + PatternKind::Array { + prefix: self.pat_refs(prefix), + slice: self.opt_pat_ref(slice), + suffix: self.pat_refs(suffix), + } + } + + _ => { + cx.tcx.sess.span_bug( + self.pat.span, + "unexpanded macro or bad constant etc"); + } + } + } + + fn variant_or_leaf<'a>(&self, + cx: &mut Cx<'a, 'tcx>, + subpatterns: Vec>>) + -> PatternKind> + { + let def = cx.tcx.def_map.borrow().get(&self.pat.id).unwrap().full_def(); + match def { + def::DefVariant(enum_id, variant_id, _) => { + let adt_def = cx.tcx.lookup_adt_def(enum_id); + if adt_def.variants.len() > 1 { + PatternKind::Variant { adt_def: adt_def, + variant_index: adt_def.variant_index_with_id(variant_id), + subpatterns: subpatterns } + } else { + PatternKind::Leaf { subpatterns: subpatterns } + } + } + + // NB: resolving to DefStruct means the struct *constructor*, + // not the struct as a type. + def::DefStruct(..) | def::DefTy(..) => { + PatternKind::Leaf { subpatterns: subpatterns } + } + + _ => { + cx.tcx.sess.span_bug( + self.pat.span, + &format!("inappropriate def for pattern: {:?}", def)); + } + } + } +} + +impl<'a,'tcx:'a> Mirror> for PatNode<'tcx> { + type Output = Pattern>; + + fn make_mirror(self, cx: &mut Cx<'a,'tcx>) -> Pattern> { + let kind = match self.pat.node { + hir::PatWild(..) => + PatternKind::Wild, + + hir::PatLit(ref lt) => + PatternKind::Constant { expr: lt.to_ref() }, + + hir::PatRange(ref begin, ref end) => + PatternKind::Range { lo: begin.to_ref(), + hi: end.to_ref() }, + + hir::PatEnum(..) | hir::PatIdent(..) | hir::PatQPath(..) 
+ if pat_is_resolved_const(&cx.tcx.def_map, self.pat) => + { + let def = cx.tcx.def_map.borrow().get(&self.pat.id).unwrap().full_def(); + match def { + def::DefConst(def_id) | def::DefAssociatedConst(def_id) => + match lookup_const_by_id(cx.tcx, def_id, Some(self.pat.id)) { + Some(const_expr) => + PatternKind::Constant { expr: const_expr.to_ref() }, + None => + cx.tcx.sess.span_bug( + self.pat.span, + &format!("cannot eval constant: {:?}", def_id)), + }, + _ => + cx.tcx.sess.span_bug( + self.pat.span, + &format!("def not a constant: {:?}", def)), + } + } + + hir::PatRegion(ref subpattern, _) | + hir::PatBox(ref subpattern) => { + PatternKind::Deref { subpattern: self.pat_ref(subpattern) } + } + + hir::PatVec(ref prefix, ref slice, ref suffix) => { + let ty = cx.tcx.node_id_to_type(self.pat.id); + match ty.sty { + ty::TyRef(_, mt) => + PatternKind::Deref { + subpattern: Pattern { + ty: mt.ty, + span: self.pat.span, + kind: self.slice_or_array_pattern(cx, mt.ty, prefix, + slice, suffix), + }.to_ref() + }, + + ty::TySlice(..) | + ty::TyArray(..) => + self.slice_or_array_pattern(cx, ty, prefix, slice, suffix), + + ref sty => + cx.tcx.sess.span_bug( + self.pat.span, + &format!("unexpanded type for vector pattern: {:?}", sty)), + } + } + + hir::PatTup(ref subpatterns) => { + let subpatterns = + subpatterns.iter() + .enumerate() + .map(|(i, subpattern)| FieldPatternRef { + field: Field::Indexed(i), + pattern: self.pat_ref(subpattern), + }) + .collect(); + + PatternKind::Leaf { subpatterns: subpatterns } + } + + hir::PatIdent(bm, ref ident, ref sub) + if pat_is_binding(&cx.tcx.def_map, self.pat) => + { + let id = match self.binding_map { + None => self.pat.id, + Some(ref map) => map[&ident.node], + }; + let var_ty = cx.tcx.node_id_to_type(self.pat.id); + let region = match var_ty.sty { + ty::TyRef(&r, _) => Some(r), + _ => None, + }; + let (mutability, mode) = match bm { + hir::BindByValue(hir::MutMutable) => + (Mutability::Mut, BindingMode::ByValue), + hir::BindByValue(hir::MutImmutable) => + (Mutability::Not, BindingMode::ByValue), + hir::BindByRef(hir::MutMutable) => + (Mutability::Not, BindingMode::ByRef(region.unwrap(), BorrowKind::Mut)), + hir::BindByRef(hir::MutImmutable) => + (Mutability::Not, BindingMode::ByRef(region.unwrap(), BorrowKind::Shared)), + }; + PatternKind::Binding { + mutability: mutability, + mode: mode, + name: ident.node, + var: id, + ty: var_ty, + subpattern: self.opt_pat_ref(sub), + } + } + + hir::PatIdent(..) => { + self.variant_or_leaf(cx, vec![]) + } + + hir::PatEnum(_, ref opt_subpatterns) => { + let subpatterns = + opt_subpatterns.iter() + .flat_map(|v| v.iter()) + .enumerate() + .map(|(i, field)| FieldPatternRef { + field: Field::Indexed(i), + pattern: self.pat_ref(field), + }) + .collect(); + self.variant_or_leaf(cx, subpatterns) + } + + hir::PatStruct(_, ref fields, _) => { + let subpatterns = + fields.iter() + .map(|field| FieldPatternRef { + field: Field::Named(field.node.ident.name), + pattern: self.pat_ref(&field.node.pat), + }) + .collect(); + self.variant_or_leaf(cx, subpatterns) + } + + hir::PatQPath(..) => { + cx.tcx.sess.span_bug( + self.pat.span, + "unexpanded macro or bad constant etc"); + } + }; + + let ty = cx.tcx.node_id_to_type(self.pat.id); + + Pattern { span: self.pat.span, + ty: ty, + kind: kind } + } +} diff --git a/src/librustc_mir/tcx/to_ref.rs b/src/librustc_mir/tcx/to_ref.rs new file mode 100644 index 00000000000..6d5e4c2e3fd --- /dev/null +++ b/src/librustc_mir/tcx/to_ref.rs @@ -0,0 +1,94 @@ +// Copyright 2015 The Rust Project Developers. 
See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +use hair::*; +use repr::*; + +use tcx::Cx; +use tcx::pattern::PatNode; +use tcx::rustc_front::hir; +use tcx::syntax::ptr::P; + +pub trait ToRef { + type Output; + fn to_ref(self) -> Self::Output; +} + +impl<'a,'tcx:'a> ToRef> for &'tcx hir::Expr { + type Output = ExprRef>; + + fn to_ref(self) -> ExprRef> { + ExprRef::Hair(self) + } +} + +impl<'a,'tcx:'a> ToRef> for &'tcx P { + type Output = ExprRef>; + + fn to_ref(self) -> ExprRef> { + ExprRef::Hair(&**self) + } +} + +impl<'a,'tcx:'a> ToRef> for Expr> { + type Output = ExprRef>; + + fn to_ref(self) -> ExprRef> { + ExprRef::Mirror(Box::new(self)) + } +} + +impl<'a,'tcx:'a> ToRef> for PatNode<'tcx> { + type Output = PatternRef>; + + fn to_ref(self) -> PatternRef> { + PatternRef::Hair(self) + } +} + +impl<'a,'tcx:'a> ToRef> for Pattern> { + type Output = PatternRef>; + + fn to_ref(self) -> PatternRef> { + PatternRef::Mirror(Box::new(self)) + } +} + +impl<'a,'tcx:'a,T,U> ToRef> for &'tcx Option + where &'tcx T: ToRef, Output=U> +{ + type Output = Option; + + fn to_ref(self) -> Option { + self.as_ref().map(|expr| expr.to_ref()) + } +} + +impl<'a,'tcx:'a,T,U> ToRef> for &'tcx Vec + where &'tcx T: ToRef, Output=U> +{ + type Output = Vec; + + fn to_ref(self) -> Vec { + self.iter().map(|expr| expr.to_ref()).collect() + } +} + +impl<'a,'tcx:'a> ToRef> for &'tcx hir::Field { + type Output = FieldExprRef>; + + fn to_ref(self) -> FieldExprRef> { + FieldExprRef { + name: Field::Named(self.ident.node.name), + expr: self.expr.to_ref() + } + } +} + diff --git a/src/libsyntax/feature_gate.rs b/src/libsyntax/feature_gate.rs index 0c0c68c89a1..1cc2eaa6278 100644 --- a/src/libsyntax/feature_gate.rs +++ b/src/libsyntax/feature_gate.rs @@ -272,13 +272,20 @@ pub const KNOWN_ATTRIBUTES: &'static [(&'static str, AttributeType, AttributeGat feature")), ("rustc_variance", Normal, Gated("rustc_attrs", "the `#[rustc_variance]` attribute \ - is an experimental feature")), + is just used for rustc unit tests \ + and will never be stable")), ("rustc_error", Whitelisted, Gated("rustc_attrs", "the `#[rustc_error]` attribute \ - is an experimental feature")), + is just used for rustc unit tests \ + and will never be stable")), ("rustc_move_fragments", Normal, Gated("rustc_attrs", "the `#[rustc_move_fragments]` attribute \ - is an experimental feature")), + is just used for rustc unit tests \ + and will never be stable")), + ("rustc_mir", Normal, Gated("rustc_attrs", + "the `#[rustc_mir]` attribute \ + is just used for rustc unit tests \ + and will never be stable")), ("allow_internal_unstable", Normal, Gated("allow_internal_unstable", EXPLAIN_ALLOW_INTERNAL_UNSTABLE)), diff --git a/src/test/compile-fail/feature-gate-rustc-attrs.rs b/src/test/compile-fail/feature-gate-rustc-attrs.rs index 125cec6183f..bb5b70829a1 100644 --- a/src/test/compile-fail/feature-gate-rustc-attrs.rs +++ b/src/test/compile-fail/feature-gate-rustc-attrs.rs @@ -14,7 +14,7 @@ #[rustc_variance] //~ ERROR the `#[rustc_variance]` attribute is just used for rustc unit tests and will never be stable #[rustc_error] //~ ERROR the `#[rustc_error]` attribute is just used for rustc unit tests and will never be stable -#[rustc_move_fragments] //~ ERROR the `#[rustc_move_fragments]` attribute is an 
experimental feature
+#[rustc_move_fragments] //~ ERROR the `#[rustc_move_fragments]` attribute is just used for rustc unit tests and will never be stable
+#[rustc_foo]
+//~^ ERROR unless otherwise specified, attributes with the prefix `rustc_` are reserved for internal compiler diagnostics