        ./librustc/middle/trans/cleanup.rs

    git branch:    * master           5200215 auto merge of #14035 : alexcrichton/rust/experimental, r=huonw
    modified:    Fri May  9 13:02:28 2014
   1  // Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
   2  // file at the top-level directory of this distribution and at
   3  // http://rust-lang.org/COPYRIGHT.
   4  //
   5  // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
   6  // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
   7  // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
   8  // option. This file may not be copied, modified, or distributed
   9  // except according to those terms.
  10  
  11  /*!
  12   * Code pertaining to cleanup of temporaries as well as execution of
  13   * drop glue. See discussion in `doc.rs` for a high-level summary.
  14   */
  15  
  16  use lib::llvm::{BasicBlockRef, ValueRef};
  17  use middle::lang_items::{EhPersonalityLangItem};
  18  use middle::trans::base;
  19  use middle::trans::build;
  20  use middle::trans::callee;
  21  use middle::trans::common;
  22  use middle::trans::common::{Block, FunctionContext, ExprId};
  23  use middle::trans::glue;
  24  use middle::trans::type_::Type;
  25  use middle::ty;
  26  use syntax::ast;
  27  use util::ppaux::Repr;
  28  
  29  
  30  pub struct CleanupScope<'a> {
  31      // The id of this cleanup scope. If the id is None,
  32      // this is a *temporary scope* that is pushed during trans to
  33      // cleanup miscellaneous garbage that trans may generate whose
  34      // lifetime is a subset of some expression.  See module doc for
  35      // more details.
  36      kind: CleanupScopeKind<'a>,
  37  
  38      // Cleanups to run upon scope exit.
  39      cleanups: Vec<Box<Cleanup>>,
  40  
  41      cached_early_exits: Vec<CachedEarlyExit>,
  42      cached_landing_pad: Option<BasicBlockRef>,
  43  }
  44  
  45  pub struct CustomScopeIndex {
  46      index: uint
  47  }
  48  
  49  pub static EXIT_BREAK: uint = 0;
  50  pub static EXIT_LOOP: uint = 1;
  51  pub static EXIT_MAX: uint = 2;
  52  
  53  pub enum CleanupScopeKind<'a> {
  54      CustomScopeKind,
  55      AstScopeKind(ast::NodeId),
  56      LoopScopeKind(ast::NodeId, [&'a Block<'a>, ..EXIT_MAX])
  57  }
  58  
  59  #[deriving(Eq)]
  60  pub enum EarlyExitLabel {
  61      UnwindExit,
  62      ReturnExit,
  63      LoopExit(ast::NodeId, uint)
  64  }
  65  
  66  pub struct CachedEarlyExit {
  67      label: EarlyExitLabel,
  68      cleanup_block: BasicBlockRef,
  69  }
  70  
  71  pub trait Cleanup {
  72      fn clean_on_unwind(&self) -> bool;
  73      fn trans<'a>(&self, bcx: &'a Block<'a>) -> &'a Block<'a>;
  74  }
  75  
  76  pub enum ScopeId {
  77      AstScope(ast::NodeId),
  78      CustomScope(CustomScopeIndex)
  79  }
  80  
  81  impl<'a> CleanupMethods<'a> for FunctionContext<'a> {
  82      fn push_ast_cleanup_scope(&self, id: ast::NodeId) {
  83          /*!
  84           * Invoked when we start to trans the code contained
  85           * within a new cleanup scope.
  86           */
  87  
  88          debug!("push_ast_cleanup_scope({})",
  89                 self.ccx.tcx.map.node_to_str(id));
  90  
  91          // FIXME(#2202) -- currently closure bodies have a parent
  92          // region, which messes up the assertion below, since there
  93          // are no cleanup scopes on the stack at the start of
  94          // trans'ing a closure body.  I think though that this should
  95          // eventually be fixed by closure bodies not having a parent
  96          // region, though that's a touch unclear, and it might also be
  97          // better just to narrow this assertion more (i.e., by
  98          // excluding id's that correspond to closure bodies only). For
  99          // now we just say that if there is already an AST scope on the stack,
 100          // this new AST scope had better be its immediate child.
 101          let top_scope = self.top_ast_scope();
 102          if top_scope.is_some() {
 103              assert_eq!(self.ccx.tcx.region_maps.opt_encl_scope(id), top_scope);
 104          }
 105  
 106          self.push_scope(CleanupScope::new(AstScopeKind(id)));
 107      }
 108  
 109      fn push_loop_cleanup_scope(&self,
 110                                 id: ast::NodeId,
 111                                 exits: [&'a Block<'a>, ..EXIT_MAX]) {
 112          debug!("push_loop_cleanup_scope({})",
 113                 self.ccx.tcx.map.node_to_str(id));
 114          assert_eq!(Some(id), self.top_ast_scope());
 115  
 116          self.push_scope(CleanupScope::new(LoopScopeKind(id, exits)));
 117      }
 118  
 119      fn push_custom_cleanup_scope(&self) -> CustomScopeIndex {
 120          let index = self.scopes_len();
 121          debug!("push_custom_cleanup_scope(): {}", index);
 122          self.push_scope(CleanupScope::new(CustomScopeKind));
 123          CustomScopeIndex { index: index }
 124      }
 125  
 126      fn pop_and_trans_ast_cleanup_scope(&self,
 127                                         bcx: &'a Block<'a>,
 128                                         cleanup_scope: ast::NodeId)
 129                                         -> &'a Block<'a> {
 130          /*!
 131           * Removes the cleanup scope for id `cleanup_scope`, which
 132           * must be at the top of the cleanup stack, and generates the
 133           * code to do its cleanups for normal exit.
 134           */
 135  
 136          debug!("pop_and_trans_ast_cleanup_scope({})",
 137                 self.ccx.tcx.map.node_to_str(cleanup_scope));
 138  
 139          assert!(self.top_scope(|s| s.kind.is_ast_with_id(cleanup_scope)));
 140  
 141          let scope = self.pop_scope();
 142          self.trans_scope_cleanups(bcx, &scope)
 143  
 144      }
 145  
 146      fn pop_loop_cleanup_scope(&self,
 147                                cleanup_scope: ast::NodeId) {
 148          /*!
 149           * Removes the loop cleanup scope for id `cleanup_scope`, which
 150           * must be at the top of the cleanup stack. Does not generate
 151           * any cleanup code, since loop scopes should exit by
 152           * branching to a block generated by `normal_exit_block`.
 153           */
 154  
 155          debug!("pop_loop_cleanup_scope({})",
 156                 self.ccx.tcx.map.node_to_str(cleanup_scope));
 157  
 158          assert!(self.top_scope(|s| s.kind.is_loop_with_id(cleanup_scope)));
 159  
 160          let _ = self.pop_scope();
 161      }
 162  
 163      fn pop_custom_cleanup_scope(&self,
 164                                  custom_scope: CustomScopeIndex) {
 165          /*!
 166           * Removes the top cleanup scope from the stack without
 167           * executing its cleanups. The top cleanup scope must
 168           * be the temporary scope `custom_scope`.
 169           */
 170  
 171          debug!("pop_custom_cleanup_scope({})", custom_scope.index);
 172          assert!(self.is_valid_to_pop_custom_scope(custom_scope));
 173          let _ = self.pop_scope();
 174      }
 175  
 176      fn pop_and_trans_custom_cleanup_scope(&self,
 177                                          bcx: &'a Block<'a>,
 178                                          custom_scope: CustomScopeIndex)
 179                                          -> &'a Block<'a> {
 180          /*!
 181           * Removes the top cleanup scope from the stack, which must be
 182           * a temporary scope, and generates the code to do its
 183           * cleanups for normal exit.
 184           */
 185  
 186          debug!("pop_and_trans_custom_cleanup_scope({:?})", custom_scope);
 187          assert!(self.is_valid_to_pop_custom_scope(custom_scope));
 188  
 189          let scope = self.pop_scope();
 190          self.trans_scope_cleanups(bcx, &scope)
 191      }
 192  
 193      fn top_loop_scope(&self) -> ast::NodeId {
 194          /*!
 195           * Returns the id of the top-most loop scope
 196           */
 197  
 198          for scope in self.scopes.borrow().iter().rev() {
 199              match scope.kind {
 200                  LoopScopeKind(id, _) => {
 201                      return id;
 202                  }
 203                  _ => {}
 204              }
 205          }
 206          self.ccx.sess().bug("no loop scope found");
 207      }
 208  
 209      fn normal_exit_block(&'a self,
 210                           cleanup_scope: ast::NodeId,
 211                           exit: uint) -> BasicBlockRef {
 212          /*!
 213           * Returns a block to branch to which will perform all pending
 214           * cleanups and then break/continue (depending on `exit`) out
 215           * of the loop with id `cleanup_scope`
 216           */
 217  
 218          self.trans_cleanups_to_exit_scope(LoopExit(cleanup_scope, exit))
 219      }
 220  
 221      fn return_exit_block(&'a self) -> BasicBlockRef {
 222          /*!
 223           * Returns a block to branch to which will perform all pending
 224           * cleanups and then return from this function
 225           */
 226  
 227          self.trans_cleanups_to_exit_scope(ReturnExit)
 228      }
 229  
 230      fn schedule_drop_mem(&self,
 231                           cleanup_scope: ScopeId,
 232                           val: ValueRef,
 233                           ty: ty::t) {
 234          /*!
 235           * Schedules a (deep) drop of `val`, which is a pointer to an
 236           * instance of `ty`
 237           */
 238  
 239          if !ty::type_needs_drop(self.ccx.tcx(), ty) { return; }
 240          let drop = box DropValue {
 241              is_immediate: false,
 242              on_unwind: ty::type_needs_unwind_cleanup(self.ccx.tcx(), ty),
 243              val: val,
 244              ty: ty
 245          };
 246  
 247          debug!("schedule_drop_mem({:?}, val={}, ty={})",
 248                 cleanup_scope,
 249                 self.ccx.tn.val_to_str(val),
 250                 ty.repr(self.ccx.tcx()));
 251  
 252          self.schedule_clean(cleanup_scope, drop as Box<Cleanup>);
 253      }
 254  
 255      fn schedule_drop_immediate(&self,
 256                                 cleanup_scope: ScopeId,
 257                                 val: ValueRef,
 258                                 ty: ty::t) {
 259          /*!
 260           * Schedules a (deep) drop of `val`, which is an instance of `ty`
 261           */
 262  
 263          if !ty::type_needs_drop(self.ccx.tcx(), ty) { return; }
 264          let drop = box DropValue {
 265              is_immediate: true,
 266              on_unwind: ty::type_needs_unwind_cleanup(self.ccx.tcx(), ty),
 267              val: val,
 268              ty: ty
 269          };
 270  
 271          debug!("schedule_drop_immediate({:?}, val={}, ty={})",
 272                 cleanup_scope,
 273                 self.ccx.tn.val_to_str(val),
 274                 ty.repr(self.ccx.tcx()));
 275  
 276          self.schedule_clean(cleanup_scope, drop as Box<Cleanup>);
 277      }
 278  
 279      fn schedule_free_value(&self,
 280                             cleanup_scope: ScopeId,
 281                             val: ValueRef,
 282                             heap: Heap) {
 283          /*!
 284           * Schedules a call to `free(val)`. Note that this is a shallow
 285           * operation.
 286           */
 287  
 288          let drop = box FreeValue { ptr: val, heap: heap };
 289  
 290          debug!("schedule_free_value({:?}, val={}, heap={:?})",
 291                 cleanup_scope,
 292                 self.ccx.tn.val_to_str(val),
 293                 heap);
 294  
 295          self.schedule_clean(cleanup_scope, drop as Box<Cleanup>);
 296      }
 297  
 298      fn schedule_clean(&self,
 299                        cleanup_scope: ScopeId,
 300                        cleanup: Box<Cleanup>) {
 301          match cleanup_scope {
 302              AstScope(id) => self.schedule_clean_in_ast_scope(id, cleanup),
 303              CustomScope(id) => self.schedule_clean_in_custom_scope(id, cleanup),
 304          }
 305      }
 306  
 307      fn schedule_clean_in_ast_scope(&self,
 308                                     cleanup_scope: ast::NodeId,
 309                                     cleanup: Box<Cleanup>) {
 310          /*!
 311           * Schedules a cleanup to occur upon exit from `cleanup_scope`.
 312           * If `cleanup_scope` is not provided, then the cleanup is scheduled
 313           * in the topmost scope, which must be a temporary scope.
 314           */
 315  
 316          debug!("schedule_clean_in_ast_scope(cleanup_scope={:?})",
 317                 cleanup_scope);
 318  
 319          for scope in self.scopes.borrow_mut().mut_iter().rev() {
 320              if scope.kind.is_ast_with_id(cleanup_scope) {
 321                  scope.cleanups.push(cleanup);
 322                  scope.clear_cached_exits();
 323                  return;
 324              } else {
 325                  // will be adding a cleanup to some enclosing scope
 326                  scope.clear_cached_exits();
 327              }
 328          }
 329  
 330          self.ccx.sess().bug(
 331              format!("no cleanup scope {} found",
 332                      self.ccx.tcx.map.node_to_str(cleanup_scope)));
 333      }
 334  
 335      fn schedule_clean_in_custom_scope(&self,
 336                                       custom_scope: CustomScopeIndex,
 337                                       cleanup: Box<Cleanup>) {
 338          /*!
 339           * Schedules a cleanup to occur in the top-most scope,
 340           * which must be a temporary scope.
 341           */
 342  
 343          debug!("schedule_clean_in_custom_scope(custom_scope={})",
 344                 custom_scope.index);
 345  
 346          assert!(self.is_valid_custom_scope(custom_scope));
 347  
 348          let mut scopes = self.scopes.borrow_mut();
 349          let scope = scopes.get_mut(custom_scope.index);
 350          scope.cleanups.push(cleanup);
 351          scope.clear_cached_exits();
 352      }
 353  
 354      fn needs_invoke(&self) -> bool {
 355          /*!
 356           * Returns true if there are pending cleanups that should
 357           * execute on failure.
 358           */
 359  
 360          self.scopes.borrow().iter().rev().any(|s| s.needs_invoke())
 361      }
 362  
 363      fn get_landing_pad(&'a self) -> BasicBlockRef {
 364          /*!
 365           * Returns a basic block to branch to in the event of a failure.
 366           * This block will run the failure cleanups and eventually
 367           * invoke the LLVM `Resume` instruction.
 368           */
 369  
 370          let _icx = base::push_ctxt("get_landing_pad");
 371  
 372          debug!("get_landing_pad");
 373  
 374          let orig_scopes_len = self.scopes_len();
 375          assert!(orig_scopes_len > 0);
 376  
 377          // Remove any scopes that do not have cleanups on failure:
 378          let mut popped_scopes = vec!();
 379          while !self.top_scope(|s| s.needs_invoke()) {
 380              debug!("top scope does not need invoke");
 381              popped_scopes.push(self.pop_scope());
 382          }
 383  
 384          // Check for an existing landing pad in the new topmost scope:
 385          let llbb = self.get_or_create_landing_pad();
 386  
 387          // Push the scopes we removed back on:
 388          loop {
 389              match popped_scopes.pop() {
 390                  Some(scope) => self.push_scope(scope),
 391                  None => break
 392              }
 393          }
 394  
 395          assert_eq!(self.scopes_len(), orig_scopes_len);
 396  
 397          return llbb;
 398      }
 399  }
 400  
 401  impl<'a> CleanupHelperMethods<'a> for FunctionContext<'a> {
 402      fn top_ast_scope(&self) -> Option<ast::NodeId> {
 403          /*!
 404           * Returns the id of the current top-most AST scope, if any.
 405           */
 406          for scope in self.scopes.borrow().iter().rev() {
 407              match scope.kind {
 408                  CustomScopeKind | LoopScopeKind(..) => {}
 409                  AstScopeKind(i) => {
 410                      return Some(i);
 411                  }
 412              }
 413          }
 414          None
 415      }
 416  
 417      fn top_nonempty_cleanup_scope(&self) -> Option<uint> {
 418          self.scopes.borrow().iter().rev().position(|s| !s.cleanups.is_empty())
 419      }
 420  
 421      fn is_valid_to_pop_custom_scope(&self, custom_scope: CustomScopeIndex) -> bool {
 422          self.is_valid_custom_scope(custom_scope) &&
 423              custom_scope.index == self.scopes.borrow().len() - 1
 424      }
 425  
 426      fn is_valid_custom_scope(&self, custom_scope: CustomScopeIndex) -> bool {
 427          let scopes = self.scopes.borrow();
 428          custom_scope.index < scopes.len() &&
 429              scopes.get(custom_scope.index).kind.is_temp()
 430      }
 431  
 432      fn trans_scope_cleanups(&self, // cannot borrow self, will recurse
 433                              bcx: &'a Block<'a>,
 434                              scope: &CleanupScope) -> &'a Block<'a> {
 435          /*! Generates the cleanups for `scope` into `bcx` */
 436  
 437          let mut bcx = bcx;
 438          if !bcx.unreachable.get() {
 439              for cleanup in scope.cleanups.iter().rev() {
 440                  bcx = cleanup.trans(bcx);
 441              }
 442          }
 443          bcx
 444      }
 445  
 446      fn scopes_len(&self) -> uint {
 447          self.scopes.borrow().len()
 448      }
 449  
 450      fn push_scope(&self, scope: CleanupScope<'a>) {
 451          self.scopes.borrow_mut().push(scope)
 452      }
 453  
 454      fn pop_scope(&self) -> CleanupScope<'a> {
 455          debug!("popping cleanup scope {}, {} scopes remaining",
 456                 self.top_scope(|s| s.block_name("")),
 457                 self.scopes_len() - 1);
 458  
 459          self.scopes.borrow_mut().pop().unwrap()
 460      }
 461  
 462      fn top_scope<R>(&self, f: |&CleanupScope<'a>| -> R) -> R {
 463          f(self.scopes.borrow().last().unwrap())
 464      }
 465  
 466      fn trans_cleanups_to_exit_scope(&'a self,
 467                                      label: EarlyExitLabel)
 468                                      -> BasicBlockRef {
 469          /*!
 470           * Used when the caller wishes to jump to an early exit, such
 471           * as a return, break, continue, or unwind. This function will
 472           * generate all cleanups between the top of the stack and the
 473           * exit `label` and return a basic block that the caller can
 474           * branch to.
 475           *
 476           * For example, if the current stack of cleanups were as follows:
 477           *
 478           *      AST 22
 479           *      Custom 1
 480           *      AST 23
 481           *      Loop 23
 482           *      Custom 2
 483           *      AST 24
 484           *
 485           * and the `label` specifies a break from `Loop 23`, then this
 486           * function would generate a series of basic blocks as follows:
 487           *
 488           *      Cleanup(AST 24) -> Cleanup(Custom 2) -> break_blk
 489           *
 490           * where `break_blk` is the block specified in `Loop 23` as
 491           * the target for breaks. The return value would be the first
 492           * basic block in that sequence (`Cleanup(AST 24)`). The
 493           * caller could then branch to `Cleanup(AST 24)` and it will
 494           * perform all cleanups and finally branch to the `break_blk`.
 495           */
 496  
 497          debug!("trans_cleanups_to_exit_scope label={:?} scopes={}",
 498                 label, self.scopes_len());
 499  
 500          let orig_scopes_len = self.scopes_len();
 501          let mut prev_llbb;
 502          let mut popped_scopes = vec!();
 503  
 504          // First we pop off all the cleanup stacks that are
 505          // traversed until the exit is reached, pushing them
 506          // onto the side vector `popped_scopes`. No code is
 507          // generated at this time.
 508          //
 509          // So, continuing the example from above, we would wind up
 510          // with a `popped_scopes` vector of `[AST 24, Custom 2]`.
 511          // (Presuming that there are no cached exits)
 512          loop {
 513              if self.scopes_len() == 0 {
 514                  match label {
 515                      UnwindExit => {
 516                          // Generate a block that will `Resume`.
 517                          let prev_bcx = self.new_block(true, "resume", None);
 518                          let personality = self.personality.get().expect(
 519                              "create_landing_pad() should have set this");
 520                          build::Resume(prev_bcx,
 521                                        build::Load(prev_bcx, personality));
 522                          prev_llbb = prev_bcx.llbb;
 523                          break;
 524                      }
 525  
 526                      ReturnExit => {
 527                          prev_llbb = self.get_llreturn();
 528                          break;
 529                      }
 530  
 531                      LoopExit(id, _) => {
 532                          self.ccx.sess().bug(format!(
 533                                  "cannot exit from scope {:?}, \
 534                                  not in scope", id));
 535                      }
 536                  }
 537              }
 538  
 539              // Check if we have already cached the unwinding of this
 540              // scope for this label. If so, we can stop popping scopes
 541              // and branch to the cached label, since it contains the
 542              // cleanups for any subsequent scopes.
 543              match self.top_scope(|s| s.cached_early_exit(label)) {
 544                  Some(cleanup_block) => {
 545                      prev_llbb = cleanup_block;
 546                      break;
 547                  }
 548                  None => { }
 549              }
 550  
 551              // Pop off the scope, since we will be generating
 552              // unwinding code for it. If we are searching for a loop exit,
 553              // and this scope is that loop, then stop popping and set
 554              // `prev_llbb` to the appropriate exit block from the loop.
 555              popped_scopes.push(self.pop_scope());
 556              let scope = popped_scopes.last().unwrap();
 557              match label {
 558                  UnwindExit | ReturnExit => { }
 559                  LoopExit(id, exit) => {
 560                      match scope.kind.early_exit_block(id, exit) {
 561                          Some(exitllbb) => {
 562                              prev_llbb = exitllbb;
 563                              break;
 564                          }
 565  
 566                          None => { }
 567                      }
 568                  }
 569              }
 570          }
 571  
 572          debug!("trans_cleanups_to_exit_scope: popped {} scopes",
 573                 popped_scopes.len());
 574  
 575          // Now push the popped scopes back on. As we go,
 576          // we track in `prev_llbb` the exit to which this scope
 577          // should branch when it's done.
 578          //
 579          // So, continuing with our example, we will start out with
 580          // `prev_llbb` being set to `break_blk` (or possibly a cached
 581          // early exit). We will then pop the scopes from `popped_scopes`
 582          // and generate a basic block for each one, prepending it in the
 583          // series and updating `prev_llbb`. So we begin by popping `Custom 2`
 584          // and generating `Cleanup(Custom 2)`. We make `Cleanup(Custom 2)`
 585          // branch to `prev_llbb == break_blk`, giving us a sequence like:
 586          //
 587          //     Cleanup(Custom 2) -> prev_llbb
 588          //
 589          // We then pop `AST 24` and repeat the process, giving us the sequence:
 590          //
 591          //     Cleanup(AST 24) -> Cleanup(Custom 2) -> prev_llbb
 592          //
 593          // At this point, `popped_scopes` is empty, and so the final block
 594          // that we return to the user is `Cleanup(AST 24)`.
 595          while !popped_scopes.is_empty() {
 596              let mut scope = popped_scopes.pop().unwrap();
 597  
 598              if scope.cleanups.iter().any(|c| cleanup_is_suitable_for(*c, label))
 599              {
 600                  let name = scope.block_name("clean");
 601                  debug!("generating cleanups for {}", name);
 602                  let bcx_in = self.new_block(label.is_unwind(), name, None);
 603                  let mut bcx_out = bcx_in;
 604                  for cleanup in scope.cleanups.iter().rev() {
 605                      if cleanup_is_suitable_for(*cleanup, label) {
 606                          bcx_out = cleanup.trans(bcx_out);
 607                      }
 608                  }
 609                  build::Br(bcx_out, prev_llbb);
 610                  prev_llbb = bcx_in.llbb;
 611              } else {
 612                  debug!("no suitable cleanups in {}",
 613                         scope.block_name("clean"));
 614              }
 615  
 616              scope.add_cached_early_exit(label, prev_llbb);
 617              self.push_scope(scope);
 618          }
 619  
 620          debug!("trans_cleanups_to_exit_scope: prev_llbb={}", prev_llbb);
 621  
 622          assert_eq!(self.scopes_len(), orig_scopes_len);
 623          prev_llbb
 624      }
 625  
 626      fn get_or_create_landing_pad(&'a self) -> BasicBlockRef {
 627          /*!
 628           * Creates a landing pad for the top scope, if one does not
 629           * exist.  The landing pad will perform all cleanups necessary
 630           * for an unwind and then `resume` to continue error
 631           * propagation:
 632           *
 633           *     landing_pad -> ... cleanups ... -> [resume]
 634           *
 635           * (The cleanups and resume instruction are created by
 636           * `trans_cleanups_to_exit_scope()`, not in this function
 637           * itself.)
 638           */
 639  
 640          let pad_bcx;
 641  
 642          debug!("get_or_create_landing_pad");
 643  
 644          // Check if a landing pad block exists; if not, create one.
 645          {
 646              let mut scopes = self.scopes.borrow_mut();
 647              let last_scope = scopes.mut_last().unwrap();
 648              match last_scope.cached_landing_pad {
 649                  Some(llbb) => { return llbb; }
 650                  None => {
 651                      let name = last_scope.block_name("unwind");
 652                      pad_bcx = self.new_block(true, name, None);
 653                      last_scope.cached_landing_pad = Some(pad_bcx.llbb);
 654                  }
 655              }
 656          }
 657  
 658          // The landing pad return type (the type being propagated). Not sure what
 659          // this represents but it's determined by the personality function and
 660          // this is what the EH proposal example uses.
 661          let llretty = Type::struct_(self.ccx,
 662                                      [Type::i8p(self.ccx), Type::i32(self.ccx)],
 663                                      false);
 664  
 665          // The exception handling personality function.
 666          let def_id = common::langcall(pad_bcx, None, "", EhPersonalityLangItem);
 667          let llpersonality = callee::trans_fn_ref(pad_bcx, def_id, ExprId(0));
 668  
 669          // The only landing pad clause will be 'cleanup'
 670          let llretval = build::LandingPad(pad_bcx, llretty, llpersonality, 1u);
 671  
 672          // The landing pad block is a cleanup
 673          build::SetCleanup(pad_bcx, llretval);
 674  
 675          // We store the retval in a function-central alloca, so that calls to
 676          // Resume can find it.
 677          match self.personality.get() {
 678              Some(addr) => {
 679                  build::Store(pad_bcx, llretval, addr);
 680              }
 681              None => {
 682                  let addr = base::alloca(pad_bcx, common::val_ty(llretval), "");
 683                  self.personality.set(Some(addr));
 684                  build::Store(pad_bcx, llretval, addr);
 685              }
 686          }
 687  
 688          // Generate the cleanup block and branch to it.
 689          let cleanup_llbb = self.trans_cleanups_to_exit_scope(UnwindExit);
 690          build::Br(pad_bcx, cleanup_llbb);
 691  
 692          return pad_bcx.llbb;
 693      }
 694  }
 695  
 696  impl<'a> CleanupScope<'a> {
 697      fn new(kind: CleanupScopeKind<'a>) -> CleanupScope<'a> {
 698          CleanupScope {
 699              kind: kind,
 700              cleanups: vec!(),
 701              cached_early_exits: vec!(),
 702              cached_landing_pad: None,
 703          }
 704      }
 705  
 706      fn clear_cached_exits(&mut self) {
 707          self.cached_early_exits = vec!();
 708          self.cached_landing_pad = None;
 709      }
 710  
 711      fn cached_early_exit(&self,
 712                          label: EarlyExitLabel)
 713                           -> Option<BasicBlockRef> {
 714          self.cached_early_exits.iter().
 715              find(|e| e.label == label).
 716              map(|e| e.cleanup_block)
 717      }
 718  
 719      fn add_cached_early_exit(&mut self,
 720                               label: EarlyExitLabel,
 721                               blk: BasicBlockRef) {
 722          self.cached_early_exits.push(
 723              CachedEarlyExit { label: label,
 724                                cleanup_block: blk });
 725      }
 726  
 727      fn needs_invoke(&self) -> bool {
 728          /*! True if this scope has cleanups for use during unwinding */
 729  
 730          self.cached_landing_pad.is_some() ||
 731              self.cleanups.iter().any(|c| c.clean_on_unwind())
 732      }
 733  
 734      fn block_name(&self, prefix: &str) -> ~str {
 735          /*!
 736           * Returns a suitable name to use for the basic block that
 737           * handles this cleanup scope
 738           */
 739  
 740          match self.kind {
 741              CustomScopeKind => format!("{}_custom_", prefix),
 742              AstScopeKind(id) => format!("{}_ast_{}_", prefix, id),
 743              LoopScopeKind(id, _) => format!("{}_loop_{}_", prefix, id),
 744          }
 745      }
 746  }
 747  
 748  impl<'a> CleanupScopeKind<'a> {
 749      fn is_temp(&self) -> bool {
 750          match *self {
 751              CustomScopeKind => true,
 752              LoopScopeKind(..) | AstScopeKind(..) => false,
 753          }
 754      }
 755  
 756      fn is_ast_with_id(&self, id: ast::NodeId) -> bool {
 757          match *self {
 758              CustomScopeKind | LoopScopeKind(..) => false,
 759              AstScopeKind(i) => i == id
 760          }
 761      }
 762  
 763      fn is_loop_with_id(&self, id: ast::NodeId) -> bool {
 764          match *self {
 765              CustomScopeKind | AstScopeKind(..) => false,
 766              LoopScopeKind(i, _) => i == id
 767          }
 768      }
 769  
 770      fn early_exit_block(&self,
 771                          id: ast::NodeId,
 772                          exit: uint) -> Option<BasicBlockRef> {
 773          /*!
 774           * If this is a loop scope with id `id`, return the early
 775           * exit block `exit`, else `None`
 776           */
 777  
 778          match *self {
 779              LoopScopeKind(i, ref exits) if id == i => Some(exits[exit].llbb),
 780              _ => None,
 781          }
 782      }
 783  }
 784  
 785  impl EarlyExitLabel {
 786      fn is_unwind(&self) -> bool {
 787          match *self {
 788              UnwindExit => true,
 789              _ => false
 790          }
 791      }
 792  }
 793  
 794  ///////////////////////////////////////////////////////////////////////////
 795  // Cleanup types
 796  
 797  pub struct DropValue {
 798      is_immediate: bool,
 799      on_unwind: bool,
 800      val: ValueRef,
 801      ty: ty::t,
 802  }
 803  
 804  impl Cleanup for DropValue {
 805      fn clean_on_unwind(&self) -> bool {
 806          self.on_unwind
 807      }
 808  
 809      fn trans<'a>(&self, bcx: &'a Block<'a>) -> &'a Block<'a> {
 810          if self.is_immediate {
 811              glue::drop_ty_immediate(bcx, self.val, self.ty)
 812          } else {
 813              glue::drop_ty(bcx, self.val, self.ty)
 814          }
 815      }
 816  }
 817  
 818  pub enum Heap {
 819      HeapManaged,
 820      HeapExchange
 821  }
 822  
 823  pub struct FreeValue {
 824      ptr: ValueRef,
 825      heap: Heap,
 826  }
 827  
 828  impl Cleanup for FreeValue {
 829      fn clean_on_unwind(&self) -> bool {
 830          true
 831      }
 832  
 833      fn trans<'a>(&self, bcx: &'a Block<'a>) -> &'a Block<'a> {
 834          match self.heap {
 835              HeapManaged => {
 836                  glue::trans_free(bcx, self.ptr)
 837              }
 838              HeapExchange => {
 839                  glue::trans_exchange_free(bcx, self.ptr)
 840              }
 841          }
 842      }
 843  }
 844  
 845  pub fn temporary_scope(tcx: &ty::ctxt,
 846                        id: ast::NodeId)
 847                         -> ScopeId {
 848      match tcx.region_maps.temporary_scope(id) {
 849          Some(scope) => {
 850              let r = AstScope(scope);
 851              debug!("temporary_scope({}) = {:?}", id, r);
 852              r
 853          }
 854          None => {
 855              tcx.sess.bug(format!("no temporary scope available for expr {}", id))
 856          }
 857      }
 858  }
 859  
 860  pub fn var_scope(tcx: &ty::ctxt,
 861                  id: ast::NodeId)
 862                   -> ScopeId {
 863      let r = AstScope(tcx.region_maps.var_scope(id));
 864      debug!("var_scope({}) = {:?}", id, r);
 865      r
 866  }
 867  
 868  fn cleanup_is_suitable_for(c: &Cleanup,
 869                            label: EarlyExitLabel) -> bool {
 870      !label.is_unwind() || c.clean_on_unwind()
 871  }
 872  
 873  ///////////////////////////////////////////////////////////////////////////
 874  // These traits just exist to put the methods into this file.
 875  
 876  pub trait CleanupMethods<'a> {
 877      fn push_ast_cleanup_scope(&self, id: ast::NodeId);
 878      fn push_loop_cleanup_scope(&self,
 879                                     id: ast::NodeId,
 880                                     exits: [&'a Block<'a>, ..EXIT_MAX]);
 881      fn push_custom_cleanup_scope(&self) -> CustomScopeIndex;
 882      fn pop_and_trans_ast_cleanup_scope(&self,
 883                                                bcx: &'a Block<'a>,
 884                                                cleanup_scope: ast::NodeId)
 885                                                -> &'a Block<'a>;
 886      fn pop_loop_cleanup_scope(&self,
 887                                cleanup_scope: ast::NodeId);
 888      fn pop_custom_cleanup_scope(&self,
 889                                  custom_scope: CustomScopeIndex);
 890      fn pop_and_trans_custom_cleanup_scope(&self,
 891                                            bcx: &'a Block<'a>,
 892                                            custom_scope: CustomScopeIndex)
 893                                            -> &'a Block<'a>;
 894      fn top_loop_scope(&self) -> ast::NodeId;
 895      fn normal_exit_block(&'a self,
 896                           cleanup_scope: ast::NodeId,
 897                           exit: uint) -> BasicBlockRef;
 898      fn return_exit_block(&'a self) -> BasicBlockRef;
 899      fn schedule_drop_mem(&self,
 900                           cleanup_scope: ScopeId,
 901                           val: ValueRef,
 902                           ty: ty::t);
 903      fn schedule_drop_immediate(&self,
 904                                 cleanup_scope: ScopeId,
 905                                 val: ValueRef,
 906                                 ty: ty::t);
 907      fn schedule_free_value(&self,
 908                             cleanup_scope: ScopeId,
 909                             val: ValueRef,
 910                             heap: Heap);
 911      fn schedule_clean(&self,
 912                        cleanup_scope: ScopeId,
 913                        cleanup: Box<Cleanup>);
 914      fn schedule_clean_in_ast_scope(&self,
 915                                     cleanup_scope: ast::NodeId,
 916                                     cleanup: Box<Cleanup>);
 917      fn schedule_clean_in_custom_scope(&self,
 918                                      custom_scope: CustomScopeIndex,
 919                                      cleanup: Box<Cleanup>);
 920      fn needs_invoke(&self) -> bool;
 921      fn get_landing_pad(&'a self) -> BasicBlockRef;
 922  }
 923  
 924  trait CleanupHelperMethods<'a> {
 925      fn top_ast_scope(&self) -> Option<ast::NodeId>;
 926      fn top_nonempty_cleanup_scope(&self) -> Option<uint>;
 927      fn is_valid_to_pop_custom_scope(&self, custom_scope: CustomScopeIndex) -> bool;
 928      fn is_valid_custom_scope(&self, custom_scope: CustomScopeIndex) -> bool;
 929      fn trans_scope_cleanups(&self,
 930                              bcx: &'a Block<'a>,
 931                              scope: &CleanupScope<'a>) -> &'a Block<'a>;
 932      fn trans_cleanups_to_exit_scope(&'a self,
 933                                      label: EarlyExitLabel)
 934                                      -> BasicBlockRef;
 935      fn get_or_create_landing_pad(&'a self) -> BasicBlockRef;
 936      fn scopes_len(&self) -> uint;
 937      fn push_scope(&self, scope: CleanupScope<'a>);
 938      fn pop_scope(&self) -> CleanupScope<'a>;
 939      fn top_scope<R>(&self, f: |&CleanupScope<'a>| -> R) -> R;
 940  }


librustc/middle/trans/cleanup.rs:844:1-844:1 -fn- definition:
pub fn temporary_scope(tcx: &ty::ctxt,
                       id: ast::NodeId)
                       -> ScopeId {
references:- 5
librustc/middle/trans/datum.rs:
459:             |r| {
460:                 let scope = cleanup::temporary_scope(bcx.tcx(), expr_id);
461:                 r.to_lvalue_datum_in_scope(bcx, name, scope)
librustc/middle/trans/expr.rs:
1794:             RvalueExpr(Rvalue { mode: ByValue }) => {
1795:                 let scope = cleanup::temporary_scope(bcx.tcx(), expr.id);
1796:                 if !type_is_zero_size(bcx.ccx(), content_ty) {
librustc/middle/trans/tvec.rs:
170:         let llfixed_casted = BitCast(bcx, llfixed, llfixed_ty);
171:         let cleanup_scope = cleanup::temporary_scope(bcx.tcx(), content_expr.id);
172:         fcx.schedule_drop_mem(cleanup_scope, llfixed_casted, fixed_ty);
librustc/middle/trans/datum.rs:
433:             |r| {
434:                 let scope = cleanup::temporary_scope(bcx.tcx(), expr_id);
435:                 r.add_clean(bcx.fcx, scope);


librustc/middle/trans/cleanup.rs:59:16-59:16 -enum- definition:
pub enum EarlyExitLabel {
    UnwindExit,
    ReturnExit,
references:- 10
932:     fn trans_cleanups_to_exit_scope(&'a self,
933:                                     label: EarlyExitLabel)
934:                                     -> BasicBlockRef;


librustc/middle/trans/cleanup.rs:859:1-859:1 -fn- definition:
pub fn var_scope(tcx: &ty::ctxt,
                 id: ast::NodeId)
                 -> ScopeId {
references:- 3
librustc/middle/trans/_match.rs:
2032:         pat_bindings(&tcx.def_map, pat, |_, p_id, _, path| {
2033:                 let scope = cleanup::var_scope(tcx, p_id);
2034:                 bcx = mk_binding_alloca(


librustc/middle/trans/cleanup.rs:796:1-796:1 -struct- definition:
pub struct DropValue {
    is_immediate: bool,
    on_unwind: bool,
references:- 3
239:         if !ty::type_needs_drop(self.ccx.tcx(), ty) { return; }
240:         let drop = box DropValue {
241:             is_immediate: false,
--
804: impl Cleanup for DropValue {
805:     fn clean_on_unwind(&self) -> bool {


librustc/middle/trans/cleanup.rs:867:1-867:1 -fn- definition:
fn cleanup_is_suitable_for(c: &Cleanup,
                           label: EarlyExitLabel) -> bool {
    !label.is_unwind() || c.clean_on_unwind()
references:- 2
598:             if scope.cleanups.iter().any(|c| cleanup_is_suitable_for(*c, label))
599:             {
--
604:                 for cleanup in scope.cleanups.iter().rev() {
605:                     if cleanup_is_suitable_for(*cleanup, label) {
606:                         bcx_out = cleanup.trans(bcx_out);


librustc/middle/trans/cleanup.rs:817:1-817:1 -enum- definition:
pub enum Heap {
    HeapManaged,
    HeapExchange
references:- 3
824:     ptr: ValueRef,
825:     heap: Heap,
826: }
--
909:                            val: ValueRef,
910:                            heap: Heap);
911:     fn schedule_clean(&self,


librustc/middle/trans/cleanup.rs:70:1-70:1 -trait- definition:
pub trait Cleanup {
    fn clean_on_unwind(&self) -> bool;
    fn trans<'a>(&self, bcx: &'a Block<'a>) -> &'a Block<'a>;
references:- 13
912:                       cleanup_scope: ScopeId,
913:                       cleanup: Box<Cleanup>);
914:     fn schedule_clean_in_ast_scope(&self,
--
918:                                     custom_scope: CustomScopeIndex,
919:                                     cleanup: Box<Cleanup>);
920:     fn needs_invoke(&self) -> bool;


librustc/middle/trans/cleanup.rs:29:1-29:1 -struct- definition:
pub struct CleanupScope<'a> {
    // The id of this cleanup scope. If the id is None,
    // this is a *temporary scope* that is pushed during trans to
references:- 12
697:     fn new(kind: CleanupScopeKind<'a>) -> CleanupScope<'a> {
698:         CleanupScope {
699:             kind: kind,
--
936:     fn scopes_len(&self) -> uint;
937:     fn push_scope(&self, scope: CleanupScope<'a>);
938:     fn pop_scope(&self) -> CleanupScope<'a>;
939:     fn top_scope<R>(&self, f: |&CleanupScope<'a>| -> R) -> R;
940: }
librustc/middle/trans/common.rs:
277:     // Cleanup scopes.
278:     pub scopes: RefCell<Vec<cleanup::CleanupScope<'a>> >,
279: }
librustc/middle/trans/cleanup.rs:
462:     fn top_scope<R>(&self, f: |&CleanupScope<'a>| -> R) -> R {
463:         f(self.scopes.borrow().last().unwrap())


librustc/middle/trans/cleanup.rs:75:1-75:1 -enum- definition:
pub enum ScopeId {
    AstScope(ast::NodeId),
    CustomScope(CustomScopeIndex)
references:- 24
899:     fn schedule_drop_mem(&self,
900:                          cleanup_scope: ScopeId,
901:                          val: ValueRef,
--
903:     fn schedule_drop_immediate(&self,
904:                                cleanup_scope: ScopeId,
905:                                val: ValueRef,
--
911:     fn schedule_clean(&self,
912:                       cleanup_scope: ScopeId,
913:                       cleanup: Box<Cleanup>);
librustc/middle/trans/datum.rs:
117:                                    zero: bool,
118:                                    scope: cleanup::ScopeId,
119:                                    arg: A,
--
294:                      fcx: &FunctionContext,
295:                      scope: cleanup::ScopeId)
296:                      -> ValueRef {
--
310:                                         name: &str,
311:                                         scope: cleanup::ScopeId)
312:                                         -> DatumBlock<'a, Lvalue> {
librustc/middle/trans/callee.rs:
524:                         get_callee: |bcx: &'a Block<'a>,
525:                                      arg_cleanup_scope: cleanup::ScopeId|
526:                                      -> Callee<'a>,
--
771:                   llargs: &mut Vec<ValueRef> ,
772:                   arg_cleanup_scope: cleanup::ScopeId,
773:                   ignore_self: bool)
--
844:                       arg_datum: Datum<Expr>,
845:                       arg_cleanup_scope: cleanup::ScopeId,
846:                       autoref_arg: AutorefArg)
librustc/middle/trans/_match.rs:
2097:                            binding_mode: IrrefutablePatternBindingMode,
2098:                            cleanup_scope: cleanup::ScopeId,
2099:                            arg: A,
librustc/middle/trans/meth.rs:
81:                            self_expr: Option<&ast::Expr>,
82:                            arg_cleanup_scope: cleanup::ScopeId)
83:                            -> Callee<'a> {
--
342:                           self_expr: &ast::Expr,
343:                           arg_cleanup_scope: cleanup::ScopeId)
344:                           -> Callee<'a> {
librustc/middle/trans/_match.rs:
2128:                         binding_mode: IrrefutablePatternBindingMode,
2129:                         cleanup_scope: cleanup::ScopeId)
2130:                         -> &'a Block<'a> {


librustc/middle/trans/cleanup.rs:65:1-65:1 -struct- definition:
pub struct CachedEarlyExit {
    label: EarlyExitLabel,
    cleanup_block: BasicBlockRef,
references:- 2
41:     cached_early_exits: Vec<CachedEarlyExit>,
42:     cached_landing_pad: Option<BasicBlockRef>,
--
722:         self.cached_early_exits.push(
723:             CachedEarlyExit { label: label,
724:                               cleanup_block: blk });


librustc/middle/trans/cleanup.rs:52:1-52:1 -enum- definition:
pub enum CleanupScopeKind<'a> {
    CustomScopeKind,
    AstScopeKind(ast::NodeId),
references:- 3
748: impl<'a> CleanupScopeKind<'a> {
749:     fn is_temp(&self) -> bool {


librustc/middle/trans/cleanup.rs:44:1-44:1 -struct- definition:
pub struct CustomScopeIndex {
    index: uint
}
references:- 15
122:         self.push_scope(CleanupScope::new(CustomScopeKind));
123:         CustomScopeIndex { index: index }
124:     }
--
177:                                         bcx: &'a Block<'a>,
178:                                         custom_scope: CustomScopeIndex)
179:                                         -> &'a Block<'a> {
--
880:                                    exits: [&'a Block<'a>, ..EXIT_MAX]);
881:     fn push_custom_cleanup_scope(&self) -> CustomScopeIndex;
882:     fn pop_and_trans_ast_cleanup_scope(&self,
--
917:     fn schedule_clean_in_custom_scope(&self,
918:                                     custom_scope: CustomScopeIndex,
919:                                     cleanup: Box<Cleanup>);
--
927:     fn is_valid_to_pop_custom_scope(&self, custom_scope: CustomScopeIndex) -> bool;
928:     fn is_valid_custom_scope(&self, custom_scope: CustomScopeIndex) -> bool;
929:     fn trans_scope_cleanups(&self,
librustc/middle/trans/base.rs:
1275: fn copy_args_to_allocas<'a>(fcx: &FunctionContext<'a>,
1276:                             arg_scope: cleanup::CustomScopeIndex,
1277:                             bcx: &'a Block<'a>,
librustc/middle/trans/cleanup.rs:
426:     fn is_valid_custom_scope(&self, custom_scope: CustomScopeIndex) -> bool {
427:         let scopes = self.scopes.borrow();


librustc/middle/trans/cleanup.rs:822:1-822:1 -struct- definition:
pub struct FreeValue {
    ptr: ValueRef,
    heap: Heap,
references:- 2
288:         let drop = box FreeValue { ptr: val, heap: heap };
--
828: impl Cleanup for FreeValue {
829:     fn clean_on_unwind(&self) -> bool {