auto merge of #10094 : alexcrichton/rust/issue-8704, r=pcwalton

This drops more of the old C++ runtime, rewriting it in Rust instead. A few
features were lost along the way, but hopefully not too many. The main loss is
that there are no longer backtraces associated with allocations (Rust doesn't
yet have a way of acquiring those). Other than that, I believe the rest of the
debugging utilities made their way over into Rust.

Closes #8704
This commit is contained in:
bors 2013-10-28 06:41:40 -07:00
commit bee40a9f98
13 changed files with 257 additions and 656 deletions

View File

@ -89,8 +89,6 @@ RUNTIME_CXXS_$(1)_$(2) := \
rt/rust_upcall.cpp \
rt/rust_uv.cpp \
rt/miniz.cpp \
rt/memory_region.cpp \
rt/boxed_region.cpp \
rt/rust_android_dummy.cpp \
rt/rust_test_helpers.cpp

View File

@ -158,7 +158,6 @@ pub mod raw {
use at_vec::capacity;
use cast;
use cast::{transmute, transmute_copy};
use libc;
use ptr;
use mem;
use uint;
@ -250,7 +249,7 @@ pub mod raw {
use rt::task::Task;
do Local::borrow |task: &mut Task| {
task.heap.realloc(ptr as *libc::c_void, size) as *()
task.heap.realloc(ptr as *mut Box<()>, size) as *()
}
}
}

View File

@ -11,7 +11,7 @@
#[doc(hidden)];
use libc::c_void;
use ptr::null;
use ptr;
use unstable::intrinsics::TyDesc;
use unstable::raw;
@ -37,7 +37,7 @@ unsafe fn each_live_alloc(read_next_before: bool,
use rt::local_heap;
let mut box = local_heap::live_allocs();
while box != null() {
while box != ptr::mut_null() {
let next_before = (*box).next;
let uniq = (*box).ref_count == managed::RC_MANAGED_UNIQUE;

View File

@ -19,6 +19,7 @@ use os;
static mut MIN_STACK: uint = 4000000;
static mut DEBUG_BORROW: bool = false;
static mut POISON_ON_FREE: bool = false;
pub fn init() {
unsafe {
@ -33,6 +34,10 @@ pub fn init() {
Some(_) => DEBUG_BORROW = true,
None => ()
}
match os::getenv("RUST_POISON_ON_FREE") {
Some(_) => POISON_ON_FREE = true,
None => ()
}
}
}
@ -43,3 +48,7 @@ pub fn min_stack() -> uint {
pub fn debug_borrow() -> bool {
unsafe { DEBUG_BORROW }
}
/// Returns true when RUST_POISON_ON_FREE was set in the environment at
/// startup (see `init`); callers use this to overwrite freed managed
/// boxes with a poison byte so use-after-free bugs surface quickly.
pub fn poison_on_free() -> bool {
    unsafe { POISON_ON_FREE }
}

View File

@ -19,7 +19,7 @@ extern {
}
#[inline]
fn get_box_size(body_size: uint, body_align: uint) -> uint {
pub fn get_box_size(body_size: uint, body_align: uint) -> uint {
let header_size = size_of::<raw::Box<()>>();
// FIXME (#2699): This alignment calculation is suspicious. Is it right?
let total_size = align_to(header_size, body_align) + body_size;

View File

@ -10,80 +10,272 @@
//! The local, garbage collected heap
use cast;
use libc::{c_void, uintptr_t};
use libc;
use libc::{c_void, uintptr_t, size_t};
use mem;
use ops::Drop;
use option::{Option, None, Some};
use ptr;
use rt::env;
use rt::global_heap;
use rt::local::Local;
use rt::task::Task;
use unstable::intrinsics::TyDesc;
use unstable::raw;
type MemoryRegion = c_void;
// This has no meaning with out rtdebug also turned on.
static TRACK_ALLOCATIONS: int = 0;
static MAGIC: u32 = 0xbadc0ffe;
struct Env { priv opaque: () }
pub type Box = raw::Box<()>;
struct BoxedRegion {
env: *Env,
backing_region: *MemoryRegion,
live_allocs: *raw::Box<()>,
pub struct MemoryRegion {
priv allocations: ~[*AllocHeader],
priv live_allocations: uint,
}
pub type OpaqueBox = c_void;
pub type TypeDesc = c_void;
pub struct LocalHeap {
priv memory_region: *MemoryRegion,
priv boxed_region: *BoxedRegion
priv memory_region: MemoryRegion,
priv poison_on_free: bool,
priv live_allocs: *mut raw::Box<()>,
}
impl LocalHeap {
#[fixed_stack_segment] #[inline(never)]
pub fn new() -> LocalHeap {
unsafe {
// XXX: These usually come from the environment
let detailed_leaks = false as uintptr_t;
let poison_on_free = false as uintptr_t;
let region = rust_new_memory_region(detailed_leaks, poison_on_free);
assert!(region.is_not_null());
let boxed = rust_new_boxed_region(region, poison_on_free);
assert!(boxed.is_not_null());
LocalHeap {
memory_region: region,
boxed_region: boxed
let region = MemoryRegion {
allocations: ~[],
live_allocations: 0,
};
LocalHeap {
memory_region: region,
poison_on_free: env::poison_on_free(),
live_allocs: ptr::mut_null(),
}
}
pub fn alloc(&mut self, td: *TyDesc, size: uint) -> *mut Box {
let total_size = global_heap::get_box_size(size, unsafe { (*td).align });
let box = self.memory_region.malloc(total_size);
{
// Make sure that we can't use `mybox` outside of this scope
let mybox: &mut Box = unsafe { cast::transmute(box) };
// Clear out this box, and move it to the front of the live
// allocations list
mybox.type_desc = td;
mybox.ref_count = 1;
mybox.prev = ptr::mut_null();
mybox.next = self.live_allocs;
if !self.live_allocs.is_null() {
unsafe { (*self.live_allocs).prev = box; }
}
self.live_allocs = box;
}
return box;
}
pub fn realloc(&mut self, ptr: *mut Box, size: uint) -> *mut Box {
// Make sure that we can't use `mybox` outside of this scope
let total_size = size + mem::size_of::<Box>();
let new_box = self.memory_region.realloc(ptr, total_size);
{
// Fix links because we could have moved around
let mybox: &mut Box = unsafe { cast::transmute(new_box) };
if !mybox.prev.is_null() {
unsafe { (*mybox.prev).next = new_box; }
}
if !mybox.next.is_null() {
unsafe { (*mybox.next).prev = new_box; }
}
}
if self.live_allocs == ptr {
self.live_allocs = new_box;
}
return new_box;
}
#[fixed_stack_segment] #[inline(never)]
pub fn alloc(&mut self, td: *TypeDesc, size: uint) -> *OpaqueBox {
unsafe {
return rust_boxed_region_malloc(self.boxed_region, td, size as size_t);
}
}
pub fn free(&mut self, box: *mut Box) {
{
// Make sure that we can't use `mybox` outside of this scope
let mybox: &mut Box = unsafe { cast::transmute(box) };
assert!(!mybox.type_desc.is_null());
#[fixed_stack_segment] #[inline(never)]
pub fn realloc(&mut self, ptr: *OpaqueBox, size: uint) -> *OpaqueBox {
unsafe {
return rust_boxed_region_realloc(self.boxed_region, ptr, size as size_t);
}
}
// Unlink it from the linked list
if !mybox.prev.is_null() {
unsafe { (*mybox.prev).next = mybox.next; }
}
if !mybox.next.is_null() {
unsafe { (*mybox.next).prev = mybox.prev; }
}
if self.live_allocs == box {
self.live_allocs = mybox.next;
}
#[fixed_stack_segment] #[inline(never)]
pub fn free(&mut self, box: *OpaqueBox) {
unsafe {
return rust_boxed_region_free(self.boxed_region, box);
// Destroy the box memory-wise
if self.poison_on_free {
unsafe {
let ptr: *mut u8 = cast::transmute(&mybox.data);
ptr::set_memory(ptr, 0xab, (*mybox.type_desc).size);
}
}
mybox.prev = ptr::mut_null();
mybox.next = ptr::mut_null();
mybox.type_desc = ptr::null();
}
self.memory_region.free(box);
}
}
impl Drop for LocalHeap {
#[fixed_stack_segment] #[inline(never)]
fn drop(&mut self) {
unsafe {
rust_delete_boxed_region(self.boxed_region);
rust_delete_memory_region(self.memory_region);
assert!(self.live_allocs.is_null());
}
}
// Debug header prepended to every managed allocation when the runtime is
// built with the `rtdebug` cfg; used to sanity-check pointers and track
// live allocations.
#[cfg(rtdebug)]
struct AllocHeader {
    magic: u32, // set to MAGIC while the allocation is live
    index: i32, // slot in MemoryRegion::allocations, or -1 if untracked
    size: u32,  // requested size of the user data, in bytes
}

// In non-rtdebug builds the header carries no fields; only the aligned
// header slot (see AllocHeader::size) remains.
#[cfg(not(rtdebug))]
struct AllocHeader;
impl AllocHeader {
    // Stamps a fresh header with the magic value and records the
    // requested size. Only does work when TRACK_ALLOCATIONS is enabled.
    #[cfg(rtdebug)]
    fn init(&mut self, size: u32) {
        if TRACK_ALLOCATIONS > 0 {
            self.magic = MAGIC;
            self.index = -1;
            self.size = size;
        }
    }
    #[cfg(not(rtdebug))]
    fn init(&mut self, _size: u32) {}

    // Asserts the magic value is intact; catches corrupted headers and
    // frees of pointers that were never allocated here.
    #[cfg(rtdebug)]
    fn assert_sane(&self) {
        if TRACK_ALLOCATIONS > 0 {
            rtassert!(self.magic == MAGIC);
        }
    }
    #[cfg(not(rtdebug))]
    fn assert_sane(&self) {}

    // Records the new user-data size after a realloc.
    #[cfg(rtdebug)]
    fn update_size(&mut self, size: u32) {
        if TRACK_ALLOCATIONS > 0 {
            self.size = size;
        }
    }
    #[cfg(not(rtdebug))]
    fn update_size(&mut self, _size: u32) {}

    // Returns the box that lives immediately after this header. (Note:
    // `box` was still a legal identifier in this era of Rust.)
    fn box(&mut self) -> *mut Box {
        let myaddr: uint = unsafe { cast::transmute(self) };
        (myaddr + AllocHeader::size()) as *mut Box
    }

    // Size of the header slot, rounded up so the box that follows gets
    // 16-byte alignment.
    fn size() -> uint {
        // For some platforms, 16 byte alignment is required.
        let ptr_size = 16;
        let header_size = mem::size_of::<AllocHeader>();
        return (header_size + ptr_size - 1) / ptr_size * ptr_size;
    }

    // Inverse of `box`: recovers the header preceding a box pointer.
    fn from(box: *mut Box) -> *mut AllocHeader {
        (box as uint - AllocHeader::size()) as *mut AllocHeader
    }
}
impl MemoryRegion {
    // Allocates a managed box plus its (possibly zero-sized) header,
    // stamps the header, registers the allocation for leak tracking,
    // and bumps the live count. Returns the box, which sits just past
    // the header.
    fn malloc(&mut self, size: uint) -> *mut Box {
        let total_size = size + AllocHeader::size();
        let alloc: *AllocHeader = unsafe {
            global_heap::malloc_raw(total_size) as *AllocHeader
        };
        let alloc: &mut AllocHeader = unsafe { cast::transmute(alloc) };
        alloc.init(size as u32);
        self.claim(alloc);
        self.live_allocations += 1;
        return alloc.box();
    }

    // Resizes an existing box. The underlying allocation may move, so
    // the rtdebug tracking slot is repointed from the old header
    // address (`orig_alloc`) to the new one.
    fn realloc(&mut self, box: *mut Box, size: uint) -> *mut Box {
        rtassert!(!box.is_null());
        let orig_alloc = AllocHeader::from(box);
        unsafe { (*orig_alloc).assert_sane(); }

        let total_size = size + AllocHeader::size();
        let alloc: *AllocHeader = unsafe {
            global_heap::realloc_raw(orig_alloc as *mut libc::c_void,
                                     total_size) as *AllocHeader
        };
        let alloc: &mut AllocHeader = unsafe { cast::transmute(alloc) };
        alloc.assert_sane();
        alloc.update_size(size as u32);
        self.update(alloc, orig_alloc as *AllocHeader);
        return alloc.box();
    }

    // Frees a box: sanity-checks its header, unregisters it from the
    // tracking list, decrements the live count, and hands the memory
    // (header included) back to the exchange heap.
    fn free(&mut self, box: *mut Box) {
        rtassert!(!box.is_null());
        let alloc = AllocHeader::from(box);
        unsafe {
            (*alloc).assert_sane();
            self.release(cast::transmute(alloc));
            rtassert!(self.live_allocations > 0);
            self.live_allocations -= 1;
            global_heap::exchange_free(alloc as *libc::c_char)
        }
    }

    // Records a freshly allocated header in `allocations`; the header
    // remembers its slot index. Only active at TRACK_ALLOCATIONS > 1.
    #[cfg(rtdebug)]
    fn claim(&mut self, alloc: &mut AllocHeader) {
        alloc.assert_sane();
        if TRACK_ALLOCATIONS > 1 {
            alloc.index = self.allocations.len() as i32;
            self.allocations.push(&*alloc as *AllocHeader);
        }
    }
    #[cfg(not(rtdebug))]
    fn claim(&mut self, _alloc: &mut AllocHeader) {}

    // On free, verifies the header occupies the slot it claims and
    // clears that slot (double frees would trip the rtassert).
    #[cfg(rtdebug)]
    fn release(&mut self, alloc: &AllocHeader) {
        alloc.assert_sane();
        if TRACK_ALLOCATIONS > 1 {
            rtassert!(self.allocations[alloc.index] == alloc as *AllocHeader);
            self.allocations[alloc.index] = ptr::null();
        }
    }
    #[cfg(not(rtdebug))]
    fn release(&mut self, _alloc: &AllocHeader) {}

    // After a realloc moved the header from `orig`, points its tracking
    // slot at the new address.
    #[cfg(rtdebug)]
    fn update(&mut self, alloc: &mut AllocHeader, orig: *AllocHeader) {
        alloc.assert_sane();
        if TRACK_ALLOCATIONS > 1 {
            rtassert!(self.allocations[alloc.index] == orig);
            self.allocations[alloc.index] = &*alloc as *AllocHeader;
        }
    }
    #[cfg(not(rtdebug))]
    fn update(&mut self, _alloc: &mut AllocHeader, _orig: *AllocHeader) {}
}
// When the region is torn down every managed box must already have been
// freed: a non-zero live count aborts with the leak count, and (in
// rtdebug builds) every tracking slot must have been cleared by `release`.
impl Drop for MemoryRegion {
    fn drop(&mut self) {
        if self.live_allocations != 0 {
            rtabort!("leaked managed memory ({} objects)", self.live_allocations);
        }
        rtassert!(self.allocations.iter().all(|s| s.is_null()));
    }
}
pub unsafe fn local_malloc(td: *libc::c_char, size: libc::uintptr_t) -> *libc::c_char {
@ -91,7 +283,7 @@ pub unsafe fn local_malloc(td: *libc::c_char, size: libc::uintptr_t) -> *libc::c
let task: Option<*mut Task> = Local::try_unsafe_borrow();
match task {
Some(task) => {
(*task).heap.alloc(td as *libc::c_void, size as uint) as *libc::c_char
(*task).heap.alloc(td as *TyDesc, size as uint) as *libc::c_char
}
None => rtabort!("local malloc outside of task")
}
@ -103,34 +295,16 @@ pub unsafe fn local_free(ptr: *libc::c_char) {
let task_ptr: Option<*mut Task> = Local::try_unsafe_borrow();
match task_ptr {
Some(task) => {
(*task).heap.free(ptr as *libc::c_void);
(*task).heap.free(ptr as *mut Box)
}
None => rtabort!("local free outside of task")
}
}
pub fn live_allocs() -> *raw::Box<()> {
let region = do Local::borrow |task: &mut Task| {
task.heap.boxed_region
};
return unsafe { (*region).live_allocs };
}
extern {
fn rust_new_memory_region(detailed_leaks: uintptr_t,
poison_on_free: uintptr_t) -> *MemoryRegion;
fn rust_delete_memory_region(region: *MemoryRegion);
fn rust_new_boxed_region(region: *MemoryRegion,
poison_on_free: uintptr_t) -> *BoxedRegion;
fn rust_delete_boxed_region(region: *BoxedRegion);
fn rust_boxed_region_malloc(region: *BoxedRegion,
td: *TypeDesc,
size: size_t) -> *OpaqueBox;
fn rust_boxed_region_realloc(region: *BoxedRegion,
ptr: *OpaqueBox,
size: size_t) -> *OpaqueBox;
fn rust_boxed_region_free(region: *BoxedRegion, box: *OpaqueBox);
pub fn live_allocs() -> *mut Box {
do Local::borrow |task: &mut Task| {
task.heap.live_allocs
}
}
#[cfg(test)]

View File

@ -15,8 +15,8 @@ use unstable::intrinsics::TyDesc;
pub struct Box<T> {
ref_count: uint,
type_desc: *TyDesc,
priv prev: *Box<T>,
next: *Box<T>,
prev: *mut Box<T>,
next: *mut Box<T>,
data: T
}

View File

@ -1,99 +0,0 @@
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#include "memory_region.h"
#include "boxed_region.h"
#include "rust_globals.h"
#include "rust_util.h"
// #define DUMP_BOXED_REGION
// Allocates a box in the task-local '@' (managed) region. Total size is
// header + body, with the body aligned per the type descriptor. The new
// box starts with ref_count 1 and is pushed onto the front of the
// `live_allocs` doubly-linked list.
rust_opaque_box *boxed_region::malloc(type_desc *td, size_t body_size) {
    size_t total_size = get_box_size(body_size, td->align);
    rust_opaque_box *box =
        (rust_opaque_box*)backing_region->malloc(total_size, "@");
    // Initialize the header and link at the head of the live list.
    box->td = td;
    box->ref_count = 1;
    box->prev = NULL;
    box->next = live_allocs;
    if (live_allocs) live_allocs->prev = box;
    live_allocs = box;

    /*LOG(rust_get_current_task(), box,
        "@malloc()=%p with td %p, size %lu==%lu+%lu, "
        "align %lu, prev %p, next %p\n",
        box, td, total_size, sizeof(rust_opaque_box), body_size,
        td->align, box->prev, box->next);*/

    return box;
}
// Resizes a box to hold `new_size` body bytes. The backing region may
// move the allocation, so the neighbours' prev/next pointers (and the
// list head, if this box was first) are patched to the new address.
rust_opaque_box *boxed_region::realloc(rust_opaque_box *box,
                                       size_t new_size) {
    size_t total_size = new_size + sizeof(rust_opaque_box);
    rust_opaque_box *new_box =
        (rust_opaque_box*)backing_region->realloc(box, total_size);
    if (new_box->prev) new_box->prev->next = new_box;
    if (new_box->next) new_box->next->prev = new_box;
    if (live_allocs == box) live_allocs = new_box;

    /*LOG(rust_get_current_task(), box,
        "@realloc()=%p with orig=%p, size %lu==%lu+%lu",
        new_box, box, total_size, sizeof(rust_opaque_box), new_size);*/

    return new_box;
}
// Like malloc, but additionally zeroes the box body (the header is
// already initialized by malloc).
rust_opaque_box *boxed_region::calloc(type_desc *td, size_t body_size) {
    rust_opaque_box *box = malloc(td, body_size);
    memset(box_body(box), 0, td->size);
    return box;
}
// Frees a box: unlinks it from the live_allocs list, optionally poisons
// the body, clears the header (td = NULL is later used to detect double
// frees), and returns the memory to the backing region.
void boxed_region::free(rust_opaque_box *box) {
    // This turns out to not be true in various situations,
    // like when we are unwinding after a failure.
    //
    // assert(box->ref_count == 0);

    // This however should always be true. Helps to detect
    // double frees (kind of).
    assert(box->td != NULL);

    /*LOG(rust_get_current_task(), box,
        "@free(%p) with td %p, prev %p, next %p\n",
        box, box->td, box->prev, box->next);*/

    // Unlink from the doubly-linked list of live allocations.
    if (box->prev) box->prev->next = box->next;
    if (box->next) box->next->prev = box->prev;
    if (live_allocs == box) live_allocs = box->next;

    if (poison_on_free) {
        memset(box_body(box), 0xab, box->td->size);
    }

    box->prev = NULL;
    box->next = NULL;
    box->td = NULL;

    backing_region->free(box);
}
//
// Local Variables:
// mode: C++
// fill-column: 78;
// indent-tabs-mode: nil
// c-basic-offset: 4
// buffer-file-coding-system: utf-8-unix
// End:
//

View File

@ -1,68 +0,0 @@
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#ifndef BOXED_REGION_H
#define BOXED_REGION_H
#include <stdlib.h>
struct type_desc;
class memory_region;
struct rust_opaque_box;
struct rust_env;
/* Tracks the data allocated by a particular task in the '@' region.
* Currently still relies on the standard malloc as a backing allocator, but
* this could be improved someday if necessary. Every allocation must provide
* a type descr which describes the payload (what follows the header). */
class boxed_region {
private:
    bool poison_on_free;            // overwrite freed box bodies with 0xab
    memory_region *backing_region;  // allocator that actually owns the bytes
    rust_opaque_box *live_allocs;   // head of doubly-linked list of live boxes

    // Rounds v up to the next multiple of align (align must be a power
    // of two).
    size_t align_to(size_t v, size_t align) {
        size_t alignm1 = align - 1;
        v += alignm1;
        v &= ~alignm1;
        return v;
    }

private:
    // private and undefined to disable copying
    boxed_region(const boxed_region& rhs);
    boxed_region& operator=(const boxed_region& rhs);

public:
    boxed_region(memory_region *br, bool poison_on_free)
        : poison_on_free(poison_on_free)
        , backing_region(br)
        , live_allocs(NULL)
    {}

    rust_opaque_box *first_live_alloc() { return live_allocs; }

    rust_opaque_box *malloc(type_desc *td, size_t body_size);
    rust_opaque_box *calloc(type_desc *td, size_t body_size);
    rust_opaque_box *realloc(rust_opaque_box *box, size_t new_size);
    void free(rust_opaque_box *box);
};
#endif /* BOXED_REGION_H */
//
// Local Variables:
// mode: C++
// fill-column: 78;
// indent-tabs-mode: nil
// c-basic-offset: 4
// buffer-file-coding-system: utf-8-unix
// End:
//

View File

@ -1,258 +0,0 @@
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#include "memory_region.h"
#if RUSTRT_TRACK_ALLOCATIONS >= 3
#include <execinfo.h>
#endif
#if RUSTRT_TRACK_ALLOCATIONS >= 1
// For some platforms, 16 byte alignment is required.
# define PTR_SIZE 16
# define ALIGN_PTR(x) (((x)+PTR_SIZE-1)/PTR_SIZE*PTR_SIZE)
# define HEADER_SIZE ALIGN_PTR(sizeof(alloc_header))
# define MAGIC 0xbadc0ffe
#else
# define HEADER_SIZE 0
#endif
// Maps a user-data pointer back to its debug header, which sits
// HEADER_SIZE bytes before the data (HEADER_SIZE is 0 when tracking is
// compiled out, making this a no-op cast).
memory_region::alloc_header *memory_region::get_header(void *mem) {
    return (alloc_header *)((char *)mem - HEADER_SIZE);
}
// Inverse of get_header: returns the user-data pointer that follows the
// header.
void *memory_region::get_data(alloc_header *ptr) {
    return (void*)((char *)ptr + HEADER_SIZE);
}
// At tracking level >= 3 with detailed leaks enabled, writes the
// backtrace captured at allocation time to fd 2 (stderr), skipping the
// first recorded frame (bt + 1). No-op otherwise.
inline void memory_region::maybe_print_backtrace(const alloc_header *header) const {
#   if RUSTRT_TRACK_ALLOCATIONS >= 3
    if (_detailed_leaks) {
        backtrace_symbols_fd(header->bt + 1, header->btframes - 1, 2);
    }
#   endif
}
// Root region constructor: no parent, debug flags supplied directly.
memory_region::memory_region(bool detailed_leaks,
                             bool poison_on_free) :
    _parent(NULL), _live_allocations(0),
    _detailed_leaks(detailed_leaks),
    _poison_on_free(poison_on_free) {
}
// Child region constructor: inherits the parent's debug flags.
memory_region::memory_region(memory_region *parent) :
    _parent(parent), _live_allocations(0),
    _detailed_leaks(parent->_detailed_leaks),
    _poison_on_free(parent->_poison_on_free) {
}
// Bumps the live-allocation count (leak accounting).
void memory_region::add_alloc() {
    _live_allocations++;
}
// Decrements the live-allocation count (leak accounting).
void memory_region::dec_alloc() {
    _live_allocations--;
}
// Releases `mem` back to the system allocator; NULL is a no-op. With
// tracking enabled, verifies the header magic, removes the entry from
// the allocation list, and (optionally) poisons the freed bytes before
// the underlying ::free of the header+data block.
void memory_region::free(void *mem) {
    // printf("free: ptr 0x%" PRIxPTR" region=%p\n", (uintptr_t) mem, this);
    if (!mem) { return; }

    alloc_header *alloc = get_header(mem);
#   if RUSTRT_TRACK_ALLOCATIONS >= 1
    assert(alloc->magic == MAGIC);
#   endif
    if (_live_allocations < 1) {
        assert(false && "live_allocs < 1");
    }
    release_alloc(mem);
    maybe_poison(mem);
    ::free(alloc);
}
// Resizes a tracked allocation to hold `orig_size` user bytes; aborts
// on out-of-memory. With tracking on, updates the recorded size and
// repoints the allocation-list slot at the (possibly moved) block.
//
// NOTE(review): when `mem` is NULL this bumps the live count and then
// still calls get_header(NULL); with tracking off HEADER_SIZE is 0 so
// ::realloc(NULL, ...) behaves like malloc, but with tracking on the
// magic check would read through a bogus pointer — confirm callers
// never pass NULL with RUSTRT_TRACK_ALLOCATIONS >= 1.
void *
memory_region::realloc(void *mem, size_t orig_size) {
    if (!mem) {
        add_alloc();
    }

    alloc_header *alloc = get_header(mem);
#   if RUSTRT_TRACK_ALLOCATIONS >= 1
    assert(alloc->magic == MAGIC);
#   endif

    size_t size = orig_size + HEADER_SIZE;
    alloc_header *newMem = (alloc_header *)::realloc(alloc, size);
    if (newMem == NULL) {
        fprintf(stderr,
                "memory_region::realloc> "
                "Out of memory allocating %ld bytes",
                (long int) size);
        abort();
    }

#   if RUSTRT_TRACK_ALLOCATIONS >= 1
    assert(newMem->magic == MAGIC);
    newMem->size = orig_size;
#   endif

#   if RUSTRT_TRACK_ALLOCATIONS >= 2
    if (_allocation_list[newMem->index] != alloc) {
        printf("at index %d, found %p, expected %p\n",
               alloc->index, _allocation_list[alloc->index], alloc);
        printf("realloc: ptr 0x%" PRIxPTR " (%s) is not in allocation_list\n",
               (uintptr_t) get_data(alloc), alloc->tag);
        assert(false && "not in allocation_list");
    }
    else {
        _allocation_list[newMem->index] = newMem;
        // printf("realloc: stored %p at index %d, replacing %p\n",
        //        newMem, index, mem);
    }
#   endif

    return get_data(newMem);
}
// Allocates `size` user bytes plus the debug header; aborts on
// out-of-memory. With tracking on, stamps the header (magic, tag,
// size) and registers the allocation via claim_alloc. Returns a
// pointer to the user data, just past the header.
void *
memory_region::malloc(size_t size, const char *tag) {
#   if RUSTRT_TRACK_ALLOCATIONS >= 1
    size_t old_size = size;
#   endif
    size += HEADER_SIZE;
    alloc_header *mem = (alloc_header *)::malloc(size);
    if (mem == NULL) {
        fprintf(stderr,
                "memory_region::malloc> "
                "Out of memory allocating %ld bytes",
                (long int) size);
        abort();
    }

#   if RUSTRT_TRACK_ALLOCATIONS >= 1
    mem->magic = MAGIC;
    mem->tag = tag;
    mem->index = -1;
    mem->size = old_size;
#   endif

    void *data = get_data(mem);
    claim_alloc(data);

    return data;
}
// On teardown, reports leaks: if any allocations are still live, prints
// a message (and, at tracking level >= 2 with detailed leaks, lists each
// unfreed allocation with its tag and optional backtrace) before
// asserting failure. Returns quietly when nothing leaked and detailed
// leak reporting is off.
memory_region::~memory_region() {
    if (_live_allocations == 0 && !_detailed_leaks) {
        return;
    }
    char msg[128];
    if(_live_allocations > 0) {
        snprintf(msg, sizeof(msg),
                 "leaked memory in rust main loop (%d objects)",
                 _live_allocations);
    }

#   if RUSTRT_TRACK_ALLOCATIONS >= 2
    if (_detailed_leaks) {
        int leak_count = 0;
        for (size_t i = 0; i < _allocation_list.size(); i++) {
            if (_allocation_list[i] != NULL) {
                alloc_header *header = (alloc_header*)_allocation_list[i];
                printf("allocation (%s) 0x%" PRIxPTR " was not freed\n",
                       header->tag,
                       (uintptr_t) get_data(header));
                ++leak_count;
                maybe_print_backtrace(header);
            }
        }
        // Cross-check the list against the counter.
        assert(leak_count == _live_allocations);
    }
#   endif

    if (_live_allocations > 0) {
        fprintf(stderr, "%s\n", msg);
        assert(false);
    }
}
// Unregisters an allocation on free. At tracking level >= 2, validates
// the header's recorded index against the allocation list (detecting
// corrupted or double-freed pointers), then clears the slot. Always
// decrements the live count.
void
memory_region::release_alloc(void *mem) {
#   if RUSTRT_TRACK_ALLOCATIONS >= 1
    alloc_header *alloc = get_header(mem);
    assert(alloc->magic == MAGIC);
#   endif

#   if RUSTRT_TRACK_ALLOCATIONS >= 2
    if (((size_t) alloc->index) >= _allocation_list.size()) {
        printf("free: ptr 0x%" PRIxPTR " (%s) index %d is beyond allocation_list of size %zu\n",
               (uintptr_t) get_data(alloc), alloc->tag, alloc->index, _allocation_list.size());
        maybe_print_backtrace(alloc);
        assert(false && "index beyond allocation_list");
    }
    if (_allocation_list[alloc->index] != alloc) {
        printf("free: ptr 0x%" PRIxPTR " (%s) is not in allocation_list\n",
               (uintptr_t) get_data(alloc), alloc->tag);
        maybe_print_backtrace(alloc);
        assert(false && "not in allocation_list");
    }
    else {
        // printf("freed index %d\n", index);
        _allocation_list[alloc->index] = NULL;
        alloc->index = -1;
    }
#   endif

    dec_alloc();
}
// Registers a fresh allocation: at level >= 2 appends it to the
// allocation list (storing the slot index back into the header), at
// level >= 3 captures a backtrace for leak reports. Always increments
// the live count.
void
memory_region::claim_alloc(void *mem) {
#   if RUSTRT_TRACK_ALLOCATIONS >= 1
    alloc_header *alloc = get_header(mem);
    assert(alloc->magic == MAGIC);
#   endif

#   if RUSTRT_TRACK_ALLOCATIONS >= 2
    alloc->index = _allocation_list.append(alloc);
#   endif

#   if RUSTRT_TRACK_ALLOCATIONS >= 3
    if (_detailed_leaks) {
        alloc->btframes = ::backtrace(alloc->bt, 32);
    }
#   endif

    add_alloc();
}
// If poisoning is enabled (and headers exist to record the size),
// overwrites the freed user data with 0xcd so stale reads are obvious.
void
memory_region::maybe_poison(void *mem) {
    if (!_poison_on_free)
        return;

#   if RUSTRT_TRACK_ALLOCATIONS >= 1
    alloc_header *alloc = get_header(mem);
    memset(mem, '\xcd', alloc->size);
#   endif
}
//
// Local Variables:
// mode: C++
// fill-column: 78;
// indent-tabs-mode: nil
// c-basic-offset: 4
// buffer-file-coding-system: utf-8-unix
// End:
//

View File

@ -1,107 +0,0 @@
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
/*
* The Rust runtime uses memory regions to provide a primitive level of
* memory management and isolation between tasks, and domains.
*
* FIXME (#2686): Implement a custom lock-free malloc / free instead of
* relying solely on the standard malloc / free.
*/
#ifndef MEMORY_REGION_H
#define MEMORY_REGION_H
#include "rust_globals.h"
#include "sync/lock_and_signal.h"
#include "util/array_list.h"
// There are three levels of debugging:
//
// 0 --- no headers, no debugging support
// 1 --- support poison, but do not track allocations
// 2 --- track allocations in detail
// 3 --- record backtraces of every allocation
//
// NB: please do not commit code with level 2. It's
// hugely expensive and should only be used as a last resort.
#define RUSTRT_TRACK_ALLOCATIONS 0
struct rust_env;
// Reference-free backing allocator for the runtime; wraps malloc/free
// with optional debug headers that support poisoning, allocation
// tracking, and backtrace capture (see RUSTRT_TRACK_ALLOCATIONS above).
class memory_region {
private:
    // Per-allocation debug header; empty when tracking is compiled out.
    struct alloc_header {
#       if RUSTRT_TRACK_ALLOCATIONS > 0
        uint32_t magic;     // MAGIC while live; checked on free/realloc
        int index;          // slot in _allocation_list, -1 if untracked
        const char *tag;    // caller-supplied label for leak reports
        uint32_t size;      // requested user-data size
#       if RUSTRT_TRACK_ALLOCATIONS >= 3
        void *bt[32];       // backtrace captured at allocation time
        int btframes;
#       endif
#       endif
    };

    inline alloc_header *get_header(void *mem);
    inline void *get_data(alloc_header *);

    memory_region *_parent;                       // region this one was derived from, or NULL
    int _live_allocations;                        // leak counter
    array_list<alloc_header *> _allocation_list;  // live headers (tracking level >= 2)
    const bool _detailed_leaks;
    const bool _poison_on_free;
    lock_and_signal _lock;

    void add_alloc();
    void dec_alloc();
    void maybe_poison(void *mem);

    void release_alloc(void *mem);
    void claim_alloc(void *mem);

    void maybe_print_backtrace(const alloc_header *) const;

private:
    // private and undefined to disable copying
    memory_region(const memory_region& rhs);
    memory_region& operator=(const memory_region& rhs);

public:
    memory_region(bool detailed_leaks, bool poison_on_free);
    memory_region(memory_region *parent);
    void *malloc(size_t size, const char *tag);
    void *realloc(void *mem, size_t size);
    void free(void *mem);
    ~memory_region();
};
inline void *operator new(size_t size, memory_region &region,
const char *tag) {
return region.malloc(size, tag);
}
inline void *operator new(size_t size, memory_region *region,
const char *tag) {
return region->malloc(size, tag);
}
//
// Local Variables:
// mode: C++
// fill-column: 78;
// indent-tabs-mode: nil
// c-basic-offset: 4
// buffer-file-coding-system: utf-8-unix
// End:
//
#endif /* MEMORY_REGION_H */

View File

@ -13,8 +13,6 @@
#include "rust_util.h"
#include "sync/rust_thread.h"
#include "sync/lock_and_signal.h"
#include "memory_region.h"
#include "boxed_region.h"
#include "vg/valgrind.h"
#include <time.h>
@ -477,44 +475,6 @@ rust_initialize_rt_tls_key(tls_key *key) {
}
}
extern "C" CDECL memory_region*
rust_new_memory_region(uintptr_t detailed_leaks,
uintptr_t poison_on_free) {
return new memory_region((bool)detailed_leaks,
(bool)poison_on_free);
}
extern "C" CDECL void
rust_delete_memory_region(memory_region *region) {
delete region;
}
extern "C" CDECL boxed_region*
rust_new_boxed_region(memory_region *region,
uintptr_t poison_on_free) {
return new boxed_region(region, poison_on_free);
}
extern "C" CDECL void
rust_delete_boxed_region(boxed_region *region) {
delete region;
}
extern "C" CDECL rust_opaque_box*
rust_boxed_region_malloc(boxed_region *region, type_desc *td, size_t size) {
return region->malloc(td, size);
}
extern "C" CDECL rust_opaque_box*
rust_boxed_region_realloc(boxed_region *region, rust_opaque_box *ptr, size_t size) {
return region->realloc(ptr, size);
}
extern "C" CDECL void
rust_boxed_region_free(boxed_region *region, rust_opaque_box *box) {
region->free(box);
}
typedef void *(rust_try_fn)(void*, void*);
extern "C" CDECL uintptr_t

View File

@ -164,13 +164,6 @@ rust_uv_free_ip4_addr
rust_uv_free_ip6_addr
rust_initialize_rt_tls_key
rust_dbg_next_port
rust_new_memory_region
rust_delete_memory_region
rust_new_boxed_region
rust_delete_boxed_region
rust_boxed_region_malloc
rust_boxed_region_realloc
rust_boxed_region_free
rust_try
rust_begin_unwind
rust_valgrind_stack_register