// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! The local, garbage-collected heap
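//!
//! Each task owns a `LocalHeap`, which backs `@T` allocations. Every box
//! carries a header (drop glue, reference count, prev/next pointers) and is
//! chained into an intrusive doubly-linked list rooted at `live_allocs`, so
//! the runtime can walk every live box, e.g. to reclaim them when the task
//! dies. An illustrative use (a sketch; the `@` sugar is compiled into calls
//! to the "malloc" lang item defined in this module):
//!
//! ```ignore
//! let a = @10;    // lowered to a call into LocalHeap::alloc
//! ```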

use cast;
use iter::Iterator;
use mem;
use ops::Drop;
use option::{Option, None, Some};
use ptr;
use ptr::RawPtr;
use rt::global_heap;
use rt::local::Local;
use rt::task::Task;
use raw;
use slice::{ImmutableVector, Vector};
use vec::Vec;

// This has no meaning without rtdebug also turned on. TRACK_ALLOCATIONS
// levels: 0 disables tracking, above 0 stamps and checks each header's
// magic, and above 1 additionally records every allocation in
// `MemoryRegion::allocations`.
#[cfg(rtdebug)]
static TRACK_ALLOCATIONS: int = 0;
#[cfg(rtdebug)]
static MAGIC: u32 = 0xbadc0ffe;

/// A type-erased managed box: just the common box header, with the
/// payload type forgotten.
pub type Box = raw::Box<()>;

/// Wraps the raw allocations behind the local heap. In rtdebug builds it
/// also keeps a table of live allocation headers for sanity checking.
pub struct MemoryRegion {
    allocations: Vec<*AllocHeader>,
    live_allocations: uint,
}

pub struct LocalHeap {
    memory_region: MemoryRegion,

    /// Head of the intrusive doubly-linked list of live boxes.
    live_allocs: *mut raw::Box<()>,
}

impl LocalHeap {
    #[inline]
    pub fn new() -> LocalHeap {
        let region = MemoryRegion {
            allocations: Vec::new(),
            live_allocations: 0,
        };
        LocalHeap {
            memory_region: region,
            live_allocs: ptr::mut_null(),
        }
    }

    /// Allocates a box with room for `size` bytes of payload, links it to
    /// the front of the live-allocations list, and returns it with a
    /// reference count of one.
    #[inline]
    pub fn alloc(&mut self, drop_glue: fn(*mut u8), size: uint, align: uint) -> *mut Box {
        let total_size = global_heap::get_box_size(size, align);
        let alloc = self.memory_region.malloc(total_size);
        {
            // Make sure that we can't use `mybox` outside of this scope
            let mybox: &mut Box = unsafe { cast::transmute(alloc) };
            // Initialize this box and move it to the front of the live
            // allocations list
            mybox.drop_glue = drop_glue;
            mybox.ref_count = 1;
            mybox.prev = ptr::mut_null();
            mybox.next = self.live_allocs;
            if !self.live_allocs.is_null() {
                unsafe { (*self.live_allocs).prev = alloc; }
            }
            self.live_allocs = alloc;
        }
        return alloc;
    }

    /// Resizes a box to hold `size` bytes of payload. The box may move,
    /// so its neighbours' links (and the list head) are patched up here.
    #[inline]
    pub fn realloc(&mut self, ptr: *mut Box, size: uint) -> *mut Box {
        let total_size = size + mem::size_of::<Box>();
        let new_box = self.memory_region.realloc(ptr, total_size);
        {
            // Make sure that we can't use `mybox` outside of this scope.
            // Fix the links, because the box could have moved.
            let mybox: &mut Box = unsafe { cast::transmute(new_box) };
            if !mybox.prev.is_null() {
                unsafe { (*mybox.prev).next = new_box; }
            }
            if !mybox.next.is_null() {
                unsafe { (*mybox.next).prev = new_box; }
            }
        }
        if self.live_allocs == ptr {
            self.live_allocs = new_box;
        }
        return new_box;
    }

    /// Unlinks a box from the live-allocations list and releases its
    /// memory.
    #[inline]
    pub fn free(&mut self, alloc: *mut Box) {
        {
            // Make sure that we can't use `mybox` outside of this scope
            let mybox: &mut Box = unsafe { cast::transmute(alloc) };

            // Unlink it from the linked list
            if !mybox.prev.is_null() {
                unsafe { (*mybox.prev).next = mybox.next; }
            }
            if !mybox.next.is_null() {
                unsafe { (*mybox.next).prev = mybox.prev; }
            }
            if self.live_allocs == alloc {
                self.live_allocs = mybox.next;
            }
        }

        self.memory_region.free(alloc);
    }
}
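
// An illustrative round trip through the heap (a sketch only; `glue`,
// `size`, and `align` stand in for values the compiler supplies):
//
//     let mut heap = LocalHeap::new();
//     let b = heap.alloc(glue, size, align);   // b heads heap.live_allocs
//     heap.free(b);                            // b is unlinked and released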

impl Drop for LocalHeap {
    fn drop(&mut self) {
        // Every box must have been freed before the heap is torn down.
        assert!(self.live_allocs.is_null());
    }
}

/// In rtdebug builds, every allocation is prefixed with this header for
/// bookkeeping; otherwise the header is zero-sized.
#[cfg(rtdebug)]
struct AllocHeader {
    magic: u32,
    index: i32,
    size: u32,
}
#[cfg(not(rtdebug))]
struct AllocHeader;

impl AllocHeader {
    #[cfg(rtdebug)]
    fn init(&mut self, size: u32) {
        if TRACK_ALLOCATIONS > 0 {
            self.magic = MAGIC;
            self.index = -1;
            self.size = size;
        }
    }
    #[cfg(not(rtdebug))]
    fn init(&mut self, _size: u32) {}

    #[cfg(rtdebug)]
    fn assert_sane(&self) {
        if TRACK_ALLOCATIONS > 0 {
            rtassert!(self.magic == MAGIC);
        }
    }
    #[cfg(not(rtdebug))]
    fn assert_sane(&self) {}

    #[cfg(rtdebug)]
    fn update_size(&mut self, size: u32) {
        if TRACK_ALLOCATIONS > 0 {
            self.size = size;
        }
    }
    #[cfg(not(rtdebug))]
    fn update_size(&mut self, _size: u32) {}

    /// Returns a pointer to the box stored immediately after this header.
    fn as_box(&mut self) -> *mut Box {
        let myaddr: uint = unsafe { cast::transmute(self) };
        (myaddr + AllocHeader::size()) as *mut Box
    }

    /// The header's size rounded up to a multiple of 16 bytes, since some
    /// platforms require 16-byte alignment for allocations.
    fn size() -> uint {
        let alignment = 16;
        let header_size = mem::size_of::<AllocHeader>();
        return (header_size + alignment - 1) / alignment * alignment;
    }
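
    // Worked example: in a non-rtdebug build `AllocHeader` is zero-sized,
    // so size() is (0 + 15) / 16 * 16 = 0; with the 12-byte rtdebug header
    // it is (12 + 15) / 16 * 16 = 16.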

    /// Given a box pointer, recovers the header that precedes it (the
    /// inverse of `as_box`).
    fn from(a_box: *mut Box) -> *mut AllocHeader {
        (a_box as uint - AllocHeader::size()) as *mut AllocHeader
    }
}

impl MemoryRegion {
    /// Allocates `size` bytes prefixed by an `AllocHeader` and returns a
    /// pointer to the box that follows the header.
    #[inline]
    fn malloc(&mut self, size: uint) -> *mut Box {
        let total_size = size + AllocHeader::size();
        let alloc: *AllocHeader = unsafe {
            global_heap::malloc_raw(total_size) as *AllocHeader
        };

        let alloc: &mut AllocHeader = unsafe { cast::transmute(alloc) };
        alloc.init(size as u32);
        self.claim(alloc);
        self.live_allocations += 1;

        return alloc.as_box();
    }

    /// Resizes an existing allocation; the block may move, so callers
    /// must fix up any pointers into it.
    #[inline]
    fn realloc(&mut self, alloc: *mut Box, size: uint) -> *mut Box {
        rtassert!(!alloc.is_null());
        let orig_alloc = AllocHeader::from(alloc);
        unsafe { (*orig_alloc).assert_sane(); }

        let total_size = size + AllocHeader::size();
        let alloc: *AllocHeader = unsafe {
            global_heap::realloc_raw(orig_alloc as *mut u8,
                                     total_size) as *AllocHeader
        };

        let alloc: &mut AllocHeader = unsafe { cast::transmute(alloc) };
        alloc.assert_sane();
        alloc.update_size(size as u32);
        self.update(alloc, orig_alloc as *AllocHeader);
        return alloc.as_box();
    }

    /// Releases an allocation previously handed out by `malloc` or
    /// `realloc`.
    #[inline]
    fn free(&mut self, alloc: *mut Box) {
        rtassert!(!alloc.is_null());
        let alloc = AllocHeader::from(alloc);
        unsafe {
            (*alloc).assert_sane();
            self.release(cast::transmute(alloc));
            rtassert!(self.live_allocations > 0);
            self.live_allocations -= 1;
            global_heap::exchange_free(alloc as *u8)
        }
    }

    /// rtdebug only: record a fresh header in the allocation table.
    #[cfg(rtdebug)]
    fn claim(&mut self, alloc: &mut AllocHeader) {
        alloc.assert_sane();
        if TRACK_ALLOCATIONS > 1 {
            alloc.index = self.allocations.len() as i32;
            self.allocations.push(&*alloc as *AllocHeader);
        }
    }
    #[cfg(not(rtdebug))]
    #[inline]
    fn claim(&mut self, _alloc: &mut AllocHeader) {}

    /// rtdebug only: clear a freed header's slot in the allocation table.
    #[cfg(rtdebug)]
    fn release(&mut self, alloc: &AllocHeader) {
        alloc.assert_sane();
        if TRACK_ALLOCATIONS > 1 {
            rtassert!(self.allocations.as_slice()[alloc.index] == alloc as *AllocHeader);
            self.allocations.as_mut_slice()[alloc.index] = ptr::null();
        }
    }
    #[cfg(not(rtdebug))]
    #[inline]
    fn release(&mut self, _alloc: &AllocHeader) {}

    /// rtdebug only: replace a (possibly moved) header's table entry.
    #[cfg(rtdebug)]
    fn update(&mut self, alloc: &mut AllocHeader, orig: *AllocHeader) {
        alloc.assert_sane();
        if TRACK_ALLOCATIONS > 1 {
            rtassert!(self.allocations.as_slice()[alloc.index] == orig);
            self.allocations.as_mut_slice()[alloc.index] = &*alloc as *AllocHeader;
        }
    }
    #[cfg(not(rtdebug))]
    #[inline]
    fn update(&mut self, _alloc: &mut AllocHeader, _orig: *AllocHeader) {}
}

impl Drop for MemoryRegion {
    fn drop(&mut self) {
        if self.live_allocations != 0 {
            rtabort!("leaked managed memory ({} objects)", self.live_allocations);
        }
        rtassert!(self.allocations.as_slice().iter().all(|s| s.is_null()));
    }
}

// The "malloc" lang item: `@`-box allocations compile down to calls here.
#[cfg(not(test))]
#[lang="malloc"]
#[inline]
pub unsafe fn local_malloc_(drop_glue: fn(*mut u8), size: uint, align: uint) -> *u8 {
    local_malloc(drop_glue, size, align)
}

#[inline]
pub unsafe fn local_malloc(drop_glue: fn(*mut u8), size: uint, align: uint) -> *u8 {
    // FIXME: Unsafe borrow for speed. Lame.
    let task: Option<*mut Task> = Local::try_unsafe_borrow();
    match task {
        Some(task) => {
            (*task).heap.alloc(drop_glue, size, align) as *u8
        }
        None => rtabort!("local malloc outside of task")
    }
}

// The "free" lang item, the counterpart to `local_malloc_` above.
#[cfg(not(test))]
#[lang="free"]
#[inline]
pub unsafe fn local_free_(ptr: *u8) {
    local_free(ptr)
}

// NB: Calls to free CANNOT be allowed to fail, as throwing an exception from
// inside a landing pad may corrupt the state of the exception handler. If a
// problem occurs, call exit instead.
#[inline]
pub unsafe fn local_free(ptr: *u8) {
    // FIXME: Unsafe borrow for speed. Lame.
    let task_ptr: Option<*mut Task> = Local::try_unsafe_borrow();
    match task_ptr {
        Some(task) => {
            (*task).heap.free(ptr as *mut Box)
        }
        None => rtabort!("local free outside of task")
    }
}

/// Returns the head of the current task's list of live boxes.
pub fn live_allocs() -> *mut Box {
    Local::borrow(None::<Task>).heap.live_allocs
}
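
// Illustrative traversal of that list (a sketch; the real walk over live
// boxes lives in libstd/cleanup.rs):
//
//     let mut a = live_allocs();
//     while !a.is_null() {
//         // ... inspect (*a) ...
//         a = unsafe { (*a).next };
//     }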

#[cfg(test)]
mod bench {
    extern crate test;
    use self::test::Bencher;

    #[bench]
    fn alloc_managed_small(b: &mut Bencher) {
        b.iter(|| { @10; });
    }

    #[bench]
    fn alloc_managed_big(b: &mut Bencher) {
        b.iter(|| { @([10, ..1000]); });
    }
}