./libcore/intrinsics.rs
git branch: * master 5200215 auto merge of #14035 : alexcrichton/rust/experimental, r=huonw
modified: Fri May 9 13:02:28 2014
1 // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
4 //
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
10
11 /*! rustc compiler intrinsics.
12
13 The corresponding definitions are in librustc/middle/trans/foreign.rs.
14
15 # Volatiles
16
17 The volatile intrinsics provide operations intended to act on I/O
18 memory, which are guaranteed to not be reordered by the compiler
19 across other volatile intrinsics. See the LLVM documentation on
20 [[volatile]].
21
22 [volatile]: http://llvm.org/docs/LangRef.html#volatile-memory-accesses
23
24 # Atomics
25
26 The atomic intrinsics provide common atomic operations on machine
27 words, with multiple possible memory orderings. They obey the same
28 semantics as C++11. See the LLVM documentation on [[atomics]].
29
30 [atomics]: http://llvm.org/docs/Atomics.html
31
32 A quick refresher on memory ordering:
33
34 * Acquire - a barrier for acquiring a lock. Subsequent reads and writes
35 take place after the barrier.
36 * Release - a barrier for releasing a lock. Preceding reads and writes
37 take place before the barrier.
38 * Sequentially consistent - sequentially consistent operations are
39 guaranteed to happen in order. This is the standard mode for working
40 with atomic types and is equivalent to Java's `volatile`.
41
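As a minimal sketch (assuming a word-sized counter behind a raw pointer, and
that the intrinsics declared below are in scope), a release increment paired
with an acquire load might look like this:

```rust
use core::intrinsics::{atomic_xadd_rel, atomic_load_acq};

// Increment a shared counter with release ordering, then read it back with
// acquire ordering; `atomic_xadd_rel` returns the value before the addition.
unsafe fn bump(counter: *mut uint) -> uint {
    atomic_xadd_rel(counter, 1);
    atomic_load_acq(counter as *uint)
}
```
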
42 */
43
44 #![experimental]
45 #![allow(missing_doc)]
46
47 // This is needed to prevent duplicate lang item definitions.
48 #[cfg(test)]
49 pub use realcore::intrinsics::{TyDesc, Opaque, TyVisitor, TypeId};
50
51 pub type GlueFn = extern "Rust" fn(*i8);
52
53 #[lang="ty_desc"]
54 #[cfg(not(test))]
55 pub struct TyDesc {
56 // sizeof(T)
57 pub size: uint,
58
59 // alignof(T)
60 pub align: uint,
61
62 // Called when a value of type `T` is no longer needed
63 pub drop_glue: GlueFn,
64
65 // Called by reflection visitor to visit a value of type `T`
66 pub visit_glue: GlueFn,
67
68 // Name corresponding to the type
69 pub name: &'static str,
70 }
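
// A sketch of how these fields relate to the intrinsics declared below
// (`get_tydesc`, `size_of`, `min_align_of`); illustrative only:
//
//     unsafe {
//         let td: *TyDesc = get_tydesc::<f64>();
//         assert_eq!((*td).size, size_of::<f64>());
//         assert_eq!((*td).align, min_align_of::<f64>());
//     }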
71
72 #[lang="opaque"]
73 #[cfg(not(test))]
74 pub enum Opaque { }
75
76 pub type Disr = u64;
77
78 #[lang="ty_visitor"]
79 #[cfg(not(test))]
80 pub trait TyVisitor {
81 fn visit_bot(&mut self) -> bool;
82 fn visit_nil(&mut self) -> bool;
83 fn visit_bool(&mut self) -> bool;
84
85 fn visit_int(&mut self) -> bool;
86 fn visit_i8(&mut self) -> bool;
87 fn visit_i16(&mut self) -> bool;
88 fn visit_i32(&mut self) -> bool;
89 fn visit_i64(&mut self) -> bool;
90
91 fn visit_uint(&mut self) -> bool;
92 fn visit_u8(&mut self) -> bool;
93 fn visit_u16(&mut self) -> bool;
94 fn visit_u32(&mut self) -> bool;
95 fn visit_u64(&mut self) -> bool;
96
97 fn visit_f32(&mut self) -> bool;
98 fn visit_f64(&mut self) -> bool;
99 fn visit_f128(&mut self) -> bool;
100
101 fn visit_char(&mut self) -> bool;
102
103 fn visit_estr_box(&mut self) -> bool;
104 fn visit_estr_uniq(&mut self) -> bool;
105 fn visit_estr_slice(&mut self) -> bool;
106 fn visit_estr_fixed(&mut self, n: uint, sz: uint, align: uint) -> bool;
107
108 fn visit_box(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
109 fn visit_uniq(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
110 fn visit_ptr(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
111 fn visit_rptr(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
112
113 fn visit_evec_box(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
114 fn visit_evec_uniq(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
115 fn visit_evec_slice(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
116 fn visit_evec_fixed(&mut self, n: uint, sz: uint, align: uint,
117 mtbl: uint, inner: *TyDesc) -> bool;
118
119 fn visit_enter_rec(&mut self, n_fields: uint,
120 sz: uint, align: uint) -> bool;
121 fn visit_rec_field(&mut self, i: uint, name: &str,
122 mtbl: uint, inner: *TyDesc) -> bool;
123 fn visit_leave_rec(&mut self, n_fields: uint,
124 sz: uint, align: uint) -> bool;
125
126 fn visit_enter_class(&mut self, name: &str, named_fields: bool, n_fields: uint,
127 sz: uint, align: uint) -> bool;
128 fn visit_class_field(&mut self, i: uint, name: &str, named: bool,
129 mtbl: uint, inner: *TyDesc) -> bool;
130 fn visit_leave_class(&mut self, name: &str, named_fields: bool, n_fields: uint,
131 sz: uint, align: uint) -> bool;
132
133 fn visit_enter_tup(&mut self, n_fields: uint,
134 sz: uint, align: uint) -> bool;
135 fn visit_tup_field(&mut self, i: uint, inner: *TyDesc) -> bool;
136 fn visit_leave_tup(&mut self, n_fields: uint,
137 sz: uint, align: uint) -> bool;
138
139 fn visit_enter_enum(&mut self, n_variants: uint,
140 get_disr: extern unsafe fn(ptr: *Opaque) -> Disr,
141 sz: uint, align: uint) -> bool;
142 fn visit_enter_enum_variant(&mut self, variant: uint,
143 disr_val: Disr,
144 n_fields: uint,
145 name: &str) -> bool;
146 fn visit_enum_variant_field(&mut self, i: uint, offset: uint, inner: *TyDesc) -> bool;
147 fn visit_leave_enum_variant(&mut self, variant: uint,
148 disr_val: Disr,
149 n_fields: uint,
150 name: &str) -> bool;
151 fn visit_leave_enum(&mut self, n_variants: uint,
152 get_disr: extern unsafe fn(ptr: *Opaque) -> Disr,
153 sz: uint, align: uint) -> bool;
154
155 fn visit_enter_fn(&mut self, purity: uint, proto: uint,
156 n_inputs: uint, retstyle: uint) -> bool;
157 fn visit_fn_input(&mut self, i: uint, mode: uint, inner: *TyDesc) -> bool;
158 fn visit_fn_output(&mut self, retstyle: uint, variadic: bool, inner: *TyDesc) -> bool;
159 fn visit_leave_fn(&mut self, purity: uint, proto: uint,
160 n_inputs: uint, retstyle: uint) -> bool;
161
162 fn visit_trait(&mut self, name: &str) -> bool;
163 fn visit_param(&mut self, i: uint) -> bool;
164 fn visit_self(&mut self) -> bool;
165 }
166
167 extern "rust-intrinsic" {
168
169 // NB: These intrinsics take unsafe pointers because they mutate aliased
170 // memory, which is not valid for either `&` or `&mut`.
171
172 pub fn atomic_cxchg<T>(dst: *mut T, old: T, src: T) -> T;
173 pub fn atomic_cxchg_acq<T>(dst: *mut T, old: T, src: T) -> T;
174 pub fn atomic_cxchg_rel<T>(dst: *mut T, old: T, src: T) -> T;
175 pub fn atomic_cxchg_acqrel<T>(dst: *mut T, old: T, src: T) -> T;
176 pub fn atomic_cxchg_relaxed<T>(dst: *mut T, old: T, src: T) -> T;
177
178 pub fn atomic_load<T>(src: *T) -> T;
179 pub fn atomic_load_acq<T>(src: *T) -> T;
180 pub fn atomic_load_relaxed<T>(src: *T) -> T;
181
182 pub fn atomic_store<T>(dst: *mut T, val: T);
183 pub fn atomic_store_rel<T>(dst: *mut T, val: T);
184 pub fn atomic_store_relaxed<T>(dst: *mut T, val: T);
185
186 pub fn atomic_xchg<T>(dst: *mut T, src: T) -> T;
187 pub fn atomic_xchg_acq<T>(dst: *mut T, src: T) -> T;
188 pub fn atomic_xchg_rel<T>(dst: *mut T, src: T) -> T;
189 pub fn atomic_xchg_acqrel<T>(dst: *mut T, src: T) -> T;
190 pub fn atomic_xchg_relaxed<T>(dst: *mut T, src: T) -> T;
191
192 pub fn atomic_xadd<T>(dst: *mut T, src: T) -> T;
193 pub fn atomic_xadd_acq<T>(dst: *mut T, src: T) -> T;
194 pub fn atomic_xadd_rel<T>(dst: *mut T, src: T) -> T;
195 pub fn atomic_xadd_acqrel<T>(dst: *mut T, src: T) -> T;
196 pub fn atomic_xadd_relaxed<T>(dst: *mut T, src: T) -> T;
197
198 pub fn atomic_xsub<T>(dst: *mut T, src: T) -> T;
199 pub fn atomic_xsub_acq<T>(dst: *mut T, src: T) -> T;
200 pub fn atomic_xsub_rel<T>(dst: *mut T, src: T) -> T;
201 pub fn atomic_xsub_acqrel<T>(dst: *mut T, src: T) -> T;
202 pub fn atomic_xsub_relaxed<T>(dst: *mut T, src: T) -> T;
203
204 pub fn atomic_and<T>(dst: *mut T, src: T) -> T;
205 pub fn atomic_and_acq<T>(dst: *mut T, src: T) -> T;
206 pub fn atomic_and_rel<T>(dst: *mut T, src: T) -> T;
207 pub fn atomic_and_acqrel<T>(dst: *mut T, src: T) -> T;
208 pub fn atomic_and_relaxed<T>(dst: *mut T, src: T) -> T;
209
210 pub fn atomic_nand<T>(dst: *mut T, src: T) -> T;
211 pub fn atomic_nand_acq<T>(dst: *mut T, src: T) -> T;
212 pub fn atomic_nand_rel<T>(dst: *mut T, src: T) -> T;
213 pub fn atomic_nand_acqrel<T>(dst: *mut T, src: T) -> T;
214 pub fn atomic_nand_relaxed<T>(dst: *mut T, src: T) -> T;
215
216 pub fn atomic_or<T>(dst: *mut T, src: T) -> T;
217 pub fn atomic_or_acq<T>(dst: *mut T, src: T) -> T;
218 pub fn atomic_or_rel<T>(dst: *mut T, src: T) -> T;
219 pub fn atomic_or_acqrel<T>(dst: *mut T, src: T) -> T;
220 pub fn atomic_or_relaxed<T>(dst: *mut T, src: T) -> T;
221
222 pub fn atomic_xor<T>(dst: *mut T, src: T) -> T;
223 pub fn atomic_xor_acq<T>(dst: *mut T, src: T) -> T;
224 pub fn atomic_xor_rel<T>(dst: *mut T, src: T) -> T;
225 pub fn atomic_xor_acqrel<T>(dst: *mut T, src: T) -> T;
226 pub fn atomic_xor_relaxed<T>(dst: *mut T, src: T) -> T;
227
228 pub fn atomic_max<T>(dst: *mut T, src: T) -> T;
229 pub fn atomic_max_acq<T>(dst: *mut T, src: T) -> T;
230 pub fn atomic_max_rel<T>(dst: *mut T, src: T) -> T;
231 pub fn atomic_max_acqrel<T>(dst: *mut T, src: T) -> T;
232 pub fn atomic_max_relaxed<T>(dst: *mut T, src: T) -> T;
233
234 pub fn atomic_min<T>(dst: *mut T, src: T) -> T;
235 pub fn atomic_min_acq<T>(dst: *mut T, src: T) -> T;
236 pub fn atomic_min_rel<T>(dst: *mut T, src: T) -> T;
237 pub fn atomic_min_acqrel<T>(dst: *mut T, src: T) -> T;
238 pub fn atomic_min_relaxed<T>(dst: *mut T, src: T) -> T;
239
240 pub fn atomic_umin<T>(dst: *mut T, src: T) -> T;
241 pub fn atomic_umin_acq<T>(dst: *mut T, src: T) -> T;
242 pub fn atomic_umin_rel<T>(dst: *mut T, src: T) -> T;
243 pub fn atomic_umin_acqrel<T>(dst: *mut T, src: T) -> T;
244 pub fn atomic_umin_relaxed<T>(dst: *mut T, src: T) -> T;
245
246 pub fn atomic_umax<T>(dst: *mut T, src: T) -> T;
247 pub fn atomic_umax_acq<T>(dst: *mut T, src: T) -> T;
248 pub fn atomic_umax_rel<T>(dst: *mut T, src: T) -> T;
249 pub fn atomic_umax_acqrel<T>(dst: *mut T, src: T) -> T;
250 pub fn atomic_umax_relaxed<T>(dst: *mut T, src: T) -> T;
251 }
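
// A sketch of a compare-and-swap retry loop built from `atomic_cxchg` with
// the default (sequentially consistent) ordering. The intrinsic returns the
// value actually observed at `dst`, so the loop retries until that
// observation matches the expected value:
//
//     unsafe fn add_via_cas(dst: *mut uint, n: uint) -> uint {
//         loop {
//             let old = atomic_load(dst as *uint);
//             let seen = atomic_cxchg(dst, old, old + n);
//             if seen == old { return old; }
//         }
//     }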
252
253 extern "rust-intrinsic" {
254
255 pub fn atomic_fence();
256 pub fn atomic_fence_acq();
257 pub fn atomic_fence_rel();
258 pub fn atomic_fence_acqrel();
259
260 /// Abort the execution of the process.
261 pub fn abort() -> !;
262
263 /// Execute a breakpoint trap, for inspection by a debugger.
264 pub fn breakpoint();
265
266 /// The size of a type in bytes.
267 ///
268 /// This is the exact number of bytes in memory taken up by a
269 /// value of the given type. In other words, a memset of this size
270 /// would *exactly* overwrite a value. When laid out in vectors
271 /// and structures there may be additional padding between
272 /// elements.
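///
/// A sketch of the padding caveat (the concrete numbers assume a typical
/// 64-bit target and are not guaranteed):
///
/// ```rust
/// struct Padded { a: u8, b: u64 }
/// // The single byte of `a` is followed by 7 bytes of padding so that `b`
/// // is 8-byte aligned.
/// let n = unsafe { size_of::<Padded>() }; // 16 on such a target, not 9
/// ```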
273 pub fn size_of<T>() -> uint;
274
275 /// Move a value to an uninitialized memory location.
276 ///
277 /// Drop glue is not run on the destination.
278 pub fn move_val_init<T>(dst: &mut T, src: T);
279
280 pub fn min_align_of<T>() -> uint;
281 pub fn pref_align_of<T>() -> uint;
282
283 /// Get a static pointer to a type descriptor.
284 pub fn get_tydesc<T>() -> *TyDesc;
285
286 /// Gets an identifier which is globally unique to the specified type. This
287 /// function will return the same value for a type regardless of which
288 /// crate it is invoked in.
289 pub fn type_id<T: 'static>() -> TypeId;
290
291
292 /// Create a value initialized to zero.
293 ///
294 /// `init` is unsafe because it returns a zeroed-out datum,
295 /// which is unsafe unless T is Copy.
296 pub fn init<T>() -> T;
297
298 /// Create an uninitialized value.
299 pub fn uninit<T>() -> T;
300
301 /// Move a value out of scope without running drop glue.
302 ///
303 /// `forget` is unsafe because the caller is responsible for
304 /// ensuring the argument is deallocated already.
305 pub fn forget<T>(_: T) -> ();
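/// Unsafely transmutes a value of one type into a value of another type of
/// the same size, reinterpreting the underlying bits.
///
/// A minimal sketch (assuming IEEE-754 `f32`, whose `1.0` has the bit
/// pattern `0x3f800000`):
///
/// ```rust
/// let bits: u32 = unsafe { transmute::<f32, u32>(1.0f32) };
/// assert_eq!(bits, 0x3f800000);
/// ```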
306 pub fn transmute<T,U>(e: T) -> U;
307
308 /// Returns `true` if a type requires drop glue.
309 pub fn needs_drop<T>() -> bool;
310
311 /// Returns `true` if a type is managed (will be allocated on the local heap).
312 pub fn owns_managed<T>() -> bool;
313
314 pub fn visit_tydesc(td: *TyDesc, tv: &mut TyVisitor);
315
316 /// Calculates the offset from a pointer. The offset *must* be in-bounds of
317 /// the object, or one-byte-past-the-end. An arithmetic overflow is also
318 /// undefined behaviour.
319 ///
320 /// This is implemented as an intrinsic to avoid converting to and from an
321 /// integer, since the conversion would throw away aliasing information.
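///
/// A sketch of in-bounds use over a fixed-size array:
///
/// ```rust
/// let xs: [int, ..3] = [1, 2, 3];
/// let base = &xs[0] as *int;
/// unsafe {
///     // Points at xs[2], still inside the same object.
///     assert_eq!(*offset(base, 2), 3);
/// }
/// ```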
322 pub fn offset<T>(dst: *T, offset: int) -> *T;
323
324 /// Equivalent to the appropriate `llvm.memcpy.p0i8.p0i8.*` intrinsic, with
325 /// a size of `count` * `size_of::<T>()` and an alignment of
326 /// `min_align_of::<T>()`
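///
/// A minimal sketch, copying a single value into a second location:
///
/// ```rust
/// let src: int = 7;
/// let mut dst: int = 0;
/// unsafe { copy_nonoverlapping_memory(&mut dst as *mut int, &src as *int, 1); }
/// assert_eq!(dst, 7);
/// ```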
327 pub fn copy_nonoverlapping_memory<T>(dst: *mut T, src: *T, count: uint);
328
329 /// Equivalent to the appropriate `llvm.memmove.p0i8.p0i8.*` intrinsic, with
330 /// a size of `count` * `size_of::<T>()` and an alignment of
331 /// `min_align_of::<T>()`
332 pub fn copy_memory<T>(dst: *mut T, src: *T, count: uint);
333
334 /// Equivalent to the appropriate `llvm.memset.p0i8.*` intrinsic, with a
335 /// size of `count` * `size_of::<T>()` and an alignment of
336 /// `min_align_of::<T>()`
337 pub fn set_memory<T>(dst: *mut T, val: u8, count: uint);
338
339 /// Equivalent to the appropriate `llvm.memcpy.p0i8.p0i8.*` intrinsic, with
340 /// a size of `count` * `size_of::<T>()` and an alignment of
341 /// `min_align_of::<T>()`
342 ///
343 /// The volatile parameter is set to `true`, so it will not be optimized out.
344 pub fn volatile_copy_nonoverlapping_memory<T>(dst: *mut T, src: *T, count: uint);
345 /// Equivalent to the appropriate `llvm.memmove.p0i8.p0i8.*` intrinsic, with
346 /// a size of `count` * `size_of::<T>()` and an alignment of
347 /// `min_align_of::<T>()`
348 ///
349 /// The volatile parameter is set to `true`, so it will not be optimized out.
350 pub fn volatile_copy_memory<T>(dst: *mut T, src: *T, count: uint);
351 /// Equivalent to the appropriate `llvm.memset.p0i8.*` intrinsic, with a
352 /// size of `count` * `size_of::<T>()` and an alignment of
353 /// `min_align_of::<T>()`.
354 ///
355 /// The volatile parameter is set to `true`, so it will not be optimized out.
356 pub fn volatile_set_memory<T>(dst: *mut T, val: u8, count: uint);
357
358 /// Perform a volatile load from the `src` pointer.
359 pub fn volatile_load<T>(src: *T) -> T;
360 /// Perform a volatile store to the `dst` pointer.
361 pub fn volatile_store<T>(dst: *mut T, val: T);
362
363 pub fn sqrtf32(x: f32) -> f32;
364 pub fn sqrtf64(x: f64) -> f64;
365
366 pub fn powif32(a: f32, x: i32) -> f32;
367 pub fn powif64(a: f64, x: i32) -> f64;
368
369 pub fn sinf32(x: f32) -> f32;
370 pub fn sinf64(x: f64) -> f64;
371
372 pub fn cosf32(x: f32) -> f32;
373 pub fn cosf64(x: f64) -> f64;
374
375 pub fn powf32(a: f32, x: f32) -> f32;
376 pub fn powf64(a: f64, x: f64) -> f64;
377
378 pub fn expf32(x: f32) -> f32;
379 pub fn expf64(x: f64) -> f64;
380
381 pub fn exp2f32(x: f32) -> f32;
382 pub fn exp2f64(x: f64) -> f64;
383
384 pub fn logf32(x: f32) -> f32;
385 pub fn logf64(x: f64) -> f64;
386
387 pub fn log10f32(x: f32) -> f32;
388 pub fn log10f64(x: f64) -> f64;
389
390 pub fn log2f32(x: f32) -> f32;
391 pub fn log2f64(x: f64) -> f64;
392
393 pub fn fmaf32(a: f32, b: f32, c: f32) -> f32;
394 pub fn fmaf64(a: f64, b: f64, c: f64) -> f64;
395
396 pub fn fabsf32(x: f32) -> f32;
397 pub fn fabsf64(x: f64) -> f64;
398
399 pub fn copysignf32(x: f32, y: f32) -> f32;
400 pub fn copysignf64(x: f64, y: f64) -> f64;
401
402 pub fn floorf32(x: f32) -> f32;
403 pub fn floorf64(x: f64) -> f64;
404
405 pub fn ceilf32(x: f32) -> f32;
406 pub fn ceilf64(x: f64) -> f64;
407
408 pub fn truncf32(x: f32) -> f32;
409 pub fn truncf64(x: f64) -> f64;
410
411 pub fn rintf32(x: f32) -> f32;
412 pub fn rintf64(x: f64) -> f64;
413
414 pub fn nearbyintf32(x: f32) -> f32;
415 pub fn nearbyintf64(x: f64) -> f64;
416
417 pub fn roundf32(x: f32) -> f32;
418 pub fn roundf64(x: f64) -> f64;
419
420 pub fn ctpop8(x: u8) -> u8;
421 pub fn ctpop16(x: u16) -> u16;
422 pub fn ctpop32(x: u32) -> u32;
423 pub fn ctpop64(x: u64) -> u64;
424
425 pub fn ctlz8(x: u8) -> u8;
426 pub fn ctlz16(x: u16) -> u16;
427 pub fn ctlz32(x: u32) -> u32;
428 pub fn ctlz64(x: u64) -> u64;
429
430 pub fn cttz8(x: u8) -> u8;
431 pub fn cttz16(x: u16) -> u16;
432 pub fn cttz32(x: u32) -> u32;
433 pub fn cttz64(x: u64) -> u64;
434
435 pub fn bswap16(x: u16) -> u16;
436 pub fn bswap32(x: u32) -> u32;
437 pub fn bswap64(x: u64) -> u64;
438
439 pub fn i8_add_with_overflow(x: i8, y: i8) -> (i8, bool);
440 pub fn i16_add_with_overflow(x: i16, y: i16) -> (i16, bool);
441 pub fn i32_add_with_overflow(x: i32, y: i32) -> (i32, bool);
442 pub fn i64_add_with_overflow(x: i64, y: i64) -> (i64, bool);
443
444 pub fn u8_add_with_overflow(x: u8, y: u8) -> (u8, bool);
445 pub fn u16_add_with_overflow(x: u16, y: u16) -> (u16, bool);
446 pub fn u32_add_with_overflow(x: u32, y: u32) -> (u32, bool);
447 pub fn u64_add_with_overflow(x: u64, y: u64) -> (u64, bool);
448
449 pub fn i8_sub_with_overflow(x: i8, y: i8) -> (i8, bool);
450 pub fn i16_sub_with_overflow(x: i16, y: i16) -> (i16, bool);
451 pub fn i32_sub_with_overflow(x: i32, y: i32) -> (i32, bool);
452 pub fn i64_sub_with_overflow(x: i64, y: i64) -> (i64, bool);
453
454 pub fn u8_sub_with_overflow(x: u8, y: u8) -> (u8, bool);
455 pub fn u16_sub_with_overflow(x: u16, y: u16) -> (u16, bool);
456 pub fn u32_sub_with_overflow(x: u32, y: u32) -> (u32, bool);
457 pub fn u64_sub_with_overflow(x: u64, y: u64) -> (u64, bool);
458
459 pub fn i8_mul_with_overflow(x: i8, y: i8) -> (i8, bool);
460 pub fn i16_mul_with_overflow(x: i16, y: i16) -> (i16, bool);
461 pub fn i32_mul_with_overflow(x: i32, y: i32) -> (i32, bool);
462 pub fn i64_mul_with_overflow(x: i64, y: i64) -> (i64, bool);
463
464 pub fn u8_mul_with_overflow(x: u8, y: u8) -> (u8, bool);
465 pub fn u16_mul_with_overflow(x: u16, y: u16) -> (u16, bool);
466 pub fn u32_mul_with_overflow(x: u32, y: u32) -> (u32, bool);
467 pub fn u64_mul_with_overflow(x: u64, y: u64) -> (u64, bool);
468 }
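
// A sketch of a checked addition built on one of the overflow intrinsics
// above; the `bool` in the returned pair is `true` when the arithmetic
// wrapped around:
//
//     fn checked_add_u32(x: u32, y: u32) -> Option<u32> {
//         let (sum, overflowed) = unsafe { u32_add_with_overflow(x, y) };
//         if overflowed { None } else { Some(sum) }
//     }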
469
470
471 /// `TypeId` represents a globally unique identifier for a type
472 #[lang="type_id"] // This needs to be kept in lockstep with the code in trans/intrinsic.rs and
473 // middle/lang_items.rs
474 #[deriving(Eq, TotalEq)]
475 #[cfg(not(test))]
476 pub struct TypeId {
477 t: u64,
478 }
479
480 #[cfg(not(test))]
481 impl TypeId {
482 /// Returns the `TypeId` of the type this generic function has been instantiated with
483 pub fn of<T: 'static>() -> TypeId {
484 unsafe { type_id::<T>() }
485 }
486 pub fn hash(&self) -> u64 { self.t }
487 }
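
// A sketch of how `TypeId` behaves: two instantiations with the same type
// compare equal, while distinct types yield distinct identifiers.
//
//     let a = TypeId::of::<int>();
//     let b = TypeId::of::<int>();
//     let c = TypeId::of::<uint>();
//     assert!(a == b);
//     assert!(a != c);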
libcore/intrinsics.rs:75:1-75:1 -NK_AS_STR_TODO- definition:
pub type Disr = u64;
pub trait TyVisitor {
fn visit_bot(&mut self) -> bool;
references:- 4
142: fn visit_enter_enum_variant(&mut self, variant: uint,
143: disr_val: Disr,
144: n_fields: uint,
--
151: fn visit_leave_enum(&mut self, n_variants: uint,
152: get_disr: extern unsafe fn(ptr: *Opaque) -> Disr,
153: sz: uint, align: uint) -> bool;
libcore/intrinsics.rs:475:18-475:18 -struct- definition:
pub struct TypeId {
t: u64,
}
references:- 14
473: // middle/lang_items.rs
--
482: /// Returns the `TypeId` of the type this generic function has been instantiated with
483: pub fn of<T: 'static>() -> TypeId {
484: unsafe { type_id::<T>() }
libcore/any.rs:
46: /// Get the `TypeId` of `self`
47: fn get_type_id(&self) -> TypeId {
48: TypeId::of::<T>()
libcore/intrinsics.rs:54:18-54:18 -struct- definition:
pub struct TyDesc {
// sizeof(T)
pub size: uint,
references:- 16
113: fn visit_evec_box(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
114: fn visit_evec_uniq(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
115: fn visit_evec_slice(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
116: fn visit_evec_fixed(&mut self, n: uint, sz: uint, align: uint,
117: mtbl: uint, inner: *TyDesc) -> bool;
--
283: /// Get a static pointer to a type descriptor.
284: pub fn get_tydesc<T>() -> *TyDesc;
--
314: pub fn visit_tydesc(td: *TyDesc, tv: &mut TyVisitor);
libcore/intrinsics.rs:50:1-50:1 -NK_AS_STR_TODO- definition:
pub type GlueFn = extern "Rust" fn(*i8);
pub struct TyDesc {
// sizeof(T)
references:- 2
65: // Called by reflection visitor to visit a value of type `T`
66: pub visit_glue: GlueFn,
libcore/intrinsics.rs:73:18-73:18 -enum- definition:
pub enum Opaque { }
pub type Disr = u64;
pub trait TyVisitor {
references:- 2
151: fn visit_leave_enum(&mut self, n_variants: uint,
152: get_disr: extern unsafe fn(ptr: *Opaque) -> Disr,
153: sz: uint, align: uint) -> bool;