// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

#![allow(dead_code)] // FFI wrappers

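// Free-function wrappers over `Builder`, named after the LLVM instructions
// they emit. Each call goes through `B`, which positions the builder at the
// end of the given block; most wrappers also consult the block's
// terminated/unreachable state first (see the comment below on the
// distinction between the two).
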
use lib::llvm::llvm;
use lib::llvm::{CallConv, AtomicBinOp, AtomicOrdering, AsmDialect};
use lib::llvm::{Opcode, IntPredicate, RealPredicate};
use lib::llvm::{ValueRef, BasicBlockRef};
use lib;
use middle::trans::common::*;
use syntax::codemap::Span;

use middle::trans::builder::Builder;
use middle::trans::type_::Type;

use libc::{c_uint, c_ulonglong, c_char};

pub fn terminate(cx: &Block, _: &str) {
    debug!("terminate({})", cx.to_str());
    cx.terminated.set(true);
}

pub fn check_not_terminated(cx: &Block) {
    if cx.terminated.get() {
        fail!("already terminated!");
    }
}

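// Positions the context's builder at the end of `cx`'s basic block, so the
// next instruction emitted through the returned `Builder` lands there.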
pub fn B<'a>(cx: &'a Block) -> Builder<'a> {
    let b = cx.fcx.ccx.builder();
    b.position_at_end(cx.llbb);
    b
}

// The difference between a block being unreachable and being terminated is
// somewhat obscure, and has to do with error checking. When a block is
// terminated, we're saying that trying to add any further statements in the
// block is an error. On the other hand, if something is unreachable, that
// means that the block was terminated in some way that we don't want to check
// for (fail/break/return statements, calls to diverging functions, etc), and
// further instructions added to the block should simply be ignored.
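//
// Every wrapper below therefore follows the same guard pattern; a minimal
// sketch, using a hypothetical instruction `Foo`:
//
//     pub fn Foo(cx: &Block, v: ValueRef) -> ValueRef {
//         if cx.unreachable.get() { return _Undef(v); } // ignore silently
//         check_not_terminated(cx);                     // adding here is a bug
//         terminate(cx, "Foo");                         // terminators only
//         B(cx).foo(v)
//     }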

pub fn RetVoid(cx: &Block) {
    if cx.unreachable.get() { return; }
    check_not_terminated(cx);
    terminate(cx, "RetVoid");
    B(cx).ret_void();
}

pub fn Ret(cx: &Block, v: ValueRef) {
    if cx.unreachable.get() { return; }
    check_not_terminated(cx);
    terminate(cx, "Ret");
    B(cx).ret(v);
}

pub fn AggregateRet(cx: &Block, ret_vals: &[ValueRef]) {
    if cx.unreachable.get() { return; }
    check_not_terminated(cx);
    terminate(cx, "AggregateRet");
    B(cx).aggregate_ret(ret_vals);
}

pub fn Br(cx: &Block, dest: BasicBlockRef) {
    if cx.unreachable.get() { return; }
    check_not_terminated(cx);
    terminate(cx, "Br");
    B(cx).br(dest);
}

pub fn CondBr(cx: &Block,
              if_: ValueRef,
              then: BasicBlockRef,
              else_: BasicBlockRef) {
    if cx.unreachable.get() { return; }
    check_not_terminated(cx);
    terminate(cx, "CondBr");
    B(cx).cond_br(if_, then, else_);
}

pub fn Switch(cx: &Block, v: ValueRef, else_: BasicBlockRef, num_cases: uint)
              -> ValueRef {
    if cx.unreachable.get() { return _Undef(v); }
    check_not_terminated(cx);
    terminate(cx, "Switch");
    B(cx).switch(v, else_, num_cases)
}

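// `Switch` returns an undef value when the block is unreachable, so `AddCase`
// (and `AddIncomingToPhi` below) must be prepared to be handed one.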
pub fn AddCase(s: ValueRef, on_val: ValueRef, dest: BasicBlockRef) {
    unsafe {
        if llvm::LLVMIsUndef(s) == lib::llvm::True { return; }
        llvm::LLVMAddCase(s, on_val, dest);
    }
}

pub fn IndirectBr(cx: &Block, addr: ValueRef, num_dests: uint) {
    if cx.unreachable.get() { return; }
    check_not_terminated(cx);
    terminate(cx, "IndirectBr");
    B(cx).indirect_br(addr, num_dests);
}

pub fn Invoke(cx: &Block,
              fn_: ValueRef,
              args: &[ValueRef],
              then: BasicBlockRef,
              catch: BasicBlockRef,
              attributes: &[(uint, lib::llvm::Attribute)])
              -> ValueRef {
    if cx.unreachable.get() {
        return C_null(Type::i8(cx.ccx()));
    }
    check_not_terminated(cx);
    terminate(cx, "Invoke");
    debug!("Invoke({} with arguments ({}))",
           cx.val_to_str(fn_),
           args.iter().map(|a| cx.val_to_str(*a)).collect::<Vec<~str>>().connect(", "));
    B(cx).invoke(fn_, args, then, catch, attributes)
}

pub fn Unreachable(cx: &Block) {
    if cx.unreachable.get() {
        return
    }
    cx.unreachable.set(true);
    if !cx.terminated.get() {
        B(cx).unreachable();
    }
}

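// Returns an undef value of `val`'s type; this is the placeholder the
// wrappers hand back when asked for a result in an unreachable block.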
pub fn _Undef(val: ValueRef) -> ValueRef {
    unsafe {
        return llvm::LLVMGetUndef(val_ty(val).to_ref());
    }
}

/* Arithmetic */
pub fn Add(cx: &Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
    if cx.unreachable.get() { return _Undef(lhs); }
    B(cx).add(lhs, rhs)
}

pub fn NSWAdd(cx: &Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
    if cx.unreachable.get() { return _Undef(lhs); }
    B(cx).nswadd(lhs, rhs)
}

pub fn NUWAdd(cx: &Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
    if cx.unreachable.get() { return _Undef(lhs); }
    B(cx).nuwadd(lhs, rhs)
}

pub fn FAdd(cx: &Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
    if cx.unreachable.get() { return _Undef(lhs); }
    B(cx).fadd(lhs, rhs)
}

pub fn Sub(cx: &Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
    if cx.unreachable.get() { return _Undef(lhs); }
    B(cx).sub(lhs, rhs)
}

pub fn NSWSub(cx: &Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
    if cx.unreachable.get() { return _Undef(lhs); }
    B(cx).nswsub(lhs, rhs)
}

pub fn NUWSub(cx: &Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
    if cx.unreachable.get() { return _Undef(lhs); }
    B(cx).nuwsub(lhs, rhs)
}

pub fn FSub(cx: &Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
    if cx.unreachable.get() { return _Undef(lhs); }
    B(cx).fsub(lhs, rhs)
}

pub fn Mul(cx: &Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
    if cx.unreachable.get() { return _Undef(lhs); }
    B(cx).mul(lhs, rhs)
}

pub fn NSWMul(cx: &Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
    if cx.unreachable.get() { return _Undef(lhs); }
    B(cx).nswmul(lhs, rhs)
}

pub fn NUWMul(cx: &Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
    if cx.unreachable.get() { return _Undef(lhs); }
    B(cx).nuwmul(lhs, rhs)
}

pub fn FMul(cx: &Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
    if cx.unreachable.get() { return _Undef(lhs); }
    B(cx).fmul(lhs, rhs)
}

pub fn UDiv(cx: &Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
    if cx.unreachable.get() { return _Undef(lhs); }
    B(cx).udiv(lhs, rhs)
}

pub fn SDiv(cx: &Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
    if cx.unreachable.get() { return _Undef(lhs); }
    B(cx).sdiv(lhs, rhs)
}

pub fn ExactSDiv(cx: &Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
    if cx.unreachable.get() { return _Undef(lhs); }
    B(cx).exactsdiv(lhs, rhs)
}

pub fn FDiv(cx: &Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
    if cx.unreachable.get() { return _Undef(lhs); }
    B(cx).fdiv(lhs, rhs)
}

pub fn URem(cx: &Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
    if cx.unreachable.get() { return _Undef(lhs); }
    B(cx).urem(lhs, rhs)
}

pub fn SRem(cx: &Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
    if cx.unreachable.get() { return _Undef(lhs); }
    B(cx).srem(lhs, rhs)
}

pub fn FRem(cx: &Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
    if cx.unreachable.get() { return _Undef(lhs); }
    B(cx).frem(lhs, rhs)
}

pub fn Shl(cx: &Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
    if cx.unreachable.get() { return _Undef(lhs); }
    B(cx).shl(lhs, rhs)
}

pub fn LShr(cx: &Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
    if cx.unreachable.get() { return _Undef(lhs); }
    B(cx).lshr(lhs, rhs)
}

pub fn AShr(cx: &Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
    if cx.unreachable.get() { return _Undef(lhs); }
    B(cx).ashr(lhs, rhs)
}

pub fn And(cx: &Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
    if cx.unreachable.get() { return _Undef(lhs); }
    B(cx).and(lhs, rhs)
}

pub fn Or(cx: &Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
    if cx.unreachable.get() { return _Undef(lhs); }
    B(cx).or(lhs, rhs)
}

pub fn Xor(cx: &Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
    if cx.unreachable.get() { return _Undef(lhs); }
    B(cx).xor(lhs, rhs)
}

pub fn BinOp(cx: &Block, op: Opcode, lhs: ValueRef, rhs: ValueRef)
             -> ValueRef {
    if cx.unreachable.get() { return _Undef(lhs); }
    B(cx).binop(op, lhs, rhs)
}

pub fn Neg(cx: &Block, v: ValueRef) -> ValueRef {
    if cx.unreachable.get() { return _Undef(v); }
    B(cx).neg(v)
}

pub fn NSWNeg(cx: &Block, v: ValueRef) -> ValueRef {
    if cx.unreachable.get() { return _Undef(v); }
    B(cx).nswneg(v)
}

pub fn NUWNeg(cx: &Block, v: ValueRef) -> ValueRef {
    if cx.unreachable.get() { return _Undef(v); }
    B(cx).nuwneg(v)
}

pub fn FNeg(cx: &Block, v: ValueRef) -> ValueRef {
    if cx.unreachable.get() { return _Undef(v); }
    B(cx).fneg(v)
}

pub fn Not(cx: &Block, v: ValueRef) -> ValueRef {
    if cx.unreachable.get() { return _Undef(v); }
    B(cx).not(v)
}

/* Memory */
pub fn Malloc(cx: &Block, ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::i8p(cx.ccx()).to_ref());
        }
        B(cx).malloc(ty)
    }
}

pub fn ArrayMalloc(cx: &Block, ty: Type, val: ValueRef) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::i8p(cx.ccx()).to_ref());
        }
        B(cx).array_malloc(ty, val)
    }
}

pub fn Alloca(cx: &Block, ty: Type, name: &str) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(ty.ptr_to().to_ref()); }
        AllocaFcx(cx.fcx, ty, name)
    }
}

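// Allocas are positioned at the function's alloca insertion point (in the
// entry block) rather than at the builder's current position, so stack slots
// are created once at function entry rather than, say, on every iteration of
// a loop.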
pub fn AllocaFcx(fcx: &FunctionContext, ty: Type, name: &str) -> ValueRef {
    let b = fcx.ccx.builder();
    b.position_before(fcx.alloca_insert_pt.get().unwrap());
    b.alloca(ty, name)
}

pub fn ArrayAlloca(cx: &Block, ty: Type, val: ValueRef) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(ty.ptr_to().to_ref()); }
        let b = cx.fcx.ccx.builder();
        b.position_before(cx.fcx.alloca_insert_pt.get().unwrap());
        b.array_alloca(ty, val)
    }
}

pub fn Free(cx: &Block, pointer_val: ValueRef) {
    if cx.unreachable.get() { return; }
    B(cx).free(pointer_val)
}

pub fn Load(cx: &Block, pointer_val: ValueRef) -> ValueRef {
    unsafe {
        let ccx = cx.fcx.ccx;
        if cx.unreachable.get() {
            let ty = val_ty(pointer_val);
            let eltty = if ty.kind() == lib::llvm::Array {
                ty.element_type()
            } else {
                ccx.int_type
            };
            return llvm::LLVMGetUndef(eltty.to_ref());
        }
        B(cx).load(pointer_val)
    }
}

pub fn VolatileLoad(cx: &Block, pointer_val: ValueRef) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::nil(cx.ccx()).to_ref());
        }
        B(cx).volatile_load(pointer_val)
    }
}

pub fn AtomicLoad(cx: &Block, pointer_val: ValueRef, order: AtomicOrdering) -> ValueRef {
    unsafe {
        let ccx = cx.fcx.ccx;
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(ccx.int_type.to_ref());
        }
        B(cx).atomic_load(pointer_val, order)
    }
}

pub fn LoadRangeAssert(cx: &Block, pointer_val: ValueRef, lo: c_ulonglong,
                       hi: c_ulonglong, signed: lib::llvm::Bool) -> ValueRef {
    if cx.unreachable.get() {
        let ccx = cx.fcx.ccx;
        let ty = val_ty(pointer_val);
        let eltty = if ty.kind() == lib::llvm::Array {
            ty.element_type()
        } else {
            ccx.int_type
        };
        unsafe {
            llvm::LLVMGetUndef(eltty.to_ref())
        }
    } else {
        B(cx).load_range_assert(pointer_val, lo, hi, signed)
    }
}

pub fn Store(cx: &Block, val: ValueRef, ptr: ValueRef) {
    if cx.unreachable.get() { return; }
    B(cx).store(val, ptr)
}

pub fn VolatileStore(cx: &Block, val: ValueRef, ptr: ValueRef) {
    if cx.unreachable.get() { return; }
    B(cx).volatile_store(val, ptr)
}

pub fn AtomicStore(cx: &Block, val: ValueRef, ptr: ValueRef, order: AtomicOrdering) {
    if cx.unreachable.get() { return; }
    B(cx).atomic_store(val, ptr, order)
}

pub fn GEP(cx: &Block, pointer: ValueRef, indices: &[ValueRef]) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::nil(cx.ccx()).ptr_to().to_ref());
        }
        B(cx).gep(pointer, indices)
    }
}

// Simple wrapper around GEP that takes an array of ints and wraps them
// in C_i32()
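// (e.g. `GEPi(bcx, llval, [0u, 1u])` instead of building the constant
// indices by hand).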
#[inline]
pub fn GEPi(cx: &Block, base: ValueRef, ixs: &[uint]) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::nil(cx.ccx()).ptr_to().to_ref());
        }
        B(cx).gepi(base, ixs)
    }
}

pub fn InBoundsGEP(cx: &Block, pointer: ValueRef, indices: &[ValueRef]) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::nil(cx.ccx()).ptr_to().to_ref());
        }
        B(cx).inbounds_gep(pointer, indices)
    }
}

pub fn StructGEP(cx: &Block, pointer: ValueRef, idx: uint) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::nil(cx.ccx()).ptr_to().to_ref());
        }
        B(cx).struct_gep(pointer, idx)
    }
}

pub fn GlobalString(cx: &Block, _str: *c_char) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::i8p(cx.ccx()).to_ref());
        }
        B(cx).global_string(_str)
    }
}

pub fn GlobalStringPtr(cx: &Block, _str: *c_char) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::i8p(cx.ccx()).to_ref());
        }
        B(cx).global_string_ptr(_str)
    }
}

/* Casts */
pub fn Trunc(cx: &Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).trunc(val, dest_ty)
    }
}

pub fn ZExt(cx: &Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).zext(val, dest_ty)
    }
}

pub fn SExt(cx: &Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).sext(val, dest_ty)
    }
}

pub fn FPToUI(cx: &Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).fptoui(val, dest_ty)
    }
}

pub fn FPToSI(cx: &Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).fptosi(val, dest_ty)
    }
}

pub fn UIToFP(cx: &Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).uitofp(val, dest_ty)
    }
}

pub fn SIToFP(cx: &Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).sitofp(val, dest_ty)
    }
}

pub fn FPTrunc(cx: &Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).fptrunc(val, dest_ty)
    }
}

pub fn FPExt(cx: &Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).fpext(val, dest_ty)
    }
}

pub fn PtrToInt(cx: &Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).ptrtoint(val, dest_ty)
    }
}

pub fn IntToPtr(cx: &Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).inttoptr(val, dest_ty)
    }
}

pub fn BitCast(cx: &Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).bitcast(val, dest_ty)
    }
}

pub fn ZExtOrBitCast(cx: &Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).zext_or_bitcast(val, dest_ty)
    }
}

pub fn SExtOrBitCast(cx: &Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).sext_or_bitcast(val, dest_ty)
    }
}

pub fn TruncOrBitCast(cx: &Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).trunc_or_bitcast(val, dest_ty)
    }
}

pub fn Cast(cx: &Block, op: Opcode, val: ValueRef, dest_ty: Type, _: *u8)
            -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).cast(op, val, dest_ty)
    }
}

pub fn PointerCast(cx: &Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).pointercast(val, dest_ty)
    }
}

pub fn IntCast(cx: &Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).intcast(val, dest_ty)
    }
}

pub fn FPCast(cx: &Block, val: ValueRef, dest_ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
        B(cx).fpcast(val, dest_ty)
    }
}

/* Comparisons */
pub fn ICmp(cx: &Block, op: IntPredicate, lhs: ValueRef, rhs: ValueRef)
            -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::i1(cx.ccx()).to_ref());
        }
        B(cx).icmp(op, lhs, rhs)
    }
}

pub fn FCmp(cx: &Block, op: RealPredicate, lhs: ValueRef, rhs: ValueRef)
            -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::i1(cx.ccx()).to_ref());
        }
        B(cx).fcmp(op, lhs, rhs)
    }
}

/* Miscellaneous instructions */
pub fn EmptyPhi(cx: &Block, ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(ty.to_ref()); }
        B(cx).empty_phi(ty)
    }
}

pub fn Phi(cx: &Block, ty: Type, vals: &[ValueRef], bbs: &[BasicBlockRef]) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(ty.to_ref()); }
        B(cx).phi(ty, vals, bbs)
    }
}

pub fn AddIncomingToPhi(phi: ValueRef, val: ValueRef, bb: BasicBlockRef) {
    unsafe {
        if llvm::LLVMIsUndef(phi) == lib::llvm::True { return; }
        llvm::LLVMAddIncoming(phi, &val, &bb, 1 as c_uint);
    }
}

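// Fabricates an undef stand-in for the result of a call emitted in an
// unreachable block, falling back to the context's int type when the callee's
// return type cannot be recovered from `fn_`'s LLVM type.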
pub fn _UndefReturn(cx: &Block, fn_: ValueRef) -> ValueRef {
    unsafe {
        let ccx = cx.fcx.ccx;
        let ty = val_ty(fn_);
        let retty = if ty.kind() == lib::llvm::Integer {
            ty.return_type()
        } else {
            ccx.int_type
        };
        B(cx).count_insn("ret_undef");
        llvm::LLVMGetUndef(retty.to_ref())
    }
}

pub fn add_span_comment(cx: &Block, sp: Span, text: &str) {
    B(cx).add_span_comment(sp, text)
}

pub fn add_comment(cx: &Block, text: &str) {
    B(cx).add_comment(text)
}

pub fn InlineAsmCall(cx: &Block, asm: *c_char, cons: *c_char,
                     inputs: &[ValueRef], output: Type,
                     volatile: bool, alignstack: bool,
                     dia: AsmDialect) -> ValueRef {
    B(cx).inline_asm_call(asm, cons, inputs, output, volatile, alignstack, dia)
}

pub fn Call(cx: &Block, fn_: ValueRef, args: &[ValueRef],
            attributes: &[(uint, lib::llvm::Attribute)]) -> ValueRef {
    if cx.unreachable.get() { return _UndefReturn(cx, fn_); }
    B(cx).call(fn_, args, attributes)
}

pub fn CallWithConv(cx: &Block, fn_: ValueRef, args: &[ValueRef], conv: CallConv,
                    attributes: &[(uint, lib::llvm::Attribute)]) -> ValueRef {
    if cx.unreachable.get() { return _UndefReturn(cx, fn_); }
    B(cx).call_with_conv(fn_, args, conv, attributes)
}

pub fn AtomicFence(cx: &Block, order: AtomicOrdering) {
    if cx.unreachable.get() { return; }
    B(cx).atomic_fence(order)
}

pub fn Select(cx: &Block, if_: ValueRef, then: ValueRef, else_: ValueRef) -> ValueRef {
    if cx.unreachable.get() { return _Undef(then); }
    B(cx).select(if_, then, else_)
}

pub fn VAArg(cx: &Block, list: ValueRef, ty: Type) -> ValueRef {
    unsafe {
        if cx.unreachable.get() { return llvm::LLVMGetUndef(ty.to_ref()); }
        B(cx).va_arg(list, ty)
    }
}

pub fn ExtractElement(cx: &Block, vec_val: ValueRef, index: ValueRef) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::nil(cx.ccx()).to_ref());
        }
        B(cx).extract_element(vec_val, index)
    }
}

pub fn InsertElement(cx: &Block, vec_val: ValueRef, elt_val: ValueRef,
                     index: ValueRef) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::nil(cx.ccx()).to_ref());
        }
        B(cx).insert_element(vec_val, elt_val, index)
    }
}

pub fn ShuffleVector(cx: &Block, v1: ValueRef, v2: ValueRef,
                     mask: ValueRef) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::nil(cx.ccx()).to_ref());
        }
        B(cx).shuffle_vector(v1, v2, mask)
    }
}

pub fn VectorSplat(cx: &Block, num_elts: uint, elt_val: ValueRef) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::nil(cx.ccx()).to_ref());
        }
        B(cx).vector_splat(num_elts, elt_val)
    }
}

pub fn ExtractValue(cx: &Block, agg_val: ValueRef, index: uint) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::nil(cx.ccx()).to_ref());
        }
        B(cx).extract_value(agg_val, index)
    }
}

pub fn InsertValue(cx: &Block, agg_val: ValueRef, elt_val: ValueRef, index: uint) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::nil(cx.ccx()).to_ref());
        }
        B(cx).insert_value(agg_val, elt_val, index)
    }
}

pub fn IsNull(cx: &Block, val: ValueRef) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::i1(cx.ccx()).to_ref());
        }
        B(cx).is_null(val)
    }
}

pub fn IsNotNull(cx: &Block, val: ValueRef) -> ValueRef {
    unsafe {
        if cx.unreachable.get() {
            return llvm::LLVMGetUndef(Type::i1(cx.ccx()).to_ref());
        }
        B(cx).is_not_null(val)
    }
}

pub fn PtrDiff(cx: &Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
    unsafe {
        let ccx = cx.fcx.ccx;
        if cx.unreachable.get() { return llvm::LLVMGetUndef(ccx.int_type.to_ref()); }
        B(cx).ptrdiff(lhs, rhs)
    }
}

pub fn Trap(cx: &Block) {
    if cx.unreachable.get() { return; }
    B(cx).trap();
}

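// Unlike the wrappers above, `LandingPad` insists on a reachable,
// unterminated block: it asserts instead of silently returning an undef
// value.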
pub fn LandingPad(cx: &Block, ty: Type, pers_fn: ValueRef,
                  num_clauses: uint) -> ValueRef {
    check_not_terminated(cx);
    assert!(!cx.unreachable.get());
    B(cx).landing_pad(ty, pers_fn, num_clauses)
}

pub fn SetCleanup(cx: &Block, landing_pad: ValueRef) {
    B(cx).set_cleanup(landing_pad)
}

pub fn Resume(cx: &Block, exn: ValueRef) -> ValueRef {
    check_not_terminated(cx);
    terminate(cx, "Resume");
    B(cx).resume(exn)
}

/* Atomic Operations */
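// (These two wrappers emit unconditionally: they perform no unreachable or
// terminated checks.)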
pub fn AtomicCmpXchg(cx: &Block, dst: ValueRef,
                     cmp: ValueRef, src: ValueRef,
                     order: AtomicOrdering,
                     failure_order: AtomicOrdering) -> ValueRef {
    B(cx).atomic_cmpxchg(dst, cmp, src, order, failure_order)
}

pub fn AtomicRMW(cx: &Block, op: AtomicBinOp,
                 dst: ValueRef, src: ValueRef,
                 order: AtomicOrdering) -> ValueRef {
    B(cx).atomic_rmw(op, dst, src, order)
}