// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

#![allow(dead_code)] // FFI wrappers

use lib;
use lib::llvm::llvm;
use lib::llvm::{CallConv, AtomicBinOp, AtomicOrdering, AsmDialect};
use lib::llvm::{Opcode, IntPredicate, RealPredicate, False};
use lib::llvm::{ValueRef, BasicBlockRef, BuilderRef, ModuleRef};
use middle::trans::base;
use middle::trans::common::*;
use middle::trans::machine::llalign_of_pref;
use middle::trans::type_::Type;
use collections::HashMap;
use libc::{c_uint, c_ulonglong, c_char};
use std::strbuf::StrBuf;
use syntax::codemap::Span;

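// A thin wrapper around an LLVM IR builder (`BuilderRef`) that keeps the
// crate context at hand and records instruction statistics when enabled.
//
// A minimal usage sketch (names such as `ccx`, `llbb`, `lhs` and `rhs` are
// hypothetical values supplied by the caller):
//
//     let b = Builder::new(ccx);
//     b.position_at_end(llbb);
//     let sum = b.add(lhs, rhs);
//     b.ret(sum);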
pub struct Builder<'a> {
    pub llbuilder: BuilderRef,
    pub ccx: &'a CrateContext,
}

// This is a really awful way to get a zero-length c-string, but better (and a
// lot more efficient) than calling "".with_c_str(...) every time an unnamed
// instruction is built.
pub fn noname() -> *c_char {
    static cnull: c_char = 0;
    &cnull as *c_char
}

impl<'a> Builder<'a> {
    pub fn new(ccx: &'a CrateContext) -> Builder<'a> {
        Builder {
            llbuilder: ccx.builder.b,
            ccx: ccx,
        }
    }

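    // Records statistics about the instructions being built: a running total
    // when the session collects translation stats, and per-path counts keyed
    // by the current instruction context when instruction counting is
    // enabled. A no-op otherwise.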
    pub fn count_insn(&self, category: &str) {
        if self.ccx.sess().trans_stats() {
            self.ccx.stats.n_llvm_insns.set(self.ccx
                                                .stats
                                                .n_llvm_insns
                                                .get() + 1);
        }
        if self.ccx.sess().count_llvm_insns() {
            base::with_insn_ctxt(|v| {
                let mut h = self.ccx.stats.llvm_insns.borrow_mut();

                // Build version of path with cycles removed.

                // Pass 1: scan table mapping str -> rightmost pos.
                let mut mm = HashMap::new();
                let len = v.len();
                let mut i = 0u;
                while i < len {
                    mm.insert(v[i], i);
                    i += 1u;
                }

                // Pass 2: concat strings for each elt, skipping
                // forwards over any cycles by advancing to rightmost
                // occurrence of each element in path.
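                //
                // E.g. for the path ["foo", "bar", "foo", "baz"], pass 1 maps
                // foo -> 2, bar -> 1, baz -> 3, so pass 2 skips over the
                // foo/bar/foo cycle and records "./foo/baz/<category>".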
                let mut s = StrBuf::from_str(".");
                i = 0u;
                while i < len {
                    i = *mm.get(&v[i]);
                    s.push_char('/');
                    s.push_str(v[i]);
                    i += 1u;
                }

                s.push_char('/');
                s.push_str(category);

                let s = s.into_owned();
                let n = match h.find_equiv(&s) {
                    Some(&n) => n,
                    _ => 0u
                };
                h.insert(s, n+1u);
            })
        }
    }

    pub fn position_before(&self, insn: ValueRef) {
        unsafe {
            llvm::LLVMPositionBuilderBefore(self.llbuilder, insn);
        }
    }

    pub fn position_at_end(&self, llbb: BasicBlockRef) {
        unsafe {
            llvm::LLVMPositionBuilderAtEnd(self.llbuilder, llbb);
        }
    }

    pub fn ret_void(&self) {
        self.count_insn("retvoid");
        unsafe {
            llvm::LLVMBuildRetVoid(self.llbuilder);
        }
    }

    pub fn ret(&self, v: ValueRef) {
        self.count_insn("ret");
        unsafe {
            llvm::LLVMBuildRet(self.llbuilder, v);
        }
    }

    pub fn aggregate_ret(&self, ret_vals: &[ValueRef]) {
        unsafe {
            llvm::LLVMBuildAggregateRet(self.llbuilder,
                                        ret_vals.as_ptr(),
                                        ret_vals.len() as c_uint);
        }
    }

    pub fn br(&self, dest: BasicBlockRef) {
        self.count_insn("br");
        unsafe {
            llvm::LLVMBuildBr(self.llbuilder, dest);
        }
    }

    pub fn cond_br(&self, cond: ValueRef, then_llbb: BasicBlockRef, else_llbb: BasicBlockRef) {
        self.count_insn("condbr");
        unsafe {
            llvm::LLVMBuildCondBr(self.llbuilder, cond, then_llbb, else_llbb);
        }
    }

    pub fn switch(&self, v: ValueRef, else_llbb: BasicBlockRef, num_cases: uint) -> ValueRef {
        unsafe {
            llvm::LLVMBuildSwitch(self.llbuilder, v, else_llbb, num_cases as c_uint)
        }
    }

    pub fn indirect_br(&self, addr: ValueRef, num_dests: uint) {
        self.count_insn("indirectbr");
        unsafe {
            llvm::LLVMBuildIndirectBr(self.llbuilder, addr, num_dests as c_uint);
        }
    }

    pub fn invoke(&self,
                  llfn: ValueRef,
                  args: &[ValueRef],
                  then: BasicBlockRef,
                  catch: BasicBlockRef,
                  attributes: &[(uint, lib::llvm::Attribute)])
                  -> ValueRef {
        self.count_insn("invoke");
        unsafe {
            let v = llvm::LLVMBuildInvoke(self.llbuilder,
                                          llfn,
                                          args.as_ptr(),
                                          args.len() as c_uint,
                                          then,
                                          catch,
                                          noname());
            for &(idx, attr) in attributes.iter() {
                llvm::LLVMAddInstrAttribute(v, idx as c_uint, attr as c_uint);
            }
            v
        }
    }

    pub fn unreachable(&self) {
        self.count_insn("unreachable");
        unsafe {
            llvm::LLVMBuildUnreachable(self.llbuilder);
        }
    }

    /* Arithmetic */
    pub fn add(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("add");
        unsafe {
            llvm::LLVMBuildAdd(self.llbuilder, lhs, rhs, noname())
        }
    }

    pub fn nswadd(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("nswadd");
        unsafe {
            llvm::LLVMBuildNSWAdd(self.llbuilder, lhs, rhs, noname())
        }
    }

    pub fn nuwadd(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("nuwadd");
        unsafe {
            llvm::LLVMBuildNUWAdd(self.llbuilder, lhs, rhs, noname())
        }
    }

    pub fn fadd(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("fadd");
        unsafe {
            llvm::LLVMBuildFAdd(self.llbuilder, lhs, rhs, noname())
        }
    }

    pub fn sub(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("sub");
        unsafe {
            llvm::LLVMBuildSub(self.llbuilder, lhs, rhs, noname())
        }
    }

    pub fn nswsub(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("nswsub");
        unsafe {
            llvm::LLVMBuildNSWSub(self.llbuilder, lhs, rhs, noname())
        }
    }

    pub fn nuwsub(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("nuwsub");
        unsafe {
            llvm::LLVMBuildNUWSub(self.llbuilder, lhs, rhs, noname())
        }
    }

    pub fn fsub(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("fsub");
        unsafe {
            llvm::LLVMBuildFSub(self.llbuilder, lhs, rhs, noname())
        }
    }

    pub fn mul(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("mul");
        unsafe {
            llvm::LLVMBuildMul(self.llbuilder, lhs, rhs, noname())
        }
    }

    pub fn nswmul(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("nswmul");
        unsafe {
            llvm::LLVMBuildNSWMul(self.llbuilder, lhs, rhs, noname())
        }
    }

    pub fn nuwmul(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("nuwmul");
        unsafe {
            llvm::LLVMBuildNUWMul(self.llbuilder, lhs, rhs, noname())
        }
    }

    pub fn fmul(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("fmul");
        unsafe {
            llvm::LLVMBuildFMul(self.llbuilder, lhs, rhs, noname())
        }
    }

    pub fn udiv(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("udiv");
        unsafe {
            llvm::LLVMBuildUDiv(self.llbuilder, lhs, rhs, noname())
        }
    }

    pub fn sdiv(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("sdiv");
        unsafe {
            llvm::LLVMBuildSDiv(self.llbuilder, lhs, rhs, noname())
        }
    }

    pub fn exactsdiv(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("exactsdiv");
        unsafe {
            llvm::LLVMBuildExactSDiv(self.llbuilder, lhs, rhs, noname())
        }
    }

    pub fn fdiv(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("fdiv");
        unsafe {
            llvm::LLVMBuildFDiv(self.llbuilder, lhs, rhs, noname())
        }
    }

    pub fn urem(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("urem");
        unsafe {
            llvm::LLVMBuildURem(self.llbuilder, lhs, rhs, noname())
        }
    }

    pub fn srem(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("srem");
        unsafe {
            llvm::LLVMBuildSRem(self.llbuilder, lhs, rhs, noname())
        }
    }

    pub fn frem(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("frem");
        unsafe {
            llvm::LLVMBuildFRem(self.llbuilder, lhs, rhs, noname())
        }
    }

    pub fn shl(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("shl");
        unsafe {
            llvm::LLVMBuildShl(self.llbuilder, lhs, rhs, noname())
        }
    }

    pub fn lshr(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("lshr");
        unsafe {
            llvm::LLVMBuildLShr(self.llbuilder, lhs, rhs, noname())
        }
    }

    pub fn ashr(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("ashr");
        unsafe {
            llvm::LLVMBuildAShr(self.llbuilder, lhs, rhs, noname())
        }
    }

    pub fn and(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("and");
        unsafe {
            llvm::LLVMBuildAnd(self.llbuilder, lhs, rhs, noname())
        }
    }

    pub fn or(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("or");
        unsafe {
            llvm::LLVMBuildOr(self.llbuilder, lhs, rhs, noname())
        }
    }

    pub fn xor(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("xor");
        unsafe {
            llvm::LLVMBuildXor(self.llbuilder, lhs, rhs, noname())
        }
    }

    pub fn binop(&self, op: Opcode, lhs: ValueRef, rhs: ValueRef)
                 -> ValueRef {
        self.count_insn("binop");
        unsafe {
            llvm::LLVMBuildBinOp(self.llbuilder, op, lhs, rhs, noname())
        }
    }

    pub fn neg(&self, v: ValueRef) -> ValueRef {
        self.count_insn("neg");
        unsafe {
            llvm::LLVMBuildNeg(self.llbuilder, v, noname())
        }
    }

    pub fn nswneg(&self, v: ValueRef) -> ValueRef {
        self.count_insn("nswneg");
        unsafe {
            llvm::LLVMBuildNSWNeg(self.llbuilder, v, noname())
        }
    }

    pub fn nuwneg(&self, v: ValueRef) -> ValueRef {
        self.count_insn("nuwneg");
        unsafe {
            llvm::LLVMBuildNUWNeg(self.llbuilder, v, noname())
        }
    }

    pub fn fneg(&self, v: ValueRef) -> ValueRef {
        self.count_insn("fneg");
        unsafe {
            llvm::LLVMBuildFNeg(self.llbuilder, v, noname())
        }
    }

    pub fn not(&self, v: ValueRef) -> ValueRef {
        self.count_insn("not");
        unsafe {
            llvm::LLVMBuildNot(self.llbuilder, v, noname())
        }
    }

    /* Memory */
    pub fn malloc(&self, ty: Type) -> ValueRef {
        self.count_insn("malloc");
        unsafe {
            llvm::LLVMBuildMalloc(self.llbuilder, ty.to_ref(), noname())
        }
    }

    pub fn array_malloc(&self, ty: Type, val: ValueRef) -> ValueRef {
        self.count_insn("arraymalloc");
        unsafe {
            llvm::LLVMBuildArrayMalloc(self.llbuilder, ty.to_ref(), val, noname())
        }
    }

    pub fn alloca(&self, ty: Type, name: &str) -> ValueRef {
        self.count_insn("alloca");
        unsafe {
            if name.is_empty() {
                llvm::LLVMBuildAlloca(self.llbuilder, ty.to_ref(), noname())
            } else {
                name.with_c_str(|c| {
                    llvm::LLVMBuildAlloca(self.llbuilder, ty.to_ref(), c)
                })
            }
        }
    }

    pub fn array_alloca(&self, ty: Type, val: ValueRef) -> ValueRef {
        self.count_insn("arrayalloca");
        unsafe {
            llvm::LLVMBuildArrayAlloca(self.llbuilder, ty.to_ref(), val, noname())
        }
    }

    pub fn free(&self, ptr: ValueRef) {
        self.count_insn("free");
        unsafe {
            llvm::LLVMBuildFree(self.llbuilder, ptr);
        }
    }

    pub fn load(&self, ptr: ValueRef) -> ValueRef {
        self.count_insn("load");
        unsafe {
            llvm::LLVMBuildLoad(self.llbuilder, ptr, noname())
        }
    }

    pub fn volatile_load(&self, ptr: ValueRef) -> ValueRef {
        self.count_insn("load.volatile");
        unsafe {
            let insn = llvm::LLVMBuildLoad(self.llbuilder, ptr, noname());
            llvm::LLVMSetVolatile(insn, lib::llvm::True);
            insn
        }
    }

    pub fn atomic_load(&self, ptr: ValueRef, order: AtomicOrdering) -> ValueRef {
        self.count_insn("load.atomic");
        unsafe {
            let ty = Type::from_ref(llvm::LLVMTypeOf(ptr));
            let align = llalign_of_pref(self.ccx, ty.element_type());
            llvm::LLVMBuildAtomicLoad(self.llbuilder, ptr, noname(), order,
                                      align as c_uint)
        }
    }

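    // Like `load`, but also attaches `!range` metadata asserting that the
    // loaded value lies in the half-open range [lo, hi), with the bounds
    // sign-extended or not according to `signed`.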
    pub fn load_range_assert(&self, ptr: ValueRef, lo: c_ulonglong,
                             hi: c_ulonglong, signed: lib::llvm::Bool) -> ValueRef {
        let value = self.load(ptr);

        unsafe {
            let t = llvm::LLVMGetElementType(llvm::LLVMTypeOf(ptr));
            let min = llvm::LLVMConstInt(t, lo, signed);
            let max = llvm::LLVMConstInt(t, hi, signed);

            let v = [min, max];

            llvm::LLVMSetMetadata(value, lib::llvm::MD_range as c_uint,
                                  llvm::LLVMMDNodeInContext(self.ccx.llcx,
                                                            v.as_ptr(),
                                                            v.len() as c_uint));
        }

        value
    }

    pub fn store(&self, val: ValueRef, ptr: ValueRef) {
        debug!("Store {} -> {}",
               self.ccx.tn.val_to_str(val),
               self.ccx.tn.val_to_str(ptr));
        assert!(self.llbuilder.is_not_null());
        self.count_insn("store");
        unsafe {
            llvm::LLVMBuildStore(self.llbuilder, val, ptr);
        }
    }

    pub fn volatile_store(&self, val: ValueRef, ptr: ValueRef) {
        debug!("Store {} -> {}",
               self.ccx.tn.val_to_str(val),
               self.ccx.tn.val_to_str(ptr));
        assert!(self.llbuilder.is_not_null());
        self.count_insn("store.volatile");
        unsafe {
            let insn = llvm::LLVMBuildStore(self.llbuilder, val, ptr);
            llvm::LLVMSetVolatile(insn, lib::llvm::True);
        }
    }

    pub fn atomic_store(&self, val: ValueRef, ptr: ValueRef, order: AtomicOrdering) {
        debug!("Store {} -> {}",
               self.ccx.tn.val_to_str(val),
               self.ccx.tn.val_to_str(ptr));
        self.count_insn("store.atomic");
        unsafe {
            let ty = Type::from_ref(llvm::LLVMTypeOf(ptr));
            let align = llalign_of_pref(self.ccx, ty.element_type());
            llvm::LLVMBuildAtomicStore(self.llbuilder, val, ptr, order, align as c_uint);
        }
    }

    pub fn gep(&self, ptr: ValueRef, indices: &[ValueRef]) -> ValueRef {
        self.count_insn("gep");
        unsafe {
            llvm::LLVMBuildGEP(self.llbuilder, ptr, indices.as_ptr(),
                               indices.len() as c_uint, noname())
        }
    }

    // Simple wrapper around GEP that takes an array of ints and wraps them
    // in C_i32()
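    //
    // E.g. (illustrative) `b.gepi(base, [0u, 2u])` is equivalent to
    // `b.inbounds_gep(base, [C_i32(ccx, 0), C_i32(ccx, 2)])`.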
    #[inline]
    pub fn gepi(&self, base: ValueRef, ixs: &[uint]) -> ValueRef {
        // Small vector optimization. This should catch 100% of the cases that
        // we care about.
        if ixs.len() < 16 {
            let mut small_vec = [ C_i32(self.ccx, 0), ..16 ];
            for (small_vec_e, &ix) in small_vec.mut_iter().zip(ixs.iter()) {
                *small_vec_e = C_i32(self.ccx, ix as i32);
            }
            self.inbounds_gep(base, small_vec.slice(0, ixs.len()))
        } else {
            let v = ixs.iter().map(|i| C_i32(self.ccx, *i as i32)).collect::<Vec<ValueRef>>();
            self.count_insn("gepi");
            self.inbounds_gep(base, v.as_slice())
        }
    }

    pub fn inbounds_gep(&self, ptr: ValueRef, indices: &[ValueRef]) -> ValueRef {
        self.count_insn("inboundsgep");
        unsafe {
            llvm::LLVMBuildInBoundsGEP(
                self.llbuilder, ptr, indices.as_ptr(), indices.len() as c_uint, noname())
        }
    }

    pub fn struct_gep(&self, ptr: ValueRef, idx: uint) -> ValueRef {
        self.count_insn("structgep");
        unsafe {
            llvm::LLVMBuildStructGEP(self.llbuilder, ptr, idx as c_uint, noname())
        }
    }

    pub fn global_string(&self, _str: *c_char) -> ValueRef {
        self.count_insn("globalstring");
        unsafe {
            llvm::LLVMBuildGlobalString(self.llbuilder, _str, noname())
        }
    }

    pub fn global_string_ptr(&self, _str: *c_char) -> ValueRef {
        self.count_insn("globalstringptr");
        unsafe {
            llvm::LLVMBuildGlobalStringPtr(self.llbuilder, _str, noname())
        }
    }

    /* Casts */
    pub fn trunc(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
        self.count_insn("trunc");
        unsafe {
            llvm::LLVMBuildTrunc(self.llbuilder, val, dest_ty.to_ref(), noname())
        }
    }

    pub fn zext(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
        self.count_insn("zext");
        unsafe {
            llvm::LLVMBuildZExt(self.llbuilder, val, dest_ty.to_ref(), noname())
        }
    }

    pub fn sext(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
        self.count_insn("sext");
        unsafe {
            llvm::LLVMBuildSExt(self.llbuilder, val, dest_ty.to_ref(), noname())
        }
    }

    pub fn fptoui(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
        self.count_insn("fptoui");
        unsafe {
            llvm::LLVMBuildFPToUI(self.llbuilder, val, dest_ty.to_ref(), noname())
        }
    }

    pub fn fptosi(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
        self.count_insn("fptosi");
        unsafe {
            llvm::LLVMBuildFPToSI(self.llbuilder, val, dest_ty.to_ref(), noname())
        }
    }

    pub fn uitofp(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
        self.count_insn("uitofp");
        unsafe {
            llvm::LLVMBuildUIToFP(self.llbuilder, val, dest_ty.to_ref(), noname())
        }
    }

    pub fn sitofp(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
        self.count_insn("sitofp");
        unsafe {
            llvm::LLVMBuildSIToFP(self.llbuilder, val, dest_ty.to_ref(), noname())
        }
    }

    pub fn fptrunc(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
        self.count_insn("fptrunc");
        unsafe {
            llvm::LLVMBuildFPTrunc(self.llbuilder, val, dest_ty.to_ref(), noname())
        }
    }

    pub fn fpext(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
        self.count_insn("fpext");
        unsafe {
            llvm::LLVMBuildFPExt(self.llbuilder, val, dest_ty.to_ref(), noname())
        }
    }

    pub fn ptrtoint(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
        self.count_insn("ptrtoint");
        unsafe {
            llvm::LLVMBuildPtrToInt(self.llbuilder, val, dest_ty.to_ref(), noname())
        }
    }

    pub fn inttoptr(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
        self.count_insn("inttoptr");
        unsafe {
            llvm::LLVMBuildIntToPtr(self.llbuilder, val, dest_ty.to_ref(), noname())
        }
    }

    pub fn bitcast(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
        self.count_insn("bitcast");
        unsafe {
            llvm::LLVMBuildBitCast(self.llbuilder, val, dest_ty.to_ref(), noname())
        }
    }

    pub fn zext_or_bitcast(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
        self.count_insn("zextorbitcast");
        unsafe {
            llvm::LLVMBuildZExtOrBitCast(self.llbuilder, val, dest_ty.to_ref(), noname())
        }
    }

    pub fn sext_or_bitcast(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
        self.count_insn("sextorbitcast");
        unsafe {
            llvm::LLVMBuildSExtOrBitCast(self.llbuilder, val, dest_ty.to_ref(), noname())
        }
    }

    pub fn trunc_or_bitcast(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
        self.count_insn("truncorbitcast");
        unsafe {
            llvm::LLVMBuildTruncOrBitCast(self.llbuilder, val, dest_ty.to_ref(), noname())
        }
    }

    pub fn cast(&self, op: Opcode, val: ValueRef, dest_ty: Type) -> ValueRef {
        self.count_insn("cast");
        unsafe {
            llvm::LLVMBuildCast(self.llbuilder, op, val, dest_ty.to_ref(), noname())
        }
    }

    pub fn pointercast(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
        self.count_insn("pointercast");
        unsafe {
            llvm::LLVMBuildPointerCast(self.llbuilder, val, dest_ty.to_ref(), noname())
        }
    }

    pub fn intcast(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
        self.count_insn("intcast");
        unsafe {
            llvm::LLVMBuildIntCast(self.llbuilder, val, dest_ty.to_ref(), noname())
        }
    }

    pub fn fpcast(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
        self.count_insn("fpcast");
        unsafe {
            llvm::LLVMBuildFPCast(self.llbuilder, val, dest_ty.to_ref(), noname())
        }
    }

    /* Comparisons */
    pub fn icmp(&self, op: IntPredicate, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("icmp");
        unsafe {
            llvm::LLVMBuildICmp(self.llbuilder, op as c_uint, lhs, rhs, noname())
        }
    }

    pub fn fcmp(&self, op: RealPredicate, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("fcmp");
        unsafe {
            llvm::LLVMBuildFCmp(self.llbuilder, op as c_uint, lhs, rhs, noname())
        }
    }

    /* Miscellaneous instructions */
    pub fn empty_phi(&self, ty: Type) -> ValueRef {
        self.count_insn("emptyphi");
        unsafe {
            llvm::LLVMBuildPhi(self.llbuilder, ty.to_ref(), noname())
        }
    }

    pub fn phi(&self, ty: Type, vals: &[ValueRef], bbs: &[BasicBlockRef]) -> ValueRef {
        assert_eq!(vals.len(), bbs.len());
        let phi = self.empty_phi(ty);
        self.count_insn("addincoming");
        unsafe {
            llvm::LLVMAddIncoming(phi, vals.as_ptr(),
                                  bbs.as_ptr(),
                                  vals.len() as c_uint);
            phi
        }
    }

    pub fn add_span_comment(&self, sp: Span, text: &str) {
        if self.ccx.sess().asm_comments() {
            let s = format!("{} ({})", text, self.ccx.sess().codemap().span_to_str(sp));
            debug!("{}", s);
            self.add_comment(s);
        }
    }

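    // Emits `text` as a call to a constant inline asm value whose asm string
    // is just an assembler comment, so the text shows up in the generated
    // assembly when the session has asm comments enabled.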
    pub fn add_comment(&self, text: &str) {
        if self.ccx.sess().asm_comments() {
            let sanitized = text.replace("$", "");
            let comment_text = format!("\\# {}", sanitized.replace("\n", "\n\t# "));
            self.count_insn("inlineasm");
            let asm = comment_text.with_c_str(|c| {
                unsafe {
                    llvm::LLVMConstInlineAsm(Type::func([], &Type::void(self.ccx)).to_ref(),
                                             c, noname(), False, False)
                }
            });
            self.call(asm, [], []);
        }
    }

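    // Builds a call to inline assembly with the given constraint string; the
    // function type of the asm value is derived from the types of `inputs`
    // and from `output`.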
    pub fn inline_asm_call(&self, asm: *c_char, cons: *c_char,
                           inputs: &[ValueRef], output: Type,
                           volatile: bool, alignstack: bool,
                           dia: AsmDialect) -> ValueRef {
        self.count_insn("inlineasm");

        let volatile = if volatile { lib::llvm::True }
                       else { lib::llvm::False };
        let alignstack = if alignstack { lib::llvm::True }
                         else { lib::llvm::False };

        let argtys = inputs.iter().map(|v| {
            debug!("Asm Input Type: {:?}", self.ccx.tn.val_to_str(*v));
            val_ty(*v)
        }).collect::<Vec<_>>();

        debug!("Asm Output Type: {:?}", self.ccx.tn.type_to_str(output));
        let fty = Type::func(argtys.as_slice(), &output);
        unsafe {
            let v = llvm::LLVMInlineAsm(
                fty.to_ref(), asm, cons, volatile, alignstack, dia as c_uint);
            self.call(v, inputs, [])
        }
    }

    pub fn call(&self, llfn: ValueRef, args: &[ValueRef],
                attributes: &[(uint, lib::llvm::Attribute)]) -> ValueRef {
        self.count_insn("call");

        debug!("Call {} with args ({})",
               self.ccx.tn.val_to_str(llfn),
               args.iter()
                   .map(|&v| self.ccx.tn.val_to_str(v))
                   .collect::<Vec<~str>>()
                   .connect(", "));

        unsafe {
            let v = llvm::LLVMBuildCall(self.llbuilder, llfn, args.as_ptr(),
                                        args.len() as c_uint, noname());
            for &(idx, attr) in attributes.iter() {
                llvm::LLVMAddInstrAttribute(v, idx as c_uint, attr as c_uint);
            }
            v
        }
    }

    pub fn call_with_conv(&self, llfn: ValueRef, args: &[ValueRef],
                          conv: CallConv,
                          attributes: &[(uint, lib::llvm::Attribute)]) -> ValueRef {
        self.count_insn("callwithconv");
        let v = self.call(llfn, args, attributes);
        lib::llvm::SetInstructionCallConv(v, conv);
        v
    }

    pub fn select(&self, cond: ValueRef, then_val: ValueRef, else_val: ValueRef) -> ValueRef {
        self.count_insn("select");
        unsafe {
            llvm::LLVMBuildSelect(self.llbuilder, cond, then_val, else_val, noname())
        }
    }

    pub fn va_arg(&self, list: ValueRef, ty: Type) -> ValueRef {
        self.count_insn("vaarg");
        unsafe {
            llvm::LLVMBuildVAArg(self.llbuilder, list, ty.to_ref(), noname())
        }
    }

    pub fn extract_element(&self, vec: ValueRef, idx: ValueRef) -> ValueRef {
        self.count_insn("extractelement");
        unsafe {
            llvm::LLVMBuildExtractElement(self.llbuilder, vec, idx, noname())
        }
    }

    pub fn insert_element(&self, vec: ValueRef, elt: ValueRef, idx: ValueRef) -> ValueRef {
        self.count_insn("insertelement");
        unsafe {
            llvm::LLVMBuildInsertElement(self.llbuilder, vec, elt, idx, noname())
        }
    }

    pub fn shuffle_vector(&self, v1: ValueRef, v2: ValueRef, mask: ValueRef) -> ValueRef {
        self.count_insn("shufflevector");
        unsafe {
            llvm::LLVMBuildShuffleVector(self.llbuilder, v1, v2, mask, noname())
        }
    }

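    // Builds a vector with `num_elts` copies of `elt`: `elt` is inserted into
    // lane 0 of an undef vector, which is then shuffled with an all-zero mask
    // so that every lane reads lane 0.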
    pub fn vector_splat(&self, num_elts: uint, elt: ValueRef) -> ValueRef {
        unsafe {
            let elt_ty = val_ty(elt);
            let undef = llvm::LLVMGetUndef(Type::vector(&elt_ty, num_elts as u64).to_ref());
            let vec = self.insert_element(undef, elt, C_i32(self.ccx, 0));
            let vec_i32_ty = Type::vector(&Type::i32(self.ccx), num_elts as u64);
            self.shuffle_vector(vec, undef, C_null(vec_i32_ty))
        }
    }

    pub fn extract_value(&self, agg_val: ValueRef, idx: uint) -> ValueRef {
        self.count_insn("extractvalue");
        unsafe {
            llvm::LLVMBuildExtractValue(self.llbuilder, agg_val, idx as c_uint, noname())
        }
    }

    pub fn insert_value(&self, agg_val: ValueRef, elt: ValueRef,
                        idx: uint) -> ValueRef {
        self.count_insn("insertvalue");
        unsafe {
            llvm::LLVMBuildInsertValue(self.llbuilder, agg_val, elt, idx as c_uint,
                                       noname())
        }
    }

    pub fn is_null(&self, val: ValueRef) -> ValueRef {
        self.count_insn("isnull");
        unsafe {
            llvm::LLVMBuildIsNull(self.llbuilder, val, noname())
        }
    }

    pub fn is_not_null(&self, val: ValueRef) -> ValueRef {
        self.count_insn("isnotnull");
        unsafe {
            llvm::LLVMBuildIsNotNull(self.llbuilder, val, noname())
        }
    }

    pub fn ptrdiff(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("ptrdiff");
        unsafe {
            llvm::LLVMBuildPtrDiff(self.llbuilder, lhs, rhs, noname())
        }
    }

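    // Emits a call to the `llvm.trap` intrinsic. The intrinsic must already
    // be declared in the current module; the lookup asserts that it exists.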
    pub fn trap(&self) {
        unsafe {
            let bb: BasicBlockRef = llvm::LLVMGetInsertBlock(self.llbuilder);
            let fn_: ValueRef = llvm::LLVMGetBasicBlockParent(bb);
            let m: ModuleRef = llvm::LLVMGetGlobalParent(fn_);
            let t: ValueRef = "llvm.trap".with_c_str(|buf| {
                llvm::LLVMGetNamedFunction(m, buf)
            });
            assert!(t as int != 0);
            let args: &[ValueRef] = [];
            self.count_insn("trap");
            llvm::LLVMBuildCall(
                self.llbuilder, t, args.as_ptr(), args.len() as c_uint, noname());
        }
    }

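    // Builds a landingpad instruction with room for `num_clauses` clauses,
    // using `pers_fn` as the personality function.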
    pub fn landing_pad(&self, ty: Type, pers_fn: ValueRef, num_clauses: uint) -> ValueRef {
        self.count_insn("landingpad");
        unsafe {
            llvm::LLVMBuildLandingPad(
                self.llbuilder, ty.to_ref(), pers_fn, num_clauses as c_uint, noname())
        }
    }

    pub fn set_cleanup(&self, landing_pad: ValueRef) {
        self.count_insn("setcleanup");
        unsafe {
            llvm::LLVMSetCleanup(landing_pad, lib::llvm::True);
        }
    }

    pub fn resume(&self, exn: ValueRef) -> ValueRef {
        self.count_insn("resume");
        unsafe {
            llvm::LLVMBuildResume(self.llbuilder, exn)
        }
    }

    // Atomic Operations
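
    // Builds a compare-and-swap: `src` is stored to `dst` only if the value
    // currently at `dst` equals `cmp`.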
    pub fn atomic_cmpxchg(&self, dst: ValueRef,
                          cmp: ValueRef, src: ValueRef,
                          order: AtomicOrdering,
                          failure_order: AtomicOrdering) -> ValueRef {
        unsafe {
            llvm::LLVMBuildAtomicCmpXchg(self.llbuilder, dst, cmp, src,
                                         order, failure_order)
        }
    }

    pub fn atomic_rmw(&self, op: AtomicBinOp,
                      dst: ValueRef, src: ValueRef,
                      order: AtomicOrdering) -> ValueRef {
        unsafe {
            llvm::LLVMBuildAtomicRMW(self.llbuilder, op, dst, src, order, False)
        }
    }

    pub fn atomic_fence(&self, order: AtomicOrdering) {
        unsafe {
            llvm::LLVMBuildAtomicFence(self.llbuilder, order);
        }
    }
}