]>
Commit | Line | Data |
---|---|---|
9fa01778 | 1 | use crate::traits::*; |
b7449926 | 2 | |
136023e0 | 3 | use rustc_middle::ty::{self, Ty}; |
60c5eb7d | 4 | use rustc_target::abi::call::FnAbi; |
54a0048b | 5 | |
/// An index identifying one slot in a vtable.
///
/// The index is in units of pointer-sized slots: `get_fn`/`get_usize` below
/// cast the vtable pointer to a pointer-to-slot type and then GEP by this
/// value, so `VirtualIndex(n)` names the n-th entry of the vtable.
#[derive(Copy, Clone, Debug)]
pub struct VirtualIndex(u64);
54a0048b | 8 | |
dc9dc135 | 9 | impl<'a, 'tcx> VirtualIndex { |
cc61c64b | 10 | pub fn from_index(index: usize) -> Self { |
136023e0 | 11 | VirtualIndex(index as u64) |
cc61c64b XL |
12 | } |
13 | ||
a1dfa0c6 XL |
14 | pub fn get_fn<Bx: BuilderMethods<'a, 'tcx>>( |
15 | self, | |
16 | bx: &mut Bx, | |
17 | llvtable: Bx::Value, | |
60c5eb7d | 18 | fn_abi: &FnAbi<'tcx, Ty<'tcx>>, |
a1dfa0c6 | 19 | ) -> Bx::Value { |
cc61c64b | 20 | // Load the data pointer from the object. |
b7449926 | 21 | debug!("get_fn({:?}, {:?})", llvtable, self); |
cc61c64b | 22 | |
136023e0 XL |
23 | let llty = bx.fn_ptr_backend_type(fn_abi); |
24 | let llvtable = bx.pointercast(llvtable, bx.type_ptr_to(llty)); | |
a1dfa0c6 XL |
25 | let ptr_align = bx.tcx().data_layout.pointer_align.abi; |
26 | let gep = bx.inbounds_gep(llvtable, &[bx.const_usize(self.0)]); | |
136023e0 | 27 | let ptr = bx.load(llty, gep, ptr_align); |
2c00a5a8 | 28 | bx.nonnull_metadata(ptr); |
60c5eb7d | 29 | // Vtable loads are invariant. |
2c00a5a8 | 30 | bx.set_invariant_load(ptr); |
cc61c64b | 31 | ptr |
32a655c1 | 32 | } |
54a0048b | 33 | |
a1dfa0c6 XL |
34 | pub fn get_usize<Bx: BuilderMethods<'a, 'tcx>>( |
35 | self, | |
36 | bx: &mut Bx, | |
60c5eb7d | 37 | llvtable: Bx::Value, |
a1dfa0c6 | 38 | ) -> Bx::Value { |
cc61c64b | 39 | // Load the data pointer from the object. |
b7449926 | 40 | debug!("get_int({:?}, {:?})", llvtable, self); |
cc61c64b | 41 | |
136023e0 XL |
42 | let llty = bx.type_isize(); |
43 | let llvtable = bx.pointercast(llvtable, bx.type_ptr_to(llty)); | |
a1dfa0c6 XL |
44 | let usize_align = bx.tcx().data_layout.pointer_align.abi; |
45 | let gep = bx.inbounds_gep(llvtable, &[bx.const_usize(self.0)]); | |
136023e0 | 46 | let ptr = bx.load(llty, gep, usize_align); |
60c5eb7d | 47 | // Vtable loads are invariant. |
2c00a5a8 | 48 | bx.set_invariant_load(ptr); |
cc61c64b XL |
49 | ptr |
50 | } | |
54a0048b SL |
51 | } |
52 | ||
9e0c209e | 53 | /// Creates a dynamic vtable for the given type and vtable origin. |
54a0048b SL |
54 | /// This is used only for objects. |
55 | /// | |
9e0c209e SL |
56 | /// The vtables are cached instead of created on every call. |
57 | /// | |
54a0048b | 58 | /// The `trait_ref` encodes the erased self type. Hence if we are |
a1dfa0c6 | 59 | /// making an object `Foo<dyn Trait>` from a value of type `Foo<T>`, then |
60c5eb7d | 60 | /// `trait_ref` would map `T: Trait`. |
a1dfa0c6 XL |
61 | pub fn get_vtable<'tcx, Cx: CodegenMethods<'tcx>>( |
62 | cx: &Cx, | |
b7449926 | 63 | ty: Ty<'tcx>, |
0731742a | 64 | trait_ref: Option<ty::PolyExistentialTraitRef<'tcx>>, |
a1dfa0c6 XL |
65 | ) -> Cx::Value { |
66 | let tcx = cx.tcx(); | |
54a0048b | 67 | |
476ff2be | 68 | debug!("get_vtable(ty={:?}, trait_ref={:?})", ty, trait_ref); |
54a0048b SL |
69 | |
70 | // Check the cache. | |
a1dfa0c6 | 71 | if let Some(&val) = cx.vtables().borrow().get(&(ty, trait_ref)) { |
c30ab7b3 | 72 | return val; |
54a0048b SL |
73 | } |
74 | ||
136023e0 XL |
75 | let vtable_alloc_id = tcx.vtable_allocation(ty, trait_ref); |
76 | let vtable_allocation = tcx.global_alloc(vtable_alloc_id).unwrap_memory(); | |
77 | let vtable_const = cx.const_data_from_alloc(vtable_allocation); | |
a1dfa0c6 XL |
78 | let align = cx.data_layout().pointer_align.abi; |
79 | let vtable = cx.static_addr_of(vtable_const, align, Some("vtable")); | |
54a0048b | 80 | |
a1dfa0c6 | 81 | cx.create_vtable_metadata(ty, vtable); |
a1dfa0c6 | 82 | cx.vtables().borrow_mut().insert((ty, trait_ref), vtable); |
54a0048b SL |
83 | vtable |
84 | } |