@@ -232,6 +232,7 @@ impl<'ll, 'tcx, 'a> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
         // Get the return type.
         let sig = llvm::LLVMGetElementType(self.val_ty(self.llfn()));
         let return_ty = llvm::LLVMGetReturnType(sig);
+        ;
         // Check if new_ty & return_ty are different pointers.
         // FIXME: get rid of this nonsense once we are past LLVM 7 and don't have
         // to suffer from typed pointers.
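
Several of the hunks below replace the old `type_ptr_to(ty)` casts with casts that rebuild the pointee type in the operand's own address space instead of the default one, since under LLVM 7's typed pointers (see the FIXME above) re-typing a pointer must not silently move it between address spaces. The recurring pattern, shown here in isolation with the same calls the diff uses (`ptr` and `ty` stand for the operand and pointee type at each call site):

    // Sketch of the address-space-preserving cast used repeatedly below.
    let addrspace = unsafe { llvm::LLVMGetPointerAddressSpace(self.val_ty(ptr)) };
    let typed_ptr_ty = unsafe { llvm::LLVMPointerType(ty, addrspace) };
    let ptr = self.pointercast(ptr, typed_ptr_ty);
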
@@ -481,6 +482,10 @@ impl<'ll, 'tcx, 'a> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
         // Cast to default addrspace if necessary
         let alloca_ty = llvm::LLVMTypeOf(alloca);
         let alloca_addrspace = llvm::LLVMGetPointerAddressSpace(alloca_ty);
+        let alloca = self.pointercast(
+            alloca,
+            self.type_i8p_ext(rustc_abi::AddressSpace(alloca_addrspace)),
+        );
         let dest_ty = self.cx().type_ptr();
         let dest_addrspace = llvm::LLVMGetPointerAddressSpace(dest_ty);
         if alloca_addrspace != dest_addrspace {
@@ -493,7 +498,9 @@ impl<'ll, 'tcx, 'a> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
 
     fn load(&mut self, ty: &'ll Type, ptr: &'ll Value, align: Align) -> &'ll Value {
         trace!("Load {ty:?} {:?}", ptr);
-        let ptr = self.pointercast(ptr, self.cx.type_ptr_to(ty));
+        let ptr = self.pointercast(ptr, unsafe {
+            llvm::LLVMPointerType(ty, llvm::LLVMGetPointerAddressSpace(self.val_ty(ptr)))
+        });
         unsafe {
             let load = llvm::LLVMBuildLoad(self.llbuilder, ptr, UNNAMED);
             llvm::LLVMSetAlignment(load, align.bytes() as c_uint);
@@ -503,7 +510,9 @@ impl<'ll, 'tcx, 'a> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
 
     fn volatile_load(&mut self, ty: &'ll Type, ptr: &'ll Value) -> &'ll Value {
         trace!("Volatile load `{:?}`", ptr);
-        let ptr = self.pointercast(ptr, self.cx.type_ptr_to(ty));
+        let ptr = self.pointercast(ptr, unsafe {
+            llvm::LLVMPointerType(ty, llvm::LLVMGetPointerAddressSpace(self.val_ty(ptr)))
+        });
         unsafe {
             let load = llvm::LLVMBuildLoad(self.llbuilder, ptr, UNNAMED);
             llvm::LLVMSetVolatile(load, llvm::True);
@@ -711,14 +720,21 @@ impl<'ll, 'tcx, 'a> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
         align: Align,
         flags: MemFlags,
     ) -> &'ll Value {
+        assert_eq!(self.cx.type_kind(self.cx.val_ty(ptr)), TypeKind::Pointer);
+        let ptr = self.check_store(val, ptr);
+        let address_space = unsafe { llvm::LLVMGetPointerAddressSpace(self.val_ty(ptr)) };
+        let store_pointer_ty = unsafe { llvm::LLVMPointerType(self.val_ty(val), address_space) };
+
+        let ptr = unsafe {
+            llvm::LLVMBuildBitCast(self.llbuilder, ptr, store_pointer_ty, c"NAME".as_ptr())
+        };
         trace!(
             "store_with_flags: {:?} into {:?} with align {:?}",
             val,
             ptr,
             align.bytes()
         );
-        assert_eq!(self.cx.type_kind(self.cx.val_ty(ptr)), TypeKind::Pointer);
-        let ptr = self.check_store(val, ptr);
+
         unsafe {
             let store = llvm::LLVMBuildStore(self.llbuilder, val, ptr);
             let align = if flags.contains(MemFlags::UNALIGNED) {
@@ -757,15 +773,20 @@ impl<'ll, 'tcx, 'a> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
 
     fn gep(&mut self, ty: &'ll Type, ptr: &'ll Value, indices: &[&'ll Value]) -> &'ll Value {
         trace!("gep: {ty:?} {:?} with indices {:?}", ptr, indices);
-        let ptr = self.pointercast(ptr, self.cx().type_ptr_to(ty));
+        let address_space = unsafe { llvm::LLVMGetPointerAddressSpace(self.val_ty(ptr)) };
+        let ptr = self.pointercast(ptr, unsafe { llvm::LLVMPointerType(ty, address_space) });
         unsafe {
-            llvm::LLVMBuildGEP2(
+            let res = llvm::LLVMBuildGEP2(
                 self.llbuilder,
                 ty,
                 ptr,
                 indices.as_ptr(),
                 indices.len() as c_uint,
                 UNNAMED,
+            );
+            self.pointercast(
+                ptr,
+                self.type_i8p_ext(rustc_abi::AddressSpace(address_space)),
             )
         }
     }
@@ -777,15 +798,20 @@ impl<'ll, 'tcx, 'a> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
         indices: &[&'ll Value],
     ) -> &'ll Value {
         trace!("gep inbounds: {ty:?} {:?} with indices {:?}", ptr, indices);
-        let ptr = self.pointercast(ptr, self.cx().type_ptr_to(ty));
+        let address_space = unsafe { llvm::LLVMGetPointerAddressSpace(self.val_ty(ptr)) };
+        let ptr = self.pointercast(ptr, unsafe { llvm::LLVMPointerType(ty, address_space) });
         unsafe {
-            llvm::LLVMBuildInBoundsGEP2(
+            let res = llvm::LLVMBuildInBoundsGEP2(
                 self.llbuilder,
                 ty,
                 ptr,
                 indices.as_ptr(),
                 indices.len() as c_uint,
                 UNNAMED,
+            );
+            self.pointercast(
+                ptr,
+                self.type_i8p_ext(rustc_abi::AddressSpace(address_space)),
             )
         }
     }
@@ -1066,6 +1092,7 @@ impl<'ll, 'tcx, 'a> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
 
     fn insert_value(&mut self, agg_val: &'ll Value, mut elt: &'ll Value, idx: u64) -> &'ll Value {
         trace!("insert value {:?}, {:?}, {:?}", agg_val, elt, idx);
+
         assert_eq!(idx as c_uint as u64, idx);
 
         let elt_ty = self.cx.val_ty(elt);
@@ -1168,9 +1195,13 @@ impl<'ll, 'tcx, 'a> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
                 );
             }
         };
+        let tuple = self.type_struct(&[self.val_ty(src), self.type_i1()], false);
         let res = self.atomic_op(
             dst,
-            |builder, dst| {
+            tuple,
+            |builder, dst, ty| {
+                builder.abort();
+                return builder.const_undef(ty);
                 // We are in a supported address space - just use ordinary atomics
                 unsafe {
                     llvm::LLVMRustBuildAtomicCmpXchg(
@@ -1184,7 +1215,7 @@ impl<'ll, 'tcx, 'a> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
                     )
                 }
             },
-            |builder, dst| {
+            |builder, dst, ty| {
                 // Local space is only accessible to the current thread.
                 // So, there are no synchronization issues, and we can emulate it using a simple load / compare / store.
                 let load: &'ll Value =
@@ -1221,8 +1252,13 @@ impl<'ll, 'tcx, 'a> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
         }
         self.atomic_op(
             dst,
-            |builder, dst| {
+            self.val_ty(src),
+            |builder, dst, ty| {
                 // We are in a supported address space - just use ordinary atomics
+                let address_space = unsafe { llvm::LLVMGetPointerAddressSpace(builder.val_ty(dst)) };
+                let dst_ty = unsafe { llvm::LLVMPointerType(ty, address_space) };
+                let dst = builder.pointercast(dst, dst_ty);
+                let src = if matches!(op, AtomicRmwBinOp::AtomicXchg) { builder.pointercast(src, dst_ty) } else { src };
                 unsafe {
                     llvm::LLVMBuildAtomicRMW(
                         builder.llbuilder,
@@ -1234,7 +1270,7 @@ impl<'ll, 'tcx, 'a> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
                     )
                 }
             },
-            |builder, dst| {
+            |builder, dst, ty| {
                 // Local space is only accessible to the current thread.
                 // So, there are no synchronization issues, and we can emulate it using a simple load / compare / store.
                 let load: &'ll Value =
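
The `emulate_local` closures exploit the fact that the local (thread) address space is visible only to the current thread, so an atomic there can be lowered to a plain, non-atomic read-modify-write; their bodies are truncated in this excerpt, and the `atomic_op` hunk further down currently shadows this path with an aborting stub while untyped pointers are unsupported. A rough sketch of the emulation idea for an atomic add, assuming the builder's ordinary `load`, `add` and `store` methods and an arbitrary alignment:

    // Sketch only: emulate an atomic add on thread-local memory, where no
    // other thread can observe the intermediate, non-atomic state.
    let old = builder.load(ty, dst, Align::ONE); // plain load of the current value
    let new = builder.add(old, src);             // apply the RMW operation
    builder.store(new, dst, Align::ONE);         // plain store of the result
    old                                          // an atomic RMW returns the previous value
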
@@ -1314,6 +1350,15 @@ impl<'ll, 'tcx, 'a> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
         let args = self.check_call("call", llty, llfn, args);
 
         let mut call = unsafe {
+            let llfn = if self.cx.type_kind(llty) == TypeKind::Pointer {
+                self.pointercast(llfn, llty)
+            } else if self.cx.type_kind(self.val_ty(llfn)) == TypeKind::Pointer {
+                let target_fnptr = llvm::LLVMPointerType(llty, 0);
+                self.pointercast(llfn, target_fnptr)
+            } else {
+                llfn
+            };
+
             llvm::LLVMRustBuildCall(
                 self.llbuilder,
                 llfn,
@@ -1719,7 +1764,6 @@ impl<'a, 'll, 'tcx> Builder<'a, 'll, 'tcx> {
         if !self.cx().sess().emit_lifetime_markers() {
             return;
         }
-
         self.call_intrinsic(intrinsic, &[self.cx.const_u64(size), ptr]);
     }
 
@@ -1750,9 +1794,16 @@ impl<'ll, 'tcx, 'a> Builder<'a, 'll, 'tcx> {
     fn atomic_op(
         &mut self,
         dst: &'ll Value,
-        atomic_supported: impl FnOnce(&mut Builder<'a, 'll, 'tcx>, &'ll Value) -> &'ll Value,
-        emulate_local: impl FnOnce(&mut Builder<'a, 'll, 'tcx>, &'ll Value) -> &'ll Value,
+        ty: &'ll Type,
+        atomic_supported: impl FnOnce(&mut Builder<'a, 'll, 'tcx>, &'ll Value, &'ll Type) -> &'ll Value,
+        emulate_local: impl FnOnce(&mut Builder<'a, 'll, 'tcx>, &'ll Value, &'ll Type) -> &'ll Value,
     ) -> &'ll Value {
+
+        let emulate_local = |builder: &mut Self, _, _| {
+            // ATOMICS don't work with untyped pointers *YET*.
+            builder.abort();
+            builder.const_undef(ty)
+        };
         // (FractalFir) Atomics in CUDA have some limitations, and we have to work around them.
         // For example, they are restricted in what address space they operate on.
         // CUDA has 4 address spaces(and a generic one, which is an union of all of those).
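
Taken together with the two hunks that follow, `atomic_op` dispatches on the destination's address space: run the real atomic in a supported space, fall back to the thread-local emulation, and trap on anything else. A condensed sketch of that control flow, using the block variable names from the hunks below; `is_supported_space` and `isspacep_local` stand in for the address-space checks computed earlier in the function:

    // Condensed sketch of the dispatch, not the literal function body.
    self.cond_br(is_supported_space, supported_bb, unsupported_bb);
    self.switch_to_block(supported_bb);
    let supported_res = atomic_supported(self, dst, ty);
    self.br(merge_bb);
    self.switch_to_block(unsupported_bb);
    self.cond_br(isspacep_local, local_bb, atomic_ub_bb);
    self.switch_to_block(local_bb);
    let local_res = emulate_local(self, dst, ty);
    self.br(merge_bb);
    self.switch_to_block(atomic_ub_bb);
    self.abort();        // neither supported nor local: almost certainly UB, so trap
    self.unreachable();
    self.switch_to_block(merge_bb);
    self.phi(self.val_ty(local_res), &[supported_res, local_res], &[supported_bb, local_bb])
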
@@ -1803,7 +1854,7 @@ impl<'ll, 'tcx, 'a> Builder<'a, 'll, 'tcx> {
         let merge_bb = self.append_sibling_block("atomic_op_done");
         // Execute atomic op if supported, then jump to merge
         self.switch_to_block(supported_bb);
-        let supported_res = atomic_supported(self, dst);
+        let supported_res = atomic_supported(self, dst, ty);
         self.br(merge_bb);
         // Check if the pointer is in the thread space. If so, we can emulate it.
         self.switch_to_block(unsupported_bb);
@@ -1822,15 +1873,15 @@ impl<'ll, 'tcx, 'a> Builder<'a, 'll, 'tcx> {
         self.cond_br(isspacep_local, local_bb, atomic_ub_bb);
         // The pointer is in the thread(local) space.
         self.switch_to_block(local_bb);
-        let local_res = emulate_local(self, dst);
+        let local_res = emulate_local(self, dst, ty);
         self.br(merge_bb);
         // The pointer is neither in the supported address space, nor the local space.
         // This is very likely UB. So, we trap here.
         // TODO: should we print some kind of a message here? NVVM supports printf.
         self.switch_to_block(atomic_ub_bb);
         self.abort();
         self.unreachable();
-        // Atomic is impl has finished, and we can now switch to the merge_bb
+        // Atomic impl has finished, and we can now switch to the merge_bb
         self.switch_to_block(merge_bb);
         self.phi(
             self.val_ty(local_res),
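
With the extra `ty` parameter, callers such as the `atomic_rmw` hunk earlier in this diff now thread the value type through to both closures. The new call shape, sketched after that hunk (closure bodies elided):

    // Sketch of an atomic_op call site after this change.
    self.atomic_op(
        dst,
        self.val_ty(src),                        // type of the value being operated on
        |builder, dst, ty| { /* hardware atomic path */ todo!() },
        |builder, dst, ty| { /* thread-local emulation path */ todo!() },
    )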