@@ -36,12 +36,14 @@ use ty::util::{IntTypeExt, Discr};
 use ty::walk::TypeWalker;
 use util::captures::Captures;
 use util::nodemap::{NodeSet, DefIdMap, FxHashMap};
+use arena::SyncDroplessArena;

 use serialize::{self, Encodable, Encoder};
 use std::cell::RefCell;
 use std::cmp::{self, Ordering};
 use std::fmt;
 use std::hash::{Hash, Hasher};
+use std::marker::PhantomData;
 use std::ops::Deref;
 use rustc_data_structures::sync::Lrc;
 use std::slice;
@@ -582,54 +584,120 @@ impl<'gcx: 'tcx, 'tcx> Canonicalize<'gcx, 'tcx> for Ty<'tcx> {
     }
 }

+extern {
+    /// A dummy type used to force Slice to be unsized without requiring fat pointers
+    type OpaqueSliceContents;
+}
+
 /// A wrapper for slices with the additional invariant
 /// that the slice is interned and no other slice with
 /// the same contents can exist in the same context.
 /// This means we can use pointer + length for both
 /// equality comparisons and hashing.
-#[derive(Debug, RustcEncodable)]
-pub struct Slice<T>([T]);
+pub struct Slice<T>(PhantomData<T>, OpaqueSliceContents);
+
+impl<T> Slice<T> {
+    /// Returns the offset of the array
+    #[inline(always)]
+    fn offset() -> usize {
+        // Align up the size of the len (usize) field
+        let align = mem::align_of::<T>();
+        let align_mask = align - 1;
+        let offset = mem::size_of::<usize>();
+        (offset + align_mask) & !align_mask
+    }
+}
+
+impl<T: Copy> Slice<T> {
+    #[inline]
+    fn from_arena<'tcx>(arena: &'tcx SyncDroplessArena, slice: &[T]) -> &'tcx Slice<T> {
+        assert!(!mem::needs_drop::<T>());
+        assert!(mem::size_of::<T>() != 0);
+        assert!(slice.len() != 0);
+
+        let offset = Slice::<T>::offset();
+        let size = offset + slice.len() * mem::size_of::<T>();
+
+        let mem: *mut u8 = arena.alloc_raw(
+            size,
+            cmp::max(mem::align_of::<T>(), mem::align_of::<usize>())).as_mut_ptr();
+
+        unsafe {
+            // Write the length
+            *(mem as *mut usize) = slice.len();
+
+            // Write the elements
+            let arena_slice = slice::from_raw_parts_mut(
+                mem.offset(offset as isize) as *mut T,
+                slice.len());
+            arena_slice.copy_from_slice(slice);

+            &*(mem as *const Slice<T>)
+        }
+    }
+}
+
+impl<T: fmt::Debug> fmt::Debug for Slice<T> {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        (**self).fmt(f)
+    }
+}
+
+impl<T: Encodable> Encodable for Slice<T> {
+    #[inline]
+    fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
+        (**self).encode(s)
+    }
+}

 impl<T> Ord for Slice<T> where T: Ord {
     fn cmp(&self, other: &Slice<T>) -> Ordering {
         if self == other { Ordering::Equal } else {
-            <[T] as Ord>::cmp(&self.0, &other.0)
+            <[T] as Ord>::cmp(&**self, &**other)
         }
     }
 }

 impl<T> PartialOrd for Slice<T> where T: PartialOrd {
     fn partial_cmp(&self, other: &Slice<T>) -> Option<Ordering> {
         if self == other { Some(Ordering::Equal) } else {
-            <[T] as PartialOrd>::partial_cmp(&self.0, &other.0)
+            <[T] as PartialOrd>::partial_cmp(&**self, &**other)
         }
     }
 }

-impl<T> PartialEq for Slice<T> {
+impl<T: PartialEq> PartialEq for Slice<T> {
     #[inline]
     fn eq(&self, other: &Slice<T>) -> bool {
-        (&self.0 as *const [T]) == (&other.0 as *const [T])
+        (self as *const _) == (other as *const _)
     }
 }
-impl<T> Eq for Slice<T> {}
+impl<T: Eq> Eq for Slice<T> {}

 impl<T> Hash for Slice<T> {
+    #[inline]
     fn hash<H: Hasher>(&self, s: &mut H) {
-        (self.as_ptr(), self.len()).hash(s)
+        (self as *const Slice<T>).hash(s)
     }
 }

 impl<T> Deref for Slice<T> {
     type Target = [T];
+    #[inline(always)]
     fn deref(&self) -> &[T] {
-        &self.0
+        unsafe {
+            let raw = self as *const _ as *const u8;
+            let len = *(raw as *const usize);
+            let slice = raw.offset(Slice::<T>::offset() as isize);
+            slice::from_raw_parts(slice as *const T, len)
+        }
     }
 }

 impl<'a, T> IntoIterator for &'a Slice<T> {
     type Item = &'a T;
     type IntoIter = <&'a [T] as IntoIterator>::IntoIter;
+    #[inline(always)]
     fn into_iter(self) -> Self::IntoIter {
         self[..].iter()
     }
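
The layout that from_arena writes and deref reads back is a usize length header followed by the elements, with the element area rounded up to T's alignment. Below is a minimal standalone sketch of that offset arithmetic, not taken from this commit; offset_for and Align16 are made-up names used only for illustration.

    use std::mem;

    // Round the usize length header up to T's alignment, mirroring Slice::<T>::offset().
    fn offset_for<T>() -> usize {
        let align = mem::align_of::<T>();
        (mem::size_of::<usize>() + align - 1) & !(align - 1)
    }

    // A type with a guaranteed 16-byte alignment, so the numbers below are the
    // same on 32-bit and 64-bit targets.
    #[allow(dead_code)]
    #[repr(C, align(16))]
    struct Align16([u8; 16]);

    fn main() {
        // align_of::<u8>() == 1, so u8 elements start right after the length header.
        assert_eq!(offset_for::<u8>(), mem::size_of::<usize>());

        // A 16-byte-aligned element type pads the 4- or 8-byte header out to offset 16.
        assert_eq!(offset_for::<Align16>(), 16);

        // Total allocation size for a slice of 3 elements, as computed in from_arena.
        let size = offset_for::<Align16>() + 3 * mem::size_of::<Align16>();
        assert_eq!(size, 64);
    }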
@@ -638,9 +706,14 @@ impl<'a, T> IntoIterator for &'a Slice<T> {
 impl<'tcx> serialize::UseSpecializedDecodable for &'tcx Slice<Ty<'tcx>> {}

 impl<T> Slice<T> {
+    #[inline(always)]
     pub fn empty<'a>() -> &'a Slice<T> {
+        #[repr(align(64), C)]
+        struct EmptySlice([u8; 64]);
+        static EMPTY_SLICE: EmptySlice = EmptySlice([0; 64]);
+        assert!(mem::align_of::<T>() <= 64);
         unsafe {
-            mem::transmute(slice::from_raw_parts(0x1 as *const T, 0))
+            &*(&EMPTY_SLICE as *const _ as *const Slice<T>)
         }
     }
 }
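
For context on why the PartialEq and Hash impls above can get away with comparing and hashing only the address: interning guarantees that every distinct sequence is allocated exactly once, so pointer identity coincides with content equality. A toy sketch of that idea follows; Interner and intern are hypothetical names, and it leaks boxes where the real code hands out arena-allocated Slice<T> values.

    use std::collections::HashMap;

    #[derive(Default)]
    struct Interner {
        map: HashMap<Vec<u32>, &'static [u32]>,
    }

    impl Interner {
        fn intern(&mut self, v: Vec<u32>) -> &'static [u32] {
            if let Some(&s) = self.map.get(&v) {
                return s;
            }
            // Leak to get a stable address; rustc allocates from SyncDroplessArena instead.
            let interned: &'static [u32] = Box::leak(v.clone().into_boxed_slice());
            self.map.insert(v, interned);
            interned
        }
    }

    fn main() {
        let mut interner = Interner::default();
        let a = interner.intern(vec![1, 2, 3]);
        let b = interner.intern(vec![1, 2, 3]);
        let c = interner.intern(vec![4, 5]);

        // Equal contents come back as the same allocation, so comparing or
        // hashing the pointer alone agrees with comparing the contents.
        assert!(a.as_ptr() == b.as_ptr());
        assert!(a.as_ptr() != c.as_ptr());
    }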