@@ -21,16 +21,16 @@ use std::ptr;
 use std::borrow::Cow;
 
 use rustc::ty::{self, Instance, ParamEnv, query::TyCtxtAt};
-use rustc::ty::layout::{self, Align, TargetDataLayout, Size, HasDataLayout};
-pub use rustc::mir::interpret::{truncate, write_target_uint, read_target_uint};
+use rustc::ty::layout::{Align, TargetDataLayout, Size, HasDataLayout};
+pub use rustc::mir::interpret::{truncate, write_target_uint, read_target_uint, MemoryAccess};
 use rustc_data_structures::fx::{FxHashSet, FxHashMap};
 
 use syntax::ast::Mutability;
 
 use super::{
     Pointer, AllocId, Allocation, ConstValue, GlobalId,
     EvalResult, Scalar, EvalErrorKind, AllocType, PointerArithmetic,
-    Machine, MemoryAccess, AllocMap, MayLeak, ScalarMaybeUndef,
+    Machine, AllocMap, MayLeak, ScalarMaybeUndef, AllocationExtra,
 };
 
 #[derive(Debug, PartialEq, Eq, Copy, Clone, Hash)]
@@ -297,6 +297,17 @@ impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
             })
         }
     }
+
+    /// Convenience forwarding method for `Allocation::check_bounds`.
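+    /// (The `access` flag is passed through to `Allocation::check_bounds`
+    /// unchanged; it presumably only affects the error reported when the
+    /// check fails, i.e. an actual memory access vs. a mere inbounds check.)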
+    #[inline(always)]
+    pub fn check_bounds(
+        &self,
+        ptr: Pointer<M::PointerTag>,
+        size: Size,
+        access: bool
+    ) -> EvalResult<'tcx> {
+        self.get(ptr.alloc_id)?.check_bounds(self, ptr, size, access)
+    }
 }
 
 /// Allocation accessors
@@ -582,6 +593,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
 impl<'a, 'mir, 'tcx, M> Memory<'a, 'mir, 'tcx, M>
 where
     M: Machine<'a, 'mir, 'tcx, PointerTag=(), AllocExtra=()>,
+    M::AllocExtra: AllocationExtra<()>,
     M::MemoryMap: AllocMap<AllocId, (MemoryKind<M::MemoryKinds>, Allocation)>,
 {
     /// mark an allocation as static and initialized, either mutable or not
@@ -621,3 +633,254 @@ where
         Ok(())
     }
 }
+
+/// Reading and writing
+impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
+    pub fn copy(
+        &mut self,
+        src: Scalar<M::PointerTag>,
+        src_align: Align,
+        dest: Scalar<M::PointerTag>,
+        dest_align: Align,
+        size: Size,
+        nonoverlapping: bool,
+    ) -> EvalResult<'tcx> {
+        self.copy_repeatedly(src, src_align, dest, dest_align, size, 1, nonoverlapping)
+    }
+
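+    /// Copies `size` bytes from `src` and writes them to `dest`, `length` times,
+    /// back to back; `dest` must therefore have room for `size * length` bytes.
+    /// Definedness and relocations are carried over for every repetition.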
+    pub fn copy_repeatedly(
+        &mut self,
+        src: Scalar<M::PointerTag>,
+        src_align: Align,
+        dest: Scalar<M::PointerTag>,
+        dest_align: Align,
+        size: Size,
+        length: u64,
+        nonoverlapping: bool,
+    ) -> EvalResult<'tcx> {
+        if size.bytes() == 0 {
+            // Nothing to do for ZST, other than checking alignment and non-NULLness.
+            self.check_align(src, src_align)?;
+            self.check_align(dest, dest_align)?;
+            return Ok(());
+        }
+        let src = src.to_ptr()?;
+        let dest = dest.to_ptr()?;
+
+        // First copy the relocations to a temporary buffer, because
+        // `get_bytes_mut` will clear the relocations, which is correct,
+        // since we don't want to keep any relocations at the target.
+        // (`get_bytes_with_undef_and_ptr` below checks that there are no
+        // relocations overlapping the edges; those would not be handled correctly.)
+        let relocations = {
+            let relocations = self.relocations(src, size)?;
+            let mut new_relocations = Vec::with_capacity(relocations.len() * (length as usize));
+            for i in 0..length {
+                new_relocations.extend(
+                    relocations
+                    .iter()
+                    .map(|&(offset, reloc)| {
+                        (offset + dest.offset - src.offset + (i * size * relocations.len() as u64),
+                         reloc)
+                    })
+                );
+            }
+
+            new_relocations
+        };
+
+        let tcx = self.tcx.tcx;
+
+        // This also checks alignment, and relocation edges on the src.
+        let src_bytes = self
+            .get(src.alloc_id)?
+            .get_bytes_with_undef_and_ptr(tcx, src, size, src_align)?
+            .as_ptr();
+        let dest_bytes = self
+            .get_mut(dest.alloc_id)?
+            .get_bytes_mut(tcx, dest, size * length, dest_align)?
+            .as_mut_ptr();
+
+        // SAFE: The above indexing would have panicked if there weren't at least `size` bytes
+        // behind `src` and `dest`. Also, we use the overlapping-safe `ptr::copy` if `src` and
+        // `dest` could possibly overlap.
+        // The pointers above remain valid even if the `HashMap` table is moved around because they
+        // point into the `Vec` storing the bytes.
+        unsafe {
+            assert_eq!(size.bytes() as usize as u64, size.bytes());
+            if src.alloc_id == dest.alloc_id {
+                if nonoverlapping {
+                    if (src.offset <= dest.offset && src.offset + size > dest.offset) ||
+                       (dest.offset <= src.offset && dest.offset + size > src.offset)
+                    {
+                        return err!(Intrinsic(
+                            "copy_nonoverlapping called on overlapping ranges".to_string(),
+                        ));
+                    }
+                }
+
+                for i in 0..length {
+                    ptr::copy(src_bytes,
+                              dest_bytes.offset((size.bytes() * i) as isize),
+                              size.bytes() as usize);
+                }
+            } else {
+                for i in 0..length {
+                    ptr::copy_nonoverlapping(src_bytes,
+                                             dest_bytes.offset((size.bytes() * i) as isize),
+                                             size.bytes() as usize);
+                }
+            }
+        }
+
+        // copy definedness to the destination
+        self.copy_undef_mask(src, dest, size, length)?;
+        // copy the relocations to the destination
+        self.get_mut(dest.alloc_id)?.relocations.insert_presorted(relocations);
+
+        Ok(())
+    }
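+    // Example: `copy_repeatedly(src, align, dest, align, size, 3, true)` writes
+    // three back-to-back copies of the `size`-byte source range to `dest`
+    // (the `length` parameter exists for cases like array repeat expressions `[x; N]`).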
+
+    pub fn read_c_str(&self, ptr: Pointer<M::PointerTag>) -> EvalResult<'tcx, &[u8]> {
+        self.get(ptr.alloc_id)?.read_c_str(self, ptr)
+    }
+
+    pub fn check_bytes(
+        &self,
+        ptr: Scalar<M::PointerTag>,
+        size: Size,
+        allow_ptr_and_undef: bool,
+    ) -> EvalResult<'tcx> {
+        // Empty accesses don't need to be valid pointers, but they should still be non-NULL
+        let align = Align::from_bytes(1, 1).unwrap();
+        if size.bytes() == 0 {
+            self.check_align(ptr, align)?;
+            return Ok(());
+        }
+        let ptr = ptr.to_ptr()?;
+        self.get(ptr.alloc_id)?.check_bytes(self, ptr, size, allow_ptr_and_undef)
+    }
+
+    pub fn read_bytes(&self, ptr: Scalar<M::PointerTag>, size: Size) -> EvalResult<'tcx, &[u8]> {
+        // Empty accesses don't need to be valid pointers, but they should still be non-NULL
+        let align = Align::from_bytes(1, 1).unwrap();
+        if size.bytes() == 0 {
+            self.check_align(ptr, align)?;
+            return Ok(&[]);
+        }
+        let ptr = ptr.to_ptr()?;
+        self.get(ptr.alloc_id)?.get_bytes(self, ptr, size, align)
+    }
+
+    pub fn write_bytes(&mut self, ptr: Scalar<M::PointerTag>, src: &[u8]) -> EvalResult<'tcx> {
+        // Empty accesses don't need to be valid pointers, but they should still be non-NULL
+        let align = Align::from_bytes(1, 1).unwrap();
+        if src.is_empty() {
+            self.check_align(ptr, align)?;
+            return Ok(());
+        }
+        let ptr = ptr.to_ptr()?;
+        let tcx = self.tcx.tcx;
+        self.get_mut(ptr.alloc_id)?.write_bytes(tcx, ptr, src)
+    }
+
+    pub fn write_repeat(
+        &mut self,
+        ptr: Scalar<M::PointerTag>,
+        val: u8,
+        count: Size
+    ) -> EvalResult<'tcx> {
+        // Empty accesses don't need to be valid pointers, but they should still be non-NULL
+        let align = Align::from_bytes(1, 1).unwrap();
+        if count.bytes() == 0 {
+            self.check_align(ptr, align)?;
+            return Ok(());
+        }
+        let ptr = ptr.to_ptr()?;
+        let tcx = self.tcx.tcx;
+        self.get_mut(ptr.alloc_id)?.write_repeat(tcx, ptr, val, count)
+    }
+
+    /// Read a *non-ZST* scalar
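+    /// (unlike `read_bytes` above, there is no empty-access fast path here,
+    /// so callers must not pass a zero-sized access)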
+    pub fn read_scalar(
+        &self,
+        ptr: Pointer<M::PointerTag>,
+        ptr_align: Align,
+        size: Size
+    ) -> EvalResult<'tcx, ScalarMaybeUndef<M::PointerTag>> {
+        self.get(ptr.alloc_id)?.read_scalar(self, ptr, ptr_align, size)
+    }
+
+    pub fn read_ptr_sized(
+        &self,
+        ptr: Pointer<M::PointerTag>,
+        ptr_align: Align
+    ) -> EvalResult<'tcx, ScalarMaybeUndef<M::PointerTag>> {
+        self.read_scalar(ptr, ptr_align, self.pointer_size())
+    }
+
+    /// Write a *non-ZST* scalar
+    pub fn write_scalar(
+        &mut self,
+        ptr: Pointer<M::PointerTag>,
+        ptr_align: Align,
+        val: ScalarMaybeUndef<M::PointerTag>,
+        type_size: Size,
+    ) -> EvalResult<'tcx> {
+        let tcx = self.tcx.tcx;
+        self.get_mut(ptr.alloc_id)?.write_scalar(tcx, ptr, ptr_align, val, type_size)
+    }
+
+    pub fn write_ptr_sized(
+        &mut self,
+        ptr: Pointer<M::PointerTag>,
+        ptr_align: Align,
+        val: ScalarMaybeUndef<M::PointerTag>
+    ) -> EvalResult<'tcx> {
+        let ptr_size = self.pointer_size();
+        self.write_scalar(ptr, ptr_align, val, ptr_size)
+    }
+}
+
+/// Relocations
+impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
+    /// Return all relocations overlapping with the given ptr-offset pair.
+    fn relocations(
+        &self,
+        ptr: Pointer<M::PointerTag>,
+        size: Size,
+    ) -> EvalResult<'tcx, &[(Size, (M::PointerTag, AllocId))]> {
+        self.get(ptr.alloc_id)?.relocations(self, ptr, size)
+    }
+}
+
+/// Undefined bytes
+impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
+    // FIXME: Add a fast version for the common, nonoverlapping case
+    fn copy_undef_mask(
+        &mut self,
+        src: Pointer<M::PointerTag>,
+        dest: Pointer<M::PointerTag>,
+        size: Size,
+        repeat: u64,
+    ) -> EvalResult<'tcx> {
+        // The bits have to be saved locally before writing to dest in case src and dest overlap.
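+        // (Cloning the mask below also satisfies the borrow checker: we read
+        // `src`'s mask while holding a mutable borrow of `dest`'s allocation,
+        // which may be the very same allocation.)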
+        assert_eq!(size.bytes() as usize as u64, size.bytes());
+
+        let undef_mask = self.get(src.alloc_id)?.undef_mask.clone();
+        let dest_allocation = self.get_mut(dest.alloc_id)?;
+
+        for i in 0..size.bytes() {
+            let defined = undef_mask.get(src.offset + Size::from_bytes(i));
+
+            for j in 0..repeat {
+                dest_allocation.undef_mask.set(
+                    dest.offset + Size::from_bytes(i + (size.bytes() * j)),
+                    defined
+                );
+            }
+        }
+
+        Ok(())
+    }
+}