@@ -416,7 +416,8 @@ impl<Prov: Provenance, Extra, Bytes: AllocBytes> Allocation<Prov, Extra, Bytes>
     /// even when new allocations are pushed to the `HashMap`. `mem_copy_repeatedly` relies
     /// on that.
     #[inline]
-    pub fn get_bytes_unchecked(&self, range: AllocRange) -> &[u8] {
+    pub fn get_bytes_unchecked(&self, range: impl Into<AllocRange>) -> &[u8] {
+        let range = range.into();
         &self.bytes[range.start.bytes_usize()..range.end().bytes_usize()]
     }
 
@@ -430,8 +431,9 @@ impl<Prov: Provenance, Extra, Bytes: AllocBytes> Allocation<Prov, Extra, Bytes>
     pub fn get_bytes_strip_provenance(
         &self,
         cx: &impl HasDataLayout,
-        range: AllocRange,
+        range: impl Into<AllocRange>,
     ) -> AllocResult<&[u8]> {
+        let range = range.into();
         self.init_mask.is_range_initialized(range).map_err(|uninit_range| {
             AllocError::InvalidUninitBytes(Some(UninitBytesAccess {
                 access: range,
@@ -455,8 +457,9 @@ impl<Prov: Provenance, Extra, Bytes: AllocBytes> Allocation<Prov, Extra, Bytes>
     pub fn get_bytes_mut(
         &mut self,
         cx: &impl HasDataLayout,
-        range: AllocRange,
+        range: impl Into<AllocRange>,
     ) -> AllocResult<&mut [u8]> {
+        let range = range.into();
         self.mark_init(range, true);
         self.provenance.clear(range, cx)?;
 
@@ -467,8 +470,9 @@ impl<Prov: Provenance, Extra, Bytes: AllocBytes> Allocation<Prov, Extra, Bytes>
     pub fn get_bytes_mut_ptr(
         &mut self,
         cx: &impl HasDataLayout,
-        range: AllocRange,
+        range: impl Into<AllocRange>,
     ) -> AllocResult<*mut [u8]> {
+        let range = range.into();
         self.mark_init(range, true);
         self.provenance.clear(range, cx)?;
 
@@ -482,7 +486,8 @@ impl<Prov: Provenance, Extra, Bytes: AllocBytes> Allocation<Prov, Extra, Bytes>
 /// Reading and writing.
 impl<Prov: Provenance, Extra, Bytes: AllocBytes> Allocation<Prov, Extra, Bytes> {
     /// Sets the init bit for the given range.
-    fn mark_init(&mut self, range: AllocRange, is_init: bool) {
+    fn mark_init(&mut self, range: impl Into<AllocRange>, is_init: bool) {
+        let range = range.into();
         if range.size.bytes() == 0 {
             return;
         }
@@ -503,9 +508,10 @@ impl<Prov: Provenance, Extra, Bytes: AllocBytes> Allocation<Prov, Extra, Bytes>
     pub fn read_scalar(
         &self,
         cx: &impl HasDataLayout,
-        range: AllocRange,
+        range: impl Into<AllocRange>,
         read_provenance: bool,
     ) -> AllocResult<Scalar<Prov>> {
+        let range = range.into();
         // First and foremost, if anything is uninit, bail.
         if self.init_mask.is_range_initialized(range).is_err() {
             return Err(AllocError::InvalidUninitBytes(None));
@@ -565,9 +571,10 @@ impl<Prov: Provenance, Extra, Bytes: AllocBytes> Allocation<Prov, Extra, Bytes>
     pub fn write_scalar(
         &mut self,
         cx: &impl HasDataLayout,
-        range: AllocRange,
+        range: impl Into<AllocRange>,
         val: Scalar<Prov>,
     ) -> AllocResult {
+        let range = range.into();
         assert!(self.mutability == Mutability::Mut);
 
         // `to_bits_or_ptr_internal` is the right method because we just want to store this data
@@ -594,7 +601,12 @@ impl<Prov: Provenance, Extra, Bytes: AllocBytes> Allocation<Prov, Extra, Bytes>
     }
 
     /// Write "uninit" to the given memory range.
-    pub fn write_uninit(&mut self, cx: &impl HasDataLayout, range: AllocRange) -> AllocResult {
+    pub fn write_uninit(
+        &mut self,
+        cx: &impl HasDataLayout,
+        range: impl Into<AllocRange>,
+    ) -> AllocResult {
+        let range = range.into();
         self.mark_init(range, false);
         self.provenance.clear(range, cx)?;
         return Ok(());
@@ -614,7 +626,13 @@ impl<Prov: Provenance, Extra, Bytes: AllocBytes> Allocation<Prov, Extra, Bytes>
     ///
     /// This is dangerous to use as it can violate internal `Allocation` invariants!
     /// It only exists to support an efficient implementation of `mem_copy_repeatedly`.
-    pub fn init_mask_apply_copy(&mut self, copy: InitCopy, range: AllocRange, repeat: u64) {
+    pub fn init_mask_apply_copy(
+        &mut self,
+        copy: InitCopy,
+        range: impl Into<AllocRange>,
+        repeat: u64,
+    ) {
+        let range = range.into();
         self.init_mask.apply_copy(copy, range, repeat)
     }
 }
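Every hunk makes the same change: a parameter that previously required a concrete `AllocRange` now accepts `impl Into<AllocRange>`, and the body normalizes it once with `let range = range.into();` before continuing to use a plain `AllocRange`. Below is a minimal, self-contained sketch of that pattern; the `Size`, `AllocRange`, `Allocation`, and `From<Range<u64>>` definitions are simplified stand-ins for illustration only, not the actual rustc items or conversions.

```rust
use std::ops::Range;

// Simplified stand-ins for the real `Size` and `AllocRange` types.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct Size(u64);

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct AllocRange {
    start: Size,
    size: Size,
}

// Hypothetical conversion so callers can pass a plain `Range<u64>` instead of
// constructing an `AllocRange` by hand; the real crate decides which `From`
// impls it wants to accept.
impl From<Range<u64>> for AllocRange {
    fn from(r: Range<u64>) -> Self {
        AllocRange { start: Size(r.start), size: Size(r.end - r.start) }
    }
}

struct Allocation {
    bytes: Vec<u8>,
}

impl Allocation {
    // Same shape as the diff: accept anything convertible into `AllocRange`,
    // normalize it once at the top, and keep the body unchanged.
    fn get_bytes_unchecked(&self, range: impl Into<AllocRange>) -> &[u8] {
        let range = range.into();
        let start = range.start.0 as usize;
        &self.bytes[start..start + range.size.0 as usize]
    }
}

fn main() {
    let alloc = Allocation { bytes: vec![1, 2, 3, 4, 5] };
    // Callers can still pass an explicit `AllocRange`...
    let explicit = AllocRange { start: Size(1), size: Size(2) };
    assert_eq!(alloc.get_bytes_unchecked(explicit), &[2, 3]);
    // ...or anything with an `Into<AllocRange>` conversion, such as `1..3`.
    assert_eq!(alloc.get_bytes_unchecked(1u64..3), &[2, 3]);
}
```

The one-line `range.into()` at the top keeps the generic parameter out of the function body, so only the signature changes while the existing logic continues to operate on a concrete `AllocRange`.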