@@ -7,6 +7,7 @@ use core::{cmp, fmt, hash, isize, slice, usize};
7
7
use alloc:: {
8
8
borrow:: { Borrow , BorrowMut } ,
9
9
boxed:: Box ,
10
+ collections:: TryReserveError ,
10
11
string:: String ,
11
12
vec,
12
13
vec:: Vec ,
@@ -526,6 +527,8 @@ impl BytesMut {
526
527
/// and the original buffer is large enough to fit the requested additional
527
528
/// capacity, then reallocations will never happen.
528
529
///
530
+ /// See also [`Self::try_reserve()`].
531
+ ///
529
532
/// # Examples
530
533
///
531
534
/// In the following example, a new buffer is allocated.
@@ -564,21 +567,89 @@ impl BytesMut {
564
567
/// Panics if the new capacity overflows `usize`.
565
568
#[ inline]
566
569
pub fn reserve ( & mut self , additional : usize ) {
570
+ match self . try_reserve ( additional) {
571
+ Err ( err) => panic ! ( "fail to reserve: {}" , err) ,
572
+ Ok ( _) => { }
573
+ }
574
+ }
575
+
576
+ /// Tries to reserve capacity for at least `additional` more bytes to be inserted
577
+ /// into the given `BytesMut`.
578
+ ///
579
+ /// More than `additional` bytes may be reserved in order to avoid frequent
580
+ /// reallocations. A call to `try_reserve` may result in an allocation.
581
+ ///
582
+ /// Before allocating new buffer space, the function will attempt to reclaim
583
+ /// space in the existing buffer. If the current handle references a view
584
+ /// into a larger original buffer, and all other handles referencing part
585
+ /// of the same original buffer have been dropped, then the current view
586
+ /// can be copied/shifted to the front of the buffer and the handle can take
587
+ /// ownership of the full buffer, provided that the full buffer is large
588
+ /// enough to fit the requested additional capacity.
589
+ ///
590
+ /// This optimization will only happen if shifting the data from the current
591
+ /// view to the front of the buffer is not too expensive in terms of the
592
+ /// (amortized) time required. The precise condition is subject to change;
593
+ /// as of now, the length of the data being shifted needs to be at least as
594
+ /// large as the distance that it's shifted by. If the current view is empty
595
+ /// and the original buffer is large enough to fit the requested additional
596
+ /// capacity, then reallocations will never happen.
597
+ ///
598
+ /// # Errors
599
+ ///
600
+ /// If the capacity overflows, or the allocator reports a failure, then an error is returned.
601
+ ///
602
+ /// # Examples
603
+ ///
604
+ /// In the following example, a new buffer is allocated.
605
+ ///
606
+ /// ```
607
+ /// use bytes::BytesMut;
608
+ ///
609
+ /// let mut buf = BytesMut::from(&b"hello"[..]);
610
+ /// let res = buf.try_reserve(64);
611
+ /// assert!(res.is_ok());
612
+ /// assert!(buf.capacity() >= 69);
613
+ /// ```
614
+ ///
615
+ /// In the following example, the existing buffer is reclaimed.
616
+ ///
617
+ /// ```
618
+ /// use bytes::{BytesMut, BufMut};
619
+ ///
620
+ /// let mut buf = BytesMut::with_capacity(128);
621
+ /// buf.put(&[0; 64][..]);
622
+ ///
623
+ /// let ptr = buf.as_ptr();
624
+ /// let other = buf.split();
625
+ ///
626
+ /// assert!(buf.is_empty());
627
+ /// assert_eq!(buf.capacity(), 64);
628
+ ///
629
+ /// drop(other);
630
+ /// let res = buf.try_reserve(128);
631
+ ///
632
+ /// assert!(res.is_ok());
633
+ /// assert_eq!(buf.capacity(), 128);
634
+ /// assert_eq!(buf.as_ptr(), ptr);
635
+ /// ```
636
+ #[ inline]
637
+ pub fn try_reserve ( & mut self , additional : usize ) -> Result < ( ) , TryReserveError > {
567
638
let len = self . len ( ) ;
568
639
let rem = self . capacity ( ) - len;
569
640
570
641
if additional <= rem {
571
642
// The handle can already store at least `additional` more bytes, so
572
643
// there is no further work needed to be done.
573
- return ;
644
+ return Ok ( ( ) ) ;
574
645
}
575
646
576
- self . reserve_inner ( additional) ;
647
+ self . reserve_inner ( additional)
577
648
}
578
649
579
- // In separate function to allow the short-circuits in `reserve ` to
650
+ // In separate function to allow the short-circuits in `try_reserve ` to
580
651
// be inline-able. Significantly helps performance.
581
- fn reserve_inner ( & mut self , additional : usize ) {
652
+ fn reserve_inner ( & mut self , additional : usize ) -> Result < ( ) , TryReserveError > {
582
653
let len = self . len ( ) ;
583
654
let kind = self . kind ( ) ;
584
655
@@ -627,15 +698,15 @@ impl BytesMut {
627
698
// allocate more space!
628
699
let mut v =
629
700
ManuallyDrop :: new ( rebuild_vec ( self . ptr . as_ptr ( ) , self . len , self . cap , off) ) ;
630
- v. reserve ( additional) ;
701
+ v. try_reserve ( additional) ? ;
631
702
632
703
// Update the info
633
704
self . ptr = vptr ( v. as_mut_ptr ( ) . add ( off) ) ;
634
705
self . len = v. len ( ) - off;
635
706
self . cap = v. capacity ( ) - off;
636
707
}
637
708
638
- return ;
709
+ return Ok ( ( ) ) ;
639
710
}
640
711
}
641
712
@@ -714,21 +785,23 @@ impl BytesMut {
714
785
// care about in the unused capacity before calling `reserve`.
715
786
debug_assert ! ( off + len <= v. capacity( ) ) ;
716
787
v. set_len ( off + len) ;
717
- v. reserve ( new_cap - v. len ( ) ) ;
788
+ v. try_reserve ( new_cap - v. len ( ) ) ? ;
718
789
719
790
// Update the info
720
791
self . ptr = vptr ( v. as_mut_ptr ( ) . add ( off) ) ;
721
792
self . cap = v. capacity ( ) - off;
722
793
}
723
794
724
- return ;
795
+ return Ok ( ( ) ) ;
725
796
} else {
726
797
new_cap = cmp:: max ( new_cap, original_capacity) ;
727
798
}
728
799
}
729
800
730
801
// Create a new vector to store the data
731
- let mut v = ManuallyDrop :: new ( Vec :: with_capacity ( new_cap) ) ;
802
+ let mut v = Vec :: new ( ) ;
803
+ v. try_reserve ( new_cap) ?;
804
+ let mut v = ManuallyDrop :: new ( v) ;
732
805
733
806
// Copy the bytes
734
807
v. extend_from_slice ( self . as_ref ( ) ) ;
@@ -743,6 +816,8 @@ impl BytesMut {
743
816
self . ptr = vptr ( v. as_mut_ptr ( ) ) ;
744
817
self . len = v. len ( ) ;
745
818
self . cap = v. capacity ( ) ;
819
+
820
+ Ok ( ( ) )
746
821
}
747
822
748
823
/// Appends given bytes to this `BytesMut`.
0 commit comments