Skip to content
This repository was archived by the owner on Nov 27, 2020. It is now read-only.

Commit 0794539

Browse files
committed
Make trait methods take `&self` instead of `&mut self`
This allows the traits to be implemented on immutable references. Internal mutability can still be achieved through `Cell` or `RefCell`.
1 parent 465a2cf commit 0794539

File tree

5 files changed

+36
-36
lines changed

5 files changed

+36
-36
lines changed

src/alloc/mod.rs

Lines changed: 20 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -149,7 +149,7 @@ pub trait BuildAllocRef: Sized {
149149
/// * the alignment of the `layout` must match the alignment used to allocate that block of
150150
/// memory
151151
unsafe fn build_alloc_ref(
152-
&mut self,
152+
&self,
153153
ptr: NonNull<u8>,
154154
layout: Option<NonZeroLayout>,
155155
) -> Self::Ref;
@@ -158,23 +158,23 @@ pub trait BuildAllocRef: Sized {
158158
pub trait DeallocRef: Sized {
159159
type BuildAlloc: BuildAllocRef<Ref = Self>;
160160

161-
fn get_build_alloc(&mut self) -> Self::BuildAlloc;
161+
fn get_build_alloc(&self) -> Self::BuildAlloc;
162162

163163
/// # Safety
164164
///
165165
/// * `ptr` must denote a block of memory currently allocated via this allocator
166166
/// * `layout` must *fit* that block of memory
167167
/// * the alignment of the `layout` must match the alignment used to allocate that block of
168168
/// memory
169-
unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: NonZeroLayout);
169+
unsafe fn dealloc(&self, ptr: NonNull<u8>, layout: NonZeroLayout);
170170
}
171171

172172
pub trait AllocRef: DeallocRef {
173173
type Error;
174174

175-
fn alloc(&mut self, layout: NonZeroLayout) -> Result<NonNull<u8>, Self::Error>;
175+
fn alloc(&self, layout: NonZeroLayout) -> Result<NonNull<u8>, Self::Error>;
176176

177-
fn alloc_zeroed(&mut self, layout: NonZeroLayout) -> Result<NonNull<u8>, Self::Error> {
177+
fn alloc_zeroed(&self, layout: NonZeroLayout) -> Result<NonNull<u8>, Self::Error> {
178178
let size = layout.size();
179179
let p = self.alloc(layout)?;
180180
unsafe {
@@ -193,7 +193,7 @@ pub trait AllocRef: DeallocRef {
193193
/// * `layout` must *fit* the `ptr` (see above); note the `new_size` argument need not fit it
194194
/// * `new_size` must not be less than `layout.size()`
195195
unsafe fn grow_in_place(
196-
&mut self,
196+
&self,
197197
ptr: NonNull<u8>,
198198
layout: NonZeroLayout,
199199
new_size: NonZeroUsize,
@@ -212,7 +212,7 @@ pub trait AllocRef: DeallocRef {
212212
/// * `layout` must *fit* the `ptr` (see above); note the `new_size` argument need not fit it
213213
/// * `new_size` must not be greater than `layout.size()` (and must be greater than zero)
214214
unsafe fn shrink_in_place(
215-
&mut self,
215+
&self,
216216
ptr: NonNull<u8>,
217217
layout: NonZeroLayout,
218218
new_size: NonZeroUsize,
@@ -251,7 +251,7 @@ pub trait ReallocRef: AllocRef {
251251
/// implement this trait atop an underlying native allocation
252252
/// library that aborts on memory exhaustion.)
253253
unsafe fn realloc(
254-
&mut self,
254+
&self,
255255
ptr: NonNull<u8>,
256256
old_layout: NonZeroLayout,
257257
new_layout: NonZeroLayout,
@@ -297,7 +297,7 @@ macro_rules! impl_buildalloc_alloc_zst {
297297
type Ref = Self;
298298

299299
unsafe fn build_alloc_ref(
300-
&mut self,
300+
&self,
301301
_ptr: NonNull<u8>,
302302
_layout: Option<NonZeroLayout>,
303303
) -> Self::Ref {
@@ -314,11 +314,11 @@ impl_buildalloc_alloc_zst!(System);
314314
impl DeallocRef for Global {
315315
type BuildAlloc = Self;
316316

317-
fn get_build_alloc(&mut self) -> Self::BuildAlloc {
317+
fn get_build_alloc(&self) -> Self::BuildAlloc {
318318
Self
319319
}
320320

321-
unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: NonZeroLayout) {
321+
unsafe fn dealloc(&self, ptr: NonNull<u8>, layout: NonZeroLayout) {
322322
#[allow(deprecated)]
323323
dealloc(ptr.as_ptr(), layout.into())
324324
}
@@ -327,14 +327,14 @@ impl DeallocRef for Global {
327327
impl AllocRef for Global {
328328
type Error = AllocErr;
329329

330-
fn alloc(&mut self, layout: NonZeroLayout) -> Result<NonNull<u8>, Self::Error> {
330+
fn alloc(&self, layout: NonZeroLayout) -> Result<NonNull<u8>, Self::Error> {
331331
#[allow(deprecated)]
332332
unsafe {
333333
NonNull::new(alloc(layout.into())).ok_or(AllocErr)
334334
}
335335
}
336336

337-
fn alloc_zeroed(&mut self, layout: NonZeroLayout) -> Result<NonNull<u8>, Self::Error> {
337+
fn alloc_zeroed(&self, layout: NonZeroLayout) -> Result<NonNull<u8>, Self::Error> {
338338
#[allow(deprecated)]
339339
unsafe {
340340
NonNull::new(alloc_zeroed(layout.into())).ok_or(AllocErr)
@@ -345,7 +345,7 @@ impl AllocRef for Global {
345345
impl ReallocRef for Global {
346346
// FIXME: Remove `else` branch. This is needed, as std provides old method.
347347
unsafe fn realloc(
348-
&mut self,
348+
&self,
349349
ptr: NonNull<u8>,
350350
old_layout: NonZeroLayout,
351351
new_layout: NonZeroLayout,
@@ -369,11 +369,11 @@ impl ReallocRef for Global {
369369
impl DeallocRef for System {
370370
type BuildAlloc = Self;
371371

372-
fn get_build_alloc(&mut self) -> Self::BuildAlloc {
372+
fn get_build_alloc(&self) -> Self::BuildAlloc {
373373
Self
374374
}
375375

376-
unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: NonZeroLayout) {
376+
unsafe fn dealloc(&self, ptr: NonNull<u8>, layout: NonZeroLayout) {
377377
GlobalAlloc::dealloc(self, ptr.as_ptr(), layout.into())
378378
}
379379
}
@@ -382,11 +382,11 @@ impl DeallocRef for System {
382382
impl AllocRef for System {
383383
type Error = AllocErr;
384384

385-
fn alloc(&mut self, layout: NonZeroLayout) -> Result<NonNull<u8>, Self::Error> {
385+
fn alloc(&self, layout: NonZeroLayout) -> Result<NonNull<u8>, Self::Error> {
386386
unsafe { NonNull::new(GlobalAlloc::alloc(self, layout.into())).ok_or(AllocErr) }
387387
}
388388

389-
fn alloc_zeroed(&mut self, layout: NonZeroLayout) -> Result<NonNull<u8>, Self::Error> {
389+
fn alloc_zeroed(&self, layout: NonZeroLayout) -> Result<NonNull<u8>, Self::Error> {
390390
unsafe { NonNull::new(GlobalAlloc::alloc_zeroed(self, layout.into())).ok_or(AllocErr) }
391391
}
392392
}
@@ -395,7 +395,7 @@ impl AllocRef for System {
395395
impl ReallocRef for System {
396396
// FIXME: Remove `else` branch. This is needed, as std provides old method.
397397
unsafe fn realloc(
398-
&mut self,
398+
&self,
399399
ptr: NonNull<u8>,
400400
old_layout: NonZeroLayout,
401401
new_layout: NonZeroLayout,
@@ -417,7 +417,7 @@ impl ReallocRef for System {
417417

418418
#[inline]
419419
unsafe fn alloc_copy_dealloc<A: ReallocRef>(
420-
alloc: &mut A,
420+
alloc: &A,
421421
ptr: NonNull<u8>,
422422
old_layout: NonZeroLayout,
423423
new_layout: NonZeroLayout,

src/boxed.rs

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -201,7 +201,7 @@ impl<T, A: AllocRef> Box<T, A> {
201201
/// let five = Box::try_new_in(5, Global)?;
202202
/// # Ok::<_, alloc_wg::alloc::AllocErr>(())
203203
/// ```
204-
pub fn try_new_in(x: T, mut a: A) -> Result<Self, A::Error> {
204+
pub fn try_new_in(x: T, a: A) -> Result<Self, A::Error> {
205205
let ptr = if let Ok(layout) = NonZeroLayout::new::<T>() {
206206
let ptr = a.alloc(layout)?.cast::<T>();
207207
unsafe {
@@ -257,7 +257,7 @@ impl<T, A: AllocRef> Box<T, A> {
257257
/// assert_eq!(*five, 5);
258258
/// # Ok::<_, alloc_wg::alloc::AllocErr>(())
259259
/// ```
260-
pub fn try_new_uninit_in(mut a: A) -> Result<Box<mem::MaybeUninit<T>, A>, A::Error> {
260+
pub fn try_new_uninit_in(a: A) -> Result<Box<mem::MaybeUninit<T>, A>, A::Error> {
261261
let ptr = if let Ok(layout) = NonZeroLayout::new::<T>() {
262262
let ptr: NonNull<mem::MaybeUninit<T>> = a.alloc(layout)?.cast();
263263
ptr
@@ -365,7 +365,7 @@ impl<T, A: AllocRef> Box<[T], A> {
365365
/// ```
366366
pub fn try_new_uninit_slice_in(
367367
len: usize,
368-
mut a: A,
368+
a: A,
369369
) -> Result<Box<[mem::MaybeUninit<T>], A>, CollectionAllocErr<A>> {
370370
let ptr = if mem::size_of::<T>() == 0 || len == 0 {
371371
NonNull::dangling()
@@ -732,7 +732,7 @@ fn drop_box<T: ?Sized, A: DeallocRef>(boxed: &mut Box<T, A>) {
732732
unsafe {
733733
let ptr = boxed.ptr;
734734
ptr::drop_in_place(ptr.as_ptr());
735-
if let (mut alloc, Some(layout)) = boxed.alloc_ref() {
735+
if let (alloc, Some(layout)) = boxed.alloc_ref() {
736736
alloc.dealloc(ptr.cast().into(), layout)
737737
}
738738
}
@@ -807,7 +807,7 @@ where
807807
/// ```
808808
#[inline]
809809
fn clone(&self) -> Self {
810-
let mut b = self.build_alloc().clone();
810+
let b = self.build_alloc().clone();
811811
let old_ptr = self.ptr.cast();
812812
let old_layout = NonZeroLayout::for_value(self.as_ref());
813813

@@ -1276,7 +1276,7 @@ where
12761276
A::BuildAlloc: Clone,
12771277
{
12781278
fn clone(&self) -> Self {
1279-
let mut b = self.build_alloc().clone();
1279+
let b = self.build_alloc().clone();
12801280
let old_ptr = self.ptr.cast();
12811281
let old_layout = NonZeroLayout::for_value(self.as_ref());
12821282
let a = unsafe { b.build_alloc_ref(old_ptr.into(), old_layout) };

src/raw_vec.rs

Lines changed: 7 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -144,7 +144,7 @@ impl<T> RawVec<T> {
144144

145145
impl<T, A: DeallocRef> RawVec<T, A> {
146146
/// Like `new` but parameterized over the choice of allocator for the returned `RawVec`.
147-
pub fn new_in(mut a: A) -> Self {
147+
pub fn new_in(a: A) -> Self {
148148
let capacity = if mem::size_of::<T>() == 0 { !0 } else { 0 };
149149
Self {
150150
ptr: Unique::empty(),
@@ -226,7 +226,7 @@ impl<T, A: DeallocRef> RawVec<T, A> {
226226
fn allocate_in(
227227
capacity: usize,
228228
zeroed: bool,
229-
mut alloc: A,
229+
alloc: A,
230230
) -> Result<Self, CollectionAllocErr<A>>
231231
where
232232
A: AllocRef,
@@ -443,7 +443,7 @@ impl<T, A: DeallocRef> RawVec<T, A> {
443443
return Err(CollectionAllocErr::CapacityOverflow);
444444
}
445445

446-
let (mut alloc, old_layout) = self.alloc_ref();
446+
let (alloc, old_layout) = self.alloc_ref();
447447
let (new_cap, ptr) = if let Some(old_layout) = old_layout {
448448
// Since we guarantee that we never allocate more than
449449
// `isize::MAX` bytes, `elem_size * self.cap <= isize::MAX` as
@@ -524,7 +524,7 @@ impl<T, A: DeallocRef> RawVec<T, A> {
524524
return Err(CapacityOverflow);
525525
}
526526

527-
let (mut alloc, old_layout) = if let (alloc, Some(layout)) = self.alloc_ref() {
527+
let (alloc, old_layout) = if let (alloc, Some(layout)) = self.alloc_ref() {
528528
(alloc, layout)
529529
} else {
530530
return Ok(false); // nothing to double
@@ -701,7 +701,7 @@ impl<T, A: DeallocRef> RawVec<T, A> {
701701
return Ok(false);
702702
}
703703

704-
let (mut alloc, old_layout) = if let (alloc, Some(layout)) = self.alloc_ref() {
704+
let (alloc, old_layout) = if let (alloc, Some(layout)) = self.alloc_ref() {
705705
(alloc, layout)
706706
} else {
707707
return Ok(false); // nothing to double
@@ -846,7 +846,7 @@ impl<T, A: DeallocRef> RawVec<T, A> {
846846

847847
let _ = alloc_guard(new_layout.size().get(), new_layout.align().get())?;
848848

849-
let (mut alloc, old_layout) = self.alloc_ref();
849+
let (alloc, old_layout) = self.alloc_ref();
850850
let result = if let Some(layout) = old_layout {
851851
unsafe { alloc.realloc(self.ptr.cast().into(), layout, new_layout) }
852852
} else {
@@ -888,7 +888,7 @@ enum ReserveStrategy {
888888
impl<T, A: DeallocRef> RawVec<T, A> {
889889
/// Frees the memory owned by the `RawVec` *without* trying to Drop its contents.
890890
pub fn dealloc_buffer(&mut self) {
891-
if let (mut alloc, Some(layout)) = self.alloc_ref() {
891+
if let (alloc, Some(layout)) = self.alloc_ref() {
892892
unsafe { alloc.dealloc(self.ptr.cast().into(), layout) }
893893
}
894894
}

src/vec.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -2185,7 +2185,7 @@ where
21852185
#[must_use]
21862186
#[inline]
21872187
fn clone(&self) -> Self {
2188-
let mut b = self.buf.build_alloc().clone();
2188+
let b = self.buf.build_alloc().clone();
21892189
let old_layout = self.buf.current_layout();
21902190

21912191
unsafe {
@@ -2463,7 +2463,7 @@ where
24632463
}
24642464

24652465
impl<T, A: ReallocRef> SpecExtend<T, IntoIter<T, A>, A> for Vec<T, A> {
2466-
fn try_from_iter_in(iter: IntoIter<T, A>, mut a: A) -> Result<Self, CollectionAllocErr<A>> {
2466+
fn try_from_iter_in(iter: IntoIter<T, A>, a: A) -> Result<Self, CollectionAllocErr<A>> {
24672467
// A common case is passing a vector into a function which immediately
24682468
// re-collects into a vector. We can short circuit this if the IntoIter
24692469
// has not been advanced at all.

tests/heap.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -13,7 +13,7 @@ fn std_heap_overaligned_request() {
1313
check_overalign_requests(Global)
1414
}
1515

16-
fn check_overalign_requests<T: AllocRef>(mut allocator: T)
16+
fn check_overalign_requests<T: AllocRef>(allocator: T)
1717
where
1818
T::Error: Debug,
1919
{

0 commit comments

Comments
 (0)