Skip to content

Commit 20a60a7

Browse files
committed
Simplify the Allocator trait and ScratchSpaceMessageAllocator
1 parent b1e4228 commit 20a60a7

File tree

4 files changed

+134
-113
lines changed

4 files changed

+134
-113
lines changed

benchmark/benchmark.rs

+30-27
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,7 @@
1919
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
2020
// THE SOFTWARE.
2121

22-
use std::{mem, io};
22+
use std::{io};
2323

2424
use capnp::{message, serialize, serialize_packed};
2525
use capnp::traits::Owned;
@@ -102,7 +102,7 @@ impl Serialize for Packed {
102102
trait Scratch<'a> {
103103
type Allocator: message::Allocator;
104104

105-
fn get_builders(&'a mut self) -> (message::Builder<Self::Allocator>, message::Builder<Self::Allocator>);
105+
fn get_allocators(&'a mut self) -> (Self::Allocator, Self::Allocator);
106106
}
107107

108108
const SCRATCH_SIZE: usize = 128 * 1024;
@@ -113,51 +113,43 @@ pub struct NoScratch;
113113
impl <'a> Scratch<'a> for NoScratch {
114114
type Allocator = message::HeapAllocator;
115115

116-
fn get_builders(&'a mut self) -> (message::Builder<Self::Allocator>, message::Builder<Self::Allocator>) {
117-
(message::Builder::new_default(), message::Builder::new_default())
116+
fn get_allocators(&'a mut self) -> (Self::Allocator, Self::Allocator) {
117+
(message::HeapAllocator::new(), message::HeapAllocator::new())
118118
}
119119
}
120120

121121
pub struct UseScratch {
122-
_owned_space: ::std::vec::Vec<::std::vec::Vec<capnp::Word>>,
123-
scratch_space: ::std::vec::Vec<message::ScratchSpace<'static>>,
122+
buffer1: Vec<capnp::Word>,
123+
buffer2: Vec<capnp::Word>,
124124
}
125125

126126
impl UseScratch {
127127
pub fn new() -> UseScratch {
128-
let mut owned = Vec::new();
129-
let mut scratch = Vec::new();
130-
for _ in 0..2 {
131-
let mut words = capnp::Word::allocate_zeroed_vec(SCRATCH_SIZE);
132-
scratch.push(message::ScratchSpace::new(
133-
unsafe { mem::transmute(capnp::Word::words_to_bytes_mut(&mut words[..])) }));
134-
owned.push(words);
135-
}
136128
UseScratch {
137-
_owned_space: owned,
138-
scratch_space: scratch,
129+
buffer1: capnp::Word::allocate_zeroed_vec(SCRATCH_SIZE),
130+
buffer2: capnp::Word::allocate_zeroed_vec(SCRATCH_SIZE),
139131
}
140132
}
141133
}
142134

143135
impl <'a> Scratch<'a> for UseScratch {
144-
type Allocator = message::ScratchSpaceHeapAllocator<'a, 'a>;
145-
146-
fn get_builders(&'a mut self) -> (message::Builder<Self::Allocator>, message::Builder<Self::Allocator>) {
147-
(message::Builder::new(message::ScratchSpaceHeapAllocator::new(
148-
unsafe { mem::transmute(&mut self.scratch_space[0]) })),
149-
message::Builder::new(message::ScratchSpaceHeapAllocator::new(
150-
unsafe { mem::transmute(&mut self.scratch_space[1]) })))
136+
type Allocator = message::ScratchSpaceHeapAllocator<'a>;
151137

138+
fn get_allocators(&'a mut self) -> (Self::Allocator, Self::Allocator) {
139+
let UseScratch {ref mut buffer1, ref mut buffer2 } = self;
140+
(message::ScratchSpaceHeapAllocator::new(capnp::Word::words_to_bytes_mut(buffer1)),
141+
message::ScratchSpaceHeapAllocator::new(capnp::Word::words_to_bytes_mut(buffer2)))
152142
}
153143
}
154144

155145
fn pass_by_object<S, T>(testcase: T, mut reuse: S, iters: u64) -> ::capnp::Result<()>
156146
where S: for<'a> Scratch<'a>, T: TestCase,
157147
{
158148
let mut rng = common::FastRand::new();
149+
let (mut allocator_req, mut allocator_res) = reuse.get_allocators();
159150
for _ in 0..iters {
160-
let (mut message_req, mut message_res) = reuse.get_builders();
151+
let mut message_req = message::Builder::new(allocator_req);
152+
let mut message_res = message::Builder::new(allocator_res);
161153

162154
let expected = testcase.setup_request(
163155
&mut rng,
@@ -170,6 +162,9 @@ fn pass_by_object<S, T>(testcase: T, mut reuse: S, iters: u64) -> ::capnp::Resul
170162
testcase.check_response(
171163
message_res.get_root_as_reader()?,
172164
expected)?;
165+
166+
allocator_req = message_req.into_allocator();
167+
allocator_res = message_res.into_allocator();
173168
}
174169
Ok(())
175170
}
@@ -180,8 +175,10 @@ fn pass_by_bytes<C, S, T>(testcase: T, mut reuse: S, compression: C, iters: u64)
180175
let mut request_bytes = vec![0u8; SCRATCH_SIZE * 8];
181176
let mut response_bytes = vec![0u8; SCRATCH_SIZE * 8];
182177
let mut rng = common::FastRand::new();
178+
let (mut allocator_req, mut allocator_res) = reuse.get_allocators();
183179
for _ in 0..iters {
184-
let (mut message_req, mut message_res) = reuse.get_builders();
180+
let mut message_req = message::Builder::new(allocator_req);
181+
let mut message_res = message::Builder::new(allocator_res);
185182

186183
let expected = {
187184
let request = message_req.init_root();
@@ -217,6 +214,8 @@ fn pass_by_bytes<C, S, T>(testcase: T, mut reuse: S, compression: C, iters: u64)
217214

218215
let response_reader = message_reader.get_root()?;
219216
testcase.check_response(response_reader, expected)?;
217+
allocator_req = message_req.into_allocator();
218+
allocator_res = message_res.into_allocator();
220219
}
221220
Ok(())
222221
}
@@ -227,9 +226,10 @@ fn server<C, S, T, R, W>(testcase: T, mut reuse: S, compression: C, iters: u64,
227226
{
228227
let mut out_buffered = io::BufWriter::new(&mut output);
229228
let mut in_buffered = io::BufReader::new(&mut input);
229+
let (mut allocator_res, _) = reuse.get_allocators();
230230
for _ in 0..iters {
231231
use std::io::Write;
232-
let (mut message_res, _) = reuse.get_builders();
232+
let mut message_res = message::Builder::new(allocator_res);
233233

234234
{
235235
let response = message_res.init_root();
@@ -242,6 +242,7 @@ fn server<C, S, T, R, W>(testcase: T, mut reuse: S, compression: C, iters: u64,
242242

243243
compression.write_message(&mut out_buffered, &mut message_res)?;
244244
out_buffered.flush()?;
245+
allocator_res = message_res.into_allocator();
245246
}
246247
Ok(())
247248
}
@@ -255,9 +256,10 @@ fn sync_client<C, S, T>(testcase: T, mut reuse: S, compression: C, iters: u64)
255256
let mut in_buffered = io::BufReader::new(&mut in_stream);
256257
let mut out_buffered = io::BufWriter::new(&mut out_stream);
257258
let mut rng = common::FastRand::new();
259+
let (mut allocator_req, _) = reuse.get_allocators();
258260
for _ in 0..iters {
259261
use std::io::Write;
260-
let (mut message_req, _) = reuse.get_builders();
262+
let mut message_req = message::Builder::new(allocator_req);
261263

262264
let expected = {
263265
let request = message_req.init_root();
@@ -271,6 +273,7 @@ fn sync_client<C, S, T>(testcase: T, mut reuse: S, compression: C, iters: u64)
271273
Default::default())?;
272274
let response_reader = message_reader.get_root()?;
273275
testcase.check_response(response_reader, expected)?;
276+
allocator_req = message_req.into_allocator();
274277
}
275278
Ok(())
276279
}

capnp/src/lib.rs

+2
Original file line numberDiff line numberDiff line change
@@ -27,6 +27,8 @@
2727
//! [capnpc-rust](https://github.com/capnproto/capnproto-rust/capnpc) crate.
2828
#![cfg_attr(feature = "rpc_try", feature(try_trait))]
2929

30+
extern crate alloc;
31+
3032
pub mod any_pointer;
3133
pub mod any_pointer_list;
3234
pub mod capability;

capnp/src/message.rs

+57-58
Original file line numberDiff line numberDiff line change
@@ -262,32 +262,20 @@ impl <A, T> From<Builder<A>> for TypedReader<Builder<A>, T>
262262
/// An object that allocates memory for a Cap'n Proto message as it is being built.
263263
pub unsafe trait Allocator {
264264
/// Allocates zeroed memory for a new segment, returning a pointer to the start of the segment
265-
/// and a u32 indicating the length of the segment (in words).
265+
/// and a u32 indicating the length of the segment (in words). The allocated segment must be
266+
/// at least `minimum_size` words long.
266267
///
267268
/// UNSAFETY ALERT: Implementors must ensure all of the following:
268269
/// 1. the returned memory is initialized to all zeroes,
269-
/// 2. the returned memory is valid for the remaining lifetime of the Allocator object,
270+
/// 2. the returned memory is valid until deallocate_segment() is called on it,
270271
/// 3. the memory doesn't overlap with other allocated memory,
271272
/// 4. the memory is 8-byte aligned (or the "unaligned" feature is enabled for the capnp crate).
272273
fn allocate_segment(&mut self, minimum_size: u32) -> (*mut u8, u32);
273274

274-
/// Called before the Allocator is dropped, to allow the first segment to be re-zeroed
275-
/// before possibly being reused in another allocator.
276-
fn pre_drop(&mut self, _segment0_currently_allocated: u32) {}
275+
/// Indicates that a segment, previously allocated via allocate_segment(), is no longer in use.
276+
fn deallocate_segment(&mut self, ptr: *mut u8, word_size: u32, words_used: u32);
277277
}
278278

279-
/* TODO(version 0.9): update to a more user-friendly trait here?
280-
pub trait Allocator {
281-
/// Allocates memory for a new segment.
282-
fn allocate_segment(&mut self, minimum_size: u32) -> Result<()>;
283-
284-
fn get_segment<'a>(&'a self, id: u32) -> &'a [u8];
285-
fn get_segment_mut<'a>(&'a mut self, id: u32) -> &'a mut [u8];
286-
287-
fn pre_drop(&mut self, _segment0_currently_allocated: u32) {}
288-
}
289-
*/
290-
291279
/// A container used to build a message.
292280
pub struct Builder<A> where A: Allocator {
293281
arena: BuilderArenaImpl<A>,
@@ -380,6 +368,10 @@ impl <A> Builder<A> where A: Allocator {
380368
nesting_limit: i32::max_value()
381369
})
382370
}
371+
372+
pub fn into_allocator(self) -> A {
373+
self.arena.into_allocator()
374+
}
383375
}
384376

385377
impl <A> ReaderSegments for Builder<A> where A: Allocator {
@@ -392,11 +384,9 @@ impl <A> ReaderSegments for Builder<A> where A: Allocator {
392384
}
393385
}
394386

395-
/// Standard segment allocator. Allocates each segment as a `Vec<Word>`, where `Word` has
396-
/// 8-byte alignment.
387+
/// Standard segment allocator. Allocates each segment via `alloc::alloc::alloc_zeroed()`.
397388
#[derive(Debug)]
398389
pub struct HeapAllocator {
399-
owned_memory: Vec<Vec<crate::Word>>,
400390
next_size: u32,
401391
allocation_strategy: AllocationStrategy,
402392
}
@@ -412,8 +402,7 @@ pub const SUGGESTED_ALLOCATION_STRATEGY: AllocationStrategy = AllocationStrategy
412402

413403
impl HeapAllocator {
414404
pub fn new() -> HeapAllocator {
415-
HeapAllocator { owned_memory: Vec::new(),
416-
next_size: SUGGESTED_FIRST_SEGMENT_WORDS,
405+
HeapAllocator { next_size: SUGGESTED_FIRST_SEGMENT_WORDS,
417406
allocation_strategy: SUGGESTED_ALLOCATION_STRATEGY }
418407
}
419408

@@ -431,16 +420,24 @@ impl HeapAllocator {
431420
unsafe impl Allocator for HeapAllocator {
432421
fn allocate_segment(&mut self, minimum_size: u32) -> (*mut u8, u32) {
433422
let size = ::std::cmp::max(minimum_size, self.next_size);
434-
let mut new_words = crate::Word::allocate_zeroed_vec(size as usize);
435-
let ptr = new_words.as_mut_ptr() as *mut u8;
436-
self.owned_memory.push(new_words);
437-
423+
let ptr = unsafe {
424+
alloc::alloc::alloc_zeroed(alloc::alloc::Layout::from_size_align(size as usize * BYTES_PER_WORD, 8).unwrap())
425+
};
438426
match self.allocation_strategy {
439427
AllocationStrategy::GrowHeuristically => { self.next_size += size; }
440428
_ => { }
441429
}
442430
(ptr, size as u32)
443431
}
432+
433+
fn deallocate_segment(&mut self, ptr: *mut u8, word_size: u32, _words_used: u32) {
434+
unsafe {
435+
alloc::alloc::dealloc(ptr,
436+
alloc::alloc::Layout::from_size_align(word_size as usize * BYTES_PER_WORD, 8).unwrap());
437+
}
438+
// TODO move this next_size stuff out of what the Allocator trait should be responsible for.
439+
self.next_size = SUGGESTED_FIRST_SEGMENT_WORDS;
440+
}
444441
}
445442

446443
impl Builder<HeapAllocator> {
@@ -449,57 +446,59 @@ impl Builder<HeapAllocator> {
449446
}
450447
}
451448

452-
#[derive(Debug)]
453-
pub struct ScratchSpace<'a> {
454-
slice: &'a mut [u8],
455-
in_use: bool,
456-
}
457-
458-
impl <'a> ScratchSpace<'a> {
459-
pub fn new(slice: &'a mut [u8]) -> ScratchSpace<'a> {
460-
ScratchSpace { slice: slice, in_use: false }
461-
}
462-
}
463-
464-
pub struct ScratchSpaceHeapAllocator<'a, 'b: 'a> {
465-
scratch_space: &'a mut ScratchSpace<'b>,
449+
/// An Allocator whose first segment is backed by a user-provided buffer.
450+
pub struct ScratchSpaceHeapAllocator<'a> {
451+
scratch_space: &'a mut [u8],
452+
scratch_space_allocated: bool,
466453
allocator: HeapAllocator,
467454
}
468455

469-
impl <'a, 'b: 'a> ScratchSpaceHeapAllocator<'a, 'b> {
470-
pub fn new(scratch_space: &'a mut ScratchSpace<'b>) -> ScratchSpaceHeapAllocator<'a, 'b> {
456+
impl <'a> ScratchSpaceHeapAllocator<'a> {
457+
/// Writes zeroes into the entire buffer and constructs a new allocator from it.
458+
///
459+
/// If you want to reuse the same buffer and to minimize the cost of zeroing for each message,
460+
/// you can call `message::Builder::into_allocator()` to recover the allocator from
461+
/// the previous message and then pass it into the new message.
462+
pub fn new(scratch_space: &'a mut [u8]) -> ScratchSpaceHeapAllocator<'a> {
463+
// We need to ensure that the buffer is zeroed.
464+
for b in &mut scratch_space[..] {
465+
*b = 0;
466+
}
471467
ScratchSpaceHeapAllocator { scratch_space: scratch_space,
468+
scratch_space_allocated: false,
472469
allocator: HeapAllocator::new()}
473470
}
474471

475-
pub fn second_segment_words(self, value: u32) -> ScratchSpaceHeapAllocator<'a, 'b> {
476-
ScratchSpaceHeapAllocator { scratch_space: self.scratch_space,
477-
allocator: self.allocator.first_segment_words(value) }
472+
pub fn second_segment_words(self, value: u32) -> ScratchSpaceHeapAllocator<'a> {
473+
ScratchSpaceHeapAllocator { allocator: self.allocator.first_segment_words(value), ..self }
478474

479475
}
480476

481-
pub fn allocation_strategy(self, value: AllocationStrategy) -> ScratchSpaceHeapAllocator<'a, 'b> {
482-
ScratchSpaceHeapAllocator { scratch_space: self.scratch_space,
483-
allocator: self.allocator.allocation_strategy(value) }
477+
pub fn allocation_strategy(self, value: AllocationStrategy) -> ScratchSpaceHeapAllocator<'a> {
478+
ScratchSpaceHeapAllocator { allocator: self.allocator.allocation_strategy(value), ..self }
484479
}
485-
486480
}
487481

488-
unsafe impl <'a, 'b: 'a> Allocator for ScratchSpaceHeapAllocator<'a, 'b> {
482+
unsafe impl <'a> Allocator for ScratchSpaceHeapAllocator<'a> {
489483
fn allocate_segment(&mut self, minimum_size: u32) -> (*mut u8, u32) {
490-
if !self.scratch_space.in_use {
491-
self.scratch_space.in_use = true;
492-
(self.scratch_space.slice.as_mut_ptr(), (self.scratch_space.slice.len() / BYTES_PER_WORD) as u32)
484+
if (minimum_size as usize) < (self.scratch_space.len() / BYTES_PER_WORD) && !self.scratch_space_allocated {
485+
self.scratch_space_allocated = true;
486+
(self.scratch_space.as_mut_ptr(), (self.scratch_space.len() / BYTES_PER_WORD) as u32)
493487
} else {
494488
self.allocator.allocate_segment(minimum_size)
495489
}
496490
}
497491

498-
fn pre_drop(&mut self, segment0_currently_allocated: u32) {
499-
let ptr = self.scratch_space.slice.as_mut_ptr();
500-
unsafe {
501-
::std::ptr::write_bytes(ptr, 0u8, (segment0_currently_allocated as usize) * BYTES_PER_WORD);
492+
fn deallocate_segment(&mut self, ptr: *mut u8, word_size: u32, words_used: u32) {
493+
if ptr == self.scratch_space.as_mut_ptr() {
494+
// Rezero the slice to allow reuse of the allocator. We only need to write
495+
// words that we know might contain nonzero values.
496+
unsafe {
497+
::std::ptr::write_bytes(ptr, 0u8, (words_used as usize) * BYTES_PER_WORD);
498+
}
499+
self.scratch_space_allocated = false;
500+
} else {
501+
self.allocator.deallocate_segment(ptr, word_size, words_used);
502502
}
503-
self.scratch_space.in_use = false;
504503
}
505504
}

0 commit comments

Comments
 (0)