@@ -29,6 +29,8 @@ pub struct AddressAllocator {
     // tree will represent a memory location and can have two states either
     // `NodeState::Free` or `NodeState::Allocated`.
     interval_tree: IntervalTree,
+    // Used memory space in the address space.
+    used: usize,
 }
 
 impl AddressAllocator {
@@ -43,6 +45,7 @@ impl AddressAllocator {
         Ok(AddressAllocator {
             address_space: aux_range,
             interval_tree: IntervalTree::new(aux_range),
+            used: 0,
         })
     }
 
@@ -63,13 +66,30 @@ impl AddressAllocator {
         policy: AllocPolicy,
     ) -> Result<RangeInclusive> {
         let constraint = Constraint::new(size, alignment, policy)?;
-        self.interval_tree.allocate(constraint)
+        let allocated = self.interval_tree.allocate(constraint)?;
+        self.used = self
+            .used
+            .checked_add(allocated.len() as usize)
+            .expect("Failed to calculate used memory");
+        Ok(allocated)
     }
 
     /// Deletes the specified memory slot or returns `ResourceNotAvailable` if
     /// the node was not allocated before.
     pub fn free(&mut self, key: &RangeInclusive) -> Result<()> {
-        self.interval_tree.free(key)
+        self.interval_tree.free(key)?;
+        self.used = self
+            .used
+            .checked_sub(key.len() as usize)
+            .expect("Failed to calculate used memory");
+        Ok(())
+    }
+
+    /// Returns the used memory size in this allocator.
+    /// NOTE that due to fragmentation, not all unused memory may be available
+    /// for the next `allocate()` call!
+    pub fn used(&self) -> usize {
+        self.used
     }
 }
 
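A minimal sketch (not part of this change) of how the new `used()` accounting behaves under fragmentation, using only the `AddressAllocator`, `AllocPolicy::FirstMatch`, `RangeInclusive`, and `used()` calls exercised elsewhere in this diff; the test name and the exact sizes are hypothetical:

#[test]
fn used_does_not_imply_contiguous_space() {
    // Hypothetical test, assuming the API shown in this diff.
    let mut pool = AddressAllocator::new(0x1000, 0x1000).unwrap();

    // Two adjacent 0x400 allocations occupy the first half of the pool.
    let a = pool.allocate(0x400, 0x100, AllocPolicy::FirstMatch).unwrap();
    let _b = pool.allocate(0x400, 0x100, AllocPolicy::FirstMatch).unwrap();
    assert_eq!(pool.used(), 0x800);

    // Freeing the first block leaves two gaps (0x400 at the front, 0x800 at
    // the back), so 0xC00 bytes are unused in total...
    pool.free(&a).unwrap();
    assert_eq!(pool.used(), 0x400);

    // ...yet a 0xA00 request fails, because no single gap is large enough.
    // `used()` is pure accounting; it says nothing about the largest hole.
    assert!(pool
        .allocate(0xA00, 0x100, AllocPolicy::FirstMatch)
        .is_err());
}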
@@ -158,20 +178,27 @@ mod tests {
     #[test]
     fn test_allocate_with_alignment_first_ok() {
         let mut pool = AddressAllocator::new(0x1000, 0x1000).unwrap();
+        assert_eq!(pool.used(), 0);
+        // Allocate 0x110
         assert_eq!(
             pool.allocate(0x110, 0x100, AllocPolicy::FirstMatch)
                 .unwrap(),
             RangeInclusive::new(0x1000, 0x110F).unwrap()
         );
+        assert_eq!(pool.used(), 0x110);
+        // Allocate 0x100
         assert_eq!(
             pool.allocate(0x100, 0x100, AllocPolicy::FirstMatch)
                 .unwrap(),
             RangeInclusive::new(0x1200, 0x12FF).unwrap()
         );
+        assert_eq!(pool.used(), 0x110 + 0x100);
+        // Allocate 0x10
         assert_eq!(
             pool.allocate(0x10, 0x100, AllocPolicy::FirstMatch).unwrap(),
             RangeInclusive::new(0x1300, 0x130F).unwrap()
         );
+        assert_eq!(pool.used(), 0x110 + 0x100 + 0x10);
     }
 
     #[test]
@@ -230,18 +257,24 @@ mod tests {
     #[test]
     fn test_tree_allocate_address_free_and_realloc() {
         let mut pool = AddressAllocator::new(0x1000, 0x1000).unwrap();
+        assert_eq!(pool.used(), 0);
+        // Allocate 0x800
         assert_eq!(
             pool.allocate(0x800, 0x100, AllocPolicy::FirstMatch)
                 .unwrap(),
             RangeInclusive::new(0x1000, 0x17FF).unwrap()
         );
-
+        assert_eq!(pool.used(), 0x800);
+        // Free 0x800
         let _ = pool.free(&RangeInclusive::new(0x1000, 0x17FF).unwrap());
+        assert_eq!(pool.used(), 0);
+        // Allocate 0x800 again
         assert_eq!(
             pool.allocate(0x800, 0x100, AllocPolicy::FirstMatch)
                 .unwrap(),
             RangeInclusive::new(0x1000, 0x17FF).unwrap()
         );
+        assert_eq!(pool.used(), 0x800);
     }
 
     #[test]