Skip to content

Commit f4d2a52

Browse files
author
a.lisnevskiy
committed
WIP
Signed-off-by: a.lisnevskiy <[email protected]>
1 parent bde0645 commit f4d2a52

File tree

10 files changed

+333
-154
lines changed

10 files changed

+333
-154
lines changed

TODO

+4
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,4 @@
1+
* write bitmask page allocator
2+
* describe task structure and implement system task and clock task
3+
* write FIFO process scheduler as a separate service
4+
* move from GNU binutils to LLVM binutils

crates/mm/src/zone.rs

+63-15
Original file line numberDiff line numberDiff line change
@@ -7,8 +7,10 @@ use std::{
77

88
use types::linked_list;
99

10-
const MAX_PAGE_ORDER: usize = 10;
11-
const _NR_PAGE_ORDER: usize = 10;
10+
pub const PAGE_SHIFT: usize = 12;
11+
pub const PAGE_SIZE: usize = 1 << PAGE_SHIFT;
12+
pub const MAX_PAGE_ORDER: usize = 10;
13+
pub const _NR_PAGE_ORDER: usize = 10;
1214

1315
pub struct Zone {
1416
free_area: [linked_list::List; MAX_PAGE_ORDER],
@@ -17,8 +19,25 @@ pub struct Zone {
1719
_present_pages: usize,
1820
}
1921

20-
pub(crate) fn prev_power_of_two(num: usize) -> usize {
21-
1 << (usize::BITS as usize - num.leading_zeros() as usize - 1)
22+
pub(crate) fn prev_two_order(num: usize) -> usize {
23+
usize::BITS as usize - num.leading_zeros() as usize - 1
24+
}
25+
26+
pub(crate) fn _prev_power_of_two(num: usize) -> usize {
27+
1 << prev_two_order(num)
28+
}
29+
30+
pub fn next_aligned_by(address: usize, alignment: usize) -> usize {
31+
if alignment == 0 {
32+
panic!("Alignment must be a positive integer.");
33+
}
34+
35+
let remainder = address & (alignment - 1);
36+
address + (alignment - remainder) * (remainder != 0) as usize
37+
}
38+
39+
pub fn prev_aligned_by(address: usize, alignment: usize) -> usize {
40+
address & (!alignment + 1)
2241
}
2342

2443
impl Zone {
@@ -30,30 +49,44 @@ impl Zone {
3049
}
3150
}
3251

52+
pub fn managed_pages(&self) -> usize {
53+
self._managed_pages
54+
}
55+
56+
pub fn present_pages(&self) -> usize {
57+
self._present_pages
58+
}
59+
3360
pub const fn empty() -> Self {
3461
Self::new()
3562
}
3663

3764
pub unsafe fn add_to_heap(&mut self, mut start: usize, mut end: usize) {
38-
start = (start + size_of::<usize>() - 1) & (!size_of::<usize>() + 1);
39-
end &= !size_of::<usize>() + 1;
65+
use types::linked_list::ListHead;
66+
67+
start = next_aligned_by(start, PAGE_SIZE);
68+
end = prev_aligned_by(end, PAGE_SIZE);
4069
assert!(start <= end);
4170

4271
let mut current_start = start;
43-
44-
while current_start + size_of::<usize>() <= end {
45-
let lowbit = current_start & (!current_start + 1);
46-
let mut size = min(lowbit, prev_power_of_two(end - current_start));
47-
48-
let mut order = size.trailing_zeros() as usize;
72+
while current_start + PAGE_SIZE <= end {
73+
let mut order = prev_two_order(end - current_start) - PAGE_SHIFT;
4974
if order > MAX_PAGE_ORDER - 1 {
5075
order = MAX_PAGE_ORDER - 1;
51-
size = 1 << order;
5276
}
5377

78+
println!("{:?}", *(current_start as *mut ListHead));
79+
5480
self.free_area[order].push_front(current_start as *mut usize);
55-
current_start += size;
81+
current_start += 1 << (order + PAGE_SHIFT);
82+
83+
println!("{:?}", self);
84+
println!("end = {}; current_start = {}",
85+
end,
86+
current_start
87+
);
5688
}
89+
println!("");
5790
}
5891

5992
pub unsafe fn init(&mut self, start: usize, size: usize) {
@@ -132,7 +165,22 @@ impl Zone {
132165

133166
impl fmt::Debug for Zone {
134167
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
168+
let sizes = self.free_area.iter().map(|area| area.count()).collect::<Vec<_>>();
135169
fmt.debug_struct(std::any::type_name::<Self>())
170+
.field("managed", &self._managed_pages)
171+
.field("present", &self._present_pages)
172+
.field("sizes", &sizes)
136173
.finish()
137174
}
138-
}
175+
}
176+
177+
#[cfg(test)]
178+
pub mod tests {
179+
use super::*;
180+
181+
#[test]
182+
pub fn test_align_by() {
183+
assert_eq!(next_aligned_by(0x1234, 0x1000), 0x2000);
184+
assert_eq!(prev_aligned_by(0x1234, 0x1000), 0x1000);
185+
}
186+
}

crates/mm/tests/default.rs

+72-84
Original file line numberDiff line numberDiff line change
@@ -1,14 +1,8 @@
1-
use std::{
2-
alloc::Layout,
3-
mem::size_of,
4-
};
1+
use std::alloc::Layout;
52

6-
use mm::Zone;
3+
use mm::zone::{Zone, PAGE_SIZE, next_aligned_by};
74

8-
const MACHINE_ALIGN: usize = core::mem::size_of::<usize>();
9-
const HEAP_SIZE: usize = 4 * 1024 * 1024;
10-
const HEAP_BLOCK: usize = HEAP_SIZE / MACHINE_ALIGN;
11-
static mut HEAP: [usize; HEAP_BLOCK] = [0; HEAP_BLOCK];
5+
static HEAP: [u8; PAGE_SIZE << 5] = [0; PAGE_SIZE << 5];
126

137
#[test]
148
fn test_empty_heap() {
@@ -19,84 +13,78 @@ fn test_empty_heap() {
1913
#[test]
2014
fn test_heap_add() {
2115
let mut heap = Zone::new();
22-
assert!(heap.alloc(Layout::from_size_align(1, 1).unwrap()).is_err());
23-
24-
let space: [usize; 100] = [0; 100];
25-
unsafe {
26-
heap.add_to_heap(space.as_ptr() as usize, space.as_ptr().add(100) as usize);
27-
}
28-
let addr = heap.alloc(Layout::from_size_align(1, 1).unwrap());
29-
assert!(addr.is_ok());
30-
}
31-
32-
#[test]
33-
fn test_heap_add_large() {
34-
let mut heap = Zone::new();
35-
assert!(heap.alloc(Layout::from_size_align(1, 1).unwrap()).is_err());
36-
37-
let space: [u8; 512] = [0; 512];
38-
unsafe {
39-
heap.add_to_heap(space.as_ptr() as usize, space.as_ptr().add(512) as usize);
40-
}
41-
let addr = heap.alloc(Layout::from_size_align(1, 1).unwrap());
42-
assert!(addr.is_ok());
43-
}
44-
45-
#[test]
46-
fn test_heap_oom() {
47-
let mut heap = Zone::new();
48-
let space: [usize; 100] = [0; 100];
4916
unsafe {
50-
heap.add_to_heap(space.as_ptr() as usize, space.as_ptr().add(100) as usize);
17+
heap.add_to_heap(
18+
next_aligned_by(HEAP.as_ptr() as usize, PAGE_SIZE),
19+
next_aligned_by(HEAP.as_ptr().add(PAGE_SIZE) as usize, PAGE_SIZE)
20+
);
5121
}
52-
53-
assert!(heap
54-
.alloc(Layout::from_size_align(100 * size_of::<usize>(), 1).unwrap())
55-
.is_err());
56-
assert!(heap.alloc(Layout::from_size_align(1, 1).unwrap()).is_ok());
22+
// let addr = heap.alloc(Layout::from_size_align(1, 1).unwrap());
23+
// assert!(addr.is_ok());
5724
}
5825

59-
#[test]
60-
fn test_heap_alloc_and_free() {
61-
let mut heap = Zone::new();
62-
assert!(heap.alloc(Layout::from_size_align(1, 1).unwrap()).is_err());
63-
64-
let space: [usize; 100] = [0; 100];
65-
unsafe {
66-
heap.add_to_heap(space.as_ptr() as usize, space.as_ptr().add(100) as usize);
67-
}
68-
for _ in 0..100 {
69-
let addr = heap.alloc(Layout::from_size_align(1, 1).unwrap()).unwrap();
70-
heap.dealloc(addr, Layout::from_size_align(1, 1).unwrap());
71-
}
72-
}
73-
74-
#[test]
75-
fn test_heap_alloc_and_free_different_sizes() {
76-
let mut heap = Zone::new();
77-
assert!(heap.alloc(Layout::from_size_align(1, 1).unwrap()).is_err());
78-
79-
unsafe {
80-
heap.add_to_heap(HEAP.as_ptr() as usize, HEAP.as_ptr().add(1 << 16) as usize);
81-
}
82-
for block_size in 1..12 {
83-
let addr = heap.alloc(Layout::from_size_align(1 << block_size, 1).unwrap()).unwrap();
84-
heap.dealloc(addr, Layout::from_size_align(1 << block_size, 1).unwrap());
85-
}
86-
}
87-
88-
#[test]
89-
fn test_heap_alloc_and_free_different_sizes_lowering() {
90-
let mut heap = Zone::new();
91-
assert!(heap.alloc(Layout::from_size_align(1, 1).unwrap()).is_err());
92-
93-
unsafe {
94-
heap.add_to_heap(HEAP.as_ptr() as usize, HEAP.as_ptr().add(1 << 16) as usize);
95-
}
96-
for block_size in (12..1).rev() {
97-
let addr = heap.alloc(Layout::from_size_align(1 << block_size, 1).unwrap()).unwrap();
98-
heap.dealloc(addr, Layout::from_size_align(1 << block_size, 1).unwrap());
99-
}
100-
}
26+
// #[test]
27+
// fn test_heap_add_large() {
28+
// let mut heap = Zone::new();
29+
// unsafe {
30+
// heap.add_to_heap(HEAP.as_ptr() as usize, HEAP.as_ptr().add(512) as usize);
31+
// }
32+
// let addr = heap.alloc(Layout::from_size_align(1, 1).unwrap());
33+
// assert!(addr.is_ok());
34+
// }
35+
36+
// #[test]
37+
// fn test_heap_oom() {
38+
// let mut heap = Zone::new();
39+
// let space: [usize; 100] = [0; 100];
40+
// unsafe {
41+
// heap.add_to_heap(space.as_ptr() as usize, space.as_ptr().add(100) as usize);
42+
// }
43+
//
44+
// assert!(heap
45+
// .alloc(Layout::from_size_align(100 * size_of::<usize>(), 1).unwrap())
46+
// .is_err());
47+
// assert!(heap.alloc(Layout::from_size_align(1, 1).unwrap()).is_ok());
48+
// }
49+
50+
// #[test]
51+
// fn test_heap_alloc_and_free() {
52+
// let mut heap = Zone::new();
53+
// unsafe {
54+
// heap.add_to_heap(HEAP.as_ptr() as usize, HEAP.as_ptr().add(100) as usize);
55+
// }
56+
// for _ in 0..100 {
57+
// let addr = heap.alloc(Layout::from_size_align(1, 1).unwrap()).unwrap();
58+
// heap.dealloc(addr, Layout::from_size_align(1, 1).unwrap());
59+
// }
60+
// }
61+
62+
// #[test]
63+
// fn test_heap_alloc_and_free_different_sizes() {
64+
// let mut heap = Zone::new();
65+
// unsafe {
66+
// heap.add_to_heap(HEAP.as_ptr() as usize, HEAP.as_ptr().add(1 << 16) as usize);
67+
// }
68+
// print!("{:?}", heap);
69+
// for block_size in 1..1 {
70+
// let addr = heap.alloc(
71+
// Layout::from_size_align(1 << block_size, MACHINE_ALIGN).unwrap()
72+
// ).unwrap();
73+
// print!("{:?}", heap);
74+
// // heap.dealloc(addr, Layout::from_size_align(1 << block_size, 1).unwrap());
75+
// }
76+
// }
77+
78+
// #[test]
79+
// fn test_heap_alloc_and_free_different_sizes_lowering() {
80+
// let mut heap = Zone::new();
81+
// unsafe {
82+
// heap.add_to_heap(HEAP.as_ptr() as usize, HEAP.as_ptr().add(1 << 16) as usize);
83+
// }
84+
// for block_size in (12..1).rev() {
85+
// let addr = heap.alloc(Layout::from_size_align(1 << block_size, 1).unwrap()).unwrap();
86+
// heap.dealloc(addr, Layout::from_size_align(1 << block_size, 1).unwrap());
87+
// }
88+
// }
10189

10290
// TODO: add test with scatter/gather loading/storing with different sizes

crates/types/src/lib.rs

+2-1
Original file line numberDiff line numberDiff line change
@@ -1 +1,2 @@
1-
pub mod linked_list;
1+
pub mod linked_list;
2+
pub mod safe_linked_list;

0 commit comments

Comments
 (0)