|
1 | 1 | use os_bootinfo::{MemoryMap, MemoryRegion, MemoryRegionType};
|
2 |
| -use x86_64::structures::paging::{PhysFrame, PAGE_SIZE}; |
3 |
| -use x86_64::{align_up, PhysAddr}; |
| 2 | +use x86_64::structures::paging::{PhysFrame, PageSize, PhysFrameRange}; |
| 3 | +use x86_64::PhysAddr; |
4 | 4 |
|
5 | 5 | pub(crate) struct FrameAllocator<'a> {
|
6 | 6 | pub memory_map: &'a mut MemoryMap,
|
7 | 7 | }
|
8 | 8 |
|
9 | 9 | impl<'a> FrameAllocator<'a> {
|
10 | 10 | pub(crate) fn allocate_frame(&mut self, region_type: MemoryRegionType) -> Option<PhysFrame> {
|
11 |
| - let page_size = u64::from(PAGE_SIZE); |
12 |
| - let mut frame = None; |
13 |
| - |
14 |
| - if frame.is_none() { |
15 |
| - // look for an adjacent regions of same types |
16 |
| - let mut last_region_end = PhysAddr::new(0); |
17 |
| - for region in self.memory_map.iter_mut() { |
18 |
| - if region.region_type == region_type { |
19 |
| - last_region_end = region.end_addr(); |
20 |
| - } else if region.region_type == MemoryRegionType::Usable { |
21 |
| - if region.start_addr() == last_region_end { |
22 |
| - frame = Some(PhysFrame::containing_address(region.start_addr)); |
23 |
| - region.start_addr += page_size; |
24 |
| - region.len -= page_size; |
25 |
| - break |
| 11 | + // try to find an existing region of the same type that can be enlarged |
| 12 | + let mut iter = self.memory_map.iter_mut().peekable(); |
| 13 | + while let Some(region) = iter.next() { |
| 14 | + if region.region_type == region_type { |
| 15 | + if let Some(next) = iter.peek() { |
| 16 | + if next.range.start == region.range.end && next.region_type == MemoryRegionType::Usable && !next.range.is_empty() { |
| 17 | + let frame = region.range.end; |
| 18 | + region.range.end += 1; |
| 19 | + iter.next().unwrap().range.start += 1; |
| 20 | + return Some(frame); |
26 | 21 | }
|
27 | 22 | }
|
28 | 23 | }
|
29 | 24 | }
|
30 | 25 |
|
31 |
| - if frame.is_none() { |
32 |
| - // search all regions |
33 |
| - for region in self.memory_map.iter_mut() { |
| 26 | + fn split_usable_region<'a, I>(iter: &mut I) -> Option<(PhysFrame, PhysFrameRange)> |
| 27 | + where |
| 28 | + I: Iterator<Item = &'a mut MemoryRegion>, |
| 29 | + { |
| 30 | + for region in iter { |
34 | 31 | if region.region_type != MemoryRegionType::Usable {
|
35 | 32 | continue;
|
36 | 33 | }
|
37 |
| - if region.len < page_size { |
| 34 | + if region.range.is_empty() { |
38 | 35 | continue;
|
39 | 36 | }
|
40 | 37 |
|
41 |
| - assert_eq!( |
42 |
| - 0, |
43 |
| - region.start_addr.as_u64() & 0xfff, |
44 |
| - "Region start address is not page aligned: {:?}", |
45 |
| - region |
46 |
| - ); |
47 |
| - |
48 |
| - frame = Some(PhysFrame::containing_address(region.start_addr)); |
49 |
| - region.start_addr += page_size; |
50 |
| - region.len -= page_size; |
51 |
| - break; |
| 38 | + let frame = region.range.start; |
| 39 | + region.range.start += 1; |
| 40 | + return Some((frame, PhysFrame::range(frame, frame + 1))); |
52 | 41 | }
|
| 42 | + None |
53 | 43 | }
|
54 | 44 |
|
55 |
| - if let Some(frame) = frame { |
56 |
| - self.add_region(MemoryRegion { |
57 |
| - start_addr: frame.start_address(), |
58 |
| - len: page_size, |
59 |
| - region_type, |
60 |
| - }); |
| 45 | + let result = if region_type == MemoryRegionType::PageTable { |
| 46 | + // search the memory map in reverse so page-table frames come from the end, preventing fragmentation of the usable regions in between |
| 47 | + split_usable_region(&mut self.memory_map.iter_mut().rev()) |
| 48 | + } else { |
| 49 | + split_usable_region(&mut self.memory_map.iter_mut()) |
| 50 | + }; |
| 51 | + |
| 52 | + if let Some((frame, range)) = result { |
| 53 | + self.memory_map.add_region(MemoryRegion { range, region_type, }); |
61 | 54 | Some(frame)
|
62 | 55 | } else {
|
63 | 56 | None
|
64 | 57 | }
|
65 | 58 | }
|
66 | 59 |
|
67 | 60 | pub(crate) fn deallocate_frame(&mut self, frame: PhysFrame) {
|
68 |
| - let page_size = u64::from(PAGE_SIZE); |
69 |
| - self.add_region_overwrite( |
70 |
| - MemoryRegion { |
71 |
| - start_addr: frame.start_address(), |
72 |
| - len: page_size, |
73 |
| - region_type: MemoryRegionType::Usable, |
74 |
| - }, |
75 |
| - true, |
76 |
| - ); |
| 61 | + // try to find an adjacent usable region that can be enlarged to absorb the frame |
| 62 | + let mut iter = self.memory_map.iter_mut().peekable(); |
| 63 | + while let Some(region) = iter.next() { |
| 64 | + if region.range.end == frame && region.region_type == MemoryRegionType::Usable { |
| 65 | + region.range.end += 1; |
| 66 | + if let Some(next) = iter.next() { |
| 67 | + if next.range.start == frame { |
| 68 | + next.range.start += 1; |
| 69 | + } |
| 70 | + } |
| 71 | + return; |
| 72 | + } |
| 73 | + } |
| 74 | + |
| 75 | + // insert frame as a new region |
| 76 | + self.memory_map.add_region(MemoryRegion { |
| 77 | + range: PhysFrame::range(frame, frame + 1), |
| 78 | + region_type: MemoryRegionType::Usable, |
| 79 | + }); |
77 | 80 | }
|
78 | 81 |
|
79 |
| - /// Adds the passed region to the memory map. |
80 |
| - /// |
81 |
| - /// This function automatically adjusts the existing regions so that no overlap occurs. |
| 82 | + /// Marks the passed region in the memory map. |
82 | 83 | ///
|
83 | 84 | /// Panics if a non-usable region (e.g. a reserved region) overlaps with the passed region.
|
84 |
| - pub(crate) fn add_region(&mut self, region: MemoryRegion) { |
85 |
| - self.add_region_overwrite(region, false); |
86 |
| - } |
87 |
| - |
88 |
| - fn add_region_overwrite(&mut self, mut region: MemoryRegion, overwrite: bool) { |
89 |
| - assert_eq!( |
90 |
| - 0, |
91 |
| - region.start_addr.as_u64() & 0xfff, |
92 |
| - "Region start address is not page aligned: {:?}", |
93 |
| - region |
94 |
| - ); |
95 |
| - |
96 |
| - match region.region_type { |
97 |
| - MemoryRegionType::Kernel | MemoryRegionType::Bootloader => { |
98 |
| - region.len = align_up(region.len, PAGE_SIZE.into()); |
| 85 | + pub(crate) fn mark_allocated_region(&mut self, region: MemoryRegion) { |
| 86 | + for r in self.memory_map.iter_mut() { |
| 87 | + if region.range.start >= r.range.end { |
| 88 | + continue |
| 89 | + } |
| 90 | + if region.range.end <= r.range.start { |
| 91 | + continue |
99 | 92 | }
|
100 |
| - _ => {} |
101 |
| - } |
102 | 93 |
|
103 |
| - let mut region_already_inserted = false; |
104 |
| - let mut split_region = None; |
| 94 | + if r.region_type != MemoryRegionType::Usable { |
| 95 | + panic!("region {:x?} overlaps with non-usable region {:x?}", region, r); |
| 96 | + } |
105 | 97 |
|
106 |
| - for r in self.memory_map.iter_mut() { |
107 |
| - // check if region overlaps with another region |
108 |
| - if r.start_addr() < region.end_addr() && r.end_addr() > region.start_addr() { |
109 |
| - // region overlaps with `r` |
110 |
| - match r.region_type { |
111 |
| - MemoryRegionType::Usable => { |
112 |
| - if region.region_type == MemoryRegionType::Usable { |
113 |
| - panic!( |
114 |
| - "region {:?} overlaps with other usable region {:?}", |
115 |
| - region, r |
116 |
| - ) |
117 |
| - } |
118 |
| - } |
119 |
| - MemoryRegionType::InUse => {} |
120 |
| - MemoryRegionType::Bootloader |
121 |
| - | MemoryRegionType::Kernel |
122 |
| - | MemoryRegionType::PageTable if overwrite => {} |
123 |
| - _ => { |
124 |
| - panic!("can't override region {:?} with {:?}", r, region); |
125 |
| - } |
126 |
| - } |
127 |
| - if r.start_addr() < region.start_addr() && r.end_addr() > region.end_addr() { |
| 98 | + if region.range.start == r.range.start { |
| 99 | + if region.range.end < r.range.end { |
128 | 100 | // Case: (r = `r`, R = `region`)
|
129 | 101 | // ----rrrrrrrrrrr----
|
130 |
| - // ------RRRR--------- |
131 |
| - assert!( |
132 |
| - split_region.is_none(), |
133 |
| - "area overlaps with multiple regions" |
134 |
| - ); |
135 |
| - split_region = Some(MemoryRegion { |
136 |
| - start_addr: region.end_addr(), |
137 |
| - len: r.end_addr() - region.end_addr(), |
138 |
| - region_type: r.region_type, |
139 |
| - }); |
140 |
| - r.len = region.start_addr() - r.start_addr(); |
141 |
| - } else if region.start_addr() <= r.start_addr() { |
| 102 | + // ----RRRR----------- |
| 103 | + r.range.start = region.range.end; |
| 104 | + self.memory_map.add_region(region); |
| 105 | + } else { |
142 | 106 | // Case: (r = `r`, R = `region`)
|
143 | 107 | // ----rrrrrrrrrrr----
|
144 |
| - // --RRRR------------- |
145 |
| - r.len = r.len.checked_sub(region.end_addr() - r.start_addr()).unwrap(); |
146 |
| - r.start_addr = region.end_addr(); |
147 |
| - } else if region.end_addr() >= r.end_addr() { |
| 108 | + // ----RRRRRRRRRRRRRR- |
| 109 | + *r = region; |
| 110 | + } |
| 111 | + } else if region.range.start > r.range.start { |
| 112 | + if region.range.end < r.range.end { |
148 | 113 | // Case: (r = `r`, R = `region`)
|
149 | 114 | // ----rrrrrrrrrrr----
|
150 |
| - // -------------RRRR-- |
151 |
| - r.len = region.start_addr() - r.start_addr(); |
| 115 | + // ------RRRR--------- |
| 116 | + let mut behind_r = r.clone(); |
| 117 | + behind_r.range.start = region.range.end; |
| 118 | + r.range.end = region.range.start; |
| 119 | + self.memory_map.add_region(behind_r); |
| 120 | + self.memory_map.add_region(region); |
152 | 121 | } else {
|
153 |
| - unreachable!("region overlaps in an unexpected way") |
154 |
| - } |
155 |
| - } |
156 |
| - // check if region is adjacent to already existing region (only if same type) |
157 |
| - if r.region_type == region.region_type { |
158 |
| - if region.end_addr() == r.start_addr() { |
159 | 122 | // Case: (r = `r`, R = `region`)
|
160 |
| - // ------rrrrrrrrrrr-- |
161 |
| - // --RRRR------------- |
162 |
| - // => merge regions |
163 |
| - r.start_addr = region.start_addr(); |
164 |
| - r.len += region.len; |
165 |
| - region_already_inserted = true; |
166 |
| - } else if region.start_addr() == r.end_addr() { |
167 |
| - // Case: (r = `r`, R = `region`) |
168 |
| - // --rrrrrrrrrrr------ |
| 123 | + // ----rrrrrrrrrrr---- |
| 124 | + // -----------RRRR---- or |
169 | 125 | // -------------RRRR--
|
170 |
| - // => merge regions |
171 |
| - r.len += region.len; |
172 |
| - region_already_inserted = true; |
| 126 | + r.range.end = region.range.start; |
| 127 | + self.memory_map.add_region(region); |
173 | 128 | }
|
| 129 | + } else { |
| 130 | + // Case: (r = `r`, R = `region`) |
| 131 | + // ----rrrrrrrrrrr---- |
| 132 | + // --RRRR------------- |
| 133 | + r.range.start = region.range.end; |
| 134 | + self.memory_map.add_region(region); |
174 | 135 | }
|
| 136 | + return; |
175 | 137 | }
|
176 |
| - |
177 |
| - if let Some(split_region) = split_region { |
178 |
| - self.memory_map.add_region(split_region); |
179 |
| - } |
180 |
| - if !region_already_inserted { |
181 |
| - self.memory_map.add_region(region); |
182 |
| - } |
183 |
| - |
184 |
| - self.memory_map.sort(); |
| 138 | + panic!("region {:x?} is not a usable memory region", region); |
185 | 139 | }
|
186 | 140 | }
|
0 commit comments