extern crate alloc;

use alloc::vec::Vec;

use crate::{arch::vm::{get_page_table, get_root_page_table_idx, mmu::PageTable}, environment::PAGE_SIZE};

use super::vmem::VirtualMemoryMap;
/// Manages a virtual address space: the list of virtual-to-physical memory
/// mappings and the address space identifier (ASID) used to locate the
/// corresponding root page table.
#[derive(Debug, Clone)]
pub struct VirtualMemoryManager {
    memmap: Vec<VirtualMemoryMap>,
    asid: usize,
}

impl VirtualMemoryManager {
    /// Creates an empty manager with no memory maps and ASID 0.
    pub fn new() -> Self {
        VirtualMemoryManager {
            memmap: Vec::new(),
            asid: 0,
        }
    }

    /// Sets the address space identifier (ASID) for this manager.
    pub fn set_asid(&mut self, asid: usize) {
        self.asid = asid;
    }

    /// Returns the current ASID.
    pub fn get_asid(&self) -> usize {
        self.asid
    }

    /// Returns all registered memory maps.
    pub fn get_memmap(&self) -> &Vec<VirtualMemoryMap> {
        &self.memmap
    }

    /// Registers a memory map. The start address and size of both the virtual
    /// and the physical area must be aligned to PAGE_SIZE.
    pub fn add_memory_map(&mut self, map: VirtualMemoryMap) -> Result<(), &'static str> {
        if map.vmarea.start % PAGE_SIZE != 0 || map.pmarea.start % PAGE_SIZE != 0 ||
            map.vmarea.size() % PAGE_SIZE != 0 || map.pmarea.size() % PAGE_SIZE != 0 {
            return Err("Address or size is not aligned to PAGE_SIZE");
        }

        self.memmap.push(map);
        Ok(())
    }

    /// Returns the memory map at `idx`, if any.
    pub fn get_memory_map(&self, idx: usize) -> Option<&VirtualMemoryMap> {
        self.memmap.get(idx)
    }

    /// Removes and returns the memory map at `idx`, or `None` if out of range.
    pub fn remove_memory_map(&mut self, idx: usize) -> Option<VirtualMemoryMap> {
        if idx < self.memmap.len() {
            Some(self.memmap.remove(idx))
        } else {
            None
        }
    }

    /// Removes all memory maps and returns them, leaving the manager empty.
    pub fn remove_all_memory_maps(&mut self) -> Vec<VirtualMemoryMap> {
        // Take the whole vector at once rather than repeatedly removing the
        // first element, which would shift the remainder on every iteration.
        core::mem::take(&mut self.memmap)
    }

    /// Re-registers previously removed memory maps, revalidating each one.
    /// Fails on the first map that is not page-aligned.
    pub fn restore_memory_maps(&mut self, maps: Vec<VirtualMemoryMap>) -> Result<(), &'static str> {
        for map in maps {
            self.add_memory_map(map)?;
        }
        Ok(())
    }

    /// Returns the memory map containing `vaddr` (bounds are inclusive).
    /// If maps overlap, the most recently added one wins.
    pub fn search_memory_map(&self, vaddr: usize) -> Option<&VirtualMemoryMap> {
        // Scan from the back so that the most recently added mapping takes
        // precedence when areas overlap.
        self.memmap.iter().rev()
            .find(|map| map.vmarea.start <= vaddr && vaddr <= map.vmarea.end)
    }

    /// Returns the index of the memory map containing `vaddr`.
    /// If maps overlap, the most recently added one wins.
    pub fn search_memory_map_idx(&self, vaddr: usize) -> Option<usize> {
        self.memmap.iter().enumerate().rev()
            .find(|(_, map)| map.vmarea.start <= vaddr && vaddr <= map.vmarea.end)
            .map(|(idx, _)| idx)
    }

    /// Returns the root page table registered for this manager's ASID, if any.
    pub fn get_root_page_table(&self) -> Option<&mut PageTable> {
        get_root_page_table_idx(self.asid).and_then(|idx| get_page_table(idx))
    }

    /// Translates a virtual address into the corresponding physical address
    /// using the registered memory maps. Returns `None` if `vaddr` is not
    /// covered by any map.
    pub fn translate_vaddr(&self, vaddr: usize) -> Option<usize> {
        for map in self.memmap.iter() {
            if vaddr >= map.vmarea.start && vaddr <= map.vmarea.end {
                // The offset within the virtual area carries over directly
                // into the physical area.
                let offset = vaddr - map.vmarea.start;
                return Some(map.pmarea.start + offset);
            }
        }
        None
    }
}

#[cfg(test)]
mod tests {
    use crate::arch::vm::alloc_virtual_address_space;
    use crate::vm::VirtualMemoryMap;
    use crate::vm::{manager::VirtualMemoryManager, vmem::MemoryArea};

    #[test_case]
    fn test_new_virtual_memory_manager() {
        let vmm = VirtualMemoryManager::new();
        assert_eq!(vmm.get_asid(), 0);
    }

    #[test_case]
    fn test_set_and_get_asid() {
        let mut vmm = VirtualMemoryManager::new();
        vmm.set_asid(42);
        assert_eq!(vmm.get_asid(), 42);
    }

    #[test_case]
    fn test_add_and_get_memory_map() {
        let mut vmm = VirtualMemoryManager::new();
        let vma = MemoryArea { start: 0x1000, end: 0x1fff };
        let map = VirtualMemoryMap { vmarea: vma, pmarea: vma, permissions: 0, is_shared: false };
        vmm.add_memory_map(map).unwrap();
        assert_eq!(vmm.get_memory_map(0).unwrap().vmarea.start, 0x1000);
    }
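
    // A sketch of a negative test for add_memory_map's alignment check. The
    // 0x1001 start address is hypothetical, chosen to be misaligned for any
    // PAGE_SIZE larger than one byte.
    #[test_case]
    fn test_add_memory_map_rejects_unaligned() {
        let mut vmm = VirtualMemoryManager::new();
        let vma = MemoryArea { start: 0x1001, end: 0x1fff };
        let map = VirtualMemoryMap { vmarea: vma, pmarea: vma, permissions: 0, is_shared: false };
        assert!(vmm.add_memory_map(map).is_err());
    }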

    #[test_case]
    fn test_remove_memory_map() {
        let mut vmm = VirtualMemoryManager::new();
        let vma = MemoryArea { start: 0x1000, end: 0x1fff };
        let map = VirtualMemoryMap { vmarea: vma, pmarea: vma, permissions: 0, is_shared: false };
        vmm.add_memory_map(map).unwrap();
        let removed_map = vmm.remove_memory_map(0).unwrap();
        assert_eq!(removed_map.vmarea.start, 0x1000);
        assert!(vmm.get_memory_map(0).is_none());
    }
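
    // A sketch exercising remove_all_memory_maps and restore_memory_maps
    // together, reusing the map shape from the tests above.
    #[test_case]
    fn test_remove_all_and_restore_memory_maps() {
        let mut vmm = VirtualMemoryManager::new();
        let vma = MemoryArea { start: 0x1000, end: 0x1fff };
        let map = VirtualMemoryMap { vmarea: vma, pmarea: vma, permissions: 0, is_shared: false };
        vmm.add_memory_map(map).unwrap();
        let removed = vmm.remove_all_memory_maps();
        assert_eq!(removed.len(), 1);
        assert!(vmm.get_memmap().is_empty());
        vmm.restore_memory_maps(removed).unwrap();
        assert_eq!(vmm.get_memmap().len(), 1);
    }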

    #[test_case]
    fn test_search_memory_map() {
        let mut vmm = VirtualMemoryManager::new();
        let vma1 = MemoryArea { start: 0x1000, end: 0x1fff };
        let map1 = VirtualMemoryMap { vmarea: vma1, pmarea: vma1, permissions: 0, is_shared: false };
        let vma2 = MemoryArea { start: 0x3000, end: 0x3fff };
        let map2 = VirtualMemoryMap { vmarea: vma2, pmarea: vma2, permissions: 0, is_shared: false };
        vmm.add_memory_map(map1).unwrap();
        vmm.add_memory_map(map2).unwrap();
        let found_map = vmm.search_memory_map(0x3500).unwrap();
        assert_eq!(found_map.vmarea.start, 0x3000);
    }
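
    // Sketches for search_memory_map_idx and translate_vaddr. Both rely on
    // the identity-style mappings (pmarea == vmarea) used by the tests above.
    #[test_case]
    fn test_search_memory_map_idx() {
        let mut vmm = VirtualMemoryManager::new();
        let vma1 = MemoryArea { start: 0x1000, end: 0x1fff };
        let map1 = VirtualMemoryMap { vmarea: vma1, pmarea: vma1, permissions: 0, is_shared: false };
        let vma2 = MemoryArea { start: 0x3000, end: 0x3fff };
        let map2 = VirtualMemoryMap { vmarea: vma2, pmarea: vma2, permissions: 0, is_shared: false };
        vmm.add_memory_map(map1).unwrap();
        vmm.add_memory_map(map2).unwrap();
        assert_eq!(vmm.search_memory_map_idx(0x3500), Some(1));
        assert_eq!(vmm.search_memory_map_idx(0x2000), None);
    }

    #[test_case]
    fn test_translate_vaddr() {
        let mut vmm = VirtualMemoryManager::new();
        let vma = MemoryArea { start: 0x1000, end: 0x1fff };
        let map = VirtualMemoryMap { vmarea: vma, pmarea: vma, permissions: 0, is_shared: false };
        vmm.add_memory_map(map).unwrap();
        // With an identity mapping the translated address equals the input.
        assert_eq!(vmm.translate_vaddr(0x1234), Some(0x1234));
        // An address outside every map cannot be translated.
        assert_eq!(vmm.translate_vaddr(0x5000), None);
    }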

    #[test_case]
    fn test_get_root_page_table() {
        let mut vmm = VirtualMemoryManager::new();
        let asid = alloc_virtual_address_space();
        vmm.set_asid(asid);
        let page_table = vmm.get_root_page_table();
        assert!(page_table.is_some());
    }
}