extern crate alloc;
use alloc::{sync::Arc, vec::Vec};

use crate::{arch::vm::{free_virtual_address_space, get_root_pagetable, is_asid_used, mmu::PageTable}, environment::PAGE_SIZE};

use super::vmem::VirtualMemoryMap;

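/// Manages a virtual address space: the registered memory maps, the
/// address space identifier (ASID), and the page tables kept alive for
/// the mappings.
///
/// # Example (sketch)
///
/// ```ignore
/// let mut vmm = VirtualMemoryManager::new();
/// vmm.set_asid(alloc_virtual_address_space());
/// let area = MemoryArea { start: 0x1000, end: 0x1fff };
/// let map = VirtualMemoryMap { vmarea: area, pmarea: area, permissions: 0, is_shared: false };
/// vmm.add_memory_map(map).unwrap();
/// let paddr = vmm.translate_vaddr(0x1000);
/// ```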
#[derive(Debug, Clone)]
pub struct VirtualMemoryManager {
    memmap: Vec<VirtualMemoryMap>,
    asid: u16,
    page_tables: Vec<Arc<PageTable>>,
}

impl VirtualMemoryManager {
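    /// Creates an empty manager with no mappings and ASID 0 (unassigned).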
    pub fn new() -> Self {
        VirtualMemoryManager {
            memmap: Vec::new(),
            asid: 0,
            page_tables: Vec::new(),
        }
    }

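    /// Assigns a new ASID to this manager. Setting the same ASID again is a
    /// no-op; otherwise the previously held address space (if any) is freed
    /// before the new ASID is recorded.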
    pub fn set_asid(&mut self, asid: u16) {
        if self.asid == asid {
            return;
        }
        if self.asid != 0 && is_asid_used(self.asid) {
            free_virtual_address_space(self.asid);
        }
        self.asid = asid;
    }

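    /// Returns the ASID currently assigned to this manager.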
    pub fn get_asid(&self) -> u16 {
        self.asid
    }

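    /// Returns all registered memory maps.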
    pub fn get_memmap(&self) -> &Vec<VirtualMemoryMap> {
        &self.memmap
    }

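    /// Registers a memory map. Both the virtual and physical areas must
    /// start on a page boundary and span a whole number of pages; otherwise
    /// an error is returned.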
    pub fn add_memory_map(&mut self, map: VirtualMemoryMap) -> Result<(), &'static str> {
        if map.vmarea.start % PAGE_SIZE != 0 || map.pmarea.start % PAGE_SIZE != 0 ||
            map.vmarea.size() % PAGE_SIZE != 0 || map.pmarea.size() % PAGE_SIZE != 0 {
            return Err("Address or size is not aligned to PAGE_SIZE");
        }

        self.memmap.push(map);
        Ok(())
    }

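    /// Returns the memory map at the given index, or `None` if out of range.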
    pub fn get_memory_map(&self, idx: usize) -> Option<&VirtualMemoryMap> {
        self.memmap.get(idx)
    }

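    /// Removes and returns the memory map at the given index, or `None` if
    /// out of range.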
    pub fn remove_memory_map(&mut self, idx: usize) -> Option<VirtualMemoryMap> {
        if idx < self.memmap.len() {
            Some(self.memmap.remove(idx))
        } else {
            None
        }
    }

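    /// Removes and returns all memory maps, leaving the manager empty.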
    pub fn remove_all_memory_maps(&mut self) -> Vec<VirtualMemoryMap> {
        core::mem::take(&mut self.memmap)
    }

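    /// Re-registers a set of memory maps, e.g. ones previously returned by
    /// `remove_all_memory_maps`. Stops and returns an error at the first map
    /// that fails the alignment checks in `add_memory_map`.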
    pub fn restore_memory_maps(&mut self, maps: Vec<VirtualMemoryMap>) -> Result<(), &'static str> {
        for map in maps {
            self.add_memory_map(map)?;
        }
        Ok(())
    }

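    /// Finds the memory map that contains `vaddr`, if any. When maps
    /// overlap, the most recently added match wins.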
    pub fn search_memory_map(&self, vaddr: usize) -> Option<&VirtualMemoryMap> {
        let mut ret = None;
        for map in self.memmap.iter() {
            if map.vmarea.start <= vaddr && vaddr <= map.vmarea.end {
                ret = Some(map);
            }
        }
        ret
    }

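    /// Like `search_memory_map`, but returns the index of the matching map
    /// instead of a reference to it.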
    pub fn search_memory_map_idx(&self, vaddr: usize) -> Option<usize> {
        let mut ret = None;
        for (i, map) in self.memmap.iter().enumerate() {
            if map.vmarea.start <= vaddr && vaddr <= map.vmarea.end {
                ret = Some(i);
            }
        }
        ret
    }

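    /// Retains a reference to a page table so that it is not freed while
    /// this manager is alive.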
    pub fn add_page_table(&mut self, page_table: Arc<PageTable>) {
        self.page_tables.push(page_table);
    }

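    /// Returns the root page table registered for this manager's ASID, or
    /// `None` if no address space is registered for it.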
    pub fn get_root_page_table(&self) -> Option<&mut PageTable> {
        get_root_pagetable(self.asid)
    }

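    /// Translates a virtual address to its physical address using the
    /// registered memory maps (no page-table walk). Returns `None` if no
    /// map contains `vaddr`.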
    pub fn translate_vaddr(&self, vaddr: usize) -> Option<usize> {
        for map in self.memmap.iter() {
            if vaddr >= map.vmarea.start && vaddr <= map.vmarea.end {
                let offset = vaddr - map.vmarea.start;
                let paddr = map.pmarea.start + offset;
                return Some(paddr);
            }
        }
        None
    }
}

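/// Frees the backing virtual address space (if one is still registered)
/// when the manager is dropped.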
impl Drop for VirtualMemoryManager {
    fn drop(&mut self) {
        if self.asid != 0 && is_asid_used(self.asid) {
            free_virtual_address_space(self.asid);
        }
    }
}

#[cfg(test)]
mod tests {
    use crate::arch::vm::alloc_virtual_address_space;
    use crate::vm::VirtualMemoryMap;
    use crate::vm::{manager::VirtualMemoryManager, vmem::MemoryArea};

    #[test_case]
    fn test_new_virtual_memory_manager() {
        let vmm = VirtualMemoryManager::new();
        assert_eq!(vmm.get_asid(), 0);
    }

    #[test_case]
    fn test_set_and_get_asid() {
        let mut vmm = VirtualMemoryManager::new();
        vmm.set_asid(42);
        assert_eq!(vmm.get_asid(), 42);
    }

    #[test_case]
    fn test_add_and_get_memory_map() {
        let mut vmm = VirtualMemoryManager::new();
        let vma = MemoryArea { start: 0x1000, end: 0x1fff };
        let map = VirtualMemoryMap { vmarea: vma, pmarea: vma, permissions: 0, is_shared: false };
        vmm.add_memory_map(map).unwrap();
        assert_eq!(vmm.get_memory_map(0).unwrap().vmarea.start, 0x1000);
    }

    #[test_case]
    fn test_remove_memory_map() {
        let mut vmm = VirtualMemoryManager::new();
        let vma = MemoryArea { start: 0x1000, end: 0x1fff };
        let map = VirtualMemoryMap { vmarea: vma, pmarea: vma, permissions: 0, is_shared: false };
        vmm.add_memory_map(map).unwrap();
        let removed_map = vmm.remove_memory_map(0).unwrap();
        assert_eq!(removed_map.vmarea.start, 0x1000);
        assert!(vmm.get_memory_map(0).is_none());
    }

    #[test_case]
    fn test_search_memory_map() {
        let mut vmm = VirtualMemoryManager::new();
        let vma1 = MemoryArea { start: 0x1000, end: 0x1fff };
        let map1 = VirtualMemoryMap { vmarea: vma1, pmarea: vma1, permissions: 0, is_shared: false };
        let vma2 = MemoryArea { start: 0x3000, end: 0x3fff };
        let map2 = VirtualMemoryMap { vmarea: vma2, pmarea: vma2, permissions: 0, is_shared: false };
        vmm.add_memory_map(map1).unwrap();
        vmm.add_memory_map(map2).unwrap();
        let found_map = vmm.search_memory_map(0x3500).unwrap();
        assert_eq!(found_map.vmarea.start, 0x3000);
    }

    #[test_case]
    fn test_get_root_page_table() {
        let mut vmm = VirtualMemoryManager::new();
        let asid = alloc_virtual_address_space();
        vmm.set_asid(asid);
        let page_table = vmm.get_root_page_table();
        assert!(page_table.is_some());
    }
}