Physical Memory Management Improvements.
This commit is contained in:
parent 3020b28c47
commit 9ce13a85b6

4 changed files with 212 additions and 18 deletions
@@ -239,9 +239,7 @@ fn load_program_segment(
     let mem_object = crate::mem::MemoryRegion::new(mem_size)?;
-    for i in mem_object.mut_slice() {
-        *i = 0;
-    }
+    mem_object.zero_region();
 
     let file_start = prog_header.offset as usize;
     let file_end = file_start + prog_header.file_size as usize;
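For context, zero_region() (added in mem.rs below) replaces the open-coded zeroing loop. A sketch of how the zeroed segment is then populated, assuming mut_slice() yields the mapped region as &mut [u8] and `elf_bytes` (a hypothetical name) is the raw ELF image; the copy itself is outside this hunk:

    let file_start = prog_header.offset as usize;
    let file_end = file_start + prog_header.file_size as usize;
    // Copy the file-backed bytes; everything past file_size stays zeroed,
    // which is exactly what a .bss tail requires.
    mem_object.mut_slice()[..prog_header.file_size as usize]
        .copy_from_slice(&elf_bytes[file_start..file_end]);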
@@ -13,6 +13,7 @@ mod cap_syscall;
 pub mod elf;
 pub mod init;
 pub mod mem;
+pub mod physical_box;
 pub mod port;
 pub mod sync;
 pub mod syscall;
@@ -3,7 +3,8 @@ use crate::syscall;
 use crate::zion::ZError;
 use alloc::slice;
 use core::fmt::Debug;
-use core::ptr::{addr_of, addr_of_mut};
+use core::ops::Deref;
+use core::ptr::{addr_of, addr_of_mut, read_volatile, write_volatile, NonNull};
 
 #[cfg(feature = "hosted")]
 use linked_list_allocator::LockedHeap;
@@ -29,6 +30,7 @@ pub fn init_heap() {
 pub struct MemoryRegion {
     mem_cap: Capability,
     virt_addr: u64,
+    // TODO: This should probably be a usize.
     size: u64,
 }
@@ -94,13 +96,50 @@ impl MemoryRegion {
         }
     }
 
+    pub fn zero_region(&self) {
+        for i in self.mut_slice() {
+            *i = 0;
+        }
+    }
+
     pub fn raw_ptr_at_offset<T>(&self, offset: u64) -> *const T {
-        // TODO: Come up with a better safety check here.
-        // We can't use the size of T because it might not be sized.
         assert!(offset + size_of::<T>() as u64 <= self.size);
         (self.virt_addr + offset) as *const T
     }
 
+    pub fn mut_ptr_at_offset<T>(&self, offset: usize) -> *mut T {
+        assert!(offset + size_of::<T>() <= self.size as usize);
+        (self.virt_addr as usize + offset) as *mut T
+    }
+
+    /// Creates a reference from a given offset.
+    ///
+    /// SAFETY: Caller must ensure that the memory pointed to by this
+    /// pointer does not get mutated while the reference exists.
+    pub unsafe fn as_ref_at_offset<T>(&self, offset: usize) -> &T {
+        let ptr: *const T = self.raw_ptr_at_offset(offset as u64);
+        assert!(ptr.is_aligned());
+        // SAFETY:
+        // - We checked alignment.
+        // - self.virt_addr + offset can't be null.
+        // - It is dereferenceable because it is entirely within this memory region.
+        &*self.raw_ptr_at_offset::<T>(offset as u64)
+    }
+
+    /// Creates a mutable reference from a given offset.
+    ///
+    /// SAFETY: Caller must ensure that this is the only reference to the memory
+    /// pointed to by this pointer.
+    pub unsafe fn as_mut_ref_at_offset<T>(&self, offset: usize) -> &mut T {
+        let ptr: *const T = self.raw_ptr_at_offset(offset as u64);
+        assert!(ptr.is_aligned());
+        // SAFETY:
+        // - We checked alignment.
+        // - self.virt_addr + offset can't be null.
+        // - It is dereferenceable because it is entirely within this memory region.
+        &mut *self.mut_ptr_at_offset::<T>(offset)
+    }
+
     pub fn cap(&self) -> &Capability {
         &self.mem_cap
     }
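Usage sketch for the new typed accessors (not part of this commit; FramebufferInfo is a hypothetical #[repr(C)] type). The caller upholds the aliasing contract while the helper asserts bounds and alignment:

    #[repr(C)]
    #[derive(Clone, Copy)]
    struct FramebufferInfo {
        width: u32,
        height: u32,
    }

    let region = MemoryRegion::new(0x1000)?;
    region.zero_region();
    // SAFETY: `region` was just created, so nothing else references offset 0.
    let info = unsafe { region.as_mut_ref_at_offset::<FramebufferInfo>(0) };
    info.width = 640;
    info.height = 480;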
@@ -137,28 +176,22 @@ impl Drop for MemoryRegion {
     }
 }
 
-pub struct Volatile<T> {
-    /// TODO: This should maybe be MaybeUninit.
-    data: T,
-}
+#[repr(transparent)]
+pub struct Volatile<T: Copy>(T);
 
-impl<T> Volatile<T> {
-    pub fn read(&self) -> T
-    where
-        T: Copy,
-    {
-        unsafe { addr_of!(self.data).cast::<T>().read_volatile() }
+impl<T: Copy> Volatile<T> {
+    pub fn read(&self) -> T {
+        unsafe { read_volatile(addr_of!(self.0)) }
     }
 
     pub fn write(&mut self, data: T) {
         unsafe {
-            addr_of_mut!(self.data).cast::<T>().write_volatile(data);
+            write_volatile(addr_of_mut!(self.0), data);
         }
     }
 
     pub fn update<F>(&mut self, func: F)
     where
-        T: Copy,
         F: Fn(&mut T),
     {
         let mut data = self.read();
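Because Volatile<T: Copy> is now #[repr(transparent)], a struct of Volatile fields mirrors the underlying register layout exactly. A usage sketch (the UartRegs layout is hypothetical, not from this commit):

    #[repr(C)]
    struct UartRegs {
        data: Volatile<u32>,
        status: Volatile<u32>,
    }

    fn ack_and_echo(regs: &mut UartRegs) {
        let byte = regs.data.read();      // volatile load
        regs.status.update(|s| *s |= 1);  // volatile read-modify-write
        regs.data.write(byte);            // volatile store
    }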
@@ -176,6 +209,37 @@ where
     }
 }
 
+#[macro_export]
+macro_rules! read_unaligned_volatile {
+    ($struct_ptr:expr, $field:ident) => {
+        unsafe {
+            let field_ptr = core::ptr::addr_of!((*$struct_ptr).$field);
+            core::ptr::read_volatile(field_ptr as *const _)
+        }
+    };
+}
+
+#[macro_export]
+macro_rules! write_unaligned_volatile {
+    ($struct_ptr:expr, $field:ident, $value:expr) => {
+        unsafe {
+            let field_ptr = core::ptr::addr_of!((*$struct_ptr).$field);
+            core::ptr::write_volatile(field_ptr as *mut _, $value);
+        }
+    };
+}
+
+#[macro_export]
+macro_rules! map_unaligned_volatile {
+    ($struct_ptr:expr, $field:ident, $func:expr) => {
+        unsafe {
+            let field_ptr = core::ptr::addr_of!((*$struct_ptr).$field);
+            let value = core::ptr::read_volatile(field_ptr as *const _);
+            core::ptr::write_volatile(field_ptr as *mut _, ($func)(value));
+        }
+    };
+}
+
 pub fn map_cap_and_leak(mem_cap: Capability) -> u64 {
     let vaddr = syscall::address_space_map(&mem_cap).unwrap();
     mem_cap.release();
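These macros cover fields of #[repr(packed)] structures, where Rust forbids forming references (and hence Volatile<T> wrappers) because the fields may be unaligned; addr_of! builds the field pointer without an intermediate reference. A usage sketch, assuming some MemoryRegion `region` is in scope (TransferDescriptor is a hypothetical packed descriptor, not from this commit):

    #[repr(C, packed)]
    struct TransferDescriptor {
        buffer_paddr: u64,
        status: u32,
        control: u32,
    }

    let td: *mut TransferDescriptor = region.mut_ptr_at_offset(0);
    let status: u32 = read_unaligned_volatile!(td, status);
    write_unaligned_volatile!(td, buffer_paddr, 0x8_0000u64);
    map_unaligned_volatile!(td, control, |c: u32| c | 1);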
rust/lib/mammoth/src/physical_box.rs (new file, 131 lines)

@@ -0,0 +1,131 @@
+use core::{
+    marker::PhantomData,
+    ops::{Deref, DerefMut, Index, IndexMut},
+    ptr::NonNull,
+};
+
+use alloc::{slice, vec::Vec};
+
+use crate::mem::MemoryRegion;
+
+pub struct PhysicalBox<T: ?Sized> {
+    data: NonNull<T>,
+    region: MemoryRegion,
+    physical_address: usize,
+    _marker: PhantomData<T>,
+}
+
+impl<T: ?Sized> PhysicalBox<T> {
+    pub fn physical_address(&self) -> usize {
+        self.physical_address
+    }
+}
+
+impl<T: ?Sized> Deref for PhysicalBox<T> {
+    type Target = T;
+
+    fn deref(&self) -> &Self::Target {
+        // SAFETY:
+        // - Alignment: This is page aligned.
+        // - Dereferenceable: Guaranteed in same allocation.
+        // - Aliasing: The borrow rules ensure this.
+        unsafe { self.data.as_ref() }
+    }
+}
+
+impl<T: ?Sized> DerefMut for PhysicalBox<T> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        // SAFETY:
+        // - Alignment: This is page aligned.
+        // - Dereferenceable: Guaranteed in same allocation.
+        // - Aliasing: The borrow rules ensure this.
+        unsafe { self.data.as_mut() }
+    }
+}
+
+impl<T> PhysicalBox<[T]> {
+    pub fn default_with_count(default: T, len: usize) -> Self
+    where
+        T: Clone,
+    {
+        let layout = core::alloc::Layout::array::<T>(len).expect("Layout overflow");
+
+        // TODO: Implement a function like alloc that takes a layout.
+        let (memory_region, paddr) =
+            MemoryRegion::contiguous_physical(layout.size() as u64).expect("Failed to allocate");
+
+        let ptr: *mut T = memory_region.mut_ptr_at_offset(0);
+        for i in 0..len {
+            unsafe {
+                ptr.add(i).write(default.clone());
+            }
+        }
+
+        let slice_ptr = core::ptr::slice_from_raw_parts_mut(ptr, len);
+
+        Self {
+            // UNWRAP: We know this isn't null.
+            data: NonNull::new(slice_ptr).unwrap(),
+            region: memory_region,
+            physical_address: paddr as usize,
+            _marker: PhantomData,
+        }
+    }
+
+    pub fn from_vec(vec: Vec<T>) -> Self {
+        let len = vec.len();
+        let layout = core::alloc::Layout::array::<T>(len).expect("Layout overflow");
+
+        // TODO: Implement a function like alloc that takes a layout.
+        let (memory_region, paddr) =
+            MemoryRegion::contiguous_physical(layout.size() as u64).expect("Failed to allocate");
+
+        let ptr: *mut T = memory_region.mut_ptr_at_offset(0);
+        for (i, item) in vec.into_iter().enumerate() {
+            unsafe {
+                ptr.add(i).write(item);
+            }
+        }
+
+        let slice_ptr = core::ptr::slice_from_raw_parts_mut(ptr, len);
+
+        Self {
+            // UNWRAP: We know this isn't null.
+            data: NonNull::new(slice_ptr).unwrap(),
+            region: memory_region,
+            physical_address: paddr as usize,
+            _marker: PhantomData,
+        }
+    }
+    pub fn len(&self) -> usize {
+        (**self).len()
+    }
+}
+
+impl<I, T> Index<I> for PhysicalBox<[T]>
+where
+    I: slice::SliceIndex<[T]>,
+{
+    type Output = I::Output;
+
+    fn index(&self, index: I) -> &Self::Output {
+        &(**self)[index]
+    }
+}
+
+impl<I, T> IndexMut<I> for PhysicalBox<[T]>
+where
+    I: slice::SliceIndex<[T]>,
+{
+    fn index_mut(&mut self, index: I) -> &mut Self::Output {
+        &mut (**self)[index]
+    }
+}
+
+impl<T: ?Sized> Drop for PhysicalBox<T> {
+    fn drop(&mut self) {
+        // SAFETY:
+        // - We own this data.
+        unsafe { core::ptr::drop_in_place(self.data.as_ptr()) }
+    }
+}
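Usage sketch for PhysicalBox (not part of this commit; the DMA ring and the register write are hypothetical): allocate physically contiguous memory, access it like a slice from the CPU side, and hand the stable physical address to a device.

    // 256 zeroed descriptors in physically contiguous memory.
    let ring: PhysicalBox<[u64]> = PhysicalBox::default_with_count(0u64, 256);
    assert_eq!(ring.len(), 256);

    // CPU-side access goes through Deref/Index like an ordinary slice.
    let head = ring[0];

    // The device sees the region at a fixed physical address.
    let paddr = ring.physical_address();
    // e.g. write `paddr` into a hypothetical controller's ring-base register.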