[Mammoth] Move all callers of FromCapability to OwnedMemoryObject.

Drew Galbraith 2023-11-19 20:33:15 -08:00
parent 337126cabb
commit 8e827a5dfb
24 changed files with 175 additions and 87 deletions
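
The Mammoth (userspace) callers that the title refers to are among the 24 changed files but are not reproduced in the hunks below; what follows is the kernel-side (zion) support work. As a rough sketch of the pattern the title describes, callers constructing an owning RAII wrapper around a memory-object capability instead of juggling the raw handle, the snippet below is illustrative only: the real Mammoth class, its namespace, and the capability-release call are assumptions, not taken from this diff.

#include <cstdint>
#include <cstdio>
#include <utility>

// Sketch of an "owned" memory object wrapper (names and signatures assumed).
using z_cap_t = uint64_t;

void ReleaseCapability(z_cap_t cap) {
  // Stand-in for the syscall that drops the handle, releasing userspace's
  // reference so the kernel-side MemoryObject can eventually be freed.
  std::printf("released cap %lu\n", static_cast<unsigned long>(cap));
}

class OwnedMemoryObject {
 public:
  static OwnedMemoryObject FromCapability(z_cap_t cap) {
    return OwnedMemoryObject(cap);
  }
  // Move-only: declaring the move constructor suppresses copying, so two
  // owners can never release the same capability twice.
  OwnedMemoryObject(OwnedMemoryObject&& other)
      : cap_(std::exchange(other.cap_, 0)) {}
  ~OwnedMemoryObject() {
    if (cap_ != 0) ReleaseCapability(cap_);
  }
  z_cap_t cap() const { return cap_; }

 private:
  explicit OwnedMemoryObject(z_cap_t cap) : cap_(cap) {}
  z_cap_t cap_;
};

int main() {
  auto obj = OwnedMemoryObject::FromCapability(42);
  // The capability is released automatically when obj goes out of scope.
}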

@@ -64,7 +64,7 @@ target_link_libraries(zion
 # -mno-red-zone -- Don't put data below the stack pointer (clobbered by interrupts).
 # -mcmodel=kernel -- Assume the kernel code is running in the higher half.
 # -mgeneral-regs-only -- Prevent GCC from using a whole host of nonsense registers (that we have to enable).
-set(_Z_COMPILE_FLAGS "${CMAKE_CXX_FLAGS} -c -ffreestanding -fno-rtti -fno-exceptions -nostdlib -mabi=sysv -mno-red-zone -mcmodel=kernel -mgeneral-regs-only")
+set(_Z_COMPILE_FLAGS "${CMAKE_CXX_FLAGS} -c -ffreestanding -fno-rtti -fno-exceptions -fno-use-cxa-atexit -nostdlib -mabi=sysv -mno-red-zone -mcmodel=kernel -mgeneral-regs-only")
 set(_Z_LINK_SCRIPT "${CMAKE_CURRENT_SOURCE_DIR}/linker.ld")
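
The only substantive change in the compile flags is the addition of -fno-use-cxa-atexit. By default GCC registers destructors of objects with static storage duration by emitting calls to __cxa_atexit (which also pulls in __dso_handle); those symbols come from a hosted C++ runtime that a freestanding kernel does not link against. With the flag, registration falls back to the plain atexit mechanism, which is much easier to stub out or avoid. A minimal hosted illustration of the construct that triggers this registration:

#include <cstdio>

// A namespace-scope object with a non-trivial destructor is exactly the case
// that makes the compiler emit a registration call at static-init time so the
// destructor can run at exit. Build with and without -fno-use-cxa-atexit and
// inspect the generated code to see the different registration calls.
struct Flusher {
  ~Flusher() { std::puts("flushed"); }  // must be registered to run at exit
};

Flusher g_flusher;

int main() { return 0; }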

@@ -9,7 +9,7 @@
 void dbgln(const glcr::StringView& str);
 
 template <typename... Args>
-void dbgln(const char* str, Args... args) {
+void dbgln(const char* str, Args&&... args) {
   char buffer[256];
   glcr::FixedStringBuilder builder(buffer, 256);
   glcr::StrFormatIntoBuffer(builder, str, args...);
@@ -17,12 +17,12 @@ void dbgln(const char* str, Args... args) {
 }
 
 template <typename... Args>
-void dbgln_large(const char* str, Args... args) {
+void dbgln_large(const char* str, Args&&... args) {
   dbgln(glcr::StrFormat(str, args...));
 }
 
 template <typename... Args>
-void panic(const char* str, Args... args) {
+void panic(const char* str, Args&&... args) {
   dbgln(str, args...);
   dbgln("PANIC");
   asm volatile("cli; hlt;");
@@ -34,6 +34,6 @@ void panic(const char* str, Args... args) {
       panic(str); \
     } \
   }
-#define UNREACHABLE \
-  panic("Unreachable {}, {}", __FILE__, __LINE__); \
+#define UNREACHABLE                                                  \
+  panic("Unreachable {}, {}", glcr::StringView(__FILE__), __LINE__); \
   __builtin_unreachable();
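
Two small fixes in these logging helpers: the variadic parameters become forwarding references (Args&&...), so arguments bind by reference instead of being copied into the template, and UNREACHABLE now wraps __FILE__ in glcr::StringView, presumably so the formatter treats the file name as a string rather than whatever overload a bare const char* would otherwise select. A self-contained illustration of the forwarding-reference difference (generic names, not from this tree):

#include <cstdio>

// A type that announces copies, to make the difference visible.
struct Heavy {
  Heavy() = default;
  Heavy(const Heavy&) { std::puts("copied"); }
};

// By-value parameter pack: every argument is copied at the call site.
template <typename... Args>
void log_by_value(Args... args) {}

// Forwarding references: lvalues bind as references, so nothing is copied.
template <typename... Args>
void log_by_ref(Args&&... args) {}

int main() {
  Heavy h;
  log_by_value(h);  // prints "copied"
  log_by_ref(h);    // prints nothing
}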

@@ -1,7 +1,17 @@
 #include "lib/memory_mapping_tree.h"
 
+#include <glacier/string/str_format.h>
+
 #include "debug/debug.h"
 
+template <>
+void glcr::StrFormatValue(glcr::StringBuilder& builder,
+                          const MemoryMappingTree::MemoryMapping& value,
+                          glcr::StringView opts) {
+  builder.PushBack(
+      glcr::StrFormat("Range {x}-{x}", value.vaddr_base, value.vaddr_limit));
+}
+
 glcr::ErrorCode MemoryMappingTree::AddInMemoryObject(
     uint64_t vaddr, const glcr::RefPtr<MemoryObject>& object) {
   // TODO: This implementation is inefficient as it traverses the tree a lot, we
@@ -45,6 +55,9 @@ glcr::ErrorCode MemoryMappingTree::FreeMemoryRange(uint64_t vaddr_base,
   auto find_or = mapping_tree_.Find(vaddr_base);
   if (find_or) {
+    dbgln("Mem addr {x} refcnt {}",
+          (uint64_t)find_or.value().get().mem_object.get(),
+          find_or.value().get().mem_object->ref_count());
     mapping_tree_.Delete(vaddr_base);
   }
 
   while (true) {
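
The StrFormatValue specialization above is how a MemoryMapping becomes printable through glcr's formatting machinery (and therefore through dbgln). The exact dispatch inside glacier is not shown in this diff, but the pattern itself, a formatting hook that user code opts into by specializing a function template, looks like this standalone analog with illustrative names:

#include <cstdint>
#include <cstdio>
#include <string>

struct Mapping {
  uint64_t vaddr_base;
  uint64_t vaddr_limit;
};

// Primary template: the "library" declares the hook but leaves it to each
// type to provide an implementation via explicit specialization.
template <typename T>
void FormatValue(std::string& out, const T& value);

// Specialization for Mapping, in the same spirit as the MemoryMapping one.
template <>
void FormatValue(std::string& out, const Mapping& value) {
  char buf[64];
  std::snprintf(buf, sizeof(buf), "Range %llx-%llx",
                static_cast<unsigned long long>(value.vaddr_base),
                static_cast<unsigned long long>(value.vaddr_limit));
  out += buf;
}

int main() {
  std::string out;
  FormatValue(out, Mapping{0x1000, 0x2000});
  std::printf("%s\n", out.c_str());  // prints "Range 1000-2000"
}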

@@ -21,13 +21,13 @@ class MemoryMappingTree {
   glcr::ErrorOr<uint64_t> GetPhysicalPageAtVaddr(uint64_t vaddr);
 
- private:
   struct MemoryMapping {
     uint64_t vaddr_base;
     uint64_t vaddr_limit;
     glcr::RefPtr<MemoryObject> mem_object;
   };
 
+ private:
   // TODO: Consider adding a red-black tree implementation here.
   // As is, this tree functions about as well as a linked list
   // because mappings are likely to be added in near-perfect ascending order.

@@ -164,3 +164,8 @@ void operator delete[](void* addr) {
     SlabFree(addr);
   }
 }
+void operator delete[](void* addr, uint64_t size) {
+  if (IsSlab(addr)) {
+    SlabFree(addr);
+  }
+}
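
The slab allocator gains the sized form of the array delete operator. Under C++14 sized deallocation the compiler may call the sized overload directly when it knows the allocation size, so a kernel that replaces the global operators has to supply it as well once anything in the build emits such a call (the size is simply ignored here, as in the unsized form). A hosted sketch of when the sized overload gets picked:

#include <cstddef>
#include <cstdio>
#include <cstdlib>

// Replace the global array new/delete so the dispatch is observable.
void* operator new[](std::size_t size) { return std::malloc(size); }
void operator delete[](void* p) noexcept { std::free(p); }
void operator delete[](void* p, std::size_t size) noexcept {
  std::printf("sized delete[] of %zu bytes\n", size);
  std::free(p);
}

struct Obj {
  ~Obj() {}  // non-trivial destructor, so the array keeps a size cookie
  int x;
};

int main() {
  Obj* arr = new Obj[4];
  delete[] arr;  // may call the sized overload (e.g. GCC with -std=c++14 or newer)
}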

@@ -20,6 +20,8 @@ MemoryObject::MemoryObject(uint64_t size) : size_(size) {
   }
 }
 
+MemoryObject::~MemoryObject() { dbgln("Memory Object Freed"); }
+
 uint64_t MemoryObject::PhysicalPageAtOffset(uint64_t offset) {
   if (offset > size_) {
     panic("Invalid offset");

@@ -27,7 +27,8 @@ class MemoryObject : public KernelObject {
            kZionPerm_Transmit;
   }
 
-  MemoryObject(uint64_t size);
+  explicit MemoryObject(uint64_t size);
+  ~MemoryObject();
 
   uint64_t size() { return size_; }
   uint64_t num_pages() { return size_ / 0x1000; }
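
In the header, the single-argument constructor becomes explicit (so a bare uint64_t no longer implicitly converts into a MemoryObject) and the destructor defined in the .cpp hunk above is now declared. A standalone analog of what explicit buys, with illustrative names only:

#include <cstdint>

class Sized {
 public:
  explicit Sized(uint64_t size) : size_(size) {}
  uint64_t size() const { return size_; }

 private:
  uint64_t size_;
};

void Consume(const Sized& s);  // declaration only; never called here

int main() {
  Sized ok(0x1000);        // fine: direct initialization
  // Sized bad = 0x1000;   // error: the constructor is explicit
  // Consume(0x2000);      // error: no implicit uint64_t -> Sized conversion
  (void)ok;
}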