create copies of appcode in thread constructor

This commit is contained in:
2026-02-28 18:01:15 +01:00
parent 1acc652446
commit 4976ea91be
9 changed files with 265 additions and 180 deletions

View File

@@ -3,10 +3,22 @@
#include "pageframealloc.h"
#include "../utils/string.h"
void invlpg(uintptr_t virt_addr) {
    // Invalidate the TLB entry for the page containing virt_addr on this CPU.
    __asm__ __volatile__("invlpg (%0)" : : "r"(virt_addr) : "memory");
}
void load_cr3( void* cr3_value ) {
    // Switch address spaces: write the physical address of a top-level
    // page table into the CR3 register.
    __asm__ __volatile__("mov %0, %%cr3" : : "r"((uint64_t)cr3_value) : "memory");
}
uintptr_t get_cr3() {
    // Read the CR3 register, i.e. the physical address of the page table
    // hierarchy that is currently active.
    uintptr_t current_cr3;
    asm volatile("mov %%cr3, %0" : "=r"(current_cr3));
    return current_cr3;
}
void write_identity_map(pagetable_t* identity_table, uint64_t size){
for(uintptr_t i=0; i<size/4096; i++){
identity_table[i/512].entries[i%512] = {
@@ -18,6 +30,41 @@ void write_identity_map(pagetable_t* identity_table, uint64_t size){
}
}
bool copy_page(pagetable_t* l4, uintptr_t src_paddr, void* dest_vaddr) {
    // Copy one 4 KiB page of physical memory (src_paddr) into the frame that
    // backs dest_vaddr in the address space described by l4, allocating and
    // mapping a fresh frame there if dest_vaddr is not mapped yet.
    // Returns false if a frame or a temporary virtual window cannot be obtained.
    //
    // The current (active) address space is used to create two temporary
    // windows so both frames can be addressed for the memcpy.
    pagetable_t* current_l4 = (pagetable_t*)get_cr3();

    // allocate frame and map in given address space
    // if no mapping is present
    uintptr_t dest_paddr = isMapped((uintptr_t)dest_vaddr, l4);
    if(dest_paddr == 0){
        dest_paddr = (uintptr_t)PageFrameAllocator::alloc(false);
        if(dest_paddr == 0)
            return false;
        setMapping((uintptr_t)dest_vaddr, (void*)dest_paddr, l4);
    }

    // temp window 1: destination frame mapped into the current address space
    void* temp_dest_vaddr = getFreeVirtSpace(current_l4, 1);
    if(temp_dest_vaddr == nullptr)
        return false;
    setMapping((uintptr_t)temp_dest_vaddr, (void*)dest_paddr, current_l4);

    // temp window 2: source frame mapped into the current address space
    void* temp_src_vaddr = getFreeVirtSpace(current_l4, 1);
    if(temp_src_vaddr == nullptr){
        // undo the first temporary mapping before bailing out, otherwise a
        // stale mapping of the destination frame would be leaked here
        setMapping((uintptr_t)temp_dest_vaddr, 0, current_l4);
        return false;
    }
    setMapping((uintptr_t)temp_src_vaddr, (void*)src_paddr, current_l4);

    // copy the page contents through the two windows
    memcpy(temp_dest_vaddr, temp_src_vaddr, 4096);

    // unmap both temporary windows again
    setMapping((uintptr_t)temp_src_vaddr, 0, current_l4);
    setMapping((uintptr_t)temp_dest_vaddr, 0, current_l4);
    return true;
}
void create_basic_page_table(four_lvl_paging_t* table, pagetable_t* kernel_identity){
assert(table);
@@ -104,17 +151,19 @@ void create_basic_page_table(four_lvl_paging_t* table, pagetable_t* kernel_ident
kernel_identity[0].entries[0].present = 0;
}
uintptr_t isMapped(uintptr_t vaddr, four_lvl_paging_t* flpt){
uintptr_t isMapped(uintptr_t vaddr, pagetable_t* l4){
uint16_t l4Index = (vaddr>>39) & 0x1FF;
uint16_t l3Index = (vaddr>>30) & 0x1FF;
uint16_t l2Index = (vaddr>>21) & 0x1FF;
uint16_t l1Index = (vaddr>>12) & 0x1FF;
if(flpt->l4->entries[l4Index].present){
pagetable_t* lvl3 = (pagetable_t*)(flpt->l4->entries[l4Index].address<<12);
if(l4->entries[l4Index].present){
pagetable_t* lvl3 = (pagetable_t*)(l4->entries[l4Index].address<<12);
if(lvl3->entries[l3Index].present){
pagetable_t* lvl2 = (pagetable_t*)(lvl3->entries[l3Index].address<<12);
if(lvl2->entries[l2Index].present){
if(lvl2->entries[l2Index].pat)
return (lvl2->entries[l2Index].address<<21)+(l1Index<<12);
pagetable_t* lvl1 = (pagetable_t*)(lvl2->entries[l2Index].address<<12);
if(lvl1->entries[l1Index].present)
return lvl1->entries[l1Index].address<<12;
@@ -124,17 +173,17 @@ uintptr_t isMapped(uintptr_t vaddr, four_lvl_paging_t* flpt){
return 0;
}
void* getFreeVirtSpace(four_lvl_paging_t* search_table, uint8_t num_pages){
uint32_t start_v = 0x4000;
uint32_t stop_v = 0x6000;
static uint32_t next_start_v = 0x4000;
//static uint32_t next_start_v = start_v;
void* getFreeVirtSpace(pagetable_t* l4, uint8_t num_pages){
uint64_t start_v = 0x4000;
uint64_t stop_v = 0x200000;
static uint64_t next_start_v = 0x4000;
//static uint64_t next_start_v = start_v;
//start from last found address
for (uint32_t v=next_start_v; v<stop_v; v++) {
for (uint64_t v=next_start_v; v<stop_v; v++) {
bool space_is_free = true;
for(uint32_t i=0; i<num_pages; i++){
if(isMapped((uintptr_t)(v+i)<<12, search_table)) {
if(isMapped((uintptr_t)(v+i)<<12, l4)) {
space_is_free = false;
}
}
@@ -144,10 +193,10 @@ void* getFreeVirtSpace(four_lvl_paging_t* search_table, uint8_t num_pages){
}
}
//start again from the real start address
for (uint32_t v=start_v; v<next_start_v; v++) {
for (uint64_t v=start_v; v<next_start_v; v++) {
bool space_is_free = true;
for(uint32_t i=0; i<num_pages; i++){
if(isMapped((uintptr_t)(v+i)<<12, search_table)) {
if(isMapped((uintptr_t)(v+i)<<12, l4)) {
space_is_free = false;
}
}
@@ -160,16 +209,16 @@ void* getFreeVirtSpace(four_lvl_paging_t* search_table, uint8_t num_pages){
}
void setMapping(uintptr_t vaddr, void* frame, four_lvl_paging_t* flpt, bool write){
void setMapping(uintptr_t vaddr, void* frame, pagetable_t* l4, bool write, bool remap){
uint16_t l4Index = (vaddr>>39) & 0x1FF;
uint16_t l3Index = (vaddr>>30) & 0x1FF;
uint16_t l2Index = (vaddr>>21) & 0x1FF;
uint16_t l1Index = (vaddr>>12) & 0x1FF;
if(!(flpt->l4->entries[l4Index].present)){
if(!(l4->entries[l4Index].present)){
pagetable_t* newl3 = (pagetable_t*)PageFrameAllocator::alloc(true);
memset(newl3, 0, 4096);
flpt->l4->entries[l4Index] = {
l4->entries[l4Index] = {
.present = 1,
.write = 1,
.user = 1,
@@ -177,7 +226,7 @@ void setMapping(uintptr_t vaddr, void* frame, four_lvl_paging_t* flpt, bool writ
};
}
pagetable_t* lvl3 = (pagetable_t*)(flpt->l4->entries[l4Index].address<<12);
pagetable_t* lvl3 = (pagetable_t*)(l4->entries[l4Index].address<<12);
if(!(lvl3->entries[l3Index].present)){
pagetable_t* newl2 = (pagetable_t*)PageFrameAllocator::alloc(true);
memset(newl2, 0, 4096);
@@ -204,7 +253,8 @@ void setMapping(uintptr_t vaddr, void* frame, four_lvl_paging_t* flpt, bool writ
pagetable_t* lvl1 = (pagetable_t*)(lvl2->entries[l2Index].address<<12);
if(frame){
assert(!(lvl1->entries[l1Index].present)); // should not be present, bc its a new mapping
if(!remap)
assert(!(lvl1->entries[l1Index].present)); // should not be present, bc its a new mapping
lvl1->entries[l1Index] = {
.present = 1,
.write = write,
@@ -212,8 +262,11 @@ void setMapping(uintptr_t vaddr, void* frame, four_lvl_paging_t* flpt, bool writ
.address = (uintptr_t)frame >> 12
};
}
else //unmap if nullptr
else{ //unmap if nullptr
lvl1->entries[l1Index].present = 0;
if((void*)l4 == (void*)get_cr3())
invlpg(vaddr);
}
}
void copy_pagetable(four_lvl_paging_t* parent_table, four_lvl_paging_t* child_table){
@@ -233,17 +286,17 @@ void copy_pagetable(four_lvl_paging_t* parent_table, four_lvl_paging_t* child_ta
((uintptr_t)i1<<12) ;
if(vaddr < 0x4000000)
continue;
if(!isMapped(vaddr, child_table)){
if(!isMapped(vaddr, child_table->l4)){
//only copy if not part of basic pagetable
assert( (i3>0) || (i2>=32)); //assert user memory
void* frame = PageFrameAllocator::alloc(false);
setMapping(vaddr, frame, child_table);
setMapping(vaddr, frame, child_table->l4);
//map to local space for copy
void* dest = getFreeVirtSpace(parent_table, 1);
setMapping((uintptr_t)dest, frame, parent_table);
void* dest = getFreeVirtSpace(parent_table->l4, 1);
setMapping((uintptr_t)dest, frame, parent_table->l4);
memcpy(dest, (void*)vaddr, 4096);
setMapping((uintptr_t)dest, 0, parent_table);
setMapping((uintptr_t)dest, 0, parent_table->l4);
}
}
}

View File

@@ -50,10 +50,11 @@ typedef struct {
void write_identity_map(pagetable_t* identity_table, uint64_t size);
void create_basic_page_table(four_lvl_paging_t* table, pagetable_t* kernel_identity);
void load_cr3(void* cr3_value);
void setMapping(uintptr_t vaddr, void* frame, four_lvl_paging_t* flpt, bool write=true);
uintptr_t isMapped(uintptr_t vaddr, four_lvl_paging_t* flpt);
void* getFreeVirtSpace(four_lvl_paging_t* search_table, uint8_t num_pages);
void setMapping(uintptr_t vaddr, void* frame, pagetable_t* l4, bool write=true, bool remap=false);
uintptr_t isMapped(uintptr_t vaddr, pagetable_t* l4);
void* getFreeVirtSpace(pagetable_t* l4, uint8_t num_pages);
void copy_pagetable(four_lvl_paging_t* parent_table, four_lvl_paging_t* child_table);
bool copy_page(pagetable_t* l4, uintptr_t src_paddr, void* dest_vaddr);
//typedef struct {
// same