Implement interrupts, serial, keyboard input and commands added previously #6

Merged · 29 commits · Jul 20, 2024
Commits
d2466a1
interrrupts.....
adamperkowski Jul 18, 2024
ce715f1
fixed
adamperkowski Jul 18, 2024
507134e
double faults
adamperkowski Jul 18, 2024
8d5416a
Implement serial
adamperkowski Jul 18, 2024
a39e524
cargo fixes
adamperkowski Jul 18, 2024
c07fec8
add author
adamperkowski Jul 18, 2024
dccd57f
todo
adamperkowski Jul 18, 2024
4650275
CHECKPOINT
adamperkowski Jul 18, 2024
d926254
Hardware Interrupts Implemented
adamperkowski Jul 18, 2024
30ad34d
Keyboard Input
adamperkowski Jul 18, 2024
e770195
Merge branch 'main' into kbicmd
adamperkowski Jul 18, 2024
8297a74
Merge branch 'main' into kbicmd
adamperkowski Jul 18, 2024
9ffe502
Add page fault handling
adamperkowski Jul 18, 2024
e473225
Accessing tables
adamperkowski Jul 18, 2024
f507d87
move shell to res
adamperkowski Jul 18, 2024
8087d68
Everything works
adamperkowski Jul 18, 2024
7afff11
Add correct version displaying
adamperkowski Jul 19, 2024
99b5949
Heap allocation works
adamperkowski Jul 19, 2024
338107b
...
adamperkowski Jul 19, 2024
c3a5620
Allocator from scratch
adamperkowski Jul 19, 2024
2f58af5
Clippy calm down
adamperkowski Jul 19, 2024
3165508
Move res to cmd
adamperkowski Jul 19, 2024
4edaa57
commands integrated
adamperkowski Jul 19, 2024
3240986
TODO
adamperkowski Jul 19, 2024
8068612
howing rawkeys only for debug
adamperkowski Jul 19, 2024
85afd88
saving input
adamperkowski Jul 19, 2024
5d8378a
upate the panic handler loop
adamperkowski Jul 20, 2024
2a2b997
DONE
adamperkowski Jul 20, 2024
0f5233a
better init system
adamperkowski Jul 20, 2024
2 changes: 1 addition & 1 deletion shell/.cargo/config.toml
@@ -1,5 +1,5 @@
 [unstable]
-build-std = ["core", "compiler_builtins"]
+build-std = ["core", "compiler_builtins", "alloc"]
 build-std-features = ["compiler-builtins-mem"]
 
 [build]
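Adding `alloc` to `build-std` makes cargo cross-compile the `alloc` crate for the custom target, which is what the new heap allocator below relies on. As a reminder of the mechanism (a fragment, not part of this diff), the crate root then opts in with:

```rust
#![no_std]

// `alloc` becomes usable once build-std compiles it for the target;
// this brings Box, Vec, String, etc. into a no_std kernel.
extern crate alloc;

use alloc::{boxed::Box, vec::Vec};
```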
11 changes: 10 additions & 1 deletion shell/Cargo.toml
@@ -1,12 +1,21 @@
 [package]
 name = "hlshell"
 version = "0.3.0"
+authors = ["Adam Perkowski <[email protected]>"]
 edition = "2021"
 
 [dependencies]
-bootloader = "0.9"
 volatile = "0.2.6"
 spin = "0.5.2"
 x86_64 = "0.14.2"
+uart_16550 = "0.2.0"
+pic8259 = "0.10.1"
+pc-keyboard = "0.7.0"
+linked_list_allocator = "0.9.0"
+
+[dependencies.bootloader]
+version = "0.9"
+features = ["map_physical_memory"]
 
 [dependencies.lazy_static]
 version = "1.0"
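The new dependencies line up with the PR's feature list: `uart_16550` drives the serial port, `pic8259` remaps the hardware interrupt controller, `pc-keyboard` decodes scancodes, and `linked_list_allocator` backs the heap fallback. As a sketch of the serial side only (0x3F8 is the conventional COM1 I/O base; the PR's actual serial module is not shown in this diff excerpt):

```rust
use lazy_static::lazy_static;
use spin::Mutex;
use uart_16550::SerialPort;

lazy_static! {
    /// Global handle to the first serial port (COM1, conventionally at 0x3F8).
    pub static ref SERIAL1: Mutex<SerialPort> = {
        // Unsafe: the caller asserts 0x3F8 really is a serial port base address.
        let mut serial_port = unsafe { SerialPort::new(0x3F8) };
        serial_port.init();
        Mutex::new(serial_port)
    };
}
```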
4 changes: 4 additions & 0 deletions shell/TODO
@@ -0,0 +1,4 @@
- RTC
- update dependencies
- add debug commands that exist only in debug build
- add 'other' section to commands that lets you create commands and install them via 'patching'
80 changes: 80 additions & 0 deletions shell/src/allocator.rs
@@ -0,0 +1,80 @@
use alloc::alloc::{GlobalAlloc, Layout};
use core::ptr::null_mut;
use fixed_size_block::FixedSizeBlockAllocator;
use x86_64::{
    structures::paging::{
        mapper::MapToError, FrameAllocator, Mapper, Page, PageTableFlags, Size4KiB,
    },
    VirtAddr,
};

pub mod bump;
pub mod fixed_size_block;
pub mod linked_list;

pub const HEAP_START: usize = 0x_4444_4444_0000;
pub const HEAP_SIZE: usize = 100 * 1024; // 100 KiB

#[global_allocator]
static ALLOCATOR: Locked<FixedSizeBlockAllocator> = Locked::new(FixedSizeBlockAllocator::new());

/// Maps the kernel heap pages and hands the region to the global allocator.
pub fn init_heap(
    mapper: &mut impl Mapper<Size4KiB>,
    frame_allocator: &mut impl FrameAllocator<Size4KiB>,
) -> Result<(), MapToError<Size4KiB>> {
    let page_range = {
        let heap_start = VirtAddr::new(HEAP_START as u64);
        let heap_end = heap_start + HEAP_SIZE - 1u64;
        let heap_start_page = Page::containing_address(heap_start);
        let heap_end_page = Page::containing_address(heap_end);

        Page::range_inclusive(heap_start_page, heap_end_page)
    };

    for page in page_range {
        let frame = frame_allocator
            .allocate_frame()
            .ok_or(MapToError::FrameAllocationFailed)?;
        let flags = PageTableFlags::PRESENT | PageTableFlags::WRITABLE;

        unsafe { mapper.map_to(page, frame, flags, frame_allocator)?.flush() };
    }

    unsafe {
        ALLOCATOR.lock().init(HEAP_START, HEAP_SIZE);
    }

    Ok(())
}

/// Stub allocator whose `alloc` always signals out-of-memory by returning null.
pub struct Dummy;

unsafe impl GlobalAlloc for Dummy {
    unsafe fn alloc(&self, _layout: Layout) -> *mut u8 {
        null_mut()
    }

    unsafe fn dealloc(&self, _ptr: *mut u8, _layout: Layout) {
        panic!("dealloc should never be called")
    }
}

/// Wrapper around a spin::Mutex that allows trait implementations
/// (such as GlobalAlloc) on the locked type.
pub struct Locked<A> {
    inner: spin::Mutex<A>,
}

impl<A> Locked<A> {
    pub const fn new(inner: A) -> Self {
        Locked {
            inner: spin::Mutex::new(inner),
        }
    }

    pub fn lock(&self) -> spin::MutexGuard<A> {
        self.inner.lock()
    }
}

/// Aligns `addr` upwards to `align`, which must be a power of two.
fn align_up(addr: usize, align: usize) -> usize {
    (addr + align - 1) & !(align - 1)
}
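For context, a sketch of how `init_heap` is typically wired up from the kernel entry point in a blog_os-style layout like this one. `memory::init` and `memory::BootInfoFrameAllocator` are assumed helpers from a sibling module that is not part of this diff:

```rust
use bootloader::{entry_point, BootInfo};
use x86_64::VirtAddr;

entry_point!(kernel_main);

fn kernel_main(boot_info: &'static BootInfo) -> ! {
    // Both helpers below are assumed from a `memory` module (not in this diff).
    let phys_mem_offset = VirtAddr::new(boot_info.physical_memory_offset);
    let mut mapper = unsafe { memory::init(phys_mem_offset) };
    let mut frame_allocator =
        unsafe { memory::BootInfoFrameAllocator::init(&boot_info.memory_map) };

    allocator::init_heap(&mut mapper, &mut frame_allocator)
        .expect("heap initialization failed");

    // From here on, heap types from `alloc` just work.
    let banner = alloc::boxed::Box::new("heap is live");
    let _ = banner;

    loop {}
}
```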
55 changes: 55 additions & 0 deletions shell/src/allocator/bump.rs
@@ -0,0 +1,55 @@
use super::{align_up, Locked};
use alloc::alloc::{GlobalAlloc, Layout};
use core::ptr;

pub struct BumpAllocator {
    heap_start: usize,
    heap_end: usize,
    next: usize,
    allocations: usize,
}

impl BumpAllocator {
    pub const fn new() -> Self {
        BumpAllocator {
            heap_start: 0,
            heap_end: 0,
            next: 0,
            allocations: 0,
        }
    }

    /// Initializes the allocator with the given heap bounds.
    ///
    /// Unsafe because the caller must guarantee that the memory range is
    /// unused and valid.
    pub unsafe fn init(&mut self, heap_start: usize, heap_size: usize) {
        self.heap_start = heap_start;
        self.heap_end = heap_start.saturating_add(heap_size);
        self.next = heap_start;
    }
}

unsafe impl GlobalAlloc for Locked<BumpAllocator> {
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        let mut bump = self.lock();
        let alloc_start = align_up(bump.next, layout.align());
        let alloc_end = match alloc_start.checked_add(layout.size()) {
            Some(end) => end,
            None => return ptr::null_mut(),
        };

        if alloc_end > bump.heap_end {
            ptr::null_mut() // out of heap memory
        } else {
            bump.next = alloc_end;
            bump.allocations += 1;
            alloc_start as *mut u8
        }
    }

    unsafe fn dealloc(&self, _ptr: *mut u8, _layout: Layout) {
        let mut bump = self.lock();

        // A bump allocator cannot free individual allocations: the heap is
        // only reset once every outstanding allocation has been returned.
        bump.allocations -= 1;
        if bump.allocations == 0 {
            bump.next = bump.heap_start;
        }
    }
}
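The bump allocator trades flexibility for simplicity: `next` only ever moves forward, and memory is reclaimed in one step when the `allocations` counter hits zero. To experiment with it instead of the fixed-size-block allocator, the `#[global_allocator]` in allocator.rs could be swapped; a sketch, not what this PR ships:

```rust
use crate::allocator::{bump::BumpAllocator, Locked};

#[global_allocator]
static ALLOCATOR: Locked<BumpAllocator> = Locked::new(BumpAllocator::new());

// ...and in init_heap, after the heap pages are mapped:
// unsafe { ALLOCATOR.lock().init(HEAP_START, HEAP_SIZE); }
```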
86 changes: 86 additions & 0 deletions shell/src/allocator/fixed_size_block.rs
@@ -0,0 +1,86 @@
use super::Locked;
use alloc::alloc::{GlobalAlloc, Layout};
use core::{
    mem,
    ptr::{self, NonNull},
};

/// The block sizes to use. Each must be a power of two because the block
/// size doubles as the block alignment.
const BLOCK_SIZES: &[usize] = &[8, 16, 32, 64, 128, 256, 512, 1024, 2048];

/// Picks the smallest block size that fits both the size and the alignment
/// of the given layout, returning its index into BLOCK_SIZES.
fn list_index(layout: &Layout) -> Option<usize> {
    let required_block_size = layout.size().max(layout.align());
    BLOCK_SIZES.iter().position(|&s| s >= required_block_size)
}

struct ListNode {
    next: Option<&'static mut ListNode>,
}

pub struct FixedSizeBlockAllocator {
    list_heads: [Option<&'static mut ListNode>; BLOCK_SIZES.len()],
    fallback_allocator: linked_list_allocator::Heap,
}

impl FixedSizeBlockAllocator {
    pub const fn new() -> Self {
        const EMPTY: Option<&'static mut ListNode> = None;
        FixedSizeBlockAllocator {
            list_heads: [EMPTY; BLOCK_SIZES.len()],
            fallback_allocator: linked_list_allocator::Heap::empty(),
        }
    }

    pub unsafe fn init(&mut self, heap_start: usize, heap_size: usize) {
        self.fallback_allocator.init(heap_start, heap_size);
    }

    /// Allocates using the fallback allocator.
    fn fallback_alloc(&mut self, layout: Layout) -> *mut u8 {
        match self.fallback_allocator.allocate_first_fit(layout) {
            Ok(ptr) => ptr.as_ptr(),
            Err(_) => ptr::null_mut(),
        }
    }
}

unsafe impl GlobalAlloc for Locked<FixedSizeBlockAllocator> {
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        let mut allocator = self.lock();
        match list_index(&layout) {
            Some(index) => match allocator.list_heads[index].take() {
                // Reuse a block from the matching free list if one exists.
                Some(node) => {
                    allocator.list_heads[index] = node.next.take();
                    node as *mut ListNode as *mut u8
                }
                // No free block left: carve a new one out of the fallback heap.
                None => {
                    let block_size = BLOCK_SIZES[index];
                    let block_align = block_size;
                    let layout = Layout::from_size_align(block_size, block_align).unwrap();
                    allocator.fallback_alloc(layout)
                }
            },
            None => allocator.fallback_alloc(layout),
        }
    }

    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        let mut allocator = self.lock();
        match list_index(&layout) {
            Some(index) => {
                // Push the freed block onto the matching free list; the block
                // must be large and aligned enough to hold a ListNode.
                let new_node = ListNode {
                    next: allocator.list_heads[index].take(),
                };

                assert!(mem::size_of::<ListNode>() <= BLOCK_SIZES[index]);
                assert!(mem::align_of::<ListNode>() <= BLOCK_SIZES[index]);
                let new_node_ptr = ptr as *mut ListNode;
                new_node_ptr.write(new_node);
                allocator.list_heads[index] = Some(&mut *new_node_ptr);
            }
            None => {
                let ptr = NonNull::new(ptr).unwrap();
                allocator.fallback_allocator.deallocate(ptr, layout);
            }
        }
    }
}
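`list_index` rounds each request up to the first block size that covers both its size and its alignment; anything above 2048 bytes goes to the linked-list fallback. A unit-test-style sketch of the selection (it would have to live inside this module, since `list_index` is private):

```rust
use alloc::alloc::Layout;

#[test]
fn block_size_selection() {
    // max(5, 1) = 5 -> first block size >= 5 is 8 (index 0).
    assert_eq!(list_index(&Layout::from_size_align(5, 1).unwrap()), Some(0));
    // max(20, 16) = 20 -> first block size >= 20 is 32 (index 2).
    assert_eq!(list_index(&Layout::from_size_align(20, 16).unwrap()), Some(2));
    // 4096 exceeds every entry in BLOCK_SIZES -> None, served by the fallback.
    assert_eq!(list_index(&Layout::from_size_align(4096, 8).unwrap()), None);
}
```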
114 changes: 114 additions & 0 deletions shell/src/allocator/linked_list.rs
@@ -0,0 +1,114 @@
use super::{align_up, Locked};
use alloc::alloc::{GlobalAlloc, Layout};
use core::{mem, ptr};

struct ListNode {
    size: usize,
    next: Option<&'static mut ListNode>,
}

impl ListNode {
    const fn new(size: usize) -> Self {
        ListNode { size, next: None }
    }

    fn start_addr(&self) -> usize {
        self as *const Self as usize
    }

    fn end_addr(&self) -> usize {
        self.start_addr() + self.size
    }
}

pub struct LinkedListAllocator {
    head: ListNode,
}

impl LinkedListAllocator {
    pub const fn new() -> Self {
        Self {
            head: ListNode::new(0),
        }
    }

    pub unsafe fn init(&mut self, heap_start: usize, heap_size: usize) {
        self.add_free_region(heap_start, heap_size);
    }

    /// Adds the given memory region to the front of the free list.
    unsafe fn add_free_region(&mut self, addr: usize, size: usize) {
        // The freed region must be able to hold a ListNode.
        assert_eq!(align_up(addr, mem::align_of::<ListNode>()), addr);
        assert!(size >= mem::size_of::<ListNode>());

        let mut node = ListNode::new(size);
        node.next = self.head.next.take();
        let node_ptr = addr as *mut ListNode;
        node_ptr.write(node);
        self.head.next = Some(&mut *node_ptr)
    }

    /// Looks for a free region that fits an allocation of `size` with `align`,
    /// removes it from the list, and returns it with the usable start address.
    fn find_region(&mut self, size: usize, align: usize) -> Option<(&'static mut ListNode, usize)> {
        let mut current = &mut self.head;
        while let Some(ref mut region) = current.next {
            if let Ok(alloc_start) = Self::alloc_from_region(&region, size, align) {
                let next = region.next.take();
                let ret = Some((current.next.take().unwrap(), alloc_start));
                current.next = next;
                return ret;
            } else {
                current = current.next.as_mut().unwrap();
            }
        }

        None
    }

    /// Checks whether the given region can host an allocation of `size` with
    /// `align`, returning the aligned start address on success.
    fn alloc_from_region(region: &ListNode, size: usize, align: usize) -> Result<usize, ()> {
        let alloc_start = align_up(region.start_addr(), align);
        let alloc_end = alloc_start.checked_add(size).ok_or(())?;

        if alloc_end > region.end_addr() {
            return Err(());
        }

        // Any leftover tail must remain usable as a free-list node.
        let excess_size = region.end_addr() - alloc_end;
        if excess_size > 0 && excess_size < mem::size_of::<ListNode>() {
            return Err(());
        }

        Ok(alloc_start)
    }

    /// Adjusts the layout so that the allocated region is also capable of
    /// storing a ListNode once it is freed again.
    fn size_align(layout: Layout) -> (usize, usize) {
        let layout = layout
            .align_to(mem::align_of::<ListNode>())
            .expect("adjusting alignment failed")
            .pad_to_align();
        let size = layout.size().max(mem::size_of::<ListNode>());
        (size, layout.align())
    }
}

unsafe impl GlobalAlloc for Locked<LinkedListAllocator> {
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        let (size, align) = LinkedListAllocator::size_align(layout);
        let mut allocator = self.lock();

        if let Some((region, alloc_start)) = allocator.find_region(size, align) {
            let alloc_end = alloc_start.checked_add(size).expect("overflow");
            let excess_size = region.end_addr() - alloc_end;
            if excess_size > 0 {
                // Return the unused tail of the region to the free list.
                allocator.add_free_region(alloc_end, excess_size);
            }
            alloc_start as *mut u8
        } else {
            ptr::null_mut()
        }
    }

    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        let (size, _) = LinkedListAllocator::size_align(layout);

        self.lock().add_free_region(ptr as usize, size)
    }
}
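`size_align` guarantees that every allocated region can later be reused as a free-list node: on a 64-bit target `ListNode` (a `usize` plus an `Option<&'static mut ListNode>`) is 16 bytes with 8-byte alignment. A unit-test-style sketch under that assumption, placed inside this module since `size_align` is private:

```rust
use alloc::alloc::Layout;

#[test]
fn size_align_enforces_list_node_minimum() {
    // 1 byte with align 1: alignment raised to 8, size raised to 16.
    let small = Layout::from_size_align(1, 1).unwrap();
    assert_eq!(LinkedListAllocator::size_align(small), (16, 8));

    // 24 bytes with align 8 already fits a ListNode; it passes through.
    let fitting = Layout::from_size_align(24, 8).unwrap();
    assert_eq!(LinkedListAllocator::size_align(fitting), (24, 8));
}
```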