path: root/src/kernel/arch/amd64/interrupts/isr_stub.s
author    dzwdz 2024-07-22 18:05:07 +0200
committer dzwdz 2024-07-22 18:05:07 +0200
commit    2a09d77902c5007fb30c40f9c89ecb46aedf4006 (patch)
tree      35e3f3332982fc359941e8cbc4ed5e3c5bf73425 /src/kernel/arch/amd64/interrupts/isr_stub.s
parent    1fc2d9c88af77c3af81b6bd461e6b019b97d2dbb (diff)
kernel/isr: improve interrupt handling code
On the assembly side, ensure the stack frame always looks the same by pushing a fake "error code" for the interrupts that don't generate one. On the C side, use a struct instead of magic indices into an "array", and make it consistent with the current style.
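
The C-side change is not part of this file's diff; as a minimal sketch, the "struct instead of magic indices" could look roughly like the following, assuming hypothetical names (iret_frame, isr_stage3) that are not taken from this tree. Field order mirrors the hardware IRET frame that the stub passes as the second argument.

#include <stdint.h>

/* Sketch only: the fields follow the order the CPU pushes them. */
struct iret_frame {
	uint64_t rip;
	uint64_t cs;
	uint64_t rflags;
	uint64_t rsp;
	uint64_t ss;
};

/* Hypothetical C entry point: first argument is the vector number the
 * stub computes, second points at the IRET frame on the stack. */
void isr_stage3(uint64_t vector, struct iret_frame *frame);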
Diffstat (limited to 'src/kernel/arch/amd64/interrupts/isr_stub.s')
-rw-r--r--  src/kernel/arch/amd64/interrupts/isr_stub.s | 22
1 file changed, 17 insertions, 5 deletions
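
For context, the vectors excluded by the new .if in the diff below (8, 10-14, 17, 21, 29, 30) are exactly the exceptions for which the CPU pushes an error code itself: #DF, #TS, #NP, #SS, #GP, #PF, #AC, #CP, #VC and #SX. A small C sketch of that predicate, with an illustrative name not taken from this tree:

#include <stdbool.h>

/* Illustrative only: true for the exception vectors where the CPU itself
 * pushes an error code, matching the .if list in the assembly below. */
static bool cpu_pushes_error_code(int vector)
{
	switch (vector) {
	case 8:  case 10: case 11: case 12: case 13:
	case 14: case 17: case 21: case 29: case 30:
		return true;
	default:
		return false;
	}
}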
diff --git a/src/kernel/arch/amd64/interrupts/isr_stub.s b/src/kernel/arch/amd64/interrupts/isr_stub.s
index 75934d5..3134808 100644
--- a/src/kernel/arch/amd64/interrupts/isr_stub.s
+++ b/src/kernel/arch/amd64/interrupts/isr_stub.s
@@ -2,21 +2,27 @@
.global _isr_stubs
_isr_stubs:
+.set i, 0
.rept 256
.set _stub_start, .
cli
+ .if i != 8 && i != 10 && i != 11 && i != 12 && i != 13 && i != 14 && i != 17 && i != 21 && i != 29 && i != 30
+ /* no error code was pushed - push anything, just to line up the stack */
+ push %rbx
+ .endif
call _isr_stage2
.if . - _stub_start > 8
.error "isr stubs over maximum size"
.abort
.endif
- .align 8
+ .align 8, 0xCC
+ .set i, i + 1
.endr
_isr_stage2:
- // pushal order, without %esp
+ /* pushal order, without %esp. 15 in total */
push %rax
push %rcx
push %rdx
@@ -33,13 +39,19 @@ _isr_stage2:
push %r14
push %r15
// TODO FXSAVE might be required on interrupts too?
+ // on interrupts - no
+ // on syscalls - yes
+ // have fun figuring that one out
+ // basically if you're context switching, FXSAVE
- // convert the return address into the vector nr
+ /* first argument: vector nr. computed from the return address at offset
+ * 15*8 = 120 */
mov 120(%rsp), %rdi
sub $_isr_stubs, %rdi
shr $3, %rdi
- lea 128(%rsp), %rsi // second argument - IRET stack frame
+ /* second argument: IRET stack frame */
+ lea 136(%rsp), %rsi
// load kernel paging
mov %cr3, %rbx
@@ -72,7 +84,7 @@ _isr_stage2:
pop %rcx
pop %rax
- add $8, %rsp // skip call's return address
+ add $16, %rsp /* skip return address from call and the error code */
iretq
.align 8
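
For reference, the offsets used above (120, 136, and the final add $16) follow from the stack layout at the point where the handler's arguments are computed. A sketch of that layout as a C struct, lowest address first; the name is illustrative, not from this tree:

#include <stdint.h>

/* Sketch of the stack as seen at the mov/lea above. The 15 saved GPRs
 * occupy 15*8 = 120 bytes, so the stub's return address sits at
 * 120(%rsp), the real or fake error code at 128(%rsp), and the IRET
 * frame at 136(%rsp). */
struct isr_stack {
	uint64_t gprs[15];   /* the 15 pushes above, last push (%r15) at
	                        the lowest address */
	uint64_t stub_ret;   /* return address into _isr_stubs; the vector
	                        nr is (stub_ret - _isr_stubs) >> 3, since
	                        each stub is padded to 8 bytes */
	uint64_t error_code; /* pushed by the CPU, or the fake push %rbx */
	uint64_t rip, cs, rflags, rsp, ss;  /* hardware IRET frame */
};

On the way out, the register pops bring %rsp back to stub_ret, and the add $16, %rsp discards stub_ret and error_code together, leaving %rsp pointing at the IRET frame for iretq.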