/* Copyright (C) 2021,2022 fef.  All rights reserved. */

#include

/*
 * Alright, a lot of stuff that is not immediately obvious to someone who
 * hasn't done this sort of thing before is going on here, and since I'm
 * totally new to x86 myself, here is some excessive documentation of the
 * entire process (which will hopefully also help other newcomers understand
 * the actual switching mechanism).  I think the main reason this particular
 * function might seem a little confusing is that it returns to a different
 * place than where it came from, which is kind of the whole point if you
 * think about it.
 *
 * This routine is called from within kernel space, and will perform a switch
 * to another task that also runs in kernel space.  So, this has nothing to do
 * with changing ring levels.  When another task switches back to the original
 * task, that original task just returns to where it called this function, as
 * if nothing had happened in the meantime.
 *
 * As per the System V amd64 ABI, the two arguments `new' and `old' are passed
 * in the scratch registers %rdi and %rsi respectively, and the return address
 * is stored on the stack.
 *
 * What we need to do now is store all callee-saved registers (which
 * critically include the stack pointer) into `old', load their values from
 * `new' (again, including the stack pointer), and then just return.  The
 * scratch (caller-saved) registers don't need to be preserved here, because
 * the compiler already assumes they are clobbered across any function call.
 * The new stack pointer will point to a stack that looks just like the
 * current one, except it belongs to the new task we are switching to: its
 * topmost entry is the %rip from when that task itself called
 * arch_switch_to(), so we automatically continue that task when returning.
 */

	.text

/* void arch_switch_to(tcb_t *new, tcb_t *old); */
ASM_ENTRY(arch_switch_to)
	movq	%rsp,   (%rsi)	/* old->rsp = %rsp */
	movq	%rbx,  8(%rsi)	/* old->rbx = %rbx */
	movq	%rbp, 16(%rsi)	/* ... */
	movq	%r12, 24(%rsi)
	movq	%r13, 32(%rsi)
	movq	%r14, 40(%rsi)
	movq	%r15, 48(%rsi)

	movq	  (%rdi), %rsp	/* %rsp = new->rsp */
	movq	 8(%rdi), %rbx	/* %rbx = new->rbx */
	movq	16(%rdi), %rbp	/* ... */
	movq	24(%rdi), %r12
	movq	32(%rdi), %r13
	movq	40(%rdi), %r14
	movq	48(%rdi), %r15

	retq
ASM_END(arch_switch_to)
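
/*
 * For illustration, here is a minimal C sketch of the tcb_t layout that the
 * offsets above assume.  This is NOT the authoritative definition (that
 * lives in the arch headers); the field names are made up here, only the
 * order and sizes matter:
 *
 *	#include <stdint.h>
 *
 *	typedef struct tcb {
 *		uint64_t rsp;	// offset  0
 *		uint64_t rbx;	// offset  8
 *		uint64_t rbp;	// offset 16
 *		uint64_t r12;	// offset 24
 *		uint64_t r13;	// offset 32
 *		uint64_t r14;	// offset 40
 *		uint64_t r15;	// offset 48
 *	} tcb_t;
 *
 * A scheduler might then invoke this routine along the lines of the snippet
 * below (again, the task struct, its fields, and current_task() are
 * hypothetical names, not the kernel's actual API):
 *
 *	void schedule(struct task *next)
 *	{
 *		struct task *prev = current_task();
 *		arch_switch_to(&next->tcb, &prev->tcb);
 *		// When some other task eventually switches back to prev,
 *		// execution resumes right here, on prev's own stack.
 *	}
 */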