kern/arch/x86/sys/atom.S
/* See the end of this file for copyright and license terms. */
#include <asm/common.h>
/*
 * These routines are only really used in debug builds, where everything is
 * compiled with -O0; with optimizations enabled, clang just takes the inline
 * definitions from the header file instead (what an elegant way of creating
 * bugs that only appear in optimized code!).  Therefore, we sacrifice a bit
 * of performance for the sake of being nicer to gdb by creating frame
 * pointers.
 */
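/*
 * For orientation, a rough C sketch of the contract these routines implement.
 * The layout of atom_t is an assumption here (a single volatile int member
 * called _value); the real definition lives in the header.
 *
 *	typedef struct {
 *		volatile int _value;
 *	} atom_t;
 *
 *	int atom_read(const atom_t *atom)
 *	{
 *		return atom->_value;	// an aligned 32-bit load is atomic on x86
 *	}
 */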
/* int atom_read(const atom_t *atom) */
ASM_ENTRY(atom_read)
push %ebp
mov %esp, %ebp
mov 8(%ebp), %ecx
mov (%ecx), %eax
pop %ebp
ret
ASM_END(atom_read)
/* int atom_write(atom_t *atom, int val) */
ASM_ENTRY(atom_write)
push %ebp
mov %esp, %ebp
mov 8(%ebp), %edx
mov 12(%ebp), %ecx
mov (%edx), %eax
1: lock
cmpxchgl %ecx, (%edx)
jne 1b
pop %ebp
ret
ASM_END(atom_write)
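/*
 * atom_write is a compare-and-swap loop that retries until it exchanges the
 * value it last saw; on failure, cmpxchg conveniently reloads %eax with the
 * current memory contents, so the retry always compares against fresh data.
 * A hedged C sketch of the semantics (the builtin is illustration only, not
 * necessarily what the header uses):
 *
 *	int atom_write(atom_t *atom, int val)
 *	{
 *		int old = atom->_value;
 *		// on failure, old is refreshed with the current value,
 *		// exactly like cmpxchg refreshes %eax
 *		while (!__atomic_compare_exchange_n(&atom->_value, &old, val,
 *				0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST));
 *		return old;
 *	}
 *
 * A plain xchg (which is implicitly locked when it has a memory operand)
 * would also do the job; the cmpxchg loop simply mirrors the pattern used by
 * the other read-modify-write routines below.
 */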
/* bool atom_inc(atom_t *atom) */
ASM_ENTRY(atom_inc)
push %ebp
mov %esp, %ebp
mov 8(%ebp), %edx
/* clear %eax before the incl so setne sees incl's flags, not xor's */
xor %eax, %eax
lock
incl (%edx)
setne %al
pop %ebp
ret
ASM_END(atom_inc)
/* bool atom_dec(atom_t *atom) */
ASM_ENTRY(atom_dec)
push %ebp
mov %esp, %ebp
mov 8(%ebp), %edx
/* clear %eax before the decl so setne sees decl's flags, not xor's */
xor %eax, %eax
lock
decl (%edx)
setne %al
pop %ebp
ret
ASM_END(atom_dec)
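/*
 * Both atom_inc and atom_dec return true iff the new value is nonzero.
 * In rough C (same atom_t assumption as above):
 *
 *	bool atom_inc(atom_t *atom)
 *	{
 *		return __atomic_add_fetch(&atom->_value, 1, __ATOMIC_SEQ_CST) != 0;
 *	}
 */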
/* int atom_add(atom_t *atom, int val) */
ASM_ENTRY(atom_add)
push %ebp
mov %esp, %ebp
mov 8(%ebp), %edx
mov 12(%ebp), %eax
lock
xaddl %eax, (%edx)
pop %ebp
ret
ASM_END(atom_add)
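/*
 * xadd writes back the sum and leaves the previous value in %eax, making
 * atom_add a fetch-and-add that returns the old value.  Sketch:
 *
 *	int atom_add(atom_t *atom, int val)
 *	{
 *		return __atomic_fetch_add(&atom->_value, val, __ATOMIC_SEQ_CST);
 *	}
 */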
/* int atom_sub(atom_t *atom, int val) */
ASM_ENTRY(atom_sub)
push %ebp
mov %esp, %ebp
mov 8(%ebp), %edx
mov 12(%ebp), %eax
/* there is no xsubl, so we add the two's complement */
neg %eax
lock
xaddl %eax, (%edx)
pop %ebp
ret
ASM_END(atom_sub)
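/*
 * neg computes the two's complement, i.e. -val == ~val + 1, so subtraction
 * becomes an addition of the negated operand: atom_sub(atom, val) behaves
 * exactly like atom_add(atom, -val) and likewise returns the old value.
 */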
/* int atom_and(atom_t *atom, int val) */
ASM_ENTRY(atom_and)
push %ebp
mov %esp, %ebp
mov 8(%ebp), %edx
mov (%edx), %eax
1: mov %eax, %ecx
and 12(%ebp), %ecx
lock
cmpxchgl %ecx, (%edx)
jne 1b
pop %ebp
ret
ASM_END(atom_and)
/* int atom_or(atom_t *atom, int val) */
ASM_ENTRY(atom_or)
push %ebp
mov %esp, %ebp
mov 8(%ebp), %edx
mov (%edx), %eax
1: mov %eax, %ecx
or 12(%ebp), %ecx
lock
cmpxchgl %ecx, (%edx)
jne 1b
pop %ebp
ret
ASM_END(atom_or)
/* int atom_xor(atom_t *atom, int val) */
ASM_ENTRY(atom_xor)
push %ebp
mov %esp, %ebp
mov 8(%ebp), %edx
mov (%edx), %eax
1: mov %eax, %ecx
xor 12(%ebp), %ecx
lock
cmpxchgl %ecx, (%edx)
jne 1b
pop %ebp
ret
ASM_END(atom_xor)
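/*
 * atom_and, atom_or and atom_xor all follow the same pattern: read the old
 * value, compute old OP val into a scratch register, then try to cmpxchg it
 * back in, looping until no other CPU raced us.  In rough C (sketch only,
 * using the xor variant as the example):
 *
 *	int atom_xor(atom_t *atom, int val)
 *	{
 *		int old = atom->_value;
 *		// old ^ val is recomputed with the refreshed old on each retry
 *		while (!__atomic_compare_exchange_n(&atom->_value, &old,
 *				old ^ val, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST));
 *		return old;
 *	}
 */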
/* bool atom_set_bit(atom_t *atom, int bit) */
ASM_ENTRY(atom_set_bit)
push %ebp
mov %esp, %ebp
push %ebx
mov 8(%ebp), %edx
mov 12(%ebp), %ecx
mov $1, %ebx
shl %cl, %ebx
mov (%edx), %eax
1: mov %eax, %ecx
or %ebx, %ecx
lock
cmpxchgl %ecx, (%edx)
jne 1b
/* return true if bit was clear before */
not %eax
and %ebx, %eax
/* the loop clobbered %cl, reload the shift count */
mov 12(%ebp), %ecx
shr %cl, %eax
pop %ebx
pop %ebp
ret
ASM_END(atom_set_bit)
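/*
 * In C terms (sketch, same caveats as above):
 *
 *	bool atom_set_bit(atom_t *atom, int bit)
 *	{
 *		int mask = 1 << bit;
 *		int old = __atomic_fetch_or(&atom->_value, mask, __ATOMIC_SEQ_CST);
 *		return (old & mask) == 0;	// true if the bit was clear
 *	}
 *
 * x86 also has lock bts, which tests and sets a bit in one instruction and
 * leaves the old bit in CF; that would shorten this routine, at the cost of
 * diverging from the cmpxchg pattern used everywhere else in this file.
 */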
/* bool atom_clr_bit(atom_t *atom, int bit) */
ASM_ENTRY(atom_clr_bit)
push %ebp
mov %esp, %ebp
push %ebx
mov 8(%ebp), %edx
mov 12(%ebp), %ecx
mov $0xfffffffe, %ebx
rol %cl, %ebx
mov (%edx), %eax
1: mov %eax, %ecx
and %ebx, %ecx
lock
cmpxchgl %ecx, (%edx)
jne 1b
/* return true if bit was set before */
not %ebx
and %ebx, %eax
/* the loop clobbered %cl, reload the shift count */
mov 12(%ebp), %ecx
shr %cl, %eax
pop %ebx
pop %ebp
ret
ASM_END(atom_clr_bit)
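/*
 * The mask here is built by rotating 0xfffffffe (all ones except bit 0)
 * left by the bit index, which yields ~(1 << bit) without needing a
 * separate not.  C sketch:
 *
 *	bool atom_clr_bit(atom_t *atom, int bit)
 *	{
 *		int mask = 1 << bit;
 *		int old = __atomic_fetch_and(&atom->_value, ~mask, __ATOMIC_SEQ_CST);
 *		return (old & mask) != 0;	// true if the bit was set
 *	}
 *
 * lock btr would be the corresponding single-instruction alternative.
 */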
/*
* This file is part of GayBSD.
* Copyright (c) 2021 fef <owo@fef.moe>.
*
* GayBSD is nonviolent software: you may only use, redistribute, and/or
* modify it under the terms of the Cooperative Nonviolent Public License
* (CNPL) as found in the LICENSE file in the source code root directory
* or at <https://git.pixie.town/thufie/npl-builder>; either version 7
* of the license, or (at your option) any later version.
*
* GayBSD comes with ABSOLUTELY NO WARRANTY, to the extent
* permitted by applicable law. See the CNPL for details.
*/