/*
 * hal_cpu.S - Miscellaneous CPU functions
 *
 * Copyright (c) 2017 Maxime Villard
 *
 * This file is part of ALMOS-MKH.
 *
 * ALMOS-MKH is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by
 * the Free Software Foundation; version 2.0 of the License.
 *
 * ALMOS-MKH is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with ALMOS-MKH; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
 */

#define x86_ASM

#include
#include
#include
#include

ASM_ENTRY(lgdt)
	lgdt	(%rdi)
	/* Reload the prefetch queue */
	jmp	1f
	nop
1:	/* Reload stale selectors */
	movl	$GDT_FIXED_SEL(GDT_KDATA_SEL, SEL_KPL),%eax
	movl	%eax,%ds
	movl	%eax,%es
	movl	%eax,%ss
	ret

ASM_ENTRY(lidt)
	lidt	(%rdi)
	ret

ASM_ENTRY(ltr)
	ltr	%di
	ret

ASM_ENTRY(invlpg)
	invlpg	(%rdi)
	ret

ASM_ENTRY(sti)
	sti
	ret

ASM_ENTRY(cli)
	cli
	ret

ASM_ENTRY(rdtsc)
	xorq	%rax,%rax
	rdtsc
	shlq	$32,%rdx
	orq	%rdx,%rax
	ret

ASM_ENTRY(in8)
	movq	%rdi,%rdx
	xorq	%rax,%rax
	inb	%dx,%al
	ret

ASM_ENTRY(in16)
	movq	%rdi,%rdx
	xorq	%rax,%rax
	inw	%dx,%ax
	ret

ASM_ENTRY(out8)
	movq	%rdi,%rdx
	movq	%rsi,%rax
	outb	%al,%dx
	ret

ASM_ENTRY(out16)
	movq	%rdi,%rdx
	movq	%rsi,%rax
	outw	%ax,%dx
	ret

ASM_ENTRY(rdmsr)
	movq	%rdi,%rcx
	xorq	%rax,%rax
	rdmsr
	shlq	$32,%rdx
	orq	%rdx,%rax
	ret

ASM_ENTRY(wrmsr)
	movq	%rdi,%rcx
	movq	%rsi,%rax
	movq	%rsi,%rdx
	shrq	$32,%rdx
	wrmsr
	ret

ASM_ENTRY(mfence)
	mfence
	ret

ASM_ENTRY(rcr0)
	movq	%cr0,%rax
	ret

ASM_ENTRY(rcr2)
	movq	%cr2,%rax
	ret

ASM_ENTRY(rcr3)
	movq	%cr3,%rax
	ret

ASM_ENTRY(lcr3)
	movq	%rdi,%cr3
	ret

ASM_ENTRY(rcr4)
	movq	%cr4,%rax
	ret

ASM_ENTRY(lcr4)
	movq	%rdi,%cr4
	ret

ASM_ENTRY(cpuid)
	movq	%rbx,%r8
	movq	%rdi,%rax
	movq	%rsi,%rcx
	movq	%rdx,%rsi
	cpuid
	movl	%eax,0(%rsi)
	movl	%ebx,4(%rsi)
	movl	%ecx,8(%rsi)
	movl	%edx,12(%rsi)
	movq	%r8,%rbx
	ret

/*
 * To flush all TLB entries, we must re-set the CR4_PGE flag in %cr4.
 */
ASM_ENTRY(tlbflushg)
	movq	%cr4,%rax
	movq	%rax,%rdx
	andq	$~CR4_PGE,%rdx
	movq	%rdx,%cr4
	movq	%rax,%cr4
	ret

ASM_ENTRY(tlbflush)
	movq	%cr3,%rax
	movq	%rax,%cr3
	ret

ASM_ENTRY(clts)
	clts
	ret

ASM_ENTRY(stts)
	movq	%cr0,%rax
	orq	$CR0_TS,%rax
	movq	%rax,%cr0
	ret

ASM_ENTRY(pause)
	pause
	ret

ASM_ENTRY(wbinvd)
	wbinvd
	ret

/* -------------------------------------------------------------------------- */

ASM_ENTRY(cpu_context_switch)
	pushq	%r12
	pushq	%r13

	movq	%rdi,%r13	/* oldctx */
	movq	%rsi,%r12	/* newctx */

	/*
	 * Save the current stack in %rdx, and switch to the trap frame of
	 * the old thread.
	 */
	movq	%rsp,%rdx
	movq	CTX_TF(%r13),%rsp
	addq	$TF_SIZE,%rsp	/* end of the structure */

	/*
	 * Save the TLS intr flag, and set the new one.
	 */
	movq	TLSVAR(INTR),%rax
	movq	%rax,CTX_INTR(%r13)
	movq	CTX_INTR(%r12),%rax
	movq	%rax,TLSVAR(INTR)

	/* Build the trap frame */
	movl	%ss,%eax
	pushq	%rax		/* tf_ss */
	pushq	%rdx		/* tf_rsp */
	pushfq			/* tf_rflags */
	movl	%cs,%eax
	pushq	%rax		/* tf_cs */
	movabsq	$thr_resume,%rax
	pushq	%rax		/* tf_rip */
	pushq	$0		/* tf_err */
	pushq	$T_ASTFLT	/* tf_trapno */
	INTR_SAVE_REGS

	/* Switch rsp0 */
	movq	CTX_RSP0(%r12),%rax
	movq	TLSVAR(RSP0),%rdx
	movq	%rax,(%rdx)

	/* Switch to the new trap frame */
	movq	CTX_TF(%r12),%rsp

	/*
	 * Restore the context, and jump into the new thread.
	 */
	INTR_RESTORE_REGS
	addq	$16,%rsp
	iretq

thr_resume:
	/*
	 * Only pop %r12 and %r13, and return.
	 */
	popq	%r13
	popq	%r12
	ret
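/*
 * C-level view of the switch above -- a minimal sketch, not the actual
 * ALMOS-MKH declarations. It only assumes what the assembly itself uses:
 * %rdi = oldctx and %rsi = newctx (SysV AMD64 calling convention), and a
 * per-thread context holding a trap-frame pointer (CTX_TF), a saved TLS
 * interrupt flag (CTX_INTR) and a kernel stack top (CTX_RSP0). The field
 * layout and the hal_cpu_context_t type name are assumptions.
 *
 *	typedef struct hal_cpu_context_s
 *	{
 *		void *   tf;     // CTX_TF  : pointer to the saved trap frame
 *		uint64_t intr;   // CTX_INTR: saved TLS intr flag
 *		uint64_t rsp0;   // CTX_RSP0: kernel stack top, stored via TLSVAR(RSP0)
 *	}
 *	hal_cpu_context_t;
 *
 *	// Saves the caller into 'oldctx' and resumes 'newctx'; control comes
 *	// back through thr_resume only when another thread switches to 'oldctx'.
 *	void cpu_context_switch( hal_cpu_context_t * oldctx,
 *	                         hal_cpu_context_t * newctx );
 */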
/* -------------------------------------------------------------------------- */

ASM_ENTRY(atomic_cas_32)
	movl	%esi,%eax
	lock cmpxchgl	%edx,(%rdi)
	/* %eax now contains the old value */
	ret

ASM_ENTRY(atomic_add_32)
	movl	%esi,%eax
	lock xaddl	%eax,(%rdi)
	/* %eax now contains the old value */
	ret

ASM_ENTRY(atomic_and_32)
	movl	(%rdi),%eax
1:	movl	%eax,%ecx
	andl	%esi,%ecx
	lock cmpxchgl	%ecx,(%rdi)
	jnz	1b
	/* %eax now contains the old value */
	ret

ASM_ENTRY(atomic_or_32)
	movl	(%rdi),%eax
1:	movl	%eax,%ecx
	orl	%esi,%ecx
	lock cmpxchgl	%ecx,(%rdi)
	jnz	1b
	/* %eax now contains the old value */
	ret
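/*
 * C prototypes matching the register usage above -- a sketch inferred from
 * the SysV AMD64 ABI (%rdi = 1st argument, %esi = 2nd, %edx = 3rd), not
 * copied from an ALMOS-MKH header; parameter names are assumptions. All
 * four routines return the value the destination held before the operation.
 *
 *	uint32_t atomic_cas_32( uint32_t * ptr, uint32_t expected, uint32_t desired );
 *	uint32_t atomic_add_32( uint32_t * ptr, uint32_t val );
 *	uint32_t atomic_and_32( uint32_t * ptr, uint32_t mask );
 *	uint32_t atomic_or_32 ( uint32_t * ptr, uint32_t mask );
 *
 *	// Example: the CAS succeeded if the returned old value equals 'expected'.
 *	//	if( atomic_cas_32( &lock, 0, 1 ) == 0 ) { // lock acquired }
 */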