/*
 * hal_switch.S - CPU context switch functions for TSAR-MIPS32
 *
 * Author : Alain Greiner (2016)
 *
 * Copyright (c) UPMC Sorbonne Universites
 *
 * This file is part of ALMOS-MKH.
 *
 * ALMOS-MKH is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by
 * the Free Software Foundation; version 2.0 of the License.
 *
 * ALMOS-MKH is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with ALMOS-MKH; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
 */

    .section .switch , "ax" , @progbits

    .global hal_do_cpu_switch
    .global hal_do_cpu_save
    .global hal_do_cpu_restore

    .set noat
    .set noreorder
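#---------------------------------------------------------------------------------
# Thread context layout assumed by the three functions below, as implied by the
# save/restore offsets (36 contiguous 32-bit slots):
# - slot  0        : c0_epc
# - slots 1 to 25  : GPRs $1 to $25
# - slot  26       : hi
# - slot  27       : lo
# - slots 28 to 31 : GPRs $28 to $31 (gp, sp, s8/fp, ra)
# - slot  32       : c2_ptpr
# - slot  33       : c2_mode
# - slot  34       : c0_sr
# - slot  35       : c0_th
# Registers $26/$27 (k0/k1) are used as scratch registers and are not saved.
#---------------------------------------------------------------------------------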
#---------------------------------------------------------------------------------
# The hal_do_cpu_switch() function makes the following assumptions:
# - register $4 contains a pointer to the old thread context.
# - register $5 contains a pointer to the new thread context.
#---------------------------------------------------------------------------------
hal_do_cpu_switch:

    /* save old thread context */

    move    $26,    $4            /* $26 <= ctx_old               */

    mfc0    $27,    $14
    sw      $27,    0*4($26)      /* save c0_epc     to slot 0    */

    sw      $1,     1*4($26)
    sw      $2,     2*4($26)
    sw      $3,     3*4($26)
    sw      $4,     4*4($26)
    sw      $5,     5*4($26)
    sw      $6,     6*4($26)
    sw      $7,     7*4($26)
    sw      $8,     8*4($26)
    sw      $9,     9*4($26)
    sw      $10,    10*4($26)
    sw      $11,    11*4($26)
    sw      $12,    12*4($26)
    sw      $13,    13*4($26)
    sw      $14,    14*4($26)
    sw      $15,    15*4($26)
    sw      $16,    16*4($26)
    sw      $17,    17*4($26)
    sw      $18,    18*4($26)
    sw      $19,    19*4($26)
    sw      $20,    20*4($26)
    sw      $21,    21*4($26)
    sw      $22,    22*4($26)
    sw      $23,    23*4($26)
    sw      $24,    24*4($26)
    sw      $25,    25*4($26)

    mfhi    $27
    sw      $27,    26*4($26)     /* save hi         to slot 26   */
    mflo    $27
    sw      $27,    27*4($26)     /* save lo         to slot 27   */

    sw      $28,    28*4($26)     /* save gp         to slot 28   */
    sw      $29,    29*4($26)     /* save sp         to slot 29   */
    sw      $30,    30*4($26)     /* save s8         to slot 30   */
    sw      $31,    31*4($26)     /* save ra         to slot 31   */

    mfc0    $27,    $12
    sw      $27,    34*4($26)     /* save c0_sr      to slot 34   */
    mfc0    $27,    $4,     2
    sw      $27,    35*4($26)     /* save c0_th      to slot 35   */

    mfc2    $27,    $0
    sw      $27,    32*4($26)     /* save c2_ptpr    to slot 32   */
    mfc2    $27,    $1
    sw      $27,    33*4($26)     /* save c2_mode    to slot 33   */

    sync

    /* restore new thread context */

    move    $26,    $5            /* $26 <= ctx_new               */

    lw      $27,    0*4($26)
    mtc0    $27,    $14           /* restore c0_epc  from slot 0  */

    lw      $1,     1*4($26)
    lw      $2,     2*4($26)
    lw      $3,     3*4($26)
    lw      $4,     4*4($26)
    lw      $5,     5*4($26)
    lw      $6,     6*4($26)
    lw      $7,     7*4($26)
    lw      $8,     8*4($26)
    lw      $9,     9*4($26)
    lw      $10,    10*4($26)
    lw      $11,    11*4($26)
    lw      $12,    12*4($26)
    lw      $13,    13*4($26)
    lw      $14,    14*4($26)
    lw      $15,    15*4($26)
    lw      $16,    16*4($26)
    lw      $17,    17*4($26)
    lw      $18,    18*4($26)
    lw      $19,    19*4($26)
    lw      $20,    20*4($26)
    lw      $21,    21*4($26)
    lw      $22,    22*4($26)
    lw      $23,    23*4($26)
    lw      $24,    24*4($26)
    lw      $25,    25*4($26)

    lw      $27,    26*4($26)
    mthi    $27                   /* restore hi      from slot 26 */
    lw      $27,    27*4($26)
    mtlo    $27                   /* restore lo      from slot 27 */

    lw      $28,    28*4($26)     /* restore gp      from slot 28 */
    lw      $29,    29*4($26)     /* restore sp      from slot 29 */
    lw      $30,    30*4($26)     /* restore s8      from slot 30 */
    lw      $31,    31*4($26)     /* restore ra      from slot 31 */

    lw      $27,    32*4($26)
    mtc2    $27,    $0            /* restore c2_ptpr from slot 32 */

    lw      $27,    35*4($26)
    mtc0    $27,    $4,     2     /* restore c0_th   from slot 35 */

    lw      $27,    33*4($26)
    lw      $26,    34*4($26)
    mtc2    $27,    $1            /* restore c2_mode from slot 33 */
    mtc0    $26,    $12           /* restore c0_sr   from slot 34 */

    jr      $31                   /* return to caller             */
    nop

#---------------------------------------------------------------------------------
# The hal_do_cpu_save() function makes the following assumption:
# - register $4 contains a pointer to the target thread context.
#---------------------------------------------------------------------------------
hal_do_cpu_save:

    move    $26,    $4            /* $26 <= &context              */

    mfc0    $27,    $14
    sw      $27,    0*4($26)      /* save c0_epc     to slot 0    */

    sw      $1,     1*4($26)
    sw      $2,     2*4($26)
    sw      $3,     3*4($26)
    sw      $4,     4*4($26)
    sw      $5,     5*4($26)
    sw      $6,     6*4($26)
    sw      $7,     7*4($26)
    sw      $8,     8*4($26)
    sw      $9,     9*4($26)
    sw      $10,    10*4($26)
    sw      $11,    11*4($26)
    sw      $12,    12*4($26)
    sw      $13,    13*4($26)
    sw      $14,    14*4($26)
    sw      $15,    15*4($26)
    sw      $16,    16*4($26)
    sw      $17,    17*4($26)
    sw      $18,    18*4($26)
    sw      $19,    19*4($26)
    sw      $20,    20*4($26)
    sw      $21,    21*4($26)
    sw      $22,    22*4($26)
    sw      $23,    23*4($26)
    sw      $24,    24*4($26)
    sw      $25,    25*4($26)

    mfhi    $27
    sw      $27,    26*4($26)     /* save hi         to slot 26   */
    mflo    $27
    sw      $27,    27*4($26)     /* save lo         to slot 27   */

    sw      $28,    28*4($26)     /* save gp         to slot 28   */
    sw      $29,    29*4($26)     /* save sp         to slot 29   */
    sw      $30,    30*4($26)     /* save s8         to slot 30   */
    sw      $31,    31*4($26)     /* save ra         to slot 31   */

    mfc2    $27,    $0
    sw      $27,    32*4($26)     /* save c2_ptpr    to slot 32   */
    mfc2    $27,    $1
    sw      $27,    33*4($26)     /* save c2_mode    to slot 33   */

    mfc0    $27,    $12
    sw      $27,    34*4($26)     /* save c0_sr      to slot 34   */
    mfc0    $27,    $4,     2
    sw      $27,    35*4($26)     /* save c0_th      to slot 35   */

    sync

    jr      $31                   /* return to caller             */
    nop

#---------------------------------------------------------------------------------
# The hal_do_cpu_restore() function makes the following assumption:
# - register $4 contains a pointer to the target thread context.
#---------------------------------------------------------------------------------
hal_do_cpu_restore:

    move    $26,    $4            /* $26 <= &context              */

    lw      $27,    0*4($26)
    mtc0    $27,    $14           /* restore c0_epc  from slot 0  */

    lw      $1,     1*4($26)
    lw      $2,     2*4($26)
    lw      $3,     3*4($26)
    lw      $4,     4*4($26)
    lw      $5,     5*4($26)
    lw      $6,     6*4($26)
    lw      $7,     7*4($26)
    lw      $8,     8*4($26)
    lw      $9,     9*4($26)
    lw      $10,    10*4($26)
    lw      $11,    11*4($26)
    lw      $12,    12*4($26)
    lw      $13,    13*4($26)
    lw      $14,    14*4($26)
    lw      $15,    15*4($26)
    lw      $16,    16*4($26)
    lw      $17,    17*4($26)
    lw      $18,    18*4($26)
    lw      $19,    19*4($26)
    lw      $20,    20*4($26)
    lw      $21,    21*4($26)
    lw      $22,    22*4($26)
    lw      $23,    23*4($26)
    lw      $24,    24*4($26)
    lw      $25,    25*4($26)

    lw      $27,    26*4($26)
    mthi    $27                   /* restore hi      from slot 26 */
    lw      $27,    27*4($26)
    mtlo    $27                   /* restore lo      from slot 27 */

    lw      $28,    28*4($26)     /* restore gp      from slot 28 */
    lw      $29,    29*4($26)     /* restore sp      from slot 29 */
    lw      $30,    30*4($26)     /* restore s8      from slot 30 */
    lw      $31,    31*4($26)     /* restore ra      from slot 31 */

    lw      $27,    32*4($26)
    mtc2    $27,    $0            /* restore c2_ptpr from slot 32 */

    lw      $27,    35*4($26)
    mtc0    $27,    $4,     2     /* restore c0_th   from slot 35 */

    lw      $27,    33*4($26)
    lw      $26,    34*4($26)
    mtc2    $27,    $1            /* restore c2_mode from slot 33 */
    mtc0    $26,    $12           /* restore c0_sr   from slot 34 */

    jr      $31                   /* return to caller             */
    nop

    .set reorder
    .set at
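#---------------------------------------------------------------------------------
# Note: given the argument registers used above ($4/$5, i.e. a0/a1), the C-level
# prototypes implied by these entry points are presumably of the form:
#   void hal_do_cpu_switch ( void * ctx_old , void * ctx_new );
#   void hal_do_cpu_save   ( void * ctx );
#   void hal_do_cpu_restore( void * ctx );
# This is illustrative only: the actual context type and declarations are those
# defined by the kernel HAL headers.
#---------------------------------------------------------------------------------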