From 007540a872d2a68ac3ea489d139ffe5809a27b13 Mon Sep 17 00:00:00 2001 From: umershahidengr Date: Tue, 17 Jan 2023 18:39:36 +0500 Subject: [PATCH] LI() macro fixed along with a few minor changes related to offset --- riscv-test-suite/env/arch_test.h | 1523 +++++++++++++++--------------- 1 file changed, 768 insertions(+), 755 deletions(-) diff --git a/riscv-test-suite/env/arch_test.h b/riscv-test-suite/env/arch_test.h index 7fb18eeaf..498b50c04 100644 --- a/riscv-test-suite/env/arch_test.h +++ b/riscv-test-suite/env/arch_test.h @@ -1,31 +1,31 @@ // This file is divided into the following sections: -// RV Arch Test Constants -// general test and helper macros, required, optional, or just useful -// _ARGn, SIG[BASE/UPD[_F/ID]],BASE_UPD,BIT,LA[U],LI[U],RVTEST_[INIT/SAVE]_GPRS, XCSR_RENAME -// RV ARCH Test Interrupt Macros ****FIXME:spec which regs must not be altered -// primary macros used by handle: RVTEST_TRAP{_PROLOG/_HANDLER/_EPILOG/SAVEAREA} -// required test format spec macros: RVTEST_{Code)/DATA/SIG}{_BEGIN/_END} -// macros from Andrew Waterman's risc-v test macros -// deprecated macro name aliases, just for migration ease +// RV Arch Test Constants +// general test and helper macros, required, optional, or just useful +// _ARGn, SIG[BASE/UPD[_F/ID]],BASE_UPD,BIT,LA[U],LI[U],RVTEST_[INIT/SAVE]_GPRS, XCSR_RENAME +// RV ARCH Test Interrupt Macros ****FIXME:spec which regs must not be altered +// primary macros used by handle: RVTEST_TRAP{_PROLOG/_HANDLER/_EPILOG/SAVEAREA} +// required test format spec macros: RVTEST_{Code)/DATA/SIG}{_BEGIN/_END} +// macros from Andrew Waterman's risc-v test macros +// deprecated macro name aliases, just for migration ease // The resulting memory layout of the trap handler is (MACRO_NAME, label [function]) //**************************************************************** // (code section) // RVMODEL_BOOT -// rvtest_entry_point: [boot code] +// rvtest_entry_point: [boot code] // RVTEST_CODE_BEGIN -// rvtest_init: [TRAP_PROLOG] (m, 
ms, or msv) -// [INIT_GPRS] -// rvtest_code_begin: +// rvtest_init: [TRAP_PROLOG] (m, ms, or msv) +// [INIT_GPRS] +// rvtest_code_begin: //***************************** //********(body of tests)****** //***************************** // RVTEST_CODE_END -// rvtest_code_end: [*optional* SAVE_GPRS routine] -// [RVTEST_GOTO_MMODE ] **FIXME** this won't work if MMU enabled unless VA=PA -// cleanup_epilogs [TRAP_EPILOG (m, ms, or msv)] (jump to exit_cleanup) -// [TRAP_HANDLER (m, ms, or msv)] -// exit_cleanup: [RVMODEL_HALT macro or a branch to it.] +// rvtest_code_end: [*optional* SAVE_GPRS routine] +// [RVTEST_GOTO_MMODE ] **FIXME** this won't work if MMU enabled unless VA=PA +// cleanup_epilogs [TRAP_EPILOG (m, ms, or msv)] (jump to exit_cleanup) +// [TRAP_HANDLER (m, ms, or msv)] +// exit_cleanup: [RVMODEL_HALT macro or a branch to it.] // //--------------------------------this could start a new section-------------------------------- // (Data section) - align to 4K boundary @@ -38,49 +38,49 @@ //*****(trap handler data is here)****** //************************************** // -// rvtest_trap_sig: [global trap signature start (shared by all modes) inited to mtrap_sigptr] **FIXME: needs VA=PA -// RVTEST_TRAP_SAVEAREA [handler sv area(m, ms, or msv) temp reg save), CSRs, tramp table, ptrs] -// rvtest_data_begin: [input data (shared by all modes)] +// rvtest_trap_sig: [global trap signature start (shared by all modes) inited to mtrap_sigptr] **FIXME: needs VA=PA +// RVTEST_TRAP_SAVEAREA [handler sv area(m, ms, or msv) temp reg save), CSRs, tramp table, ptrs] +// rvtest_data_begin: [input data (shared by all modes)] // RVTEST_DATA_END -// rvtest_data_end: +// rvtest_data_end: // RVTEST_ROOT_PG_TBL [sets up identity map (VA=PA) -// sroot_pg_tbl: (if smode) -// vroot_pg_tbl: (if hypervisor) +// sroot_pg_tbl: (if smode) +// vroot_pg_tbl: (if hypervisor) //--------------------------------this could start a new section-------------------------------- // RVTEST_SIG_BEGIN // 
RVMODEL_DATA_BEGIN -// rvtest_sig_begin: [beginning of signature, used by signature dump, can be used by tests] -// mtrap_sigptr: [global trap signature start (shared by all modes)] - defined by tests -// gpr_save: [gpr save area (optional, enabled if rvtest_gpr_save is defined)] +// rvtest_sig_begin: [beginning of signature, used by signature dump, can be used by tests] +// mtrap_sigptr: [global trap signature start (shared by all modes)] - defined by tests +// gpr_save: [gpr save area (optional, enabled if rvtest_gpr_save is defined)] // RVTEST_SIG_END -// rvtest_sig_end: [global test end signature (shared by all modes)] (shouldn't matter what RVMODEL_DATA_END does) +// rvtest_sig_end: [global test end signature (shared by all modes)] (shouldn't matter what RVMODEL_DATA_END does) // RVMODEL_DATA_END //--------------------------------end of test-------------------------------- /* The following macros are optional if interrupt tests are enabled (defaulted if not defined): - RVMODEL_SET_[M/V/S]_[SW]_INT - RVMODEL_CLR_[M/V/S]_[SW/TIMTER/EXT]_INT - rvtest_[M/V/S]trap_routine - GOTO_[M/S/U]MODE, INSTANTIATE_MODE_MACRO (prolog/handler/epilog/savearea) + RVMODEL_SET_[M/V/S]_[SW]_INT + RVMODEL_CLR_[M/V/S]_[SW/TIMTER/EXT]_INT + rvtest_[M/V/S]trap_routine + GOTO_[M/S/U]MODE, INSTANTIATE_MODE_MACRO (prolog/handler/epilog/savearea) The following macro is optional, and defaults to fence.i if not defined - RVMODEL.FENCEI - The following variables are used if interrupt tests are enabled (defaulted if not defined): - NUM_SPECD_INTCAUSES + RVMODEL.FENCEI + The following variables are used if interrupt tests are enabled (defaulted if not defined): + NUM_SPECD_INTCAUSES The following variables are optional if exception tests are enabled (defaulted if not defined): - DATA_REL_TVAL_MSK CODE_REL_TVAL_MSK + DATA_REL_TVAL_MSK CODE_REL_TVAL_MSK The following variables are optional: - rvtest_gpr_save: if defined, stores GPR contents into signature at test end (for debug) + rvtest_gpr_save: 
if defined, stores GPR contents into signature at test end (for debug) The following labels are required and defined by required macros: - rvtest_code_begin: defined by RVTEST_CODE_BEGIN macro (boot code can precede this) - rvtest_code_end: defined by RVTEST_CODE_END macro (trap handlers follow this) - rvtest_data_begin: defined by RVTEST_DATA_BEGIN macro - rvtest_data_end: defined by RVTEST_DATA_END macro - rvtest_sig_begin: defined by RVTEST_SIG_BEGIN macro (after RVMODEL_DATA_BEGIN) defines signature begin - rvtest_sig_end: defined by RVTEST_SIG_END macro (before RVMODEL_DATA_END) defines signature end - rvtest_Sroot_pg_tbl: defined by RVTEST_PTE_IDENT_MAP macro inside RVTEST_DATA_BEGIN if Smode implemented - rvtest_Vroot_pg_tbl: defined by RVTEST_PTE_IDENT_MAP macro inside RVTEST_DATA_BEGIN if VSmode implemented + rvtest_code_begin: defined by RVTEST_CODE_BEGIN macro (boot code can precede this) + rvtest_code_end: defined by RVTEST_CODE_END macro (trap handlers follow this) + rvtest_data_begin: defined by RVTEST_DATA_BEGIN macro + rvtest_data_end: defined by RVTEST_DATA_END macro + rvtest_sig_begin: defined by RVTEST_SIG_BEGIN macro (after RVMODEL_DATA_BEGIN) defines signature begin + rvtest_sig_end: defined by RVTEST_SIG_END macro (before RVMODEL_DATA_END) defines signature end + rvtest_Sroot_pg_tbl: defined by RVTEST_PTE_IDENT_MAP macro inside RVTEST_DATA_BEGIN if Smode implemented + rvtest_Vroot_pg_tbl: defined by RVTEST_PTE_IDENT_MAP macro inside RVTEST_DATA_BEGIN if VSmode implemented labels/variables that must be defined by the DUT in model specific macros or #defines - mtrap_sigptr: defined by test if traps are possible, else is defaulted + mtrap_sigptr: defined by test if traps are possible, else is defaulted */ // don't put C-style macros (#define xxx) inside assembly macros; C-style is evaluated before assembly @@ -89,13 +89,13 @@ #define MIN(a,b) (((a)<(b))?(a):(b)) #define MAX(a,b) (((a)>(b))?(a):(b)) #define MASK_XLEN(x) ((x) & ((1 << (__riscv_xlen 
- 1) << 1) - 1)) -#define BIT(addr, bit) (((addr)>>bit)&1) -#define SEXT_IMM(x) ((x&0x0FFF) | (-BIT(x,11)<<12)) +#define BIT(addr, bit) (((addr)>>(bit&MASK))&1) +#define SEXT_IMM(x) ((x&0xFFF) | (-BIT(x,11)<<12)) -#define REGWIDTH (XLEN>>3) // in units of #bytes -#define MASK (((1<<(XLEN-1))-1) + (1<<(XLEN-1))) // XLEN bits of 1s +#define REGWIDTH (XLEN>>3) // in units of #bytes +#define MASK (((1<<(XLEN-1))-1) + (1<<(XLEN-1))) // XLEN bits of 1s -#define ALIGNSZ ((XLEN>>5)+2) // log2(XLEN): 2,3,4 for XLEN 32,64,128 +#define ALIGNSZ ((XLEN>>5)+2) // log2(XLEN): 2,3,4 for XLEN 32,64,128 #if XLEN>FLEN #define SIGALIGN REGWIDTH #else @@ -108,17 +108,17 @@ //============================================================================== // set defaults -#ifndef NUM_SPECD_INTCAUSES +#ifndef NUM_SPECD_INTCAUSES #define NUM_SPECD_INTCAUSES 16 #endif - // set defaults -#ifndef NUM_SPECD_EXCPTCAUSES + // set defaults +#ifndef NUM_SPECD_EXCPTCAUSES #define NUM_SPECD_EXCPTCAUSES 16 #endif -#ifndef RVMODEL_FENCEI - #define RVMODEL_FENCEI fence.i // make sure ifetches get new code +#ifndef RVMODEL_FENCEI + #define RVMODEL_FENCEI fence.i // make sure ifetches get new code #endif #ifndef UNROLLSZ @@ -129,19 +129,19 @@ // if xTVAL is set to zero for some cause, then the corresponding bit in SET_REL_TVAL_MSK should be cleared #ifndef SET_REL_TVAL_MSK -#define SET_REL_TVAL_MSK ((1<>(incrpos)))!=0) ;\ - addi reg, reg, BIT(incrval,incrpos-1)+0x0FFF & (incrval>>(incrpos)) ;\ +#define COND_INCR(incr_reg, incrval, incrpos) ;\ + .if ((BIT(incrval,incrpos-1)+0x0FFF & (incrval>>(incrpos)))!=0) ;\ + addi reg, reg, BIT(incrval,incrpos-1)+0x0FFF & (incrval>>(incrpos)) ;\ .endif // this generates a constants using the standard addi or lui/addi sequences // but also handles cases that are contiguous bit masks in any position, // and also constants handled with the addi/lui/addi but are shifted left - + /**** fixed length LI macro ****/ +#define WDWIDTH ( 32>>3) // in units of #bytes +#define WDSZ 
(WDWIDTH) +#define WDMSK 3 +#define IMMSZ 12 +#define IMMSGN (IMMSZ-1) +#define WDSGN 31 +#define IMMMSK ((1<>11)&1)==1) ;\ - .set imm12, imm12 - 4096 ;\ - .endif ;\ - .set absimm, immx /* convert to positive number to simplify code */ ;\ - .if((immx>>(XLEN-1)&1)==1) ;\ - .set absimm, ((immx)^(-1)) & MASK /* Invert if negative */ ;\ - .endif ;\ - .if ((absimm>>12)==0) /* are bits MSB:12 all the same? */ ;\ - li reg, imm12 /* yes, <= 12bit, will be simple li */ ;\ - .elseif ((absimm>>31)==0) /* are bits MSB:31 all the same? */ ;\ - .if ((immx >> 11) == 0x01FFFFF) ;\ - li reg, imm12 ;\ - .else ;\ - lui reg, (((immx>>12)+((immx>>11)&1))&0xFFFFF) /* yes<= 32bit, use lui/addi pair */ ;\ - .if ((immx&0x0FFF)!=0) /* but skip this if lower bits are zero */ ;\ - addi reg, reg, imm12 ;\ - .endif ;\ - .endif ;\ - .else /* see if its a single field bitmask */ ;\ - .set pos, 0 ;\ - .set edge1, -1 /* 1st "1" bit pos scanning r to l */ ;\ - .set edge2, -1 /* 1st "0" bit pos scanning r to l */ ;\ - .set imme, immx /* imm or complement (if odd) */ ;\ - .if (immx&1 == 1) ;\ - .set imme, (immx)^(-1) /* cvt to even, cvt back at end */ ;\ - .endif ;\ -/****************************************************************************/ ;\ -/*** find first 0->1, then 1->0 transition fm LSB->MSB give even operand ***/ ;\ - .rept XLEN ;\ - .if ((edge1==-1) && (((imme>>pos)&1)==1)) /*look for falling edge[pos] */ ;\ - .set edge1,pos /* found falling edge, don’t check for more */ ;\ - .elseif ((edge1>=0) && (edge2==-1) && (((imme>>pos)&1)==0)) /*look for rising edge[pos] */ ;\ - .set edge2, pos /* found rising edge, don’t check for more */ ;\ - .endif ;\ - .set pos, pos+1 /* keep looking (even if already found */ ;\ - .endr /* assert: edge must be found since imm!= 0 */ ;\ - /****************************************************************************/ ;\ - .set shiftimm, (imm>>edge1)&0xFFFFFFFF ;\ - .set imm12, shiftimm&0xFFF /* Took 1st 12 bits of shifted immediate value */ ;\ - .if 
(((shiftimm>>11)&1)==1) ;\ - .set imm12, imm12 - 4096 ;\ - .endif ;\ - .set absimm, (absimm>>edge1)&0xFFFFFFFF /* convert to 32bit positive number to simplify code */ ;\ - .if (edge2==-1) /* found just 1 edge, so its 111000 or 000111 */ ;\ - li reg, -1 ;\ - .if (immx == imme) ;\ - slli reg, reg, edge1 /* 111s followed by 000s mask */ ;\ - .else ;\ - srli reg, reg, XLEN-edge1 /* 000s followed by 111s mask */ ;\ - .endif ;\ - .elseif (imme == (1<>12)==0)) /* fits in 12b after shifting?*/ ;\ - li reg, imm12 /* yes, <= 12bit, will be simple li */ ;\ - slli reg, reg, edge1 /* add trailing zeros */ ;\ - .elseif ((immx==imme) & ((absimm>>31)==0)) /* fits in 32b after shifting?*/ ;\ - .if ((absimm>>31)==0) /* are bits MSB:31 all the same? */ ;\ - lui reg, (((shiftimm>>12)+((shiftimm>>11)&1))&0xFFFFF) /* yes<= 32bit, use lui/addi pair */ ;\ - .if ((shiftimm&0x0FFF)!=0) /* but skip this if lower bits are zero */ ;\ - addi reg, reg, imm12 ;\ - .endif ;\ - slli reg, reg, edge1 /* add trailing zeros */ ;\ - .endif ;\ - .else /* give up and unroll */ ;\ - .option pop ;\ - .align UNROLLSZ ;\ - .option push ;\ - .option norvc ;\ - .if(XLEN==32) ;\ - .warning "Should never get her for RV32" ;\ - .endif ;\ - li reg,imm ;\ - .align UNROLLSZ ;\ - .endif ;\ - .endif ;\ - .option pop +#define LI(reg, imm) ;\ + .option push ;\ + .option norvc ;\ + .set even, (1-BIT(imm, 0)) /* imm has at least 1 trailing zero */ ;\ + .set immx, (imm & MASK) /* trim to XLEN (noeffect on RV64) */ ;\ + .set imm12, (SEXT_IMM(immx)) ;\ + .set absimm, ((immx^(-BIT(immx,XLEN-1)))&MASK) /* cvt to posnum to simplify code */ ;\ + .set imme, ((immx^(-BIT(immx,0 )))&MASK) /* cvt to even, cvt back at end */ ;\ + .set cry, (BIT(immx, IMMSGN)) ;\ + .set cryh, (BIT(immx, IMMSGN+32)) ;\ + .set pos, 0 ;\ + .set edge1, -1 /* 1st "1" bit pos scanning r to l */ ;\ + .set edge2, -1 /* 1st "0" bit pos scanning r to l */ ;\ +/**** find 1st 0->1, then 1->0 transition fm LSB->MSB given even operand ****/ ;\ + .rept XLEN ;\ + .if 
(edge1<0) /* looking for first edge? */ ;\ + .if (BIT(imme,pos)==1) /* look for falling edge[pos] */ ;\ + .set edge1,pos /* fnd falling edge, don’t chk for more */ ;\ + .endif ;\ + .elseif (edge2<0) /* looking for second edge? */ ;\ + .if (BIT(imme,pos)==0) /* yes, found rising edge[pos]? */ ;\ + .set edge2, pos /* fnd rising edge, don’t chk for more */ ;\ + .endif ;\ + .endif ;\ + .set pos, pos+1 /* keep looking (even if already found) */ ;\ + .endr ;\ + .set immxsh, (immx>>edge1) /* *sh variables only used if positive */ ;\ + .set imm12sh,(SEXT_IMM(immxsh))/* look @1st 12b of shifted imm val */ ;\ + .set crysh, (BIT(immxsh, IMMSGN)) ;\ + .set absimmsh, immxsh /* pos, no inversion needed, just shift */ ;\ +/*******does it fit into std li or lui+li sequence****************************/ ;\ + .if ((absimm>>IMMSGN)==0) /* fits 12b signed imm (properly sgnext)? */ ;\ + li reg, imm12 /* yes, <= 12bit, will be simple li */ ;\ + .elseif ((absimm>> 31)==0) /* fits 32b signed imm?(properly sgnext)? */ ;\ + lui reg, (((immx>>12)+cry)&LIMMMSK) /* <= 32b, use lui/addi */ ;\ + .if(XLEN==64 && (BIT(immx,XLEN-1)==0)) ;\ + sll reg, reg, 32 ;\ + srl reg, reg, 32 ;\ + .endif ;\ + .if ((imm&IMMMSK)!=0) /* but skip this if lower bits are zero */;\ + addi reg, reg, imm12 ;\ + .endif ;\ + /*********** look for various masks 0s->1s->0s, etc **********************/ ;\ + .elseif ( even && (edge2==-1)) /* only rising edge, so 111000 */ ;\ + li reg, -1 ;\ + slli reg, reg, edge1 /* make 111s --> 000s mask */ ;\ + .elseif (!even && (edge2==-1)) /* only falling edge, so 000111 */ ;\ + li reg, -1 ;\ + srli reg, reg, XLEN-edge1 /* make 000s --> 111s mask */ ;\ + .elseif (imme == (1<>IMMSGN)==0))/* fits 12b after shift? */ ;\ + li reg, imm12sh /* <= 12bit, will be simple li */ ;\ + slli reg, reg, edge1 /* add trailing zeros */ ;\ + .elseif ((immx==imme)&&((absimmsh>>WDSGN)==0)) /* fits 32b after shft? 
*/ ;\ + lui reg, ((immxsh>>IMMSZ)+crysh)&LIMMMSK /* <=32b, use lui/addi */ ;\ + .if(XLEN==64 && (BIT(immx,XLEN-1)==0)) ;\ + sll reg, reg, 32 ;\ + srl reg, reg, 32 ;\ + .endif ;\ + .if ((imm12sh&IMMMSK)!=0) /* but skip this if low bits ==0 */ ;\ + addi reg, reg, imm12sh ;\ + .endif ;\ + slli reg, reg, edge1 /* add trailing zeros */ ;\ + .else /* give up, use fixed 8op sequence */ ;\ + /******** TBD add case of form MSB + 12b/32b (un)shifted constant? ********/ ;\ + lui reg, ((immx>>44)+cryh)&LIMMMSK /* 1st 20b (63:44) */ ;\ + .set cry, (BIT(immx, IMMSGN+32-12)) ;\ + addi reg, reg, SEXT_IMM(immx>>32)+cry /* nxt 12b (43:32) */ ;\ + slli reg, reg, 12 /* following are <12b, don't need SEXT */ ;\ + .set cry, (BIT(immx, IMMSGN+32-24)) ;\ + addi reg, reg, SEXT_IMM(immx>>20)+cry /* nxt 12b (31:20) */ ;\ + slli reg, reg, 12 ;\ + addi reg, reg, SEXT_IMM(immx>>8) /* nxt 12b (19:08) */ ;\ + slli reg, reg, 8 ;\ + addi reg, reg, (immx) & (0xFF) /* lst 8b (07:00) */ ;\ + .if(XLEN==32) ;\ + .warning "Should never get here for RV32" ;\ + .endif ;\ + .endif ;\ + .option pop /**** fixed length LA macro ****/ -#define LA(reg,val) ;\ - .option push ;\ - .option norvc ;\ - .align UNROLLSZ ;\ - la reg,val ;\ - .align UNROLLSZ ;\ - .option pop ; +#define LA(reg,val) ;\ + .option push ;\ + .option norvc ;\ + la reg,val ;\ + .option pop ; /*****************************************************************/ /**** initialize regs, just to make sure you catch any errors ****/ /*****************************************************************/ /* init regs, to ensure you catch any errors */ -#define RVTEST_INIT_GPRS ;\ - LI (x1, (0xFEEDBEADFEEDBEAD & MASK)) ;\ - LI (x2, (0xFF76DF56FF76DF56 & MASK)) ;\ - LI (x3, (0x7FBB6FAB7FBB6FAB & MASK)) ;\ - LI (x4, (0xBFDDB7D5BFDDB7D5 & MASK)) ;\ - LA (x5, rvtest_code_begin) ;\ - LA (x6, rvtest_data_begin) ;\ - LI (x7, (0xB7FBB6FAB7FBB6FA & MASK)) ;\ - LI (x8, (0x5BFDDB7D5BFDDB7D & MASK)) ;\ - LI (x9, (0xADFEEDBEADFEEDBE & MASK)) ;\ - LI (x10, 
(0x56FF76DF56FF76DF & MASK)) ;\ - LI (x11, (0xAB7FBB6FAB7FBB6F & MASK)) ;\ - LI (x12, (0xD5BFDDB7D5BFDDB7 & MASK)) ;\ - LI (x13, (0xEADFEEDBEADFEEDB & MASK)) ;\ - LI (x14, (0xF56FF76DF56FF76D & MASK)) ;\ - LI (x15, (0xFAB7FBB6FAB7FBB6 & MASK)) ;\ - #ifndef RVTEST_E ;\ - LI (x16, (0x7D5BFDDB7D5BFDDB & MASK)) ;\ - LI (x17, (0xBEADFEEDBEADFEED & MASK)) ;\ - LI (x18, (0xDF56FF76DF56FF76 & MASK)) ;\ - LI (x19, (0x6FAB7FBB6FAB7FBB & MASK)) ;\ - LI (x20, (0xB7D5BFDDB7D5BFDD & MASK)) ;\ - LI (x21, (0xDBEADFEEDBEADFEE & MASK)) ;\ - LI (x22, (0x6DF56FF76DF56FF7 & MASK)) ;\ - LI (x23, (0xB6FAB7FBB6FAB7FB & MASK)) ;\ - LI (x24, (0xDB7D5BFDDB7D5BFD & MASK)) ;\ - LI (x25, (0xEDBEADFEEDBEADFE & MASK)) ;\ - LI (x26, (0x76DF56FF76DF56FF & MASK)) ;\ - LI (x27, (0xBB6FAB7FBB6FAB7F & MASK)) ;\ - LI (x28, (0xDDB7D5BFDDB7D5BF & MASK)) ;\ - LI (x29, (0xEEDBEADFEEDBEADF & MASK)) ;\ - LI (x30, (0xF76DF56FF76DF56F & MASK)) ;\ - LI (x31, (0xFBB6FAB7FBB6FAB7 & MASK)) ;\ +#define RVTEST_INIT_GPRS ;\ + LI (x1, (0xFEEDBEADFEEDBEAD & MASK)) ;\ + LI (x2, (0xFF76DF56FF76DF56 & MASK)) ;\ + LI (x3, (0x7FBB6FAB7FBB6FAB & MASK)) ;\ + LI (x4, (0xBFDDB7D5BFDDB7D5 & MASK)) ;\ + LA (x5, rvtest_code_begin) ;\ + LA (x6, rvtest_data_begin) ;\ + LI (x7, (0xB7FBB6FAB7FBB6FA & MASK)) ;\ + LI (x8, (0x5BFDDB7D5BFDDB7D & MASK)) ;\ + LI (x9, (0xADFEEDBEADFEEDBE & MASK)) ;\ + LI (x10, (0x56FF76DF56FF76DF & MASK)) ;\ + LI (x11, (0xAB7FBB6FAB7FBB6F & MASK)) ;\ + LI (x12, (0xD5BFDDB7D5BFDDB7 & MASK)) ;\ + LI (x13, (0xEADFEEDBEADFEEDB & MASK)) ;\ + LI (x14, (0xF56FF76DF56FF76D & MASK)) ;\ + LI (x15, (0xFAB7FBB6FAB7FBB6 & MASK)) ;\ + #ifndef RVTEST_E ;\ + LI (x16, (0x7D5BFDDB7D5BFDDB & MASK)) ;\ + LI (x17, (0xBEADFEEDBEADFEED & MASK)) ;\ + LI (x18, (0xDF56FF76DF56FF76 & MASK)) ;\ + LI (x19, (0x6FAB7FBB6FAB7FBB & MASK)) ;\ + LI (x20, (0xB7D5BFDDB7D5BFDD & MASK)) ;\ + LI (x21, (0xDBEADFEEDBEADFEE & MASK)) ;\ + LI (x22, (0x6DF56FF76DF56FF7 & MASK)) ;\ + LI (x23, (0xB6FAB7FBB6FAB7FB & MASK)) ;\ + LI (x24, (0xDB7D5BFDDB7D5BFD 
& MASK)) ;\ + LI (x25, (0xEDBEADFEEDBEADFE & MASK)) ;\ + LI (x26, (0x76DF56FF76DF56FF & MASK)) ;\ + LI (x27, (0xBB6FAB7FBB6FAB7F & MASK)) ;\ + LI (x28, (0xDDB7D5BFDDB7D5BF & MASK)) ;\ + LI (x29, (0xEEDBEADFEEDBEADF & MASK)) ;\ + LI (x30, (0xF76DF56FF76DF56F & MASK)) ;\ + LI (x31, (0xFBB6FAB7FBB6FAB7 & MASK)) ;\ #endif /******************************************************************************/ -/**** this is a helper macro that conditionally instantiates the macros ****/ +/**** this is a helper macro that conditionally instantiates the macros ****/ /**** PROLOG/HANDLER/EPILOG/SAVEAREA depending on test type & mode support ****/ /******************************************************************************/ .macro INSTANTIATE_MODE_MACRO MACRO_NAME #ifdef rvtest_mtrap_routine - \MACRO_NAME M // actual m-mode prolog/epilog/handler code + \MACRO_NAME M // actual m-mode prolog/epilog/handler code - #ifdef rvtest_strap_routine - \MACRO_NAME S // actual s-mode prolog/epilog/handler code + #ifdef rvtest_strap_routine + \MACRO_NAME S // actual s-mode prolog/epilog/handler code - #ifdef rvtest_vtrap_routine - \MACRO_NAME V // actual v-mode prolog/epilog/handler code - #endif + #ifdef rvtest_vtrap_routine + \MACRO_NAME V // actual v-mode prolog/epilog/handler code + #endif #endif #endif .endm /******************************************************************************/ /**** this is a helper macro defining int macro if the macro if undefined ****/ -/**** It builds the macro name from arguments prefix, mode, and type ****/ -/**** The prefix is only SET or CLEAR here, preceded by RVMODEL. ****/ +/**** It builds the macro name from arguments prefix, mode, and type ****/ +/**** The prefix is only SET or CLEAR here, preceded by RVMODEL. 
****/ /**** The types are the 3 types of standard interrupts: SW, EXT, & TIMER ****/ /******************************************************************************/ .macro INIT_MODE_MACRO MACRO_NAME #ifndef MACRO_NAME //#warning "MACRO_NAME is not defined by target. Executing this will end test" - .macro \MACRO_NAME + .macro \MACRO_NAME j cleanup_epilogs .endm #endif .endm /******************************************************************************/ -/**** this is a helper macro that conditionally instantiates default ****/ -/**** interrupt set/clear depending on type of test & mode support ****/ +/**** this is a helper macro that conditionally instantiates default ****/ +/**** interrupt set/clear depending on type of test & mode support ****/ /******************************************************************************/ .macro INSTANTIATE_INT_MACRO INT_MACRO_NAME - #ifndef INT_MACRO_NAME // actual m-mode handler code + #ifndef INT_MACRO_NAME // actual m-mode handler code // #warning: "INT_MACRO_NAME is not defined by target. Executing this will end test" .macro \INT_MACRO_NAME - j cleanup_epilogs + j cleanup_epilogs .endm #endif .endm /******************************************************************************/ -/**** this is a helper macro that creates CSR aliases so code that ****/ -/**** accesses CSRs when V=1 in different modes can share the code ****/ +/**** this is a helper macro that creates CSR aliases so code that ****/ +/**** accesses CSRs when V=1 in different modes can share the code ****/ /******************************************************************************/ - .macro XCSR_RENAME __MODE__ // enable CSR names to be parameterized. + .macro XCSR_RENAME __MODE__ // enable CSR names to be parameterized. 
.if ((\__MODE__\() == S) | (\__MODE__\() == V)) - .set CSR_XSTATUS, CSR_SSTATUS - .set CSR_XEDELEG, CSR_SEDELEG - .set CSR_XIE, CSR_SIE - .set CSR_XIP, CSR_SIP - .set CSR_XCAUSE, CSR_SCAUSE - .set CSR_XEPC, CSR_SEPC - .set CSR_XSATP, CSR_SATP + .set CSR_XSTATUS, CSR_SSTATUS + .set CSR_XEDELEG, CSR_SEDELEG + .set CSR_XIE, CSR_SIE + .set CSR_XIP, CSR_SIP + .set CSR_XCAUSE, CSR_SCAUSE + .set CSR_XEPC, CSR_SEPC + .set CSR_XSATP, CSR_SATP .set CSR_XSCRATCH,CSR_SSCRATCH - .set CSR_XTVAL, CSR_STVAL - .set CSR_XTVEC, CSR_STVEC + .set CSR_XTVAL, CSR_STVAL + .set CSR_XTVEC, CSR_STVEC .else .set CSR_XSTATUS, CSR_MSTATUS .set CSR_XEDELEG, CSR_MEDELEG - .set CSR_XIE, CSR_MIE - .set CSR_XIP, CSR_MIP - .set CSR_XCAUSE, CSR_MCAUSE - .set CSR_XEPC, CSR_MEPC - .set CSR_XSATP, CSR_SATP + .set CSR_XIE, CSR_MIE + .set CSR_XIP, CSR_MIP + .set CSR_XCAUSE, CSR_MCAUSE + .set CSR_XEPC, CSR_MEPC + .set CSR_XSATP, CSR_SATP .set CSR_XSCRATCH,CSR_MSCRATCH - .set CSR_XTVAL, CSR_MTVAL - .set CSR_XTVEC, CSR_MTVEC + .set CSR_XTVAL, CSR_MTVAL + .set CSR_XTVEC, CSR_MTVEC .endif .endm /******************************************************************************/ -/**** this is a helper macro that creates CSR aliases so code that ****/ -/**** accesses CSRs when V=1in different modes can share the code ****/ +/**** this is a helper macro that creates CSR aliases so code that ****/ +/**** accesses CSRs when V=1in different modes can share the code ****/ /******************************************************************************/ - .macro XCSR_VRENAME __MODE__ // enable CSR names to be parameterized, with H/V,S aliasing + .macro XCSR_VRENAME __MODE__ // enable CSR names to be parameterized, with H/V,S aliasing .if (\__MODE__\() == S) .set CSR_XSTATUS, CSR_SSTATUS .set CSR_XEDELEG, CSR_SEDELEG - .set CSR_XIE, CSR_SIE - .set CSR_XIP, CSR_SIP - .set CSR_XCAUSE, CSR_SCAUSE - .set CSR_XEPC, CSR_SEPC - .set CSR_XSATP, CSR_SATP + .set CSR_XIE, CSR_SIE + .set CSR_XIP, CSR_SIP + .set CSR_XCAUSE, CSR_SCAUSE + 
.set CSR_XEPC, CSR_SEPC + .set CSR_XSATP, CSR_SATP .set CSR_XSCRATCH,CSR_SSCRATCH - .set CSR_XTVAL, CSR_STVAL - .set CSR_XTVEC, CSR_STVEC + .set CSR_XTVAL, CSR_STVAL + .set CSR_XTVEC, CSR_STVEC .elseif (\__MODE__\() == V) .set CSR_XSTATUS, CSR_HSTATUS .set CSR_XEDELEG, CSR_HEDELEG - .set CSR_XIE, CSR_HIE - .set CSR_XIP, CSR_HIP - .set CSR_XCAUSE, CSR_VSCAUSE - .set CSR_XEPC, CSR_VSEPC - .set CSR_XSATP, CSR_VSATP + .set CSR_XIE, CSR_HIE + .set CSR_XIP, CSR_HIP + .set CSR_XCAUSE, CSR_VSCAUSE + .set CSR_XEPC, CSR_VSEPC + .set CSR_XSATP, CSR_VSATP .set CSR_XSCRATCH,CSR_VSSCRATCH - .set CSR_XTVAL, CSR_VSTVAL - .set CSR_XTVEC, CSR_VSTVEC + .set CSR_XTVAL, CSR_VSTVAL + .set CSR_XTVEC, CSR_VSTVEC .else .set CSR_XSTATUS, CSR_MSTATUS .set CSR_XEDELEG, CSR_MEDELEG - .set CSR_XIE, CSR_MIE - .set CSR_XIP, CSR_MIP - .set CSR_XCAUSE, CSR_MCAUSE - .set CSR_XEPC, CSR_MEPC - .set CSR_XSATP, CSR_SATP + .set CSR_XIE, CSR_MIE + .set CSR_XIP, CSR_MIP + .set CSR_XCAUSE, CSR_MCAUSE + .set CSR_XEPC, CSR_MEPC + .set CSR_XSATP, CSR_SATP .set CSR_XSCRATCH,CSR_MSCRATCH - .set CSR_XTVAL, CSR_MTVAL - .set CSR_XTVEC, CSR_MTVEC + .set CSR_XTVAL, CSR_MTVAL + .set CSR_XTVEC, CSR_MTVEC .endif .endm //////////////////////////////////////////////////////////////////////////////////////// //**** This is a helper macro that saves GPRs. Normally used only inside CODE_END ****// //**** Note: this needs a temp scratch register, & there isn't anything that will ****// -//**** will work, so we always trash some register, determined by macro param ****// +//**** will work, so we always trash some register, determined by macro param ****// //**** NOTE: Only be use for debug! 
Xregs containing addresses won't be relocated ****// //////////////////////////////////////////////////////////////////////////////////////// -.macro RVTEST_SAVE_GPRS BASEREG REG_SV_ADDR // optionally save GPRs +.macro RVTEST_SAVE_GPRS BASEREG REG_SV_ADDR // optionally save GPRs .option push .option norvc .set offset,0 - LA( \BASEREG, \REG_SV_ADDR) //this destroys basereg, but saves rest + LA( \BASEREG, \REG_SV_ADDR) //this destroys basereg, but saves rest SIGUPD( \BASEREG, x1) SIGUPD( \BASEREG, x2) SIGUPD( \BASEREG, x3) @@ -571,42 +579,42 @@ /********************* REQUIRED FOR NEW TESTS *************************/ /**** new macro encapsulating RVMODEL_DATA_BEGIN (signature area) ****/ /**** defining rvtest_sig_begin: label to enabling direct stores ****/ -/**** into the signature area to be properly relocated ****/ +/**** into the signature area to be properly relocated ****/ /**********************************************************************/ -#define RVTEST_SIG_BEGIN ;\ -.global rvtest_sig_begin /* defines beginning of signature area */ ;\ - ;\ - RVMODEL_DATA_BEGIN /* model specific stuff */ ;\ +#define RVTEST_SIG_BEGIN ;\ +.global rvtest_sig_begin /* defines beginning of signature area */ ;\ + ;\ + RVMODEL_DATA_BEGIN /* model specific stuff */ ;\ /* DFLT_TRAP_SIG is defined to simplify the tests. 
Tests that don't define mtrap_routine don't need 64 words of trap signature; they only need 3 at most, because any unexpected traps will write 4 words and overwrite the canary and stop the test after the 4rd word is written */ - -#define DFLT_TRAP_SIG ;\ -#ifndef rvtest_mtrap_routine ;\ - tsig_begin_canary: CANARY; ;\ - mtrap_sigptr: .fill 3*(XLEN/32),4,0xdeadbeef ;\ - tsig_end_canary: CANARY; ;\ -#endif + +#define DFLT_TRAP_SIG ;\ +#ifndef rvtest_mtrap_routine ;\ + tsig_begin_canary: CANARY; ;\ + mtrap_sigptr: .fill 3*(XLEN/32),4,0xdeadbeef ;\ + tsig_end_canary: CANARY; ;\ +#endif // tests allocate normal signature space here, then define // the mtrap_sigptr: label to separate normal and trap // signature space, then allocate trap signature space /********************* REQUIRED FOR NEW TESTS *************************/ -/**** new macro encapsulating RVMODEL_DATA_END (signature area) ****/ -/**** defining rvtest_sig_end: label to enabling direct stores ****/ -/**** into the signature area to be properLY relocated ****/ +/**** new macro encapsulating RVMODEL_DATA_END (signature area) ****/ +/**** defining rvtest_sig_end: label to enabling direct stores ****/ +/**** into the signature area to be properLY relocated ****/ /**********************************************************************/ -#define RVTEST_SIG_END ;\ -.global rvtest_sig_end /* defines end of signature area */ ;\ -DFLT_TRAP_SIG ;\ - #ifdef rvtest_gpr_save ;\ - gpr_save: .fill 32*(XLEN/32),4,0xdeadbeef ;\ - #endif ;\ -sig_end_canary: CANARY; /* add one extra word of guardband */ ;\ -rvtest_sig_end: ;\ -RVMODEL_DATA_END /* model specific stuff */ +#define RVTEST_SIG_END ;\ +.global rvtest_sig_end /* defines end of signature area */ ;\ +DFLT_TRAP_SIG ;\ + #ifdef rvtest_gpr_save ;\ + gpr_save: .fill 32*(XLEN/32),4,0xdeadbeef ;\ + #endif ;\ +sig_end_canary: CANARY; /* add one extra word of guardband */ ;\ +rvtest_sig_end: ;\ +RVMODEL_DATA_END /* model specific stuff */ //#define rvtest_sig_sz 
(rvtest_sig_end - rvtest_sig_begin) not currently used /***************************************************************************************/ @@ -615,117 +623,117 @@ RVMODEL_DATA_END /* model specific stuff */ /**** trampoline. This either falls through if already in Mmode, or traps to mmode ****/ /**** because the prolog cleared all edeleg CSRs. This code makes no assumptions ****/ /**** about the current operating mode. and assumes that xedeleg[illegal_op]==0 to ****/ -/**** prevent an infinite delegation loop. This will overwrite t1..t6 ****/ +/**** prevent an infinite delegation loop. This will overwrite t1..t6 ****/ /**** NOTE: that any test that sets xedeleg[illegal_op] must replace mode_rtn_inst ****/ -/**** with an op that causes a different exception cause that isn't delegated. ****/ -/**** NOTE: this will overwrite non-Mmode trap status CSRs ****/ -/****FIXME - check that SATP and VSATP point to the identity map page table ****/ +/**** with an op that causes a different exception cause that isn't delegated. 
****/ +/**** NOTE: this will overwrite non-Mmode trap status CSRs ****/ +/****FIXME - check that SATP and VSATP point to the identity map page table ****/ /***************************************************************************************/ -// #define TESTCODE_ONLY_TVAL_ILLOP //FIXME comment this out when Sail fixes illop +// #define TESTCODE_ONLY_TVAL_ILLOP //FIXME comment this out when Sail fixes illop -.macro RVTEST_GOTO_MMODE -#ifdef rvtest_mtrap_routine /**** FIXME this can be empty if no Umode ****/ +.macro RVTEST_GOTO_MMODE +#ifdef rvtest_mtrap_routine /**** FIXME this can be empty if no Umode ****/ .option push .option norvc -#ifdef rvtest_strap_routine /* assume save are is in order M/S/V */ - LA( t1, Mtrapreg_sv) /* addr of next higher priv mode save area */ - lw t2, mode_rtn_inst(t1) /* get replacement op: jr 8(t2) */ +#ifdef rvtest_strap_routine /* assume save are is in order M/S/V */ + LA( t1, Mtrapreg_sv) /* addr of next higher priv mode save area */ + lw t2, mode_rtn_inst(t1) /* get replacement op: jr 8(t2) */ - LREG t3, xtvec_new_addr(t1) /* get mtvec value from save area */ - lw t5, 0(t3) /* get tramp entry br inst, pted to by mtvec */ - sw t2, 0(t3) /* overwrite it w/ replacement op */ + LREG t3, xtvec_new_addr(t1) /* get mtvec value from save area */ + lw t5, 0(t3) /* get tramp entry br inst, pted to by mtvec */ + sw t2, 0(t3) /* overwrite it w/ replacement op */ #ifdef rvtest_vtrap_routine - LREG t4, xtvec_new_addr+Msv_area_sz(t1) /* get stvec value from save area */ - lw t6, 0(t4) /* get tramp entry br inst, pted to by stvec */ - sw t2, 0(t4) /* overwrite it w/ replacement op */ + LREG t4, xtvec_new_addr+Msv_area_sz(t1) /* get stvec value from save area */ + lw t6, 0(t4) /* get tramp entry br inst, pted to by stvec */ + sw t2, 0(t4) /* overwrite it w/ replacement op */ #endif #endif - RVMODEL_FENCEI /* orig tramp entries now in t5,t6 for S,V */ + RVMODEL_FENCEI /* orig tramp entries now in t5,t6 for S,V */ /****FIXME**** this must relocate 
the PC to the equivalent Mmode physical address */ - auipc t2, 0 /* set rtnaddr base= GOTO_M_OP-4 (reuse t2) */ + auipc t2, 0 /* set rtnaddr base= GOTO_M_OP-4 (reuse t2) */ // Note that if illegal op trap is delegated , this may infinite loop // The solution is either for test to disable delegation, or to // redefine the GOTO_M_OP to be an op that will trap vertically - GOTO_M_OP /* illegal op if < Mmode , else falls through*/ - /* if delegated to S/ V, it rtns here&re-trap*/ -#ifdef rvtest_strap_routine /* fallthru to here when csrr succeeds */ - sw t5, 0(t3) /* restore old smode trampoline head inst */ + GOTO_M_OP /* illegal op if < Mmode , else falls through*/ + /* if delegated to S/ V, it rtns here&re-trap*/ +#ifdef rvtest_strap_routine /* fallthru to here when csrr succeeds */ + sw t5, 0(t3) /* restore old smode trampoline head inst */ #ifdef rvtest_vtrap_routine - sw t6, 0(t4) /* restore old vsmode trampoline head inst */ + sw t6, 0(t4) /* restore old vsmode trampoline head inst */ #endif #endif - RVMODEL_FENCEI /* ensure tramp ifetches get new code */ - /* if in a mode w/ MMU enabled, req VA==PA */ + RVMODEL_FENCEI /* ensure tramp ifetches get new code */ + /* if in a mode w/ MMU enabled, req VA==PA */ .option pop -#endif /* not implemented if no trap */ +#endif /* not implemented if no trap */ .endm /**** This is a helper macro that causes harts to transition from ****/ /**** M-mode to a lower priv mode. Legal params are VS,HS,VU,HU,S,U. ****/ -/**** The H,U variations leave V unchanged. This uses t4 only. ****/ +/**** The H,U variations leave V unchanged. This uses t4 only. ****/ /**** NOTE: this MUST be executed in M-mode. 
Precede with GOTO_MMODE ****/ /**** FIXME - SATP & VSATP must point to the identity map page table ****/ -#define MPV_LSB 7 // bit pos of prev vmod mstatush.MPV in either mstatush or upper half of mstatus -#define HSmode 0x9 -#define HUmode 0x8 -#define VUmode 0x4 -#define VSmode 0x5 -#define Smode 0x1 -#define Umode 0x0 +#define MPV_LSB 7 // bit pos of prev vmod mstatush.MPV in either mstatush or upper half of mstatus +#define HSmode 0x9 +#define HUmode 0x8 +#define VUmode 0x4 +#define VSmode 0x5 +#define Smode 0x1 +#define Umode 0x0 .macro RVTEST_GOTO_LOWER_MODE LMODE .option push .option norvc - // first, clear MSTATUS.PP (and .MPV if it will be changed_ - // then set them to the values that represent the lower mode + // first, clear MSTATUS.PP (and .MPV if it will be changed_ + // then set them to the values that represent the lower mode .if (XLEN==32) - .if ((\LMODE\()==VUmode) | (\LMODE\()==VSmode)) - csrsi CSR_MSTATUS, MSTATUS_MPV /* set V */ + .if ((\LMODE\()==VUmode) | (\LMODE\()==VSmode)) + csrsi CSR_MSTATUS, MSTATUS_MPV /* set V */ .elseif ((\LMODE\()==HUmode) | (\LMODE\()==HSmode)) - csrci CSR_MSTATUS, MSTATUS_MPV /* clr V */ - .endif /* lv V unchged for S or U */ + csrci CSR_MSTATUS, MSTATUS_MPV /* clr V */ + .endif /* lv V unchged for S or U */ - LI( t4, MSTATUS_MPP) - csrc CSR_MSTATUS, t4 /* clr PP always */ + LI( t4, MSTATUS_MPP) + csrc CSR_MSTATUS, t4 /* clr PP always */ - .if ((\LMODE\()==VSmode) | (\LMODE\()==HSmode) | (\LMODE\()==Smode)) - LI( t4, MPP_SMODE) /* val for Smode */ - csrs CSR_MSTATUS, t4 /* set in PP */ + .if ((\LMODE\()==VSmode) | (\LMODE\()==HSmode) | (\LMODE\()==Smode)) + LI( t4, MPP_SMODE) /* val for Smode */ + csrs CSR_MSTATUS, t4 /* set in PP */ .endif - // do the same if XLEN=64 -.else /* XLEN=64, maybe 128? FIXME for 128 */ - .if ((\LMODE\()==Smode) | (\LMODE\()==Umode)) /* lv V unchanged here */ - LI( t4, MSTATUS_MPP) /* but always clear PP */ + // do the same if XLEN=64 +.else /* XLEN=64, maybe 128? 
FIXME for 128 */ + .if ((\LMODE\()==Smode) | (\LMODE\()==Umode)) /* lv V unchanged here */ + LI( t4, MSTATUS_MPP) /* but always clear PP */ .else - LI( t4, (MSTATUS_MPP | MSTATUS_MPV)) /* clr V and P */ + LI( t4, (MSTATUS_MPP | MSTATUS_MPV)) /* clr V and P */ .endif - csrc CSR_MSTATUS, t4 /* clr PP to umode & maybe Vmode */ + csrc CSR_MSTATUS, t4 /* clr PP to umode & maybe Vmode */ - .if (!((\LMODE\()==HUmode) | (\LMODE\()==Umode))) /* lv pp unchged, v=0 or unchged */ - .if (\LMODE\()==VSmode) - LI( t4, (MPP_SMODE | MSTATUS_MPV)) /* val for pp & v */ + .if (!((\LMODE\()==HUmode) | (\LMODE\()==Umode))) /* lv pp unchged, v=0 or unchged */ + .if (\LMODE\()==VSmode) + LI( t4, (MPP_SMODE | MSTATUS_MPV)) /* val for pp & v */ .elseif ((\LMODE\()==HSmode) | (\LMODE\()==Smode)) - LI( t4, (MPP_SMODE)) /* val for pp only */ - .else /* only VU left; set MPV only */ - li t4, 1 /* optimize for single bit */ - slli t4, t4, 32+MPV_LSB /* val for v only */ + LI( t4, (MPP_SMODE)) /* val for pp only */ + .else /* only VU left; set MPV only */ + li t4, 1 /* optimize for single bit */ + slli t4, t4, 32+MPV_LSB /* val for v only */ .endif - csrs CSR_MSTATUS, t4 /* set correct mode and Vbit */ + csrs CSR_MSTATUS, t4 /* set correct mode and Vbit */ .endif .endif - /**** mstatus MPV and PP now set up to desired mode ****/ - /**** set MEPC to mret+4; requires an identity mapping ****/ - auipc t4, 0 - addi t4, t4, 16 - csrrw t4, CSR_MEPC, t4 /* set rtn addr to mret+4 */ - mret /* transition to desired mode */ + /**** mstatus MPV and PP now set up to desired mode ****/ + /**** set MEPC to mret+4; requires an identity mapping ****/ + auipc t4, 0 + addi t4, t4, 16 + csrrw t4, CSR_MEPC, t4 /* set rtn addr to mret+4 */ + mret /* transition to desired mode */ .option pop .endm @@ -737,75 +745,75 @@ RVMODEL_DATA_END /* model specific stuff */ #ifndef RVMODEL_SET_MSW_INT // #warning: "RVMODEL_SET_MSW_INT is not defined by target. 
Executing this will end test" .macro RVMODEL_SET_MSW_INT - j cleanup_epilogs + j cleanup_epilogs .endm #endif #ifndef RVMODEL_CLEAR_MSW_INT // #warning: "RVMODEL_CLEAR_MSW_INT is not defined by target. Executing this will end test" .macro RVMODEL_CLEAR_MSW_INT - j cleanup_epilogs + j cleanup_epilogs .endm #endif #ifndef RVMODEL_CLEAR_MTIMER_INT // #warning: "RVMODEL_CLEAR_MTIMER_INT is not defined by target. Executing this will end test" .macro RVMODEL_CLEAR_MTIMER_INT - j cleanup_epilogs + j cleanup_epilogs .endm #endif #ifndef RVMODEL_CLEAR_MEXT_INT // #warning: "RVMODEL_CLEAR_MEXT_INT is not defined by target. Executing this will end test" .macro RVMODEL_CLEAR_MEXT_INT - j cleanup_epilogs + j cleanup_epilogs .endm #endif #ifndef RVMODEL_SET_SSW_INT // #warning: "RVMODEL_SET_SSW_INT is not defined by target. Executing this will end test" .macro RVMODEL_SET_SSW_INT - j cleanup_epilogs + j cleanup_epilogs .endm #endif #ifndef RVMODEL_CLEAR_SSW_INT // #warning: "RVMODEL_CLEAR_SSW_INT is not defined by target. Executing this will end test" .macro RVMODEL_CLEAR_SSW_INT - j cleanup_epilogs + j cleanup_epilogs .endm #endif #ifndef RVMODEL_CLEAR_STIMER_INT // #warning: "RVMODEL_CLEAR_STIMER_INT is not defined by target. Executing this will end test" .macro RVMODEL_CLEAR_STIMER_INT - j cleanup_epilogs + j cleanup_epilogs .endm #endif #ifndef RVMODEL_CLEAR_SEXT_INT // #warning: "RVMODEL_CLEAR_SEXT_INT is not defined by target. Executing this will end test" .macro RVMODEL_CLEAR_SEXT_INT - j cleanup_epilogs + j cleanup_epilogs .endm #endif #ifndef RVMODEL_SET_VSW_INT // #warning: "RVMODEL_SET_VSW_INT is not defined by target. Executing this will end test" .macro RVMODEL_SET_VSW_INT - j cleanup_epilogs + j cleanup_epilogs .endm #endif #ifndef RVMODEL_CLEAR_VSW_INT // #warning: "RVMODEL_CLEAR_VSW_INT is not defined by target. 
Executing this will end test" .macro RVMODEL_CLEAR_VSW_INT - j cleanup_epilogs + j cleanup_epilogs .endm #endif #ifndef RVMODEL_CLEAR_VTIMER_INT // #warning: "RVMODEL_CLEAR_VTIMER_INT is not defined by target. Executing this will end test" .macro RVMODEL_CLEAR_VTIMER_INT - j cleanup_epilogs + j cleanup_epilogs .endm #endif #ifndef RVMODEL_CLEAR_VEXT_INT // #warning: "RVMODEL_CLEAR_VEXT_INT is not defined by target. Executing this will end test" .macro RVMODEL_CLEAR_VEXT_INT - j cleanup_epilogs + j cleanup_epilogs .endm #endif @@ -817,18 +825,18 @@ RVMODEL_DATA_END /* model specific stuff */ // The helper INSTANTIATE_MODE_MACRO actually handles the replication //============================================================================== -.macro RVTEST_TRAP_PROLOG __MODE__ +.macro RVTEST_TRAP_PROLOG __MODE__ .option push .option norvc /******************************************************************************/ /**** this is a mode-configured version of the prolog, which either saves and */ - /**** replaces xtvec, or saves and replaces the code located at xtvec if it */ - /**** it xtvec isn't arbitrarily writable. If not writable, restore & exit */ + /**** replaces xtvec, or saves and replaces the code located at xtvec if it */ + /**** it xtvec isn't arbitrarily writable. If not writable, restore & exit */ /******************************************************************************/ /******************************************************************************/ - /**** Prolog, to be run before any tests ****/ - /**** #include 1 copy of this per mode in rvmodel_boot code? ****/ + /**** Prolog, to be run before any tests ****/ + /**** #include 1 copy of this per mode in rvmodel_boot code? ****/ /**** ------------------------------------------------------------------- ****/ /**** if xTVEC isn't completely RW, then we need to change the code at its ****/ /**** target. 
The entire trap trampoline and mtrap handler replaces the ****/ @@ -843,100 +851,100 @@ RVMODEL_DATA_END /* model specific stuff */ .global \__MODE__\()trampoline //.global mtrap_sigptr - XCSR_VRENAME \__MODE__ //retarget XCSR names to this modes CSRs, separate V/S copies + XCSR_VRENAME \__MODE__ //retarget XCSR names to this modes CSRs, separate V/S copies - LA( t1, \__MODE__\()trapreg_sv) // get pointer to save area + LA( t1, \__MODE__\()trapreg_sv) // get pointer to save area //---------------------------------------------------------------------- init_\__MODE__\()scratch: - csrrw t3, CSR_XSCRATCH, t1 // swap xscratch with save area ptr (will be used by handler) - SREG t3, xscr_save_addr(t1) // save old mscratch in xscratch_save + csrrw t3, CSR_XSCRATCH, t1 // swap xscratch with save area ptr (will be used by handler) + SREG t3, xscr_save_addr(t1) // save old mscratch in xscratch_save //---------------------------------------------------------------------- init_\__MODE__\()edeleg: - li t2, 0 // save and clear edeleg so we can exit to Mmode -.if (\__MODE__\() == V) - csrrw t2, CSR_VEDELEG, t2 // special case: VS EDELEG available from Vmode -.elseif (\__MODE__\() == M) + li t2, 0 // save and clear edeleg so we can exit to Mmode +.if (\__MODE__\() == V) + csrrw t2, CSR_VEDELEG, t2 // special case: VS EDELEG available from Vmode +.elseif (\__MODE__\() == M) #ifdef rvtest_strap_routine - csrrw t2, CSR_XEDELEG, t2 // this handles M mode save, but only if Smode exists + csrrw t2, CSR_XEDELEG, t2 // this handles M mode save, but only if Smode exists #endif .else //FIXME: if N-extension or anything like it is implemented, uncomment the following -// csrrw t2, CSR_XEDELEG, t2 // this handles S mode +// csrrw t2, CSR_XEDELEG, t2 // this handles S mode .endif - SREG t2, xedeleg_sv_addr(t1) // now do the save + SREG t2, xedeleg_sv_addr(t1) // now do the save //---------------------------------------------------------------------- init_\__MODE__\()satp: -.if (\__MODE__\() != 
M) // if S or VS mode - LA( t4, rvtest_\__MODE__\()root_pg_tbl) // rplc xsatp w/ identity-mapped pg table **FIXME: fixed offset frm trapreg_sv? - csrrw t4, CSR_XSATP, t4 - SREG t4, xsatp_sv_addr(t1) +.if (\__MODE__\() != M) // if S or VS mode + LA( t4, rvtest_\__MODE__\()root_pg_tbl) // rplc xsatp w/ identity-mapped pg table **FIXME: fixed offset frm trapreg_sv? + csrrw t4, CSR_XSATP, t4 + SREG t4, xsatp_sv_addr(t1) .endif //---------------------------------------------------------------------- init_\__MODE__\()tvec: - LA( t4, \__MODE__\()trampoline) //this is a code-relative pointer - csrr t3, CSR_XTVEC - SREG t3, xtvec_sav_addr(t1) // save orig mtvec+mode in tvec_save - andi t2, t3, 3 // merge .mode & tramp ptr and store to both XTVEC, tvec_new - or t2, t4, t2 - SREG t2, xtvec_new_addr(t1) - csrw CSR_XTVEC, t2 // write xtvec with trap_trampoline+mode, so trap will go to the trampoline - - csrr t5, CSR_XTVEC // now read new_mtval back & make sure we could write it + LA( t4, \__MODE__\()trampoline) //this is a code-relative pointer + csrr t3, CSR_XTVEC + SREG t3, xtvec_sav_addr(t1) // save orig mtvec+mode in tvec_save + andi t2, t3, 3 // merge .mode & tramp ptr and store to both XTVEC, tvec_new + or t2, t4, t2 + SREG t2, xtvec_new_addr(t1) + csrw CSR_XTVEC, t2 // write xtvec with trap_trampoline+mode, so trap will go to the trampoline + + csrr t5, CSR_XTVEC // now read new_mtval back & make sure we could write it #ifndef HANDLER_TESTCODE_ONLY - beq t5, t2, rvtest_\__MODE__\()prolog_done // if mtvec==trap_trampoline, mtvec is writable, continue + beq t5, t2, rvtest_\__MODE__\()prolog_done // if mtvec==trap_trampoline, mtvec is writable, continue #endif - csrw CSR_XTVEC, t3 // xTVEC not completely writable, restore old value & exit if uninitialized - beqz t3, abort\__MODE__\()test - SREG t3, xtvec_new_addr(t1) // else update tvect_new with orig mtvec + csrw CSR_XTVEC, t3 // xTVEC not completely writable, restore old value & exit if uninitialized + beqz t3, 
abort\__MODE__\()test + SREG t3, xtvec_new_addr(t1) // else update tvect_new with orig mtvec /*****************************************************************/ - /**** fixed mtvec, can't move it so move trampoline instead ****/ - /**** t1=tramp sv, t2=orig tvec, t3=sv end, t4=tramp ****/ + /**** fixed mtvec, can't move it so move trampoline instead ****/ + /**** t1=tramp sv, t2=orig tvec, t3=sv end, t4=tramp ****/ /*****************************************************************/ -init_\__MODE__\()tramp: /**** copy trampoline at mtvec tgt; t4->t2->t1 t3=end of save ****/ - andi t2, t3, -4 // calc bgn of orig tramp area by rmving mode bits - addi t3, t2, tramp_sz // calc end of orig tramp area (+4) - addi t1, t1, tramp_sv_addr // calc bgn of tramp save area +init_\__MODE__\()tramp: /**** copy trampoline at mtvec tgt; t4->t2->t1 t3=end of save ****/ + andi t2, t3, -4 // calc bgn of orig tramp area by rmving mode bits + addi t3, t2, tramp_sz // calc end of orig tramp area (+4) + addi t1, t1, tramp_sv_addr // calc bgn of tramp save area //---------------------------------------------------------------------- - overwt_tt_\__MODE__\()loop: // now build new tramp table w/ local offsets - lw t6, 0(t2) // move original mtvec target to save area - sw t6, 0(t1) - lw t5, 0(t4) // move traphandler trampoline into orig mtvec target - sw t5, 0(t2) - lw t6, 0(t2) // rd it back to make sure it was written - bne t6, t5, endcopy_\__MODE__\()tramp // table isn't fully writable, restore and give up + overwt_tt_\__MODE__\()loop: // now build new tramp table w/ local offsets + lw t6, 0(t2) // move original mtvec target to save area + sw t6, 0(t1) + lw t5, 0(t4) // move traphandler trampoline into orig mtvec target + sw t5, 0(t2) + lw t6, 0(t2) // rd it back to make sure it was written + bne t6, t5, endcopy_\__MODE__\()tramp // table isn't fully writable, restore and give up #ifdef HANDLER_TESTCODE_ONLY - csrr t5, CSR_XSCRATCH // load trapreg_sv from scratch - addi t5, t5,256 // 
calculate some offset into the save area - bgt t5, t1, endcopy_\__MODE__\()tramp // and pretend if couldnt be written + csrr t5, CSR_XSCRATCH // load trapreg_sv from scratch + addi t5, t5,256 // calculate some offset into the save area + bgt t5, t1, endcopy_\__MODE__\()tramp // and pretend if couldnt be written #endif - addi t2, t2, 4 // next tvec inst. index - addi t1, t1, 4 // next save inst. index - addi t4, t4, 4 // next tramp inst. index - bne t3, t2, overwt_tt_\__MODE__\()loop // haven't reached end of save area, loop + addi t2, t2, 4 // next tvec inst. index + addi t1, t1, 4 // next save inst. index + addi t4, t4, 4 // next tramp inst. index + bne t3, t2, overwt_tt_\__MODE__\()loop // haven't reached end of save area, loop //---------------------------------------------------------------------- - endcopy_\__MODE__\()tramp: // vector table not writeable, restore - RVMODEL_FENCEI // make sure ifetches get new code - csrr t1, CSR_XSCRATCH // reload trapreg_sv from scratch - sw t2, trampend_addr(t1) // save copy progress - beq t3,t2, rvtest_\__MODE__\()prolog_done //full loop, don't exit + endcopy_\__MODE__\()tramp: // vector table not writeable, restore + RVMODEL_FENCEI // make sure ifetches get new code + csrr t1, CSR_XSCRATCH // reload trapreg_sv from scratch + sw t2, trampend_addr(t1) // save copy progress + beq t3,t2, rvtest_\__MODE__\()prolog_done //full loop, don't exit abort\__MODE__\()test: - LA( t6, exit_\__MODE__\()cleanup) // trampoline rplc failure **FIXME: precalc& put into savearea? - jalr x0, t6 // this branch may be too far away, so longjmp + LA( t6, exit_\__MODE__\()cleanup) // trampoline rplc failure **FIXME: precalc& put into savearea? 
+ jalr x0, t6 // this branch may be too far away, so longjmp rvtest_\__MODE__\()prolog_done: .option pop .endm /*******************************************************************************/ -/*************** end of prolog macro ************/ +/*************** end of prolog macro ************/ /*******************************************************************************/ .macro RVTEST_TRAP_HANDLER __MODE__ .option push -.option rvc // temporarily allow compress to allow c.nop alignment -.align 6 // ensure that a trampoline is on a typical cacheline boundary, just in case +.option rvc // temporarily allow compress to allow c.nop alignment +.align 6 // ensure that a trampoline is on a typical cacheline boundary, just in case .option pop /**********************************************************************/ @@ -949,159 +957,165 @@ rvtest_\__MODE__\()prolog_done: /**** to a return for anything above that (which causes a mismatch)****/ /**********************************************************************/ - XCSR_RENAME \__MODE__ //retarget XCSR names to this modes CSRs + XCSR_RENAME \__MODE__ //retarget XCSR names to this modes CSRs -.global \__MODE__\()trampoline // define the label and make it available +.global \__MODE__\()trampoline // define the label and make it available .global common_\__MODE__\()entry .option push .option norvc \__MODE__\()trampoline: - .set value, 0 - .rept NUM_SPECD_INTCAUSES // located at each possible int vectors - j trap_\__MODE__\()handler+ value // offset < +/- 1MB - .set value, value + 12 // length of xhandler trampoline spreader code + .set value, 0 + .rept NUM_SPECD_INTCAUSES // located at each possible int vectors + j trap_\__MODE__\()handler+ value // offset < +/- 1MB + .set value, value + 12 // length of xhandler trampoline spreader code .endr - .rept XLEN-NUM_SPECD_INTCAUSES // fill at each impossible entry - j rvtest_\__MODE__\()endtest // end test if this happens + .rept XLEN-NUM_SPECD_INTCAUSES // fill at each 
impossible entry + j rvtest_\__MODE__\()endtest // end test if this happens .endr /*********************************************************************/ - /**** this is spreader stub array; it saves enough info (sp & ****/ + /**** this is spreader stub array; it saves enough info (sp & ****/ /**** vec-offset) to enable branch to common routine to save rest ****/ /*********************************************************************/ /**** !!CSR_xSCRATCH is preloaded w/ xtrapreg_sv in init_xscratch:****/ - trap_\__MODE__\()handler: // on exit sp swapped w/ save ptr, t6 is vector addr + trap_\__MODE__\()handler: // on exit sp swapped w/ save ptr, t6 is vector addr .rept NUM_SPECD_INTCAUSES - csrrw sp, CSR_XSCRATCH, sp // save sp, replace w/trapreg_sv regtmp save ptr - SREG t6, 6*REGWIDTH(sp) // save t6 in temp save area offset 6 - jal t6, common_\__MODE__\()handler // jmp to common code, saving vector in t6 + csrrw sp, CSR_XSCRATCH, sp // save sp, replace w/trapreg_sv regtmp save ptr + SREG t6, 6*REGWIDTH(sp) // save t6 in temp save area offset 6 + jal t6, common_\__MODE__\()handler // jmp to common code, saving vector in t6 .endr - rvtest_\__MODE__\()endtest: // target may be too far away, so longjmp - LA (t1, rvtest_\__MODE__\()end) // FIXME: must be identity mapped if its a VA - jalr x0, t1 + rvtest_\__MODE__\()endtest: // target may be too far away, so longjmp + LA (t1, rvtest_\__MODE__\()end) // FIXME: must be identity mapped if its a VA + jalr x0, t1 /*********************************************************************/ /**** common code for all ints & exceptions, will fork to handle ****/ /**** each separately. The common handler first stores trap mode+ ****/ /**** vector, & mcause signatures. 
Most traps have 4wd sigs, but ****/ /**** sw and timer ints only store 3 of the 4, & some hypervisor ****/ - /**** traps will set store 6 ops ****/ - /**** sig offset Exception ExtInt SWInt TimerInt ****/ - /**** 0: <--------------------- Vect+mode ----------> ****/ - /**** 4: <---------------------- xcause -------------> ****/ - /**** 8: xepc <------------- xip --------------> ****/ - /**** 12: tval IntID <---- x ----------------> ****/ - /**** 16: tval2/x * <-------------- x ----------------> ****/ - /**** 20: tinst/x * <-------------- x ----------------> ****/ - /**** * only loaded for Mmode traps when hypervisor implemented ****/ + /**** traps will set store 6 ops ****/ + /**** sig offset Exception ExtInt SWInt TimerInt ****/ + /**** 0: <--------------------- Vect+mode ----------> ****/ + /**** 4: <---------------------- xcause -------------> ****/ + /**** 8: xepc <------------- xip --------------> ****/ + /**** 12: tval IntID <---- x ----------------> ****/ + /**** 16: tval2/x * <-------------- x ----------------> ****/ + /**** 20: tinst/x * <-------------- x ----------------> ****/ + /**** * only loaded for Mmode traps when hypervisor implemented ****/ /*********************************************************************/ - /* in general, CSRs loaded in t2, addresses into t3 */ + /* in general, CSRs loaded in t2, addresses into t3 */ - //If we can distinguish between HS and S mode, we can share S and V code. - //except for prolog code which needs to initialize CSRs, and the save area - //To do this, we need to read one of the CSRs (e.g. xSCRATCH) and compare - //it to either Strapreg_sv or Vtrapreg_sv to determine which it is. + //If we can distinguish between HS and S mode, we can share S and V code. + //except for prolog code which needs to initialize CSRs, and the save area + //To do this, we need to read one of the CSRs (e.g. xSCRATCH) and compare + //it to either Strapreg_sv or Vtrapreg_sv to determine which it is. 
-common_\__MODE__\()handler: // enter with vector addr in t6 (orig t6 is at offset 6*REGWIDTH) - SREG t5, 5*REGWIDTH(sp) // x30 save remaining regs, starting with t5 - csrrw t5, CSR_XSCRATCH, sp // restore ptr to reg sv area, and get old sp - SREG t5, 7*REGWIDTH(sp) // save old sp - auipc t5, 0 - addi t5, t5, 12 // quick calculation of common Xentry: label - jr t5 // needed if trampoline gets moved elsewhere, else it's effectively a noop +common_\__MODE__\()handler: // enter with vector addr in t6 (orig t6 is at offset 6*REGWIDTH) + SREG t5, 5*REGWIDTH(sp) // x30 save remaining regs, starting with t5 + csrrw t5, CSR_XSCRATCH, sp // restore ptr to reg sv area, and get old sp + SREG t5, 7*REGWIDTH(sp) // save old sp + auipc t5, 0 + addi t5, t5, 12 // quick calculation of common Xentry: label + jr t5 // needed if trampoline gets moved elsewhere, else it's effectively a noop common_\__MODE__\()entry: - SREG t4, 4*REGWIDTH(sp) //x29 - SREG t3, 3*REGWIDTH(sp) //x28 - SREG t2, 2*REGWIDTH(sp) //x7 - SREG t1, 1*REGWIDTH(sp) //x6 save other temporaries + SREG t4, 4*REGWIDTH(sp) //x29 + SREG t3, 3*REGWIDTH(sp) //x28 + SREG t2, 2*REGWIDTH(sp) //x7 + SREG t1, 1*REGWIDTH(sp) //x6 save other temporaries //------pre-update trap_sig pointer so handlers can themselves trap----- \__MODE__\()trapsig_ptr_upd: - li t2, 4*REGWIDTH // standard entry length - csrr t5, CSR_XCAUSE - bgez t5, \__MODE__\()xcpt_sig_sv // Keep std length if this is an exception for now (MSB==0) + li t2, 4*REGWIDTH // standard entry length + csrr t5, CSR_XCAUSE + bgez t5, \__MODE__\()xcpt_sig_sv // Keep std length if this is an exception for now (MSB==0) \__MODE__\()int_sig_sv: - slli t3, t5, 1 // remove MSB, cause<<1 - addi t3, t3, -(IRQ_M_TIMER)<<1 // is cause (w/o MSB) an extint or larger? ( (cause<<1) > (8<<1) )? 
- bgez t3, \__MODE__\()trap_sig_sv // yes, keep std length - li t2, 3*REGWIDTH // no, its a timer or swint, overrride preinc to 3*regsz - j \__MODE__\()trap_sig_sv + slli t3, t5, 1 // remove MSB, cause<<1 + addi t3, t3, -(IRQ_M_TIMER)<<1 // is cause (w/o MSB) an extint or larger? ( (cause<<1) > (8<<1) )? + bgez t3, \__MODE__\()trap_sig_sv // yes, keep std length + li t2, 3*REGWIDTH // no, its a timer or swint, overrride preinc to 3*regsz + j \__MODE__\()trap_sig_sv \__MODE__\()xcpt_sig_sv: -.if (\__MODE__\() == M) // exception case, don't adjust if hypervisor mode disabled - csrr t1, CSR_MISA - slli t1, t1, XLEN-8 // shift H bit into msb - bgez t1, \__MODE__\()trap_sig_sv // no hypervisor mode, keep std width - li t2, 6*REGWIDTH // Hmode implemented & Mmode trap, override prein to be 6*regsz +.if (\__MODE__\() == M) // exception case, don't adjust if hypervisor mode disabled + csrr t1, CSR_MISA + slli t1, t1, XLEN-8 // shift H bit into msb + bgez t1, \__MODE__\()trap_sig_sv // no hypervisor mode, keep std width + li t2, 6*REGWIDTH // Hmode implemented & Mmode trap, override prein to be 6*regsz .endif \__MODE__\()trap_sig_sv: - LA( t3, rvtest_trap_sig) // this is where curr trap signature pointer is stored + LA( t3, rvtest_trap_sig) // this is where curr trap signature pointer is stored //------this should be atomic------------------------------------- - LREG t1, 0(t3) // get the trap signature pointer (initialized to mtrap_sigptr) - add t4, t1, t2 // pre-inc pointer so nested traps don't corrupt signature queue - SREG t4, 0(t3) // and save new value (old value is still in t1) + LREG t1, 0(t3) // get the trap signature pointer (initialized to mtrap_sigptr) + add t4, t1, t2 // pre-inc pointer so nested traps don't corrupt signature queue + SREG t4, 0(t3) // and save new value (old value is still in t1) //------end atomic------------------------------------------------ - LREG t3, xtvec_new_addr(sp) // get pointer to actual tramp table + LREG t3, xtvec_new_addr(sp) // 
get pointer to actual tramp table //---------------------------------------------------------------- sv_\__MODE__\()vect: - sub t6, t6, t3 // cvt vec-addr in t6 to offset fm top of tramptable **FIXME: breaks if tramp crosses pg && MMU enabled - slli t6, t6, 3 // make room for 3 bits; MSBs aren't useful **FIXME: won't work for SV64!) - or t6, t6, t2 // insert entry size into bits 5:2 - addi t6, t6, \__MODE__\()MODE_SIG // insert mode# into 1:0 - SREG t6, 0*REGWIDTH(t1) // save 1st sig value, (vec-offset, entrysz, trapmode) + sub t6, t6, t3 // cvt vec-addr in t6 to offset fm top of tramptable **FIXME: breaks if tramp crosses pg && MMU enabled + slli t6, t6, 3 // make room for 3 bits; MSBs aren't useful **FIXME: won't work for SV64!) + or t6, t6, t2 // insert entry size into bits 5:2 + addi t6, t6, \__MODE__\()MODE_SIG // insert mode# into 1:0 + SREG t6, 0*REGWIDTH(t1) // save 1st sig value, (vec-offset, entrysz, trapmode) //---------------------------------------------------------------- sv_\__MODE__\()cause: - SREG t5, 1*REGWIDTH(t1) // save 2nd sig value, (mcause) + SREG t5, 1*REGWIDTH(t1) // save 2nd sig value, (mcause) //---------------------------------------------------------------- - bltz t5, common_\__MODE__\()int_handler // split off if this is an interrupt + bltz t5, common_\__MODE__\()int_handler // split off if this is an interrupt /********************************************************************/ /**** This is the exceptions specific code, storing relative mepc****/ /**** & relative tval signatures. tval is relocated by code or ****/ /**** data start, or 0 depending on mcause. 
mepc signature value ****/ /**** is relocated by code start, and restored bumped by 2..6B ****/ - /**** depending on op alignment so trapped op isn't re-executed ****/ + /**** depending on op alignment so trapped op isn't re-executed ****/ /********************************************************************/ common_\__MODE__\()excpt_handler: - csrr t2, CSR_XEPC + csrr t2, CSR_XEPC sv_\__MODE__\()epc: - LA(t3, rvtest_code_end) // Fetch the address of rvtest_code_end to compare against mepc - bge t2, t3, epc_data_\__MODE__\()adj // If mepc > rvtest_code_end, (outside the code memory) - // then trap came from data area and epc_data_\__MODE__\()adj will adjust to offset wrt to rvtest_data_begin - LA( t3, rvtest_code_begin) // Fetch the address of rvtest_code_begin to compare against mepc. - blt t2, t3, epc_data_\__MODE__\()adj // Check if mepc < rvtest_code_begin, (trap came from outside the code memory), - // then ask epc_data_\__MODE__\()adj to adjust the offset wrt data memory starting address - sub t4, t2, t3 // if mepc < rvtest_code_end, then offset will be adjusted wrt rvtest_code_begin - SREG t4, 2*REGWIDTH(t1) // save 3rd sig value, (normalized rel mepc) into trap signature area - j adj_\__MODE__\()epc + LA(t3, rvtest_code_end) // Fetch the address of rvtest_code_end to compare against mepc + bge t2, t3, epc_data_\__MODE__\()adj // If mepc > rvtest_code_end, (outside the code memory) + // then trap came from data area and epc_data_\__MODE__\()adj will adjust to offset wrt to rvtest_data_begin + LA( t3, rvtest_code_begin) // Fetch the address of rvtest_code_begin to compare against mepc. 
+ blt t2, t3, epc_data_\__MODE__\()adj // Check if mepc < rvtest_code_begin, (trap came from outside the code memory), + // then ask epc_data_\__MODE__\()adj to adjust the offset wrt data memory starting address + sub t4, t2, t3 // if mepc < rvtest_code_end, then offset will be adjusted wrt rvtest_code_begin + SREG t4, 2*REGWIDTH(t1) // save 3rd sig value, (normalized rel mepc) into trap signature area + j adj_\__MODE__\()epc epc_data_\__MODE__\()adj: - LA( t4, rvtest_data_begin) // Fetch rvtest_data_begin to adjust mepc offset against it - sub t4, t2, t4 // Offset adjustment - SREG t4, 2*REGWIDTH(t1) // save 3rd sig value, (normalized rel mepc) into trap signature area - -adj_\__MODE__\()epc: // adj mepc so there is at least 4B of padding after op - andi t6, t2, -0x4 // adjust mepc to prev 4B alignment (if 2B aligned) - addi t6, t6, 0x8 // adjust mepc so it skips past op, has padding & 4B aligned - csrw CSR_XEPC, t6 // restore adjusted value, w/ 2,4 or 6B of padding - - /****WARNING needs updating when insts>32b are ratified, only 4 or 6B of padding; for 64b insts, 2B or 4B of padding ****/ + LA( t4, rvtest_data_begin) // Fetch rvtest_data_begin to adjust mepc offset against it + blt t2, t4, epc_between_data_code_\__MODE__\()adj // Trap came in between rvtest_code_end & rvtest_data_begin + sub t4, t4, t2 // Offset adjustment + SREG t4, 2*REGWIDTH(t1) // save 3rd sig value, (normalized rel mepc) into trap signature area +epc_between_data_code_\__MODE__\()adj: + LA( t4, rvtest_code_begin) // Fetch the address of rvtest_code_begin to compare against mepc. 
+ sub t4, t2, t4 // Offset adjustment + SREG t4, 2*REGWIDTH(t1) // save 3rd sig value, (normalized rel mepc) into trap signature area + j adj_\__MODE__\()epc + +adj_\__MODE__\()epc: // adj mepc so there is at least 4B of padding after op + andi t6, t2, -0x4 // adjust mepc to prev 4B alignment (if 2B aligned) + addi t6, t6, 0x8 // adjust mepc so it skips past op, has padding & 4B aligned + csrw CSR_XEPC, t6 // restore adjusted value, w/ 2,4 or 6B of padding + + /****WARNING needs updating when insts>32b are ratified, only 4 or 6B of padding; for 64b insts, 2B or 4B of padding ****/ /******************************************************************************/ /* Calculate relative mtval if it’s an addr (by code_begin or data_begin amt) */ - /* Special case if its in the signature region as well. */ - /* Model-defined mask values (indexed by xcause value) select which to use */ - /* Enter with rvtest_code_begin (which is start of actual test) in t3 */ - /* if illegal op, load opcode from mtval (if !=0) or from istream (if ==0) */ + /* Special case if its in the signature region as well. 
*/ + /* Model-defined mask values (indexed by xcause value) select which to use */ + /* Enter with rvtest_code_begin (which is start of actual test) in t3 */ + /* if illegal op, load opcode from mtval (if !=0) or from istream (if ==0) */ /******************************************************************************/ -// csrr t4, CSR_XCAUSE - csrr t6, CSR_XTVAL +// csrr t4, CSR_XCAUSE + csrr t6, CSR_XTVAL /******************************************************************************/ /* null adjustment if exception address is neither code nor data relative; ****/ @@ -1110,236 +1124,235 @@ adj_\__MODE__\()epc: // adj mepc so there is at least 4B of padding after op /* now check for addr adjustments - code, data, or sig region (cause is in t5)*/ -illop_\__MODE__\()tval: // if cause is illop, save always - addi t2, t5, -CAUSE_ILLEGAL_INSTRUCTION // spcl case mcause==2 (illegal op) no addr adj - beqz t2, sv_\__MODE__\()tval +illop_\__MODE__\()tval: // if cause is illop, save always + addi t2, t5, -CAUSE_ILLEGAL_INSTRUCTION // spcl case mcause==2 (illegal op) no addr adj + beqz t2, sv_\__MODE__\()tval chk_\__MODE__\()tval: - LI( t4, SET_REL_TVAL_MSK) // now check if code or data (or sig) region adjustment + LI( t4, SET_REL_TVAL_MSK) // now check if code or data (or sig) region adjustment - srl t4, t4, t5 // put mcause bit# into LSB - slli t4, t4, XLEN-1 // put mcause bit# into MSB - bge t4, x0, sv_\__MODE__\()tval // if MSB=0, no adj, sv to ensure tval was cleared + srl t4, t4, t5 // put mcause bit# into LSB + slli t4, t4, XLEN-1 // put mcause bit# into MSB + bge t4, x0, sv_\__MODE__\()tval // if MSB=0, no adj, sv to ensure tval was cleared code_\__MODE__\()adj: - LA(t4, rvtest_code_end) // Fetch rvtest_code_end to compare against mtval. - bge t6, t4, data_\__MODE__\()adj // Compare if mtval > rvtest_code_end ; address belong outside code area ==> will be adjusted via data memory - LA( t4, rvtest_code_begin) // Fetch rvtest_code_end to compare against mtval. 
- blt t6, t4, data_\__MODE__\()adj // Compare if mtval < rvtest_code_begin ; address belong outside code area ==> will be adjusted via data memory
- addi t3, t4, 0 // If rvtest_code_begin < mtval < rvtest_code_end ==> Adjustment will be made via code region
- j adj_\__MODE__\()tval
-
-data_\__MODE__\()adj: // only possibilities left are testdata or sigdata
- LA( t3, rvtest_data_end)
- bge t6, t3, sig_\__MODE__\()adj // after data area, must be sig, adj according to sig label
- LA( t3, rvtest_data_begin)
- bge t6, t3, adj_\__MODE__\()tval // inside data area, lv rvtest_data_begin as adjustment amt
-
+ LA(t4, rvtest_code_end) // Fetch rvtest_code_end to compare against mtval.
+ bgeu t6, t4, data_\__MODE__\()adj // Compare if mtval >= rvtest_code_end ; address belongs outside code area ==> will be adjusted via data memory
+ LA( t4, rvtest_code_begin) // Fetch rvtest_code_begin to compare against mtval.
+ bltu t6, t4, sv_\__MODE__\()tval // if mtval < rvtest_code_begin ==> No adjustment needed
+ addi t3, t4, 0 // If rvtest_code_begin < mtval < rvtest_code_end ==> Adjustment will be made via code region
+ j adj_\__MODE__\()tval
+
+data_\__MODE__\()adj: // only possibilities left are testdata or sigdata
+ LA( t3, rvtest_data_end)
+ bgeu t6, t3, sig_\__MODE__\()adj // after data area, must be sig, adj according to sig label
+ LA( t3, rvtest_data_begin)
+ bgeu t6, t3, adj_\__MODE__\()tval // inside data area, lv rvtest_data_begin as adjustment amt
 sig_\__MODE__\()adj:
- LA( t3, mtrap_sigptr) // adj assuming sig_region access
+ LA( t3, mtrap_sigptr) // adj assuming sig_region access

-adj_\__MODE__\()tval: // For Illegal op handling or tval not loaded - opcode not address
- sub t6, t6, t3 // perform mtval adjust by either code, data, or sig position in t3
+adj_\__MODE__\()tval: // For Illegal op handling or tval not loaded - opcode not address
+ sub t6, t6, t3 // perform mtval adjust by either code, data, or sig position in t3

sv_\__MODE__\()tval:
- SREG t6, 3*REGWIDTH(t1) // 
save 4rd sig value, (intID)
+ SREG t6, 3*REGWIDTH(t1) // save 4th sig value, (intID)

skp_\__MODE__\()tval:
.if (\__MODE__\() == M)
 .ifdef __H_EXT__
- csrr t2, CSR_MTVAL2
- SREG t2, 4*REGWIDTH(t1) // store 5th sig value, only if mmode handler and VS mode exists
- csrr t2, CSR_MTINST
- SREG t2, 5*REGWIDTH(t1) // store 6th sig value, only if mmode handler and VS mode exists
+ csrr t2, CSR_MTVAL2
+ SREG t2, 4*REGWIDTH(t1) // store 5th sig value, only if mmode handler and VS mode exists
+ csrr t2, CSR_MTINST
+ SREG t2, 5*REGWIDTH(t1) // store 6th sig value, only if mmode handler and VS mode exists
 .endif
.endif

-chk_\__MODE__\()trapsig_overrun: //FIXME: move trap signature check to here
-#ifdef rvtest_sig_end // if not defined, we can't test for overrun
+chk_\__MODE__\()trapsig_overrun: //FIXME: move trap signature check to here
+#ifdef rvtest_sig_end // if not defined, we can't test for overrun
 #define trap_sig_sz (rvtest_sig_end-mtrap_sigptr)
- LA( t4, rvtest_trap_sig)
- LREG t4, 0(t4) // get the preincremented trap signature ptr
- LI( t6, trap_sig_sz) // length of trap sig; exceeding this is an overrun, test error
- add t3, t3, t6 // calculate rvtest_sig_end from mtrap_sigptr, avoiding an LA
- bgtu t4, t3, cleanup_epilogs // abort test if pre-incremented value overruns
+ LA( t4, rvtest_trap_sig)
+ LREG t4, 0(t4) // get the preincremented trap signature ptr
+ LI( t6, trap_sig_sz) // length of trap sig; exceeding this is an overrun, test error
+ add t3, t3, t6 // calculate rvtest_sig_end from mtrap_sigptr, avoiding an LA
+ bgtu t4, t3, cleanup_epilogs // abort test if pre-incremented value overruns
#endif

/**** vector to execption special handling routines ****/
- li t2, XLEN<<3 // offset of exception dispatch table bzse
- j spcl_\__MODE__\()handler // jump to shared int/excpt spcl handling dispatcher
+ li t2, XLEN<<3 // offset of exception dispatch table base
+ j spcl_\__MODE__\()handler // jump to shared int/excpt spcl handling dispatcher

/**** common return code for 
both interrupts and exceptions ****/ -resto_\__MODE__\()rtn: // restore and return - LREG t1, 1*REGWIDTH(sp) - LREG t2, 2*REGWIDTH(sp) - LREG t3, 3*REGWIDTH(sp) - LREG t4, 4*REGWIDTH(sp) - LREG t5, 5*REGWIDTH(sp) - LREG t6, 6*REGWIDTH(sp) - LREG sp, 7*REGWIDTH(sp) // restore temporaries +resto_\__MODE__\()rtn: // restore and return + LREG t1, 1*REGWIDTH(sp) + LREG t2, 2*REGWIDTH(sp) + LREG t3, 3*REGWIDTH(sp) + LREG t4, 4*REGWIDTH(sp) + LREG t5, 5*REGWIDTH(sp) + LREG t6, 6*REGWIDTH(sp) + LREG sp, 7*REGWIDTH(sp) // restore temporaries - \__MODE__\()RET // return to test, after padding adjustment (macro to handle case) + \__MODE__\()RET // return to test, after padding adjustment (macro to handle case) /***************************************************/ - /**** This is the interrupt specific code. It ****/ + /**** This is the interrupt specific code. It ****/ /**** clears the int and saves int-specific CSRS****/ /***************************************************/ -common_\__MODE__\()int_handler: // t1 has sig ptr, t5 has mcause - LI( t3, 1) +common_\__MODE__\()int_handler: // t1 has sig ptr, t5 has mcause + LI( t3, 1) //**FIXME** - make sure this is kept up-to-date with fast int extension and others - andi t2, t5, XLEN-1 // clr INT bit (**NOTE rmv extra bits if future extns use upper bits) - sll t3, t3, t2 // create mask 1<>2)+1) & -2 // align to dblwd - j .+0 // prototype jump instruction, offset to be filled in +\__MODE__\()tramptbl_sv: // save area of existing trampoline table +.rept ((tramp_sz>>2)+1) & -2 // align to dblwd + j .+0 // prototype jump instruction, offset to be filled in .endr \__MODE__\()satp_sv: - .dword 0 // save area for incoming xsatp + .dword 0 // save area for incoming xsatp \__MODE__\()mode_rtn: - jalr x0, 8(t2) // This replaces trap trampoline entry for S/Vmode - .word 0 // dummy to keep alignment + jalr x0, 8(t2) // This replaces trap trampoline entry for S/Vmode + .word 0 // dummy to keep alignment \__MODE__\()trampend_sv: - .dword 0 // 
save location of end of trampoline + .dword 0 // save location of end of trampoline \__MODE__\()edeleg_sv: - .dword 0 // save location for edeleg CSR + .dword 0 // save location for edeleg CSR \__MODE__\()tvec_new: - .dword 0 // points to the in-use tvec, actual tramp table used + .dword 0 // points to the in-use tvec, actual tramp table used \__MODE__\()tvec_save: - .dword 0 // save area for incoming mtvec + .dword 0 // save area for incoming mtvec \__MODE__\()scratch_save: - .dword 0 // save area for incoming mscratch + .dword 0 // save area for incoming mscratch \__MODE__\()trapreg_sv: - .fill 8, REGWIDTH, 0xdeadbeef // handler reg save area, 2 extra, keep dbl alignment + .fill 8, REGWIDTH, 0xdeadbeef // handler reg save area, 2 extra, keep dbl alignment -\__MODE__\()sv_area_end: // used to clc size, which is used to avoid CSR read +\__MODE__\()sv_area_end: // used to clc size, which is used to avoid CSR read .set \__MODE__\()sv_area_sz, (\__MODE__\()sv_area_end-\__MODE__\()satp_sv) .option pop @@ -1456,7 +1469,7 @@ rvtest_\__MODE__\()end: //============================================================================== -/**************************** CODE BEGIN w/ TRAP HANDLER START *********************/ +/**************************** CODE BEGIN w/ TRAP HANDLER START *********************/ /**** instantiate prologs using RVTEST_TRAP_PROLOG() if rvtests_xtrap_routine is ****/ /**** is defined, then initializes regs & defines rvtest_code_begin global label ****/ /************************************************************************************/ @@ -1465,66 +1478,66 @@ rvtest_\__MODE__\()end: .option norvc .align UNROLLSZ .section .text.init - .globl rvtest_init - .global rvtest_code_begin //define the label and make it available + .globl rvtest_init + .global rvtest_code_begin //define the label and make it available -rvtest_init: //instantiate prologs here +rvtest_init: //instantiate prologs here INSTANTIATE_MODE_MACRO RVTEST_TRAP_PROLOG - RVTEST_INIT_GPRS // 
0xF0E1D2C3B4A59687 + RVTEST_INIT_GPRS // 0xF0E1D2C3B4A59687 rvtest_code_begin: .option pop -.endm //end of RVTEST_CODE_BEGIN +.endm //end of RVTEST_CODE_BEGIN /*********************** end of RVTEST_CODE_BEGIN ***********************************/ /************************************************************************************/ -/**** The above is instantiated at the start of the actual test ****/ -/**** So the test is here ****/ -/**** the below is instantiated at the end of the actual test ****/ +/**** The above is instantiated at the start of the actual test ****/ +/**** So the test is here ****/ +/**** the below is instantiated at the end of the actual test ****/ /************************************************************************************/ /**************************************************************************************/ /**** RVTEST_CODE_END macro defines end of test code: saves regs, transitions to ****/ -/**** Mmode, & instantiates epilog using RVTEST_TRAP_EPILOG() macros. Test code ****/ +/**** Mmode, & instantiates epilog using RVTEST_TRAP_EPILOG() macros. Test code ****/ /**** falls through to this else must branch to label rvtest_code_end. This must ****/ /**** branch to a RVMODEL_HALT macro at the end. 
The actual trap handlers for each ****/ /**** mode are instantiated immediately following with RVTEST_TRAP_HANDLER() macro ****/ /**************************************************************************************/ -.macro RVTEST_CODE_END // test is ended, but in no particular mode +.macro RVTEST_CODE_END // test is ended, but in no particular mode .option push .option norvc - .global rvtest_code_end // define the label and make it available + .global rvtest_code_end // define the label and make it available .global cleanup_epilogs -rvtest_code_end: // COMPLIANCE_HALT should get here - #ifdef rvtest_gpr_save // gpr_save area is instantiated at end of signature - RVTEST_SAVE_GPRS x1 gpr_save +rvtest_code_end: // COMPLIANCE_HALT should get here + #ifdef rvtest_gpr_save // gpr_save area is instantiated at end of signature + RVTEST_SAVE_GPRS x1 gpr_save #endif - RVTEST_GOTO_MMODE // if only Mmode used by tests, this has no effect -cleanup_epilogs: // jump here to quit, will restore state for each mode + RVTEST_GOTO_MMODE // if only Mmode used by tests, this has no effect +cleanup_epilogs: // jump here to quit, will restore state for each mode - //restore xTVEC, trampoline, regs for each mode in opposite order that they were saved + //restore xTVEC, trampoline, regs for each mode in opposite order that they were saved #ifdef rvtest_mtrap_routine #ifdef rvtest_strap_routine - #ifdef rvtest_vtrap_routine - RVTEST_TRAP_EPILOG V // actual v-mode prolog/epilog/handler code - #endif - RVTEST_TRAP_EPILOG S // actual s-mode prolog/epilog/handler code + #ifdef rvtest_vtrap_routine + RVTEST_TRAP_EPILOG V // actual v-mode prolog/epilog/handler code + #endif + RVTEST_TRAP_EPILOG S // actual s-mode prolog/epilog/handler code #endif - RVTEST_TRAP_EPILOG M // actual m-mode prolog/epilog/handler code + RVTEST_TRAP_EPILOG M // actual m-mode prolog/epilog/handler code #endif -// INSTANTIATE_MODE_MACRO RVTEST_TRAP_EPILOG //restore xTVEC, trampoline, regs for each mode +// 
INSTANTIATE_MODE_MACRO RVTEST_TRAP_EPILOG //restore xTVEC, trampoline, regs for each mode /************* test done, epolog has restored everying, jump to halt ****************/ - j exit_cleanup //skip around handlers, go to RVMODEL_HALT + j exit_cleanup //skip around handlers, go to RVMODEL_HALT /********************** trap handlers inserted here ***********************************/ INSTANTIATE_MODE_MACRO RVTEST_TRAP_HANDLER -exit_cleanup: // *** RVMODEL_HALT MUST follow this***, then data +exit_cleanup: // *** RVMODEL_HALT MUST follow this***, then data .option pop -.endm // end of RVTEST_CODE_END +.endm // end of RVTEST_CODE_END /************************************************************************************/ /**** RVTEST_CODE_END macros must fall thru or jump to an RVMODEL_HALT macro here ***/ @@ -1533,14 +1546,14 @@ rvtest_\__MODE__\()end: /*===================================data section starts here========================*/ /************************************************************************************/ -/**** RVTEST_DATA_BEGIN macro defines end of input data & rvtest_data_end label ****/ -/**** this is a data area, so we instantiate trap save areas for each mode here ****/ +/**** RVTEST_DATA_BEGIN macro defines end of input data & rvtest_data_end label ****/ +/**** this is a data area, so we instantiate trap save areas for each mode here ****/ /************************************************************************************/ .macro RVTEST_DATA_BEGIN .data -.align 4 //ensure dbl alignment +.align 4 //ensure dbl alignment /**************************************************************************************/ /**** this is the pointer to the current trap signature part of the signature area ****/ /**** it is shared by all trap modes, but shouldn't be instantiated unless at least****/ @@ -1549,12 +1562,12 @@ rvtest_\__MODE__\()end: .global rvtest_trap_sig rvtest_trap_sig: - .dword mtrap_sigptr + .dword mtrap_sigptr -/**** now instantiate separate 
save areas for each modes state ****/ +/**** now instantiate separate save areas for each modes state ****/ /**** strictly speaking, should only be needed for reentrant traps ****/ -.align 3 //redundant? - INSTANTIATE_MODE_MACRO RVTEST_TRAP_SAVEAREA +.align 3 //redundant? + INSTANTIATE_MODE_MACRO RVTEST_TRAP_SAVEAREA .align 4 @@ -1567,7 +1580,7 @@ rvtest_\__MODE__\()end: .endm /************************************************************************************/ -/**************** RVTEST_DATA_END macro; defines global label rvtest_data_end ****/ +/**************** RVTEST_DATA_END macro; defines global label rvtest_data_end ****/ /************************************************************************************/ .macro RVTEST_DATA_END .global rvtest_data_end