author     Dale Rahn <drahn@cvs.openbsd.org>	2003-07-02 21:30:14 +0000
committer  Dale Rahn <drahn@cvs.openbsd.org>	2003-07-02 21:30:14 +0000
commit     5035c30707e318455efb4630f1bade1beb524b25
tree       dd8de754f63df3af8948c3c6d5bf201a7d5e354a /sys/arch/powerpc/include
parent     a3cd3a488774228f4f17f1843c18df95f43a9089
Reduce the amount of asm code in powerpc/macppc by replacing it with
inlined functions; this improves readability and fixes a couple of bugs.
ok miod@
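
For reference, each FUNC_SPR(n, name) invocation in the diff below expands to a read/write accessor pair for special-purpose register n. Expanding FUNC_SPR(287, pvr) by hand gives roughly the following (a sketch of the preprocessor output, not code taken verbatim from the commit):

	/* Expansion of FUNC_SPR(287, pvr): accessors for SPR 287, the
	 * Processor Version Register.  The macro's # and ## operators
	 * paste the SPR number into the instruction mnemonic and the
	 * register name into the function name at compile time. */
	static __inline u_int32_t
	ppc_mfpvr(void)
	{
		int ret;
		__asm __volatile ("mfspr %0,287" : "=r" (ret));
		return ret;
	}

	static __inline void
	ppc_mtpvr(u_int32_t val)
	{
		__asm __volatile ("mtspr 287,%0" :: "r" (val));
	}

One mf/mt pair per register keeps callers free of inline asm while still compiling down to a single instruction; note the macro emits a writer even for read-only registers such as PVR, which callers are simply expected not to use.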
Diffstat (limited to 'sys/arch/powerpc/include')
-rw-r--r--  sys/arch/powerpc/include/cpu.h | 94
1 file changed, 89 insertions(+), 5 deletions(-)
diff --git a/sys/arch/powerpc/include/cpu.h b/sys/arch/powerpc/include/cpu.h
index 9d4e4705c61..bc4607e7a31 100644
--- a/sys/arch/powerpc/include/cpu.h
+++ b/sys/arch/powerpc/include/cpu.h
@@ -1,4 +1,4 @@
-/*	$OpenBSD: cpu.h,v 1.14 2003/07/02 21:23:35 drahn Exp $	*/
+/*	$OpenBSD: cpu.h,v 1.15 2003/07/02 21:30:12 drahn Exp $	*/
 /*	$NetBSD: cpu.h,v 1.1 1996/09/30 16:34:21 ws Exp $	*/
 
 /*
@@ -100,6 +100,90 @@ invdcache(void *from, int len)
 	__asm__ __volatile__ ("sync");
 }
 
+#define FUNC_SPR(n, name) \
+static __inline u_int32_t ppc_mf ## name (void)		\
+{								\
+	int ret;						\
+	__asm __volatile ("mfspr %0," # n : "=r" (ret));	\
+	return ret;						\
+}								\
+static __inline void ppc_mt ## name (u_int32_t val)		\
+{								\
+	__asm __volatile ("mtspr "# n ",%0" :: "r" (val));	\
+}								\
+
+FUNC_SPR(0, mq)
+FUNC_SPR(1, xer)
+FUNC_SPR(4, rtcu)
+FUNC_SPR(5, rtcl)
+FUNC_SPR(8, lr)
+FUNC_SPR(9, ctr)
+FUNC_SPR(18, dsisr)
+FUNC_SPR(19, dar)
+FUNC_SPR(22, dec)
+FUNC_SPR(25, sdr1)
+FUNC_SPR(26, srr0)
+FUNC_SPR(27, srr1)
+FUNC_SPR(256, vrsave)
+FUNC_SPR(272, sprg0)
+FUNC_SPR(273, sprg1)
+FUNC_SPR(274, sprg2)
+FUNC_SPR(275, sprg3)
+FUNC_SPR(282, ear)
+FUNC_SPR(287, pvr)
+FUNC_SPR(528, ibat0u)
+FUNC_SPR(529, ibat0l)
+FUNC_SPR(530, ibat1u)
+FUNC_SPR(531, ibat1l)
+FUNC_SPR(532, ibat2u)
+FUNC_SPR(533, ibat2l)
+FUNC_SPR(534, ibat3u)
+FUNC_SPR(535, ibat3l)
+FUNC_SPR(536, dbat0u)
+FUNC_SPR(537, dbat0l)
+FUNC_SPR(538, dbat1u)
+FUNC_SPR(539, dbat1l)
+FUNC_SPR(540, dbat2u)
+FUNC_SPR(541, dbat2l)
+FUNC_SPR(542, dbat3u)
+FUNC_SPR(543, dbat3l)
+FUNC_SPR(1008, hid0)
+FUNC_SPR(1009, hid1)
+FUNC_SPR(1010, iabr)
+FUNC_SPR(1017, l2cr)
+FUNC_SPR(1018, l3cr)
+FUNC_SPR(1013, dabr)
+FUNC_SPR(1023, pir)
+
+static __inline u_int32_t
+ppc_mftbl (void)
+{
+	int ret;
+	__asm __volatile ("mftb %0" : "=r" (ret));
+	return ret;
+}
+
+static __inline u_int32_t
+ppc_mfmsr (void)
+{
+	int ret;
+	__asm __volatile ("mfmsr %0" : "=r" (ret));
+	return ret;
+}
+
+static __inline void
+ppc_mtmsr (u_int32_t val)
+{
+	__asm __volatile ("mtmsr %0" :: "r" (val));
+}
+
+static __inline void
+ppc_mtsrin(u_int32_t val, u_int32_t sn_shifted)
+{
+	asm volatile ("mtsrin %0,%1" :: "r"(val), "r"(sn_shifted) );
+
+}
+
 /*
  * General functions to enable and disable interrupts
  * without having inlined assembly code in many functions.
@@ -109,9 +193,9 @@ ppc_intr_enable(int enable)
 {
 	u_int32_t msr;
 	if (enable != 0) {
-		__asm__ volatile("mfmsr %0" : "=r"(msr));
+		msr = ppc_mfmsr();
 		msr |= PSL_EE;
-		__asm__ volatile("mtmsr %0" :: "r"(msr));
+		ppc_mtmsr(msr);
 	}
 }
 
@@ -119,9 +203,9 @@ static __inline int
 ppc_intr_disable(void)
 {
 	u_int32_t emsr, dmsr;
-	__asm__ volatile("mfmsr %0" : "=r"(emsr));
+	emsr = ppc_mfmsr();
 	dmsr = emsr & ~PSL_EE;
-	__asm__ volatile("mtmsr %0" :: "r"(dmsr));
+	ppc_mtmsr(dmsr);
 	return (emsr & PSL_EE);
 }
 #endif	/* _POWERPC_CPU_H_ */
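
As a usage sketch for the rewritten interrupt helpers (the function below is hypothetical; only ppc_intr_disable() and ppc_intr_enable() come from cpu.h): ppc_intr_disable() returns the caller's previous PSL_EE state, so interrupts are restored rather than unconditionally re-enabled, which keeps the pair safe to nest:

	/* Hypothetical caller: ppc_intr_disable() clears PSL_EE in the MSR
	 * and returns its prior value; handing that value back to
	 * ppc_intr_enable() restores the original interrupt state, so a
	 * section already running with interrupts off stays that way. */
	void
	frobnicate_locked(void)
	{
		int s;

		s = ppc_intr_disable();
		/* ... code that must run with external interrupts off ... */
		ppc_intr_enable(s);
	}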