diff options
author | Brandon Mercer <bmercer@cvs.openbsd.org> | 2013-01-25 20:23:49 +0000 |
---|---|---|
committer | Brandon Mercer <bmercer@cvs.openbsd.org> | 2013-01-25 20:23:49 +0000 |
commit | 8a79eabcd16e4908347f828d9520b683cef54f1c (patch) | |
tree | 584d50baa89a21341b71a4f3855794a6e385f080 /sys | |
parent | d7cd0e08da1870e657180ec804686f37474fe780 (diff) |
Updates for beagle/panda work. OK patrick@ and miod@
Diffstat (limited to 'sys')
-rw-r--r-- | sys/arch/arm/arm/cpufunc_asm_armv7.S | 151 |
1 file changed, 88 insertions(+), 63 deletions(-)
diff --git a/sys/arch/arm/arm/cpufunc_asm_armv7.S b/sys/arch/arm/arm/cpufunc_asm_armv7.S index 6cbf1abbebd..ed6833fc923 100644 --- a/sys/arch/arm/arm/cpufunc_asm_armv7.S +++ b/sys/arch/arm/arm/cpufunc_asm_armv7.S @@ -1,4 +1,4 @@ -/* $OpenBSD: cpufunc_asm_armv7.S,v 1.3 2011/11/09 12:29:00 miod Exp $ */ +/* $OpenBSD: cpufunc_asm_armv7.S,v 1.4 2013/01/25 20:23:48 bmercer Exp $ */ /* * Copyright (c) 2008 Dale Rahn <drahn@openbsd.org> * @@ -18,33 +18,35 @@ #include <machine/cpu.h> #include <machine/asm.h> +#define DSB .long 0xf57ff040 +#define ISB .long 0xf57ff060 +#define WFI .long 0xe320f003 + ENTRY(armv7_cpu_sleep) -#if 0 - wfi -#else - .long 0xe320f003; -#endif + WFI mov pc, lr ENTRY(armv7_drain_writebuf) + DSB + ISB mcr p15, 0, r0, c7, c10, 4 /* drain write buffer */ mov pc, lr /* * Functions to set the MMU Translation Table Base register - * - * We need to clean and flush the cache as it uses virtual - * addresses that are about to change. */ ENTRY(armv7_setttb) - stmfd sp!, {r0, lr} - bl _C_LABEL(armv7_idcache_wbinv_all) - ldmfd sp!, {r0, lr} + mcr p15, 0, r0, c7, c5, 0 /* Flush I cache */ + mcr p15, 0, r0, c7, c5, 6 /* Flush BP cache */ + DSB + ISB + mcr p15, 0, r0, c7, c10, 4 /* drain write buffer */ mcr p15, 0, r0, c2, c0, 0 /* load new TTB */ - mcr p15, 0, r0, c8, c7, 0 /* invalidate I+D TLBs */ - mcr p15, 0, r0, c7, c10, 4 /* drain write buffer */ + DSB + ISB + mov pc, lr /* @@ -53,11 +55,17 @@ ENTRY(armv7_setttb) ENTRY(armv7_tlb_flushID_SE) mcr p15, 0, r0, c8, c6, 1 /* flush D tlb single entry */ mcr p15, 0, r0, c8, c5, 1 /* flush I tlb single entry */ + mcr p15, 0, r0, c7, c5, 7 /* flush va from BP */ + DSB + ISB mcr p15, 0, r0, c7, c10, 4 /* drain write buffer */ mov pc, lr ENTRY(armv7_tlb_flushI_SE) mcr p15, 0, r0, c8, c5, 1 /* flush I tlb single entry */ + mcr p15, 0, r0, c7, c5, 7 /* flush va from BP */ + DSB + ISB mcr p15, 0, r0, c7, c10, 4 /* drain write buffer */ mov pc, lr @@ -66,21 +74,31 @@ ENTRY(armv7_tlb_flushI_SE) */ ENTRY(armv7_tlb_flushID) 
mcr p15, 0, r0, c8, c7, 0 /* flush I+D tlb */ + mcr p15, 0, r0, c7, c5, 6 /* Flush BP cache */ + DSB + ISB mcr p15, 0, r0, c7, c10, 4 /* drain write buffer */ mov pc, lr ENTRY(armv7_tlb_flushI) mcr p15, 0, r0, c8, c5, 0 /* flush I tlb */ + mcr p15, 0, r0, c7, c5, 6 /* Flush BP cache */ + DSB + ISB mcr p15, 0, r0, c7, c10, 4 /* drain write buffer */ mov pc, lr ENTRY(armv7_tlb_flushD) mcr p15, 0, r0, c8, c6, 0 /* flush D tlb */ + DSB + ISB mcr p15, 0, r0, c7, c10, 4 /* drain write buffer */ mov pc, lr ENTRY(armv7_tlb_flushD_SE) mcr p15, 0, r0, c8, c6, 1 /* flush D tlb single entry */ + DSB + ISB mcr p15, 0, r0, c7, c10, 4 /* drain write buffer */ mov pc, lr @@ -95,8 +113,8 @@ ENTRY(armv7_tlb_flushD_SE) i_inc .req r3 ENTRY(armv7_icache_sync_range) ldr ip, .Larmv7_line_size - cmp r1, #0x4000 - bcs .Larmv7_icache_sync_all + cmp r1, #0x8000 + movcs r1, #0x8000 /* XXX needs to match cache size... */ ldr ip, [ip] sub r1, r1, #1 /* Don't overrun */ sub r3, ip, #1 @@ -104,11 +122,14 @@ ENTRY(armv7_icache_sync_range) add r1, r1, r2 bic r0, r0, r3 1: + mcr p15, 0, r0, c7, c11, 1 /* Clean D cache SE with VA to PoU */ mcr p15, 0, r0, c7, c5, 1 /* Invalidate I cache SE with VA */ - mcr p15, 0, r0, c7, c10, 1 /* Clean D cache SE with VA */ add r0, r0, ip subs r1, r1, ip - bpl 1b + bhi 1b + mcr p15, 0, r0, c7, c5, 6 /* Flush BP cache */ + DSB + ISB mcr p15, 0, r0, c7, c10, 4 /* drain the write buffer */ mov pc, lr @@ -120,36 +141,17 @@ ENTRY(armv7_icache_sync_all) * into the Dcache cleaning code. */ mcr p15, 0, r0, c7, c5, 0 /* Flush I cache */ - /* Fall through to clean Dcache. 
*/ - -.Larmv7_dcache_wb: - ldr ip, .Larmv7_cache_data - ldmia ip, {s_max, i_max, s_inc, i_inc} -1: - orr ip, s_max, i_max -2: - mcr p15, 0, ip, c7, c10, 2 /* Clean D cache SE with Set/Index */ - sub ip, ip, i_inc - tst ip, i_max /* Index 0 is last one */ - bne 2b /* Next index */ - mcr p15, 0, ip, c7, c10, 2 /* Clean D cache SE with Set/Index */ - subs s_max, s_max, s_inc - bpl 1b /* Next set */ - mcr p15, 0, r0, c7, c10, 4 /* drain the write buffer */ + mcr p15, 0, r0, c7, c5, 6 /* Flush BP cache */ + ISB mov pc, lr .Larmv7_line_size: .word _C_LABEL(arm_pdcache_line_size) ENTRY(armv7_dcache_wb_range) -#if 1 - /* until pmap can invalidate before unmapping */ - /* XXX this also invalidates */ - b _C_LABEL(armv7_dcache_wbinv_all) -#else ldr ip, .Larmv7_line_size - cmp r1, #0x4000 - bcs .Larmv7_dcache_wb + cmp r1, #0x8000 + movcs r1, #0x8000 /* XXX needs to match cache size... */ ldr ip, [ip] sub r1, r1, #1 /* Don't overrun */ sub r3, ip, #1 @@ -160,21 +162,16 @@ ENTRY(armv7_dcache_wb_range) mcr p15, 0, r0, c7, c10, 1 /* Clean D cache SE with VA */ add r0, r0, ip subs r1, r1, ip - bpl 1b + bhi 1b + DSB + ISB mcr p15, 0, r0, c7, c10, 4 /* drain the write buffer */ -#endif mov pc, lr ENTRY(armv7_idcache_wbinv_range) ldr ip, .Larmv7_line_size - cmp r1, #0x4000 -#if 0 - bcs .Larmv7_idcache_wbinv_all -#else - bcc 1f - mov r1, #0x4000 -1: -#endif + cmp r1, #0x8000 + movcs r1, #0x8000 /* XXX needs to match cache size... 
*/ ldr ip, [ip] sub r1, r1, #1 /* Don't overrun */ sub r3, ip, #1 @@ -182,18 +179,22 @@ ENTRY(armv7_idcache_wbinv_range) add r1, r1, r2 bic r0, r0, r3 1: + mcr p15, 0, r0, c7, c11, 1 /* Clean D cache SE with VA to PoU */ mcr p15, 0, r0, c7, c5, 1 /* Invalidate I cache SE with VA */ mcr p15, 0, r0, c7, c14, 1 /* Purge D cache SE with VA */ add r0, r0, ip subs r1, r1, ip - bpl 1b + bhi 1b + mcr p15, 0, r0, c7, c5, 6 /* Flush BP cache */ + DSB + ISB mcr p15, 0, r0, c7, c10, 4 /* drain the write buffer */ mov pc, lr ENTRY(armv7_dcache_wbinv_range) ldr ip, .Larmv7_line_size - cmp r1, #0x4000 - bcs _C_LABEL(armv7_dcache_wbinv_all) + cmp r1, #0x8000 + movcs r1, #0x8000 /* XXX needs to match cache size... */ ldr ip, [ip] sub r1, r1, #1 /* Don't overrun */ sub r3, ip, #1 @@ -201,11 +202,36 @@ ENTRY(armv7_dcache_wbinv_range) add r1, r1, r2 bic r0, r0, r3 1: + mcr p15, 0, r0, c7, c11, 1 /* Clean D cache SE with VA to PoU */ mcr p15, 0, r0, c7, c5, 1 /* Invalidate I cache SE with VA */ mcr p15, 0, r0, c7, c14, 1 /* Purge D cache SE with VA */ add r0, r0, ip subs r1, r1, ip - bpl 1b + bhi 1b + DSB + ISB + mcr p15, 0, r0, c7, c10, 4 /* drain the write buffer */ + mov pc, lr + +ENTRY(armv7_dcache_inv_range) + ldr ip, .Larmv7_line_size + cmp r1, #0x8000 + movcs r1, #0x8000 /* XXX needs to match cache size... */ + ldr ip, [ip] + sub r1, r1, #1 /* Don't overrun */ + sub r3, ip, #1 + and r2, r0, r3 + add r1, r1, r2 + bic r0, r0, r3 +1: + mcr p15, 0, r0, c7, c11, 1 /* Clean D cache SE with VA to PoU */ + mcr p15, 0, r0, c7, c5, 1 /* Invalidate I cache SE with VA */ + mcr p15, 0, r0, c7, c6, 1 /* Invalidate D cache SE with VA */ + add r0, r0, ip + subs r1, r1, ip + bhi 1b + DSB + ISB mcr p15, 0, r0, c7, c10, 4 /* drain the write buffer */ mov pc, lr @@ -224,23 +250,22 @@ ENTRY(armv7_context_switch) * We can assume that the caches will only contain kernel addresses * at this point. So no need to flush them again. 
*/ + mcr p15, 0, r0, c7, c5, 0 /* Flush I cache */ + mcr p15, 0, r0, c7, c5, 6 /* Flush BP cache */ + DSB + ISB mcr p15, 0, r0, c7, c10, 4 /* drain the write buffer */ + mcr p15, 0, r0, c2, c0, 0 /* set the new TTB */ mcr p15, 0, r0, c8, c7, 0 /* and flush the I+D tlbs */ - - /* Paranoia -- make sure the pipeline is empty. */ - nop - nop - nop + DSB + ISB mov pc, lr /* XXX The following macros should probably be moved to asm.h */ #define _DATA_OBJECT(x) .globl x; .type x,_ASM_TYPE_OBJECT; x: #define C_OBJECT(x) _DATA_OBJECT(_C_LABEL(x)) -.Larmv7_cache_data: - .word _C_LABEL(armv7_dcache_sets_max) - .align 2 C_OBJECT(armv7_dcache_sets_max) .word 0 |