author	Jasper Lievisse Adriaanse <jasper@cvs.openbsd.org>	2010-06-06 15:12:34 +0000
committer	Jasper Lievisse Adriaanse <jasper@cvs.openbsd.org>	2010-06-06 15:12:34 +0000
commit	c0441e2f542258942bfc42ff461b8ed1adc3c3c9 (patch)
tree	b220e59cd648b507f0bd77ffa43210c97cccaffa /sys/arch
parent	a6871c0ebaeb39742308454846222e8da10ab6fc (diff)
use .L* for local labels and other small cosmetics
from uwe@netbsd ok miod@
Diffstat (limited to 'sys/arch')
-rw-r--r--	sys/arch/sh/sh/vectors.S	104
1 file changed, 54 insertions(+), 50 deletions(-)
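
For context on the change itself: with the GNU assembler, any symbol whose name begins with ".L" is an assembler-local label and is omitted from the object file's symbol table, whereas names like "_L.curproc" are ordinary symbols that clutter nm(1) output and ddb backtraces. A minimal sketch of the convention, assuming GNU as targeting SH (the routine name and constant are made up for illustration):

	.text
	.align	2
	.globl	visible_routine
visible_routine:			/* ordinary symbol: kept in the symbol table */
	mov.l	.Lmask, r0		/* ".L" prefix: local, not emitted */
	rts
	 nop				/* delay slot */

	.align	2			/* literal pool: mov.l needs 4-byte alignment */
.Lmask:	.long	0xfffff000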
diff --git a/sys/arch/sh/sh/vectors.S b/sys/arch/sh/sh/vectors.S
index ccf7bb5c821..694bf3107b9 100644
--- a/sys/arch/sh/sh/vectors.S
+++ b/sys/arch/sh/sh/vectors.S
@@ -1,4 +1,4 @@
-/* $OpenBSD: vectors.S,v 1.3 2008/06/26 05:42:13 ray Exp $ */
+/* $OpenBSD: vectors.S,v 1.4 2010/06/06 15:12:33 jasper Exp $ */
/* $NetBSD: exception_vector.S,v 1.19 2006/08/22 21:47:57 uwe Exp $ */
/*-
@@ -38,7 +38,8 @@
#include <sh/mmu_sh4.h>
/*
- * Exception vectors. The following routines are copied to vector addreses.
+ * Exception vectors.
+ * The following routines are copied to vector addresses.
* sh_vector_generic: VBR + 0x100
* sh_vector_tlbmiss: VBR + 0x400
* sh_vector_interrupt: VBR + 0x600
@@ -52,9 +53,9 @@
/*
* LINTSTUB: Var: char sh_vector_generic[1];
*
- * void sh_vector_generic(void) __attribute__((__noreturn__))
+ * void sh_vector_generic(void);
* Copied to VBR+0x100. This code should be position independent
- * and no more than 786 bytes long (== 0x400 - 0x100).
+ * and maximum 768 bytes long (== 0x400 - 0x100).
*/
NENTRY(sh_vector_generic)
__EXCEPTION_ENTRY
@@ -62,23 +63,23 @@ NENTRY(sh_vector_generic)
/* Identify exception cause */
MOV (EXPEVT, r0)
mov.l @r0, r0
- mov.l r0, @(TF_EXPEVT, r14) /* trapframe->tf_expevt = EXPEVT */
+ mov.l r0, @(TF_EXPEVT, r14) /* trapframe->tf_expevt = EXPEVT */
/* Get curproc */
- mov.l _L.curproc, r1
+ mov.l .Lg_curproc, r1
mov.l @r1, r4 /* 1st arg */
/* Get TEA */
MOV (TEA, r1)
mov.l @r1, r6 /* 3rd arg */
/* Check TLB exception or not */
- mov.l _L.TLB_PROT_ST, r1
+ mov.l .Lg_TLB_PROT_ST, r1
cmp/hi r1, r0
bt 1f
- /* tlb_exception(curproc, trapframe, trunc_page(TEA)); */
- mov.l _L.VPN_MASK, r1
+ /* tlb_exception(curproc, trapframe, TEA); */
+ mov.l .Lg_VPN_MASK, r1
and r1, r6 /* va = trunc_page(va) */
__EXCEPTION_UNBLOCK(r0, r1)
- mov.l _L.tlb, r0
+ mov.l .Lg_tlb_exception, r0
jsr @r0
mov r14, r5 /* 2nd arg */
bra 2f
@@ -93,13 +94,13 @@ NENTRY(sh_vector_generic)
mov.l r2, @(TF_UBC, r14) /* clear trapframe->tf_ubc */
#endif /* DDB */
__EXCEPTION_UNBLOCK(r0, r1)
- mov.l _L.general, r0
+ mov.l .Lg_general_exception, r0
jsr @r0
mov r14, r5 /* 2nd arg */
/* Check for ASTs on exit to user mode. */
mov r8, r4
- mov.l _L.ast, r0
+ mov.l .Lg_ast, r0
jsr @r0
mov r14, r5
#ifdef DDB /* BBRA = trapframe->tf_ubc */
@@ -111,15 +112,15 @@ NENTRY(sh_vector_generic)
2: __EXCEPTION_RETURN
/* NOTREACHED */
.align 2
-_L.curproc: .long _C_LABEL(cpu_info_store) + CI_CURPROC
+.Lg_curproc: .long _C_LABEL(cpu_info_store) + CI_CURPROC
REG_SYMBOL(EXPEVT)
REG_SYMBOL(BBRA)
REG_SYMBOL(TEA)
-_L.tlb: .long _C_LABEL(tlb_exception)
-_L.general: .long _C_LABEL(general_exception)
-_L.ast: .long _C_LABEL(ast)
-_L.TLB_PROT_ST: .long 0xc0
-_L.VPN_MASK: .long 0xfffff000
+.Lg_tlb_exception: .long _C_LABEL(tlb_exception)
+.Lg_general_exception: .long _C_LABEL(general_exception)
+.Lg_ast: .long _C_LABEL(ast)
+.Lg_TLB_PROT_ST: .long EXPEVT_TLB_PROT_ST
+.Lg_VPN_MASK: .long 0xfffff000
/* LINTSTUB: Var: char sh_vector_generic_end[1]; */
VECTOR_END_MARKER(sh_vector_generic_end)
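
The renamed labels above all feed the same SH constant-pool idiom: "mov.l label, rN" assembles to a PC-relative load, so 32-bit constants and callee addresses live as .long words just past the code, and calls go through a register. A hedged sketch of the pattern, with a hypothetical callee (some_c_function is not in the file):

	.text
	.align	2
example_call:				/* illustrative only */
	sts.l	pr, @-r15		/* save return address */
	mov.l	.Lfunc, r0		/* PC-relative load from the pool below */
	jsr	@r0			/* call; the next insn runs in the delay slot */
	 mov	r14, r5			/* delay slot: 2nd argument, as in the vectors */
	lds.l	@r15+, pr		/* restore return address */
	rts
	 nop

	.align	2			/* mov.l @(disp,pc) requires 4-byte alignment */
.Lfunc:	.long	some_c_function		/* hypothetical callee */

Because the loads are PC-relative and the pool travels with the code, the whole block stays position independent, which the copy to VBR+offset depends on.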
@@ -130,9 +131,9 @@ VECTOR_END_MARKER(sh_vector_generic_end)
/*
* LINTSTUB: Var: char sh3_vector_tlbmiss[1];
*
- * void sh3_vector_tlbmiss(void) __attribute__((__noreturn__))
+ * void sh3_vector_tlbmiss(void);
* Copied to VBR+0x400. This code should be position independent
- * and no more than 512 bytes long (== 0x600 - 0x400).
+ * and maximum 512 bytes long (== 0x600 - 0x400).
*/
NENTRY(sh3_vector_tlbmiss)
__EXCEPTION_ENTRY
@@ -140,11 +141,11 @@ NENTRY(sh3_vector_tlbmiss)
mov.l @r0, r6 /* 3rd arg: va = TEA */
#if !defined(P1_STACK)
/* Load kernel stack */
- mov.l __L.VPN_MASK, r0
+ mov.l .L3_VPN_MASK, r0
and r6, r0
tst r0, r0 /* check VPN == 0 */
bt 6f
- mov.l _L.CURUPTE, r1
+ mov.l .L3_CURUPTE, r1
mov.l @r1, r1
mov #UPAGES,r3
mov #1, r2
@@ -162,7 +163,7 @@ NENTRY(sh3_vector_tlbmiss)
mov.l r2, @r1
mov #(SH3_PTEH & 0xff), r1
mov.l @r1, r2
- mov.l __L.VPN_MASK, r0
+ mov.l .L3_VPN_MASK, r0
and r2, r0
mov.l r0, @r1 /* ASID 0 */
ldtlb
@@ -172,19 +173,20 @@ NENTRY(sh3_vector_tlbmiss)
6: mov #(SH3_EXPEVT & 0xff), r0
7: mov.l @r0, r0
mov.l r0, @(TF_EXPEVT, r14) /* trapframe->tf_expevt = EXPEVT */
- mov.l 2f, r0
+ mov.l .L3_curproc, r0
mov.l @r0, r4 /* 1st arg */
__INTR_MASK(r0, r1)
__EXCEPTION_UNBLOCK(r0, r1)
- mov.l 1f, r0
+ mov.l .L3_tlb_exception, r0
jsr @r0
mov r14, r5 /* 2nd arg */
3: __EXCEPTION_RETURN
+
.align 2
-2: .long _C_LABEL(cpu_info_store) + CI_CURPROC
-1: .long _C_LABEL(tlb_exception)
-__L.VPN_MASK: .long 0xfffff000
-_L.CURUPTE: .long _C_LABEL(curupte)
+.L3_curproc: .long _C_LABEL(cpu_info_store) + CI_CURPROC
+.L3_tlb_exception: .long _C_LABEL(tlb_exception)
+.L3_VPN_MASK: .long 0xfffff000
+.L3_CURUPTE: .long _C_LABEL(curupte)
/* LINTSTUB: Var: char sh3_vector_tlbmiss_end[1]; */
VECTOR_END_MARKER(sh3_vector_tlbmiss_end)
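
This hunk also retires GNU as numeric local labels ("1:", "2:") for the constant pool. Numeric labels may be defined repeatedly and are referenced as "1f" (nearest definition forward) or "1b" (nearest backward), which is compact but easy to misread in a long routine. A small sketch of the numeric form, again assuming GNU as for SH (the routine is made up):

	.text
	.align	2
count_up:				/* illustrative only */
	mov	#0, r0			/* counter */
1:	add	#1, r0
	cmp/eq	r1, r0			/* T = (r0 == r1) */
	bf	1b			/* "1b": nearest "1:" searching backward */
	mov.l	2f, r2			/* "2f": nearest "2:" searching forward */
	rts
	 nop

	.align	2
2:	.long	0xfffff000		/* anonymous pool entry, as the old code had */

Named .L labels assemble identically but say what each word is for, which is the point of the change.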
@@ -196,30 +198,31 @@ VECTOR_END_MARKER(sh3_vector_tlbmiss_end)
/*
* LINTSTUB: Var: char sh4_vector_tlbmiss[1];
*
- * void sh4_vector_tlbmiss(void) __attribute__((__noreturn__))
+ * void sh4_vector_tlbmiss(void);
* Copied to VBR+0x400. This code should be position independent
- * and no more than 512 bytes long (== 0x600 - 0x400).
+ * and maximum 512 bytes long (== 0x600 - 0x400).
*/
NENTRY(sh4_vector_tlbmiss)
__EXCEPTION_ENTRY
- mov.l _L.TEA4, r0
+ mov.l .L4_TEA4, r0
mov.l @r0, r6
- mov.l _L.EXPEVT4, r0
+ mov.l .L4_EXPEVT4, r0
mov.l @r0, r0
mov.l r0, @(TF_EXPEVT, r14) /* trapframe->tf_expevt = EXPEVT */
- mov.l 2f, r0
+ mov.l .L4_curproc, r0
mov.l @r0, r4 /* 1st arg */
__INTR_MASK(r0, r1)
__EXCEPTION_UNBLOCK(r0, r1)
- mov.l 1f, r0
+ mov.l .L4_tlb_exception, r0
jsr @r0
mov r14, r5 /* 2nd arg */
__EXCEPTION_RETURN
+
.align 2
-1: .long _C_LABEL(tlb_exception)
-2: .long _C_LABEL(cpu_info_store) + CI_CURPROC
-_L.EXPEVT4: .long SH4_EXPEVT
-_L.TEA4: .long SH4_TEA
+.L4_tlb_exception: .long _C_LABEL(tlb_exception)
+.L4_curproc: .long _C_LABEL(cpu_info_store) + CI_CURPROC
+.L4_EXPEVT4: .long SH4_EXPEVT
+.L4_TEA4: .long SH4_TEA
/* LINTSTUB: Var: char sh4_vector_tlbmiss_end[1]; */
VECTOR_END_MARKER(sh4_vector_tlbmiss_end)
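
A detail visible between the two tlbmiss variants: the sh3 code reaches its control registers with sign-extended 8-bit immediates ("mov #(SH3_EXPEVT & 0xff), r0") because the SH3 registers sit at 0xffffffxx, so the byte expands to the full P4 address; the SH4 registers (EXPEVT at 0xff000024, TEA at 0xff00000c) do not fit that encoding and must come from the literal pool, which is what .L4_EXPEVT4 and .L4_TEA4 are for. A sketch of both forms, assuming GNU as for SH (routine names are illustrative):

	.text
	.align	2
read_expevt_sh3:			/* SH3: one mov, the byte sign-extends */
	mov	#(0xd4 - 0x100), r0	/* 0xd4 -> 0xffffffd4 == SH3 EXPEVT */
	mov.l	@r0, r0			/* fetch the exception code */
	rts
	 nop

	.align	2
read_expevt_sh4:			/* SH4: address needs a pool constant */
	mov.l	.Lexpevt4, r0
	mov.l	@r0, r0
	rts
	 nop

	.align	2
.Lexpevt4: .long 0xff000024		/* SH4 EXPEVT */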
@@ -230,38 +233,39 @@ VECTOR_END_MARKER(sh4_vector_tlbmiss_end)
/*
* LINTSTUB: Var: char sh_vector_interrupt[1];
*
- * void sh_vector_interrupt(void) __attribute__((__noreturn__)):
- * copied to VBR+0x600. This code should be relocatable.
+ * void sh_vector_interrupt(void);
+ * Copied to VBR+0x600. This code should be position independent.
*/
NENTRY(sh_vector_interrupt)
__EXCEPTION_ENTRY
xor r0, r0
mov.l r0, @(TF_EXPEVT, r14) /* (for debug) */
stc r0_bank,r6 /* ssp */
- /* Enable exception for P3 access */
+ /* Enable exceptions for P3 access */
__INTR_MASK(r0, r1)
__EXCEPTION_UNBLOCK(r0, r1)
/* uvmexp.intrs++ */
- mov.l __L.uvmexp.intrs, r0
+ mov.l .Li_uvmexp_intrs, r0
mov.l @r0, r1
add #1, r1
mov.l r1, @r0
/* Dispatch interrupt handler */
- mov.l __L.intc_intr, r0
+ mov.l .Li_intc_intr, r0
jsr @r0 /* intc_intr(ssr, spc, ssp) */
nop
/* Check for ASTs on exit to user mode. */
- mov.l 1f, r0
+ mov.l .Li_curproc, r0
mov.l @r0, r4 /* 1st arg */
- mov.l __L.ast, r0
+ mov.l .Li_ast, r0
jsr @r0
mov r14, r5 /* 2nd arg */
__EXCEPTION_RETURN
+
.align 2
-1: .long _C_LABEL(cpu_info_store) + CI_CURPROC
-__L.intc_intr: .long _C_LABEL(intc_intr)
-__L.ast: .long _C_LABEL(ast)
-__L.uvmexp.intrs: .long _C_LABEL(uvmexp) + UVMEXP_INTRS
+.Li_curproc: .long _C_LABEL(cpu_info_store) + CI_CURPROC
+.Li_intc_intr: .long _C_LABEL(intc_intr)
+.Li_ast: .long _C_LABEL(ast)
+.Li_uvmexp_intrs: .long _C_LABEL(uvmexp) + UVMEXP_INTRS
/* LINTSTUB: Var: char sh_vector_interrupt_end[1]; */
VECTOR_END_MARKER(sh_vector_interrupt_end)