author		Philip Guenther <guenther@cvs.openbsd.org>	2022-12-07 19:26:03 +0000
committer	Philip Guenther <guenther@cvs.openbsd.org>	2022-12-07 19:26:03 +0000
commit		fa2d8ca89eae06a25bc70129e3641ccd922f3ada (patch)
tree		480524845a2717dce01d58734dd7b9735a952cce /sys/lib
parent		6312fc30c7d52bcbdc3ec3a515ca81dbbf40b6b2 (diff)
Prefer numeric labels over L<digit> labels, as the latter clutter
the final kernel symbol table.
Add END()s to match ENTRY()s.
ok deraadt@
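
Background (not from the commit itself): with GNU as on ELF targets, a label such as L1 is an ordinary symbol unless it carries the ".L" local prefix, so these one-off branch targets were all landing in the kernel symbol table. Numeric labels (1:, 2:, ...) are assembler-local and are referenced as 1f (nearest "1:" searching forward) or 1b (searching backward); they never produce a symbol at all. A minimal sketch of the pattern the diff applies, using a hypothetical routine and omitting the RETGUARD markers the real files carry:

	ENTRY(example)			/* "example" is hypothetical, illustration only */
		xorl	%eax,%eax
		testl	%edi,%edi
		jz	1f		/* forward reference to the nearest "1:" */
		incl	%eax
		jmp	2f
	1:	decl	%eax		/* numeric label: no symbol table entry */
	2:	ret
		lfence
	END(example)			/* see the note after the diff */

Any number of routines in one file can reuse the same digits, since each 1f/1b reference binds to the nearest matching label rather than to a unique name.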
Diffstat (limited to 'sys/lib')
-rw-r--r--	sys/lib/libkern/arch/amd64/bcmp.S	9
-rw-r--r--	sys/lib/libkern/arch/amd64/bzero.S	5
-rw-r--r--	sys/lib/libkern/arch/amd64/ffs.S	9
-rw-r--r--	sys/lib/libkern/arch/amd64/memchr.S	11
-rw-r--r--	sys/lib/libkern/arch/amd64/memcmp.S	13
-rw-r--r--	sys/lib/libkern/arch/amd64/memset.S	5
6 files changed, 29 insertions, 23 deletions
diff --git a/sys/lib/libkern/arch/amd64/bcmp.S b/sys/lib/libkern/arch/amd64/bcmp.S
index 2ef8087f8e4..573d7b14753 100644
--- a/sys/lib/libkern/arch/amd64/bcmp.S
+++ b/sys/lib/libkern/arch/amd64/bcmp.S
@@ -8,15 +8,16 @@ ENTRY(bcmp)
 	shrq	$3,%rcx
 	repe
 	cmpsq
-	jne	L1
+	jne	1f
 
 	movq	%rdx,%rcx		/* compare remainder by bytes */
 	andq	$7,%rcx
 	repe
 	cmpsb
-	je	L2
+	je	2f
 
-L1:	incl	%eax
-L2:	RETGUARD_CHECK(bcmp, r11)
+1:	incl	%eax
+2:	RETGUARD_CHECK(bcmp, r11)
 	ret
 	lfence
+END(bcmp)
diff --git a/sys/lib/libkern/arch/amd64/bzero.S b/sys/lib/libkern/arch/amd64/bzero.S
index db05dbfa87e..54074986d98 100644
--- a/sys/lib/libkern/arch/amd64/bzero.S
+++ b/sys/lib/libkern/arch/amd64/bzero.S
@@ -18,7 +18,7 @@ ENTRY(bzero)
 	 * unaligned set.
 	 */
 	cmpq	$16,%rdx
-	jb	L1
+	jb	1f
 
 	movq	%rdi,%rcx		/* compute misalignment */
 	negq	%rcx
@@ -33,9 +33,10 @@ ENTRY(bzero)
 	rep
 	stosq
 
-L1:	movq	%rdx,%rcx		/* zero remainder by bytes */
+1:	movq	%rdx,%rcx		/* zero remainder by bytes */
 	rep
 	stosb
 	RETGUARD_CHECK(bzero, r11)
 	ret
 	lfence
+END(bzero)
diff --git a/sys/lib/libkern/arch/amd64/ffs.S b/sys/lib/libkern/arch/amd64/ffs.S
index 10154debeb2..62689ca8708 100644
--- a/sys/lib/libkern/arch/amd64/ffs.S
+++ b/sys/lib/libkern/arch/amd64/ffs.S
@@ -9,12 +9,13 @@
 ENTRY(ffs)
 	RETGUARD_SETUP(ffs, r11)
 	bsfl	%edi,%eax
-	jz	L1			/* ZF is set if all bits are 0 */
+	jz	1f			/* ZF is set if all bits are 0 */
 	incl	%eax			/* bits numbered from 1, not 0 */
-	jmp	L2
+	jmp	2f
 
 	_ALIGN_TRAPS
-L1:	xorl	%eax,%eax		/* clear result */
-L2:	RETGUARD_CHECK(ffs, r11)
+1:	xorl	%eax,%eax		/* clear result */
+2:	RETGUARD_CHECK(ffs, r11)
 	ret
 	lfence
+END(ffs)
diff --git a/sys/lib/libkern/arch/amd64/memchr.S b/sys/lib/libkern/arch/amd64/memchr.S
index 6a5943e40b2..063159b36a0 100644
--- a/sys/lib/libkern/arch/amd64/memchr.S
+++ b/sys/lib/libkern/arch/amd64/memchr.S
@@ -11,13 +11,14 @@ ENTRY(memchr)
 	movb	%sil,%al		/* set character to search for */
 	movq	%rdx,%rcx		/* set length of search */
 	testq	%rcx,%rcx		/* test for len == 0 */
-	jz	L1
+	jz	1f
 	repne				/* search! */
 	scasb
-	jne	L1			/* scan failed, return null */
+	jne	1f			/* scan failed, return null */
 	leaq	-1(%rdi),%rax		/* adjust result of scan */
-	jmp	L2
-L1:	xorq	%rax,%rax
-L2:	RETGUARD_CHECK(memchr, r11)
+	jmp	2f
+1:	xorq	%rax,%rax
+2:	RETGUARD_CHECK(memchr, r11)
 	ret
 	lfence
+END(memchr)
diff --git a/sys/lib/libkern/arch/amd64/memcmp.S b/sys/lib/libkern/arch/amd64/memcmp.S
index c16879cda78..b9944361869 100644
--- a/sys/lib/libkern/arch/amd64/memcmp.S
+++ b/sys/lib/libkern/arch/amd64/memcmp.S
@@ -12,27 +12,28 @@ ENTRY(memcmp)
 	shrq	$3,%rcx
 	repe
 	cmpsq
-	jne	L5			/* do we match so far? */
+	jne	5f			/* do we match so far? */
 
 	movq	%rdx,%rcx		/* compare remainder by bytes */
 	andq	$7,%rcx
 	repe
 	cmpsb
-	jne	L6			/* do we match? */
+	jne	6f			/* do we match? */
 
 	xorl	%eax,%eax		/* we match, return zero */
-	jmp	L7
+	jmp	7f
 
-L5:	movl	$8,%ecx			/* We know that one of the next */
+5:	movl	$8,%ecx			/* We know that one of the next */
 	subq	%rcx,%rdi		/* eight pairs of bytes do not */
 	subq	%rcx,%rsi		/* match. */
 	repe
 	cmpsb
-L6:	xorl	%eax,%eax		/* Perform unsigned comparison */
+6:	xorl	%eax,%eax		/* Perform unsigned comparison */
 	movb	-1(%rdi),%al
 	xorl	%edx,%edx
 	movb	-1(%rsi),%dl
 	subl	%edx,%eax
-L7:	RETGUARD_CHECK(memcmp, r11)
+7:	RETGUARD_CHECK(memcmp, r11)
 	ret
 	lfence
+END(memcmp)
diff --git a/sys/lib/libkern/arch/amd64/memset.S b/sys/lib/libkern/arch/amd64/memset.S
index e57a7c84599..ffab4df9875 100644
--- a/sys/lib/libkern/arch/amd64/memset.S
+++ b/sys/lib/libkern/arch/amd64/memset.S
@@ -19,7 +19,7 @@ ENTRY(memset)
 	 * unaligned set.
 	 */
 	cmpq	$0x0f,%rcx
-	jle	L1
+	jle	1f
 
 	movb	%al,%ah			/* copy char to all bytes in word */
 	movl	%eax,%edx
@@ -47,9 +47,10 @@ ENTRY(memset)
 	movq	%r8,%rcx		/* set remainder by bytes */
 	andq	$7,%rcx
 
-L1:	rep
+1:	rep
 	stosb
 	movq	%r11,%rax
 	RETGUARD_CHECK(memset, r10)
 	ret
 	lfence
+END(memset)
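
The END() additions are the other half of the cleanup. On OpenBSD/amd64 the macro comes from the machine asm.h header and records where a function ends, so its size is stored alongside the entry symbol; a sketch of what it boils down to (the exact header text is an assumption here, not quoted from the tree):

	#define END(x)	.size x, . - x		/* record the ELF size of symbol x */

With each ENTRY() now paired with an END(), tools that walk the symbol table, such as nm(1) or ddb's symbol lookup, can attribute an address inside these routines to the right function instead of to the nearest preceding symbol.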