author     mortimer <mortimer@cvs.openbsd.org>	2018-07-03 23:22:49 +0000
committer  mortimer <mortimer@cvs.openbsd.org>	2018-07-03 23:22:49 +0000
commit     f4926f8c6a3d5eab2b2fe4eef72fba3f26a650d0
tree       69847af6fc5c00a4d789094d423093521024a4ab
parent     f06cc304c4be8d184fc01e20563b9a0b25e07e01
Add retguard macros for libkern
ok deraadt
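
Retguard pairs each function with a random per-function cookie: RETGUARD_SETUP loads the cookie and XORs it with the return address saved on the stack, and RETGUARD_CHECK undoes the XOR just before ret, trapping if the result no longer matches the cookie. A minimal sketch of the resulting instruction pattern on amd64 (the __retguard_foo symbol name and the bare int3 trap are illustrative assumptions here; the authoritative expansions are the RETGUARD_* macros in machine/asm.h):

	ENTRY(foo)
		movq	__retguard_foo(%rip), %r11	/* load per-function random cookie */
		xorq	(%rsp), %r11			/* mix in the saved return address */
		/* ... function body; %r11 must survive, or be spilled with RETGUARD_PUSH/POP ... */
		xorq	(%rsp), %r11			/* unmix; recovers the cookie iff the return address is intact */
		cmpq	__retguard_foo(%rip), %r11	/* still the cookie? */
		je	66f				/* yes: return address unmodified */
		int3					/* no: trap */
	66:	ret

Because the check must run on every exit path, each early "ret" in the diff below becomes a jump to a single checked exit (the new L2, L7, 12f and 3f labels), and strchr, which has no scratch register free across its body, additionally saves the guard register with RETGUARD_PUSH/RETGUARD_POP.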
-rw-r--r--	sys/lib/libkern/arch/amd64/bcmp.S	4
-rw-r--r--	sys/lib/libkern/arch/amd64/bzero.S	3
-rw-r--r--	sys/lib/libkern/arch/amd64/ffs.S	4
-rw-r--r--	sys/lib/libkern/arch/amd64/htonl.S	2
-rw-r--r--	sys/lib/libkern/arch/amd64/htons.S	2
-rw-r--r--	sys/lib/libkern/arch/amd64/memchr.S	4
-rw-r--r--	sys/lib/libkern/arch/amd64/memcmp.S	4
-rw-r--r--	sys/lib/libkern/arch/amd64/memmove.S	5
-rw-r--r--	sys/lib/libkern/arch/amd64/memset.S	3
-rw-r--r--	sys/lib/libkern/arch/amd64/scanc.S	2
-rw-r--r--	sys/lib/libkern/arch/amd64/skpc.S	2
-rw-r--r--	sys/lib/libkern/arch/amd64/strchr.S	8
-rw-r--r--	sys/lib/libkern/arch/amd64/strcmp.S	4
-rw-r--r--	sys/lib/libkern/arch/amd64/strlen.S	4
-rw-r--r--	sys/lib/libkern/arch/amd64/strrchr.S	4
15 files changed, 43 insertions, 12 deletions
diff --git a/sys/lib/libkern/arch/amd64/bcmp.S b/sys/lib/libkern/arch/amd64/bcmp.S
index 4cb1b3d6894..96754ed68a4 100644
--- a/sys/lib/libkern/arch/amd64/bcmp.S
+++ b/sys/lib/libkern/arch/amd64/bcmp.S
@@ -1,6 +1,7 @@
#include <machine/asm.h>
ENTRY(bcmp)
+ RETGUARD_SETUP(bcmp, r11)
xorl %eax,%eax /* clear return value */
movq %rdx,%rcx /* compare by words */
@@ -16,4 +17,5 @@ ENTRY(bcmp)
je L2
L1: incl %eax
-L2: ret
+L2: RETGUARD_CHECK(bcmp, r11)
+ ret
diff --git a/sys/lib/libkern/arch/amd64/bzero.S b/sys/lib/libkern/arch/amd64/bzero.S
index 354e2066ed3..a1c9a94d955 100644
--- a/sys/lib/libkern/arch/amd64/bzero.S
+++ b/sys/lib/libkern/arch/amd64/bzero.S
@@ -7,6 +7,7 @@
#include <machine/asm.h>
ENTRY(bzero)
+ RETGUARD_SETUP(bzero, r11)
movq %rsi,%rdx
xorq %rax,%rax /* set fill data to 0 */
@@ -35,5 +36,5 @@ ENTRY(bzero)
L1: movq %rdx,%rcx /* zero remainder by bytes */
rep
stosb
-
+ RETGUARD_CHECK(bzero, r11)
ret
diff --git a/sys/lib/libkern/arch/amd64/ffs.S b/sys/lib/libkern/arch/amd64/ffs.S
index 1beb25861b2..4acb74c4132 100644
--- a/sys/lib/libkern/arch/amd64/ffs.S
+++ b/sys/lib/libkern/arch/amd64/ffs.S
@@ -7,11 +7,13 @@
#include <machine/asm.h>
ENTRY(ffs)
+ RETGUARD_SETUP(ffs, r11)
bsfl %edi,%eax
jz L1 /* ZF is set if all bits are 0 */
incl %eax /* bits numbered from 1, not 0 */
- ret
+ jmp L2
_ALIGN_TEXT
L1: xorl %eax,%eax /* clear result */
+L2: RETGUARD_CHECK(ffs, r11)
ret
diff --git a/sys/lib/libkern/arch/amd64/htonl.S b/sys/lib/libkern/arch/amd64/htonl.S
index b9b121597f6..f7d640521d1 100644
--- a/sys/lib/libkern/arch/amd64/htonl.S
+++ b/sys/lib/libkern/arch/amd64/htonl.S
@@ -44,6 +44,8 @@ _ENTRY(_C_LABEL(htonl))
_ENTRY(_C_LABEL(ntohl))
_ENTRY(_C_LABEL(bswap32))
_PROF_PROLOGUE
+ RETGUARD_SETUP(htonl, r11)
movl %edi,%eax
bswap %eax
+ RETGUARD_CHECK(htonl, r11)
ret
diff --git a/sys/lib/libkern/arch/amd64/htons.S b/sys/lib/libkern/arch/amd64/htons.S
index a35b67b2951..15016f5736e 100644
--- a/sys/lib/libkern/arch/amd64/htons.S
+++ b/sys/lib/libkern/arch/amd64/htons.S
@@ -44,6 +44,8 @@ _ENTRY(_C_LABEL(htons))
_ENTRY(_C_LABEL(ntohs))
_ENTRY(_C_LABEL(bswap16))
_PROF_PROLOGUE
+ RETGUARD_SETUP(htons, r11)
movl %edi,%eax
xchgb %ah,%al
+ RETGUARD_CHECK(htons, r11)
ret
diff --git a/sys/lib/libkern/arch/amd64/memchr.S b/sys/lib/libkern/arch/amd64/memchr.S
index 1738e17f8ee..1ec17272f1b 100644
--- a/sys/lib/libkern/arch/amd64/memchr.S
+++ b/sys/lib/libkern/arch/amd64/memchr.S
@@ -7,6 +7,7 @@
#include <machine/asm.h>
ENTRY(memchr)
+ RETGUARD_SETUP(memchr, r11)
movb %sil,%al /* set character to search for */
movq %rdx,%rcx /* set length of search */
testq %rcx,%rcx /* test for len == 0 */
@@ -15,6 +16,7 @@ ENTRY(memchr)
scasb
jne L1 /* scan failed, return null */
leaq -1(%rdi),%rax /* adjust result of scan */
- ret
+ jmp L2
L1: xorq %rax,%rax
+L2: RETGUARD_CHECK(memchr, r11)
ret
diff --git a/sys/lib/libkern/arch/amd64/memcmp.S b/sys/lib/libkern/arch/amd64/memcmp.S
index 0d97f6a9bd2..b53e2d1c21a 100644
--- a/sys/lib/libkern/arch/amd64/memcmp.S
+++ b/sys/lib/libkern/arch/amd64/memcmp.S
@@ -7,6 +7,7 @@
#include <machine/asm.h>
ENTRY(memcmp)
+ RETGUARD_SETUP(memcmp, r11)
movq %rdx,%rcx /* compare by longs */
shrq $3,%rcx
repe
@@ -20,7 +21,7 @@ ENTRY(memcmp)
jne L6 /* do we match? */
xorl %eax,%eax /* we match, return zero */
- ret
+ jmp L7
L5: movl $8,%ecx /* We know that one of the next */
subq %rcx,%rdi /* eight pairs of bytes do not */
@@ -32,4 +33,5 @@ L6: xorl %eax,%eax /* Perform unsigned comparison */
xorl %edx,%edx
movb -1(%rsi),%dl
subl %edx,%eax
+L7: RETGUARD_CHECK(memcmp, r11)
ret
diff --git a/sys/lib/libkern/arch/amd64/memmove.S b/sys/lib/libkern/arch/amd64/memmove.S
index 351e4d53a49..3a0bed88790 100644
--- a/sys/lib/libkern/arch/amd64/memmove.S
+++ b/sys/lib/libkern/arch/amd64/memmove.S
@@ -45,6 +45,7 @@ ENTRY(bcopy)
/* fall into memmove */
ENTRY(memmove)
+ RETGUARD_SETUP(memmove, r10)
movq %rdi,%r11 /* save dest */
movq %rdx,%rcx
movq %rdi,%rax
@@ -54,6 +55,7 @@ ENTRY(memmove)
jmp 2f /* nope */
ENTRY(memcpy)
+ RETGUARD_SETUP(memmove, r10)
movq %rdi,%r11 /* save dest */
movq %rdx,%rcx
2:
@@ -65,7 +67,7 @@ ENTRY(memcpy)
rep
movsb
movq %r11,%rax
- ret
+ jmp 3f
1:
addq %rcx,%rdi /* copy backwards. */
addq %rcx,%rsi
@@ -83,4 +85,5 @@ ENTRY(memcpy)
movsq
movq %r11,%rax
cld
+3: RETGUARD_CHECK(memmove, r10)
ret
diff --git a/sys/lib/libkern/arch/amd64/memset.S b/sys/lib/libkern/arch/amd64/memset.S
index 67d6404e932..1ccdfe79a9d 100644
--- a/sys/lib/libkern/arch/amd64/memset.S
+++ b/sys/lib/libkern/arch/amd64/memset.S
@@ -7,6 +7,7 @@
#include <machine/asm.h>
ENTRY(memset)
+ RETGUARD_SETUP(memset, r10)
movq %rsi,%rax
andq $0xff,%rax
movq %rdx,%rcx
@@ -49,5 +50,5 @@ ENTRY(memset)
L1: rep
stosb
movq %r11,%rax
-
+ RETGUARD_CHECK(memset, r10)
ret
diff --git a/sys/lib/libkern/arch/amd64/scanc.S b/sys/lib/libkern/arch/amd64/scanc.S
index 4ecaae3c15b..2ae94d96d74 100644
--- a/sys/lib/libkern/arch/amd64/scanc.S
+++ b/sys/lib/libkern/arch/amd64/scanc.S
@@ -36,6 +36,7 @@
#include <machine/asm.h>
ENTRY(scanc)
+ RETGUARD_SETUP(scanc, r10)
movq %rdx,%r11
movb %cl,%dl
movl %edi,%ecx
@@ -51,4 +52,5 @@ ENTRY(scanc)
jnz 1b
2:
movl %ecx,%eax
+ RETGUARD_CHECK(scanc, r10)
ret
diff --git a/sys/lib/libkern/arch/amd64/skpc.S b/sys/lib/libkern/arch/amd64/skpc.S
index 5942a1c07e8..c67f9c3b252 100644
--- a/sys/lib/libkern/arch/amd64/skpc.S
+++ b/sys/lib/libkern/arch/amd64/skpc.S
@@ -36,6 +36,7 @@
#include <machine/asm.h>
ENTRY(skpc)
+ RETGUARD_SETUP(skpc, r10)
movl %edi,%eax
movq %rsi,%rcx
movq %rdx,%rdi
@@ -45,4 +46,5 @@ ENTRY(skpc)
incq %rcx
1:
movl %ecx,%eax
+ RETGUARD_CHECK(skpc, r10)
ret
diff --git a/sys/lib/libkern/arch/amd64/strchr.S b/sys/lib/libkern/arch/amd64/strchr.S
index f78e030486e..a4234184b47 100644
--- a/sys/lib/libkern/arch/amd64/strchr.S
+++ b/sys/lib/libkern/arch/amd64/strchr.S
@@ -1,4 +1,4 @@
-/* $OpenBSD: strchr.S,v 1.3 2014/12/09 15:13:57 reyk Exp $ */
+/* $OpenBSD: strchr.S,v 1.4 2018/07/03 23:22:48 mortimer Exp $ */
/* $NetBSD: strchr.S,v 1.7 2014/03/22 19:16:34 jakllsch Exp $ */
/*-
@@ -44,6 +44,8 @@ STRONG_ALIAS(index, strchr)
*/
ENTRY(strchr)
+ RETGUARD_SETUP(strchr, r9)
+ RETGUARD_PUSH(r9)
movabsq $0x0101010101010101,%r8
movzbq %sil,%rdx /* value to search for (c) */
@@ -85,7 +87,7 @@ ENTRY(strchr)
bsf %r11,%r11 /* 7, 15, 23 ... 63 */
8: shr $3,%r11 /* 0, 1, 2 .. 7 */
lea -8(%r11,%rdi),%rax
- ret
+ jmp 12f
/* End of string, check whether char is before NUL */
_ALIGN_TEXT /* adds three byte nop */
@@ -97,6 +99,8 @@ ENTRY(strchr)
cmp %r11,%rax
jae 8b /* return 'found' if same - searching for NUL */
11: xor %eax,%eax /* char not found */
+12: RETGUARD_POP(r9)
+ RETGUARD_CHECK(strchr, r9)
ret
/* Source misaligned: read aligned word and make low bytes invalid */
diff --git a/sys/lib/libkern/arch/amd64/strcmp.S b/sys/lib/libkern/arch/amd64/strcmp.S
index 7f1656ee80a..3ba13a49c34 100644
--- a/sys/lib/libkern/arch/amd64/strcmp.S
+++ b/sys/lib/libkern/arch/amd64/strcmp.S
@@ -1,4 +1,4 @@
-/* $OpenBSD: strcmp.S,v 1.3 2014/12/09 15:13:57 reyk Exp $ */
+/* $OpenBSD: strcmp.S,v 1.4 2018/07/03 23:22:48 mortimer Exp $ */
/* $NetBSD: strcmp.S,v 1.2 2014/03/22 19:16:34 jakllsch Exp $ */
/*
@@ -9,6 +9,7 @@
#include <machine/asm.h>
ENTRY(strcmp)
+ RETGUARD_SETUP(strcmp, r10)
/*
* Align s1 to word boundary.
* Consider unrolling loop?
@@ -68,4 +69,5 @@ ENTRY(strcmp)
movzbq %al,%rax
movzbq %dl,%rdx
subq %rdx,%rax
+ RETGUARD_CHECK(strcmp, r10)
ret
diff --git a/sys/lib/libkern/arch/amd64/strlen.S b/sys/lib/libkern/arch/amd64/strlen.S
index 1528ad3eb67..888f2c05d12 100644
--- a/sys/lib/libkern/arch/amd64/strlen.S
+++ b/sys/lib/libkern/arch/amd64/strlen.S
@@ -1,4 +1,4 @@
-/* $OpenBSD: strlen.S,v 1.5 2016/03/20 16:50:30 krw Exp $ */
+/* $OpenBSD: strlen.S,v 1.6 2018/07/03 23:22:48 mortimer Exp $ */
/* $NetBSD: strlen.S,v 1.6 2014/03/22 19:16:34 jakllsch Exp $ */
/*-
@@ -112,6 +112,7 @@
*/
ENTRY(strlen)
+ RETGUARD_SETUP(strlen, r10)
movabsq $0x0101010101010101,%r8
test $7,%dil
@@ -139,6 +140,7 @@ ENTRY(strlen)
bsf %rdx,%rdx /* 7, 15, 23 ... 63 */
shr $3,%rdx /* 0, 1, 2 ... 7 */
lea -8(%rax,%rdx),%rax
+ RETGUARD_CHECK(strlen, r10)
ret
/* Misaligned, read aligned word and make low bytes non-zero */
diff --git a/sys/lib/libkern/arch/amd64/strrchr.S b/sys/lib/libkern/arch/amd64/strrchr.S
index 50561cba786..945146d0059 100644
--- a/sys/lib/libkern/arch/amd64/strrchr.S
+++ b/sys/lib/libkern/arch/amd64/strrchr.S
@@ -1,4 +1,4 @@
-/* $OpenBSD: strrchr.S,v 1.3 2014/12/09 15:13:57 reyk Exp $ */
+/* $OpenBSD: strrchr.S,v 1.4 2018/07/03 23:22:48 mortimer Exp $ */
/* $NetBSD: strrchr.S,v 1.3 2014/03/22 19:16:34 jakllsch Exp $ */
/*
@@ -11,6 +11,7 @@
STRONG_ALIAS(rindex, strrchr)
ENTRY(strrchr)
+ RETGUARD_SETUP(strrchr, r10)
movzbq %sil,%rcx
/* zero return value */
@@ -120,4 +121,5 @@ ENTRY(strrchr)
jne .Lloop
.Ldone:
+ RETGUARD_CHECK(strrchr, r10)
ret