diff options
author | Theo de Raadt <deraadt@cvs.openbsd.org> | 2018-07-10 16:01:27 +0000 |
---|---|---|
committer | Theo de Raadt <deraadt@cvs.openbsd.org> | 2018-07-10 16:01:27 +0000 |
commit | 170337a2e5dad781c35dbe77f15f5b07d3f988c1 (patch) | |
tree | c48a1f05be998269703cfd4631c1a7fc69b83f2f /sys/lib/libkern | |
parent | 2bd710f5b4fe8036d1371d1b47d23a1eae9e0d02 (diff) |
In asm.h ensure NENTRY uses the old-school nop-sled align, but change standard
ENTRY to use a trapsled. Fix a few functions which fall-through into an ENTRY
macro. amd64 binaries now are free of double+-nop sequences (except for one
assembler nit in aes-586.pl). Previous changes by guenther got us here.
ok mortimer kettenis
Diffstat (limited to 'sys/lib/libkern')
-rw-r--r-- | sys/lib/libkern/arch/amd64/htonl.S | 4 | ||||
-rw-r--r-- | sys/lib/libkern/arch/amd64/htons.S | 4 | ||||
-rw-r--r-- | sys/lib/libkern/arch/amd64/memmove.S | 2 |
3 files changed, 5 insertions, 5 deletions
diff --git a/sys/lib/libkern/arch/amd64/htonl.S b/sys/lib/libkern/arch/amd64/htonl.S index f7d640521d1..07965cd1fd9 100644 --- a/sys/lib/libkern/arch/amd64/htonl.S +++ b/sys/lib/libkern/arch/amd64/htonl.S @@ -41,8 +41,8 @@ #include <machine/asm.h> _ENTRY(_C_LABEL(htonl)) -_ENTRY(_C_LABEL(ntohl)) -_ENTRY(_C_LABEL(bswap32)) +_NENTRY(_C_LABEL(ntohl)) +_NENTRY(_C_LABEL(bswap32)) _PROF_PROLOGUE RETGUARD_SETUP(htonl, r11) movl %edi,%eax diff --git a/sys/lib/libkern/arch/amd64/htons.S b/sys/lib/libkern/arch/amd64/htons.S index 15016f5736e..3a702594733 100644 --- a/sys/lib/libkern/arch/amd64/htons.S +++ b/sys/lib/libkern/arch/amd64/htons.S @@ -41,8 +41,8 @@ #include <machine/asm.h> _ENTRY(_C_LABEL(htons)) -_ENTRY(_C_LABEL(ntohs)) -_ENTRY(_C_LABEL(bswap16)) +_NENTRY(_C_LABEL(ntohs)) +_NENTRY(_C_LABEL(bswap16)) _PROF_PROLOGUE RETGUARD_SETUP(htons, r11) movl %edi,%eax diff --git a/sys/lib/libkern/arch/amd64/memmove.S b/sys/lib/libkern/arch/amd64/memmove.S index 3a0bed88790..71d5b007f41 100644 --- a/sys/lib/libkern/arch/amd64/memmove.S +++ b/sys/lib/libkern/arch/amd64/memmove.S @@ -44,7 +44,7 @@ ENTRY(bcopy) xchgq %rdi,%rsi /* fall into memmove */ -ENTRY(memmove) +NENTRY(memmove) RETGUARD_SETUP(memmove, r10) movq %rdi,%r11 /* save dest */ movq %rdx,%rcx |