Merge "forkpty: set the output fd to -1 on the slave side."
diff --git a/libc/Android.bp b/libc/Android.bp
index 89a41cb..ca8a4d5 100644
--- a/libc/Android.bp
+++ b/libc/Android.bp
@@ -793,7 +793,6 @@
"bionic/setjmp_cookie.cpp",
"bionic/__memcpy_chk.cpp",
- "bionic/__memset_chk.cpp",
"bionic/__strcat_chk.cpp",
"bionic/__strcpy_chk.cpp",
"bionic/strchr.cpp",
@@ -825,7 +824,6 @@
],
exclude_srcs: [
"bionic/__memcpy_chk.cpp",
- "bionic/__memset_chk.cpp",
],
cortex_a7: {
srcs: [
@@ -1075,7 +1073,6 @@
"arch-mips/string/strlen.c",
"arch-mips/bionic/__bionic_clone.S",
- "arch-mips/bionic/bzero.S",
"arch-mips/bionic/cacheflush.cpp",
"arch-mips/bionic/_exit_with_stack_teardown.S",
"arch-mips/bionic/libgcc_compat.c",
@@ -1124,8 +1121,6 @@
"arch-x86/atom/string/sse2-wcsrchr-atom.S",
"arch-x86/atom/string/sse2-wcslen-atom.S",
"arch-x86/atom/string/sse2-wcscmp-atom.S",
- "arch-x86/silvermont/string/sse2-bcopy-slm.S",
- "arch-x86/silvermont/string/sse2-bzero-slm.S",
"arch-x86/silvermont/string/sse2-memcpy-slm.S",
"arch-x86/silvermont/string/sse2-memmove-slm.S",
"arch-x86/silvermont/string/sse2-memset-slm.S",
@@ -1151,10 +1146,8 @@
],
atom: {
srcs: [
- "arch-x86/atom/string/sse2-bzero-atom.S",
"arch-x86/atom/string/sse2-memset-atom.S",
"arch-x86/atom/string/sse2-strlen-atom.S",
- "arch-x86/atom/string/ssse3-bcopy-atom.S",
"arch-x86/atom/string/ssse3-memcmp-atom.S",
"arch-x86/atom/string/ssse3-memcpy-atom.S",
"arch-x86/atom/string/ssse3-memmove-atom.S",
@@ -1164,8 +1157,6 @@
],
exclude_srcs: [
"arch-x86/generic/string/memcmp.S",
- "arch-x86/silvermont/string/sse2-bcopy-slm.S",
- "arch-x86/silvermont/string/sse2-bzero-slm.S",
"arch-x86/silvermont/string/sse2-memcpy-slm.S",
"arch-x86/silvermont/string/sse2-memmove-slm.S",
"arch-x86/silvermont/string/sse2-memset-slm.S",
diff --git a/libc/arch-arm/bionic/setjmp.S b/libc/arch-arm/bionic/setjmp.S
index 464f7d8..91d158a 100644
--- a/libc/arch-arm/bionic/setjmp.S
+++ b/libc/arch-arm/bionic/setjmp.S
@@ -36,10 +36,13 @@
// According to the ARM AAPCS document, we only need to save
// the following registers:
//
-// Core r4-r14
+// Core r4-r11, sp, lr
+// AAPCS 5.1.1:
+// A subroutine must preserve the contents of the registers r4-r8, r10, r11
+// and SP (and r9 in PCS variants that designate r9 as v6).
//
-// VFP d8-d15 (see section 5.1.2.1)
-//
+// VFP d8-d15
+// AAPCS 5.1.2.1:
// Registers s16-s31 (d8-d15, q4-q7) must be preserved across subroutine
// calls; registers s0-s15 (d0-d7, q0-q3) do not need to be preserved
// (and can be used for passing arguments or returning results in standard
@@ -49,14 +52,15 @@
// FPSCR saved because glibc does.
// The internal structure of a jmp_buf is totally private.
-// Current layout (may change in the future):
+// Current layout (changes from release to release):
//
// word name description
// 0 sigflag/cookie setjmp cookie in top 31 bits, signal mask flag in low bit
// 1 sigmask signal mask (not used with _setjmp / _longjmp)
// 2 float_base base of float registers (d8 to d15)
// 18 float_state floating-point status and control register
-// 19 core_base base of core registers (r4 to r14)
+// 19 core_base base of core registers (r4-r11, r13-r14)
+// 29 checksum checksum of all of the core registers, to give better error messages.
// 30 reserved reserved entries (room to grow)
// 64
//
@@ -69,6 +73,7 @@
#define _JB_FLOAT_BASE (_JB_SIGMASK+1)
#define _JB_FLOAT_STATE (_JB_FLOAT_BASE + (15-8+1)*2)
#define _JB_CORE_BASE (_JB_FLOAT_STATE+1)
+#define _JB_CHECKSUM (_JB_CORE_BASE+10)
ENTRY(setjmp)
mov r1, #1
@@ -81,6 +86,8 @@
END(_setjmp)
#define MANGLE_REGISTERS 1
+#define USE_CHECKSUM 1
+
.macro m_mangle_registers reg
#if MANGLE_REGISTERS
eor r4, r4, \reg
@@ -91,7 +98,6 @@
eor r9, r9, \reg
eor r10, r10, \reg
eor r11, r11, \reg
- eor r12, r12, \reg
eor r13, r13, \reg
eor r14, r14, \reg
#endif
@@ -101,6 +107,14 @@
m_mangle_registers \reg
.endm
+.macro m_calculate_checksum dst, src, scratch
+ mov \dst, #0
+ .irp i,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28
+ ldr \scratch, [\src, #(\i * 4)]
+ eor \dst, \dst, \scratch
+ .endr
+.endm
+
// int sigsetjmp(sigjmp_buf env, int save_signal_mask);
ENTRY(sigsetjmp)
stmfd sp!, {r0, lr}
@@ -153,8 +167,8 @@
m_mangle_registers r2
// ARM deprecates using sp in the register list for stmia.
- stmia r1, {r4-r12, lr}
- str sp, [r1, #(10 * 4)]
+ stmia r1, {r4-r11, lr}
+ str sp, [r1, #(9 * 4)]
m_unmangle_registers r2
// Save floating-point registers.
@@ -165,6 +179,12 @@
fmrx r1, fpscr
str r1, [r0, #(_JB_FLOAT_STATE * 4)]
+#if USE_CHECKSUM
+ // Calculate the checksum.
+ m_calculate_checksum r12, r0, r2
+ str r12, [r0, #(_JB_CHECKSUM * 4)]
+#endif
+
mov r0, #0
bx lr
END(sigsetjmp)
@@ -177,6 +197,15 @@
.cfi_rel_offset r1, 4
.cfi_rel_offset lr, 8
+#if USE_CHECKSUM
+ // Check the checksum before doing anything.
+ m_calculate_checksum r12, r0, r3
+ ldr r2, [r0, #(_JB_CHECKSUM * 4)]
+
+ teq r2, r12
+ bne __bionic_setjmp_checksum_mismatch
+#endif
+
// Fetch the signal flag.
ldr r1, [r0, #(_JB_SIGFLAG * 4)]
@@ -203,14 +232,16 @@
ldr r2, [r0, #(_JB_FLOAT_STATE * 4)]
fmxr fpscr, r2
- // Restore core registers.
+ // Load the cookie.
ldr r3, [r0, #(_JB_SIGFLAG * 4)]
bic r3, r3, #1
+
+ // Restore core registers.
add r2, r0, #(_JB_CORE_BASE * 4)
// ARM deprecates using sp in the register list for ldmia.
- ldmia r2, {r4-r12, lr}
- ldr sp, [r2, #(10 * 4)]
+ ldmia r2, {r4-r11, lr}
+ ldr sp, [r2, #(9 * 4)]
m_unmangle_registers r3
// Save the return value/address and check the setjmp cookie.
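The new checksum words in the ARM jmp_buf work like this: sigsetjmp XORs together every saved word of the buffer and stores the result in the _JB_CHECKSUM slot; siglongjmp recomputes the XOR and aborts on a mismatch before restoring any registers. A minimal C++ sketch of that scheme (the indices follow the layout comment above, but these helpers are only an illustration, not bionic's implementation):

#include <cstdint>
#include <cstdio>
#include <cstdlib>

constexpr int kSavedWords = 29;    // words 0..28 of the hypothetical buffer
constexpr int kChecksumSlot = 29;  // where the checksum lives, as in _JB_CHECKSUM

uint32_t calculate_checksum(const uint32_t* buf) {
  uint32_t sum = 0;
  for (int i = 0; i < kSavedWords; ++i) sum ^= buf[i];  // mirrors m_calculate_checksum
  return sum;
}

void on_setjmp(uint32_t* buf) {
  buf[kChecksumSlot] = calculate_checksum(buf);  // stored alongside the saved state
}

void on_longjmp(const uint32_t* buf) {
  if (calculate_checksum(buf) != buf[kChecksumSlot]) {
    fprintf(stderr, "setjmp checksum mismatch\n");  // bionic calls __libc_fatal here
    abort();
  }
}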
diff --git a/libc/arch-arm/cortex-a15/bionic/memset.S b/libc/arch-arm/cortex-a15/bionic/memset.S
index 732a039..6458f97 100644
--- a/libc/arch-arm/cortex-a15/bionic/memset.S
+++ b/libc/arch-arm/cortex-a15/bionic/memset.S
@@ -40,7 +40,7 @@
ENTRY(__memset_chk)
cmp r2, r3
- bls .L_done
+ bls memset
// Preserve lr for backtrace.
push {lr}
@@ -50,13 +50,6 @@
bl __memset_chk_fail
END(__memset_chk)
-ENTRY(bzero)
- mov r2, r1
- mov r1, #0
-.L_done:
- // Fall through to memset...
-END(bzero)
-
ENTRY(memset)
stmfd sp!, {r0}
.cfi_def_cfa_offset 4
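Each of the ARM memset.S variants in this change gets the same treatment: with bzero gone, __memset_chk can no longer fall through a bzero label, so it branches straight to memset when the write fits. A rough C equivalent of that control flow (the diagnostic message here is made up; __memset_chk_fail in fortify.cpp produces the real one):

#include <cstddef>
#include <cstdio>
#include <cstdlib>
#include <cstring>

extern "C" void* memset_chk_sketch(void* dst, int byte, size_t count, size_t dst_len) {
  if (count > dst_len) {              // "cmp r2, r3": fail only when the write would overflow
    fprintf(stderr, "memset: prevented write past end of buffer\n");
    abort();                          // stands in for __memset_chk_fail, which never returns
  }
  return memset(dst, byte, count);    // "bls memset": the in-bounds fast path
}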
diff --git a/libc/arch-arm/cortex-a7/bionic/memset.S b/libc/arch-arm/cortex-a7/bionic/memset.S
index 0b96d62..357416c 100644
--- a/libc/arch-arm/cortex-a7/bionic/memset.S
+++ b/libc/arch-arm/cortex-a7/bionic/memset.S
@@ -40,7 +40,7 @@
ENTRY(__memset_chk)
cmp r2, r3
- bls .L_done
+ bls memset
// Preserve lr for backtrace.
push {lr}
@@ -50,13 +50,6 @@
bl __memset_chk_fail
END(__memset_chk)
-ENTRY(bzero)
- mov r2, r1
- mov r1, #0
-.L_done:
- // Fall through to memset...
-END(bzero)
-
ENTRY(memset)
mov r3, r0
// At this point only d0, d1 are going to be used below.
diff --git a/libc/arch-arm/cortex-a9/bionic/memset.S b/libc/arch-arm/cortex-a9/bionic/memset.S
index a2c8110..d00231b 100644
--- a/libc/arch-arm/cortex-a9/bionic/memset.S
+++ b/libc/arch-arm/cortex-a9/bionic/memset.S
@@ -38,7 +38,7 @@
ENTRY(__memset_chk)
cmp r2, r3
- bls .L_done
+ bls memset
// Preserve lr for backtrace.
push {lr}
@@ -48,14 +48,6 @@
bl __memset_chk_fail
END(__memset_chk)
-ENTRY(bzero)
- mov r2, r1
- mov r1, #0
-
-.L_done:
- // Fall through to memset...
-END(bzero)
-
/* memset() returns its first argument. */
ENTRY(memset)
// The neon memset only wins for less than 132.
diff --git a/libc/arch-arm/denver/bionic/memset.S b/libc/arch-arm/denver/bionic/memset.S
index 8d79e5b..1b0152a 100644
--- a/libc/arch-arm/denver/bionic/memset.S
+++ b/libc/arch-arm/denver/bionic/memset.S
@@ -42,7 +42,7 @@
ENTRY(__memset_chk)
cmp r2, r3
- bls .L_done
+ bls memset
// Preserve lr for backtrace.
push {lr}
@@ -52,13 +52,6 @@
bl __memset_chk_fail
END(__memset_chk)
-ENTRY(bzero)
- mov r2, r1
- mov r1, #0
-.L_done:
- // Fall through to memset...
-END(bzero)
-
ENTRY(memset)
pldw [r0]
mov r3, r0
diff --git a/libc/arch-arm/generic/bionic/memset.S b/libc/arch-arm/generic/bionic/memset.S
index 6e70397..1fd0de1 100644
--- a/libc/arch-arm/generic/bionic/memset.S
+++ b/libc/arch-arm/generic/bionic/memset.S
@@ -38,19 +38,11 @@
ENTRY(__memset_chk)
cmp r2, r3
- bls done
+ bls memset
bl __memset_chk_fail
END(__memset_chk)
-ENTRY(bzero)
- mov r2, r1
- mov r1, #0
-
-done:
- // Fall through to memset...
-END(bzero)
-
ENTRY(memset)
/* compute the offset to align the destination
* offset = (4-(src&3))&3 = -src & 3
diff --git a/libc/arch-arm/krait/bionic/memset.S b/libc/arch-arm/krait/bionic/memset.S
index 0264dd3..81ba74b 100644
--- a/libc/arch-arm/krait/bionic/memset.S
+++ b/libc/arch-arm/krait/bionic/memset.S
@@ -40,7 +40,7 @@
ENTRY(__memset_chk)
cmp r2, r3
- bls .L_done
+ bls memset
// Preserve lr for backtrace.
push {lr}
@@ -50,14 +50,6 @@
bl __memset_chk_fail
END(__memset_chk)
-ENTRY(bzero)
- mov r2, r1
- mov r1, #0
-
-.L_done:
- // Fall through to memset...
-END(bzero)
-
/* memset() returns its first argument. */
ENTRY(memset)
mov r3, r0
diff --git a/libc/arch-arm64/bionic/setjmp.S b/libc/arch-arm64/bionic/setjmp.S
index c06a671..2550134 100644
--- a/libc/arch-arm64/bionic/setjmp.S
+++ b/libc/arch-arm64/bionic/setjmp.S
@@ -37,6 +37,18 @@
// NOTE: All the registers saved here will have 64 bit vales.
// (sic: "vales" should read "values"; the registers saved here are 64-bit values.)
// AAPCS mandates that the higher part of q registers do not need to
// be saved by the callee.
+//
+// The internal structure of a jmp_buf is totally private.
+// Current layout (changes from release to release):
+//
+// word name description
+// 0 sigflag/cookie setjmp cookie in top 31 bits, signal mask flag in low bit
+// 1 sigmask signal mask (not used with _setjmp / _longjmp)
+// 2 core_base base of core registers (x19-x30, sp)
+// 15 float_base base of float registers (d8-d15)
+// 23 checksum checksum of core registers
+// 24 reserved reserved entries (room to grow)
+// 32
#define _JB_SIGFLAG 0
#define _JB_SIGMASK (_JB_SIGFLAG + 1)
@@ -51,8 +63,11 @@
#define _JB_D12_D13 (_JB_D14_D15 + 2)
#define _JB_D10_D11 (_JB_D12_D13 + 2)
#define _JB_D8_D9 (_JB_D10_D11 + 2)
+#define _JB_CHECKSUM (_JB_D8_D9 + 2)
#define MANGLE_REGISTERS 1
+#define USE_CHECKSUM 1
+
.macro m_mangle_registers reg, sp_reg
#if MANGLE_REGISTERS
eor x19, x19, \reg
@@ -71,6 +86,14 @@
#endif
.endm
+.macro m_calculate_checksum dst, src, scratch
+ mov \dst, #0
+ .irp i,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22
+ ldr \scratch, [\src, #(\i * 8)]
+ eor \dst, \dst, \scratch
+ .endr
+.endm
+
.macro m_unmangle_registers reg, sp_reg
m_mangle_registers \reg, sp_reg=\sp_reg
.endm
@@ -143,12 +166,27 @@
stp d10, d11, [x0, #(_JB_D10_D11 * 8)]
stp d8, d9, [x0, #(_JB_D8_D9 * 8)]
+#if USE_CHECKSUM
+ // Calculate the checksum.
+ m_calculate_checksum x12, x0, x2
+ str x12, [x0, #(_JB_CHECKSUM * 8)]
+#endif
+
mov w0, #0
ret
END(sigsetjmp)
// void siglongjmp(sigjmp_buf env, int value);
ENTRY(siglongjmp)
+#if USE_CHECKSUM
+ // Check the checksum before doing anything.
+ m_calculate_checksum x12, x0, x2
+ ldr x2, [x0, #(_JB_CHECKSUM * 8)]
+
+ cmp x2, x12
+ bne __bionic_setjmp_checksum_mismatch
+#endif
+
// Do we need to restore the signal mask?
ldr x2, [x0, #(_JB_SIGFLAG * 8)]
tbz w2, #0, 1f
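Both setjmp implementations also keep the existing register mangling: every saved register is XORed with the per-process setjmp cookie (set up in setjmp_cookie.cpp at libc startup) before it is stored, and XORed again on restore. A tiny sketch of the round-trip, with a made-up cookie value:

#include <cassert>
#include <cstdint>

constexpr uint64_t kCookie = 0x00c0ffee5a5a0000ull;  // illustrative; the real cookie is generated at startup

uint64_t mangle(uint64_t reg) { return reg ^ kCookie; }  // applied when saving (m_mangle_registers)
uint64_t unmangle(uint64_t v) { return v ^ kCookie; }    // applied when restoring

int main() {
  uint64_t lr = 0x400123;              // pretend saved return address
  assert(unmangle(mangle(lr)) == lr);  // XORing twice with the same cookie is the identity
  return 0;                            // a buffer forged without the cookie restores to junk instead
}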
diff --git a/libc/arch-mips/bionic/bzero.S b/libc/arch-mips/bionic/bzero.S
deleted file mode 100644
index 6e5d294..0000000
--- a/libc/arch-mips/bionic/bzero.S
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * Copyright (C) 2008 The Android Open Source Project
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions
- * are met:
- * * Redistributions of source code must retain the above copyright
- * notice, this list of conditions and the following disclaimer.
- * * Redistributions in binary form must reproduce the above copyright
- * notice, this list of conditions and the following disclaimer in
- * the documentation and/or other materials provided with the
- * distribution.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
- * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
- * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
- * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
- * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
- * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
- * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
- * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
- * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
- * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
- * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
- * SUCH DAMAGE.
- */
-
-#include <private/bionic_asm.h>
-
-// void bzero(void*, size_t);
-ENTRY(bzero)
- .set noreorder
- .cpload t9
- move a2,a1
- la t9,memset
- j t9
- move a1,zero
-END(bzero)
diff --git a/libc/arch-mips/mips.mk b/libc/arch-mips/mips.mk
index 3663fab..b98d525 100644
--- a/libc/arch-mips/mips.mk
+++ b/libc/arch-mips/mips.mk
@@ -12,7 +12,6 @@
libc_bionic_src_files_mips += \
arch-mips/bionic/__bionic_clone.S \
- arch-mips/bionic/bzero.S \
arch-mips/bionic/cacheflush.cpp \
arch-mips/bionic/_exit_with_stack_teardown.S \
arch-mips/bionic/libgcc_compat.c \
diff --git a/libc/arch-mips/string/memset.S b/libc/arch-mips/string/memset.S
index 65bb5b5..dd94f3b 100644
--- a/libc/arch-mips/string/memset.S
+++ b/libc/arch-mips/string/memset.S
@@ -170,11 +170,6 @@
# define R6_CODE
#endif
-/* Allow the routine to be named something else if desired. */
-#ifndef MEMSET_NAME
-# define MEMSET_NAME memset
-#endif
-
/* We load/store 64 bits at a time when USE_DOUBLE is true.
The C_ prefix stands for CHUNK and is used to avoid macro name
conflicts with system header files. */
@@ -209,9 +204,19 @@
#define UNITM1(unit) (((unit)*NSIZE)-1)
#ifdef __ANDROID__
-LEAF(MEMSET_NAME,0)
+LEAF(__memset_chk,0)
#else
-LEAF(MEMSET_NAME)
+LEAF(__memset_chk)
+#endif
+ bgtu a2, a3, __memset_chk_fail
+
+ // Fall through to memset...
+END(__memset_chk)
+
+#ifdef __ANDROID__
+LEAF(memset,0)
+#else
+LEAF(memset)
#endif
.set nomips16
@@ -428,9 +433,10 @@
.set at
.set reorder
-END(MEMSET_NAME)
+END(memset)
#ifndef __ANDROID__
# ifdef _LIBC
-libc_hidden_builtin_def (MEMSET_NAME)
+libc_hidden_builtin_def (memset)
+libc_hidden_builtin_def (__memset_chk)
# endif
#endif
diff --git a/libc/arch-x86/atom/atom.mk b/libc/arch-x86/atom/atom.mk
index 1afabac..4de4185 100644
--- a/libc/arch-x86/atom/atom.mk
+++ b/libc/arch-x86/atom/atom.mk
@@ -1,8 +1,6 @@
libc_bionic_src_files_x86 += \
- arch-x86/atom/string/sse2-bzero-atom.S \
arch-x86/atom/string/sse2-memset-atom.S \
arch-x86/atom/string/sse2-strlen-atom.S \
- arch-x86/atom/string/ssse3-bcopy-atom.S \
arch-x86/atom/string/ssse3-memcmp-atom.S \
arch-x86/atom/string/ssse3-memcpy-atom.S \
arch-x86/atom/string/ssse3-memmove-atom.S \
@@ -14,8 +12,6 @@
arch-x86/generic/string/memcmp.S \
libc_bionic_src_files_exclude_x86 += \
- arch-x86/silvermont/string/sse2-bcopy-slm.S \
- arch-x86/silvermont/string/sse2-bzero-slm.S \
arch-x86/silvermont/string/sse2-memcpy-slm.S \
arch-x86/silvermont/string/sse2-memmove-slm.S \
arch-x86/silvermont/string/sse2-memset-slm.S \
diff --git a/libc/arch-x86/atom/string/sse2-bzero-atom.S b/libc/arch-x86/atom/string/sse2-bzero-atom.S
deleted file mode 100644
index 0ddc499..0000000
--- a/libc/arch-x86/atom/string/sse2-bzero-atom.S
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
-Copyright (c) 2010, Intel Corporation
-All rights reserved.
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions are met:
-
- * Redistributions of source code must retain the above copyright notice,
- * this list of conditions and the following disclaimer.
-
- * Redistributions in binary form must reproduce the above copyright notice,
- * this list of conditions and the following disclaimer in the documentation
- * and/or other materials provided with the distribution.
-
- * Neither the name of Intel Corporation nor the names of its contributors
- * may be used to endorse or promote products derived from this software
- * without specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
-ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
-WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
-ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
-(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
-LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
-ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
-SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-*/
-
-#define USE_AS_BZERO
-#define MEMSET bzero
-#include "sse2-memset-atom.S"
diff --git a/libc/arch-x86/atom/string/sse2-memset-atom.S b/libc/arch-x86/atom/string/sse2-memset-atom.S
index b0963a1..30fb3f1 100644
--- a/libc/arch-x86/atom/string/sse2-memset-atom.S
+++ b/libc/arch-x86/atom/string/sse2-memset-atom.S
@@ -28,6 +28,8 @@
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
+#include <private/bionic_asm.h>
+
#include "cache.h"
#ifndef L
@@ -38,14 +40,6 @@
# define ALIGN(n) .p2align n
#endif
-#ifndef cfi_startproc
-# define cfi_startproc .cfi_startproc
-#endif
-
-#ifndef cfi_endproc
-# define cfi_endproc .cfi_endproc
-#endif
-
#ifndef cfi_rel_offset
# define cfi_rel_offset(reg, off) .cfi_rel_offset reg, off
#endif
@@ -58,21 +52,6 @@
# define cfi_adjust_cfa_offset(off) .cfi_adjust_cfa_offset off
#endif
-#ifndef ENTRY
-# define ENTRY(name) \
- .type name, @function; \
- .globl name; \
- .p2align 4; \
-name: \
- cfi_startproc
-#endif
-
-#ifndef END
-# define END(name) \
- cfi_endproc; \
- .size name, .-name
-#endif
-
#define CFI_PUSH(REG) \
cfi_adjust_cfa_offset (4); \
cfi_rel_offset (REG, 0)
@@ -84,16 +63,11 @@
#define PUSH(REG) pushl REG; CFI_PUSH (REG)
#define POP(REG) popl REG; CFI_POP (REG)
-#ifdef USE_AS_BZERO
-# define DEST PARMS
-# define LEN DEST+4
-# define SETRTNVAL
-#else
-# define DEST PARMS
-# define CHR DEST+4
-# define LEN CHR+4
-# define SETRTNVAL movl DEST(%esp), %eax
-#endif
+#define DST PARMS
+#define CHR DST+4
+#define LEN CHR+4
+#define CHK_DST_LEN (LEN+4)
+#define SETRTNVAL movl DST(%esp), %eax
#if (defined SHARED || defined __PIC__)
# define ENTRANCE PUSH (%ebx);
@@ -138,27 +112,27 @@
jmp *TABLE(,%ecx,4)
#endif
-#ifndef MEMSET
-# define MEMSET memset
-#endif
+ENTRY(__memset_chk)
+ movl LEN(%esp), %ecx
+ cmpl %ecx, CHK_DST_LEN(%esp)
+ jbe memset
+
+ jmp __memset_chk_fail
+END(__memset_chk)
.section .text.sse2,"ax",@progbits
ALIGN (4)
-ENTRY (MEMSET)
+ENTRY (memset)
ENTRANCE
movl LEN(%esp), %ecx
-#ifdef USE_AS_BZERO
- xor %eax, %eax
-#else
movzbl CHR(%esp), %eax
movb %al, %ah
/* Fill the whole EAX with pattern. */
movl %eax, %edx
shl $16, %eax
or %edx, %eax
-#endif
- movl DEST(%esp), %edx
+ movl DST(%esp), %edx
cmp $32, %ecx
jae L(32bytesormore)
@@ -287,12 +261,8 @@
/* ECX > 32 and EDX is 4 byte aligned. */
L(32bytesormore):
/* Fill xmm0 with the pattern. */
-#ifdef USE_AS_BZERO
- pxor %xmm0, %xmm0
-#else
movd %eax, %xmm0
pshufd $0, %xmm0, %xmm0
-#endif
testl $0xf, %edx
jz L(aligned_16)
/* ECX > 32 and EDX is not 16 byte aligned. */
@@ -917,4 +887,4 @@
SETRTNVAL
RETURN_END
-END (MEMSET)
+END (memset)
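With USE_AS_BZERO gone, the 32-bit memset always builds its fill pattern from the caller's byte. The "Fill the whole EAX with pattern" sequence kept above is plain byte replication; in C it is:

#include <cstdint>

uint32_t replicate_byte(uint8_t c) {
  uint32_t x = c;   // movzbl CHR(%esp), %eax
  x |= x << 8;      // movb %al, %ah
  x |= x << 16;     // movl/shl/or: copy the low half into the high half
  return x;         // e.g. 0xAB -> 0xABABABAB, later broadcast into %xmm0
}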
diff --git a/libc/arch-x86/atom/string/ssse3-bcopy-atom.S b/libc/arch-x86/atom/string/ssse3-bcopy-atom.S
deleted file mode 100644
index e4b791a..0000000
--- a/libc/arch-x86/atom/string/ssse3-bcopy-atom.S
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
-Copyright (c) 2010, Intel Corporation
-All rights reserved.
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions are met:
-
- * Redistributions of source code must retain the above copyright notice,
- * this list of conditions and the following disclaimer.
-
- * Redistributions in binary form must reproduce the above copyright notice,
- * this list of conditions and the following disclaimer in the documentation
- * and/or other materials provided with the distribution.
-
- * Neither the name of Intel Corporation nor the names of its contributors
- * may be used to endorse or promote products derived from this software
- * without specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
-ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
-WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
-ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
-(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
-LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
-ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
-SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-*/
-
-
-#define MEMCPY bcopy
-#define USE_AS_MEMMOVE
-#define USE_AS_BCOPY
-#include "ssse3-memcpy-atom.S"
diff --git a/libc/arch-x86/atom/string/ssse3-memcpy-atom.S b/libc/arch-x86/atom/string/ssse3-memcpy-atom.S
index ac5ec2d..4b2fb8e 100644
--- a/libc/arch-x86/atom/string/ssse3-memcpy-atom.S
+++ b/libc/arch-x86/atom/string/ssse3-memcpy-atom.S
@@ -73,15 +73,9 @@
.size name, .-name
#endif
-#ifdef USE_AS_BCOPY
-# define SRC PARMS
-# define DEST SRC+4
-# define LEN DEST+4
-#else
-# define DEST PARMS
-# define SRC DEST+4
-# define LEN SRC+4
-#endif
+#define DEST PARMS
+#define SRC DEST+4
+#define LEN SRC+4
#define CFI_PUSH(REG) \
cfi_adjust_cfa_offset (4); \
@@ -2018,12 +2012,10 @@
L(fwd_write_4bytes):
movl -4(%eax), %ecx
movl %ecx, -4(%edx)
-#ifndef USE_AS_BCOPY
-# ifdef USE_AS_MEMPCPY
+#ifdef USE_AS_MEMPCPY
movl %edx, %eax
-# else
+#else
movl DEST(%esp), %eax
-# endif
#endif
RETURN
@@ -2044,12 +2036,10 @@
movq -8(%eax), %xmm0
movq %xmm0, -8(%edx)
L(fwd_write_0bytes):
-#ifndef USE_AS_BCOPY
-# ifdef USE_AS_MEMPCPY
+#ifdef USE_AS_MEMPCPY
movl %edx, %eax
-# else
+#else
movl DEST(%esp), %eax
-# endif
#endif
RETURN
@@ -2059,12 +2049,10 @@
movl -4(%eax), %eax
movl %ecx, -5(%edx)
movl %eax, -4(%edx)
-#ifndef USE_AS_BCOPY
-# ifdef USE_AS_MEMPCPY
+#ifdef USE_AS_MEMPCPY
movl %edx, %eax
-# else
+#else
movl DEST(%esp), %eax
-# endif
#endif
RETURN
@@ -2088,12 +2076,10 @@
movl %ecx, -5(%edx)
movzbl -1(%eax), %ecx
movb %cl, -1(%edx)
-#ifndef USE_AS_BCOPY
-# ifdef USE_AS_MEMPCPY
+#ifdef USE_AS_MEMPCPY
movl %edx, %eax
-# else
+#else
movl DEST(%esp), %eax
-# endif
#endif
RETURN
@@ -2116,12 +2102,10 @@
L(fwd_write_1bytes):
movzbl -1(%eax), %ecx
movb %cl, -1(%edx)
-#ifndef USE_AS_BCOPY
-# ifdef USE_AS_MEMPCPY
+#ifdef USE_AS_MEMPCPY
movl %edx, %eax
-# else
+#else
movl DEST(%esp), %eax
-# endif
#endif
RETURN
@@ -2146,12 +2130,10 @@
movl %ecx, -6(%edx)
movzwl -2(%eax), %ecx
movw %cx, -2(%edx)
-#ifndef USE_AS_BCOPY
-# ifdef USE_AS_MEMPCPY
+#ifdef USE_AS_MEMPCPY
movl %edx, %eax
-# else
+#else
movl DEST(%esp), %eax
-# endif
#endif
RETURN
@@ -2174,12 +2156,10 @@
L(fwd_write_2bytes):
movzwl -2(%eax), %ecx
movw %cx, -2(%edx)
-#ifndef USE_AS_BCOPY
-# ifdef USE_AS_MEMPCPY
+#ifdef USE_AS_MEMPCPY
movl %edx, %eax
-# else
+#else
movl DEST(%esp), %eax
-# endif
#endif
RETURN
@@ -2206,12 +2186,10 @@
movzbl -1(%eax), %eax
movw %cx, -3(%edx)
movb %al, -1(%edx)
-#ifndef USE_AS_BCOPY
-# ifdef USE_AS_MEMPCPY
+#ifdef USE_AS_MEMPCPY
movl %edx, %eax
-# else
+#else
movl DEST(%esp), %eax
-# endif
#endif
RETURN
@@ -2236,12 +2214,10 @@
movzbl -1(%eax), %eax
movw %cx, -3(%edx)
movb %al, -1(%edx)
-#ifndef USE_AS_BCOPY
-# ifdef USE_AS_MEMPCPY
+#ifdef USE_AS_MEMPCPY
movl %edx, %eax
-# else
+#else
movl DEST(%esp), %eax
-# endif
#endif
RETURN
@@ -2256,12 +2232,10 @@
movq -8(%eax), %xmm0
movq %xmm0, -8(%edx)
L(fwd_write_0bytes_align):
-#ifndef USE_AS_BCOPY
-# ifdef USE_AS_MEMPCPY
+#ifdef USE_AS_MEMPCPY
movl %edx, %eax
-# else
+#else
movl DEST(%esp), %eax
-# endif
#endif
RETURN
@@ -2272,12 +2246,10 @@
L(fwd_write_16bytes_align):
movdqa -16(%eax), %xmm0
movdqa %xmm0, -16(%edx)
-#ifndef USE_AS_BCOPY
-# ifdef USE_AS_MEMPCPY
+#ifdef USE_AS_MEMPCPY
movl %edx, %eax
-# else
+#else
movl DEST(%esp), %eax
-# endif
#endif
RETURN
@@ -2287,12 +2259,10 @@
movl -4(%eax), %eax
movl %ecx, -5(%edx)
movl %eax, -4(%edx)
-#ifndef USE_AS_BCOPY
-# ifdef USE_AS_MEMPCPY
+#ifdef USE_AS_MEMPCPY
movl %edx, %eax
-# else
+#else
movl DEST(%esp), %eax
-# endif
#endif
RETURN
@@ -2310,12 +2280,10 @@
movl %ecx, -5(%edx)
movzbl -1(%eax), %ecx
movb %cl, -1(%edx)
-#ifndef USE_AS_BCOPY
-# ifdef USE_AS_MEMPCPY
+#ifdef USE_AS_MEMPCPY
movl %edx, %eax
-# else
+#else
movl DEST(%esp), %eax
-# endif
#endif
RETURN
@@ -2330,12 +2298,10 @@
movl %ecx, -5(%edx)
movzbl -1(%eax), %ecx
movb %cl, -1(%edx)
-#ifndef USE_AS_BCOPY
-# ifdef USE_AS_MEMPCPY
+#ifdef USE_AS_MEMPCPY
movl %edx, %eax
-# else
+#else
movl DEST(%esp), %eax
-# endif
#endif
RETURN
@@ -2352,12 +2318,10 @@
L(fwd_write_1bytes_align):
movzbl -1(%eax), %ecx
movb %cl, -1(%edx)
-#ifndef USE_AS_BCOPY
-# ifdef USE_AS_MEMPCPY
+#ifdef USE_AS_MEMPCPY
movl %edx, %eax
-# else
+#else
movl DEST(%esp), %eax
-# endif
#endif
RETURN
@@ -2370,12 +2334,10 @@
movdqa %xmm0, -17(%edx)
movzbl -1(%eax), %ecx
movb %cl, -1(%edx)
-#ifndef USE_AS_BCOPY
-# ifdef USE_AS_MEMPCPY
+#ifdef USE_AS_MEMPCPY
movl %edx, %eax
-# else
+#else
movl DEST(%esp), %eax
-# endif
#endif
RETURN
@@ -2394,12 +2356,10 @@
movl %ecx, -6(%edx)
movzwl -2(%eax), %ecx
movw %cx, -2(%edx)
-#ifndef USE_AS_BCOPY
-# ifdef USE_AS_MEMPCPY
+#ifdef USE_AS_MEMPCPY
movl %edx, %eax
-# else
+#else
movl DEST(%esp), %eax
-# endif
#endif
RETURN
@@ -2414,12 +2374,10 @@
movl %ecx, -6(%edx)
movzwl -2(%eax), %ecx
movw %cx, -2(%edx)
-#ifndef USE_AS_BCOPY
-# ifdef USE_AS_MEMPCPY
+#ifdef USE_AS_MEMPCPY
movl %edx, %eax
-# else
+#else
movl DEST(%esp), %eax
-# endif
#endif
RETURN
@@ -2436,12 +2394,10 @@
L(fwd_write_2bytes_align):
movzwl -2(%eax), %ecx
movw %cx, -2(%edx)
-#ifndef USE_AS_BCOPY
-# ifdef USE_AS_MEMPCPY
+#ifdef USE_AS_MEMPCPY
movl %edx, %eax
-# else
+#else
movl DEST(%esp), %eax
-# endif
#endif
RETURN
@@ -2454,12 +2410,10 @@
movdqa %xmm0, -18(%edx)
movzwl -2(%eax), %ecx
movw %cx, -2(%edx)
-#ifndef USE_AS_BCOPY
-# ifdef USE_AS_MEMPCPY
+#ifdef USE_AS_MEMPCPY
movl %edx, %eax
-# else
+#else
movl DEST(%esp), %eax
-# endif
#endif
RETURN
@@ -2480,12 +2434,10 @@
movzbl -1(%eax), %eax
movw %cx, -3(%edx)
movb %al, -1(%edx)
-#ifndef USE_AS_BCOPY
-# ifdef USE_AS_MEMPCPY
+#ifdef USE_AS_MEMPCPY
movl %edx, %eax
-# else
+#else
movl DEST(%esp), %eax
-# endif
#endif
RETURN
@@ -2502,12 +2454,10 @@
movzbl -1(%eax), %eax
movw %cx, -3(%edx)
movb %al, -1(%edx)
-#ifndef USE_AS_BCOPY
-# ifdef USE_AS_MEMPCPY
+#ifdef USE_AS_MEMPCPY
movl %edx, %eax
-# else
+#else
movl DEST(%esp), %eax
-# endif
#endif
RETURN
@@ -2526,12 +2476,10 @@
movzbl -1(%eax), %eax
movw %cx, -3(%edx)
movb %al, -1(%edx)
-#ifndef USE_AS_BCOPY
-# ifdef USE_AS_MEMPCPY
+#ifdef USE_AS_MEMPCPY
movl %edx, %eax
-# else
+#else
movl DEST(%esp), %eax
-# endif
#endif
RETURN
@@ -2546,12 +2494,10 @@
movzbl -1(%eax), %eax
movw %cx, -3(%edx)
movb %al, -1(%edx)
-#ifndef USE_AS_BCOPY
-# ifdef USE_AS_MEMPCPY
+#ifdef USE_AS_MEMPCPY
movl %edx, %eax
-# else
+#else
movl DEST(%esp), %eax
-# endif
#endif
RETURN
@@ -2568,12 +2514,10 @@
L(fwd_write_4bytes_align):
movl -4(%eax), %ecx
movl %ecx, -4(%edx)
-#ifndef USE_AS_BCOPY
-# ifdef USE_AS_MEMPCPY
+#ifdef USE_AS_MEMPCPY
movl %edx, %eax
-# else
+#else
movl DEST(%esp), %eax
-# endif
#endif
RETURN
@@ -2586,12 +2530,10 @@
movdqa %xmm0, -20(%edx)
movl -4(%eax), %ecx
movl %ecx, -4(%edx)
-#ifndef USE_AS_BCOPY
-# ifdef USE_AS_MEMPCPY
+#ifdef USE_AS_MEMPCPY
movl %edx, %eax
-# else
+#else
movl DEST(%esp), %eax
-# endif
#endif
RETURN_END
@@ -2685,12 +2627,10 @@
movl (%eax), %ecx
movl %ecx, (%edx)
L(bk_write_0bytes):
-#ifndef USE_AS_BCOPY
movl DEST(%esp), %eax
-# ifdef USE_AS_MEMPCPY
+#ifdef USE_AS_MEMPCPY
movl LEN(%esp), %ecx
add %ecx, %eax
-# endif
#endif
RETURN
@@ -2710,12 +2650,10 @@
L(bk_write_8bytes):
movq (%eax), %xmm0
movq %xmm0, (%edx)
-#ifndef USE_AS_BCOPY
movl DEST(%esp), %eax
-# ifdef USE_AS_MEMPCPY
+#ifdef USE_AS_MEMPCPY
movl LEN(%esp), %ecx
add %ecx, %eax
-# endif
#endif
RETURN
@@ -2741,12 +2679,10 @@
L(bk_write_1bytes):
movzbl (%eax), %ecx
movb %cl, (%edx)
-#ifndef USE_AS_BCOPY
movl DEST(%esp), %eax
-# ifdef USE_AS_MEMPCPY
+#ifdef USE_AS_MEMPCPY
movl LEN(%esp), %ecx
add %ecx, %eax
-# endif
#endif
RETURN
@@ -2768,12 +2704,10 @@
movq %xmm0, 1(%edx)
movzbl (%eax), %ecx
movb %cl, (%edx)
-#ifndef USE_AS_BCOPY
movl DEST(%esp), %eax
-# ifdef USE_AS_MEMPCPY
+#ifdef USE_AS_MEMPCPY
movl LEN(%esp), %ecx
add %ecx, %eax
-# endif
#endif
RETURN
@@ -2798,12 +2732,10 @@
movl %ecx, 2(%edx)
movzwl (%eax), %ecx
movw %cx, (%edx)
-#ifndef USE_AS_BCOPY
movl DEST(%esp), %eax
-# ifdef USE_AS_MEMPCPY
+#ifdef USE_AS_MEMPCPY
movl LEN(%esp), %ecx
add %ecx, %eax
-# endif
#endif
RETURN
@@ -2826,12 +2758,10 @@
L(bk_write_2bytes):
movzwl (%eax), %ecx
movw %cx, (%edx)
-#ifndef USE_AS_BCOPY
movl DEST(%esp), %eax
-# ifdef USE_AS_MEMPCPY
+#ifdef USE_AS_MEMPCPY
movl LEN(%esp), %ecx
add %ecx, %eax
-# endif
#endif
RETURN
@@ -2858,12 +2788,10 @@
movw %cx, 1(%edx)
movzbl (%eax), %eax
movb %al, (%edx)
-#ifndef USE_AS_BCOPY
movl DEST(%esp), %eax
-# ifdef USE_AS_MEMPCPY
+#ifdef USE_AS_MEMPCPY
movl LEN(%esp), %ecx
add %ecx, %eax
-# endif
#endif
RETURN
@@ -2888,12 +2816,10 @@
movw %cx, 1(%edx)
movzbl (%eax), %eax
movb %al, (%edx)
-#ifndef USE_AS_BCOPY
movl DEST(%esp), %eax
-# ifdef USE_AS_MEMPCPY
+#ifdef USE_AS_MEMPCPY
movl LEN(%esp), %ecx
add %ecx, %eax
-# endif
#endif
RETURN_END
diff --git a/libc/arch-x86/generic/string/bcopy.S b/libc/arch-x86/generic/string/bcopy.S
deleted file mode 100644
index f425c58..0000000
--- a/libc/arch-x86/generic/string/bcopy.S
+++ /dev/null
@@ -1,98 +0,0 @@
-/* $OpenBSD: bcopy.S,v 1.5 2005/08/07 11:30:38 espie Exp $ */
-
-/*-
- * Copyright (c) 1990 The Regents of the University of California.
- * All rights reserved.
- *
- * This code is derived from locore.s.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions
- * are met:
- * 1. Redistributions of source code must retain the above copyright
- * notice, this list of conditions and the following disclaimer.
- * 2. Redistributions in binary form must reproduce the above copyright
- * notice, this list of conditions and the following disclaimer in the
- * documentation and/or other materials provided with the distribution.
- * 3. Neither the name of the University nor the names of its contributors
- * may be used to endorse or promote products derived from this software
- * without specific prior written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND
- * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
- * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE
- * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
- * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
- * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
- * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
- * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
- * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
- * SUCH DAMAGE.
- */
-
-#include <private/bionic_asm.h>
-
- /*
- * (ov)bcopy (src,dst,cnt)
- * ws@tools.de (Wolfgang Solfrank, TooLs GmbH) +49-228-985800
- */
-
-#if defined(MEMCOPY)
-ENTRY(memcpy)
-#elif defined(MEMMOVE)
-ENTRY(memmove)
-#else
-ENTRY(bcopy)
-#endif
- pushl %esi
- pushl %edi
-#if defined(MEMCOPY) || defined(MEMMOVE)
- movl 12(%esp),%edi
- movl 16(%esp),%esi
- movl %edi, %eax
-#else
- movl 12(%esp),%esi
- movl 16(%esp),%edi
-#endif
- movl 20(%esp),%ecx
- movl %ecx,%edx
- cmpl %esi,%edi /* potentially overlapping? */
- jnb 1f
- cld /* nope, copy forwards. */
- shrl $2,%ecx /* copy by words */
- rep
- movsl
- movl %edx,%ecx
- andl $3,%ecx /* any bytes left? */
- rep
- movsb
- popl %edi
- popl %esi
- ret
-1:
- addl %ecx,%edi /* copy backwards. */
- addl %ecx,%esi
- std
- andl $3,%ecx /* any fractional bytes? */
- decl %edi
- decl %esi
- rep
- movsb
- movl %edx,%ecx
- shrl $2,%ecx
- subl $3,%esi
- subl $3,%edi
- rep
- movsl
- popl %edi
- popl %esi
- cld
- ret
-#if defined(MEMCOPY)
-END(memcpy)
-#elif defined(MEMMOVE)
-END(memmove)
-#else
-END(bcopy)
-#endif
diff --git a/libc/arch-x86/generic/string/memcpy.S b/libc/arch-x86/generic/string/memcpy.S
deleted file mode 100644
index 95c8a83..0000000
--- a/libc/arch-x86/generic/string/memcpy.S
+++ /dev/null
@@ -1,3 +0,0 @@
-/* $OpenBSD: memcpy.S,v 1.3 2005/08/07 11:30:38 espie Exp $ */
-#define MEMCOPY
-#include "bcopy.S"
diff --git a/libc/arch-x86/generic/string/memmove.S b/libc/arch-x86/generic/string/memmove.S
deleted file mode 100644
index c5bfd19..0000000
--- a/libc/arch-x86/generic/string/memmove.S
+++ /dev/null
@@ -1,3 +0,0 @@
-/* $OpenBSD: memmove.S,v 1.3 2005/08/07 11:30:38 espie Exp $ */
-#define MEMMOVE
-#include "bcopy.S"
diff --git a/libc/arch-x86/silvermont/string/sse2-bcopy-slm.S b/libc/arch-x86/silvermont/string/sse2-bcopy-slm.S
deleted file mode 100644
index 190d52f..0000000
--- a/libc/arch-x86/silvermont/string/sse2-bcopy-slm.S
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
-Copyright (c) 2014, Intel Corporation
-All rights reserved.
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions are met:
-
- * Redistributions of source code must retain the above copyright notice,
- * this list of conditions and the following disclaimer.
-
- * Redistributions in binary form must reproduce the above copyright notice,
- * this list of conditions and the following disclaimer in the documentation
- * and/or other materials provided with the distribution.
-
- * Neither the name of Intel Corporation nor the names of its contributors
- * may be used to endorse or promote products derived from this software
- * without specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
-ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
-WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
-ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
-(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
-LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
-ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
-SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-*/
-
-
-#define MEMMOVE bcopy
-#define USE_AS_BCOPY
-#include "sse2-memmove-slm.S"
diff --git a/libc/arch-x86/silvermont/string/sse2-bzero-slm.S b/libc/arch-x86/silvermont/string/sse2-bzero-slm.S
deleted file mode 100644
index b682ed6..0000000
--- a/libc/arch-x86/silvermont/string/sse2-bzero-slm.S
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
-Copyright (c) 2014, Intel Corporation
-All rights reserved.
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions are met:
-
- * Redistributions of source code must retain the above copyright notice,
- * this list of conditions and the following disclaimer.
-
- * Redistributions in binary form must reproduce the above copyright notice,
- * this list of conditions and the following disclaimer in the documentation
- * and/or other materials provided with the distribution.
-
- * Neither the name of Intel Corporation nor the names of its contributors
- * may be used to endorse or promote products derived from this software
- * without specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
-ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
-WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
-ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
-(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
-LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
-ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
-SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-*/
-
-#define USE_AS_BZERO
-#define MEMSET bzero
-#include "sse2-memset-slm.S"
diff --git a/libc/arch-x86/silvermont/string/sse2-memmove-slm.S b/libc/arch-x86/silvermont/string/sse2-memmove-slm.S
index 6a8f067..bf9f85d 100644
--- a/libc/arch-x86/silvermont/string/sse2-memmove-slm.S
+++ b/libc/arch-x86/silvermont/string/sse2-memmove-slm.S
@@ -73,15 +73,9 @@
.size name, .-name
#endif
-#ifdef USE_AS_BCOPY
-# define SRC PARMS
-# define DEST SRC+4
-# define LEN DEST+4
-#else
-# define DEST PARMS
-# define SRC DEST+4
-# define LEN SRC+4
-#endif
+#define DEST PARMS
+#define SRC DEST+4
+#define LEN SRC+4
#define CFI_PUSH(REG) \
cfi_adjust_cfa_offset (4); \
diff --git a/libc/arch-x86/silvermont/string/sse2-memset-slm.S b/libc/arch-x86/silvermont/string/sse2-memset-slm.S
index c30bf74..0718fa7 100644
--- a/libc/arch-x86/silvermont/string/sse2-memset-slm.S
+++ b/libc/arch-x86/silvermont/string/sse2-memset-slm.S
@@ -28,11 +28,9 @@
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
-#include "cache.h"
+#include <private/bionic_asm.h>
-#ifndef MEMSET
-# define MEMSET memset
-#endif
+#include "cache.h"
#ifndef L
# define L(label) .L##label
@@ -42,14 +40,6 @@
# define ALIGN(n) .p2align n
#endif
-#ifndef cfi_startproc
-# define cfi_startproc .cfi_startproc
-#endif
-
-#ifndef cfi_endproc
-# define cfi_endproc .cfi_endproc
-#endif
-
#ifndef cfi_rel_offset
# define cfi_rel_offset(reg, off) .cfi_rel_offset reg, off
#endif
@@ -62,21 +52,6 @@
# define cfi_adjust_cfa_offset(off) .cfi_adjust_cfa_offset off
#endif
-#ifndef ENTRY
-# define ENTRY(name) \
- .type name, @function; \
- .globl name; \
- .p2align 4; \
-name: \
- cfi_startproc
-#endif
-
-#ifndef END
-# define END(name) \
- cfi_endproc; \
- .size name, .-name
-#endif
-
#define CFI_PUSH(REG) \
cfi_adjust_cfa_offset (4); \
cfi_rel_offset (REG, 0)
@@ -88,16 +63,11 @@
#define PUSH(REG) pushl REG; CFI_PUSH (REG)
#define POP(REG) popl REG; CFI_POP (REG)
-#ifdef USE_AS_BZERO
-# define DEST PARMS
-# define LEN DEST+4
-# define SETRTNVAL
-#else
-# define DEST PARMS
-# define CHR DEST+4
-# define LEN CHR+4
-# define SETRTNVAL movl DEST(%esp), %eax
-#endif
+#define DST PARMS
+#define CHR DST+4
+#define LEN CHR+4
+#define CHK_DST_LEN (LEN+4)
+#define SETRTNVAL movl DST(%esp), %eax
#if (defined SHARED || defined __PIC__)
# define ENTRANCE PUSH (%ebx);
@@ -142,9 +112,17 @@
jmp *TABLE(,%ecx,4)
#endif
+ENTRY(__memset_chk)
+ movl LEN(%esp), %ecx
+ cmpl %ecx, CHK_DST_LEN(%esp)
+ jbe memset
+
+ jmp __memset_chk_fail
+END(__memset_chk)
+
.section .text.sse2,"ax",@progbits
ALIGN (4)
-ENTRY (MEMSET)
+ENTRY(memset)
ENTRANCE
movl LEN(%esp), %ecx
@@ -154,17 +132,13 @@
RETURN
L(1byteormore):
-#ifdef USE_AS_BZERO
- xor %eax, %eax
-#else
movzbl CHR(%esp), %eax
movb %al, %ah
/* Fill the whole EAX with pattern. */
movl %eax, %edx
shl $16, %eax
or %edx, %eax
-#endif
- movl DEST(%esp), %edx
+ movl DST(%esp), %edx
cmp $1, %ecx
je L(1byte)
cmp $16, %ecx
@@ -195,12 +169,8 @@
ALIGN (4)
L(16bytesormore):
-#ifdef USE_AS_BZERO
- pxor %xmm0, %xmm0
-#else
movd %eax, %xmm0
pshufd $0, %xmm0, %xmm0
-#endif
cmp $64, %ecx
ja L(64bytesmore)
@@ -838,4 +808,4 @@
SETRTNVAL
RETURN_END
-END (MEMSET)
+END(memset)
diff --git a/libc/arch-x86/x86.mk b/libc/arch-x86/x86.mk
index 1d717aa..2f63446 100644
--- a/libc/arch-x86/x86.mk
+++ b/libc/arch-x86/x86.mk
@@ -14,8 +14,6 @@
arch-x86/atom/string/sse2-wcsrchr-atom.S \
arch-x86/atom/string/sse2-wcslen-atom.S \
arch-x86/atom/string/sse2-wcscmp-atom.S \
- arch-x86/silvermont/string/sse2-bcopy-slm.S \
- arch-x86/silvermont/string/sse2-bzero-slm.S \
arch-x86/silvermont/string/sse2-memcpy-slm.S \
arch-x86/silvermont/string/sse2-memmove-slm.S \
arch-x86/silvermont/string/sse2-memset-slm.S \
diff --git a/libc/arch-x86_64/string/sse2-memset-slm.S b/libc/arch-x86_64/string/sse2-memset-slm.S
index bfcafae..fc502c0 100644
--- a/libc/arch-x86_64/string/sse2-memset-slm.S
+++ b/libc/arch-x86_64/string/sse2-memset-slm.S
@@ -28,11 +28,9 @@
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
-#include "cache.h"
+#include <private/bionic_asm.h>
-#ifndef MEMSET
-# define MEMSET memset
-#endif
+#include "cache.h"
#ifndef L
# define L(label) .L##label
@@ -42,39 +40,21 @@
# define ALIGN(n) .p2align n
#endif
-#ifndef cfi_startproc
-# define cfi_startproc .cfi_startproc
-#endif
-#ifndef cfi_endproc
-# define cfi_endproc .cfi_endproc
-#endif
+ENTRY(__memset_chk)
+ # %rdi = dst, %rsi = byte, %rdx = n, %rcx = dst_len
+ cmp %rcx, %rdx
+ ja __memset_chk_fail
+ // Fall through to memset...
+END(__memset_chk)
-#ifndef ENTRY
-# define ENTRY(name) \
- .type name, @function; \
- .globl name; \
-name: \
- cfi_startproc
-#endif
-
-#ifndef END
-# define END(name) \
- cfi_endproc; \
- .size name, .-name
-#endif
.section .text.sse2,"ax",@progbits
-ENTRY (MEMSET)
+ENTRY(memset)
movq %rdi, %rax
-#ifdef USE_AS_BZERO_P
- mov %rsi, %rdx
- xor %rcx, %rcx
-#else
and $0xff, %rsi
mov $0x0101010101010101, %rcx
imul %rsi, %rcx
-#endif
cmpq $16, %rdx
jae L(16bytesormore)
testb $8, %dl
@@ -106,12 +86,8 @@
ALIGN (4)
L(16bytesormore):
-#ifdef USE_AS_BZERO_P
- pxor %xmm0, %xmm0
-#else
movd %rcx, %xmm0
pshufd $0, %xmm0, %xmm0
-#endif
movdqu %xmm0, (%rdi)
movdqu %xmm0, -16(%rdi, %rdx)
cmpq $32, %rdx
@@ -170,4 +146,4 @@
sfence
ret
-END (MEMSET)
+END(memset)
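The 64-bit variant builds the same pattern with a single multiply: the "mov $0x0101010101010101, %rcx; imul %rsi, %rcx" pair above replicates the byte into all eight lanes. Equivalent C:

#include <cstdint>

uint64_t replicate_byte64(uint8_t c) {
  return uint64_t{c} * 0x0101010101010101ull;  // e.g. 0xAB -> 0xABABABABABABABAB
}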
diff --git a/libc/bionic/fortify.cpp b/libc/bionic/fortify.cpp
index a1db2a4..ad7aa04 100644
--- a/libc/bionic/fortify.cpp
+++ b/libc/bionic/fortify.cpp
@@ -153,15 +153,6 @@
return memrchr(s, c, n);
}
-#if !defined(__aarch64__) && !defined(__arm__) // TODO: add optimized assembler for the others too.
-// Runtime implementation of __builtin___memset_chk (used directly by compiler, not in headers).
-extern "C" void* __memset_chk(void* dst, int byte, size_t count, size_t dst_len) {
- __check_count("memset", "count", count);
- __check_buffer_access("memset", "write into", count, dst_len);
- return memset(dst, byte, count);
-}
-#endif
-
// memset is performance-critical enough that we have assembler __memset_chk implementations.
// This function is used to give better diagnostics than we can easily do from assembler.
extern "C" void* __memset_chk_fail(void* /*dst*/, int /*byte*/, size_t count, size_t dst_len) {
diff --git a/libc/bionic/ndk_cruft.cpp b/libc/bionic/ndk_cruft.cpp
index 3ac88f8..95abc20 100644
--- a/libc/bionic/ndk_cruft.cpp
+++ b/libc/bionic/ndk_cruft.cpp
@@ -240,15 +240,17 @@
return signal(signum, handler);
}
-#if !defined(__i386__)
// This was removed from POSIX 2008.
#undef bcopy
void bcopy(const void* src, void* dst, size_t n) {
memmove(dst, src, n);
}
-#else
-// x86 has an assembler implementation.
-#endif
+
+// This was removed from POSIX 2008.
+#undef bzero
+void bzero(void* dst, size_t n) {
+ memset(dst, 0, n);
+}
// sysv_signal() was never in POSIX.
extern "C++" sighandler_t _signal(int signum, sighandler_t handler, int flags);
diff --git a/libc/bionic/setjmp_cookie.cpp b/libc/bionic/setjmp_cookie.cpp
index ce57fd1..3be675a 100644
--- a/libc/bionic/setjmp_cookie.cpp
+++ b/libc/bionic/setjmp_cookie.cpp
@@ -63,3 +63,7 @@
return cookie & 1;
}
+
+extern "C" __LIBC_HIDDEN__ long __bionic_setjmp_checksum_mismatch() {
+ __libc_fatal("setjmp checksum mismatch");
+}
diff --git a/libc/bionic/sysconf.cpp b/libc/bionic/sysconf.cpp
index 8a55f7e..125b3c9 100644
--- a/libc/bionic/sysconf.cpp
+++ b/libc/bionic/sysconf.cpp
@@ -41,16 +41,6 @@
#include "private/bionic_tls.h"
-static int __sysconf_monotonic_clock() {
- timespec t;
- int rc = clock_getres(CLOCK_MONOTONIC, &t);
- return (rc == -1) ? -1 : _POSIX_VERSION;
-}
-
-static bool __sysconf_has_clock(clockid_t clock_id) {
- return clock_getres(clock_id, NULL) == 0;
-}
-
static long __sysconf_rlimit(int resource) {
rlimit rl;
getrlimit(resource, &rl);
@@ -147,7 +137,7 @@
case _SC_NPROCESSORS_ONLN: return get_nprocs();
case _SC_PHYS_PAGES: return get_phys_pages();
case _SC_AVPHYS_PAGES: return get_avphys_pages();
- case _SC_MONOTONIC_CLOCK: return __sysconf_monotonic_clock();
+ case _SC_MONOTONIC_CLOCK: return _POSIX_VERSION;
case _SC_2_PBS: return -1; // Obsolescent in POSIX.1-2008.
case _SC_2_PBS_ACCOUNTING: return -1; // Obsolescent in POSIX.1-2008.
@@ -158,8 +148,7 @@
case _SC_ADVISORY_INFO: return _POSIX_ADVISORY_INFO;
case _SC_BARRIERS: return _POSIX_BARRIERS;
case _SC_CLOCK_SELECTION: return _POSIX_CLOCK_SELECTION;
- case _SC_CPUTIME:
- return __sysconf_has_clock(CLOCK_PROCESS_CPUTIME_ID) ?_POSIX_VERSION : -1;
+ case _SC_CPUTIME: return _POSIX_VERSION;
case _SC_HOST_NAME_MAX: return _POSIX_HOST_NAME_MAX; // Minimum requirement.
case _SC_IPV6: return _POSIX_IPV6;
@@ -172,8 +161,7 @@
case _SC_SPORADIC_SERVER: return _POSIX_SPORADIC_SERVER;
case _SC_SS_REPL_MAX: return -1;
case _SC_SYMLOOP_MAX: return _POSIX_SYMLOOP_MAX; // Minimum requirement.
- case _SC_THREAD_CPUTIME:
- return __sysconf_has_clock(CLOCK_THREAD_CPUTIME_ID) ? _POSIX_VERSION : -1;
+ case _SC_THREAD_CPUTIME: return _POSIX_VERSION;
case _SC_THREAD_PROCESS_SHARED: return _POSIX_THREAD_PROCESS_SHARED;
case _SC_THREAD_ROBUST_PRIO_INHERIT: return _POSIX_THREAD_ROBUST_PRIO_INHERIT;
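The sysconf() clock queries no longer probe clock_getres() at run time; the change assumes that every kernel bionic supports provides CLOCK_MONOTONIC and the CPU-time clocks, so the answer is a constant. Callers see the same interface either way:

#include <cstdio>
#include <unistd.h>

int main() {
  long monotonic = sysconf(_SC_MONOTONIC_CLOCK);  // now unconditionally _POSIX_VERSION on bionic
  long cputime = sysconf(_SC_CPUTIME);            // likewise
  printf("_SC_MONOTONIC_CLOCK=%ld _SC_CPUTIME=%ld\n", monotonic, cputime);
  return 0;
}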
diff --git a/libm/libm.arm.map b/libm/libm.arm.map
index e781f2d..842f46c 100644
--- a/libm/libm.arm.map
+++ b/libm/libm.arm.map
@@ -272,7 +272,7 @@
*;
};
-LIBC_PRIVATE { # arm mips
+LIBC_DEPRECATED { # arm mips
global: # arm mips
___Unwind_Backtrace; # arm
___Unwind_ForcedUnwind; # arm
diff --git a/libm/libm.map.txt b/libm/libm.map.txt
index 075ebd5..d273603 100644
--- a/libm/libm.map.txt
+++ b/libm/libm.map.txt
@@ -271,7 +271,7 @@
*;
};
-LIBC_PRIVATE { # arm mips
+LIBC_DEPRECATED { # arm mips
global: # arm mips
___Unwind_Backtrace; # arm
___Unwind_ForcedUnwind; # arm
diff --git a/libm/libm.mips.map b/libm/libm.mips.map
index 476c6ad..2429dba 100644
--- a/libm/libm.mips.map
+++ b/libm/libm.mips.map
@@ -272,7 +272,7 @@
*;
};
-LIBC_PRIVATE { # arm mips
+LIBC_DEPRECATED { # arm mips
global: # arm mips
__fixdfdi; # arm mips
__fixsfdi; # arm mips
diff --git a/tests/__cxa_thread_atexit_test.cpp b/tests/__cxa_thread_atexit_test.cpp
index 1432968..e388f3b 100644
--- a/tests/__cxa_thread_atexit_test.cpp
+++ b/tests/__cxa_thread_atexit_test.cpp
@@ -35,12 +35,7 @@
std::string message;
};
-#if defined(__clang__) && defined(__aarch64__)
-// b/25642296, aarch64 clang compiled "thread_local" does not link.
-static ClassWithDtor class_with_dtor;
-#else
static thread_local ClassWithDtor class_with_dtor;
-#endif
static void* thread_nop(void* arg) {
class_with_dtor.set_message(*static_cast<std::string*>(arg));
@@ -52,12 +47,7 @@
pthread_t t;
ASSERT_EQ(0, pthread_create(&t, nullptr, thread_nop, &msg));
ASSERT_EQ(0, pthread_join(t, nullptr));
-#if defined(__clang__) && defined(__aarch64__)
- GTEST_LOG_(INFO) << "Skipping test, b/25642296, "
- << "thread_local does not work with aarch64 clang/llvm.\n";
-#else
ASSERT_EQ("dtor called.", class_with_dtor_output);
-#endif
}
class ClassWithDtorForMainThread {
@@ -74,13 +64,7 @@
};
static void thread_atexit_main() {
-#if defined(__clang__) && defined(__aarch64__)
- static ClassWithDtorForMainThread class_with_dtor_for_main_thread;
- GTEST_LOG_(INFO) << "Skipping test, b/25642296, "
- << "thread_local does not work with aarch64 clang/llvm.\n";
-#else
static thread_local ClassWithDtorForMainThread class_with_dtor_for_main_thread;
-#endif
class_with_dtor_for_main_thread.set_message("d-tor for main thread called.");
exit(0);
}
diff --git a/tests/setjmp_test.cpp b/tests/setjmp_test.cpp
index c75ab51..b7e856f 100644
--- a/tests/setjmp_test.cpp
+++ b/tests/setjmp_test.cpp
@@ -247,3 +247,17 @@
*sigflag &= 1;
EXPECT_DEATH(longjmp(jb, 0), "");
}
+
+TEST(setjmp, setjmp_cookie_checksum) {
+ jmp_buf jb;
+ int value = setjmp(jb);
+
+ if (value == 0) {
+ // Flip a bit.
+ reinterpret_cast<long*>(jb)[0] ^= 1;
+
+ EXPECT_DEATH(longjmp(jb, 1), "checksum mismatch");
+ } else {
+ fprintf(stderr, "setjmp_cookie_checksum: longjmp succeeded?");
+ }
+}