blob: db002b86401e2761c225621a454e7dc3548ec7b6 [file] [log] [blame]
Peter Collingbourne900d07d2019-10-28 13:11:00 -07001/*
2 * Copyright (C) 2019 The Android Open Source Project
3 * All rights reserved.
4 *
5 * Redistribution and use in source and binary forms, with or without
6 * modification, are permitted provided that the following conditions
7 * are met:
8 * * Redistributions of source code must retain the above copyright
9 * notice, this list of conditions and the following disclaimer.
10 * * Redistributions in binary form must reproduce the above copyright
11 * notice, this list of conditions and the following disclaimer in
12 * the documentation and/or other materials provided with the
13 * distribution.
14 *
15 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
16 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
17 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
18 * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
19 * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
20 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
21 * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
22 * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
23 * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
24 * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
25 * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
26 * SUCH DAMAGE.
27 */
28
Peter Collingbourne900d07d2019-10-28 13:11:00 -070029#include <private/bionic_ifuncs.h>
30#include <stddef.h>
31#include <sys/auxv.h>
32
/* MIDR_EL1 field layout: the implementer ID occupies bits [31:24] and the
 * CPU variant occupies bits [23:20] (Arm ARM, MIDR_EL1 register). */
#define MIDR_IMPL_ID_SHIFT 24u
#define MIDR_IMPL_ID_MASK 0xFF
#define CPU_VARIANT_SHIFT 20u
#define CPU_VARIANT_MASK 0xF

/* Macro to identify CPU implementer */
#define QCOM_IMPL_ID 0x51

/* Macro to identify Qualcomm CPU variants which support the
 * __memcpy_aarch64_nt routine
 */
#define QCOM_ORYON_CPU_VARIANTS 0x5
45
Peter Collingbourne900d07d2019-10-28 13:11:00 -070046extern "C" {
47
Peter Collingbourne900d07d2019-10-28 13:11:00 -070048typedef void* memchr_func(const void*, int, size_t);
49DEFINE_IFUNC_FOR(memchr) {
Peter Collingbourne7e201172020-12-21 14:08:38 -080050 if (arg->_hwcap2 & HWCAP2_MTE) {
Peter Collingbourne2361d4e2020-06-03 16:55:37 -070051 RETURN_FUNC(memchr_func, __memchr_aarch64_mte);
Peter Collingbourne900d07d2019-10-28 13:11:00 -070052 } else {
Peter Collingbourne337a5b32020-02-21 12:11:02 -080053 RETURN_FUNC(memchr_func, __memchr_aarch64);
54 }
55}
56
Elliott Hughes20f9d672023-05-22 19:28:33 +000057typedef int memcmp_func(const void*, const void*, size_t);
Elliott Hughes3d8e98f2023-01-25 23:33:39 +000058DEFINE_IFUNC_FOR(memcmp) {
59 // TODO: enable the SVE version.
60 RETURN_FUNC(memcmp_func, __memcmp_aarch64);
61}
62
Elliott Hughes7daf4592022-11-17 00:34:13 +000063typedef void* memcpy_func(void*, const void*, size_t);
64DEFINE_IFUNC_FOR(memcpy) {
Vaisakh K V83e55842024-03-29 12:47:39 +053065 unsigned long midr;
66 unsigned int impl_id, cpu_variant;
67
68 /* Check if hardware capability CPUID is available */
69 if (arg->_hwcap & HWCAP_CPUID) {
70 /* Read the MIDR register */
71 asm("mrs %0, MIDR_EL1 \n\t" : "=r"(midr));
72
73 /* Extract the CPU Implementer ID */
74 impl_id = (midr >> MIDR_IMPL_ID_SHIFT) & (MIDR_IMPL_ID_MASK);
75
76 /* Check for Qualcomm implementer ID */
77 if (impl_id == QCOM_IMPL_ID) {
78 cpu_variant = (midr >> CPU_VARIANT_SHIFT) & CPU_VARIANT_MASK;
79
80 /* Check for Qualcomm Oryon CPU variants: 0x1, 0x2, 0x3, 0x4, 0x5 */
81 if (cpu_variant <= QCOM_ORYON_CPU_VARIANTS) {
82 RETURN_FUNC(memcpy_func, __memcpy_aarch64_nt);
83 } else {
Elliott Hughes7daf4592022-11-17 00:34:13 +000084 RETURN_FUNC(memcpy_func, __memcpy_aarch64);
Vaisakh K V83e55842024-03-29 12:47:39 +053085 }
Elliott Hughes7daf4592022-11-17 00:34:13 +000086 }
Vaisakh K V83e55842024-03-29 12:47:39 +053087 }
88 /* If CPU implementer is not Qualcomm, choose the custom
89 * implementation based on CPU architecture feature
90 * */
91 if (arg->_hwcap & HWCAP_ASIMD) {
92 RETURN_FUNC(memcpy_func, __memcpy_aarch64_simd);
93 } else {
94 RETURN_FUNC(memcpy_func, __memcpy_aarch64);
95 }
Elliott Hughes7daf4592022-11-17 00:34:13 +000096}
97
98typedef void* memmove_func(void*, const void*, size_t);
99DEFINE_IFUNC_FOR(memmove) {
Vaisakh K V83e55842024-03-29 12:47:39 +0530100 unsigned long midr;
101 unsigned int impl_id, cpu_variant;
102
103 /* Check if hardware capability CPUID is available */
104 if (arg->_hwcap & HWCAP_CPUID) {
105 /* Read the MIDR register */
106 asm("mrs %0, MIDR_EL1 \n\t" : "=r"(midr));
107
108 /* Extract the CPU Implementer ID */
109 impl_id = (midr >> MIDR_IMPL_ID_SHIFT) & (MIDR_IMPL_ID_MASK);
110
111 /* Check for Qualcomm implementer ID */
112 if (impl_id == QCOM_IMPL_ID) {
113 cpu_variant = (midr >> CPU_VARIANT_SHIFT) & CPU_VARIANT_MASK;
114
115 /* Check for Qualcomm Oryon CPU variants: 0x1, 0x2, 0x3, 0x4, 0x5 */
116 if (cpu_variant <= QCOM_ORYON_CPU_VARIANTS) {
117 RETURN_FUNC(memcpy_func, __memmove_aarch64_nt);
118 } else {
119 RETURN_FUNC(memcpy_func, __memmove_aarch64);
120 }
Elliott Hughes7daf4592022-11-17 00:34:13 +0000121 }
Vaisakh K V83e55842024-03-29 12:47:39 +0530122 }
123 /* If CPU implementer is not Qualcomm, choose the custom
124 * implementation based on CPU architecture feature
125 * */
126 if (arg->_hwcap & HWCAP_ASIMD) {
127 RETURN_FUNC(memmove_func, __memmove_aarch64_simd);
128 } else {
129 RETURN_FUNC(memmove_func, __memmove_aarch64);
130 }
Elliott Hughes7daf4592022-11-17 00:34:13 +0000131}
132
Elliott Hughescb47a4f2024-03-25 13:44:36 -0700133typedef int memrchr_func(const void*, int, size_t);
134DEFINE_IFUNC_FOR(memrchr) {
135 RETURN_FUNC(memrchr_func, __memrchr_aarch64);
136}
137
138typedef int memset_func(void*, int, size_t);
139DEFINE_IFUNC_FOR(memset) {
Vaisakh K V54a61212024-03-29 13:32:45 +0530140 unsigned long midr;
141 unsigned int impl_id, cpu_variant;
142
143 if (arg->_hwcap & HWCAP_CPUID) {
144 /* Read the MIDR register */
145 asm("mrs %0, MIDR_EL1 \n\t" : "=r"(midr));
146
147 /* Extract the CPU Implementer ID */
148 impl_id = (midr >> MIDR_IMPL_ID_SHIFT) & (MIDR_IMPL_ID_MASK);
149
150 /* Check for Qualcomm implementer ID */
151 if (impl_id == QCOM_IMPL_ID) {
152 cpu_variant = (midr >> CPU_VARIANT_SHIFT) & CPU_VARIANT_MASK;
153
154 /* Check for Qualcomm Oryon CPU variants: 0x1, 0x2, 0x3, 0x4, 0x5 */
155 if (cpu_variant <= QCOM_ORYON_CPU_VARIANTS) {
156 RETURN_FUNC(memset_func, __memset_aarch64_nt);
157 } else {
158 RETURN_FUNC(memset_func, __memset_aarch64);
159 }
160 } else {
161 RETURN_FUNC(memset_func, __memset_aarch64);
162 }
163 } else {
Elliott Hughescb47a4f2024-03-25 13:44:36 -0700164 RETURN_FUNC(memset_func, __memset_aarch64);
Vaisakh K V54a61212024-03-29 13:32:45 +0530165 }
Elliott Hughescb47a4f2024-03-25 13:44:36 -0700166}
167
Elliott Hughes20f9d672023-05-22 19:28:33 +0000168typedef char* stpcpy_func(char*, const char*, size_t);
Peter Collingbourne337a5b32020-02-21 12:11:02 -0800169DEFINE_IFUNC_FOR(stpcpy) {
Elliott Hughes5ec0bfa2023-01-25 18:12:18 +0000170 // TODO: enable the SVE version.
171 RETURN_FUNC(stpcpy_func, __stpcpy_aarch64);
Peter Collingbourne900d07d2019-10-28 13:11:00 -0700172}
173
174typedef char* strchr_func(const char*, int);
175DEFINE_IFUNC_FOR(strchr) {
Peter Collingbourne7e201172020-12-21 14:08:38 -0800176 if (arg->_hwcap2 & HWCAP2_MTE) {
Peter Collingbourne337a5b32020-02-21 12:11:02 -0800177 RETURN_FUNC(strchr_func, __strchr_aarch64_mte);
Peter Collingbourne900d07d2019-10-28 13:11:00 -0700178 } else {
Peter Collingbourne337a5b32020-02-21 12:11:02 -0800179 RETURN_FUNC(strchr_func, __strchr_aarch64);
180 }
181}
182
183typedef char* strchrnul_func(const char*, int);
184DEFINE_IFUNC_FOR(strchrnul) {
Peter Collingbourne7e201172020-12-21 14:08:38 -0800185 if (arg->_hwcap2 & HWCAP2_MTE) {
Peter Collingbourne2361d4e2020-06-03 16:55:37 -0700186 RETURN_FUNC(strchrnul_func, __strchrnul_aarch64_mte);
Peter Collingbourne337a5b32020-02-21 12:11:02 -0800187 } else {
188 RETURN_FUNC(strchrnul_func, __strchrnul_aarch64);
Peter Collingbourne900d07d2019-10-28 13:11:00 -0700189 }
190}
191
192typedef int strcmp_func(const char*, const char*);
193DEFINE_IFUNC_FOR(strcmp) {
Elliott Hughes5ec0bfa2023-01-25 18:12:18 +0000194 // TODO: enable the SVE version.
195 RETURN_FUNC(strcmp_func, __strcmp_aarch64);
Peter Collingbourne337a5b32020-02-21 12:11:02 -0800196}
197
Elliott Hughes20f9d672023-05-22 19:28:33 +0000198typedef char* strcpy_func(char*, const char*);
Peter Collingbourne337a5b32020-02-21 12:11:02 -0800199DEFINE_IFUNC_FOR(strcpy) {
Elliott Hughes5ec0bfa2023-01-25 18:12:18 +0000200 // TODO: enable the SVE version.
201 RETURN_FUNC(strcpy_func, __strcpy_aarch64);
Peter Collingbourne900d07d2019-10-28 13:11:00 -0700202}
203
204typedef size_t strlen_func(const char*);
205DEFINE_IFUNC_FOR(strlen) {
Peter Collingbourne7e201172020-12-21 14:08:38 -0800206 if (arg->_hwcap2 & HWCAP2_MTE) {
Peter Collingbourne337a5b32020-02-21 12:11:02 -0800207 RETURN_FUNC(strlen_func, __strlen_aarch64_mte);
Peter Collingbourne900d07d2019-10-28 13:11:00 -0700208 } else {
Peter Collingbourne337a5b32020-02-21 12:11:02 -0800209 RETURN_FUNC(strlen_func, __strlen_aarch64);
Peter Collingbourne900d07d2019-10-28 13:11:00 -0700210 }
211}
212
Elliott Hughesa1974062023-05-18 13:30:35 -0700213typedef int strncmp_func(const char*, const char*, size_t);
Peter Collingbourne900d07d2019-10-28 13:11:00 -0700214DEFINE_IFUNC_FOR(strncmp) {
Elliott Hughes5ec0bfa2023-01-25 18:12:18 +0000215 // TODO: enable the SVE version.
216 RETURN_FUNC(strncmp_func, __strncmp_aarch64);
Peter Collingbourne900d07d2019-10-28 13:11:00 -0700217}
218
Elliott Hughesa1974062023-05-18 13:30:35 -0700219typedef size_t strnlen_func(const char*, size_t);
Elliott Hughes3d8e98f2023-01-25 23:33:39 +0000220DEFINE_IFUNC_FOR(strnlen) {
221 // TODO: enable the SVE version.
222 RETURN_FUNC(strnlen_func, __strnlen_aarch64);
223}
224
Peter Collingbourne337a5b32020-02-21 12:11:02 -0800225typedef char* strrchr_func(const char*, int);
226DEFINE_IFUNC_FOR(strrchr) {
Peter Collingbourne7e201172020-12-21 14:08:38 -0800227 if (arg->_hwcap2 & HWCAP2_MTE) {
Peter Collingbourne2361d4e2020-06-03 16:55:37 -0700228 RETURN_FUNC(strrchr_func, __strrchr_aarch64_mte);
Peter Collingbourne337a5b32020-02-21 12:11:02 -0800229 } else {
230 RETURN_FUNC(strrchr_func, __strrchr_aarch64);
Peter Collingbourne900d07d2019-10-28 13:11:00 -0700231 }
232}
233
234} // extern "C"