/*
 * Copyright (C) 2010 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
|  | 16 |  | 
|  | 17 | #ifndef ANDROID_CUTILS_ATOMIC_ARM_H | 
|  | 18 | #define ANDROID_CUTILS_ATOMIC_ARM_H | 
|  | 19 |  | 
|  | 20 | #include <stdint.h> | 
|  | 21 | #include <machine/cpu-features.h> | 
|  | 22 |  | 
/*
 * Compiler-only barrier: the empty asm with a "memory" clobber stops the
 * compiler from reordering or caching memory accesses across this point.
 * It emits no instructions, so it provides no ordering at the hardware
 * level.
 */
extern inline void android_compiler_barrier(void)
{
    __asm__ __volatile__ ("" : : : "memory");
}
|  | 27 |  | 
/*
 * Full memory barrier (android_memory_barrier) and store/store barrier
 * (android_memory_store_barrier), selected by build configuration:
 *
 *  - ANDROID_SMP == 0: uniprocessor build; only the compiler needs to be
 *    restrained, so a compiler barrier suffices.
 *  - __ARM_HAVE_DMB: the DMB instruction is available; "dmb st" orders
 *    stores only.
 *  - __ARM_HAVE_LDREX_STREX (without DMB): use the CP15 barrier encoding
 *    (mcr p15, 0, <r>, c7, c10, 5); no store-only variant, so the store
 *    barrier falls back to the full barrier.
 *  - otherwise: call the kernel-provided memory-barrier helper at the
 *    fixed kuser address 0xffff0fa0.
 */
#if ANDROID_SMP == 0
extern inline void android_memory_barrier(void)
{
    android_compiler_barrier();
}
extern inline void android_memory_store_barrier(void)
{
    android_compiler_barrier();
}
#elif defined(__ARM_HAVE_DMB)
extern inline void android_memory_barrier(void)
{
    __asm__ __volatile__ ("dmb" : : : "memory");
}
extern inline void android_memory_store_barrier(void)
{
    /* "dmb st" orders store/store only; cheaper than a full dmb. */
    __asm__ __volatile__ ("dmb st" : : : "memory");
}
#elif defined(__ARM_HAVE_LDREX_STREX)
extern inline void android_memory_barrier(void)
{
    /* CP15 data memory barrier; the input register value is ignored. */
    __asm__ __volatile__ ("mcr p15, 0, %0, c7, c10, 5" : : "r" (0) : "memory");
}
extern inline void android_memory_store_barrier(void)
{
    android_memory_barrier();
}
#else
extern inline void android_memory_barrier(void)
{
    /* Kernel user helper: __kernel_memory_barrier at a fixed address. */
    typedef void (kuser_memory_barrier)(void);
    (*(kuser_memory_barrier *)0xffff0fa0)();
}
extern inline void android_memory_store_barrier(void)
{
    android_memory_barrier();
}
#endif
|  | 66 |  | 
/*
 * Load-acquire: read *ptr, then issue a memory barrier so that later
 * memory accesses cannot be reordered before this load.
 */
extern inline int32_t android_atomic_acquire_load(volatile const int32_t *ptr)
{
    int32_t result = *ptr;
    android_memory_barrier();
    return result;
}
|  | 73 |  | 
/*
 * Barrier-then-load: issue a memory barrier first, so earlier memory
 * accesses are ordered before this read of *ptr.
 */
extern inline int32_t android_atomic_release_load(volatile const int32_t *ptr)
{
    int32_t result;
    android_memory_barrier();
    result = *ptr;
    return result;
}
|  | 79 |  | 
/*
 * Store-then-barrier: write value to *ptr, then issue a memory barrier
 * so the store is ordered before any later memory accesses.
 */
extern inline void android_atomic_acquire_store(int32_t value,
                                                volatile int32_t *ptr)
{
    *ptr = value;
    android_memory_barrier();
}
|  | 86 |  | 
/*
 * Store-release: issue a memory barrier, then write value to *ptr, so
 * earlier memory accesses are ordered before the store.
 */
extern inline void android_atomic_release_store(int32_t value,
                                                volatile int32_t *ptr)
{
    android_memory_barrier();
    *ptr = value;
}
|  | 93 |  | 
/*
 * Compare-and-swap: if *ptr == old_value, atomically replace it with
 * new_value.  Returns 0 on success, nonzero if the value did not match.
 * No memory barrier is implied; use the acquire/release wrappers below
 * when ordering is required.
 */
#if defined(__thumb__)
/* Out-of-line implementation for Thumb builds — presumably because the
 * ldrex/strex asm below requires ARM mode on these targets; confirm
 * against the .c definition. */
extern int android_atomic_cas(int32_t old_value, int32_t new_value,
                              volatile int32_t *ptr);
#elif defined(__ARM_HAVE_LDREX_STREX)
extern inline int android_atomic_cas(int32_t old_value, int32_t new_value,
                                     volatile int32_t *ptr)
{
    int32_t prev, status;
    do {
        /* ldrex takes an exclusive monitor on *ptr; strexeq stores only
         * if the loaded value equalled old_value.  status is preset to 0
         * by the mov, and set nonzero by strexeq if the exclusive store
         * was lost, in which case the whole sequence is retried. */
        __asm__ __volatile__ ("ldrex %0, [%3]\n"
                              "mov %1, #0\n"
                              "teq %0, %4\n"
                              "strexeq %1, %5, [%3]"
                              : "=&r" (prev), "=&r" (status), "+m"(*ptr)
                              : "r" (ptr), "Ir" (old_value), "r" (new_value)
                              : "cc");
    } while (__builtin_expect(status != 0, 0));
    /* Success iff the loaded value matched old_value. */
    return prev != old_value;
}
#else
extern inline int android_atomic_cas(int32_t old_value, int32_t new_value,
                                     volatile int32_t *ptr)
{
    /* Kernel user helper: __kernel_cmpxchg at a fixed address; returns
     * zero on a successful swap. */
    typedef int (kuser_cmpxchg)(int32_t, int32_t, volatile int32_t *);
    int32_t prev, status;
    prev = *ptr;
    do {
        status = (*(kuser_cmpxchg *)0xffff0fc0)(old_value, new_value, ptr);
        if (__builtin_expect(status == 0, 1))
            return 0;
        /* Helper failed: retry only while *ptr still equals old_value
         * (i.e. the failure may have been spurious). */
        prev = *ptr;
    } while (prev == old_value);
    return 1;
}
#endif
|  | 129 |  | 
/*
 * CAS with acquire ordering: perform the swap, then issue a memory
 * barrier so later accesses cannot move before it.  Returns the CAS
 * result (0 on success, nonzero on failure).
 */
extern inline int android_atomic_acquire_cas(int32_t old_value,
                                             int32_t new_value,
                                             volatile int32_t *ptr)
{
    int status = android_atomic_cas(old_value, new_value, ptr);
    android_memory_barrier();
    return status;
}
|  | 138 |  | 
/*
 * CAS with release ordering: issue a memory barrier first so earlier
 * accesses are ordered before the swap.  Returns the CAS result
 * (0 on success, nonzero on failure).
 */
extern inline int android_atomic_release_cas(int32_t old_value,
                                             int32_t new_value,
                                             volatile int32_t *ptr)
{
    int result;
    android_memory_barrier();
    result = android_atomic_cas(old_value, new_value, ptr);
    return result;
}
|  | 146 |  | 
|  | 147 |  | 
/*
 * Atomic fetch-and-add: adds increment to *ptr and returns the previous
 * value.  A memory barrier is issued before the update in the inline
 * variants.
 */
#if defined(__thumb__)
/* Out-of-line for Thumb builds; see the .c definition. */
extern int32_t android_atomic_add(int32_t increment,
                                  volatile int32_t *ptr);
#elif defined(__ARM_HAVE_LDREX_STREX)
extern inline int32_t android_atomic_add(int32_t increment,
                                         volatile int32_t *ptr)
{
    int32_t prev, tmp, status;
    android_memory_barrier();
    do {
        /* ldrex/strex retry loop: recompute and re-store until the
         * exclusive store succeeds (status == 0). */
        __asm__ __volatile__ ("ldrex %0, [%4]\n"
                              "add %1, %0, %5\n"
                              "strex %2, %1, [%4]"
                              : "=&r" (prev), "=&r" (tmp),
                                "=&r" (status), "+m" (*ptr)
                              : "r" (ptr), "Ir" (increment)
                              : "cc");
    } while (__builtin_expect(status != 0, 0));
    return prev;
}
#else
extern inline int32_t android_atomic_add(int32_t increment,
                                         volatile int32_t *ptr)
{
    int32_t prev, status;
    android_memory_barrier();
    /* CAS loop: retry until no other writer intervenes between the read
     * of prev and the swap. */
    do {
        prev = *ptr;
        status = android_atomic_cas(prev, prev + increment, ptr);
    } while (__builtin_expect(status != 0, 0));
    return prev;
}
#endif
|  | 181 |  | 
/* Atomically adds 1 to *addr; returns the previous value. */
extern inline int32_t android_atomic_inc(volatile int32_t *addr)
{
    return android_atomic_add(1, addr);
}
|  | 186 |  | 
/* Atomically subtracts 1 from *addr; returns the previous value. */
extern inline int32_t android_atomic_dec(volatile int32_t *addr)
{
    return android_atomic_add(-1, addr);
}
|  | 191 |  | 
/*
 * Atomic fetch-and-AND: *ptr &= value; returns the previous value.
 * A memory barrier is issued before the update in the inline variants.
 */
#if defined(__thumb__)
/* Out-of-line for Thumb builds; see the .c definition. */
extern int32_t android_atomic_and(int32_t value, volatile int32_t *ptr);
#elif defined(__ARM_HAVE_LDREX_STREX)
extern inline int32_t android_atomic_and(int32_t value, volatile int32_t *ptr)
{
    int32_t prev, tmp, status;
    android_memory_barrier();
    do {
        /* ldrex/strex retry loop, as in android_atomic_add. */
        __asm__ __volatile__ ("ldrex %0, [%4]\n"
                              "and %1, %0, %5\n"
                              "strex %2, %1, [%4]"
                              : "=&r" (prev), "=&r" (tmp),
                                "=&r" (status), "+m" (*ptr)
                              : "r" (ptr), "Ir" (value)
                              : "cc");
    } while (__builtin_expect(status != 0, 0));
    return prev;
}
#else
extern inline int32_t android_atomic_and(int32_t value, volatile int32_t *ptr)
{
    int32_t prev, status;
    android_memory_barrier();
    /* CAS loop fallback. */
    do {
        prev = *ptr;
        status = android_atomic_cas(prev, prev & value, ptr);
    } while (__builtin_expect(status != 0, 0));
    return prev;
}
#endif
|  | 222 |  | 
/*
 * Atomic fetch-and-OR: *ptr |= value; returns the previous value.
 * A memory barrier is issued before the update in the inline variants.
 */
#if defined(__thumb__)
/* Out-of-line for Thumb builds; see the .c definition. */
extern int32_t android_atomic_or(int32_t value, volatile int32_t *ptr);
#elif defined(__ARM_HAVE_LDREX_STREX)
extern inline int32_t android_atomic_or(int32_t value, volatile int32_t *ptr)
{
    int32_t prev, tmp, status;
    android_memory_barrier();
    do {
        /* ldrex/strex retry loop, as in android_atomic_add. */
        __asm__ __volatile__ ("ldrex %0, [%4]\n"
                              "orr %1, %0, %5\n"
                              "strex %2, %1, [%4]"
                              : "=&r" (prev), "=&r" (tmp),
                                "=&r" (status), "+m" (*ptr)
                              : "r" (ptr), "Ir" (value)
                              : "cc");
    } while (__builtin_expect(status != 0, 0));
    return prev;
}
#else
extern inline int32_t android_atomic_or(int32_t value, volatile int32_t *ptr)
{
    int32_t prev, status;
    android_memory_barrier();
    /* CAS loop fallback. */
    do {
        prev = *ptr;
        status = android_atomic_cas(prev, prev | value, ptr);
    } while (__builtin_expect(status != 0, 0));
    return prev;
}
#endif
|  | 253 |  | 
|  | 254 | #endif /* ANDROID_CUTILS_ATOMIC_ARM_H */ |