/*
 * Copyright (C) 2010 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "atomic.h"

#define NEED_SWAP_MUTEXES !defined(__arm__) && !defined(__i386__)

#if NEED_SWAP_MUTEXES
#include <vector>
#include "base/mutex.h"
#include "base/stl_util.h"
#include "base/stringprintf.h"
#include "thread.h"
#endif

namespace art {

#if NEED_SWAP_MUTEXES
// We stripe across a bunch of different mutexes to reduce contention.
static const size_t kSwapMutexCount = 32;
static std::vector<Mutex*>* gSwapMutexes;

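// int64_t values are 8-byte aligned, so the low three address bits carry no information; shift
// them away before taking the modulus so that consecutive values spread across the stripes.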
static Mutex& GetSwapMutex(const volatile int64_t* addr) {
  return *(*gSwapMutexes)[(reinterpret_cast<uintptr_t>(addr) >> 3U) % kSwapMutexCount];
}
#endif

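// Allocates the mutexes used to guard 64-bit accesses on targets without suitable atomic
// instructions. No-op elsewhere.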
void QuasiAtomic::Startup() {
#if NEED_SWAP_MUTEXES
  gSwapMutexes = new std::vector<Mutex*>;
  for (size_t i = 0; i < kSwapMutexCount; ++i) {
    gSwapMutexes->push_back(new Mutex("QuasiAtomic stripe"));
  }
#endif
}

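// Frees the mutexes allocated by Startup().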
void QuasiAtomic::Shutdown() {
#if NEED_SWAP_MUTEXES
  STLDeleteElements(gSwapMutexes);
  delete gSwapMutexes;
#endif
}

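// Loads a 64-bit value without tearing: under a striped mutex where required, via an exclusive
// load on ARM, and via a single SSE move on x86.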
int64_t QuasiAtomic::Read64(volatile const int64_t* addr) {
  int64_t value;
#if NEED_SWAP_MUTEXES
  MutexLock mu(Thread::Current(), GetSwapMutex(addr));
  value = *addr;
#elif defined(__arm__)
  // Exclusive loads are defined not to tear, so clearing the exclusive state isn't necessary. If
  // we have LPAE (such as Cortex-A15) then ldrd would suffice.
  __asm__ __volatile__("@ QuasiAtomic::Read64\n"
                       "ldrexd     %0, %H0, [%1]"
                       : "=&r" (value)
                       : "r" (addr));
#elif defined(__i386__)
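  // A single movq through an SSE register reads the whole 64-bit value in one access.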
  __asm__ __volatile__(
      "movq     %1, %0\n"
      : "=x" (value)
      : "m" (*addr));
#else
#error Unexpected architecture
#endif
  return value;
}

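// Stores a 64-bit value without tearing, using the same per-architecture strategies as Read64().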
void QuasiAtomic::Write64(volatile int64_t* addr, int64_t value) {
#if NEED_SWAP_MUTEXES
  MutexLock mu(Thread::Current(), GetSwapMutex(addr));
  *addr = value;
#elif defined(__arm__)
  // The write is done as a swap so that the cache line is in the exclusive state for the store.
  // If we know that the ARM CPU has LPAE (such as Cortex-A15) this isn't necessary and strd will
  // suffice.
  int64_t prev;
  int status;
  do {
    __asm__ __volatile__("@ QuasiAtomic::Write64\n"
                         "ldrexd     %0, %H0, [%3]\n"
                         "strexd     %1, %4, %H4, [%3]"
                         : "=&r" (prev), "=&r" (status), "+m"(*addr)
                         : "r" (addr), "r" (value)
                         : "cc");
  } while (__builtin_expect(status != 0, 0));
#elif defined(__i386__)
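  // A single movq through an SSE register writes the whole 64-bit value in one access.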
  __asm__ __volatile__(
      "movq     %1, %0"
      : "=m" (*addr)
      : "x" (value));
#else
#error Unexpected architecture
#endif
}

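// Atomically stores new_value to *addr if it currently holds old_value; returns whether the
// swap happened.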
bool QuasiAtomic::Cas64(int64_t old_value, int64_t new_value, volatile int64_t* addr) {
#if NEED_SWAP_MUTEXES
  MutexLock mu(Thread::Current(), GetSwapMutex(addr));
  if (*addr == old_value) {
    *addr = new_value;
    return true;
  }
  return false;
#elif defined(__arm__)
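  // LL/SC loop: load-exclusive the current value, compare it with old_value and only
  // store-exclusive new_value on a match; retry if the exclusive store fails.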
  int64_t prev;
  int status;
  do {
    __asm__ __volatile__("@ QuasiAtomic::Cas64\n"
                         "ldrexd     %0, %H0, [%3]\n"
                         "mov        %1, #0\n"
                         "teq        %0, %4\n"
                         "teqeq      %H0, %H4\n"
                         "strexdeq   %1, %5, %H5, [%3]"
                         : "=&r" (prev), "=&r" (status), "+m"(*addr)
                         : "r" (addr), "Ir" (old_value), "r" (new_value)
                         : "cc");
  } while (__builtin_expect(status != 0, 0));
  return prev == old_value;
#elif defined(__i386__)
  // The compiler does the right job and works better than inline assembly, especially with -O0
  // compilation.
  return __sync_bool_compare_and_swap(addr, old_value, new_value);
#else
#error Unexpected architecture
#endif
}

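// Reports whether 64-bit atomics on this target are implemented with the mutex stripes rather
// than with atomic instructions.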
bool QuasiAtomic::LongAtomicsUseMutexes() {
#if NEED_SWAP_MUTEXES
  return true;
#else
  return false;
#endif
}

}  // namespace art