/*
 * Copyright 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_JIT_JIT_CODE_CACHE_H_
#define ART_RUNTIME_JIT_JIT_CODE_CACHE_H_

#include <iosfwd>
#include <memory>
#include <set>
#include <string>
#include <unordered_set>
#include <vector>

#include "base/arena_containers.h"
#include "base/array_ref.h"
#include "base/atomic.h"
#include "base/histogram.h"
#include "base/macros.h"
#include "base/mem_map.h"
#include "base/mutex.h"
#include "base/safe_map.h"
#include "jit_memory_region.h"

namespace art {

class ArtMethod;
template<class T> class Handle;
class LinearAlloc;
class InlineCache;
class IsMarkedVisitor;
class JitJniStubTestHelper;
class OatQuickMethodHeader;
struct ProfileMethodInfo;
class ProfilingInfo;
class Thread;

namespace gc {
namespace accounting {
template<size_t kAlignment> class MemoryRangeBitmap;
}  // namespace accounting
}  // namespace gc

namespace mirror {
class Class;
class Object;
template<class T> class ObjectArray;
}  // namespace mirror

namespace jit {

class MarkCodeClosure;

// Type of bitmap used for tracking live functions in the JIT code cache for the purposes
// of garbage collecting code.
using CodeCacheBitmap = gc::accounting::MemoryRangeBitmap<kJitCodeAccountingBytes>;

// The state of profile-based compilation in the zygote.
// - kInProgress:      JIT compilation is happening.
// - kDone:            JIT compilation is finished, and the zygote is preparing to notify
//                     the other processes.
// - kNotifiedOk:      the zygote has notified the other processes, which can start
//                     sharing the boot image method mappings.
// - kNotifiedFailure: the zygote has notified the other processes, but they
//                     cannot share the boot image method mappings due to
//                     unexpected errors.
enum class ZygoteCompilationState : uint8_t {
  kInProgress = 0,
  kDone = 1,
  kNotifiedOk = 2,
  kNotifiedFailure = 3,
};
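
// Illustrative lifecycle sketch (not actual runtime code): the zygote drives
// these transitions through ZygoteMap::SetCompilationState() below, and forked
// processes observe the shared state through the ZygoteMap query methods.
// `success` is a hypothetical flag standing in for the result of the
// notification step:
//
//   zygote_map.SetCompilationState(ZygoteCompilationState::kDone);
//   ... notify the other processes ...
//   zygote_map.SetCompilationState(success ? ZygoteCompilationState::kNotifiedOk
//                                          : ZygoteCompilationState::kNotifiedFailure);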

// Class abstraction over a map of ArtMethod -> compiled code, where the
// ArtMethods are compiled by the zygote, and the map acts as a communication
// channel between the zygote and the other processes.
// For the zygote process, this map is the only place it stores compiled
// code. JitCodeCache::method_code_map_ is empty.
//
// This map is writable only by the zygote, and readable by all children.
class ZygoteMap {
 public:
  explicit ZygoteMap(JitMemoryRegion* region)
      : map_(), region_(region), compilation_state_(nullptr) {}

  // Initialize the data structure so it can hold `number_of_methods` mappings.
  // Note that the map is fixed size and never grows.
  void Initialize(uint32_t number_of_methods) REQUIRES(!Locks::jit_lock_);

  // Add the mapping method -> code.
  void Put(const void* code, ArtMethod* method) REQUIRES(Locks::jit_lock_);

  // Return the code pointer for the given method. If `pc` is not zero, check that
  // the pc falls into that code range. Return null otherwise.
  const void* GetCodeFor(ArtMethod* method, uintptr_t pc = 0) const;

  // Return whether the map has associated code for the given method.
  bool ContainsMethod(ArtMethod* method) const {
    return GetCodeFor(method) != nullptr;
  }

  void SetCompilationState(ZygoteCompilationState state) {
    region_->WriteData(compilation_state_, state);
  }

  bool IsCompilationDoneButNotNotified() const {
    return compilation_state_ != nullptr && *compilation_state_ == ZygoteCompilationState::kDone;
  }

  bool IsCompilationNotified() const {
    return compilation_state_ != nullptr && *compilation_state_ > ZygoteCompilationState::kDone;
  }

  bool CanMapBootImageMethods() const {
    return compilation_state_ != nullptr &&
           *compilation_state_ == ZygoteCompilationState::kNotifiedOk;
  }

 private:
  struct Entry {
    ArtMethod* method;
    // Note we currently only allocate code in the low 4GiB, so we could just reserve
    // 4 bytes for the code pointer. For simplicity, and in case we move to 64-bit
    // addresses for code, just keep it void* for now.
    const void* code_ptr;
  };

  // The map allocated with `region_`.
  ArrayRef<const Entry> map_;

  // The region in which the map is allocated.
  JitMemoryRegion* const region_;

  // The current state of compilation in the zygote. Starts with kInProgress,
  // and should end with kNotifiedOk or kNotifiedFailure.
  const ZygoteCompilationState* compilation_state_;

  DISALLOW_COPY_AND_ASSIGN(ZygoteMap);
};
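
// A minimal usage sketch (illustrative only; `zygote_map`, `method` and
// `code_ptr` are hypothetical):
//
//   // Zygote side, while compiling boot image methods (holds Locks::jit_lock_):
//   zygote_map->Put(code_ptr, method);
//
//   // Child process side, after the zygote has notified it:
//   if (zygote_map->CanMapBootImageMethods()) {
//     const void* code = zygote_map->GetCodeFor(method);
//     // A non-null `code` can then be installed as the method's entrypoint.
//   }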

class JitCodeCache {
 public:
  static constexpr size_t kMaxCapacity = 64 * MB;
  // Set the default to a very low amount for debug builds to stress the code cache
  // collection.
  static constexpr size_t kInitialCapacity = kIsDebugBuild ? 8 * KB : 64 * KB;

  // By default, do not GC until reaching 256KB.
  static constexpr size_t kReservedCapacity = kInitialCapacity * 4;
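  // With the values above, that is 4 * 64KB = 256KB on release builds; debug
  // builds start collecting at 4 * 8KB = 32KB.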

  // Create the code cache. On failure, an error message is passed in the out
  // arg `error_msg`.
  static JitCodeCache* Create(bool used_only_for_profile_data,
                              bool rwx_memory_allowed,
                              bool is_zygote,
                              std::string* error_msg);
  ~JitCodeCache();

  bool NotifyCompilationOf(ArtMethod* method,
                           Thread* self,
                           bool osr,
                           bool prejit,
                           bool baseline,
                           JitMemoryRegion* region)
      REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!Locks::jit_lock_);

  void NotifyMethodRedefined(ArtMethod* method)
      REQUIRES(Locks::mutator_lock_)
      REQUIRES(!Locks::jit_lock_);

  // Notify the code cache that the compiler wants to use the
  // profiling info of `method` to drive optimizations,
  // and therefore ensure the returned profiling info object is not
  // collected.
  ProfilingInfo* NotifyCompilerUse(ArtMethod* method, Thread* self)
      REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!Locks::jit_lock_);

  void DoneCompiling(ArtMethod* method, Thread* self, bool osr)
      REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!Locks::jit_lock_);

  void DoneCompilerUse(ArtMethod* method, Thread* self)
      REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!Locks::jit_lock_);

  // Return true if the code cache contains this pc.
  bool ContainsPc(const void* pc) const;

  // Returns true if either the method's entrypoint is JIT compiled code or it is the
  // instrumentation entrypoint and we can jump to JIT code for this method. For testing use only.
  bool WillExecuteJitCode(ArtMethod* method) REQUIRES(!Locks::jit_lock_);

  // Return true if the code cache contains this method.
  bool ContainsMethod(ArtMethod* method) REQUIRES(!Locks::jit_lock_);

  // Return the code pointer for a JNI-compiled stub if the method is in the cache, null otherwise.
  const void* GetJniStubCode(ArtMethod* method) REQUIRES(!Locks::jit_lock_);

  // Allocate a region for both code and data in the JIT code cache.
  // The reserved memory is left completely uninitialized.
  bool Reserve(Thread* self,
               JitMemoryRegion* region,
               size_t code_size,
               size_t stack_map_size,
               size_t number_of_roots,
               ArtMethod* method,
               /*out*/ArrayRef<const uint8_t>* reserved_code,
               /*out*/ArrayRef<const uint8_t>* reserved_data)
      REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!Locks::jit_lock_);

  // Initialize code and data of previously allocated memory.
  //
  // `cha_single_implementation_list` needs to be registered via CHA (if it's
  // still valid), since the compiled code still needs to be invalidated if the
  // single-implementation assumptions are violated later. This needs to be done
  // even if `has_should_deoptimize_flag` is false, which can happen due to CHA
  // guard elimination.
  bool Commit(Thread* self,
              JitMemoryRegion* region,
              ArtMethod* method,
              ArrayRef<const uint8_t> reserved_code,  // Uninitialized destination.
              ArrayRef<const uint8_t> code,  // Compiler output (source).
              ArrayRef<const uint8_t> reserved_data,  // Uninitialized destination.
              const std::vector<Handle<mirror::Object>>& roots,
              ArrayRef<const uint8_t> stack_map,  // Compiler output (source).
              bool osr,
              bool has_should_deoptimize_flag,
              const ArenaSet<ArtMethod*>& cha_single_implementation_list)
      REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!Locks::jit_lock_);

  // Free the previously allocated memory regions.
  void Free(Thread* self, JitMemoryRegion* region, const uint8_t* code, const uint8_t* data)
      REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!Locks::jit_lock_);
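
  // Together, Reserve(), Commit() and Free() form the allocation protocol for
  // newly compiled code. A sketch of the expected calling pattern (locking and
  // some error handling elided; the local names are illustrative):
  //
  //   ArrayRef<const uint8_t> reserved_code;
  //   ArrayRef<const uint8_t> reserved_data;
  //   if (!cache->Reserve(self, region, code_size, stack_map_size, number_of_roots,
  //                       method, &reserved_code, &reserved_data)) {
  //     return false;  // Cache full; a later collection may make space.
  //   }
  //   if (!cache->Commit(self, region, method, reserved_code, code, reserved_data,
  //                      roots, stack_map, osr, has_should_deoptimize_flag,
  //                      cha_single_implementation_list)) {
  //     cache->Free(self, region, reserved_code.data(), reserved_data.data());
  //     return false;
  //   }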

  // Perform a collection on the code cache.
  void GarbageCollectCache(Thread* self)
      REQUIRES(!Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Given the 'pc', try to find the JIT compiled code associated with it.
  // Return null if 'pc' is not in the code cache. 'method' is passed for
  // a sanity check.
  OatQuickMethodHeader* LookupMethodHeader(uintptr_t pc, ArtMethod* method)
      REQUIRES(!Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  OatQuickMethodHeader* LookupOsrMethodHeader(ArtMethod* method)
      REQUIRES(!Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Removes a method from the cache for testing purposes. The caller
  // must ensure that all threads are suspended and that the method is
  // not on any thread's stack.
  bool RemoveMethod(ArtMethod* method, bool release_memory)
      REQUIRES(!Locks::jit_lock_)
      REQUIRES(Locks::mutator_lock_);

  // Remove all methods in our cache that were allocated by 'alloc'.
  void RemoveMethodsIn(Thread* self, const LinearAlloc& alloc)
      REQUIRES(!Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void CopyInlineCacheInto(const InlineCache& ic, Handle<mirror::ObjectArray<mirror::Class>> array)
      REQUIRES(!Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Create a 'ProfilingInfo' for 'method'. If 'retry_allocation' is true,
  // this will collect and retry if the first allocation is unsuccessful.
  ProfilingInfo* AddProfilingInfo(Thread* self,
                                  ArtMethod* method,
                                  const std::vector<uint32_t>& entries,
                                  bool retry_allocation)
      REQUIRES(!Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);
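
  // For example, a caller that must have a ProfilingInfo could request a retry,
  // at the cost of a possible collection (a sketch; `cache`, `self`, `method`
  // and `entries` are illustrative):
  //
  //   ProfilingInfo* info =
  //       cache->AddProfilingInfo(self, method, entries, /*retry_allocation=*/ true);
  //   // `info` may still be null if the allocation failed again after the retry.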

  bool OwnsSpace(const void* mspace) const NO_THREAD_SAFETY_ANALYSIS {
    return private_region_.OwnsSpace(mspace) || shared_region_.OwnsSpace(mspace);
  }

  void* MoreCore(const void* mspace, intptr_t increment);

  // Adds to `methods` all profiled methods which are part of any of the given dex locations.
  void GetProfiledMethods(const std::set<std::string>& dex_base_locations,
                          std::vector<ProfileMethodInfo>& methods)
      REQUIRES(!Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void InvalidateAllCompiledCode()
      REQUIRES(!Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void InvalidateCompiledCodeFor(ArtMethod* method, const OatQuickMethodHeader* code)
      REQUIRES(!Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void Dump(std::ostream& os) REQUIRES(!Locks::jit_lock_);

  bool IsOsrCompiled(ArtMethod* method) REQUIRES(!Locks::jit_lock_);

  void SweepRootTables(IsMarkedVisitor* visitor)
      REQUIRES(!Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // The GC needs to disallow the reading of inline caches when it processes them,
  // to avoid a class being used while it is being deleted.
  void AllowInlineCacheAccess() REQUIRES(!Locks::jit_lock_);
  void DisallowInlineCacheAccess() REQUIRES(!Locks::jit_lock_);
  void BroadcastForInlineCacheAccess() REQUIRES(!Locks::jit_lock_);
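
  // A GC pause might bracket its inline cache processing roughly like this
  // (an illustrative sketch, not actual collector code):
  //
  //   code_cache->DisallowInlineCacheAccess();
  //   ...  // Move or delete classes; inline cache readers wait in the meantime.
  //   code_cache->AllowInlineCacheAccess();
  //   code_cache->BroadcastForInlineCacheAccess();  // Wake up any waiting readers.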

  // Notify the code cache that the method pointed to by 'old_method' is being
  // moved to 'new_method' since it is being made obsolete.
  void MoveObsoleteMethod(ArtMethod* old_method, ArtMethod* new_method)
      REQUIRES(!Locks::jit_lock_) REQUIRES(Locks::mutator_lock_);

  // Dynamically change whether we want to garbage collect code.
  void SetGarbageCollectCode(bool value) REQUIRES(!Locks::jit_lock_);

  bool GetGarbageCollectCode() REQUIRES(!Locks::jit_lock_);

  // Unsafe variant for debug checks.
  bool GetGarbageCollectCodeUnsafe() const NO_THREAD_SAFETY_ANALYSIS {
    return garbage_collect_code_;
  }
  ZygoteMap* GetZygoteMap() {
    return &zygote_map_;
  }

  // If JIT GC has been disabled (and instrumentation has been enabled), this will return the
  // JIT-compiled entrypoint for this method. Otherwise it will return null.
  const void* FindCompiledCodeForInstrumentation(ArtMethod* method)
      REQUIRES(!Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Fetch the code of a method that was JITted, but the JIT could not
  // update its entrypoint due to the resolution trampoline.
  const void* GetSavedEntryPointOfPreCompiledMethod(ArtMethod* method)
      REQUIRES(!Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void PostForkChildAction(bool is_system_server, bool is_zygote);

  // Clear the entrypoints of JIT compiled methods that belong in the zygote space.
  // This is used for removing non-debuggable JIT code at the point we realize the runtime
  // is debuggable.
  void ClearEntryPointsInZygoteExecSpace() REQUIRES(!Locks::jit_lock_) REQUIRES(Locks::mutator_lock_);

  JitMemoryRegion* GetCurrentRegion();
  bool IsSharedRegion(const JitMemoryRegion& region) const { return &region == &shared_region_; }
  bool CanAllocateProfilingInfo() {
    // If we don't have a private region, we cannot allocate a profiling info.
    // A shared region doesn't, in general, support GC objects, which a profiling
    // info can reference.
    JitMemoryRegion* region = GetCurrentRegion();
    return region->IsValid() && !IsSharedRegion(*region);
  }

 private:
  JitCodeCache();

  ProfilingInfo* AddProfilingInfoInternal(Thread* self,
                                          ArtMethod* method,
                                          const std::vector<uint32_t>& entries)
      REQUIRES(Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // If a collection is in progress, wait for it to finish. Must be called with the mutator lock.
  // The non-mutator-lock version should be used if possible. This method will release and then
  // re-acquire the mutator lock.
  void WaitForPotentialCollectionToCompleteRunnable(Thread* self)
      REQUIRES(Locks::jit_lock_, !Roles::uninterruptible_) REQUIRES_SHARED(Locks::mutator_lock_);

  // If a collection is in progress, wait for it to finish. Return
  // whether the thread actually waited.
  bool WaitForPotentialCollectionToComplete(Thread* self)
      REQUIRES(Locks::jit_lock_) REQUIRES(!Locks::mutator_lock_);

  // Remove CHA dependents and underlying allocations for entries in `method_headers`.
  void FreeAllMethodHeaders(const std::unordered_set<OatQuickMethodHeader*>& method_headers)
      REQUIRES(!Locks::jit_lock_)
      REQUIRES(!Locks::cha_lock_);

  // Removes a method from the cache. The caller must ensure that all threads
  // are suspended and that the method is not on any thread's stack.
  bool RemoveMethodLocked(ArtMethod* method, bool release_memory)
      REQUIRES(Locks::jit_lock_)
      REQUIRES(Locks::mutator_lock_);

  // Free code and data allocations for `code_ptr`.
  void FreeCodeAndData(const void* code_ptr, bool free_debug_info = true)
      REQUIRES(Locks::jit_lock_);

  // Number of bytes allocated in the code cache.
  size_t CodeCacheSize() REQUIRES(!Locks::jit_lock_);

  // Number of bytes allocated in the data cache.
  size_t DataCacheSize() REQUIRES(!Locks::jit_lock_);

  // Number of bytes allocated in the code cache.
  size_t CodeCacheSizeLocked() REQUIRES(Locks::jit_lock_);

  // Number of bytes allocated in the data cache.
  size_t DataCacheSizeLocked() REQUIRES(Locks::jit_lock_);

  // Notify all waiting threads that a collection is done.
  void NotifyCollectionDone(Thread* self) REQUIRES(Locks::jit_lock_);

  // Return whether we should do a full collection given the current state of the cache.
  bool ShouldDoFullCollection()
      REQUIRES(Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void DoCollection(Thread* self, bool collect_profiling_info)
      REQUIRES(!Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void RemoveUnmarkedCode(Thread* self)
      REQUIRES(!Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void MarkCompiledCodeOnThreadStacks(Thread* self)
      REQUIRES(!Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  CodeCacheBitmap* GetLiveBitmap() const {
    return live_bitmap_.get();
  }

  bool IsInZygoteDataSpace(const void* ptr) const {
    return shared_region_.IsInDataSpace(ptr);
  }

  bool IsInZygoteExecSpace(const void* ptr) const {
    return shared_region_.IsInExecSpace(ptr);
  }

  bool IsWeakAccessEnabled(Thread* self) const;
  void WaitUntilInlineCacheAccessible(Thread* self)
      REQUIRES(!Locks::jit_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  class JniStubKey;
  class JniStubData;

  // Whether the GC allows accessing weaks in inline caches. Note that this
  // is not used by the concurrent collector, which uses
  // Thread::SetWeakRefAccessEnabled instead.
  Atomic<bool> is_weak_access_enabled_;

  // Condition to wait on for accessing inline caches.
  ConditionVariable inline_cache_cond_ GUARDED_BY(Locks::jit_lock_);

  // -------------- JIT memory regions ------------------------------------- //

  // Shared region, inherited from the zygote.
  JitMemoryRegion shared_region_;

  // Process's own region.
  JitMemoryRegion private_region_;

  // -------------- Global JIT maps --------------------------------------- //

  // Holds compiled code associated with the shorty for a JNI stub.
  SafeMap<JniStubKey, JniStubData> jni_stubs_map_ GUARDED_BY(Locks::jit_lock_);

  // Holds compiled code associated with the ArtMethod.
  SafeMap<const void*, ArtMethod*> method_code_map_ GUARDED_BY(Locks::jit_lock_);

  // Holds compiled code associated with the ArtMethod. Used when pre-jitting
  // methods whose entrypoints have the resolution stub.
  SafeMap<ArtMethod*, const void*> saved_compiled_methods_map_ GUARDED_BY(Locks::jit_lock_);

  // Holds OSR compiled code associated with the ArtMethod.
  SafeMap<ArtMethod*, const void*> osr_code_map_ GUARDED_BY(Locks::jit_lock_);

  // ProfilingInfo objects we have allocated.
  std::vector<ProfilingInfo*> profiling_infos_ GUARDED_BY(Locks::jit_lock_);

  // Methods that the zygote has compiled and can be shared across processes
  // forked from the zygote.
  ZygoteMap zygote_map_;

  // -------------- JIT GC related data structures ----------------------- //

  // Condition to wait on during collection.
  ConditionVariable lock_cond_ GUARDED_BY(Locks::jit_lock_);

  // Whether there is a code cache collection in progress.
  bool collection_in_progress_ GUARDED_BY(Locks::jit_lock_);

  // Bitmap for collecting code and data.
  std::unique_ptr<CodeCacheBitmap> live_bitmap_;

  // Whether the last collection round increased the code cache.
  bool last_collection_increased_code_cache_ GUARDED_BY(Locks::jit_lock_);

  // Whether we can do garbage collection. Not 'const' as tests may override this.
  bool garbage_collect_code_ GUARDED_BY(Locks::jit_lock_);

  // ---------------- JIT statistics -------------------------------------- //

  // Number of compilations done throughout the lifetime of the JIT.
  size_t number_of_compilations_ GUARDED_BY(Locks::jit_lock_);

  // Number of compilations for on-stack-replacement done throughout the lifetime of the JIT.
  size_t number_of_osr_compilations_ GUARDED_BY(Locks::jit_lock_);

  // Number of code cache collections done throughout the lifetime of the JIT.
  size_t number_of_collections_ GUARDED_BY(Locks::jit_lock_);

  // Histogram for keeping track of stack map size statistics.
  Histogram<uint64_t> histogram_stack_map_memory_use_ GUARDED_BY(Locks::jit_lock_);

  // Histogram for keeping track of code size statistics.
  Histogram<uint64_t> histogram_code_memory_use_ GUARDED_BY(Locks::jit_lock_);

  // Histogram for keeping track of profiling info statistics.
  Histogram<uint64_t> histogram_profiling_info_memory_use_ GUARDED_BY(Locks::jit_lock_);

  friend class art::JitJniStubTestHelper;
  friend class ScopedCodeCacheWrite;
  friend class MarkCodeClosure;

  DISALLOW_COPY_AND_ASSIGN(JitCodeCache);
};

}  // namespace jit
}  // namespace art

#endif  // ART_RUNTIME_JIT_JIT_CODE_CACHE_H_