blob: aa8c7173a5900d37da056de0fa9c1b854dd1c87a [file] [log] [blame]
/*
2 * Copyright 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#ifndef ART_RUNTIME_JIT_JIT_CODE_CACHE_H_
18#define ART_RUNTIME_JIT_JIT_CODE_CACHE_H_
19
20#include "instrumentation.h"
21
22#include "atomic.h"
23#include "base/macros.h"
24#include "base/mutex.h"
25#include "gc_root.h"
26#include "jni.h"
27#include "oat_file.h"
28#include "object_callbacks.h"
29#include "safe_map.h"
30#include "thread_pool.h"
31
32namespace art {
33
34class CompiledMethod;
35class CompilerCallbacks;
36
37namespace mirror {
38class ArtMethod;
}  // namespace mirror
40
41namespace jit {
42
43class JitInstrumentationCache;
44
// JitCodeCache owns one contiguous mem map split into two bump-allocated
// sections: an executable "code" section holding JIT-compiled method code and
// a "data" section for auxiliary arrays that code references. Both live in a
// single map so code-to-data offsets stay within 32 bits (see mem_map_ below).
class JitCodeCache {
 public:
  // Hard upper bound on the capacity accepted by Create().
  static constexpr size_t kMaxCapacity = 1 * GB;
  // Capacity used when callers do not pick one explicitly.
  static constexpr size_t kDefaultCapacity = 2 * MB;

  // Creates and maps a cache of the given capacity. On failure, stores a
  // description in *error_msg; presumably returns null in that case — the
  // definition is in the .cc file.
  static JitCodeCache* Create(size_t capacity, std::string* error_msg);

  // Current bump-allocation cursor in the code section.
  const uint8_t* CodeCachePtr() const {
    return code_cache_ptr_;
  }
  // Bytes of code-section space already handed out.
  size_t CodeCacheSize() const {
    return code_cache_ptr_ - code_cache_begin_;
  }
  // Bytes still free in the code section.
  size_t CodeCacheRemain() const {
    return code_cache_end_ - code_cache_ptr_;
  }
  // Bytes of data-section space already handed out.
  size_t DataCacheSize() const {
    return data_cache_ptr_ - data_cache_begin_;
  }
  // Bytes still free in the data section.
  size_t DataCacheRemain() const {
    return data_cache_end_ - data_cache_ptr_;
  }
  // Number of methods whose code has been placed in the cache.
  size_t NumMethods() const {
    return num_methods_;
  }

  // Whether this cache holds code for the given method.
  bool ContainsMethod(mirror::ArtMethod* method) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  // Whether ptr points into this cache's memory.
  bool ContainsCodePtr(const void* ptr) const;

  // Reserves size bytes from the code section for self. NOTE(review):
  // presumably returns null when the section is exhausted — confirm in .cc.
  uint8_t* ReserveCode(Thread* self, size_t size) LOCKS_EXCLUDED(lock_);

  // Adds the byte array [begin, end) to the data section and returns its
  // location in the cache (null semantics as for ReserveCode).
  uint8_t* AddDataArray(Thread* self, const uint8_t* begin, const uint8_t* end)
      LOCKS_EXCLUDED(lock_);

  // Get code for a method, returns null if it is not in the jit cache.
  const void* GetCodeFor(mirror::ArtMethod* method)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) LOCKS_EXCLUDED(lock_);

  // Records a method's previous code pointer; used when instrumentation
  // deoptimizes a method (see method_code_map_ below).
  void SaveCompiledCode(mirror::ArtMethod* method, const void* old_code_ptr)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) LOCKS_EXCLUDED(lock_);

 private:
  // Takes ownership of code_mem_map.
  explicit JitCodeCache(MemMap* code_mem_map);
  // Flushes the CPU instruction cache (needed after writing executable code).
  void FlushInstructionCache();

  // Guards the allocation cursors and the method/code bookkeeping.
  Mutex lock_;
  // Mem map which holds code and data. We do this since we need to have 32 bit offsets from method
  // headers in code cache which point to things in the data cache. If the maps are more than 4GB
  // apart, having multiple maps wouldn't work.
  std::unique_ptr<MemMap> mem_map_;
  // Code cache section: [begin, end) bounds with ptr as the bump cursor.
  uint8_t* code_cache_ptr_;
  const uint8_t* code_cache_begin_;
  const uint8_t* code_cache_end_;
  // Data cache section: [begin, end) bounds with ptr as the bump cursor.
  uint8_t* data_cache_ptr_;
  const uint8_t* data_cache_begin_;
  const uint8_t* data_cache_end_;
  // Count of methods added to the cache (reported by NumMethods()).
  size_t num_methods_;
  // TODO: This relies on methods not moving.
  // This map holds code for methods if they were deoptimized by the instrumentation stubs. This is
  // required since we have to implement ClassLinker::GetQuickOatCodeFor for walking stacks.
  SafeMap<mirror::ArtMethod*, const void*> method_code_map_;

  DISALLOW_COPY_AND_ASSIGN(JitCodeCache);
};
113
114
115} // namespace jit
116} // namespace art
117
118#endif // ART_RUNTIME_JIT_JIT_CODE_CACHE_H_