/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#ifndef ART_COMPILER_OPTIMIZING_INTRINSICS_UTILS_H_
#define ART_COMPILER_OPTIMIZING_INTRINSICS_UTILS_H_

#include "base/casts.h"
#include "base/macros.h"
#include "code_generator.h"
#include "locations.h"
#include "nodes.h"
#include "utils/assembler.h"
#include "utils/label.h"

namespace art {

// Default slow-path for fallback (calling the managed code to handle the intrinsic) in an
// intrinsified call. This will copy the arguments into the positions for a regular call.
//
// Note: The actual parameters are required to be in the locations given by the invoke's location
//       summary. If an intrinsic modifies those locations before a slowpath call, they must be
//       restored!
//
// Note: If an invoke wasn't sharpened, we will put down an invoke-virtual here. That's potentially
//       sub-optimal (compared to a direct pointer call), but this is a slow-path.

Vladimir Marko9922f002020-06-08 15:05:15 +010040template <typename TDexCallingConvention,
41 typename TSlowPathCode = SlowPathCode,
42 typename TAssembler = Assembler>
43class IntrinsicSlowPath : public TSlowPathCode {
Andreas Gampe85b62f22015-09-09 13:15:38 -070044 public:
Vladimir Marko9922f002020-06-08 15:05:15 +010045 explicit IntrinsicSlowPath(HInvoke* invoke) : TSlowPathCode(invoke), invoke_(invoke) { }
Andreas Gampe85b62f22015-09-09 13:15:38 -070046
47 Location MoveArguments(CodeGenerator* codegen) {
48 TDexCallingConvention calling_convention_visitor;
49 IntrinsicVisitor::MoveArguments(invoke_, codegen, &calling_convention_visitor);
50 return calling_convention_visitor.GetMethodLocation();
51 }
52
Roland Levillainbbc6e7e2018-08-24 16:58:47 +010053 void EmitNativeCode(CodeGenerator* codegen) override {
Vladimir Marko9922f002020-06-08 15:05:15 +010054 TAssembler* assembler = down_cast<TAssembler*>(codegen->GetAssembler());
55 assembler->Bind(this->GetEntryLabel());
Andreas Gampe85b62f22015-09-09 13:15:38 -070056
Vladimir Marko9922f002020-06-08 15:05:15 +010057 this->SaveLiveRegisters(codegen, invoke_->GetLocations());
Andreas Gampe85b62f22015-09-09 13:15:38 -070058
59 Location method_loc = MoveArguments(codegen);
60
61 if (invoke_->IsInvokeStaticOrDirect()) {
Vladimir Marko86c87522020-05-11 16:55:55 +010062 HInvokeStaticOrDirect* invoke_static_or_direct = invoke_->AsInvokeStaticOrDirect();
63 DCHECK_NE(invoke_static_or_direct->GetMethodLoadKind(),
64 HInvokeStaticOrDirect::MethodLoadKind::kRecursive);
65 DCHECK_NE(invoke_static_or_direct->GetCodePtrLocation(),
66 HInvokeStaticOrDirect::CodePtrLocation::kCallCriticalNative);
67 codegen->GenerateStaticOrDirectCall(invoke_static_or_direct, method_loc, this);
Andra Danciue3e187f2020-07-30 12:19:31 +000068 } else if (invoke_->IsInvokeVirtual()) {
Vladimir Markoe7197bf2017-06-02 17:00:23 +010069 codegen->GenerateVirtualCall(invoke_->AsInvokeVirtual(), method_loc, this);
Andra Danciue3e187f2020-07-30 12:19:31 +000070 } else {
71 DCHECK(invoke_->IsInvokePolymorphic());
72 codegen->GenerateInvokePolymorphicCall(invoke_->AsInvokePolymorphic(), this);
Andreas Gampe85b62f22015-09-09 13:15:38 -070073 }
Andreas Gampe85b62f22015-09-09 13:15:38 -070074
75 // Copy the result back to the expected output.
76 Location out = invoke_->GetLocations()->Out();
77 if (out.IsValid()) {
Andra Danciue3e187f2020-07-30 12:19:31 +000078 DCHECK(out.IsRegisterKind()); // TODO: Replace this when we support output in memory.
79 DCHECK(!invoke_->GetLocations()->GetLiveRegisters()->OverlapsRegisters(out));
Andreas Gampe85b62f22015-09-09 13:15:38 -070080 codegen->MoveFromReturnRegister(out, invoke_->GetType());
81 }
82
Vladimir Marko9922f002020-06-08 15:05:15 +010083 this->RestoreLiveRegisters(codegen, invoke_->GetLocations());
84 assembler->Jump(this->GetExitLabel());
Andreas Gampe85b62f22015-09-09 13:15:38 -070085 }
86
Roland Levillainbbc6e7e2018-08-24 16:58:47 +010087 const char* GetDescription() const override { return "IntrinsicSlowPath"; }
Andreas Gampe85b62f22015-09-09 13:15:38 -070088
89 private:
90 // The instruction where this slow path is happening.
91 HInvoke* const invoke_;
92
93 DISALLOW_COPY_AND_ASSIGN(IntrinsicSlowPath);
94};
}  // namespace art

#endif  // ART_COMPILER_OPTIMIZING_INTRINSICS_UTILS_H_