blob: 373d0ccb18576d8890ef2c920e7669dc431d22a2 [file] [log] [blame]
Alex Bradbury6b2cca72016-11-01 23:47:30 +00001//===-- RISCVAsmBackend.cpp - RISCV Assembler Backend ---------------------===//
2//
Chandler Carruth2946cd72019-01-19 08:50:56 +00003// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4// See https://llvm.org/LICENSE.txt for license information.
5// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
Alex Bradbury6b2cca72016-11-01 23:47:30 +00006//
7//===----------------------------------------------------------------------===//
8
Alex Bradbury9c03e4c2018-11-12 14:25:07 +00009#include "RISCVAsmBackend.h"
Alex Bradburyeb3a64a2018-12-20 14:52:15 +000010#include "RISCVMCExpr.h"
Alex Bradbury9d3f1252017-09-28 08:26:24 +000011#include "llvm/ADT/APInt.h"
James Clarke3f5976c2020-01-23 02:05:46 +000012#include "llvm/MC/MCAsmLayout.h"
Alex Bradbury6b2cca72016-11-01 23:47:30 +000013#include "llvm/MC/MCAssembler.h"
Alex Bradbury9d3f1252017-09-28 08:26:24 +000014#include "llvm/MC/MCContext.h"
Alex Bradbury6b2cca72016-11-01 23:47:30 +000015#include "llvm/MC/MCDirectives.h"
16#include "llvm/MC/MCELFObjectWriter.h"
Chandler Carruth6bda14b2017-06-06 11:49:48 +000017#include "llvm/MC/MCExpr.h"
Alex Bradbury6b2cca72016-11-01 23:47:30 +000018#include "llvm/MC/MCObjectWriter.h"
Alex Bradbury6b2cca72016-11-01 23:47:30 +000019#include "llvm/MC/MCSymbol.h"
Shiva Chen5af037f2019-01-30 11:16:59 +000020#include "llvm/MC/MCValue.h"
Alex Bradbury6b2cca72016-11-01 23:47:30 +000021#include "llvm/Support/ErrorHandling.h"
22#include "llvm/Support/raw_ostream.h"
23
24using namespace llvm;
25
Alex Bradburyeb3a64a2018-12-20 14:52:15 +000026// If linker relaxation is enabled, or the relax option had previously been
27// enabled, always emit relocations even if the fixup can be resolved. This is
28// necessary for correctness as offsets may change during relaxation.
29bool RISCVAsmBackend::shouldForceRelocation(const MCAssembler &Asm,
30 const MCFixup &Fixup,
31 const MCValue &Target) {
Sam Clegg90b6bb72019-08-23 01:00:55 +000032 switch (Fixup.getTargetKind()) {
Alex Bradburyeb3a64a2018-12-20 14:52:15 +000033 default:
34 break;
Alex Bradbury1c1f8f22019-08-19 13:23:02 +000035 case FK_Data_1:
36 case FK_Data_2:
37 case FK_Data_4:
38 case FK_Data_8:
39 if (Target.isAbsolute())
40 return false;
41 break;
Alex Bradbury8eb87e52019-02-15 09:43:46 +000042 case RISCV::fixup_riscv_got_hi20:
Lewis Revilldf3cb472019-04-23 14:46:13 +000043 case RISCV::fixup_riscv_tls_got_hi20:
44 case RISCV::fixup_riscv_tls_gd_hi20:
Alex Bradbury8eb87e52019-02-15 09:43:46 +000045 return true;
Alex Bradburyeb3a64a2018-12-20 14:52:15 +000046 }
47
James Clarke3f5976c2020-01-23 02:05:46 +000048 return STI.getFeatureBits()[RISCV::FeatureRelax] || ForceRelocs;
Alex Bradburyeb3a64a2018-12-20 14:52:15 +000049}
50
Shiva Chen6e07dfb2018-05-18 06:42:21 +000051bool RISCVAsmBackend::fixupNeedsRelaxationAdvanced(const MCFixup &Fixup,
52 bool Resolved,
53 uint64_t Value,
54 const MCRelaxableFragment *DF,
55 const MCAsmLayout &Layout,
56 const bool WasForced) const {
57 // Return true if the symbol is actually unresolved.
58 // Resolved could be always false when shouldForceRelocation return true.
59 // We use !WasForced to indicate that the symbol is unresolved and not forced
60 // by shouldForceRelocation.
61 if (!Resolved && !WasForced)
62 return true;
63
Sameer AbuAsal2646a412018-03-02 22:04:12 +000064 int64_t Offset = int64_t(Value);
Sam Clegg90b6bb72019-08-23 01:00:55 +000065 switch (Fixup.getTargetKind()) {
Sameer AbuAsal2646a412018-03-02 22:04:12 +000066 default:
67 return false;
68 case RISCV::fixup_riscv_rvc_branch:
69 // For compressed branch instructions the immediate must be
70 // in the range [-256, 254].
71 return Offset > 254 || Offset < -256;
72 case RISCV::fixup_riscv_rvc_jump:
73 // For compressed jump instructions the immediate must be
74 // in the range [-2048, 2046].
75 return Offset > 2046 || Offset < -2048;
76 }
77}
78
79void RISCVAsmBackend::relaxInstruction(const MCInst &Inst,
80 const MCSubtargetInfo &STI,
81 MCInst &Res) const {
82 // TODO: replace this with call to auto generated uncompressinstr() function.
83 switch (Inst.getOpcode()) {
84 default:
85 llvm_unreachable("Opcode not expected!");
86 case RISCV::C_BEQZ:
87 // c.beqz $rs1, $imm -> beq $rs1, X0, $imm.
88 Res.setOpcode(RISCV::BEQ);
89 Res.addOperand(Inst.getOperand(0));
90 Res.addOperand(MCOperand::createReg(RISCV::X0));
91 Res.addOperand(Inst.getOperand(1));
92 break;
93 case RISCV::C_BNEZ:
94 // c.bnez $rs1, $imm -> bne $rs1, X0, $imm.
95 Res.setOpcode(RISCV::BNE);
96 Res.addOperand(Inst.getOperand(0));
97 Res.addOperand(MCOperand::createReg(RISCV::X0));
98 Res.addOperand(Inst.getOperand(1));
99 break;
100 case RISCV::C_J:
101 // c.j $imm -> jal X0, $imm.
102 Res.setOpcode(RISCV::JAL);
103 Res.addOperand(MCOperand::createReg(RISCV::X0));
104 Res.addOperand(Inst.getOperand(0));
105 break;
106 case RISCV::C_JAL:
107 // c.jal $imm -> jal X1, $imm.
108 Res.setOpcode(RISCV::JAL);
109 Res.addOperand(MCOperand::createReg(RISCV::X1));
110 Res.addOperand(Inst.getOperand(0));
111 break;
112 }
113}
114
115// Given a compressed control flow instruction this function returns
116// the expanded instruction.
117unsigned RISCVAsmBackend::getRelaxedOpcode(unsigned Op) const {
118 switch (Op) {
119 default:
120 return Op;
121 case RISCV::C_BEQZ:
122 return RISCV::BEQ;
123 case RISCV::C_BNEZ:
124 return RISCV::BNE;
125 case RISCV::C_J:
126 case RISCV::C_JAL: // fall through.
127 return RISCV::JAL;
128 }
129}
130
Ilya Biryukov3c9c1062018-06-06 10:57:50 +0000131bool RISCVAsmBackend::mayNeedRelaxation(const MCInst &Inst,
132 const MCSubtargetInfo &STI) const {
Sameer AbuAsal2646a412018-03-02 22:04:12 +0000133 return getRelaxedOpcode(Inst.getOpcode()) != Inst.getOpcode();
134}
135
Peter Collingbourne571a3302018-05-21 17:57:19 +0000136bool RISCVAsmBackend::writeNopData(raw_ostream &OS, uint64_t Count) const {
Alex Bradburyd93f8892018-01-17 14:17:12 +0000137 bool HasStdExtC = STI.getFeatureBits()[RISCV::FeatureStdExtC];
138 unsigned MinNopLen = HasStdExtC ? 2 : 4;
139
140 if ((Count % MinNopLen) != 0)
Alex Bradbury6b2cca72016-11-01 23:47:30 +0000141 return false;
142
Alex Bradburyd93f8892018-01-17 14:17:12 +0000143 // The canonical nop on RISC-V is addi x0, x0, 0.
Fangrui Song44cc4e92019-06-15 06:14:15 +0000144 for (; Count >= 4; Count -= 4)
Peter Collingbourne571a3302018-05-21 17:57:19 +0000145 OS.write("\x13\0\0\0", 4);
Alex Bradbury6b2cca72016-11-01 23:47:30 +0000146
Alex Bradburyd93f8892018-01-17 14:17:12 +0000147 // The canonical nop on RVC is c.nop.
Fangrui Song44cc4e92019-06-15 06:14:15 +0000148 if (Count && HasStdExtC)
149 OS.write("\x01\0", 2);
Alex Bradburyd93f8892018-01-17 14:17:12 +0000150
Alex Bradbury6b2cca72016-11-01 23:47:30 +0000151 return true;
152}
153
Alex Bradbury9d3f1252017-09-28 08:26:24 +0000154static uint64_t adjustFixupValue(const MCFixup &Fixup, uint64_t Value,
155 MCContext &Ctx) {
Sam Clegg90b6bb72019-08-23 01:00:55 +0000156 switch (Fixup.getTargetKind()) {
Alex Bradbury9d3f1252017-09-28 08:26:24 +0000157 default:
158 llvm_unreachable("Unknown fixup kind!");
Alex Bradbury8eb87e52019-02-15 09:43:46 +0000159 case RISCV::fixup_riscv_got_hi20:
Lewis Revilldf3cb472019-04-23 14:46:13 +0000160 case RISCV::fixup_riscv_tls_got_hi20:
161 case RISCV::fixup_riscv_tls_gd_hi20:
Alex Bradbury8eb87e52019-02-15 09:43:46 +0000162 llvm_unreachable("Relocation should be unconditionally forced\n");
Alex Bradbury9d3f1252017-09-28 08:26:24 +0000163 case FK_Data_1:
164 case FK_Data_2:
165 case FK_Data_4:
166 case FK_Data_8:
Hsiangkai Wang18ccfad2019-07-19 02:03:34 +0000167 case FK_Data_6b:
Alex Bradbury9d3f1252017-09-28 08:26:24 +0000168 return Value;
169 case RISCV::fixup_riscv_lo12_i:
Ahmed Charles646ab872018-02-06 00:55:23 +0000170 case RISCV::fixup_riscv_pcrel_lo12_i:
Lewis Revillaa79a3f2019-04-04 14:13:37 +0000171 case RISCV::fixup_riscv_tprel_lo12_i:
Alex Bradbury9d3f1252017-09-28 08:26:24 +0000172 return Value & 0xfff;
173 case RISCV::fixup_riscv_lo12_s:
Ahmed Charles646ab872018-02-06 00:55:23 +0000174 case RISCV::fixup_riscv_pcrel_lo12_s:
Lewis Revillaa79a3f2019-04-04 14:13:37 +0000175 case RISCV::fixup_riscv_tprel_lo12_s:
Alex Bradbury9d3f1252017-09-28 08:26:24 +0000176 return (((Value >> 5) & 0x7f) << 25) | ((Value & 0x1f) << 7);
177 case RISCV::fixup_riscv_hi20:
178 case RISCV::fixup_riscv_pcrel_hi20:
Lewis Revillaa79a3f2019-04-04 14:13:37 +0000179 case RISCV::fixup_riscv_tprel_hi20:
Alex Bradbury9d3f1252017-09-28 08:26:24 +0000180 // Add 1 if bit 11 is 1, to compensate for low 12 bits being negative.
181 return ((Value + 0x800) >> 12) & 0xfffff;
182 case RISCV::fixup_riscv_jal: {
183 if (!isInt<21>(Value))
184 Ctx.reportError(Fixup.getLoc(), "fixup value out of range");
185 if (Value & 0x1)
186 Ctx.reportError(Fixup.getLoc(), "fixup value must be 2-byte aligned");
187 // Need to produce imm[19|10:1|11|19:12] from the 21-bit Value.
188 unsigned Sbit = (Value >> 20) & 0x1;
189 unsigned Hi8 = (Value >> 12) & 0xff;
190 unsigned Mid1 = (Value >> 11) & 0x1;
191 unsigned Lo10 = (Value >> 1) & 0x3ff;
192 // Inst{31} = Sbit;
193 // Inst{30-21} = Lo10;
194 // Inst{20} = Mid1;
195 // Inst{19-12} = Hi8;
196 Value = (Sbit << 19) | (Lo10 << 9) | (Mid1 << 8) | Hi8;
197 return Value;
198 }
199 case RISCV::fixup_riscv_branch: {
200 if (!isInt<13>(Value))
201 Ctx.reportError(Fixup.getLoc(), "fixup value out of range");
202 if (Value & 0x1)
203 Ctx.reportError(Fixup.getLoc(), "fixup value must be 2-byte aligned");
204 // Need to extract imm[12], imm[10:5], imm[4:1], imm[11] from the 13-bit
205 // Value.
206 unsigned Sbit = (Value >> 12) & 0x1;
207 unsigned Hi1 = (Value >> 11) & 0x1;
208 unsigned Mid6 = (Value >> 5) & 0x3f;
209 unsigned Lo4 = (Value >> 1) & 0xf;
210 // Inst{31} = Sbit;
211 // Inst{30-25} = Mid6;
212 // Inst{11-8} = Lo4;
213 // Inst{7} = Hi1;
214 Value = (Sbit << 31) | (Mid6 << 25) | (Lo4 << 8) | (Hi1 << 7);
215 return Value;
216 }
Alex Bradburyf8078f62019-04-02 12:47:20 +0000217 case RISCV::fixup_riscv_call:
218 case RISCV::fixup_riscv_call_plt: {
Shiva Chenc3d0e892018-05-30 01:16:36 +0000219 // Jalr will add UpperImm with the sign-extended 12-bit LowerImm,
220 // we need to add 0x800ULL before extract upper bits to reflect the
221 // effect of the sign extension.
222 uint64_t UpperImm = (Value + 0x800ULL) & 0xfffff000ULL;
223 uint64_t LowerImm = Value & 0xfffULL;
224 return UpperImm | ((LowerImm << 20) << 32);
225 }
Alex Bradburyf8f4b902017-12-07 13:19:57 +0000226 case RISCV::fixup_riscv_rvc_jump: {
227 // Need to produce offset[11|4|9:8|10|6|7|3:1|5] from the 11-bit Value.
228 unsigned Bit11 = (Value >> 11) & 0x1;
229 unsigned Bit4 = (Value >> 4) & 0x1;
230 unsigned Bit9_8 = (Value >> 8) & 0x3;
231 unsigned Bit10 = (Value >> 10) & 0x1;
232 unsigned Bit6 = (Value >> 6) & 0x1;
233 unsigned Bit7 = (Value >> 7) & 0x1;
234 unsigned Bit3_1 = (Value >> 1) & 0x7;
235 unsigned Bit5 = (Value >> 5) & 0x1;
236 Value = (Bit11 << 10) | (Bit4 << 9) | (Bit9_8 << 7) | (Bit10 << 6) |
237 (Bit6 << 5) | (Bit7 << 4) | (Bit3_1 << 1) | Bit5;
238 return Value;
239 }
240 case RISCV::fixup_riscv_rvc_branch: {
241 // Need to produce offset[8|4:3], [reg 3 bit], offset[7:6|2:1|5]
242 unsigned Bit8 = (Value >> 8) & 0x1;
243 unsigned Bit7_6 = (Value >> 6) & 0x3;
244 unsigned Bit5 = (Value >> 5) & 0x1;
245 unsigned Bit4_3 = (Value >> 3) & 0x3;
246 unsigned Bit2_1 = (Value >> 1) & 0x3;
247 Value = (Bit8 << 12) | (Bit4_3 << 10) | (Bit7_6 << 5) | (Bit2_1 << 3) |
248 (Bit5 << 2);
249 return Value;
250 }
Alex Bradbury9d3f1252017-09-28 08:26:24 +0000251
252 }
253}
254
// Attempt to resolve a %pcrel_hi20 / %pcrel_lo12 fixup locally, without a
// relocation. Returns true when Value has been computed (or a fatal error was
// reported); sets WasForced when shouldForceRelocation demands a relocation
// for the paired AUIPC fixup anyway.
bool RISCVAsmBackend::evaluateTargetFixup(
    const MCAssembler &Asm, const MCAsmLayout &Layout, const MCFixup &Fixup,
    const MCFragment *DF, const MCValue &Target, uint64_t &Value,
    bool &WasForced) {
  // All evaluation happens relative to the AUIPC (%pcrel_hi) side of the
  // pair; these locate it.
  const MCFixup *AUIPCFixup;
  const MCFragment *AUIPCDF;
  MCValue AUIPCTarget;
  switch (Fixup.getTargetKind()) {
  default:
    llvm_unreachable("Unexpected fixup kind!");
  case RISCV::fixup_riscv_pcrel_hi20:
    // This fixup is itself the AUIPC side.
    AUIPCFixup = &Fixup;
    AUIPCDF = DF;
    AUIPCTarget = Target;
    break;
  case RISCV::fixup_riscv_pcrel_lo12_i:
  case RISCV::fixup_riscv_pcrel_lo12_s: {
    // A %pcrel_lo refers to the label of its matching %pcrel_hi; look that
    // AUIPC fixup up and evaluate against its target expression instead.
    AUIPCFixup = cast<RISCVMCExpr>(Fixup.getValue())->getPCRelHiFixup(&AUIPCDF);
    if (!AUIPCFixup) {
      Asm.getContext().reportError(Fixup.getLoc(),
                                   "could not find corresponding %pcrel_hi");
      return true;
    }

    // MCAssembler::evaluateFixup will emit an error for this case when it sees
    // the %pcrel_hi, so don't duplicate it when also seeing the %pcrel_lo.
    const MCExpr *AUIPCExpr = AUIPCFixup->getValue();
    if (!AUIPCExpr->evaluateAsRelocatable(AUIPCTarget, &Layout, AUIPCFixup))
      return true;
    break;
  }
  }

  // Only a single plain symbol reference (no symbol difference) can be
  // resolved without a relocation.
  if (!AUIPCTarget.getSymA() || AUIPCTarget.getSymB())
    return false;

  // Reject variant references (e.g. @plt) and undefined symbols.
  const MCSymbolRefExpr *A = AUIPCTarget.getSymA();
  const MCSymbol &SA = A->getSymbol();
  if (A->getKind() != MCSymbolRefExpr::VK_None || SA.isUndefined())
    return false;

  auto *Writer = Asm.getWriterPtr();
  if (!Writer)
    return false;

  // The symbol/fragment difference must be fully resolvable by the object
  // writer (same section/comdat constraints etc.).
  bool IsResolved = Writer->isSymbolRefDifferenceFullyResolvedImpl(
      Asm, SA, *AUIPCDF, false, true);
  if (!IsResolved)
    return false;

  // PC-relative distance from the AUIPC instruction to the target symbol,
  // including any constant addend.
  Value = Layout.getSymbolOffset(SA) + AUIPCTarget.getConstant();
  Value -= Layout.getFragmentOffset(AUIPCDF) + AUIPCFixup->getOffset();

  // Even a resolvable pair must be relocated when relaxation is in effect.
  if (shouldForceRelocation(Asm, *AUIPCFixup, AUIPCTarget)) {
    WasForced = true;
    return false;
  }

  return true;
}
315
Rafael Espindola801b42d2017-06-23 22:52:36 +0000316void RISCVAsmBackend::applyFixup(const MCAssembler &Asm, const MCFixup &Fixup,
317 const MCValue &Target,
Rafael Espindola88d9e372017-06-21 23:06:53 +0000318 MutableArrayRef<char> Data, uint64_t Value,
Ilya Biryukov3c9c1062018-06-06 10:57:50 +0000319 bool IsResolved,
320 const MCSubtargetInfo *STI) const {
Alex Bradbury9d3f1252017-09-28 08:26:24 +0000321 MCContext &Ctx = Asm.getContext();
Mandeep Singh Grang5f043ae2017-11-10 19:09:28 +0000322 MCFixupKindInfo Info = getFixupKindInfo(Fixup.getKind());
Alex Bradbury9d3f1252017-09-28 08:26:24 +0000323 if (!Value)
324 return; // Doesn't change encoding.
Alex Bradbury9d3f1252017-09-28 08:26:24 +0000325 // Apply any target-specific value adjustments.
326 Value = adjustFixupValue(Fixup, Value, Ctx);
327
328 // Shift the value into position.
329 Value <<= Info.TargetOffset;
330
331 unsigned Offset = Fixup.getOffset();
Alex Bradbury1c010d02018-05-23 10:53:56 +0000332 unsigned NumBytes = alignTo(Info.TargetSize + Info.TargetOffset, 8) / 8;
Mandeep Singh Grang5f043ae2017-11-10 19:09:28 +0000333
Alex Bradbury9d3f1252017-09-28 08:26:24 +0000334 assert(Offset + NumBytes <= Data.size() && "Invalid fixup offset!");
335
336 // For each byte of the fragment that the fixup touches, mask in the
337 // bits from the fixup value.
Alex Bradbury1c010d02018-05-23 10:53:56 +0000338 for (unsigned i = 0; i != NumBytes; ++i) {
Alex Bradbury9d3f1252017-09-28 08:26:24 +0000339 Data[Offset + i] |= uint8_t((Value >> (i * 8)) & 0xff);
340 }
Alex Bradbury6b2cca72016-11-01 23:47:30 +0000341}
342
Shiva Chen5af037f2019-01-30 11:16:59 +0000343// Linker relaxation may change code size. We have to insert Nops
344// for .align directive when linker relaxation enabled. So then Linker
345// could satisfy alignment by removing Nops.
346// The function return the total Nops Size we need to insert.
347bool RISCVAsmBackend::shouldInsertExtraNopBytesForCodeAlign(
348 const MCAlignFragment &AF, unsigned &Size) {
349 // Calculate Nops Size only when linker relaxation enabled.
350 if (!STI.getFeatureBits()[RISCV::FeatureRelax])
351 return false;
352
353 bool HasStdExtC = STI.getFeatureBits()[RISCV::FeatureStdExtC];
354 unsigned MinNopLen = HasStdExtC ? 2 : 4;
355
Alex Bradburybb479ca2019-07-16 04:40:25 +0000356 if (AF.getAlignment() <= MinNopLen) {
357 return false;
358 } else {
359 Size = AF.getAlignment() - MinNopLen;
360 return true;
361 }
Shiva Chen5af037f2019-01-30 11:16:59 +0000362}
363
364// We need to insert R_RISCV_ALIGN relocation type to indicate the
365// position of Nops and the total bytes of the Nops have been inserted
366// when linker relaxation enabled.
367// The function insert fixup_riscv_align fixup which eventually will
368// transfer to R_RISCV_ALIGN relocation type.
369bool RISCVAsmBackend::shouldInsertFixupForCodeAlign(MCAssembler &Asm,
370 const MCAsmLayout &Layout,
371 MCAlignFragment &AF) {
372 // Insert the fixup only when linker relaxation enabled.
373 if (!STI.getFeatureBits()[RISCV::FeatureRelax])
374 return false;
375
Alex Bradburye9ad0cf2019-07-16 04:37:19 +0000376 // Calculate total Nops we need to insert. If there are none to insert
377 // then simply return.
Shiva Chen5af037f2019-01-30 11:16:59 +0000378 unsigned Count;
Alex Bradburye9ad0cf2019-07-16 04:37:19 +0000379 if (!shouldInsertExtraNopBytesForCodeAlign(AF, Count) || (Count == 0))
Shiva Chen5af037f2019-01-30 11:16:59 +0000380 return false;
381
382 MCContext &Ctx = Asm.getContext();
383 const MCExpr *Dummy = MCConstantExpr::create(0, Ctx);
384 // Create fixup_riscv_align fixup.
385 MCFixup Fixup =
386 MCFixup::create(0, Dummy, MCFixupKind(RISCV::fixup_riscv_align), SMLoc());
387
388 uint64_t FixedValue = 0;
389 MCValue NopBytes = MCValue::get(Count);
390
391 Asm.getWriter().recordRelocation(Asm, Layout, &AF, Fixup, NopBytes,
392 FixedValue);
393
394 return true;
395}
396
// Create the ELF object writer matching this backend's OS/ABI byte and
// 32/64-bit flavour.
std::unique_ptr<MCObjectTargetWriter>
RISCVAsmBackend::createObjectTargetWriter() const {
  return createRISCVELFObjectWriter(OSABI, Is64Bit);
}
401
Alex Bradbury6b2cca72016-11-01 23:47:30 +0000402MCAsmBackend *llvm::createRISCVAsmBackend(const Target &T,
Alex Bradburyb22f7512018-01-03 08:53:05 +0000403 const MCSubtargetInfo &STI,
Alex Bradbury6b2cca72016-11-01 23:47:30 +0000404 const MCRegisterInfo &MRI,
Alex Bradbury6b2cca72016-11-01 23:47:30 +0000405 const MCTargetOptions &Options) {
Alex Bradburyb22f7512018-01-03 08:53:05 +0000406 const Triple &TT = STI.getTargetTriple();
Alex Bradbury6b2cca72016-11-01 23:47:30 +0000407 uint8_t OSABI = MCELFObjectTargetWriter::getOSABI(TT.getOS());
Alex Bradburyfea49572019-03-09 09:28:06 +0000408 return new RISCVAsmBackend(STI, OSABI, TT.isArch64Bit(), Options);
Alex Bradbury6b2cca72016-11-01 23:47:30 +0000409}