Alex Bradbury | 6b2cca7 | 2016-11-01 23:47:30 +0000 | [diff] [blame] | 1 | //===-- RISCVAsmBackend.cpp - RISCV Assembler Backend ---------------------===// |
| 2 | // |
Chandler Carruth | 2946cd7 | 2019-01-19 08:50:56 +0000 | [diff] [blame] | 3 | // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. |
| 4 | // See https://llvm.org/LICENSE.txt for license information. |
| 5 | // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception |
Alex Bradbury | 6b2cca7 | 2016-11-01 23:47:30 +0000 | [diff] [blame] | 6 | // |
| 7 | //===----------------------------------------------------------------------===// |
| 8 | |
Alex Bradbury | 9c03e4c | 2018-11-12 14:25:07 +0000 | [diff] [blame] | 9 | #include "RISCVAsmBackend.h" |
Alex Bradbury | eb3a64a | 2018-12-20 14:52:15 +0000 | [diff] [blame] | 10 | #include "RISCVMCExpr.h" |
Alex Bradbury | 9d3f125 | 2017-09-28 08:26:24 +0000 | [diff] [blame] | 11 | #include "llvm/ADT/APInt.h" |
Alex Bradbury | 6b2cca7 | 2016-11-01 23:47:30 +0000 | [diff] [blame] | 12 | #include "llvm/MC/MCAssembler.h" |
Alex Bradbury | 9d3f125 | 2017-09-28 08:26:24 +0000 | [diff] [blame] | 13 | #include "llvm/MC/MCContext.h" |
Alex Bradbury | 6b2cca7 | 2016-11-01 23:47:30 +0000 | [diff] [blame] | 14 | #include "llvm/MC/MCDirectives.h" |
| 15 | #include "llvm/MC/MCELFObjectWriter.h" |
Chandler Carruth | 6bda14b | 2017-06-06 11:49:48 +0000 | [diff] [blame] | 16 | #include "llvm/MC/MCExpr.h" |
Alex Bradbury | 6b2cca7 | 2016-11-01 23:47:30 +0000 | [diff] [blame] | 17 | #include "llvm/MC/MCObjectWriter.h" |
Alex Bradbury | 6b2cca7 | 2016-11-01 23:47:30 +0000 | [diff] [blame] | 18 | #include "llvm/MC/MCSymbol.h" |
Shiva Chen | 5af037f | 2019-01-30 11:16:59 +0000 | [diff] [blame] | 19 | #include "llvm/MC/MCValue.h" |
Alex Bradbury | 6b2cca7 | 2016-11-01 23:47:30 +0000 | [diff] [blame] | 20 | #include "llvm/Support/ErrorHandling.h" |
| 21 | #include "llvm/Support/raw_ostream.h" |
| 22 | |
| 23 | using namespace llvm; |
| 24 | |
// If linker relaxation is enabled, or the relax option had previously been
// enabled, always emit relocations even if the fixup can be resolved. This is
// necessary for correctness as offsets may change during relaxation.
bool RISCVAsmBackend::shouldForceRelocation(const MCAssembler &Asm,
                                            const MCFixup &Fixup,
                                            const MCValue &Target) {
  bool ShouldForce = false;

  switch (Fixup.getTargetKind()) {
  default:
    break;
  case FK_Data_1:
  case FK_Data_2:
  case FK_Data_4:
  case FK_Data_8:
    // A fully absolute data fixup never needs a relocation, even under
    // relaxation: its value cannot change.
    if (Target.isAbsolute())
      return false;
    break;
  case RISCV::fixup_riscv_got_hi20:
  case RISCV::fixup_riscv_tls_got_hi20:
  case RISCV::fixup_riscv_tls_gd_hi20:
    // GOT and TLS references always go through a relocation
    // (adjustFixupValue treats reaching these kinds as unreachable).
    return true;
  case RISCV::fixup_riscv_pcrel_lo12_i:
  case RISCV::fixup_riscv_pcrel_lo12_s:
    // For pcrel_lo12, force a relocation if the target of the corresponding
    // pcrel_hi20 is not in the same fragment.
    const MCFixup *T = cast<RISCVMCExpr>(Fixup.getValue())->getPCRelHiFixup();
    if (!T) {
      Asm.getContext().reportError(Fixup.getLoc(),
                                   "could not find corresponding %pcrel_hi");
      return false;
    }

    // Whether the lo fixup must be forced depends on the kind of the paired
    // hi fixup.
    switch (T->getTargetKind()) {
    default:
      llvm_unreachable("Unexpected fixup kind for pcrel_lo12");
      break;
    case RISCV::fixup_riscv_got_hi20:
    case RISCV::fixup_riscv_tls_got_hi20:
    case RISCV::fixup_riscv_tls_gd_hi20:
      // The hi part is forced (see above), so the lo part must be too.
      ShouldForce = true;
      break;
    case RISCV::fixup_riscv_pcrel_hi20: {
      // Force when the hi fixup's target has no fragment, or lives in a
      // different section than this fixup's fragment.
      MCFragment *TFragment = T->getValue()->findAssociatedFragment();
      MCFragment *FixupFragment = Fixup.getValue()->findAssociatedFragment();
      assert(FixupFragment && "We should have a fragment for this fixup");
      ShouldForce =
          !TFragment || TFragment->getParent() != FixupFragment->getParent();
      break;
    }
    }
    break;
  }

  // Also force when linker relaxation is (or previously was) enabled, as
  // offsets may still change.
  return ShouldForce || STI.getFeatureBits()[RISCV::FeatureRelax] ||
         ForceRelocs;
}
| 82 | |
Shiva Chen | 6e07dfb | 2018-05-18 06:42:21 +0000 | [diff] [blame] | 83 | bool RISCVAsmBackend::fixupNeedsRelaxationAdvanced(const MCFixup &Fixup, |
| 84 | bool Resolved, |
| 85 | uint64_t Value, |
| 86 | const MCRelaxableFragment *DF, |
| 87 | const MCAsmLayout &Layout, |
| 88 | const bool WasForced) const { |
| 89 | // Return true if the symbol is actually unresolved. |
| 90 | // Resolved could be always false when shouldForceRelocation return true. |
| 91 | // We use !WasForced to indicate that the symbol is unresolved and not forced |
| 92 | // by shouldForceRelocation. |
| 93 | if (!Resolved && !WasForced) |
| 94 | return true; |
| 95 | |
Sameer AbuAsal | 2646a41 | 2018-03-02 22:04:12 +0000 | [diff] [blame] | 96 | int64_t Offset = int64_t(Value); |
Sam Clegg | 90b6bb7 | 2019-08-23 01:00:55 +0000 | [diff] [blame] | 97 | switch (Fixup.getTargetKind()) { |
Sameer AbuAsal | 2646a41 | 2018-03-02 22:04:12 +0000 | [diff] [blame] | 98 | default: |
| 99 | return false; |
| 100 | case RISCV::fixup_riscv_rvc_branch: |
| 101 | // For compressed branch instructions the immediate must be |
| 102 | // in the range [-256, 254]. |
| 103 | return Offset > 254 || Offset < -256; |
| 104 | case RISCV::fixup_riscv_rvc_jump: |
| 105 | // For compressed jump instructions the immediate must be |
| 106 | // in the range [-2048, 2046]. |
| 107 | return Offset > 2046 || Offset < -2048; |
| 108 | } |
| 109 | } |
| 110 | |
| 111 | void RISCVAsmBackend::relaxInstruction(const MCInst &Inst, |
| 112 | const MCSubtargetInfo &STI, |
| 113 | MCInst &Res) const { |
| 114 | // TODO: replace this with call to auto generated uncompressinstr() function. |
| 115 | switch (Inst.getOpcode()) { |
| 116 | default: |
| 117 | llvm_unreachable("Opcode not expected!"); |
| 118 | case RISCV::C_BEQZ: |
| 119 | // c.beqz $rs1, $imm -> beq $rs1, X0, $imm. |
| 120 | Res.setOpcode(RISCV::BEQ); |
| 121 | Res.addOperand(Inst.getOperand(0)); |
| 122 | Res.addOperand(MCOperand::createReg(RISCV::X0)); |
| 123 | Res.addOperand(Inst.getOperand(1)); |
| 124 | break; |
| 125 | case RISCV::C_BNEZ: |
| 126 | // c.bnez $rs1, $imm -> bne $rs1, X0, $imm. |
| 127 | Res.setOpcode(RISCV::BNE); |
| 128 | Res.addOperand(Inst.getOperand(0)); |
| 129 | Res.addOperand(MCOperand::createReg(RISCV::X0)); |
| 130 | Res.addOperand(Inst.getOperand(1)); |
| 131 | break; |
| 132 | case RISCV::C_J: |
| 133 | // c.j $imm -> jal X0, $imm. |
| 134 | Res.setOpcode(RISCV::JAL); |
| 135 | Res.addOperand(MCOperand::createReg(RISCV::X0)); |
| 136 | Res.addOperand(Inst.getOperand(0)); |
| 137 | break; |
| 138 | case RISCV::C_JAL: |
| 139 | // c.jal $imm -> jal X1, $imm. |
| 140 | Res.setOpcode(RISCV::JAL); |
| 141 | Res.addOperand(MCOperand::createReg(RISCV::X1)); |
| 142 | Res.addOperand(Inst.getOperand(0)); |
| 143 | break; |
| 144 | } |
| 145 | } |
| 146 | |
| 147 | // Given a compressed control flow instruction this function returns |
| 148 | // the expanded instruction. |
| 149 | unsigned RISCVAsmBackend::getRelaxedOpcode(unsigned Op) const { |
| 150 | switch (Op) { |
| 151 | default: |
| 152 | return Op; |
| 153 | case RISCV::C_BEQZ: |
| 154 | return RISCV::BEQ; |
| 155 | case RISCV::C_BNEZ: |
| 156 | return RISCV::BNE; |
| 157 | case RISCV::C_J: |
| 158 | case RISCV::C_JAL: // fall through. |
| 159 | return RISCV::JAL; |
| 160 | } |
| 161 | } |
| 162 | |
Ilya Biryukov | 3c9c106 | 2018-06-06 10:57:50 +0000 | [diff] [blame] | 163 | bool RISCVAsmBackend::mayNeedRelaxation(const MCInst &Inst, |
| 164 | const MCSubtargetInfo &STI) const { |
Sameer AbuAsal | 2646a41 | 2018-03-02 22:04:12 +0000 | [diff] [blame] | 165 | return getRelaxedOpcode(Inst.getOpcode()) != Inst.getOpcode(); |
| 166 | } |
| 167 | |
Peter Collingbourne | 571a330 | 2018-05-21 17:57:19 +0000 | [diff] [blame] | 168 | bool RISCVAsmBackend::writeNopData(raw_ostream &OS, uint64_t Count) const { |
Alex Bradbury | d93f889 | 2018-01-17 14:17:12 +0000 | [diff] [blame] | 169 | bool HasStdExtC = STI.getFeatureBits()[RISCV::FeatureStdExtC]; |
| 170 | unsigned MinNopLen = HasStdExtC ? 2 : 4; |
| 171 | |
| 172 | if ((Count % MinNopLen) != 0) |
Alex Bradbury | 6b2cca7 | 2016-11-01 23:47:30 +0000 | [diff] [blame] | 173 | return false; |
| 174 | |
Alex Bradbury | d93f889 | 2018-01-17 14:17:12 +0000 | [diff] [blame] | 175 | // The canonical nop on RISC-V is addi x0, x0, 0. |
Fangrui Song | 44cc4e9 | 2019-06-15 06:14:15 +0000 | [diff] [blame] | 176 | for (; Count >= 4; Count -= 4) |
Peter Collingbourne | 571a330 | 2018-05-21 17:57:19 +0000 | [diff] [blame] | 177 | OS.write("\x13\0\0\0", 4); |
Alex Bradbury | 6b2cca7 | 2016-11-01 23:47:30 +0000 | [diff] [blame] | 178 | |
Alex Bradbury | d93f889 | 2018-01-17 14:17:12 +0000 | [diff] [blame] | 179 | // The canonical nop on RVC is c.nop. |
Fangrui Song | 44cc4e9 | 2019-06-15 06:14:15 +0000 | [diff] [blame] | 180 | if (Count && HasStdExtC) |
| 181 | OS.write("\x01\0", 2); |
Alex Bradbury | d93f889 | 2018-01-17 14:17:12 +0000 | [diff] [blame] | 182 | |
Alex Bradbury | 6b2cca7 | 2016-11-01 23:47:30 +0000 | [diff] [blame] | 183 | return true; |
| 184 | } |
| 185 | |
// Transform the raw fixup Value into the bit layout of the instruction (or
// data) field identified by the fixup kind. Range and alignment violations
// are reported through Ctx.
static uint64_t adjustFixupValue(const MCFixup &Fixup, uint64_t Value,
                                 MCContext &Ctx) {
  switch (Fixup.getTargetKind()) {
  default:
    llvm_unreachable("Unknown fixup kind!");
  case RISCV::fixup_riscv_got_hi20:
  case RISCV::fixup_riscv_tls_got_hi20:
  case RISCV::fixup_riscv_tls_gd_hi20:
    // shouldForceRelocation forces these kinds to a relocation, so they must
    // never be applied as fixups.
    llvm_unreachable("Relocation should be unconditionally forced\n");
  case FK_Data_1:
  case FK_Data_2:
  case FK_Data_4:
  case FK_Data_8:
  case FK_Data_6b:
    // Plain data fixups are stored as-is.
    return Value;
  case RISCV::fixup_riscv_lo12_i:
  case RISCV::fixup_riscv_pcrel_lo12_i:
  case RISCV::fixup_riscv_tprel_lo12_i:
    // I-type immediate: low 12 bits, contiguous.
    return Value & 0xfff;
  case RISCV::fixup_riscv_lo12_s:
  case RISCV::fixup_riscv_pcrel_lo12_s:
  case RISCV::fixup_riscv_tprel_lo12_s:
    // S-type immediate: imm[11:5] in bits 31-25, imm[4:0] in bits 11-7.
    return (((Value >> 5) & 0x7f) << 25) | ((Value & 0x1f) << 7);
  case RISCV::fixup_riscv_hi20:
  case RISCV::fixup_riscv_pcrel_hi20:
  case RISCV::fixup_riscv_tprel_hi20:
    // Add 1 if bit 11 is 1, to compensate for low 12 bits being negative.
    return ((Value + 0x800) >> 12) & 0xfffff;
  case RISCV::fixup_riscv_jal: {
    if (!isInt<21>(Value))
      Ctx.reportError(Fixup.getLoc(), "fixup value out of range");
    if (Value & 0x1)
      Ctx.reportError(Fixup.getLoc(), "fixup value must be 2-byte aligned");
    // Need to produce imm[19|10:1|11|19:12] from the 21-bit Value.
    unsigned Sbit = (Value >> 20) & 0x1;
    unsigned Hi8 = (Value >> 12) & 0xff;
    unsigned Mid1 = (Value >> 11) & 0x1;
    unsigned Lo10 = (Value >> 1) & 0x3ff;
    // Inst{31} = Sbit;
    // Inst{30-21} = Lo10;
    // Inst{20} = Mid1;
    // Inst{19-12} = Hi8;
    Value = (Sbit << 19) | (Lo10 << 9) | (Mid1 << 8) | Hi8;
    return Value;
  }
  case RISCV::fixup_riscv_branch: {
    if (!isInt<13>(Value))
      Ctx.reportError(Fixup.getLoc(), "fixup value out of range");
    if (Value & 0x1)
      Ctx.reportError(Fixup.getLoc(), "fixup value must be 2-byte aligned");
    // Need to extract imm[12], imm[10:5], imm[4:1], imm[11] from the 13-bit
    // Value.
    unsigned Sbit = (Value >> 12) & 0x1;
    unsigned Hi1 = (Value >> 11) & 0x1;
    unsigned Mid6 = (Value >> 5) & 0x3f;
    unsigned Lo4 = (Value >> 1) & 0xf;
    // Inst{31} = Sbit;
    // Inst{30-25} = Mid6;
    // Inst{11-8} = Lo4;
    // Inst{7} = Hi1;
    Value = (Sbit << 31) | (Mid6 << 25) | (Lo4 << 8) | (Hi1 << 7);
    return Value;
  }
  case RISCV::fixup_riscv_call:
  case RISCV::fixup_riscv_call_plt: {
    // Jalr will add UpperImm with the sign-extended 12-bit LowerImm,
    // we need to add 0x800ULL before extract upper bits to reflect the
    // effect of the sign extension.
    // The result packs the auipc immediate in the low word and the jalr
    // immediate (pre-shifted into bits 31-20) in the high word.
    uint64_t UpperImm = (Value + 0x800ULL) & 0xfffff000ULL;
    uint64_t LowerImm = Value & 0xfffULL;
    return UpperImm | ((LowerImm << 20) << 32);
  }
  case RISCV::fixup_riscv_rvc_jump: {
    // Need to produce offset[11|4|9:8|10|6|7|3:1|5] from the 11-bit Value.
    unsigned Bit11 = (Value >> 11) & 0x1;
    unsigned Bit4 = (Value >> 4) & 0x1;
    unsigned Bit9_8 = (Value >> 8) & 0x3;
    unsigned Bit10 = (Value >> 10) & 0x1;
    unsigned Bit6 = (Value >> 6) & 0x1;
    unsigned Bit7 = (Value >> 7) & 0x1;
    unsigned Bit3_1 = (Value >> 1) & 0x7;
    unsigned Bit5 = (Value >> 5) & 0x1;
    Value = (Bit11 << 10) | (Bit4 << 9) | (Bit9_8 << 7) | (Bit10 << 6) |
            (Bit6 << 5) | (Bit7 << 4) | (Bit3_1 << 1) | Bit5;
    return Value;
  }
  case RISCV::fixup_riscv_rvc_branch: {
    // Need to produce offset[8|4:3], [reg 3 bit], offset[7:6|2:1|5]
    unsigned Bit8 = (Value >> 8) & 0x1;
    unsigned Bit7_6 = (Value >> 6) & 0x3;
    unsigned Bit5 = (Value >> 5) & 0x1;
    unsigned Bit4_3 = (Value >> 3) & 0x3;
    unsigned Bit2_1 = (Value >> 1) & 0x3;
    Value = (Bit8 << 12) | (Bit4_3 << 10) | (Bit7_6 << 5) | (Bit2_1 << 3) |
            (Bit5 << 2);
    return Value;
  }

  }
}
| 286 | |
Rafael Espindola | 801b42d | 2017-06-23 22:52:36 +0000 | [diff] [blame] | 287 | void RISCVAsmBackend::applyFixup(const MCAssembler &Asm, const MCFixup &Fixup, |
| 288 | const MCValue &Target, |
Rafael Espindola | 88d9e37 | 2017-06-21 23:06:53 +0000 | [diff] [blame] | 289 | MutableArrayRef<char> Data, uint64_t Value, |
Ilya Biryukov | 3c9c106 | 2018-06-06 10:57:50 +0000 | [diff] [blame] | 290 | bool IsResolved, |
| 291 | const MCSubtargetInfo *STI) const { |
Alex Bradbury | 9d3f125 | 2017-09-28 08:26:24 +0000 | [diff] [blame] | 292 | MCContext &Ctx = Asm.getContext(); |
Mandeep Singh Grang | 5f043ae | 2017-11-10 19:09:28 +0000 | [diff] [blame] | 293 | MCFixupKindInfo Info = getFixupKindInfo(Fixup.getKind()); |
Alex Bradbury | 9d3f125 | 2017-09-28 08:26:24 +0000 | [diff] [blame] | 294 | if (!Value) |
| 295 | return; // Doesn't change encoding. |
Alex Bradbury | 9d3f125 | 2017-09-28 08:26:24 +0000 | [diff] [blame] | 296 | // Apply any target-specific value adjustments. |
| 297 | Value = adjustFixupValue(Fixup, Value, Ctx); |
| 298 | |
| 299 | // Shift the value into position. |
| 300 | Value <<= Info.TargetOffset; |
| 301 | |
| 302 | unsigned Offset = Fixup.getOffset(); |
Alex Bradbury | 1c010d0 | 2018-05-23 10:53:56 +0000 | [diff] [blame] | 303 | unsigned NumBytes = alignTo(Info.TargetSize + Info.TargetOffset, 8) / 8; |
Mandeep Singh Grang | 5f043ae | 2017-11-10 19:09:28 +0000 | [diff] [blame] | 304 | |
Alex Bradbury | 9d3f125 | 2017-09-28 08:26:24 +0000 | [diff] [blame] | 305 | assert(Offset + NumBytes <= Data.size() && "Invalid fixup offset!"); |
| 306 | |
| 307 | // For each byte of the fragment that the fixup touches, mask in the |
| 308 | // bits from the fixup value. |
Alex Bradbury | 1c010d0 | 2018-05-23 10:53:56 +0000 | [diff] [blame] | 309 | for (unsigned i = 0; i != NumBytes; ++i) { |
Alex Bradbury | 9d3f125 | 2017-09-28 08:26:24 +0000 | [diff] [blame] | 310 | Data[Offset + i] |= uint8_t((Value >> (i * 8)) & 0xff); |
| 311 | } |
Alex Bradbury | 6b2cca7 | 2016-11-01 23:47:30 +0000 | [diff] [blame] | 312 | } |
| 313 | |
Shiva Chen | 5af037f | 2019-01-30 11:16:59 +0000 | [diff] [blame] | 314 | // Linker relaxation may change code size. We have to insert Nops |
| 315 | // for .align directive when linker relaxation enabled. So then Linker |
| 316 | // could satisfy alignment by removing Nops. |
| 317 | // The function return the total Nops Size we need to insert. |
| 318 | bool RISCVAsmBackend::shouldInsertExtraNopBytesForCodeAlign( |
| 319 | const MCAlignFragment &AF, unsigned &Size) { |
| 320 | // Calculate Nops Size only when linker relaxation enabled. |
| 321 | if (!STI.getFeatureBits()[RISCV::FeatureRelax]) |
| 322 | return false; |
| 323 | |
| 324 | bool HasStdExtC = STI.getFeatureBits()[RISCV::FeatureStdExtC]; |
| 325 | unsigned MinNopLen = HasStdExtC ? 2 : 4; |
| 326 | |
Alex Bradbury | bb479ca | 2019-07-16 04:40:25 +0000 | [diff] [blame] | 327 | if (AF.getAlignment() <= MinNopLen) { |
| 328 | return false; |
| 329 | } else { |
| 330 | Size = AF.getAlignment() - MinNopLen; |
| 331 | return true; |
| 332 | } |
Shiva Chen | 5af037f | 2019-01-30 11:16:59 +0000 | [diff] [blame] | 333 | } |
| 334 | |
| 335 | // We need to insert R_RISCV_ALIGN relocation type to indicate the |
| 336 | // position of Nops and the total bytes of the Nops have been inserted |
| 337 | // when linker relaxation enabled. |
| 338 | // The function insert fixup_riscv_align fixup which eventually will |
| 339 | // transfer to R_RISCV_ALIGN relocation type. |
| 340 | bool RISCVAsmBackend::shouldInsertFixupForCodeAlign(MCAssembler &Asm, |
| 341 | const MCAsmLayout &Layout, |
| 342 | MCAlignFragment &AF) { |
| 343 | // Insert the fixup only when linker relaxation enabled. |
| 344 | if (!STI.getFeatureBits()[RISCV::FeatureRelax]) |
| 345 | return false; |
| 346 | |
Alex Bradbury | e9ad0cf | 2019-07-16 04:37:19 +0000 | [diff] [blame] | 347 | // Calculate total Nops we need to insert. If there are none to insert |
| 348 | // then simply return. |
Shiva Chen | 5af037f | 2019-01-30 11:16:59 +0000 | [diff] [blame] | 349 | unsigned Count; |
Alex Bradbury | e9ad0cf | 2019-07-16 04:37:19 +0000 | [diff] [blame] | 350 | if (!shouldInsertExtraNopBytesForCodeAlign(AF, Count) || (Count == 0)) |
Shiva Chen | 5af037f | 2019-01-30 11:16:59 +0000 | [diff] [blame] | 351 | return false; |
| 352 | |
| 353 | MCContext &Ctx = Asm.getContext(); |
| 354 | const MCExpr *Dummy = MCConstantExpr::create(0, Ctx); |
| 355 | // Create fixup_riscv_align fixup. |
| 356 | MCFixup Fixup = |
| 357 | MCFixup::create(0, Dummy, MCFixupKind(RISCV::fixup_riscv_align), SMLoc()); |
| 358 | |
| 359 | uint64_t FixedValue = 0; |
| 360 | MCValue NopBytes = MCValue::get(Count); |
| 361 | |
| 362 | Asm.getWriter().recordRelocation(Asm, Layout, &AF, Fixup, NopBytes, |
| 363 | FixedValue); |
| 364 | |
| 365 | return true; |
| 366 | } |
| 367 | |
Peter Collingbourne | dcd7d6c | 2018-05-21 19:20:29 +0000 | [diff] [blame] | 368 | std::unique_ptr<MCObjectTargetWriter> |
| 369 | RISCVAsmBackend::createObjectTargetWriter() const { |
| 370 | return createRISCVELFObjectWriter(OSABI, Is64Bit); |
Alex Bradbury | 6b2cca7 | 2016-11-01 23:47:30 +0000 | [diff] [blame] | 371 | } |
| 372 | |
Alex Bradbury | 6b2cca7 | 2016-11-01 23:47:30 +0000 | [diff] [blame] | 373 | MCAsmBackend *llvm::createRISCVAsmBackend(const Target &T, |
Alex Bradbury | b22f751 | 2018-01-03 08:53:05 +0000 | [diff] [blame] | 374 | const MCSubtargetInfo &STI, |
Alex Bradbury | 6b2cca7 | 2016-11-01 23:47:30 +0000 | [diff] [blame] | 375 | const MCRegisterInfo &MRI, |
Alex Bradbury | 6b2cca7 | 2016-11-01 23:47:30 +0000 | [diff] [blame] | 376 | const MCTargetOptions &Options) { |
Alex Bradbury | b22f751 | 2018-01-03 08:53:05 +0000 | [diff] [blame] | 377 | const Triple &TT = STI.getTargetTriple(); |
Alex Bradbury | 6b2cca7 | 2016-11-01 23:47:30 +0000 | [diff] [blame] | 378 | uint8_t OSABI = MCELFObjectTargetWriter::getOSABI(TT.getOS()); |
Alex Bradbury | fea4957 | 2019-03-09 09:28:06 +0000 | [diff] [blame] | 379 | return new RISCVAsmBackend(STI, OSABI, TT.isArch64Bit(), Options); |
Alex Bradbury | 6b2cca7 | 2016-11-01 23:47:30 +0000 | [diff] [blame] | 380 | } |