//===- ARMTargetStreamer.cpp - ARMTargetStreamer class --*- C++ -*---------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements the ARMTargetStreamer class.
//
//===----------------------------------------------------------------------===//

#include "ARMTargetMachine.h"
#include "llvm/MC/ConstantPools.h"
#include "llvm/MC/MCExpr.h"
#include "llvm/MC/MCStreamer.h"
#include "llvm/MC/MCSubtargetInfo.h"
#include "llvm/Support/ARMBuildAttributes.h"
#include "llvm/Support/TargetParser.h"

using namespace llvm;

//
// ARMTargetStreamer Implementation
//

ARMTargetStreamer::ARMTargetStreamer(MCStreamer &S)
    : MCTargetStreamer(S), ConstantPools(new AssemblerConstantPools()) {}

ARMTargetStreamer::~ARMTargetStreamer() = default;

// The constant pool handling is shared by all ARMTargetStreamer
// implementations.
const MCExpr *ARMTargetStreamer::addConstantPoolEntry(const MCExpr *Expr,
                                                      SMLoc Loc) {
  return ConstantPools->addEntry(Streamer, Expr, 4, Loc);
}
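
// Entries typically come from the "ldr rX, =expr" pseudo-instruction: each
// call above records a 4-byte literal for the current section, and the pool
// is written out either at an explicit ".ltorg" (emitCurrentConstantPool) or
// when the streamer finishes (finish() below). For example, in ARM assembly:
//   ldr r0, =0x12345678   @ adds a pool entry; becomes a PC-relative load
//   .ltorg                @ flushes the current section's pool here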

void ARMTargetStreamer::emitCurrentConstantPool() {
  ConstantPools->emitForCurrentSection(Streamer);
}

// finish() - write out any non-empty assembler constant pools.
void ARMTargetStreamer::finish() { ConstantPools->emitAll(Streamer); }

// reset() - Reset any state
void ARMTargetStreamer::reset() {}

// The remaining callbacks should be handled separately by each
// streamer.
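// (In the ELF and assembly target streamers these hooks back the ARM EHABI
// unwinding directives such as .fnstart/.fnend/.cantunwind/.personality/
// .handlerdata/.setfp/.pad/.save, and the build-attribute directives such as
// .eabi_attribute/.cpu/.arch/.fpu/.arch_extension/.inst.)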
void ARMTargetStreamer::emitFnStart() {}
void ARMTargetStreamer::emitFnEnd() {}
void ARMTargetStreamer::emitCantUnwind() {}
void ARMTargetStreamer::emitPersonality(const MCSymbol *Personality) {}
void ARMTargetStreamer::emitPersonalityIndex(unsigned Index) {}
void ARMTargetStreamer::emitHandlerData() {}
void ARMTargetStreamer::emitSetFP(unsigned FpReg, unsigned SpReg,
                                  int64_t Offset) {}
void ARMTargetStreamer::emitMovSP(unsigned Reg, int64_t Offset) {}
void ARMTargetStreamer::emitPad(int64_t Offset) {}
void ARMTargetStreamer::emitRegSave(const SmallVectorImpl<unsigned> &RegList,
                                    bool isVector) {}
void ARMTargetStreamer::emitUnwindRaw(int64_t StackOffset,
                                      const SmallVectorImpl<uint8_t> &Opcodes) {
}
void ARMTargetStreamer::switchVendor(StringRef Vendor) {}
void ARMTargetStreamer::emitAttribute(unsigned Attribute, unsigned Value) {}
void ARMTargetStreamer::emitTextAttribute(unsigned Attribute,
                                          StringRef String) {}
void ARMTargetStreamer::emitIntTextAttribute(unsigned Attribute,
                                             unsigned IntValue,
                                             StringRef StringValue) {}
void ARMTargetStreamer::emitArch(unsigned Arch) {}
void ARMTargetStreamer::emitArchExtension(unsigned ArchExt) {}
void ARMTargetStreamer::emitObjectArch(unsigned Arch) {}
void ARMTargetStreamer::emitFPU(unsigned FPU) {}
void ARMTargetStreamer::finishAttributeSection() {}
void ARMTargetStreamer::emitInst(uint32_t Inst, char Suffix) {}
void
ARMTargetStreamer::AnnotateTLSDescriptorSequence(const MCSymbolRefExpr *SRE) {}
void ARMTargetStreamer::emitThumbSet(MCSymbol *Symbol, const MCExpr *Value) {}

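// Map the subtarget's architecture feature bits to the corresponding
// Tag_CPU_arch (ARMBuildAttrs::CPUArch) value, checking newer architectures
// before the older ones whose features they imply.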
static ARMBuildAttrs::CPUArch getArchForCPU(const MCSubtargetInfo &STI) {
  if (STI.getCPU() == "xscale")
    return ARMBuildAttrs::v5TEJ;

  if (STI.hasFeature(ARM::HasV8Ops)) {
    if (STI.hasFeature(ARM::FeatureRClass))
      return ARMBuildAttrs::v8_R;
    return ARMBuildAttrs::v8_A;
  } else if (STI.hasFeature(ARM::HasV8MMainlineOps))
    return ARMBuildAttrs::v8_M_Main;
  else if (STI.hasFeature(ARM::HasV7Ops)) {
    if (STI.hasFeature(ARM::FeatureMClass) && STI.hasFeature(ARM::FeatureDSP))
      return ARMBuildAttrs::v7E_M;
    return ARMBuildAttrs::v7;
  } else if (STI.hasFeature(ARM::HasV6T2Ops))
    return ARMBuildAttrs::v6T2;
  else if (STI.hasFeature(ARM::HasV8MBaselineOps))
    return ARMBuildAttrs::v8_M_Base;
  else if (STI.hasFeature(ARM::HasV6MOps))
    return ARMBuildAttrs::v6S_M;
  else if (STI.hasFeature(ARM::HasV6Ops))
    return ARMBuildAttrs::v6;
  else if (STI.hasFeature(ARM::HasV5TEOps))
    return ARMBuildAttrs::v5TE;
  else if (STI.hasFeature(ARM::HasV5TOps))
    return ARMBuildAttrs::v5T;
  else if (STI.hasFeature(ARM::HasV4TOps))
    return ARMBuildAttrs::v4T;
  else
    return ARMBuildAttrs::v4;
}

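// Return true if the subtarget is ARMv8-M Baseline or Mainline. Baseline has
// to be distinguished from the older architectures that imply its feature bit
// (see the note below), so v6T2 and later are excluded explicitly.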
static bool isV8M(const MCSubtargetInfo &STI) {
  // Note that v8M Baseline is a subset of v6T2!
  return (STI.hasFeature(ARM::HasV8MBaselineOps) &&
          !STI.hasFeature(ARM::HasV6T2Ops)) ||
         STI.hasFeature(ARM::HasV8MMainlineOps);
}

/// Emit the build attributes that only depend on the hardware that we expect
/// to be available, and not on the ABI or any source-language choices.
void ARMTargetStreamer::emitTargetAttributes(const MCSubtargetInfo &STI) {
  switchVendor("aeabi");

  const StringRef CPUString = STI.getCPU();
  if (!CPUString.empty() && !CPUString.startswith("generic")) {
    // FIXME: remove krait check when GNU tools support krait cpu
    if (STI.hasFeature(ARM::ProcKrait)) {
      emitTextAttribute(ARMBuildAttrs::CPU_name, "cortex-a9");
      // We consider krait as a "cortex-a9" + hwdiv CPU
      // Enable hwdiv through ".arch_extension idiv"
      if (STI.hasFeature(ARM::FeatureHWDiv) ||
          STI.hasFeature(ARM::FeatureHWDivARM))
        emitArchExtension(ARM::AEK_HWDIV | ARM::AEK_HWDIVARM);
    } else {
      emitTextAttribute(ARMBuildAttrs::CPU_name, CPUString);
    }
  }

  emitAttribute(ARMBuildAttrs::CPU_arch, getArchForCPU(STI));

  if (STI.hasFeature(ARM::FeatureAClass)) {
    emitAttribute(ARMBuildAttrs::CPU_arch_profile,
                  ARMBuildAttrs::ApplicationProfile);
  } else if (STI.hasFeature(ARM::FeatureRClass)) {
    emitAttribute(ARMBuildAttrs::CPU_arch_profile,
                  ARMBuildAttrs::RealTimeProfile);
  } else if (STI.hasFeature(ARM::FeatureMClass)) {
    emitAttribute(ARMBuildAttrs::CPU_arch_profile,
                  ARMBuildAttrs::MicroControllerProfile);
  }

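  // FeatureNoARM marks Thumb-only (M-profile) cores, which must not claim
  // use of the ARM instruction set.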
  emitAttribute(ARMBuildAttrs::ARM_ISA_use, STI.hasFeature(ARM::FeatureNoARM)
                                                ? ARMBuildAttrs::Not_Allowed
                                                : ARMBuildAttrs::Allowed);

  if (isV8M(STI)) {
    emitAttribute(ARMBuildAttrs::THUMB_ISA_use,
                  ARMBuildAttrs::AllowThumbDerived);
  } else if (STI.hasFeature(ARM::FeatureThumb2)) {
    emitAttribute(ARMBuildAttrs::THUMB_ISA_use,
                  ARMBuildAttrs::AllowThumb32);
  } else if (STI.hasFeature(ARM::HasV4TOps)) {
    emitAttribute(ARMBuildAttrs::THUMB_ISA_use, ARMBuildAttrs::Allowed);
  }

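  // Pick a single FPU name covering the FP and Advanced SIMD features of the
  // subtarget; the concrete streamer decides whether this is printed as an
  // .fpu directive or encoded as object-file build attributes.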
  if (STI.hasFeature(ARM::FeatureNEON)) {
    /* NEON is not exactly a VFP architecture, but GAS emits one of
     * neon/neon-fp-armv8/neon-vfpv4/vfpv3/vfpv2 for .fpu parameters */
    if (STI.hasFeature(ARM::FeatureFPARMv8)) {
      if (STI.hasFeature(ARM::FeatureCrypto))
        emitFPU(ARM::FK_CRYPTO_NEON_FP_ARMV8);
      else
        emitFPU(ARM::FK_NEON_FP_ARMV8);
    } else if (STI.hasFeature(ARM::FeatureVFP4))
      emitFPU(ARM::FK_NEON_VFPV4);
    else
      emitFPU(STI.hasFeature(ARM::FeatureFP16) ? ARM::FK_NEON_FP16
                                               : ARM::FK_NEON);
    // Emit Tag_Advanced_SIMD_arch for ARMv8 architecture
    if (STI.hasFeature(ARM::HasV8Ops))
      emitAttribute(ARMBuildAttrs::Advanced_SIMD_arch,
                    STI.hasFeature(ARM::HasV8_1aOps)
                        ? ARMBuildAttrs::AllowNeonARMv8_1a
                        : ARMBuildAttrs::AllowNeonARMv8);
  } else {
    if (STI.hasFeature(ARM::FeatureFPARMv8))
      // FPv5 and FP-ARMv8 have the same instructions, so are modeled as one
      // FPU, but there are two different names for it depending on the CPU.
      emitFPU(STI.hasFeature(ARM::FeatureD16)
                  ? (STI.hasFeature(ARM::FeatureVFPOnlySP) ? ARM::FK_FPV5_SP_D16
                                                           : ARM::FK_FPV5_D16)
                  : ARM::FK_FP_ARMV8);
    else if (STI.hasFeature(ARM::FeatureVFP4))
      emitFPU(STI.hasFeature(ARM::FeatureD16)
                  ? (STI.hasFeature(ARM::FeatureVFPOnlySP) ? ARM::FK_FPV4_SP_D16
                                                           : ARM::FK_VFPV4_D16)
                  : ARM::FK_VFPV4);
    else if (STI.hasFeature(ARM::FeatureVFP3))
      emitFPU(
          STI.hasFeature(ARM::FeatureD16)
              // +d16
              ? (STI.hasFeature(ARM::FeatureVFPOnlySP)
                     ? (STI.hasFeature(ARM::FeatureFP16) ? ARM::FK_VFPV3XD_FP16
                                                         : ARM::FK_VFPV3XD)
                     : (STI.hasFeature(ARM::FeatureFP16)
                            ? ARM::FK_VFPV3_D16_FP16
                            : ARM::FK_VFPV3_D16))
              // -d16
              : (STI.hasFeature(ARM::FeatureFP16) ? ARM::FK_VFPV3_FP16
                                                  : ARM::FK_VFPV3));
    else if (STI.hasFeature(ARM::FeatureVFP2))
      emitFPU(ARM::FK_VFPV2);
  }

  // ABI_HardFP_use attribute to indicate single precision FP.
  if (STI.hasFeature(ARM::FeatureVFPOnlySP))
    emitAttribute(ARMBuildAttrs::ABI_HardFP_use,
                  ARMBuildAttrs::HardFPSinglePrecision);

  if (STI.hasFeature(ARM::FeatureFP16))
    emitAttribute(ARMBuildAttrs::FP_HP_extension, ARMBuildAttrs::AllowHPFP);

  if (STI.hasFeature(ARM::FeatureMP))
    emitAttribute(ARMBuildAttrs::MPextension_use, ARMBuildAttrs::AllowMP);

  // Hardware divide in ARM mode is part of base arch, starting from ARMv8.
  // If only Thumb hwdiv is present, it must also be in base arch (ARMv7-R/M).
  // It is not possible to produce DisallowDIV: if hwdiv is present in the base
  // arch, supplying -hwdiv downgrades the effective arch, via ClearImpliedBits.
  // AllowDIVExt is only emitted if hwdiv isn't available in the base arch;
  // otherwise, the default value (AllowDIVIfExists) applies.
  if (STI.hasFeature(ARM::FeatureHWDivARM) && !STI.hasFeature(ARM::HasV8Ops))
    emitAttribute(ARMBuildAttrs::DIV_use, ARMBuildAttrs::AllowDIVExt);

  if (STI.hasFeature(ARM::FeatureDSP) && isV8M(STI))
    emitAttribute(ARMBuildAttrs::DSP_extension, ARMBuildAttrs::Allowed);

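  // FeatureStrictAlign corresponds to building with strict alignment checking
  // (for example -mno-unaligned-access), so unaligned data accesses are
  // reported as not allowed.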
  if (STI.hasFeature(ARM::FeatureStrictAlign))
    emitAttribute(ARMBuildAttrs::CPU_unaligned_access,
                  ARMBuildAttrs::Not_Allowed);
  else
    emitAttribute(ARMBuildAttrs::CPU_unaligned_access,
                  ARMBuildAttrs::Allowed);

  if (STI.hasFeature(ARM::FeatureTrustZone) &&
      STI.hasFeature(ARM::FeatureVirtualization))
    emitAttribute(ARMBuildAttrs::Virtualization_use,
                  ARMBuildAttrs::AllowTZVirtualization);
  else if (STI.hasFeature(ARM::FeatureTrustZone))
    emitAttribute(ARMBuildAttrs::Virtualization_use, ARMBuildAttrs::AllowTZ);
  else if (STI.hasFeature(ARM::FeatureVirtualization))
    emitAttribute(ARMBuildAttrs::Virtualization_use,
                  ARMBuildAttrs::AllowVirtualization);
}