blob: 440996ad5558ffb75f39d3d36f497943f654326b [file] [log] [blame]
Clement Courbet44b4c542018-06-19 11:28:59 +00001//===-- Target.cpp ----------------------------------------------*- C++ -*-===//
2//
3// The LLVM Compiler Infrastructure
4//
5// This file is distributed under the University of Illinois Open Source
6// License. See LICENSE.TXT for details.
7//
8//===----------------------------------------------------------------------===//
9#include "../Target.h"
10
Clement Courbet4860b982018-06-26 08:49:30 +000011#include "../Latency.h"
12#include "../Uops.h"
Clement Courbet717c9762018-06-28 07:41:16 +000013#include "MCTargetDesc/X86BaseInfo.h"
Clement Courbeta51efc22018-06-25 13:12:02 +000014#include "MCTargetDesc/X86MCTargetDesc.h"
Clement Courbet6fd00e32018-06-20 11:54:35 +000015#include "X86.h"
Clement Courbeta51efc22018-06-25 13:12:02 +000016#include "X86RegisterInfo.h"
Clement Courbete7851692018-07-03 06:17:05 +000017#include "X86Subtarget.h"
Clement Courbeta51efc22018-06-25 13:12:02 +000018#include "llvm/MC/MCInstBuilder.h"
Clement Courbet6fd00e32018-06-20 11:54:35 +000019
Clement Courbet44b4c542018-06-19 11:28:59 +000020namespace exegesis {
21
22namespace {
23
Clement Courbet717c9762018-06-28 07:41:16 +000024// Common code for X86 Uops and Latency runners.
Clement Courbetd939f6d2018-09-13 07:40:53 +000025template <typename Impl> class X86SnippetGenerator : public Impl {
Clement Courbet717c9762018-06-28 07:41:16 +000026 using Impl::Impl;
Clement Courbet4860b982018-06-26 08:49:30 +000027
Guillaume Chatelete60866a2018-08-03 09:29:38 +000028 llvm::Expected<CodeTemplate>
Guillaume Chatelet9b592382018-10-10 14:57:32 +000029 generateCodeTemplate(const Instruction &Instr) const override {
Clement Courbet717c9762018-06-28 07:41:16 +000030 // Test whether we can generate a snippet for this instruction.
Guillaume Chatelet9b592382018-10-10 14:57:32 +000031 const auto OpcodeName = Instr.Name;
Clement Courbet717c9762018-06-28 07:41:16 +000032 if (OpcodeName.startswith("POPF") || OpcodeName.startswith("PUSHF") ||
33 OpcodeName.startswith("ADJCALLSTACK")) {
34 return llvm::make_error<BenchmarkFailure>(
35 "Unsupported opcode: Push/Pop/AdjCallStack");
Clement Courbet4860b982018-06-26 08:49:30 +000036 }
Clement Courbet717c9762018-06-28 07:41:16 +000037
38 // Handle X87.
Guillaume Chateletee9c2a172018-10-10 14:22:48 +000039 const unsigned FPInstClass =
Guillaume Chatelet9b592382018-10-10 14:57:32 +000040 Instr.Description->TSFlags & llvm::X86II::FPTypeMask;
Clement Courbet717c9762018-06-28 07:41:16 +000041 switch (FPInstClass) {
42 case llvm::X86II::NotFP:
43 break;
44 case llvm::X86II::ZeroArgFP:
Clement Courbetf9a0bb32018-07-05 13:54:51 +000045 return llvm::make_error<BenchmarkFailure>("Unsupported x87 ZeroArgFP");
Clement Courbet717c9762018-06-28 07:41:16 +000046 case llvm::X86II::OneArgFP:
Clement Courbetf9a0bb32018-07-05 13:54:51 +000047 return llvm::make_error<BenchmarkFailure>("Unsupported x87 OneArgFP");
Clement Courbet717c9762018-06-28 07:41:16 +000048 case llvm::X86II::OneArgFPRW:
49 case llvm::X86II::TwoArgFP: {
50 // These are instructions like
51 // - `ST(0) = fsqrt(ST(0))` (OneArgFPRW)
52 // - `ST(0) = ST(0) + ST(i)` (TwoArgFP)
53 // They are intrinsically serial and do not modify the state of the stack.
54 // We generate the same code for latency and uops.
Guillaume Chatelete60866a2018-08-03 09:29:38 +000055 return this->generateSelfAliasingCodeTemplate(Instr);
Clement Courbet717c9762018-06-28 07:41:16 +000056 }
57 case llvm::X86II::CompareFP:
58 return Impl::handleCompareFP(Instr);
59 case llvm::X86II::CondMovFP:
60 return Impl::handleCondMovFP(Instr);
61 case llvm::X86II::SpecialFP:
Clement Courbetf9a0bb32018-07-05 13:54:51 +000062 return llvm::make_error<BenchmarkFailure>("Unsupported x87 SpecialFP");
Clement Courbet717c9762018-06-28 07:41:16 +000063 default:
64 llvm_unreachable("Unknown FP Type!");
65 }
66
67 // Fallback to generic implementation.
Guillaume Chatelet9b592382018-10-10 14:57:32 +000068 return Impl::Base::generateCodeTemplate(Instr);
Clement Courbet4860b982018-06-26 08:49:30 +000069 }
70};
71
Clement Courbetd939f6d2018-09-13 07:40:53 +000072class X86LatencyImpl : public LatencySnippetGenerator {
Clement Courbet717c9762018-06-28 07:41:16 +000073protected:
Clement Courbetd939f6d2018-09-13 07:40:53 +000074 using Base = LatencySnippetGenerator;
Clement Courbet717c9762018-06-28 07:41:16 +000075 using Base::Base;
Guillaume Chatelete60866a2018-08-03 09:29:38 +000076 llvm::Expected<CodeTemplate> handleCompareFP(const Instruction &Instr) const {
Clement Courbetd939f6d2018-09-13 07:40:53 +000077 return llvm::make_error<SnippetGeneratorFailure>(
78 "Unsupported x87 CompareFP");
Clement Courbet717c9762018-06-28 07:41:16 +000079 }
Guillaume Chatelete60866a2018-08-03 09:29:38 +000080 llvm::Expected<CodeTemplate> handleCondMovFP(const Instruction &Instr) const {
Clement Courbetd939f6d2018-09-13 07:40:53 +000081 return llvm::make_error<SnippetGeneratorFailure>(
82 "Unsupported x87 CondMovFP");
Clement Courbet717c9762018-06-28 07:41:16 +000083 }
Clement Courbet717c9762018-06-28 07:41:16 +000084};
85
Clement Courbetd939f6d2018-09-13 07:40:53 +000086class X86UopsImpl : public UopsSnippetGenerator {
Clement Courbet717c9762018-06-28 07:41:16 +000087protected:
Clement Courbetd939f6d2018-09-13 07:40:53 +000088 using Base = UopsSnippetGenerator;
Clement Courbet717c9762018-06-28 07:41:16 +000089 using Base::Base;
Clement Courbetf9a0bb32018-07-05 13:54:51 +000090 // We can compute uops for any FP instruction that does not grow or shrink the
91 // stack (either do not touch the stack or push as much as they pop).
Guillaume Chatelete60866a2018-08-03 09:29:38 +000092 llvm::Expected<CodeTemplate> handleCompareFP(const Instruction &Instr) const {
93 return generateUnconstrainedCodeTemplate(
Clement Courbetf9a0bb32018-07-05 13:54:51 +000094 Instr, "instruction does not grow/shrink the FP stack");
Clement Courbet717c9762018-06-28 07:41:16 +000095 }
Guillaume Chatelete60866a2018-08-03 09:29:38 +000096 llvm::Expected<CodeTemplate> handleCondMovFP(const Instruction &Instr) const {
97 return generateUnconstrainedCodeTemplate(
Clement Courbetf9a0bb32018-07-05 13:54:51 +000098 Instr, "instruction does not grow/shrink the FP stack");
Clement Courbet4860b982018-06-26 08:49:30 +000099 }
100};
101
Guillaume Chateletc96a97b2018-09-20 12:22:18 +0000102static unsigned GetLoadImmediateOpcode(unsigned RegBitWidth) {
103 switch (RegBitWidth) {
Guillaume Chatelet5ad29092018-09-18 11:26:27 +0000104 case 8:
105 return llvm::X86::MOV8ri;
106 case 16:
107 return llvm::X86::MOV16ri;
108 case 32:
109 return llvm::X86::MOV32ri;
110 case 64:
111 return llvm::X86::MOV64ri;
112 }
113 llvm_unreachable("Invalid Value Width");
114}
115
Guillaume Chateletc96a97b2018-09-20 12:22:18 +0000116// Generates instruction to load an immediate value into a register.
117static llvm::MCInst loadImmediate(unsigned Reg, unsigned RegBitWidth,
118 const llvm::APInt &Value) {
119 if (Value.getBitWidth() > RegBitWidth)
120 llvm_unreachable("Value must fit in the Register");
121 return llvm::MCInstBuilder(GetLoadImmediateOpcode(RegBitWidth))
Guillaume Chatelet5ad29092018-09-18 11:26:27 +0000122 .addReg(Reg)
123 .addImm(Value.getZExtValue());
124}
125
126// Allocates scratch memory on the stack.
127static llvm::MCInst allocateStackSpace(unsigned Bytes) {
128 return llvm::MCInstBuilder(llvm::X86::SUB64ri8)
129 .addReg(llvm::X86::RSP)
130 .addReg(llvm::X86::RSP)
131 .addImm(Bytes);
132}
133
134// Fills scratch memory at offset `OffsetBytes` with value `Imm`.
135static llvm::MCInst fillStackSpace(unsigned MovOpcode, unsigned OffsetBytes,
136 uint64_t Imm) {
137 return llvm::MCInstBuilder(MovOpcode)
138 // Address = ESP
139 .addReg(llvm::X86::RSP) // BaseReg
140 .addImm(1) // ScaleAmt
141 .addReg(0) // IndexReg
142 .addImm(OffsetBytes) // Disp
143 .addReg(0) // Segment
144 // Immediate.
145 .addImm(Imm);
146}
147
148// Loads scratch memory into register `Reg` using opcode `RMOpcode`.
149static llvm::MCInst loadToReg(unsigned Reg, unsigned RMOpcode) {
150 return llvm::MCInstBuilder(RMOpcode)
151 .addReg(Reg)
152 // Address = ESP
153 .addReg(llvm::X86::RSP) // BaseReg
154 .addImm(1) // ScaleAmt
155 .addReg(0) // IndexReg
156 .addImm(0) // Disp
157 .addReg(0); // Segment
158}
159
160// Releases scratch memory.
161static llvm::MCInst releaseStackSpace(unsigned Bytes) {
162 return llvm::MCInstBuilder(llvm::X86::ADD64ri8)
163 .addReg(llvm::X86::RSP)
164 .addReg(llvm::X86::RSP)
165 .addImm(Bytes);
166}
167
// Reserves some space on the stack, fills it with the content of the provided
// constant and provide methods to load the stack value into a register.
struct ConstantInliner {
  explicit ConstantInliner(const llvm::APInt &Constant) : Constant_(Constant) {}

  // Materializes the constant on the stack, loads it into `Reg` (a register
  // of `RegBitWidth` bits) via the memory-to-register opcode `Opcode`, then
  // frees the stack space. Returns the whole instruction sequence; the
  // inliner must not be reused afterwards (Instructions is moved out).
  std::vector<llvm::MCInst> loadAndFinalize(unsigned Reg, unsigned RegBitWidth,
                                            unsigned Opcode) {
    assert((RegBitWidth & 7) == 0 &&
           "RegBitWidth must be a multiple of 8 bits");
    initStack(RegBitWidth / 8);
    add(loadToReg(Reg, Opcode));
    add(releaseStackSpace(RegBitWidth / 8));
    return std::move(Instructions);
  }

  // x87 variant: loads the stack value onto the FP stack with `Opcode`
  // (which pushes to ST0), then copies it to `Reg` with ST_Frr when `Reg`
  // is not ST0 itself, and frees the stack space.
  std::vector<llvm::MCInst>
  loadX87AndFinalize(unsigned Reg, unsigned RegBitWidth, unsigned Opcode) {
    assert((RegBitWidth & 7) == 0 &&
           "RegBitWidth must be a multiple of 8 bits");
    initStack(RegBitWidth / 8);
    add(llvm::MCInstBuilder(Opcode)
            .addReg(llvm::X86::RSP) // BaseReg
            .addImm(1)              // ScaleAmt
            .addReg(0)              // IndexReg
            .addImm(0)              // Disp
            .addReg(0));            // Segment
    if (Reg != llvm::X86::ST0)
      add(llvm::MCInstBuilder(llvm::X86::ST_Frr).addReg(Reg));
    add(releaseStackSpace(RegBitWidth / 8));
    return std::move(Instructions);
  }

  // Pushes the 8-byte constant on the stack and pops it into the flags
  // register with POPF64. Note: the stack space is intentionally consumed
  // by the pop itself, so no releaseStackSpace is emitted here.
  std::vector<llvm::MCInst> popFlagAndFinalize() {
    initStack(8);
    add(llvm::MCInstBuilder(llvm::X86::POPF64));
    return std::move(Instructions);
  }

private:
  // Appends `Inst` to the generated sequence; returns *this for chaining.
  ConstantInliner &add(const llvm::MCInst &Inst) {
    Instructions.push_back(Inst);
    return *this;
  }

  // Allocates `Bytes` of stack space and fills it with the constant
  // (sign-extended to the full width if narrower), using the widest stores
  // possible: 4-byte chunks first, then a 2-byte and/or 1-byte tail.
  void initStack(unsigned Bytes) {
    assert(Constant_.getBitWidth() <= Bytes * 8 &&
           "Value does not have the correct size");
    const llvm::APInt WideConstant = Constant_.getBitWidth() < Bytes * 8
                                         ? Constant_.sext(Bytes * 8)
                                         : Constant_;
    add(allocateStackSpace(Bytes));
    size_t ByteOffset = 0;
    // 4-byte stores cover as much of the buffer as possible.
    for (; Bytes - ByteOffset >= 4; ByteOffset += 4)
      add(fillStackSpace(
          llvm::X86::MOV32mi, ByteOffset,
          WideConstant.extractBits(32, ByteOffset * 8).getZExtValue()));
    // Then at most one 2-byte store...
    if (Bytes - ByteOffset >= 2) {
      add(fillStackSpace(
          llvm::X86::MOV16mi, ByteOffset,
          WideConstant.extractBits(16, ByteOffset * 8).getZExtValue()));
      ByteOffset += 2;
    }
    // ...and at most one trailing 1-byte store.
    if (Bytes - ByteOffset >= 1)
      add(fillStackSpace(
          llvm::X86::MOV8mi, ByteOffset,
          WideConstant.extractBits(8, ByteOffset * 8).getZExtValue()));
  }

  // The constant to inline.
  llvm::APInt Constant_;
  // The instruction sequence built so far; moved out by the *Finalize methods.
  std::vector<llvm::MCInst> Instructions;
};
239
// X86-specific implementation of the ExegesisTarget interface: codegen
// passes, scratch-memory conventions, memory-operand filling and
// register-initialization sequences.
class ExegesisX86Target : public ExegesisTarget {
  void addTargetSpecificPasses(llvm::PassManagerBase &PM) const override {
    // Lowers FP pseudo-instructions, e.g. ABS_Fp32 -> ABS_F.
    PM.add(llvm::createX86FloatingPointStackifierPass());
  }

  // Returns the register holding the scratch-memory pointer, or 0 when
  // scratch memory is unsupported (32-bit mode).
  unsigned getScratchMemoryRegister(const llvm::Triple &TT) const override {
    if (!TT.isArch64Bit()) {
      // FIXME: This would require popping from the stack, so we would have to
      // add some additional setup code.
      return 0;
    }
    // Windows uses RCX, everything else RDI (presumably the first-argument
    // register of the respective calling convention — TODO confirm).
    return TT.isOSWindows() ? llvm::X86::RCX : llvm::X86::RDI;
  }

  // 64 bytes: enough for the widest access generated here (512-bit ZMM).
  unsigned getMaxMemoryAccessSize() const override { return 64; }

  // Points every explicit memory operand of `IT` at [Reg + Offset].
  void fillMemoryOperands(InstructionTemplate &IT, unsigned Reg,
                          unsigned Offset) const override {
    // FIXME: For instructions that read AND write to memory, we use the same
    // value for input and output.
    for (size_t I = 0, E = IT.Instr.Operands.size(); I < E; ++I) {
      const Operand *Op = &IT.Instr.Operands[I];
      if (Op->isExplicit() && Op->isMemory()) {
        // Case 1: 5-op memory.
        assert((I + 5 <= E) && "x86 memory references are always 5 ops");
        IT.getValueFor(*Op) = llvm::MCOperand::createReg(Reg); // BaseReg
        Op = &IT.Instr.Operands[++I];
        assert(Op->isMemory());
        assert(Op->isExplicit());
        IT.getValueFor(*Op) = llvm::MCOperand::createImm(1); // ScaleAmt
        Op = &IT.Instr.Operands[++I];
        assert(Op->isMemory());
        assert(Op->isExplicit());
        IT.getValueFor(*Op) = llvm::MCOperand::createReg(0); // IndexReg
        Op = &IT.Instr.Operands[++I];
        assert(Op->isMemory());
        assert(Op->isExplicit());
        IT.getValueFor(*Op) = llvm::MCOperand::createImm(Offset); // Disp
        Op = &IT.Instr.Operands[++I];
        assert(Op->isMemory());
        assert(Op->isExplicit());
        IT.getValueFor(*Op) = llvm::MCOperand::createReg(0); // Segment
        // Case2: segment:index addressing. We assume that ES is 0.
      }
    }
  }

  // Returns an instruction sequence that sets `Reg` to `Value`, dispatching
  // on the register class. Returns an empty vector for unsupported cases.
  std::vector<llvm::MCInst> setRegTo(const llvm::MCSubtargetInfo &STI,
                                     unsigned Reg,
                                     const llvm::APInt &Value) const override {
    // GPRs: a single MOVri is enough.
    if (llvm::X86::GR8RegClass.contains(Reg))
      return {loadImmediate(Reg, 8, Value)};
    if (llvm::X86::GR16RegClass.contains(Reg))
      return {loadImmediate(Reg, 16, Value)};
    if (llvm::X86::GR32RegClass.contains(Reg))
      return {loadImmediate(Reg, 32, Value)};
    if (llvm::X86::GR64RegClass.contains(Reg))
      return {loadImmediate(Reg, 64, Value)};
    // Everything else goes through memory: write the constant on the stack,
    // then load it with a class-appropriate opcode.
    ConstantInliner CI(Value);
    if (llvm::X86::VR64RegClass.contains(Reg))
      return CI.loadAndFinalize(Reg, 64, llvm::X86::MMX_MOVQ64rm);
    if (llvm::X86::VR128XRegClass.contains(Reg)) {
      // Pick the best available encoding: AVX512 > AVX > SSE.
      if (STI.getFeatureBits()[llvm::X86::FeatureAVX512])
        return CI.loadAndFinalize(Reg, 128, llvm::X86::VMOVDQU32Z128rm);
      if (STI.getFeatureBits()[llvm::X86::FeatureAVX])
        return CI.loadAndFinalize(Reg, 128, llvm::X86::VMOVDQUrm);
      return CI.loadAndFinalize(Reg, 128, llvm::X86::MOVDQUrm);
    }
    if (llvm::X86::VR256XRegClass.contains(Reg)) {
      // NOTE(review): with neither AVX512 nor AVX this falls through to the
      // final `return {}` below (256-bit registers need AVX).
      if (STI.getFeatureBits()[llvm::X86::FeatureAVX512])
        return CI.loadAndFinalize(Reg, 256, llvm::X86::VMOVDQU32Z256rm);
      if (STI.getFeatureBits()[llvm::X86::FeatureAVX])
        return CI.loadAndFinalize(Reg, 256, llvm::X86::VMOVDQUYrm);
    }
    if (llvm::X86::VR512RegClass.contains(Reg))
      if (STI.getFeatureBits()[llvm::X86::FeatureAVX512])
        return CI.loadAndFinalize(Reg, 512, llvm::X86::VMOVDQU32Zrm);
    if (llvm::X86::RSTRegClass.contains(Reg)) {
      // x87: only the three memory-operand widths of LD_F are supported.
      if (Value.getBitWidth() == 32)
        return CI.loadX87AndFinalize(Reg, 32, llvm::X86::LD_F32m);
      if (Value.getBitWidth() == 64)
        return CI.loadX87AndFinalize(Reg, 64, llvm::X86::LD_F64m);
      if (Value.getBitWidth() == 80)
        return CI.loadX87AndFinalize(Reg, 80, llvm::X86::LD_F80m);
    }
    if (Reg == llvm::X86::EFLAGS)
      return CI.popFlagAndFinalize();
    return {}; // Not yet implemented.
  }

  std::unique_ptr<SnippetGenerator>
  createLatencySnippetGenerator(const LLVMState &State) const override {
    return llvm::make_unique<X86SnippetGenerator<X86LatencyImpl>>(State);
  }

  std::unique_ptr<SnippetGenerator>
  createUopsSnippetGenerator(const LLVMState &State) const override {
    return llvm::make_unique<X86SnippetGenerator<X86UopsImpl>>(State);
  }

  bool matchesArch(llvm::Triple::ArchType Arch) const override {
    return Arch == llvm::Triple::x86_64 || Arch == llvm::Triple::x86;
  }
};
345
346} // namespace
347
Clement Courbetcff2caa2018-06-25 11:22:23 +0000348static ExegesisTarget *getTheExegesisX86Target() {
Clement Courbet44b4c542018-06-19 11:28:59 +0000349 static ExegesisX86Target Target;
350 return &Target;
351}
352
// Registers the singleton X86 target with the ExegesisTarget registry.
void InitializeX86ExegesisTarget() {
  ExegesisTarget::registerTarget(getTheExegesisX86Target());
}
356
Clement Courbetcff2caa2018-06-25 11:22:23 +0000357} // namespace exegesis