1 //===-- X86AsmInstrumentation.cpp - Instrument X86 inline assembly C++ -*-===//
3 // The LLVM Compiler Infrastructure
5 // This file is distributed under the University of Illinois Open Source
6 // License. See LICENSE.TXT for details.
8 //===----------------------------------------------------------------------===//
10 #include "MCTargetDesc/X86BaseInfo.h"
11 #include "X86AsmInstrumentation.h"
12 #include "X86Operand.h"
13 #include "X86RegisterInfo.h"
14 #include "llvm/ADT/StringExtras.h"
15 #include "llvm/ADT/Triple.h"
16 #include "llvm/CodeGen/MachineValueType.h"
17 #include "llvm/IR/Function.h"
18 #include "llvm/MC/MCAsmInfo.h"
19 #include "llvm/MC/MCContext.h"
20 #include "llvm/MC/MCInst.h"
21 #include "llvm/MC/MCInstBuilder.h"
22 #include "llvm/MC/MCInstrInfo.h"
23 #include "llvm/MC/MCParser/MCParsedAsmOperand.h"
24 #include "llvm/MC/MCStreamer.h"
25 #include "llvm/MC/MCSubtargetInfo.h"
26 #include "llvm/MC/MCTargetAsmParser.h"
27 #include "llvm/MC/MCTargetOptions.h"
28 #include "llvm/Support/CommandLine.h"
// Command-line flag gating ASan instrumentation of inline assembly.
// Hidden from normal -help output; checked in CreateX86AsmInstrumentation.
static cl::opt<bool> ClAsanInstrumentAssembly(
    "asan-instrument-assembly",
    cl::desc("instrument assembly with AddressSanitizer checks"), cl::Hidden,

// X86 memory-operand displacements are encoded as signed 32-bit immediates,
// so any accumulated displacement must stay inside this range; larger totals
// are materialized in several steps (see ComputeMemOperandAddress).
const int64_t MinAllowedDisplacement = std::numeric_limits<int32_t>::min();
const int64_t MaxAllowedDisplacement = std::numeric_limits<int32_t>::max();
42 int64_t ApplyBounds(int64_t Displacement) {
43 return std::max(std::min(MaxAllowedDisplacement, Displacement),
44 MinAllowedDisplacement);
47 bool InBounds(int64_t Displacement) {
48 return Displacement >= MinAllowedDisplacement &&
49 Displacement <= MaxAllowedDisplacement;
52 bool IsStackReg(unsigned Reg) { return Reg == X86::RSP || Reg == X86::ESP; }
// An access is "small" when it is narrower than 8 bytes; small accesses need
// the byte-granular shadow check, larger ones the whole-shadow-byte compare.
bool IsSmallMemAccess(unsigned AccessSize) { return !(AccessSize >= 8); }
// Builds the name of the AddressSanitizer runtime callback reported on a
// failed check: "__asan_report_" + ("store"|"load") + access-size suffix
// (the suffix concatenation continues on a line not visible in this listing).
std::string FuncName(unsigned AccessSize, bool IsWrite) {
  return std::string("__asan_report_") + (IsWrite ? "store" : "load") +
// Common base for the 32- and 64-bit AddressSanitizer assembly
// instrumentation: wraps instructions seen by the asm parser with
// shadow-memory checks.  Per-bitness subclasses supply the register
// save/restore prologue/epilogue and the concrete check sequences.
class X86AddressSanitizer : public X86AsmInstrumentation {
  // Register assignment used while one memory operand is instrumented:
  // the address being checked, a register for the shadow value, and an
  // optional scratch register (X86::NoRegister when not needed).
  struct RegisterContext {
    RegisterContext(unsigned AddressReg, unsigned ShadowReg,
        : AddressReg(AddressReg), ShadowReg(ShadowReg), ScratchReg(ScratchReg) {

    // Sub/super-register of AddressReg with width VT (e.g. EAX for RAX).
    unsigned addressReg(MVT::SimpleValueType VT) const {
      return getX86SubSuperRegister(AddressReg, VT);
    unsigned shadowReg(MVT::SimpleValueType VT) const {
      return getX86SubSuperRegister(ShadowReg, VT);
    unsigned scratchReg(MVT::SimpleValueType VT) const {
      return getX86SubSuperRegister(ScratchReg, VT);

    const unsigned AddressReg;
    const unsigned ShadowReg;
    const unsigned ScratchReg;

  X86AddressSanitizer(const MCSubtargetInfo &STI)
      : X86AsmInstrumentation(STI), RepPrefix(false), OrigSPOffset(0) {}

  virtual ~X86AddressSanitizer() {}

  // X86AsmInstrumentation implementation:
  virtual void InstrumentAndEmitInstruction(const MCInst &Inst,
                                            OperandVector &Operands,
                                            const MCInstrInfo &MII,
                                            MCStreamer &Out) override {
    InstrumentMOVS(Inst, Operands, Ctx, MII, Out);
    // Re-emit a REP prefix -- presumably only when the previous instruction
    // was a bare REP prefix (the guard line is missing from this listing;
    // TODO confirm against full source).
    EmitInstruction(Out, MCInstBuilder(X86::REP_PREFIX));
    InstrumentMOV(Inst, Operands, Ctx, MII, Out);
    // Remember whether this instruction itself was just a REP prefix so the
    // next call can reattach it to the instruction it belongs to.
    RepPrefix = (Inst.getOpcode() == X86::REP_PREFIX);
    EmitInstruction(Out, Inst);

  // Adjusts up stack and saves all registers used in instrumentation.
  virtual void InstrumentMemOperandPrologue(const RegisterContext &RegCtx,
                                            MCStreamer &Out) = 0;

  // Restores all registers used in instrumentation and adjusts stack.
  virtual void InstrumentMemOperandEpilogue(const RegisterContext &RegCtx,
                                            MCStreamer &Out) = 0;

  // Check for 1/2/4-byte accesses (byte-granular shadow inspection).
  virtual void InstrumentMemOperandSmall(X86Operand &Op, unsigned AccessSize,
                                         const RegisterContext &RegCtx,
                                         MCContext &Ctx, MCStreamer &Out) = 0;
  // Check for 8/16-byte accesses (whole shadow byte compared against zero).
  virtual void InstrumentMemOperandLarge(X86Operand &Op, unsigned AccessSize,
                                         const RegisterContext &RegCtx,
                                         MCContext &Ctx, MCStreamer &Out) = 0;

  // Per-bitness body of the MOVS instrumentation.
  virtual void InstrumentMOVSImpl(unsigned AccessSize, MCContext &Ctx,
                                  MCStreamer &Out) = 0;

  // Dispatches to InstrumentMemOperandSmall/Large based on AccessSize.
  void InstrumentMemOperand(X86Operand &Op, unsigned AccessSize, bool IsWrite,
                            const RegisterContext &RegCtx, MCContext &Ctx,
  // Instruments the first and last elements of a MOVS src/dst range.
  void InstrumentMOVSBase(unsigned DstReg, unsigned SrcReg, unsigned CntReg,
                          unsigned AccessSize, MCContext &Ctx, MCStreamer &Out);

  void InstrumentMOVS(const MCInst &Inst, OperandVector &Operands,
                      MCContext &Ctx, const MCInstrInfo &MII, MCStreamer &Out);
  void InstrumentMOV(const MCInst &Inst, OperandVector &Operands,
                     MCContext &Ctx, const MCInstrInfo &MII, MCStreamer &Out);

  void EmitLabel(MCStreamer &Out, MCSymbol *Label) { Out.EmitLabel(Label); }

  // Emits LEA32r/LEA64r computing Op's effective address into Reg (width VT).
  void EmitLEA(X86Operand &Op, MVT::SimpleValueType VT, unsigned Reg,
    assert(VT == MVT::i32 || VT == MVT::i64);
    Inst.setOpcode(VT == MVT::i32 ? X86::LEA32r : X86::LEA64r);
    Inst.addOperand(MCOperand::CreateReg(getX86SubSuperRegister(Reg, VT)));
    // 5 = number of MC operands in an X86 memory reference.
    Op.addMemOperands(Inst, 5);
    EmitInstruction(Out, Inst);

  // Materializes Op's address into Reg, compensating for any stack-pointer
  // adjustment made by the prologue (see OrigSPOffset).
  void ComputeMemOperandAddress(X86Operand &Op, MVT::SimpleValueType VT,
                                unsigned Reg, MCContext &Ctx, MCStreamer &Out);

  // Creates new memory operand with Displacement added to an original
  // displacement. Residue will contain a residue which could happen when the
  // total displacement exceeds 32-bit limitation.
  std::unique_ptr<X86Operand> AddDisplacement(X86Operand &Op,
                                              int64_t Displacement,
                                              MCContext &Ctx, int64_t *Residue);

  // True when previous instruction was actually REP prefix.
  // Offset from the original SP register.
  int64_t OrigSPOffset;
// Emits the shadow check for one memory operand, choosing the small
// (1/2/4-byte) or large (8/16-byte) sequence.  The `else` branch line is
// missing from this listing; the two calls are mutually exclusive.
void X86AddressSanitizer::InstrumentMemOperand(
    X86Operand &Op, unsigned AccessSize, bool IsWrite,
    const RegisterContext &RegCtx, MCContext &Ctx, MCStreamer &Out) {
  assert(Op.isMem() && "Op should be a memory operand.");
  assert((AccessSize & (AccessSize - 1)) == 0 && AccessSize <= 16 &&
         "AccessSize should be a power of two, less or equal than 16.");
  // FIXME: take into account load/store alignment.
  if (IsSmallMemAccess(AccessSize))
    InstrumentMemOperandSmall(Op, AccessSize, IsWrite, RegCtx, Ctx, Out);
    InstrumentMemOperandLarge(Op, AccessSize, IsWrite, RegCtx, Ctx, Out);
// Checks the first and last element of both the source (reads) and
// destination (writes) ranges of a MOVS-style copy.  Uses RDX/RAX (plus a
// scratch register for small accesses) as the fixed register context.
void X86AddressSanitizer::InstrumentMOVSBase(unsigned DstReg, unsigned SrcReg,
                                             MCContext &Ctx, MCStreamer &Out) {
  // FIXME: check whole ranges [DstReg .. DstReg + AccessSize * (CntReg - 1)]
  // and [SrcReg .. SrcReg + AccessSize * (CntReg - 1)].
  RegisterContext RegCtx(X86::RDX /* AddressReg */, X86::RAX /* ShadowReg */,
                         IsSmallMemAccess(AccessSize)
                             : X86::NoRegister /* ScratchReg */);

  InstrumentMemOperandPrologue(RegCtx, Ctx, Out);

  // Test (%SrcReg) -- first source element, read.
  const MCExpr *Disp = MCConstantExpr::Create(0, Ctx);
  std::unique_ptr<X86Operand> Op(X86Operand::CreateMem(
      0, Disp, SrcReg, 0, AccessSize, SMLoc(), SMLoc()));
  InstrumentMemOperand(*Op, AccessSize, false /* IsWrite */, RegCtx, Ctx,

  // Test -1(%SrcReg, %CntReg, AccessSize)
  const MCExpr *Disp = MCConstantExpr::Create(-1, Ctx);
  std::unique_ptr<X86Operand> Op(X86Operand::CreateMem(
      0, Disp, SrcReg, CntReg, AccessSize, SMLoc(), SMLoc()));
  InstrumentMemOperand(*Op, AccessSize, false /* IsWrite */, RegCtx, Ctx,

  // Test (%DstReg) -- first destination element, write.
  const MCExpr *Disp = MCConstantExpr::Create(0, Ctx);
  std::unique_ptr<X86Operand> Op(X86Operand::CreateMem(
      0, Disp, DstReg, 0, AccessSize, SMLoc(), SMLoc()));
  InstrumentMemOperand(*Op, AccessSize, true /* IsWrite */, RegCtx, Ctx, Out);

  // Test -1(%DstReg, %CntReg, AccessSize)
  const MCExpr *Disp = MCConstantExpr::Create(-1, Ctx);
  std::unique_ptr<X86Operand> Op(X86Operand::CreateMem(
      0, Disp, DstReg, CntReg, AccessSize, SMLoc(), SMLoc()));
  InstrumentMemOperand(*Op, AccessSize, true /* IsWrite */, RegCtx, Ctx, Out);

  InstrumentMemOperandEpilogue(RegCtx, Ctx, Out);
// Instruments MOVSB/W/D/Q copies.  The opcode -> AccessSize case labels are
// missing from this listing; AccessSize stays 0 unless one of the (absent)
// cases sets it -- presumably the function returns early for other opcodes.
// TODO confirm against full source.
void X86AddressSanitizer::InstrumentMOVS(const MCInst &Inst,
                                         OperandVector &Operands,
                                         MCContext &Ctx, const MCInstrInfo &MII,
  // Access size in bytes.
  unsigned AccessSize = 0;

  switch (Inst.getOpcode()) {
  InstrumentMOVSImpl(AccessSize, Ctx, Out);
// Instruments plain MOV-family loads/stores: determines the access size from
// the opcode (case labels missing from this listing -- TODO confirm), then
// wraps every memory operand with a prologue / check / epilogue triple.
void X86AddressSanitizer::InstrumentMOV(const MCInst &Inst,
                                        OperandVector &Operands, MCContext &Ctx,
                                        const MCInstrInfo &MII,
  // Access size in bytes.
  unsigned AccessSize = 0;

  switch (Inst.getOpcode()) {
  // Writes are detected from the MCInstrDesc, not from the opcode switch.
  const bool IsWrite = MII.get(Inst.getOpcode()).mayStore();
  RegisterContext RegCtx(X86::RDI /* AddressReg */, X86::RAX /* ShadowReg */,
                         IsSmallMemAccess(AccessSize)
                             : X86::NoRegister /* ScratchReg */);

  for (unsigned Ix = 0; Ix < Operands.size(); ++Ix) {
    assert(Operands[Ix]);
    MCParsedAsmOperand &Op = *Operands[Ix];
    // Only memory operands are checked (the isMem() guard line is missing
    // from this listing).
    X86Operand &MemOp = static_cast<X86Operand &>(Op);
    InstrumentMemOperandPrologue(RegCtx, Ctx, Out);
    InstrumentMemOperand(MemOp, AccessSize, IsWrite, RegCtx, Ctx, Out);
    InstrumentMemOperandEpilogue(RegCtx, Ctx, Out);
// Materializes Op's effective address into Reg via LEA.  If the operand is
// SP-relative, compensates for the stack adjustment the instrumentation
// prologue made (tracked in OrigSPOffset).  Displacements that do not fit in
// 32 bits are applied across several chained LEAs.
void X86AddressSanitizer::ComputeMemOperandAddress(X86Operand &Op,
                                                   MVT::SimpleValueType VT,
                                                   unsigned Reg, MCContext &Ctx,
  int64_t Displacement = 0;
  // OrigSPOffset is negative after the prologue's downward adjustment, so
  // subtracting it re-biases SP-relative operands to the original SP.
  if (IsStackReg(Op.getMemBaseReg()))
    Displacement -= OrigSPOffset;
  if (IsStackReg(Op.getMemIndexReg()))
    Displacement -= OrigSPOffset * Op.getMemScale();

  assert(Displacement >= 0);

  // Fast path: no compensation needed, emit a single LEA of Op itself.
  if (Displacement == 0) {
    EmitLEA(Op, VT, Reg, Out);

  std::unique_ptr<X86Operand> NewOp =
      AddDisplacement(Op, Displacement, Ctx, &Residue);
  EmitLEA(*NewOp, VT, Reg, Out);

  // Fold any leftover displacement in 32-bit-sized chunks:
  // Reg = Reg + clamp(Residue).
  while (Residue != 0) {
    const MCConstantExpr *Disp =
        MCConstantExpr::Create(ApplyBounds(Residue), Ctx);
    std::unique_ptr<X86Operand> DispOp =
        X86Operand::CreateMem(0, Disp, Reg, 0, 1, SMLoc(), SMLoc());
    EmitLEA(*DispOp, VT, Reg, Out);
    Residue -= Disp->getValue();
// Returns a copy of Op with Displacement folded into its constant
// displacement, clamped to the signed 32-bit range.  *Residue receives the
// part that did not fit (0 when everything fit).  Non-constant displacement
// expressions are left untouched and the whole Displacement becomes residue.
std::unique_ptr<X86Operand>
X86AddressSanitizer::AddDisplacement(X86Operand &Op, int64_t Displacement,
                                     MCContext &Ctx, int64_t *Residue) {
  assert(Displacement >= 0);

  // Nothing to fold, or the displacement is symbolic: copy Op unchanged.
  if (Displacement == 0 ||
      (Op.getMemDisp() && Op.getMemDisp()->getKind() != MCExpr::Constant)) {
    *Residue = Displacement;
    return X86Operand::CreateMem(Op.getMemSegReg(), Op.getMemDisp(),
                                 Op.getMemBaseReg(), Op.getMemIndexReg(),
                                 Op.getMemScale(), SMLoc(), SMLoc());

  int64_t OrigDisplacement =
      static_cast<const MCConstantExpr *>(Op.getMemDisp())->getValue();
  assert(InBounds(OrigDisplacement));
  Displacement += OrigDisplacement;

  int64_t NewDisplacement = ApplyBounds(Displacement);
  assert(InBounds(NewDisplacement));

  *Residue = Displacement - NewDisplacement;
  const MCExpr *Disp = MCConstantExpr::Create(NewDisplacement, Ctx);
  return X86Operand::CreateMem(Op.getMemSegReg(), Disp, Op.getMemBaseReg(),
                               Op.getMemIndexReg(), Op.getMemScale(), SMLoc(),
// 32-bit (x86) flavour: 0x20000000 shadow offset, 32-bit push/pop and flag
// save/restore, and EBP-based CFI bookkeeping around the spills.
class X86AddressSanitizer32 : public X86AddressSanitizer {
  // ASan shadow base for 32-bit targets: shadow = (addr >> 3) + 0x20000000.
  static const long kShadowOffset = 0x20000000;

  X86AddressSanitizer32(const MCSubtargetInfo &STI)
      : X86AddressSanitizer(STI) {}

  virtual ~X86AddressSanitizer32() {}

  // Current CFA register narrowed to 32 bits, or NoRegister when there is no
  // usable dwarf frame (early-return line missing from this listing).
  unsigned GetFrameReg(const MCContext &Ctx, MCStreamer &Out) {
    unsigned FrameReg = GetFrameRegGeneric(Ctx, Out);
    if (FrameReg == X86::NoRegister)
    return getX86SubSuperRegister(FrameReg, MVT::i32);

  void SpillReg(MCStreamer &Out, unsigned Reg) {
    EmitInstruction(Out, MCInstBuilder(X86::PUSH32r).addReg(Reg));

  void RestoreReg(MCStreamer &Out, unsigned Reg) {
    EmitInstruction(Out, MCInstBuilder(X86::POP32r).addReg(Reg));

  void StoreFlags(MCStreamer &Out) {
    EmitInstruction(Out, MCInstBuilder(X86::PUSHF32));

  void RestoreFlags(MCStreamer &Out) {
    EmitInstruction(Out, MCInstBuilder(X86::POPF32));

  // Spills EBP plus the context registers and emits matching CFI so
  // unwinding through the instrumented sequence stays correct.
  virtual void InstrumentMemOperandPrologue(const RegisterContext &RegCtx,
                                            MCStreamer &Out) override {
    const MCRegisterInfo *MRI = Ctx.getRegisterInfo();
    unsigned FrameReg = GetFrameReg(Ctx, Out);
    if (MRI && FrameReg != X86::NoRegister) {
      SpillReg(Out, X86::EBP);
      if (FrameReg == X86::ESP) {
        Out.EmitCFIAdjustCfaOffset(4 /* byte size of the FrameReg */);
        Out.EmitCFIRelOffset(MRI->getDwarfRegNum(X86::EBP, true /* IsEH */), 0);
          Out, MCInstBuilder(X86::MOV32rr).addReg(X86::EBP).addReg(FrameReg));
      Out.EmitCFIRememberState();
      Out.EmitCFIDefCfaRegister(MRI->getDwarfRegNum(X86::EBP, true /* IsEH */));

    SpillReg(Out, RegCtx.addressReg(MVT::i32));
    SpillReg(Out, RegCtx.shadowReg(MVT::i32));
    if (RegCtx.ScratchReg != X86::NoRegister)
      SpillReg(Out, RegCtx.scratchReg(MVT::i32));

  // Mirror image of the prologue: restores in reverse spill order.
  virtual void InstrumentMemOperandEpilogue(const RegisterContext &RegCtx,
                                            MCStreamer &Out) override {
    if (RegCtx.ScratchReg != X86::NoRegister)
      RestoreReg(Out, RegCtx.scratchReg(MVT::i32));
    RestoreReg(Out, RegCtx.shadowReg(MVT::i32));
    RestoreReg(Out, RegCtx.addressReg(MVT::i32));

    unsigned FrameReg = GetFrameReg(Ctx, Out);
    if (Ctx.getRegisterInfo() && FrameReg != X86::NoRegister) {
      RestoreReg(Out, X86::EBP);
      Out.EmitCFIRestoreState();
      if (FrameReg == X86::ESP)
        Out.EmitCFIAdjustCfaOffset(-4 /* byte size of the FrameReg */);

  virtual void InstrumentMemOperandSmall(X86Operand &Op, unsigned AccessSize,
                                         const RegisterContext &RegCtx,
                                         MCStreamer &Out) override;
  virtual void InstrumentMemOperandLarge(X86Operand &Op, unsigned AccessSize,
                                         const RegisterContext &RegCtx,
                                         MCStreamer &Out) override;
  virtual void InstrumentMOVSImpl(unsigned AccessSize, MCContext &Ctx,
                                  MCStreamer &Out) override;

  // Calls __asan_report_(load|store)<N>@PLT with the faulting address
  // pushed as the (cdecl) argument.  CLD/EMMS normalize direction flag and
  // MMX state before entering compiler-rt.
  void EmitCallAsanReport(unsigned AccessSize, bool IsWrite, MCContext &Ctx,
                          MCStreamer &Out, const RegisterContext &RegCtx) {
    EmitInstruction(Out, MCInstBuilder(X86::CLD));
    EmitInstruction(Out, MCInstBuilder(X86::MMX_EMMS));

    EmitInstruction(Out, MCInstBuilder(X86::AND64ri8)
        Out, MCInstBuilder(X86::PUSH32r).addReg(RegCtx.addressReg(MVT::i32)));

    const std::string &Fn = FuncName(AccessSize, IsWrite);
    MCSymbol *FnSym = Ctx.GetOrCreateSymbol(StringRef(Fn));
    const MCSymbolRefExpr *FnExpr =
        MCSymbolRefExpr::Create(FnSym, MCSymbolRefExpr::VK_PLT, Ctx);
    EmitInstruction(Out, MCInstBuilder(X86::CALLpcrel32).addExpr(FnExpr));
// 32-bit check for 1/2/4-byte accesses:
//   shadow = *(int8_t *)((addr >> shift) + kShadowOffset); if (!shadow) ok;
//   else compare (addr & 7) + AccessSize - 1 against shadow; report if >=.
// (The shift immediate and several connective lines are missing from this
// listing -- TODO confirm exact constants against full source.)
void X86AddressSanitizer32::InstrumentMemOperandSmall(
    X86Operand &Op, unsigned AccessSize, bool IsWrite,
    const RegisterContext &RegCtx, MCContext &Ctx, MCStreamer &Out) {
  unsigned AddressRegI32 = RegCtx.addressReg(MVT::i32);
  unsigned ShadowRegI32 = RegCtx.shadowReg(MVT::i32);
  unsigned ShadowRegI8 = RegCtx.shadowReg(MVT::i8);

  assert(RegCtx.ScratchReg != X86::NoRegister);
  unsigned ScratchRegI32 = RegCtx.scratchReg(MVT::i32);

  ComputeMemOperandAddress(Op, MVT::i32, AddressRegI32, Ctx, Out);

  // ShadowReg = AddressReg >> shadow-scale.
  EmitInstruction(Out, MCInstBuilder(X86::MOV32rr).addReg(ShadowRegI32).addReg(
  EmitInstruction(Out, MCInstBuilder(X86::SHR32ri)
                           .addReg(ShadowRegI32)
                           .addReg(ShadowRegI32)

    // Load the shadow byte: MOV8rm ShadowRegI8, kShadowOffset(ShadowRegI32).
    Inst.setOpcode(X86::MOV8rm);
    Inst.addOperand(MCOperand::CreateReg(ShadowRegI8));
    const MCExpr *Disp = MCConstantExpr::Create(kShadowOffset, Ctx);
    std::unique_ptr<X86Operand> Op(
        X86Operand::CreateMem(0, Disp, ShadowRegI32, 0, 1, SMLoc(), SMLoc()));
    Op->addMemOperands(Inst, 5);
    EmitInstruction(Out, Inst);

  // Zero shadow byte => whole 8-byte granule addressable => done.
      Out, MCInstBuilder(X86::TEST8rr).addReg(ShadowRegI8).addReg(ShadowRegI8));
  MCSymbol *DoneSym = Ctx.CreateTempSymbol();
  const MCExpr *DoneExpr = MCSymbolRefExpr::Create(DoneSym, Ctx);
  EmitInstruction(Out, MCInstBuilder(X86::JE_4).addExpr(DoneExpr));

  // Scratch = offset of the access's last byte within its 8-byte granule.
  EmitInstruction(Out, MCInstBuilder(X86::MOV32rr).addReg(ScratchRegI32).addReg(
  EmitInstruction(Out, MCInstBuilder(X86::AND32ri)
                           .addReg(ScratchRegI32)
                           .addReg(ScratchRegI32)

  switch (AccessSize) {
    // 2-byte access: last byte is at offset + 1 (LEA avoids clobbering
    // flags the same way ADD would).
    const MCExpr *Disp = MCConstantExpr::Create(1, Ctx);
    std::unique_ptr<X86Operand> Op(
        X86Operand::CreateMem(0, Disp, ScratchRegI32, 0, 1, SMLoc(), SMLoc()));
    EmitLEA(*Op, MVT::i32, ScratchRegI32, Out);
    EmitInstruction(Out, MCInstBuilder(X86::ADD32ri8)
                             .addReg(ScratchRegI32)
                             .addReg(ScratchRegI32)
    assert(false && "Incorrect access size");

  // Report unless last accessed byte < shadow value (partially-addressable
  // granule rule).
      MCInstBuilder(X86::MOVSX32rr8).addReg(ShadowRegI32).addReg(ShadowRegI8));
  EmitInstruction(Out, MCInstBuilder(X86::CMP32rr).addReg(ScratchRegI32).addReg(
  EmitInstruction(Out, MCInstBuilder(X86::JL_4).addExpr(DoneExpr));

  EmitCallAsanReport(AccessSize, IsWrite, Ctx, Out, RegCtx);
  EmitLabel(Out, DoneSym);
// 32-bit check for 8/16-byte accesses: compare the shadow byte (8-byte
// access, CMP8mi) or shadow word (16-byte access, CMP16mi) against zero;
// any nonzero shadow triggers the report call.
void X86AddressSanitizer32::InstrumentMemOperandLarge(
    X86Operand &Op, unsigned AccessSize, bool IsWrite,
    const RegisterContext &RegCtx, MCContext &Ctx, MCStreamer &Out) {
  unsigned AddressRegI32 = RegCtx.addressReg(MVT::i32);
  unsigned ShadowRegI32 = RegCtx.shadowReg(MVT::i32);

  ComputeMemOperandAddress(Op, MVT::i32, AddressRegI32, Ctx, Out);

  // ShadowReg = AddressReg >> shadow-scale (shift immediate missing here).
  EmitInstruction(Out, MCInstBuilder(X86::MOV32rr).addReg(ShadowRegI32).addReg(
  EmitInstruction(Out, MCInstBuilder(X86::SHR32ri)
                           .addReg(ShadowRegI32)
                           .addReg(ShadowRegI32)

  switch (AccessSize) {
    Inst.setOpcode(X86::CMP8mi);
    Inst.setOpcode(X86::CMP16mi);
    assert(false && "Incorrect access size");

  // CMPxmi kShadowOffset(ShadowRegI32), 0
  const MCExpr *Disp = MCConstantExpr::Create(kShadowOffset, Ctx);
  std::unique_ptr<X86Operand> Op(
      X86Operand::CreateMem(0, Disp, ShadowRegI32, 0, 1, SMLoc(), SMLoc()));
  Op->addMemOperands(Inst, 5);
  Inst.addOperand(MCOperand::CreateImm(0));
  EmitInstruction(Out, Inst);

  MCSymbol *DoneSym = Ctx.CreateTempSymbol();
  const MCExpr *DoneExpr = MCSymbolRefExpr::Create(DoneSym, Ctx);
  EmitInstruction(Out, MCInstBuilder(X86::JE_4).addExpr(DoneExpr));

  EmitCallAsanReport(AccessSize, IsWrite, Ctx, Out, RegCtx);
  EmitLabel(Out, DoneSym);
// 32-bit MOVS instrumentation: skip entirely when the count (ECX) is zero,
// otherwise check the first/last elements of the ESI->EDI ranges.
void X86AddressSanitizer32::InstrumentMOVSImpl(unsigned AccessSize,

  // No need to test when ECX is equals to zero.
  MCSymbol *DoneSym = Ctx.CreateTempSymbol();
  const MCExpr *DoneExpr = MCSymbolRefExpr::Create(DoneSym, Ctx);
      Out, MCInstBuilder(X86::TEST32rr).addReg(X86::ECX).addReg(X86::ECX));
  EmitInstruction(Out, MCInstBuilder(X86::JE_4).addExpr(DoneExpr));

  // Instrument first and last elements in src and dst range.
  InstrumentMOVSBase(X86::EDI /* DstReg */, X86::ESI /* SrcReg */,
                     X86::ECX /* CntReg */, AccessSize, Ctx, Out);

  EmitLabel(Out, DoneSym);
// 64-bit (x86_64) flavour: 0x7fff8000 shadow offset, 64-bit push/pop and
// flag save/restore, RBP-based CFI bookkeeping, and a 128-byte RSP
// adjustment so the spills stay clear of the red zone.
class X86AddressSanitizer64 : public X86AddressSanitizer {
  // ASan shadow base for x86_64: shadow = (addr >> 3) + 0x7fff8000.
  static const long kShadowOffset = 0x7fff8000;

  X86AddressSanitizer64(const MCSubtargetInfo &STI)
      : X86AddressSanitizer(STI) {}

  virtual ~X86AddressSanitizer64() {}

  // Current CFA register widened to 64 bits, or NoRegister when there is no
  // usable dwarf frame (early-return line missing from this listing).
  unsigned GetFrameReg(const MCContext &Ctx, MCStreamer &Out) {
    unsigned FrameReg = GetFrameRegGeneric(Ctx, Out);
    if (FrameReg == X86::NoRegister)
    return getX86SubSuperRegister(FrameReg, MVT::i64);

  void SpillReg(MCStreamer &Out, unsigned Reg) {
    EmitInstruction(Out, MCInstBuilder(X86::PUSH64r).addReg(Reg));

  void RestoreReg(MCStreamer &Out, unsigned Reg) {
    EmitInstruction(Out, MCInstBuilder(X86::POP64r).addReg(Reg));

  void StoreFlags(MCStreamer &Out) {
    EmitInstruction(Out, MCInstBuilder(X86::PUSHF64));

  void RestoreFlags(MCStreamer &Out) {
    EmitInstruction(Out, MCInstBuilder(X86::POPF64));

  // Spills RBP plus the context registers (after stepping RSP down past the
  // red zone) and emits matching CFI.
  virtual void InstrumentMemOperandPrologue(const RegisterContext &RegCtx,
                                            MCStreamer &Out) override {
    const MCRegisterInfo *MRI = Ctx.getRegisterInfo();
    unsigned FrameReg = GetFrameReg(Ctx, Out);
    if (MRI && FrameReg != X86::NoRegister) {
      SpillReg(Out, X86::RBP);
      if (FrameReg == X86::RSP) {
        Out.EmitCFIAdjustCfaOffset(8 /* byte size of the FrameReg */);
        Out.EmitCFIRelOffset(MRI->getDwarfRegNum(X86::RBP, true /* IsEH */), 0);
          Out, MCInstBuilder(X86::MOV64rr).addReg(X86::RBP).addReg(FrameReg));
      Out.EmitCFIRememberState();
      Out.EmitCFIDefCfaRegister(MRI->getDwarfRegNum(X86::RBP, true /* IsEH */));

    // 128 bytes = System V AMD64 red-zone size.
    EmitAdjustRSP(Ctx, Out, -128);
    SpillReg(Out, RegCtx.shadowReg(MVT::i64));
    SpillReg(Out, RegCtx.addressReg(MVT::i64));
    if (RegCtx.ScratchReg != X86::NoRegister)
      SpillReg(Out, RegCtx.scratchReg(MVT::i64));

  // Mirror image of the prologue: restores in reverse spill order, then
  // steps RSP back up.
  virtual void InstrumentMemOperandEpilogue(const RegisterContext &RegCtx,
                                            MCStreamer &Out) override {
    if (RegCtx.ScratchReg != X86::NoRegister)
      RestoreReg(Out, RegCtx.scratchReg(MVT::i64));
    RestoreReg(Out, RegCtx.addressReg(MVT::i64));
    RestoreReg(Out, RegCtx.shadowReg(MVT::i64));
    EmitAdjustRSP(Ctx, Out, 128);

    unsigned FrameReg = GetFrameReg(Ctx, Out);
    if (Ctx.getRegisterInfo() && FrameReg != X86::NoRegister) {
      RestoreReg(Out, X86::RBP);
      Out.EmitCFIRestoreState();
      if (FrameReg == X86::RSP)
        Out.EmitCFIAdjustCfaOffset(-8 /* byte size of the FrameReg */);

  virtual void InstrumentMemOperandSmall(X86Operand &Op, unsigned AccessSize,
                                         const RegisterContext &RegCtx,
                                         MCStreamer &Out) override;
  virtual void InstrumentMemOperandLarge(X86Operand &Op, unsigned AccessSize,
                                         const RegisterContext &RegCtx,
                                         MCStreamer &Out) override;
  virtual void InstrumentMOVSImpl(unsigned AccessSize, MCContext &Ctx,
                                  MCStreamer &Out) override;

  // RSP += Offset via LEA, and track the cumulative change in OrigSPOffset
  // so ComputeMemOperandAddress can rebias SP-relative operands.
  void EmitAdjustRSP(MCContext &Ctx, MCStreamer &Out, long Offset) {
    const MCExpr *Disp = MCConstantExpr::Create(Offset, Ctx);
    std::unique_ptr<X86Operand> Op(
        X86Operand::CreateMem(0, Disp, X86::RSP, 0, 1, SMLoc(), SMLoc()));
    EmitLEA(*Op, MVT::i64, X86::RSP, Out);
    OrigSPOffset += Offset;

  // Calls __asan_report_(load|store)<N>@PLT with the faulting address moved
  // into RDI (first SysV argument register) unless it is already there.
  void EmitCallAsanReport(unsigned AccessSize, bool IsWrite, MCContext &Ctx,
                          MCStreamer &Out, const RegisterContext &RegCtx) {
    EmitInstruction(Out, MCInstBuilder(X86::CLD));
    EmitInstruction(Out, MCInstBuilder(X86::MMX_EMMS));

    EmitInstruction(Out, MCInstBuilder(X86::AND64ri8)

    if (RegCtx.AddressReg != X86::RDI) {
      EmitInstruction(Out, MCInstBuilder(X86::MOV64rr).addReg(X86::RDI).addReg(
          RegCtx.addressReg(MVT::i64)));
    const std::string &Fn = FuncName(AccessSize, IsWrite);
    MCSymbol *FnSym = Ctx.GetOrCreateSymbol(StringRef(Fn));
    const MCSymbolRefExpr *FnExpr =
        MCSymbolRefExpr::Create(FnSym, MCSymbolRefExpr::VK_PLT, Ctx);
    EmitInstruction(Out, MCInstBuilder(X86::CALL64pcrel32).addExpr(FnExpr));
// 64-bit check for 1/2/4-byte accesses; same shape as the 32-bit version:
//   shadow = *(int8_t *)((addr >> shift) + kShadowOffset); if (!shadow) ok;
//   else compare (addr & 7) + AccessSize - 1 against shadow; report if >=.
// (Shift/AND immediates and some connective lines are missing from this
// listing -- TODO confirm exact constants against full source.)
void X86AddressSanitizer64::InstrumentMemOperandSmall(
    X86Operand &Op, unsigned AccessSize, bool IsWrite,
    const RegisterContext &RegCtx, MCContext &Ctx, MCStreamer &Out) {
  unsigned AddressRegI64 = RegCtx.addressReg(MVT::i64);
  unsigned AddressRegI32 = RegCtx.addressReg(MVT::i32);
  unsigned ShadowRegI64 = RegCtx.shadowReg(MVT::i64);
  unsigned ShadowRegI32 = RegCtx.shadowReg(MVT::i32);
  unsigned ShadowRegI8 = RegCtx.shadowReg(MVT::i8);

  assert(RegCtx.ScratchReg != X86::NoRegister);
  unsigned ScratchRegI32 = RegCtx.scratchReg(MVT::i32);

  ComputeMemOperandAddress(Op, MVT::i64, AddressRegI64, Ctx, Out);

  // ShadowReg = AddressReg >> shadow-scale.
  EmitInstruction(Out, MCInstBuilder(X86::MOV64rr).addReg(ShadowRegI64).addReg(
  EmitInstruction(Out, MCInstBuilder(X86::SHR64ri)
                           .addReg(ShadowRegI64)
                           .addReg(ShadowRegI64)

    // Load the shadow byte: MOV8rm ShadowRegI8, kShadowOffset(ShadowRegI64).
    Inst.setOpcode(X86::MOV8rm);
    Inst.addOperand(MCOperand::CreateReg(ShadowRegI8));
    const MCExpr *Disp = MCConstantExpr::Create(kShadowOffset, Ctx);
    std::unique_ptr<X86Operand> Op(
        X86Operand::CreateMem(0, Disp, ShadowRegI64, 0, 1, SMLoc(), SMLoc()));
    Op->addMemOperands(Inst, 5);
    EmitInstruction(Out, Inst);

  // Zero shadow byte => whole 8-byte granule addressable => done.
      Out, MCInstBuilder(X86::TEST8rr).addReg(ShadowRegI8).addReg(ShadowRegI8));
  MCSymbol *DoneSym = Ctx.CreateTempSymbol();
  const MCExpr *DoneExpr = MCSymbolRefExpr::Create(DoneSym, Ctx);
  EmitInstruction(Out, MCInstBuilder(X86::JE_4).addExpr(DoneExpr));

  // Scratch = offset of the access's last byte within its 8-byte granule.
  EmitInstruction(Out, MCInstBuilder(X86::MOV32rr).addReg(ScratchRegI32).addReg(
  EmitInstruction(Out, MCInstBuilder(X86::AND32ri)
                           .addReg(ScratchRegI32)
                           .addReg(ScratchRegI32)

  switch (AccessSize) {
    // 2-byte access: last byte is at offset + 1 (LEA keeps EFLAGS intact
    // relative to the preceding TEST-based control flow).
    const MCExpr *Disp = MCConstantExpr::Create(1, Ctx);
    std::unique_ptr<X86Operand> Op(
        X86Operand::CreateMem(0, Disp, ScratchRegI32, 0, 1, SMLoc(), SMLoc()));
    EmitLEA(*Op, MVT::i32, ScratchRegI32, Out);
    EmitInstruction(Out, MCInstBuilder(X86::ADD32ri8)
                             .addReg(ScratchRegI32)
                             .addReg(ScratchRegI32)
    assert(false && "Incorrect access size");

  // Report unless last accessed byte < shadow value.
      MCInstBuilder(X86::MOVSX32rr8).addReg(ShadowRegI32).addReg(ShadowRegI8));
  EmitInstruction(Out, MCInstBuilder(X86::CMP32rr).addReg(ScratchRegI32).addReg(
  EmitInstruction(Out, MCInstBuilder(X86::JL_4).addExpr(DoneExpr));

  EmitCallAsanReport(AccessSize, IsWrite, Ctx, Out, RegCtx);
  EmitLabel(Out, DoneSym);
// 64-bit check for 8/16-byte accesses: compare the shadow byte (CMP8mi for
// 8-byte) or shadow word (CMP16mi for 16-byte) against zero; any nonzero
// shadow triggers the report call.
void X86AddressSanitizer64::InstrumentMemOperandLarge(
    X86Operand &Op, unsigned AccessSize, bool IsWrite,
    const RegisterContext &RegCtx, MCContext &Ctx, MCStreamer &Out) {
  unsigned AddressRegI64 = RegCtx.addressReg(MVT::i64);
  unsigned ShadowRegI64 = RegCtx.shadowReg(MVT::i64);

  ComputeMemOperandAddress(Op, MVT::i64, AddressRegI64, Ctx, Out);

  // ShadowReg = AddressReg >> shadow-scale (shift immediate missing here).
  EmitInstruction(Out, MCInstBuilder(X86::MOV64rr).addReg(ShadowRegI64).addReg(
  EmitInstruction(Out, MCInstBuilder(X86::SHR64ri)
                           .addReg(ShadowRegI64)
                           .addReg(ShadowRegI64)

  switch (AccessSize) {
    Inst.setOpcode(X86::CMP8mi);
    Inst.setOpcode(X86::CMP16mi);
    assert(false && "Incorrect access size");

  // CMPxmi kShadowOffset(ShadowRegI64), 0
  const MCExpr *Disp = MCConstantExpr::Create(kShadowOffset, Ctx);
  std::unique_ptr<X86Operand> Op(
      X86Operand::CreateMem(0, Disp, ShadowRegI64, 0, 1, SMLoc(), SMLoc()));
  Op->addMemOperands(Inst, 5);
  Inst.addOperand(MCOperand::CreateImm(0));
  EmitInstruction(Out, Inst);

  MCSymbol *DoneSym = Ctx.CreateTempSymbol();
  const MCExpr *DoneExpr = MCSymbolRefExpr::Create(DoneSym, Ctx);
  EmitInstruction(Out, MCInstBuilder(X86::JE_4).addExpr(DoneExpr));

  EmitCallAsanReport(AccessSize, IsWrite, Ctx, Out, RegCtx);
  EmitLabel(Out, DoneSym);
// 64-bit MOVS instrumentation: skip entirely when the count (RCX) is zero,
// otherwise check the first/last elements of the RSI->RDI ranges.
void X86AddressSanitizer64::InstrumentMOVSImpl(unsigned AccessSize,

  // No need to test when RCX is equals to zero.
  MCSymbol *DoneSym = Ctx.CreateTempSymbol();
  const MCExpr *DoneExpr = MCSymbolRefExpr::Create(DoneSym, Ctx);
      Out, MCInstBuilder(X86::TEST64rr).addReg(X86::RCX).addReg(X86::RCX));
  EmitInstruction(Out, MCInstBuilder(X86::JE_4).addExpr(DoneExpr));

  // Instrument first and last elements in src and dst range.
  InstrumentMOVSBase(X86::RDI /* DstReg */, X86::RSI /* SrcReg */,
                     X86::RCX /* CntReg */, AccessSize, Ctx, Out);

  EmitLabel(Out, DoneSym);
890 } // End anonymous namespace
// Base (no-op) instrumentation: remembers the subtarget; InitialFrameReg 0
// means "derive the frame register from the dwarf frame" (see
// GetFrameRegGeneric).
X86AsmInstrumentation::X86AsmInstrumentation(const MCSubtargetInfo &STI)
    : STI(STI), InitialFrameReg(0) {}
// Out-of-line virtual destructor anchors the vtable in this translation unit.
X86AsmInstrumentation::~X86AsmInstrumentation() {}
// Default behavior when ASan instrumentation is disabled: emit the
// instruction unchanged.
void X86AsmInstrumentation::InstrumentAndEmitInstruction(
    const MCInst &Inst, OperandVector &Operands, MCContext &Ctx,
    const MCInstrInfo &MII, MCStreamer &Out) {
  EmitInstruction(Out, Inst);
// Forwards to the streamer using the stored subtarget info.
void X86AsmInstrumentation::EmitInstruction(MCStreamer &Out,
                                            const MCInst &Inst) {
  Out.EmitInstruction(Inst, STI);
// Returns the LLVM register currently acting as the CFA register of the
// innermost open dwarf frame, an explicitly-set InitialFrameReg, or
// X86::NoRegister when no usable frame/register info exists.
unsigned X86AsmInstrumentation::GetFrameRegGeneric(const MCContext &Ctx,
  if (!Out.getNumFrameInfos()) // No active dwarf frame
    return X86::NoRegister;
  const MCDwarfFrameInfo &Frame = Out.getDwarfFrameInfos().back();
  if (Frame.End) // Active dwarf frame is closed
    return X86::NoRegister;
  const MCRegisterInfo *MRI = Ctx.getRegisterInfo();
  if (!MRI) // No register info
    return X86::NoRegister;

  if (InitialFrameReg) {
    // FrameReg is set explicitly, we're instrumenting a MachineFunction.
    return InitialFrameReg;

  // Otherwise map the frame's dwarf CFA register back to an LLVM register.
  return MRI->getLLVMRegNum(Frame.CurrentCfaRegister, true /* IsEH */);
// Factory: returns an ASan-instrumenting implementation when the
// -asan-instrument-assembly flag is set, the target OS has compiler-rt
// support (Linux only here), and -fsanitize=address is active; otherwise a
// pass-through X86AsmInstrumentation.  Caller owns the returned object.
X86AsmInstrumentation *
CreateX86AsmInstrumentation(const MCTargetOptions &MCOptions,
                            const MCContext &Ctx, const MCSubtargetInfo &STI) {
  Triple T(STI.getTargetTriple());
  const bool hasCompilerRTSupport = T.isOSLinux();
  if (ClAsanInstrumentAssembly && hasCompilerRTSupport &&
      MCOptions.SanitizeAddress) {
    if ((STI.getFeatureBits() & X86::Mode32Bit) != 0)
      return new X86AddressSanitizer32(STI);
    if ((STI.getFeatureBits() & X86::Mode64Bit) != 0)
      return new X86AddressSanitizer64(STI);
  return new X86AsmInstrumentation(STI);
942 } // End llvm namespace