+// Map an ARM target builtin ID to the shared CodeGenFunction::MSVCIntrin
+// enumerator handled by the common MSVC-intrinsic emitter. Returns None for
+// any builtin not listed here, so the caller can fall through to other
+// (non-MSVC) handling.
+//
+// The _acq/_rel/_nf suffixes select the MSVC acquire / release / no-fence
+// memory-ordering variants of the interlocked operations.
+//
+// NOTE(review): the un-suffixed 8/16/32-bit forms (e.g. _InterlockedAnd) are
+// absent here — presumably they are handled by target-independent builtin
+// code and only the 64-bit and ordering-suffixed forms need target mapping;
+// confirm against the caller.
+static Optional<CodeGenFunction::MSVCIntrin>
+translateArmToMsvcIntrin(unsigned BuiltinID) {
+ using MSVCIntrin = CodeGenFunction::MSVCIntrin;
+ switch (BuiltinID) {
+ default:
+ return None;
+ case ARM::BI_BitScanForward:
+ case ARM::BI_BitScanForward64:
+ return MSVCIntrin::_BitScanForward;
+ case ARM::BI_BitScanReverse:
+ case ARM::BI_BitScanReverse64:
+ return MSVCIntrin::_BitScanReverse;
+ case ARM::BI_InterlockedAnd64:
+ return MSVCIntrin::_InterlockedAnd;
+ case ARM::BI_InterlockedExchange64:
+ return MSVCIntrin::_InterlockedExchange;
+ case ARM::BI_InterlockedExchangeAdd64:
+ return MSVCIntrin::_InterlockedExchangeAdd;
+ case ARM::BI_InterlockedExchangeSub64:
+ return MSVCIntrin::_InterlockedExchangeSub;
+ case ARM::BI_InterlockedOr64:
+ return MSVCIntrin::_InterlockedOr;
+ case ARM::BI_InterlockedXor64:
+ return MSVCIntrin::_InterlockedXor;
+ case ARM::BI_InterlockedDecrement64:
+ return MSVCIntrin::_InterlockedDecrement;
+ case ARM::BI_InterlockedIncrement64:
+ return MSVCIntrin::_InterlockedIncrement;
+ case ARM::BI_InterlockedExchangeAdd8_acq:
+ case ARM::BI_InterlockedExchangeAdd16_acq:
+ case ARM::BI_InterlockedExchangeAdd_acq:
+ case ARM::BI_InterlockedExchangeAdd64_acq:
+ return MSVCIntrin::_InterlockedExchangeAdd_acq;
+ case ARM::BI_InterlockedExchangeAdd8_rel:
+ case ARM::BI_InterlockedExchangeAdd16_rel:
+ case ARM::BI_InterlockedExchangeAdd_rel:
+ case ARM::BI_InterlockedExchangeAdd64_rel:
+ return MSVCIntrin::_InterlockedExchangeAdd_rel;
+ case ARM::BI_InterlockedExchangeAdd8_nf:
+ case ARM::BI_InterlockedExchangeAdd16_nf:
+ case ARM::BI_InterlockedExchangeAdd_nf:
+ case ARM::BI_InterlockedExchangeAdd64_nf:
+ return MSVCIntrin::_InterlockedExchangeAdd_nf;
+ case ARM::BI_InterlockedExchange8_acq:
+ case ARM::BI_InterlockedExchange16_acq:
+ case ARM::BI_InterlockedExchange_acq:
+ case ARM::BI_InterlockedExchange64_acq:
+ return MSVCIntrin::_InterlockedExchange_acq;
+ case ARM::BI_InterlockedExchange8_rel:
+ case ARM::BI_InterlockedExchange16_rel:
+ case ARM::BI_InterlockedExchange_rel:
+ case ARM::BI_InterlockedExchange64_rel:
+ return MSVCIntrin::_InterlockedExchange_rel;
+ case ARM::BI_InterlockedExchange8_nf:
+ case ARM::BI_InterlockedExchange16_nf:
+ case ARM::BI_InterlockedExchange_nf:
+ case ARM::BI_InterlockedExchange64_nf:
+ return MSVCIntrin::_InterlockedExchange_nf;
+ case ARM::BI_InterlockedCompareExchange8_acq:
+ case ARM::BI_InterlockedCompareExchange16_acq:
+ case ARM::BI_InterlockedCompareExchange_acq:
+ case ARM::BI_InterlockedCompareExchange64_acq:
+ return MSVCIntrin::_InterlockedCompareExchange_acq;
+ case ARM::BI_InterlockedCompareExchange8_rel:
+ case ARM::BI_InterlockedCompareExchange16_rel:
+ case ARM::BI_InterlockedCompareExchange_rel:
+ case ARM::BI_InterlockedCompareExchange64_rel:
+ return MSVCIntrin::_InterlockedCompareExchange_rel;
+ case ARM::BI_InterlockedCompareExchange8_nf:
+ case ARM::BI_InterlockedCompareExchange16_nf:
+ case ARM::BI_InterlockedCompareExchange_nf:
+ case ARM::BI_InterlockedCompareExchange64_nf:
+ return MSVCIntrin::_InterlockedCompareExchange_nf;
+ case ARM::BI_InterlockedOr8_acq:
+ case ARM::BI_InterlockedOr16_acq:
+ case ARM::BI_InterlockedOr_acq:
+ case ARM::BI_InterlockedOr64_acq:
+ return MSVCIntrin::_InterlockedOr_acq;
+ case ARM::BI_InterlockedOr8_rel:
+ case ARM::BI_InterlockedOr16_rel:
+ case ARM::BI_InterlockedOr_rel:
+ case ARM::BI_InterlockedOr64_rel:
+ return MSVCIntrin::_InterlockedOr_rel;
+ case ARM::BI_InterlockedOr8_nf:
+ case ARM::BI_InterlockedOr16_nf:
+ case ARM::BI_InterlockedOr_nf:
+ case ARM::BI_InterlockedOr64_nf:
+ return MSVCIntrin::_InterlockedOr_nf;
+ case ARM::BI_InterlockedXor8_acq:
+ case ARM::BI_InterlockedXor16_acq:
+ case ARM::BI_InterlockedXor_acq:
+ case ARM::BI_InterlockedXor64_acq:
+ return MSVCIntrin::_InterlockedXor_acq;
+ case ARM::BI_InterlockedXor8_rel:
+ case ARM::BI_InterlockedXor16_rel:
+ case ARM::BI_InterlockedXor_rel:
+ case ARM::BI_InterlockedXor64_rel:
+ return MSVCIntrin::_InterlockedXor_rel;
+ case ARM::BI_InterlockedXor8_nf:
+ case ARM::BI_InterlockedXor16_nf:
+ case ARM::BI_InterlockedXor_nf:
+ case ARM::BI_InterlockedXor64_nf:
+ return MSVCIntrin::_InterlockedXor_nf;
+ case ARM::BI_InterlockedAnd8_acq:
+ case ARM::BI_InterlockedAnd16_acq:
+ case ARM::BI_InterlockedAnd_acq:
+ case ARM::BI_InterlockedAnd64_acq:
+ return MSVCIntrin::_InterlockedAnd_acq;
+ case ARM::BI_InterlockedAnd8_rel:
+ case ARM::BI_InterlockedAnd16_rel:
+ case ARM::BI_InterlockedAnd_rel:
+ case ARM::BI_InterlockedAnd64_rel:
+ return MSVCIntrin::_InterlockedAnd_rel;
+ case ARM::BI_InterlockedAnd8_nf:
+ case ARM::BI_InterlockedAnd16_nf:
+ case ARM::BI_InterlockedAnd_nf:
+ case ARM::BI_InterlockedAnd64_nf:
+ return MSVCIntrin::_InterlockedAnd_nf;
+ // Increment/Decrement have no 8-bit variants in the case lists below.
+ case ARM::BI_InterlockedIncrement16_acq:
+ case ARM::BI_InterlockedIncrement_acq:
+ case ARM::BI_InterlockedIncrement64_acq:
+ return MSVCIntrin::_InterlockedIncrement_acq;
+ case ARM::BI_InterlockedIncrement16_rel:
+ case ARM::BI_InterlockedIncrement_rel:
+ case ARM::BI_InterlockedIncrement64_rel:
+ return MSVCIntrin::_InterlockedIncrement_rel;
+ case ARM::BI_InterlockedIncrement16_nf:
+ case ARM::BI_InterlockedIncrement_nf:
+ case ARM::BI_InterlockedIncrement64_nf:
+ return MSVCIntrin::_InterlockedIncrement_nf;
+ case ARM::BI_InterlockedDecrement16_acq:
+ case ARM::BI_InterlockedDecrement_acq:
+ case ARM::BI_InterlockedDecrement64_acq:
+ return MSVCIntrin::_InterlockedDecrement_acq;
+ case ARM::BI_InterlockedDecrement16_rel:
+ case ARM::BI_InterlockedDecrement_rel:
+ case ARM::BI_InterlockedDecrement64_rel:
+ return MSVCIntrin::_InterlockedDecrement_rel;
+ case ARM::BI_InterlockedDecrement16_nf:
+ case ARM::BI_InterlockedDecrement_nf:
+ case ARM::BI_InterlockedDecrement64_nf:
+ return MSVCIntrin::_InterlockedDecrement_nf;
+ }
+ // Every case above (including default) returns, so this is unreachable.
+ llvm_unreachable("must return from switch");
+}
+
+// AArch64 counterpart of translateArmToMsvcIntrin: maps an AArch64 target
+// builtin ID to the shared CodeGenFunction::MSVCIntrin enumerator, or None
+// when the builtin is not one of the MSVC interlocked/bit-scan builtins
+// handled here. The case table mirrors the ARM one entry-for-entry; only
+// the builtin namespace differs.
+//
+// The _acq/_rel/_nf suffixes select the MSVC acquire / release / no-fence
+// memory-ordering variants of the interlocked operations.
+static Optional<CodeGenFunction::MSVCIntrin>
+translateAarch64ToMsvcIntrin(unsigned BuiltinID) {
+ using MSVCIntrin = CodeGenFunction::MSVCIntrin;
+ switch (BuiltinID) {
+ default:
+ return None;
+ case AArch64::BI_BitScanForward:
+ case AArch64::BI_BitScanForward64:
+ return MSVCIntrin::_BitScanForward;
+ case AArch64::BI_BitScanReverse:
+ case AArch64::BI_BitScanReverse64:
+ return MSVCIntrin::_BitScanReverse;
+ case AArch64::BI_InterlockedAnd64:
+ return MSVCIntrin::_InterlockedAnd;
+ case AArch64::BI_InterlockedExchange64:
+ return MSVCIntrin::_InterlockedExchange;
+ case AArch64::BI_InterlockedExchangeAdd64:
+ return MSVCIntrin::_InterlockedExchangeAdd;
+ case AArch64::BI_InterlockedExchangeSub64:
+ return MSVCIntrin::_InterlockedExchangeSub;
+ case AArch64::BI_InterlockedOr64:
+ return MSVCIntrin::_InterlockedOr;
+ case AArch64::BI_InterlockedXor64:
+ return MSVCIntrin::_InterlockedXor;
+ case AArch64::BI_InterlockedDecrement64:
+ return MSVCIntrin::_InterlockedDecrement;
+ case AArch64::BI_InterlockedIncrement64:
+ return MSVCIntrin::_InterlockedIncrement;
+ case AArch64::BI_InterlockedExchangeAdd8_acq:
+ case AArch64::BI_InterlockedExchangeAdd16_acq:
+ case AArch64::BI_InterlockedExchangeAdd_acq:
+ case AArch64::BI_InterlockedExchangeAdd64_acq:
+ return MSVCIntrin::_InterlockedExchangeAdd_acq;
+ case AArch64::BI_InterlockedExchangeAdd8_rel:
+ case AArch64::BI_InterlockedExchangeAdd16_rel:
+ case AArch64::BI_InterlockedExchangeAdd_rel:
+ case AArch64::BI_InterlockedExchangeAdd64_rel:
+ return MSVCIntrin::_InterlockedExchangeAdd_rel;
+ case AArch64::BI_InterlockedExchangeAdd8_nf:
+ case AArch64::BI_InterlockedExchangeAdd16_nf:
+ case AArch64::BI_InterlockedExchangeAdd_nf:
+ case AArch64::BI_InterlockedExchangeAdd64_nf:
+ return MSVCIntrin::_InterlockedExchangeAdd_nf;
+ case AArch64::BI_InterlockedExchange8_acq:
+ case AArch64::BI_InterlockedExchange16_acq:
+ case AArch64::BI_InterlockedExchange_acq:
+ case AArch64::BI_InterlockedExchange64_acq:
+ return MSVCIntrin::_InterlockedExchange_acq;
+ case AArch64::BI_InterlockedExchange8_rel:
+ case AArch64::BI_InterlockedExchange16_rel:
+ case AArch64::BI_InterlockedExchange_rel:
+ case AArch64::BI_InterlockedExchange64_rel:
+ return MSVCIntrin::_InterlockedExchange_rel;
+ case AArch64::BI_InterlockedExchange8_nf:
+ case AArch64::BI_InterlockedExchange16_nf:
+ case AArch64::BI_InterlockedExchange_nf:
+ case AArch64::BI_InterlockedExchange64_nf:
+ return MSVCIntrin::_InterlockedExchange_nf;
+ case AArch64::BI_InterlockedCompareExchange8_acq:
+ case AArch64::BI_InterlockedCompareExchange16_acq:
+ case AArch64::BI_InterlockedCompareExchange_acq:
+ case AArch64::BI_InterlockedCompareExchange64_acq:
+ return MSVCIntrin::_InterlockedCompareExchange_acq;
+ case AArch64::BI_InterlockedCompareExchange8_rel:
+ case AArch64::BI_InterlockedCompareExchange16_rel:
+ case AArch64::BI_InterlockedCompareExchange_rel:
+ case AArch64::BI_InterlockedCompareExchange64_rel:
+ return MSVCIntrin::_InterlockedCompareExchange_rel;
+ case AArch64::BI_InterlockedCompareExchange8_nf:
+ case AArch64::BI_InterlockedCompareExchange16_nf:
+ case AArch64::BI_InterlockedCompareExchange_nf:
+ case AArch64::BI_InterlockedCompareExchange64_nf:
+ return MSVCIntrin::_InterlockedCompareExchange_nf;
+ case AArch64::BI_InterlockedOr8_acq:
+ case AArch64::BI_InterlockedOr16_acq:
+ case AArch64::BI_InterlockedOr_acq:
+ case AArch64::BI_InterlockedOr64_acq:
+ return MSVCIntrin::_InterlockedOr_acq;
+ case AArch64::BI_InterlockedOr8_rel:
+ case AArch64::BI_InterlockedOr16_rel:
+ case AArch64::BI_InterlockedOr_rel:
+ case AArch64::BI_InterlockedOr64_rel:
+ return MSVCIntrin::_InterlockedOr_rel;
+ case AArch64::BI_InterlockedOr8_nf:
+ case AArch64::BI_InterlockedOr16_nf:
+ case AArch64::BI_InterlockedOr_nf:
+ case AArch64::BI_InterlockedOr64_nf:
+ return MSVCIntrin::_InterlockedOr_nf;
+ case AArch64::BI_InterlockedXor8_acq:
+ case AArch64::BI_InterlockedXor16_acq:
+ case AArch64::BI_InterlockedXor_acq:
+ case AArch64::BI_InterlockedXor64_acq:
+ return MSVCIntrin::_InterlockedXor_acq;
+ case AArch64::BI_InterlockedXor8_rel:
+ case AArch64::BI_InterlockedXor16_rel:
+ case AArch64::BI_InterlockedXor_rel:
+ case AArch64::BI_InterlockedXor64_rel:
+ return MSVCIntrin::_InterlockedXor_rel;
+ case AArch64::BI_InterlockedXor8_nf:
+ case AArch64::BI_InterlockedXor16_nf:
+ case AArch64::BI_InterlockedXor_nf:
+ case AArch64::BI_InterlockedXor64_nf:
+ return MSVCIntrin::_InterlockedXor_nf;
+ case AArch64::BI_InterlockedAnd8_acq:
+ case AArch64::BI_InterlockedAnd16_acq:
+ case AArch64::BI_InterlockedAnd_acq:
+ case AArch64::BI_InterlockedAnd64_acq:
+ return MSVCIntrin::_InterlockedAnd_acq;
+ case AArch64::BI_InterlockedAnd8_rel:
+ case AArch64::BI_InterlockedAnd16_rel:
+ case AArch64::BI_InterlockedAnd_rel:
+ case AArch64::BI_InterlockedAnd64_rel:
+ return MSVCIntrin::_InterlockedAnd_rel;
+ case AArch64::BI_InterlockedAnd8_nf:
+ case AArch64::BI_InterlockedAnd16_nf:
+ case AArch64::BI_InterlockedAnd_nf:
+ case AArch64::BI_InterlockedAnd64_nf:
+ return MSVCIntrin::_InterlockedAnd_nf;
+ // Increment/Decrement have no 8-bit variants in the case lists below.
+ case AArch64::BI_InterlockedIncrement16_acq:
+ case AArch64::BI_InterlockedIncrement_acq:
+ case AArch64::BI_InterlockedIncrement64_acq:
+ return MSVCIntrin::_InterlockedIncrement_acq;
+ case AArch64::BI_InterlockedIncrement16_rel:
+ case AArch64::BI_InterlockedIncrement_rel:
+ case AArch64::BI_InterlockedIncrement64_rel:
+ return MSVCIntrin::_InterlockedIncrement_rel;
+ case AArch64::BI_InterlockedIncrement16_nf:
+ case AArch64::BI_InterlockedIncrement_nf:
+ case AArch64::BI_InterlockedIncrement64_nf:
+ return MSVCIntrin::_InterlockedIncrement_nf;
+ case AArch64::BI_InterlockedDecrement16_acq:
+ case AArch64::BI_InterlockedDecrement_acq:
+ case AArch64::BI_InterlockedDecrement64_acq:
+ return MSVCIntrin::_InterlockedDecrement_acq;
+ case AArch64::BI_InterlockedDecrement16_rel:
+ case AArch64::BI_InterlockedDecrement_rel:
+ case AArch64::BI_InterlockedDecrement64_rel:
+ return MSVCIntrin::_InterlockedDecrement_rel;
+ case AArch64::BI_InterlockedDecrement16_nf:
+ case AArch64::BI_InterlockedDecrement_nf:
+ case AArch64::BI_InterlockedDecrement64_nf:
+ return MSVCIntrin::_InterlockedDecrement_nf;
+ }
+ // Every case above (including default) returns, so this is unreachable.
+ llvm_unreachable("must return from switch");
+}
+
+// X86 counterpart of translateArmToMsvcIntrin: maps an x86 target builtin ID
+// to the shared CodeGenFunction::MSVCIntrin enumerator, or None otherwise.
+// Only the bit-scan and 64-bit interlocked builtins appear here — there are
+// no _acq/_rel/_nf cases, unlike the ARM/AArch64 tables above.
+//
+// NOTE(review): the builtin IDs are written `clang::X86::` rather than bare
+// `X86::` — presumably to disambiguate from another X86 namespace (e.g.
+// llvm::X86) visible in this file; confirm before "simplifying" it away.
+static Optional<CodeGenFunction::MSVCIntrin>
+translateX86ToMsvcIntrin(unsigned BuiltinID) {
+ using MSVCIntrin = CodeGenFunction::MSVCIntrin;
+ switch (BuiltinID) {
+ default:
+ return None;
+ case clang::X86::BI_BitScanForward:
+ case clang::X86::BI_BitScanForward64:
+ return MSVCIntrin::_BitScanForward;
+ case clang::X86::BI_BitScanReverse:
+ case clang::X86::BI_BitScanReverse64:
+ return MSVCIntrin::_BitScanReverse;
+ case clang::X86::BI_InterlockedAnd64:
+ return MSVCIntrin::_InterlockedAnd;
+ case clang::X86::BI_InterlockedExchange64:
+ return MSVCIntrin::_InterlockedExchange;
+ case clang::X86::BI_InterlockedExchangeAdd64:
+ return MSVCIntrin::_InterlockedExchangeAdd;
+ case clang::X86::BI_InterlockedExchangeSub64:
+ return MSVCIntrin::_InterlockedExchangeSub;
+ case clang::X86::BI_InterlockedOr64:
+ return MSVCIntrin::_InterlockedOr;
+ case clang::X86::BI_InterlockedXor64:
+ return MSVCIntrin::_InterlockedXor;
+ case clang::X86::BI_InterlockedDecrement64:
+ return MSVCIntrin::_InterlockedDecrement;
+ case clang::X86::BI_InterlockedIncrement64:
+ return MSVCIntrin::_InterlockedIncrement;
+ }
+ // Every case above (including default) returns, so this is unreachable.
+ llvm_unreachable("must return from switch");
+}
+
+// Emit an MSVC intrinsic. Assumes that arguments have *not* been evaluated.