/* intrin-impl.h */
/**
 * This file has no copyright assigned and is placed in the Public Domain.
 * This file is part of the mingw-w64 runtime package.
 * No warranty is given; refer to the file DISCLAIMER.PD within this package.
 */
/* There are 3 separate ways this file is intended to be used:

   1) Included from intrin.h. In this case, all intrinsics in this file get declarations and
      implementations. No special #defines are needed for this case.

   2) Included from the library versions of these functions (ie mingw-w64-crt\intrincs\*.c). All
      intrinsics in this file must also be included in the library. In this case, only the
      specific functions requested will get defined, and they will not be defined as inline. If
      you have followed the instructions (below) for adding functions to this file, then all you
      need to have in the .c file is the following:

      #define __INTRINSIC_ONLYSPECIAL
      #define __INTRINSIC_SPECIAL___stosb // Causes code generation in intrin-impl.h

      #include <intrin.h>

   3) Included from various platform sdk headers. Some platform sdk headers (such as winnt.h)
      define a subset of intrinsics. To avoid potential conflicts, this file is designed to
      allow for specific subsets of functions to be defined. This is done by defining the
      appropriate variable before including this file:

      #define __INTRINSIC_GROUP_WINNT
      #include <psdk_inc/intrin-impl.h>

   In all cases, it is acceptable to include this file multiple times in any order (ie include
   winnt.h to get its subset, then include intrin.h to get everything, or vice versa).

   See also the comments at the top of intrin.h.
*/
/* To add an implementation for a new intrinsic to this file, you should comment out the current prototype in intrin.h.
   If the function you are adding is not in intrin.h, you should not be adding it to this file. This file is only
   for MSVC intrinsics.

   Make sure you put your definition in the right section (x86 vs x64), and use this outline when adding definitions
   to this file:

#if __INTRINSIC_PROLOG(__int2c)

<prototype goes here>

__INTRINSICS_USEINLINE
<code goes here>

#define __INTRINSIC_DEFINED___int2c
#endif
*/
/* Note that there is no file-wide #if to prevent intrin-impl.h from being
   included multiple times. This is because this file might be included multiple
   times to define various subsets of the functions it contains. */

/* However we do check for __MINGW_INTRIN_INLINE. In theory this means we
   can work with other compilers. */
  44. #ifdef __MINGW_INTRIN_INLINE
/* Clang has support for MSVC builtins, GCC doesn't */
#pragma push_macro("__has_builtin")
#ifndef __has_builtin
/* Fallback for compilers without __has_builtin (e.g. older GCC): report
   every builtin as unavailable so the inline C implementations are used. */
#define __has_builtin(x) 0
#endif
  50. /* These macros are used by the routines below. While this file may be included
  51. multiple times, these macros only need to be defined once. */
  52. #ifndef _INTRIN_MAC_
  53. #define _INTRIN_MAC_
/* GCC v6 added support for outputting flags. This allows better code to be
   produced for a number of intrinsics. */
#ifndef __GCC_ASM_FLAG_OUTPUTS__
/* No flag-output support: materialize the carry flag into the output
   operand with an explicit setc, and declare "cc" clobbered. */
#define __FLAGCONSTRAINT "=qm"
#define __FLAGSET "\n\tsetc %[old]"
#define __FLAGCLOBBER1 , "cc"
#define __FLAGCLOBBER2 "cc"
#else
/* Flag outputs available: let the compiler read the carry flag directly
   via the "=@ccc" constraint; no setc and no explicit "cc" clobber needed. */
#define __FLAGCONSTRAINT "=@ccc"
#define __FLAGSET
#define __FLAGCLOBBER1
#define __FLAGCLOBBER2
#endif
/* This macro is used by __stosb, __stosw, __stosd, __stosq

Parameters: (FunctionName, DataType, Operator)
FunctionName: Any valid function name
DataType: BYTE, WORD, DWORD or DWORD64
InstructionSize: b|b, w|w, l|d, q|q (AT&T|Intel suffix pair) */

/* While we don't need the output values for Dest or Count, we
   must still inform the compiler the asm changes them. */
/* rep stos fills Count elements at Dest with the value in al/ax/eax/rax;
   "+D"/"+c" tie Dest/Count to rdi/rcx, which the instruction advances. */
#define __buildstos(x, y, z) void x(y *Dest, y Data, size_t Count) \
{ \
   __asm__ __volatile__ ("rep stos{" z "}" \
      : "+D" (Dest), "+c" (Count) \
      : [Data] "a" (Data) \
      : "memory"); \
}
/* This macro is used by InterlockedAnd, InterlockedOr, InterlockedXor, InterlockedAnd64, InterlockedOr64, InterlockedXor64

Parameters: (FunctionName, DataType, Operator)
FunctionName: Any valid function name
DataType: __LONG32 or __int64
Operator: One of xor, or, and */
/* Expands to GCC's __sync_fetch_and_<op> full-barrier atomic, which
   returns the value the destination held before the operation. */
#define __buildlogicali(x, y, o) y x(volatile y *Destination, y Value) \
{ \
   return __sync_fetch_and_ ## o(Destination, Value); \
}
/* This macro is used by InterlockedBitTestAndSet, InterlockedBitTestAndReset, InterlockedBitTestAndComplement,
   InterlockedBitTestAndSet64, InterlockedBitTestAndReset64, InterlockedBitTestAndComplement64
   _interlockedbittestandset, _interlockedbittestandreset, _interlockedbittestandcomplement
   _interlockedbittestandset64, _interlockedbittestandreset64, _interlockedbittestandcomplement64

Parameters: (FunctionName, DataType, AsmCode, OffsetConstraint)
FunctionName: Any valid function name
DataType: __LONG32 or __int64
OffsetConstraint: either "I" for 32bit data types or "J" for 64. */
#if defined(__x86_64__) || defined(_AMD64_) || defined(__i386__) || defined(_X86_)
/* x86/x64: the caller passes a complete lock bts/btr/btc template in z;
   the tested bit's previous value arrives via the carry flag. */
#define __buildbittesti(x, y, z, a) unsigned char x(y volatile *Base, y Offset) \
{ \
   unsigned char old; \
   __asm__ __volatile__ (z \
      : [old] __FLAGCONSTRAINT (old), [Base] "+m" (*Base) \
      : [Offset] a "r" (Offset) \
      : "memory" __FLAGCLOBBER1); \
   return old; \
}
#elif defined(__arm__) || defined(_ARM_)
/* ARM: ldrex/strex retry loop applying the bit operation in z, with
   "dmb sy" barriers on both sides for full-barrier semantics.
   NOTE(review): bit = 1 << Offset assumes 0 <= Offset < 32 — Offset is
   not masked here. */
#define __buildbittesti(x, y, z, a) unsigned char x(y volatile *Base, y Offset) \
{ \
   unsigned int old, tmp1, tmp2; \
   unsigned int bit = 1 << Offset; \
   __asm__ __volatile__ ("dmb sy\n\t" \
      "1: ldrex %[old], %[Base]\n\t" \
      "mov %[tmp1], %[old]\n\t" \
      z " %[tmp1], %[tmp1], %[bit]\n\t" \
      "strex %[tmp2], %[tmp1], %[Base]\n\t" \
      "cmp %[tmp2], #0\n\t" \
      "bne 1b\n\t" \
      "dmb sy" \
      : [old] "=&r" (old), [tmp1] "=&r" (tmp1), [tmp2] "=&r" (tmp2), [Base] "+m" (*Base) \
      : [bit] a "r" (bit) \
      : "memory", "cc"); \
   return (old >> Offset) & 1; \
}
#elif defined(__aarch64__) || defined(_ARM64_)
/* AArch64: same ldxr/stxr retry-loop pattern on 32-bit (w) registers. */
#define __buildbittesti(x, y, z, a) unsigned char x(y volatile *Base, y Offset) \
{ \
   unsigned int old, tmp1, tmp2; \
   unsigned int bit = 1 << Offset; \
   __asm__ __volatile__ ("dmb sy\n\t" \
      "1: ldxr %w[old], %[Base]\n\t" \
      "mov %w[tmp1], %w[old]\n\t" \
      z " %w[tmp1], %w[tmp1], %w[bit]\n\t" \
      "stxr %w[tmp2], %w[tmp1], %[Base]\n\t" \
      "cmp %w[tmp2], #0\n\t" \
      "b.ne 1b\n\t" \
      "dmb sy" \
      : [old] "=&r" (old), [tmp1] "=&r" (tmp1), [tmp2] "=&r" (tmp2), [Base] "+m" (*Base) \
      : [bit] a "r" (bit) \
      : "memory", "cc"); \
   return (old >> Offset) & 1; \
}
/* 64-bit variant: full x registers for the value, w register for the
   stxr status. */
#define __buildbittesti64(x, y, z, a) unsigned char x(y volatile *Base, y Offset) \
{ \
   unsigned __int64 old, tmp1; \
   unsigned int tmp2; \
   unsigned __int64 bit = 1ULL << Offset; \
   __asm__ __volatile__ ("dmb sy\n\t" \
      "1: ldxr %[old], %[Base]\n\t" \
      "mov %[tmp1], %[old]\n\t" \
      z " %[tmp1], %[tmp1], %[bit]\n\t" \
      "stxr %w[tmp2], %[tmp1], %[Base]\n\t" \
      "cmp %w[tmp2], #0\n\t" \
      "b.ne 1b\n\t" \
      "dmb sy" \
      : [old] "=&r" (old), [tmp1] "=&r" (tmp1), [tmp2] "=&r" (tmp2), [Base] "+m" (*Base) \
      : [bit] a "r" (bit) \
      : "memory", "cc"); \
   return (old >> Offset) & 1; \
}
#endif /* defined(__x86_64__) || defined(_AMD64_) || defined(__i386__) || defined(_X86_) */
/* This macro is used by YieldProcessor when compiling x86 w/o SSE2.
   It generates the same opcodes as _mm_pause. */
#define __buildpause() __asm__ __volatile__("rep nop")

/* This macro is used by DbgRaiseAssertionFailure and __int2c

Parameters: (IntNum)
IntNum: Interrupt number in hex */
/* "{$}" emits the immediate prefix only in AT&T dialect. */
#define __buildint(a) __asm__ __volatile__("int {$}" #a :)

/* This macro is used by MemoryBarrier when compiling x86 w/o SSE2.
   Note that on i386, xchg performs an implicit lock. */
/* The xchg with a stack byte acts as a full memory barrier on pre-SSE2
   CPUs that lack mfence. */
#define __buildmemorybarrier() \
{ \
   unsigned char Barrier; \
   __asm__ __volatile__("xchg{b %%| }al, %0" :"=m" (Barrier) : /* no inputs */ : "eax", "memory"); \
}
/* This macro is used by __readfsbyte, __readfsword, __readfsdword
   __readgsbyte, __readgsword, __readgsdword, __readgsqword

Parameters: (FunctionName, DataType, Segment)
FunctionName: Any valid function name
DataType: char, short, __LONG32 or __int64
Segment: fs or gs
Type: b, w, l, q
*/
/* Loads a value at the given offset from the fs/gs segment base.  The
   "m" operand describes the access so the optimizer tracks the memory
   dependency; the segment override supplies the actual base. */
#define __buildreadseg(x, y, z, a) y x(unsigned __LONG32 Offset) { \
   y ret; \
   __asm__ ("mov{" a " %%" z ":%[offset], %[ret] | %[ret], %%" z ":%[offset]}" \
      : [ret] "=r" (ret) \
      : [offset] "m" ((*(y *) (size_t) Offset))); \
   return ret; \
}
/* This macro is used by __writefsbyte, __writefsword, __writefsdword
   __writegsbyte, __writegsword, __writegsdword, __writegsqword

Parameters: (FunctionName, DataType, Segment)
FunctionName: Any valid function name
DataType: char, short, __LONG32 or __int64
Segment: fs or gs
Type: b, w, l, q
*/
/* Stores Data at the given offset from the fs/gs segment base; the "=m"
   output models the store for the optimizer. */
#define __buildwriteseg(x, y, z, a) void x(unsigned __LONG32 Offset, y Data) { \
   __asm__ ("mov{" a " %[Data], %%" z ":%[offset] | %%" z ":%[offset], %[Data]}" \
      : [offset] "=m" ((*(y *) (size_t) Offset)) \
      : [Data] "ri" (Data)); \
}
/* This macro is used by _BitScanForward, _BitScanForward64, _BitScanReverse, _BitScanReverse64

Parameters: (FunctionName, DataType, Statement)
FunctionName: Any valid function name
DataType: unsigned __LONG32 or unsigned __int64
Statement: BSF or BSR */

/* GCC v6 added support for outputting flags. This allows better code to be
   produced for a number of intrinsics. */
#ifndef __GCC_ASM_FLAG_OUTPUTS__
/* NOTE(review): when Mask == 0, n is left unset by the instruction and an
   indeterminate value is stored through Index; callers must test the
   return value first (matches the MSVC contract, where *Index is
   undefined on a zero return). */
#define __buildbitscan(x, y, z) unsigned char x(unsigned __LONG32 *Index, y Mask) \
{ \
   y n; \
   __asm__ (z \
      : [Index] "=r" (n) \
      : [Mask] "r" (Mask) \
      : "cc"); \
   *Index = n; \
   return Mask!=0; \
}
#else
/* Flag-output variant: the "found a bit" result comes straight from the
   ZF flag via "=@ccnz" instead of re-testing Mask in C. */
#define __buildbitscan(x, y, z) unsigned char x(unsigned __LONG32 *Index, y Mask) \
{ \
   y n; \
   unsigned char old; \
   __asm__ (z \
      : "=@ccnz" (old), [Index] "=r" (n) \
      : [Mask] "r" (Mask)); \
   *Index = n; \
   return old; \
}
#endif
/* This macro is used by _bittest & _bittest64

Parameters: (FunctionName, DataType, Type, OffsetConstraint)
FunctionName: Any valid function name
DataType: __LONG32 or __int64
Type: l, q
OffsetConstraint: either "I" for 32bit data types or "J" for 64.
*/
/* Non-atomic read-only bit test; the tested bit is returned through the
   carry flag (__FLAGCONSTRAINT/__FLAGSET adapt to compiler support). */
#define __buildbittest(x, y, z, a) unsigned char x(const y *Base, y Offset) \
{ \
   unsigned char old; \
   __asm__ ("bt{" z " %[Offset],%[Base] | %[Base],%[Offset]}" __FLAGSET \
      : [old] __FLAGCONSTRAINT (old) \
      : [Offset] a "r" (Offset), [Base] "rm" (*Base) \
      : __FLAGCLOBBER2); \
   return old; \
}
/* This macro is used by _bittestandset, _bittestandreset, _bittestandcomplement,
   _bittestandset64, _bittestandreset64, _bittestandcomplement64

Parameters: (FunctionName, DataType, Statement, OffsetConstraint, Type)
FunctionName: Any valid function name
DataType: __LONG32 or __int64
Statement: asm statement (bts, btr, btc)
OffsetConstraint: either "I" for 32bit data types or "J" for 64.
Type: l, q
*/
/* Non-atomic (no lock prefix) bit test-and-modify; returns the bit's
   previous value via the carry flag. */
#define __buildbittestand(x, y, z, a, b) unsigned char x(y *Base, y Offset) \
{ \
   unsigned char old; \
   __asm__ (z "{" b " %[Offset],%[Base] | %[Base],%[Offset]}" __FLAGSET \
      : [old] __FLAGCONSTRAINT (old), [Base] "+rm" (*Base) \
      : [Offset] a "r" (Offset) \
      : __FLAGCLOBBER2); \
   return old; \
}
/* This macro is used by __inbyte, __inword, __indword

Parameters: (FunctionName, DataType, Type)
FunctionName: Any valid function name
DataType: unsigned char, unsigned short, unsigned __LONG32
Type: b, w, l
*/
/* in reads from an I/O port into al/ax/eax ("=a"); "Nd" allows an
   immediate port number 0-255 or the dx register. */
#define __build_inport(x, y, z) y x(unsigned short Port) { \
   y value; \
   __asm__ __volatile__ ("in{" z " %w[port],%[value]| %[value],%w[port]}" \
      : [value] "=a" (value) \
      : [port] "Nd" (Port)); \
   return value; \
}
/* This macro is used by __outbyte, __outword, __outdword

Parameters: (FunctionName, DataType, Type)
FunctionName: Any valid function name
DataType: unsigned char, unsigned short, unsigned __LONG32
Type: b, w, l
*/
/* out writes al/ax/eax ("a") to an I/O port; "Nd" allows an immediate
   port number 0-255 or the dx register. */
#define __build_outport(x, y, z) void x(unsigned short Port, y Data) { \
   __asm__ __volatile__ ("out{" z " %[data],%w[port]| %w[port],%[data]}" \
      : \
      : [data] "a" (Data), [port] "Nd" (Port)); \
}
/* This macro is used by __inbytestring, __inwordstring, __indwordstring

Parameters: (FunctionName, DataType, InstructionSizeAtt, InstructionSizeIntel)
FunctionName: Any valid function name
DataType: unsigned char, unsigned short, unsigned __LONG32
InstructionSizeAtt: b, w, l
InstructionSizeIntel: b, w, d (not b,w,l)
*/
/* rep ins reads Count items from port dx into the buffer at rdi; cld
   first, so the buffer pointer advances upward. */
#define __build_inportstring(x, y, z, a) void x(unsigned short Port, y *Buffer, unsigned __LONG32 Count) { \
   __asm__ __volatile__ ("cld ; rep ins{" z "|" a "}" \
      : "=D" (Buffer), "=c" (Count) \
      : "d"(Port), "0"(Buffer), "1" (Count) \
      : "memory"); \
}
/* This macro is used by __outbytestring, __outwordstring, __outdwordstring

Parameters: (FunctionName, DataType, InstructionSizeAtt, InstructionSizeIntel)
FunctionName: Any valid function name
DataType: unsigned char, unsigned short, unsigned __LONG32
InstructionSizeAtt: b, w, l
InstructionSizeIntel: b, w, d (not b,w,l)
*/
/* rep outs writes Count items from the buffer at rsi to port dx; cld
   first, so the buffer pointer advances upward. */
#define __build_outportstring(x, y, z, a) void x(unsigned short Port, y *Buffer, unsigned __LONG32 Count) { \
   __asm__ __volatile__ ("cld ; rep outs{" z "|" a "}" \
      : "=S" (Buffer), "=c" (Count) \
      : "d"(Port), "0"(Buffer), "1" (Count) \
      : "memory"); \
}
/* This macro is used by __readcr0, __readcr2, __readcr3, __readcr4, __readcr8

Parameters: (FunctionName, DataType, RegisterNumber)
FunctionName: Any valid function name
DataType: unsigned __LONG32, unsigned __int64
RegisterNumber: 0, 2, 3, 4, 8
*/
/* Reads control register crN into a general register (privileged). */
#define __build_readcr(x, y, z) y x(void) { \
   y value; \
   __asm__ __volatile__ ("mov {%%cr" z ", %[value] | %[value], %%cr" z "}" \
       : [value] "=q" (value)); \
   return value; \
}
/* This macro is used by __writecr0, __writecr2, __writecr3, __writecr4, __writecr8

Parameters: (FunctionName, DataType, RegisterNumber)
FunctionName: Any valid function name
DataType: unsigned __LONG32, unsigned __int64
RegisterNumber: 0, 2, 3, 4, 8
*/
/* Writes Data into control register crN (privileged); "memory" is
   clobbered because changing e.g. cr3 alters address translation. */
#define __build_writecr(x, y, z) void x(y Data) { \
   __asm__ __volatile__ ("mov {%[Data], %%cr" z "|%%cr" z ", %[Data]}" \
      : \
      : [Data] "q" (Data) \
      : "memory"); \
}
/* This macro is used by __movsb, __movsd, __movsq, __movsw

Parameters: (FunctionName, DataType, InstructionSize)
FunctionName: Any valid function name
DataType: unsigned char, unsigned short, unsigned __LONG32, unsigned __int64
InstructionSize: b, w, d, q
*/
/* rep movs copies Count elements from Source (rsi) to Destination (rdi).
   Like memcpy, this assumes the ranges do not overlap in the direction
   of the copy. */
#define __buildmov(x, y, z) void x(y *Destination, y const *Source, size_t Count) \
{ \
   __asm__ __volatile__ ( \
      "rep movs" z \
      : "=D" (Destination), "=S" (Source), "=c" (Count) \
      : "0" (Destination), "1" (Source), "2" (Count) \
      : "memory"); \
}
  357. #endif /* _INTRIN_MAC_ */
/* The Barrier functions can never be in the library. Since gcc only
   supports ReadWriteBarrier, map all 3 to do the same. */
#ifndef _ReadWriteBarrier
/* Compiler-level barrier only: forbids the compiler from moving memory
   accesses across it; emits no instructions and is not a CPU fence. */
#define _ReadWriteBarrier() __asm__ __volatile__ ("" ::: "memory")
#define _ReadBarrier _ReadWriteBarrier
#define _WriteBarrier _ReadWriteBarrier
#endif
/* The logic for this macro is:
   if the function is not yet defined AND
   (
   (if we are not just defining special OR
   (we are defining special AND this is one of the ones we are defining)
   )
   )
*/
/* NOTE: expanding `defined` from a macro inside #if is formally
   undefined by the C standard, but behaves as intended on the compilers
   this header targets. */
#define __INTRINSIC_PROLOG(name) (!defined(__INTRINSIC_DEFINED_ ## name)) && ((!defined (__INTRINSIC_ONLYSPECIAL)) || (defined (__INTRINSIC_ONLYSPECIAL) && defined(__INTRINSIC_SPECIAL_ ## name)))

#ifdef __INTRINSIC_ONLYSPECIAL
/* Library build (case 2): emit real out-of-line definitions. */
#define __INTRINSICS_USEINLINE
#else
/* Header-only use (cases 1/3): emit inline definitions. */
#define __INTRINSICS_USEINLINE __MINGW_INTRIN_INLINE
#endif
/* Normally __INTRINSIC_ONLYSPECIAL is used to indicate that we are
   being included in the library version of the intrinsic (case 2). However,
   that really only affects the definition of __INTRINSICS_USEINLINE.
   So here we are letting it serve an additional purpose of only defining
   the intrinsics for a certain file (case 3). For example, to create the
   intrinsics for the functions in winnt.h, define __INTRINSIC_GROUP_WINNT.

   Note that this file can be included multiple times, and as a result
   there can be overlap (definitions that appear in more than one
   file). This is handled by __INTRINSIC_DEFINED_*

   If no groups are defined (such as what happens when including intrin.h),
   all intrinsics are defined. */

/* If __INTRINSIC_ONLYSPECIAL is defined at this point, we are processing case 2. In
   that case, don't go looking for groups */
#ifndef __INTRINSIC_ONLYSPECIAL

#ifdef __INTRINSIC_GROUP_WINNT
#undef __INTRINSIC_GROUP_WINNT /* Remove this for efficiency if intrin-impl.h is included again */

/* Note that this gets undefined at the end of this file */
#define __INTRINSIC_ONLYSPECIAL

/* The subset of intrinsics that winnt.h relies on. */
#define __INTRINSIC_SPECIAL___faststorefence
#define __INTRINSIC_SPECIAL___int2c
#define __INTRINSIC_SPECIAL___stosb
#define __INTRINSIC_SPECIAL___stosd
#define __INTRINSIC_SPECIAL___stosq
#define __INTRINSIC_SPECIAL___stosw
#define __INTRINSIC_SPECIAL__InterlockedAnd
#define __INTRINSIC_SPECIAL__InterlockedAnd64
#define __INTRINSIC_SPECIAL__interlockedbittestandcomplement
#define __INTRINSIC_SPECIAL__interlockedbittestandcomplement64
#define __INTRINSIC_SPECIAL__interlockedbittestandreset
#define __INTRINSIC_SPECIAL__interlockedbittestandreset64
#define __INTRINSIC_SPECIAL__interlockedbittestandset
#define __INTRINSIC_SPECIAL__interlockedbittestandset64
#define __INTRINSIC_SPECIAL__InterlockedOr
#define __INTRINSIC_SPECIAL__InterlockedOr64
#define __INTRINSIC_SPECIAL__InterlockedXor
#define __INTRINSIC_SPECIAL__InterlockedXor64
#define __INTRINSIC_SPECIAL_InterlockedBitTestAndComplement
#define __INTRINSIC_SPECIAL_InterlockedBitTestAndComplement64
#define __INTRINSIC_SPECIAL_InterlockedBitTestAndReset
#define __INTRINSIC_SPECIAL_InterlockedBitTestAndReset64
#define __INTRINSIC_SPECIAL_InterlockedBitTestAndSet
#define __INTRINSIC_SPECIAL_InterlockedBitTestAndSet64
#define __INTRINSIC_SPECIAL__InterlockedIncrement16
#define __INTRINSIC_SPECIAL__InterlockedDecrement16
#define __INTRINSIC_SPECIAL__InterlockedCompareExchange16
#define __INTRINSIC_SPECIAL__InterlockedIncrement
#define __INTRINSIC_SPECIAL__InterlockedDecrement
#define __INTRINSIC_SPECIAL__InterlockedAdd
#define __INTRINSIC_SPECIAL__InterlockedExchange
#define __INTRINSIC_SPECIAL__InterlockedExchangeAdd
#define __INTRINSIC_SPECIAL__InterlockedCompareExchange
#define __INTRINSIC_SPECIAL__InterlockedIncrement64
#define __INTRINSIC_SPECIAL__InterlockedDecrement64
#define __INTRINSIC_SPECIAL__InterlockedAdd64
#define __INTRINSIC_SPECIAL__InterlockedExchangeAdd64
#define __INTRINSIC_SPECIAL__InterlockedExchange64
#define __INTRINSIC_SPECIAL__InterlockedCompareExchange64
#define __INTRINSIC_SPECIAL__InterlockedExchangePointer
#define __INTRINSIC_SPECIAL__InterlockedCompareExchangePointer
#define __INTRINSIC_SPECIAL___readgsbyte
#define __INTRINSIC_SPECIAL___readgsword
#define __INTRINSIC_SPECIAL___readgsdword
#define __INTRINSIC_SPECIAL___readgsqword
#define __INTRINSIC_SPECIAL___writegsbyte
#define __INTRINSIC_SPECIAL___writegsword
#define __INTRINSIC_SPECIAL___writegsdword
#define __INTRINSIC_SPECIAL___writegsqword
#define __INTRINSIC_SPECIAL___readfsbyte
#define __INTRINSIC_SPECIAL___readfsword
#define __INTRINSIC_SPECIAL___readfsdword
#define __INTRINSIC_SPECIAL___writefsbyte
#define __INTRINSIC_SPECIAL___writefsword
#define __INTRINSIC_SPECIAL___writefsdword
#define __INTRINSIC_SPECIAL__BitScanForward
#define __INTRINSIC_SPECIAL__BitScanForward64
#define __INTRINSIC_SPECIAL__BitScanReverse
#define __INTRINSIC_SPECIAL__BitScanReverse64
#define __INTRINSIC_SPECIAL__bittest
#define __INTRINSIC_SPECIAL__bittestandset
#define __INTRINSIC_SPECIAL__bittestandreset
#define __INTRINSIC_SPECIAL__bittestandcomplement
#define __INTRINSIC_SPECIAL__bittest64
#define __INTRINSIC_SPECIAL__bittestandset64
#define __INTRINSIC_SPECIAL__bittestandreset64
#define __INTRINSIC_SPECIAL__bittestandcomplement64
#define __INTRINSIC_SPECIAL___movsb
#define __INTRINSIC_SPECIAL___movsw
#define __INTRINSIC_SPECIAL___movsd
#define __INTRINSIC_SPECIAL___movsq

#endif /* __INTRINSIC_GROUP_WINNT */

#ifdef __INTRINSIC_GROUP_WINBASE
#undef __INTRINSIC_GROUP_WINBASE /* Remove this for efficiency if intrin-impl.h is included again */

/* Note that this gets undefined at the end of this file */
#define __INTRINSIC_ONLYSPECIAL

/* The subset of intrinsics that winbase.h relies on. */
#define __INTRINSIC_SPECIAL__InterlockedIncrement
#define __INTRINSIC_SPECIAL__InterlockedDecrement
#define __INTRINSIC_SPECIAL__InterlockedAdd
#define __INTRINSIC_SPECIAL__InterlockedExchange
#define __INTRINSIC_SPECIAL__InterlockedExchangeAdd
#define __INTRINSIC_SPECIAL__InterlockedCompareExchange
#define __INTRINSIC_SPECIAL__InterlockedCompareExchangePointer
#define __INTRINSIC_SPECIAL__InterlockedExchangePointer
#define __INTRINSIC_SPECIAL__InterlockedAnd64
#define __INTRINSIC_SPECIAL__InterlockedOr64
#define __INTRINSIC_SPECIAL__InterlockedXor64
#define __INTRINSIC_SPECIAL__InterlockedIncrement64
#define __INTRINSIC_SPECIAL__InterlockedDecrement64
#define __INTRINSIC_SPECIAL__InterlockedAdd64
#define __INTRINSIC_SPECIAL__InterlockedExchange64
#define __INTRINSIC_SPECIAL__InterlockedExchangeAdd64
#define __INTRINSIC_SPECIAL__InterlockedCompareExchange64

#endif /* __INTRINSIC_GROUP_WINBASE */

/* To add an additional group, put the #ifdef and definitions here. */

#endif /* __INTRINSIC_ONLYSPECIAL */
  493. #ifdef __cplusplus
  494. extern "C" {
  495. #endif
/* Before 4.9.2, ia32intrin.h had broken versions of these.
   Drop any macro definitions so the declarations below take effect. */
#undef _lrotl
#undef _lrotr
  499. #if __INTRINSIC_PROLOG(_lrotl)
  500. unsigned long _lrotl(unsigned long __X, int __C);
  501. #if !__has_builtin(_lrotl)
  502. __INTRINSICS_USEINLINE
  503. unsigned long _lrotl(unsigned long __X, int __C)
  504. {
  505. return (__X << __C) | (__X >> ((sizeof(long) * 8) - __C));
  506. }
#endif
#define __INTRINSIC_DEFINED__lrotl
#endif /* __INTRINSIC_PROLOG */
/* Rotate right counterpart of _lrotl. */
#if __INTRINSIC_PROLOG(_lrotr)
unsigned long _lrotr(unsigned long __X, int __C);
#if !__has_builtin(_lrotr)
__INTRINSICS_USEINLINE
  514. unsigned long _lrotr(unsigned long __X, int __C)
  515. {
  516. return (__X >> __C) | (__X << ((sizeof(long) * 8) - __C));
  517. }
#endif
#define __INTRINSIC_DEFINED__lrotr
#endif /* __INTRINSIC_PROLOG */
/* 8-bit rotates: operands promote to int, so counts in 0..8 are safe. */
#if __INTRINSIC_PROLOG(_rotl8)
unsigned char _rotl8(unsigned char __X, unsigned char __C);
#if !__has_builtin(_rotl8)
__INTRINSICS_USEINLINE
  525. unsigned char _rotl8(unsigned char __X, unsigned char __C)
  526. {
  527. return (__X << __C) | (__X >> (8 - __C));
  528. }
#endif
#define __INTRINSIC_DEFINED__rotl8
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(_rotr8)
unsigned char _rotr8(unsigned char __X, unsigned char __C);
#if !__has_builtin(_rotr8)
__INTRINSICS_USEINLINE
  536. unsigned char _rotr8(unsigned char __X, unsigned char __C)
  537. {
  538. return (__X >> __C) | (__X << (8 - __C));
  539. }
#endif
#define __INTRINSIC_DEFINED__rotr8
#endif /* __INTRINSIC_PROLOG */
/* 16-bit rotates: same promotion argument as the 8-bit pair above. */
#if __INTRINSIC_PROLOG(_rotl16)
unsigned short _rotl16(unsigned short __X, unsigned char __C);
#if !__has_builtin(_rotl16)
__INTRINSICS_USEINLINE
  547. unsigned short _rotl16(unsigned short __X, unsigned char __C)
  548. {
  549. return (__X << __C) | (__X >> (16 - __C));
  550. }
#endif
#define __INTRINSIC_DEFINED__rotl16
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(_rotr16)
unsigned short _rotr16(unsigned short __X, unsigned char __C);
#if !__has_builtin(_rotr16)
__INTRINSICS_USEINLINE
  558. unsigned short _rotr16(unsigned short __X, unsigned char __C)
  559. {
  560. return (__X >> __C) | (__X << (16 - __C));
  561. }
#endif
#define __INTRINSIC_DEFINED__rotr16
#endif /* __INTRINSIC_PROLOG */
/* ----- x86-64 only intrinsics ----- */
#if defined(__x86_64__) || defined(_AMD64_)
#if __INTRINSIC_PROLOG(__faststorefence)
void __faststorefence(void);
#if !__has_builtin(__faststorefence)
__INTRINSICS_USEINLINE
void __faststorefence(void) {
/* Turns out this is actually faster than MS's "trick" on newer cpus. Note
that this builtin performs an implicit ReadWriteBarrier. */
__builtin_ia32_sfence();
}
#endif
#define __INTRINSIC_DEFINED___faststorefence
#endif /* __INTRINSIC_PROLOG */
/* __stosq: rep stosq store of Count qwords (builder macro defined earlier). */
#if __INTRINSIC_PROLOG(__stosq)
__MINGW_EXTENSION void __stosq(unsigned __int64 *, unsigned __int64, size_t);
#if !__has_builtin(__stosq)
__INTRINSICS_USEINLINE
__buildstos(__stosq, unsigned __int64, "q|q")
#endif
#define __INTRINSIC_DEFINED___stosq
#endif /* __INTRINSIC_PROLOG */
/* Atomic 64-bit bit-test intrinsics: __buildbittesti (defined earlier)
   emits the given locked instruction; the {AT&T | Intel} asm-dialect
   alternatives keep both assembler syntaxes working. */
#if __INTRINSIC_PROLOG(_interlockedbittestandset64)
__MINGW_EXTENSION unsigned char _interlockedbittestandset64(__int64 volatile *a, __int64 b);
#if !__has_builtin(_interlockedbittestandset64)
__INTRINSICS_USEINLINE
__buildbittesti(_interlockedbittestandset64, __int64, "lock bts{q %[Offset],%[Base] | %[Base],%[Offset]}" __FLAGSET, "J")
#endif
#define __INTRINSIC_DEFINED__interlockedbittestandset64
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(_interlockedbittestandreset64)
__MINGW_EXTENSION unsigned char _interlockedbittestandreset64(__int64 volatile *a, __int64 b);
#if !__has_builtin(_interlockedbittestandreset64)
__INTRINSICS_USEINLINE
__buildbittesti(_interlockedbittestandreset64, __int64, "lock btr{q %[Offset],%[Base] | %[Base],%[Offset]}" __FLAGSET, "J")
#endif
#define __INTRINSIC_DEFINED__interlockedbittestandreset64
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(_interlockedbittestandcomplement64)
__MINGW_EXTENSION unsigned char _interlockedbittestandcomplement64(__int64 volatile *a, __int64 b);
#if !__has_builtin(_interlockedbittestandcomplement64)
__INTRINSICS_USEINLINE
__buildbittesti(_interlockedbittestandcomplement64, __int64, "lock btc{q %[Offset],%[Base] | %[Base],%[Offset]}" __FLAGSET, "J")
#endif
#define __INTRINSIC_DEFINED__interlockedbittestandcomplement64
#endif /* __INTRINSIC_PROLOG */
/* Winnt.h-style spellings of the same three operations. */
#if __INTRINSIC_PROLOG(InterlockedBitTestAndSet64)
__MINGW_EXTENSION unsigned char InterlockedBitTestAndSet64(volatile __int64 *a, __int64 b);
#if !__has_builtin(InterlockedBitTestAndSet64)
__INTRINSICS_USEINLINE
__buildbittesti(InterlockedBitTestAndSet64, __int64, "lock bts{q %[Offset],%[Base] | %[Base],%[Offset]}" __FLAGSET, "J")
#endif
#define __INTRINSIC_DEFINED_InterlockedBitTestAndSet64
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(InterlockedBitTestAndReset64)
__MINGW_EXTENSION unsigned char InterlockedBitTestAndReset64(volatile __int64 *a, __int64 b);
#if !__has_builtin(InterlockedBitTestAndReset64)
__INTRINSICS_USEINLINE
__buildbittesti(InterlockedBitTestAndReset64, __int64, "lock btr{q %[Offset],%[Base] | %[Base],%[Offset]}" __FLAGSET, "J")
#endif
#define __INTRINSIC_DEFINED_InterlockedBitTestAndReset64
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(InterlockedBitTestAndComplement64)
__MINGW_EXTENSION unsigned char InterlockedBitTestAndComplement64(volatile __int64 *a, __int64 b);
#if !__has_builtin(InterlockedBitTestAndComplement64)
__INTRINSICS_USEINLINE
__buildbittesti(InterlockedBitTestAndComplement64, __int64, "lock btc{q %[Offset],%[Base] | %[Base],%[Offset]}" __FLAGSET, "J")
#endif
#define __INTRINSIC_DEFINED_InterlockedBitTestAndComplement64
#endif /* __INTRINSIC_PROLOG */
/* Atomic 64-bit logical ops; __buildlogicali (defined earlier) expands to
   the matching __sync_fetch_and_* based implementation. */
#if __INTRINSIC_PROLOG(_InterlockedAnd64)
__MINGW_EXTENSION __int64 _InterlockedAnd64(__int64 volatile *, __int64);
#if !__has_builtin(_InterlockedAnd64)
__INTRINSICS_USEINLINE
__buildlogicali(_InterlockedAnd64, __int64, and)
#endif
#define __INTRINSIC_DEFINED__InterlockedAnd64
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(_InterlockedOr64)
__MINGW_EXTENSION __int64 _InterlockedOr64(__int64 volatile *, __int64);
#if !__has_builtin(_InterlockedOr64)
__INTRINSICS_USEINLINE
__buildlogicali(_InterlockedOr64, __int64, or)
#endif
#define __INTRINSIC_DEFINED__InterlockedOr64
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(_InterlockedXor64)
__MINGW_EXTENSION __int64 _InterlockedXor64(__int64 volatile *, __int64);
#if !__has_builtin(_InterlockedXor64)
__INTRINSICS_USEINLINE
__buildlogicali(_InterlockedXor64, __int64, xor)
#endif
#define __INTRINSIC_DEFINED__InterlockedXor64
#endif /* __INTRINSIC_PROLOG */
/* Interlocked arithmetic in terms of GCC __sync builtins; these return the
   NEW value (add/sub) per the MS contract. */
#if __INTRINSIC_PROLOG(_InterlockedIncrement64)
__MINGW_EXTENSION __int64 _InterlockedIncrement64(__int64 volatile *Addend);
#if !__has_builtin(_InterlockedIncrement64)
__MINGW_EXTENSION __INTRINSICS_USEINLINE
__int64 _InterlockedIncrement64(__int64 volatile *Addend) {
return __sync_add_and_fetch(Addend, 1);
}
#endif
#define __INTRINSIC_DEFINED__InterlockedIncrement64
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(_InterlockedDecrement64)
__MINGW_EXTENSION __int64 _InterlockedDecrement64(__int64 volatile *Addend);
#if !__has_builtin(_InterlockedDecrement64)
__MINGW_EXTENSION __INTRINSICS_USEINLINE
__int64 _InterlockedDecrement64(__int64 volatile *Addend) {
return __sync_sub_and_fetch(Addend, 1);
}
#endif
#define __INTRINSIC_DEFINED__InterlockedDecrement64
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(_InterlockedExchange64)
__MINGW_EXTENSION __int64 _InterlockedExchange64(__int64 volatile *Target, __int64 Value);
#if !__has_builtin(_InterlockedExchange64)
__MINGW_EXTENSION __INTRINSICS_USEINLINE
__int64 _InterlockedExchange64(__int64 volatile *Target, __int64 Value) {
/* NOTE(review): __sync_lock_test_and_set is documented as an acquire
   barrier only, while MS InterlockedExchange implies a full barrier; on
   x86 the implied lock xchg is a full barrier anyway — confirm for other
   targets before reusing this pattern. */
return __sync_lock_test_and_set(Target, Value);
}
#endif
#define __INTRINSIC_DEFINED__InterlockedExchange64
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(_InterlockedExchangeAdd64)
__MINGW_EXTENSION __int64 _InterlockedExchangeAdd64(__int64 volatile *Addend, __int64 Value);
#if !__has_builtin(_InterlockedExchangeAdd64)
__MINGW_EXTENSION __INTRINSICS_USEINLINE
__int64 _InterlockedExchangeAdd64(__int64 volatile *Addend, __int64 Value) {
/* ExchangeAdd returns the OLD value, hence fetch_and_add. */
return __sync_fetch_and_add(Addend, Value);
}
#endif
#define __INTRINSIC_DEFINED__InterlockedExchangeAdd64
#endif /* __INTRINSIC_PROLOG */
/* gs-segment reads/writes: on x64 Windows, gs points at the TEB.
   __buildreadseg/__buildwriteseg (defined earlier) emit a mov with the
   given segment prefix and operand-size suffix. */
#if __INTRINSIC_PROLOG(__readgsbyte)
unsigned char __readgsbyte(unsigned __LONG32 Offset);
#if !__has_builtin(__readgsbyte)
__INTRINSICS_USEINLINE
__buildreadseg(__readgsbyte, unsigned char, "gs", "b")
#endif
#define __INTRINSIC_DEFINED___readgsbyte
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(__readgsword)
unsigned short __readgsword(unsigned __LONG32 Offset);
#if !__has_builtin(__readgsword)
__INTRINSICS_USEINLINE
__buildreadseg(__readgsword, unsigned short, "gs", "w")
#endif
#define __INTRINSIC_DEFINED___readgsword
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(__readgsdword)
unsigned __LONG32 __readgsdword(unsigned __LONG32 Offset);
#if !__has_builtin(__readgsdword)
__INTRINSICS_USEINLINE
__buildreadseg(__readgsdword, unsigned __LONG32, "gs", "l")
#endif
#define __INTRINSIC_DEFINED___readgsdword
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(__readgsqword)
__MINGW_EXTENSION unsigned __int64 __readgsqword(unsigned __LONG32 Offset);
#if !__has_builtin(__readgsqword)
__MINGW_EXTENSION __INTRINSICS_USEINLINE
__buildreadseg(__readgsqword, unsigned __int64, "gs", "q")
#endif
#define __INTRINSIC_DEFINED___readgsqword
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(__writegsbyte)
void __writegsbyte(unsigned __LONG32 Offset,unsigned char Data);
#if !__has_builtin(__writegsbyte)
__INTRINSICS_USEINLINE
__buildwriteseg(__writegsbyte, unsigned char, "gs", "b")
#endif
#define __INTRINSIC_DEFINED___writegsbyte
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(__writegsword)
void __writegsword(unsigned __LONG32 Offset,unsigned short Data);
#if !__has_builtin(__writegsword)
__INTRINSICS_USEINLINE
__buildwriteseg(__writegsword, unsigned short, "gs", "w")
#endif
#define __INTRINSIC_DEFINED___writegsword
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(__writegsdword)
void __writegsdword(unsigned __LONG32 Offset,unsigned __LONG32 Data);
#if !__has_builtin(__writegsdword)
__INTRINSICS_USEINLINE
__buildwriteseg(__writegsdword, unsigned __LONG32, "gs", "l")
#endif
#define __INTRINSIC_DEFINED___writegsdword
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(__writegsqword)
__MINGW_EXTENSION void __writegsqword(unsigned __LONG32 Offset,unsigned __int64 Data);
#if !__has_builtin(__writegsqword)
__MINGW_EXTENSION __INTRINSICS_USEINLINE
__buildwriteseg(__writegsqword, unsigned __int64, "gs", "q")
#endif
#define __INTRINSIC_DEFINED___writegsqword
#endif /* __INTRINSIC_PROLOG */
/* 64-bit bit scans via bsf/bsr through the __buildbitscan macro. */
#if __INTRINSIC_PROLOG(_BitScanForward64)
__MINGW_EXTENSION unsigned char _BitScanForward64(unsigned __LONG32 *Index, unsigned __int64 Mask);
#if !__has_builtin(_BitScanForward64)
__MINGW_EXTENSION __INTRINSICS_USEINLINE
__buildbitscan(_BitScanForward64, unsigned __int64, "bsf{q %[Mask],%[Index] | %[Index],%[Mask]}")
#endif
#define __INTRINSIC_DEFINED__BitScanForward64
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(_BitScanReverse64)
__MINGW_EXTENSION unsigned char _BitScanReverse64(unsigned __LONG32 *Index, unsigned __int64 Mask);
#if !__has_builtin(_BitScanReverse64)
__MINGW_EXTENSION __INTRINSICS_USEINLINE
__buildbitscan(_BitScanReverse64, unsigned __int64, "bsr{q %[Mask],%[Index] | %[Index],%[Mask]}")
#endif
#define __INTRINSIC_DEFINED__BitScanReverse64
#endif /* __INTRINSIC_PROLOG */
/* Non-atomic 64-bit bit-test variants (bt/bts/btr/btc without lock). */
#if __INTRINSIC_PROLOG(_bittest64)
__MINGW_EXTENSION unsigned char _bittest64(__int64 const *a, __int64 b);
#if !__has_builtin(_bittest64)
__MINGW_EXTENSION __INTRINSICS_USEINLINE
__buildbittest(_bittest64, __int64, "q", "J")
#endif
#define __INTRINSIC_DEFINED__bittest64
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(_bittestandset64)
__MINGW_EXTENSION unsigned char _bittestandset64(__int64 *a, __int64 b);
#if !__has_builtin(_bittestandset64)
__MINGW_EXTENSION __INTRINSICS_USEINLINE
__buildbittestand(_bittestandset64, __int64, "bts", "J", "q")
#endif
#define __INTRINSIC_DEFINED__bittestandset64
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(_bittestandreset64)
__MINGW_EXTENSION unsigned char _bittestandreset64(__int64 *a, __int64 b);
#if !__has_builtin(_bittestandreset64)
__MINGW_EXTENSION __INTRINSICS_USEINLINE
__buildbittestand(_bittestandreset64, __int64, "btr", "J", "q")
#endif
#define __INTRINSIC_DEFINED__bittestandreset64
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(_bittestandcomplement64)
__MINGW_EXTENSION unsigned char _bittestandcomplement64(__int64 *a, __int64 b);
#if !__has_builtin(_bittestandcomplement64)
__MINGW_EXTENSION __INTRINSICS_USEINLINE
__buildbittestand(_bittestandcomplement64, __int64, "btc", "J", "q")
#endif
#define __INTRINSIC_DEFINED__bittestandcomplement64
#endif /* __INTRINSIC_PROLOG */
/* Control-register access (privileged; kernel-mode only on Windows). */
#if __INTRINSIC_PROLOG(__readcr0)
__MINGW_EXTENSION unsigned __int64 __readcr0(void);
#if !__has_builtin(__readcr0)
__INTRINSICS_USEINLINE
__build_readcr(__readcr0, unsigned __int64, "0")
#endif
#define __INTRINSIC_DEFINED___readcr0
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(__readcr2)
__MINGW_EXTENSION unsigned __int64 __readcr2(void);
#if !__has_builtin(__readcr2)
__INTRINSICS_USEINLINE
__build_readcr(__readcr2, unsigned __int64, "2")
#endif
#define __INTRINSIC_DEFINED___readcr2
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(__readcr3)
__MINGW_EXTENSION unsigned __int64 __readcr3(void);
#if !__has_builtin(__readcr3)
__INTRINSICS_USEINLINE
__build_readcr(__readcr3, unsigned __int64, "3")
#endif
#define __INTRINSIC_DEFINED___readcr3
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(__readcr4)
__MINGW_EXTENSION unsigned __int64 __readcr4(void);
#if !__has_builtin(__readcr4)
__INTRINSICS_USEINLINE
__build_readcr(__readcr4, unsigned __int64, "4")
#endif
#define __INTRINSIC_DEFINED___readcr4
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(__readcr8)
__MINGW_EXTENSION unsigned __int64 __readcr8(void);
#if !__has_builtin(__readcr8)
__INTRINSICS_USEINLINE
__build_readcr(__readcr8, unsigned __int64, "8")
#endif
#define __INTRINSIC_DEFINED___readcr8
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(__writecr0)
__MINGW_EXTENSION void __writecr0(unsigned __int64);
#if !__has_builtin(__writecr0)
__INTRINSICS_USEINLINE
__build_writecr(__writecr0, unsigned __int64, "0")
#endif
#define __INTRINSIC_DEFINED___writecr0
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(__writecr3)
__MINGW_EXTENSION void __writecr3(unsigned __int64);
#if !__has_builtin(__writecr3)
__INTRINSICS_USEINLINE
__build_writecr(__writecr3, unsigned __int64, "3")
#endif
#define __INTRINSIC_DEFINED___writecr3
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(__writecr4)
__MINGW_EXTENSION void __writecr4(unsigned __int64);
#if !__has_builtin(__writecr4)
__INTRINSICS_USEINLINE
__build_writecr(__writecr4, unsigned __int64, "4")
#endif
#define __INTRINSIC_DEFINED___writecr4
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(__writecr8)
__MINGW_EXTENSION void __writecr8(unsigned __int64);
#if !__has_builtin(__writecr8)
__INTRINSICS_USEINLINE
__build_writecr(__writecr8, unsigned __int64, "8")
#endif
#define __INTRINSIC_DEFINED___writecr8
#endif /* __INTRINSIC_PROLOG */
/* __movsq: rep movsq copy of Count qwords. */
#if __INTRINSIC_PROLOG(__movsq)
__MINGW_EXTENSION void __movsq(unsigned __int64 *Dest, unsigned __int64 const *Source, size_t Count);
#if !__has_builtin(__movsq)
__MINGW_EXTENSION __INTRINSICS_USEINLINE
__buildmov(__movsq, unsigned __int64, "q")
#endif
#define __INTRINSIC_DEFINED___movsq
#endif /* __INTRINSIC_PROLOG */
/* 64x64 -> 128 bit multiply helpers. */
#if __INTRINSIC_PROLOG(_umul128)
unsigned __int64 _umul128(unsigned __int64, unsigned __int64, unsigned __int64 *);
#if !__has_builtin(_umul128)
__INTRINSICS_USEINLINE
  894. unsigned __int64 _umul128(unsigned __int64 a, unsigned __int64 b, unsigned __int64 *hi)
  895. {
  896. __MINGW_EXTENSION union { unsigned __int128 v; unsigned __int64 sv[2]; } var;
  897. var.v = a;
  898. var.v *= b;
  899. if (hi) *hi = var.sv[1];
  900. return var.sv[0];
  901. }
#endif
#define __INTRINSIC_DEFINED__umul128
#endif /* __INTRINSIC_PROLOG */
/* Signed counterpart of _umul128. */
#if __INTRINSIC_PROLOG(_mul128)
__int64 _mul128(__int64, __int64, __int64 *);
#if !__has_builtin(_mul128)
__INTRINSICS_USEINLINE
  909. __int64 _mul128(__int64 a, __int64 b, __int64 *hi)
  910. {
  911. __MINGW_EXTENSION union { __int128 v; __int64 sv[2]; } var;
  912. var.v = a;
  913. var.v *= b;
  914. if (hi) *hi = var.sv[1];
  915. return var.sv[0];
  916. }
#endif
#define __INTRINSIC_DEFINED__mul128
#endif /* __INTRINSIC_PROLOG */
/* 128-bit funnel shifts via shld/shrd.  The "0" constraint ties the part
   that receives the shifted-in bits to the output register; "J" restricts
   an immediate count to 0..63 and "c" allows the count in CL. */
#if __INTRINSIC_PROLOG(__shiftleft128)
unsigned __int64 __shiftleft128(unsigned __int64 LowPart, unsigned __int64 HighPart, unsigned char Shift);
#if !__has_builtin(__shiftleft128)
__INTRINSICS_USEINLINE
unsigned __int64 __shiftleft128 (unsigned __int64 LowPart, unsigned __int64 HighPart, unsigned char Shift)
{
unsigned __int64 ret;
__asm__ ("shld {%[Shift],%[LowPart],%[HighPart]|%[HighPart], %[LowPart], %[Shift]}"
: [ret] "=r" (ret)
: [LowPart] "r" (LowPart), [HighPart] "0" (HighPart), [Shift] "Jc" (Shift)
: "cc");
return ret;
}
#endif
#define __INTRINSIC_DEFINED___shiftleft128
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(__shiftright128)
unsigned __int64 __shiftright128 (unsigned __int64 LowPart, unsigned __int64 HighPart, unsigned char Shift);
#if !__has_builtin(__shiftright128)
__INTRINSICS_USEINLINE
unsigned __int64 __shiftright128 (unsigned __int64 LowPart, unsigned __int64 HighPart, unsigned char Shift)
{
unsigned __int64 ret;
__asm__ ("shrd {%[Shift],%[HighPart],%[LowPart]|%[LowPart], %[HighPart], %[Shift]}"
: [ret] "=r" (ret)
: [LowPart] "0" (LowPart), [HighPart] "r" (HighPart), [Shift] "Jc" (Shift)
: "cc");
return ret;
}
#endif
#define __INTRINSIC_DEFINED___shiftright128
#endif /* __INTRINSIC_PROLOG */
#endif /* defined(__x86_64__) || defined(_AMD64_) */
/* ***************************************************** */
/* ----- 32-bit ARM only intrinsics ----- */
#if defined(__arm__) || defined(_ARM_)
/* ARM bit-test intrinsics: __buildbittesti here takes the ARM logical
   instruction (orr/bic/eor) used in the LDREX/STREX-style update; the
   fourth parameter is unused on this architecture. */
#if __INTRINSIC_PROLOG(_interlockedbittestandset)
unsigned char _interlockedbittestandset(__LONG32 volatile *a, __LONG32 b);
#if !__has_builtin(_interlockedbittestandset)
__INTRINSICS_USEINLINE
__buildbittesti(_interlockedbittestandset, __LONG32, "orr", /* unused param */)
#endif
#define __INTRINSIC_DEFINED__interlockedbittestandset
#endif /* __INTRINSIC_PROLOG */
  963. #if __INTRINSIC_PROLOG(_interlockedbittestandreset)
  964. unsigned char _interlockedbittestandreset(__LONG32 volatile *a, __LONG32 b);
  965. __INTRINSICS_USEINLINE
  966. #if !__has_builtin(_interlockedbittestandreset)
  967. __buildbittesti(_interlockedbittestandreset, __LONG32, "bic", /* unused param */)
  968. #endif
  969. #define __INTRINSIC_DEFINED__interlockedbittestandreset
  970. #endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(_interlockedbittestandcomplement)
unsigned char _interlockedbittestandcomplement(__LONG32 volatile *a, __LONG32 b);
#if !__has_builtin(_interlockedbittestandcomplement)
__INTRINSICS_USEINLINE
__buildbittesti(_interlockedbittestandcomplement, __LONG32, "eor", /* unused param */)
#endif
#define __INTRINSIC_DEFINED__interlockedbittestandcomplement
#endif /* __INTRINSIC_PROLOG */
/* Winnt.h-style spellings of the same three operations. */
#if __INTRINSIC_PROLOG(InterlockedBitTestAndSet)
unsigned char InterlockedBitTestAndSet(volatile __LONG32 *a, __LONG32 b);
#if !__has_builtin(InterlockedBitTestAndSet)
__INTRINSICS_USEINLINE
__buildbittesti(InterlockedBitTestAndSet, __LONG32, "orr", /* unused param */)
#endif
#define __INTRINSIC_DEFINED_InterlockedBitTestAndSet
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(InterlockedBitTestAndReset)
unsigned char InterlockedBitTestAndReset(volatile __LONG32 *a, __LONG32 b);
#if !__has_builtin(InterlockedBitTestAndReset)
__INTRINSICS_USEINLINE
__buildbittesti(InterlockedBitTestAndReset, __LONG32, "bic", /* unused param */)
#endif
#define __INTRINSIC_DEFINED_InterlockedBitTestAndReset
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(InterlockedBitTestAndComplement)
unsigned char InterlockedBitTestAndComplement(volatile __LONG32 *a, __LONG32 b);
#if !__has_builtin(InterlockedBitTestAndComplement)
__INTRINSICS_USEINLINE
__buildbittesti(InterlockedBitTestAndComplement, __LONG32, "eor", /* unused param */)
#endif
#define __INTRINSIC_DEFINED_InterlockedBitTestAndComplement
#endif /* __INTRINSIC_PROLOG */
/* Portable C bit scans built on the GCC count-zeros builtins. */
#if __INTRINSIC_PROLOG(_BitScanForward)
__MINGW_EXTENSION unsigned char _BitScanForward(unsigned __LONG32 *Index, unsigned __LONG32 Mask);
#if !__has_builtin(_BitScanForward)
__MINGW_EXTENSION __INTRINSICS_USEINLINE
  1007. unsigned char _BitScanForward(unsigned __LONG32 *Index, unsigned __LONG32 Mask)
  1008. {
  1009. if (Mask == 0)
  1010. return 0;
  1011. *Index = __builtin_ctz(Mask);
  1012. return 1;
  1013. }
#endif
#define __INTRINSIC_DEFINED__BitScanForward
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(_BitScanReverse)
__MINGW_EXTENSION unsigned char _BitScanReverse(unsigned __LONG32 *Index, unsigned __LONG32 Mask);
#if !__has_builtin(_BitScanReverse)
__MINGW_EXTENSION __INTRINSICS_USEINLINE
  1021. unsigned char _BitScanReverse(unsigned __LONG32 *Index, unsigned __LONG32 Mask)
  1022. {
  1023. if (Mask == 0)
  1024. return 0;
  1025. *Index = 31 - __builtin_clz(Mask);
  1026. return 1;
  1027. }
#endif
#define __INTRINSIC_DEFINED__BitScanReverse
#endif /* __INTRINSIC_PROLOG */
#endif /* defined(__arm__) || defined(_ARM_) */
/* ----- AArch64 only intrinsics (mirrors the 32-bit ARM set, plus the
   64-bit variants) ----- */
#if defined(__aarch64__) || defined(_ARM64_)
#if __INTRINSIC_PROLOG(_interlockedbittestandset)
unsigned char _interlockedbittestandset(__LONG32 volatile *a, __LONG32 b);
#if !__has_builtin(_interlockedbittestandset)
__INTRINSICS_USEINLINE
__buildbittesti(_interlockedbittestandset, __LONG32, "orr", /* unused param */)
#endif
#define __INTRINSIC_DEFINED__interlockedbittestandset
#endif /* __INTRINSIC_PROLOG */
  1041. #if __INTRINSIC_PROLOG(_interlockedbittestandreset)
  1042. unsigned char _interlockedbittestandreset(__LONG32 volatile *a, __LONG32 b);
  1043. __INTRINSICS_USEINLINE
  1044. #if !__has_builtin(_interlockedbittestandreset)
  1045. __buildbittesti(_interlockedbittestandreset, __LONG32, "bic", /* unused param */)
  1046. #endif
  1047. #define __INTRINSIC_DEFINED__interlockedbittestandreset
  1048. #endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(_interlockedbittestandcomplement)
unsigned char _interlockedbittestandcomplement(__LONG32 volatile *a, __LONG32 b);
#if !__has_builtin(_interlockedbittestandcomplement)
__INTRINSICS_USEINLINE
__buildbittesti(_interlockedbittestandcomplement, __LONG32, "eor", /* unused param */)
#endif
#define __INTRINSIC_DEFINED__interlockedbittestandcomplement
#endif /* __INTRINSIC_PROLOG */
/* Winnt.h-style spellings of the same three operations. */
#if __INTRINSIC_PROLOG(InterlockedBitTestAndSet)
unsigned char InterlockedBitTestAndSet(volatile __LONG32 *a, __LONG32 b);
#if !__has_builtin(InterlockedBitTestAndSet)
__INTRINSICS_USEINLINE
__buildbittesti(InterlockedBitTestAndSet, __LONG32, "orr", /* unused param */)
#endif
#define __INTRINSIC_DEFINED_InterlockedBitTestAndSet
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(InterlockedBitTestAndReset)
unsigned char InterlockedBitTestAndReset(volatile __LONG32 *a, __LONG32 b);
#if !__has_builtin(InterlockedBitTestAndReset)
__INTRINSICS_USEINLINE
__buildbittesti(InterlockedBitTestAndReset, __LONG32, "bic", /* unused param */)
#endif
#define __INTRINSIC_DEFINED_InterlockedBitTestAndReset
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(InterlockedBitTestAndComplement)
unsigned char InterlockedBitTestAndComplement(volatile __LONG32 *a, __LONG32 b);
#if !__has_builtin(InterlockedBitTestAndComplement)
__INTRINSICS_USEINLINE
__buildbittesti(InterlockedBitTestAndComplement, __LONG32, "eor", /* unused param */)
#endif
#define __INTRINSIC_DEFINED_InterlockedBitTestAndComplement
#endif /* __INTRINSIC_PROLOG */
/* 64-bit variants use the __buildbittesti64 builder. */
#if __INTRINSIC_PROLOG(_interlockedbittestandset64)
unsigned char _interlockedbittestandset64(__int64 volatile *a, __int64 b);
#if !__has_builtin(_interlockedbittestandset64)
__INTRINSICS_USEINLINE
__buildbittesti64(_interlockedbittestandset64, __int64, "orr", /* unused param */)
#endif
#define __INTRINSIC_DEFINED__interlockedbittestandset64
#endif /* __INTRINSIC_PROLOG */
  1089. #if __INTRINSIC_PROLOG(_interlockedbittestandreset64)
  1090. unsigned char _interlockedbittestandreset64(__int64 volatile *a, __int64 b);
  1091. __INTRINSICS_USEINLINE
  1092. #if !__has_builtin(_interlockedbittestandreset64)
  1093. __buildbittesti64(_interlockedbittestandreset64, __int64, "bic", /* unused param */)
  1094. #endif
  1095. #define __INTRINSIC_DEFINED__interlockedbittestandreset64
  1096. #endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(_interlockedbittestandcomplement64)
unsigned char _interlockedbittestandcomplement64(__int64 volatile *a, __int64 b);
#if !__has_builtin(_interlockedbittestandcomplement64)
__INTRINSICS_USEINLINE
__buildbittesti64(_interlockedbittestandcomplement64, __int64, "eor", /* unused param */)
#endif
#define __INTRINSIC_DEFINED__interlockedbittestandcomplement64
#endif /* __INTRINSIC_PROLOG */
/* Winnt.h-style spellings of the 64-bit operations. */
#if __INTRINSIC_PROLOG(InterlockedBitTestAndSet64)
unsigned char InterlockedBitTestAndSet64(volatile __int64 *a, __int64 b);
#if !__has_builtin(InterlockedBitTestAndSet64)
__INTRINSICS_USEINLINE
__buildbittesti64(InterlockedBitTestAndSet64, __int64, "orr", /* unused param */)
#endif
#define __INTRINSIC_DEFINED_InterlockedBitTestAndSet64
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(InterlockedBitTestAndReset64)
unsigned char InterlockedBitTestAndReset64(volatile __int64 *a, __int64 b);
#if !__has_builtin(InterlockedBitTestAndReset64)
__INTRINSICS_USEINLINE
__buildbittesti64(InterlockedBitTestAndReset64, __int64, "bic", /* unused param */)
#endif
#define __INTRINSIC_DEFINED_InterlockedBitTestAndReset64
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(InterlockedBitTestAndComplement64)
unsigned char InterlockedBitTestAndComplement64(volatile __int64 *a, __int64 b);
#if !__has_builtin(InterlockedBitTestAndComplement64)
__INTRINSICS_USEINLINE
__buildbittesti64(InterlockedBitTestAndComplement64, __int64, "eor", /* unused param */)
#endif
#define __INTRINSIC_DEFINED_InterlockedBitTestAndComplement64
#endif /* __INTRINSIC_PROLOG */
/* Atomic 64-bit logical ops via the __buildlogicali builder. */
#if __INTRINSIC_PROLOG(_InterlockedAnd64)
__MINGW_EXTENSION __int64 _InterlockedAnd64(__int64 volatile *, __int64);
#if !__has_builtin(_InterlockedAnd64)
__INTRINSICS_USEINLINE
__buildlogicali(_InterlockedAnd64, __int64, and)
#endif
#define __INTRINSIC_DEFINED__InterlockedAnd64
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(_InterlockedOr64)
__MINGW_EXTENSION __int64 _InterlockedOr64(__int64 volatile *, __int64);
#if !__has_builtin(_InterlockedOr64)
__INTRINSICS_USEINLINE
__buildlogicali(_InterlockedOr64, __int64, or)
#endif
#define __INTRINSIC_DEFINED__InterlockedOr64
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(_InterlockedXor64)
__MINGW_EXTENSION __int64 _InterlockedXor64(__int64 volatile *, __int64);
#if !__has_builtin(_InterlockedXor64)
__INTRINSICS_USEINLINE
__buildlogicali(_InterlockedXor64, __int64, xor)
#endif
#define __INTRINSIC_DEFINED__InterlockedXor64
#endif /* __INTRINSIC_PROLOG */
/* Interlocked arithmetic in terms of GCC __sync builtins; add/sub return
   the NEW value, ExchangeAdd returns the OLD value, per the MS contract. */
#if __INTRINSIC_PROLOG(_InterlockedIncrement64)
__MINGW_EXTENSION __int64 _InterlockedIncrement64(__int64 volatile *Addend);
#if !__has_builtin(_InterlockedIncrement64)
__MINGW_EXTENSION __INTRINSICS_USEINLINE
__int64 _InterlockedIncrement64(__int64 volatile *Addend) {
return __sync_add_and_fetch(Addend, 1);
}
#endif
#define __INTRINSIC_DEFINED__InterlockedIncrement64
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(_InterlockedDecrement64)
__MINGW_EXTENSION __int64 _InterlockedDecrement64(__int64 volatile *Addend);
#if !__has_builtin(_InterlockedDecrement64)
__MINGW_EXTENSION __INTRINSICS_USEINLINE
__int64 _InterlockedDecrement64(__int64 volatile *Addend) {
return __sync_sub_and_fetch(Addend, 1);
}
#endif
#define __INTRINSIC_DEFINED__InterlockedDecrement64
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(_InterlockedExchange64)
__MINGW_EXTENSION __int64 _InterlockedExchange64(__int64 volatile *Target, __int64 Value);
#if !__has_builtin(_InterlockedExchange64)
__MINGW_EXTENSION __INTRINSICS_USEINLINE
__int64 _InterlockedExchange64(__int64 volatile *Target, __int64 Value) {
/* NOTE(review): __sync_lock_test_and_set is documented as an acquire
   barrier only, while MS InterlockedExchange implies a full barrier —
   confirm this is acceptable on AArch64. */
return __sync_lock_test_and_set(Target, Value);
}
#endif
#define __INTRINSIC_DEFINED__InterlockedExchange64
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(_InterlockedExchangeAdd64)
__MINGW_EXTENSION __int64 _InterlockedExchangeAdd64(__int64 volatile *Addend, __int64 Value);
#if !__has_builtin(_InterlockedExchangeAdd64)
__MINGW_EXTENSION __INTRINSICS_USEINLINE
__int64 _InterlockedExchangeAdd64(__int64 volatile *Addend, __int64 Value) {
return __sync_fetch_and_add(Addend, Value);
}
#endif
#define __INTRINSIC_DEFINED__InterlockedExchangeAdd64
#endif /* __INTRINSIC_PROLOG */
/* Portable C bit scans built on the GCC count-zeros builtins. */
#if __INTRINSIC_PROLOG(_BitScanForward)
__MINGW_EXTENSION unsigned char _BitScanForward(unsigned __LONG32 *Index, unsigned __LONG32 Mask);
#if !__has_builtin(_BitScanForward)
__MINGW_EXTENSION __INTRINSICS_USEINLINE
  1197. unsigned char _BitScanForward(unsigned __LONG32 *Index, unsigned __LONG32 Mask)
  1198. {
  1199. if (Mask == 0)
  1200. return 0;
  1201. *Index = __builtin_ctz(Mask);
  1202. return 1;
  1203. }
  1204. #endif
  1205. #define __INTRINSIC_DEFINED__BitScanForward
  1206. #endif /* __INTRINSIC_PROLOG */
  1207. #if __INTRINSIC_PROLOG(_BitScanReverse)
  1208. __MINGW_EXTENSION unsigned char _BitScanReverse(unsigned __LONG32 *Index, unsigned __LONG32 Mask);
  1209. #if !__has_builtin(_BitScanReverse)
  1210. __MINGW_EXTENSION __INTRINSICS_USEINLINE
  1211. unsigned char _BitScanReverse(unsigned __LONG32 *Index, unsigned __LONG32 Mask)
  1212. {
  1213. if (Mask == 0)
  1214. return 0;
  1215. *Index = 31 - __builtin_clz(Mask);
  1216. return 1;
  1217. }
  1218. #endif
  1219. #define __INTRINSIC_DEFINED__BitScanReverse
  1220. #endif /* __INTRINSIC_PROLOG */
  1221. #if __INTRINSIC_PROLOG(_BitScanForward64)
  1222. __MINGW_EXTENSION unsigned char _BitScanForward64(unsigned __LONG32 *Index, unsigned __int64 Mask);
  1223. #if !__has_builtin(_BitScanForward64)
  1224. __MINGW_EXTENSION __INTRINSICS_USEINLINE
  1225. unsigned char _BitScanForward64(unsigned __LONG32 *Index, unsigned __int64 Mask)
  1226. {
  1227. if (Mask == 0)
  1228. return 0;
  1229. *Index = __builtin_ctzll(Mask);
  1230. return 1;
  1231. }
  1232. #endif
  1233. #define __INTRINSIC_DEFINED__BitScanForward64
  1234. #endif /* __INTRINSIC_PROLOG */
  1235. #if __INTRINSIC_PROLOG(_BitScanReverse64)
  1236. __MINGW_EXTENSION unsigned char _BitScanReverse64(unsigned __LONG32 *Index, unsigned __int64 Mask);
  1237. #if !__has_builtin(_BitScanReverse64)
  1238. __MINGW_EXTENSION __INTRINSICS_USEINLINE
  1239. unsigned char _BitScanReverse64(unsigned __LONG32 *Index, unsigned __int64 Mask)
  1240. {
  1241. if (Mask == 0)
  1242. return 0;
  1243. *Index = 63 - __builtin_clzll(Mask);
  1244. return 1;
  1245. }
  1246. #endif
  1247. #define __INTRINSIC_DEFINED__BitScanReverse64
  1248. #endif /* __INTRINSIC_PROLOG */
  1249. #endif /* defined(__aarch64__) || define(_ARM64_) */
  1250. #if defined(__arm__) || defined(_ARM_) || defined(__aarch64__) || defined(_ARM64_)
  1251. #if __INTRINSIC_PROLOG(_bittest)
  1252. unsigned char _bittest(const __LONG32 *__a, __LONG32 __b);
  1253. #if !__has_builtin(_bittest)
  1254. __INTRINSICS_USEINLINE
  1255. unsigned char _bittest(const __LONG32 *__a, __LONG32 __b)
  1256. {
  1257. return (*__a >> __b) & 1;
  1258. }
  1259. #endif
  1260. #define __INTRINSIC_DEFINED__bittest
  1261. #endif /* __INTRINSIC_PROLOG */
  1262. #if __INTRINSIC_PROLOG(_bittestandset)
  1263. unsigned char _bittestandset(__LONG32 *__a, __LONG32 __b);
  1264. #if !__has_builtin(_bittestandset)
  1265. __INTRINSICS_USEINLINE
  1266. unsigned char _bittestandset(__LONG32 *__a, __LONG32 __b)
  1267. {
  1268. unsigned char __v = (*__a >> __b) & 1;
  1269. *__a |= 1UL << __b;
  1270. return __v;
  1271. }
  1272. #endif
  1273. #define __INTRINSIC_DEFINED__bittestandset
  1274. #endif /* __INTRINSIC_PROLOG */
  1275. #if __INTRINSIC_PROLOG(_bittestandreset)
  1276. unsigned char _bittestandreset(__LONG32 *__a, __LONG32 __b);
  1277. #if !__has_builtin(_bittestandreset)
  1278. __INTRINSICS_USEINLINE
  1279. unsigned char _bittestandreset(__LONG32 *__a, __LONG32 __b)
  1280. {
  1281. unsigned char __v = (*__a >> __b) & 1;
  1282. *__a &= ~(1UL << __b);
  1283. return __v;
  1284. }
  1285. #endif
  1286. #define __INTRINSIC_DEFINED__bittestandreset
  1287. #endif /* __INTRINSIC_PROLOG */
  1288. #if __INTRINSIC_PROLOG(_bittestandcomplement)
  1289. unsigned char _bittestandcomplement(__LONG32 *a, __LONG32 b);
  1290. #if !__has_builtin(_bittestandcomplement)
  1291. __INTRINSICS_USEINLINE
  1292. unsigned char _bittestandcomplement(__LONG32 *__a, __LONG32 __b)
  1293. {
  1294. unsigned char __v = (*__a >> __b) & 1;
  1295. *__a ^= 1UL << __b;
  1296. return __v;
  1297. }
  1298. #endif
  1299. #define __INTRINSIC_DEFINED__bittestandcomplement
  1300. #endif /* __INTRINSIC_PROLOG */
  1301. #endif /* defined(__arm__) || defined(_ARM_) || defined(__aarch64__) || defined(_ARM64_) */
  1302. #if defined(__aarch64__) || defined(_ARM64_)
  1303. #if __INTRINSIC_PROLOG(_bittest64)
  1304. unsigned char _bittest64(const __int64 *__a, __int64 __b);
  1305. #if !__has_builtin(_bittest64)
  1306. __INTRINSICS_USEINLINE
  1307. unsigned char _bittest64(const __int64 *__a, __int64 __b)
  1308. {
  1309. return (*__a >> __b) & 1;
  1310. }
  1311. #endif
  1312. #define __INTRINSIC_DEFINED__bittest64
  1313. #endif /* __INTRINSIC_PROLOG */
  1314. #if __INTRINSIC_PROLOG(_bittestandset64)
  1315. unsigned char _bittestandset64(__int64 *__a, __int64 __b);
  1316. #if !__has_builtin(_bittestandset64)
  1317. __INTRINSICS_USEINLINE
  1318. unsigned char _bittestandset64(__int64 *__a, __int64 __b)
  1319. {
  1320. unsigned char __v = (*__a >> __b) & 1;
  1321. *__a |= 1ULL << __b;
  1322. return __v;
  1323. }
  1324. #endif
  1325. #define __INTRINSIC_DEFINED__bittestandset64
  1326. #endif /* __INTRINSIC_PROLOG */
  1327. #if __INTRINSIC_PROLOG(_bittestandreset64)
  1328. unsigned char _bittestandreset64(__int64 *__a, __int64 __b);
  1329. #if !__has_builtin(_bittestandreset64)
  1330. __INTRINSICS_USEINLINE
  1331. unsigned char _bittestandreset64(__int64 *__a, __int64 __b)
  1332. {
  1333. unsigned char __v = (*__a >> __b) & 1;
  1334. *__a &= ~(1ULL << __b);
  1335. return __v;
  1336. }
  1337. #endif
  1338. #define __INTRINSIC_DEFINED__bittestandreset64
  1339. #endif /* __INTRINSIC_PROLOG */
  1340. #if __INTRINSIC_PROLOG(_bittestandcomplement64)
  1341. unsigned char _bittestandcomplement64(__int64 *a, __int64 b);
  1342. #if !__has_builtin(_bittestandcomplement64)
  1343. __INTRINSICS_USEINLINE
  1344. unsigned char _bittestandcomplement64(__int64 *__a, __int64 __b)
  1345. {
  1346. unsigned char __v = (*__a >> __b) & 1;
  1347. *__a ^= 1ULL << __b;
  1348. return __v;
  1349. }
  1350. #endif
  1351. #define __INTRINSIC_DEFINED__bittestandcomplement64
  1352. #endif /* __INTRINSIC_PROLOG */
  1353. #endif /* defined(__aarch64__) || define(_ARM64_) */
  1354. /* ***************************************************** */
  1355. #if defined(__x86_64__) || defined(_AMD64_) || defined(__i386__) || defined(_X86_) || defined(__arm__) || defined(_ARM_) || defined(__aarch64__) || defined(_ARM64_)
#if __INTRINSIC_PROLOG(__popcnt16)
unsigned short __popcnt16(unsigned short);
#if !__has_builtin(__popcnt16)
__INTRINSICS_USEINLINE
/* Population count: number of set bits in value.  __builtin_popcount
 * operates on int; the result (at most 16 here) is implicitly converted
 * back to unsigned short. */
unsigned short __popcnt16(unsigned short value)
{
return __builtin_popcount(value);
}
#endif
#define __INTRINSIC_DEFINED___popcnt16
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(__popcnt)
unsigned int __popcnt(unsigned int);
#if !__has_builtin(__popcnt)
__INTRINSICS_USEINLINE
/* Population count of a 32-bit value. */
unsigned int __popcnt(unsigned int value)
{
return __builtin_popcount(value);
}
#endif
#define __INTRINSIC_DEFINED___popcnt
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(__popcnt64)
unsigned __int64 __popcnt64(unsigned __int64);
#if !__has_builtin(__popcnt64)
__INTRINSICS_USEINLINE
/* Population count of a 64-bit value (long long builtin variant). */
unsigned __int64 __popcnt64(unsigned __int64 value)
{
return __builtin_popcountll(value);
}
#endif
#define __INTRINSIC_DEFINED___popcnt64
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(_InterlockedAnd)
/* Atomic 32-bit AND; fallback body generated by the __buildlogicali helper
 * macro (defined earlier in this file) for the named operation. */
__LONG32 _InterlockedAnd(__LONG32 volatile *, __LONG32);
#if !__has_builtin(_InterlockedAnd)
__INTRINSICS_USEINLINE
__buildlogicali(_InterlockedAnd, __LONG32, and)
#endif
#define __INTRINSIC_DEFINED__InterlockedAnd
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(_InterlockedOr)
/* Atomic 32-bit OR; fallback generated by __buildlogicali. */
__LONG32 _InterlockedOr(__LONG32 volatile *, __LONG32);
#if !__has_builtin(_InterlockedOr)
__INTRINSICS_USEINLINE
__buildlogicali(_InterlockedOr, __LONG32, or)
#endif
#define __INTRINSIC_DEFINED__InterlockedOr
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(_InterlockedXor)
/* Atomic 32-bit XOR; fallback generated by __buildlogicali. */
__LONG32 _InterlockedXor(__LONG32 volatile *, __LONG32);
#if !__has_builtin(_InterlockedXor)
__INTRINSICS_USEINLINE
__buildlogicali(_InterlockedXor, __LONG32, xor)
#endif
#define __INTRINSIC_DEFINED__InterlockedXor
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(_InterlockedIncrement16)
short _InterlockedIncrement16(short volatile *Addend);
#if !__has_builtin(_InterlockedIncrement16)
__INTRINSICS_USEINLINE
/* Atomically increments *Addend and returns the NEW value. */
short _InterlockedIncrement16(short volatile *Addend) {
return __sync_add_and_fetch(Addend, 1);
}
#endif
#define __INTRINSIC_DEFINED__InterlockedIncrement16
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(_InterlockedDecrement16)
short _InterlockedDecrement16(short volatile *Addend);
#if !__has_builtin(_InterlockedDecrement16)
__INTRINSICS_USEINLINE
/* Atomically decrements *Addend and returns the NEW value. */
short _InterlockedDecrement16(short volatile *Addend) {
return __sync_sub_and_fetch(Addend, 1);
}
#endif
#define __INTRINSIC_DEFINED__InterlockedDecrement16
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(_InterlockedCompareExchange16)
short _InterlockedCompareExchange16(short volatile *Destination, short ExChange, short Comperand);
#if !__has_builtin(_InterlockedCompareExchange16)
__INTRINSICS_USEINLINE
/* Atomic compare-and-swap: if *Destination equals Comperand, store
 * ExChange; always returns the initial value of *Destination.  The
 * "Comperand"/"ExChange" spellings match Microsoft's documentation. */
short _InterlockedCompareExchange16(short volatile *Destination, short ExChange, short Comperand) {
return __sync_val_compare_and_swap(Destination, Comperand, ExChange);
}
#endif
#define __INTRINSIC_DEFINED__InterlockedCompareExchange16
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(_InterlockedExchangeAdd)
__LONG32 _InterlockedExchangeAdd(__LONG32 volatile *Addend, __LONG32 Value);
#if !__has_builtin(_InterlockedExchangeAdd)
__INTRINSICS_USEINLINE
/* Atomically adds Value to *Addend and returns the OLD value. */
__LONG32 _InterlockedExchangeAdd(__LONG32 volatile *Addend, __LONG32 Value) {
return __sync_fetch_and_add(Addend, Value);
}
#endif
#define __INTRINSIC_DEFINED__InterlockedExchangeAdd
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(_InterlockedCompareExchange)
__LONG32 _InterlockedCompareExchange(__LONG32 volatile *Destination, __LONG32 ExChange, __LONG32 Comperand);
#if !__has_builtin(_InterlockedCompareExchange)
__INTRINSICS_USEINLINE
/* Atomic compare-and-swap: if *Destination equals Comperand, store
 * ExChange; always returns the initial value of *Destination. */
__LONG32 _InterlockedCompareExchange(__LONG32 volatile *Destination, __LONG32 ExChange, __LONG32 Comperand) {
return __sync_val_compare_and_swap(Destination, Comperand, ExChange);
}
#endif
#define __INTRINSIC_DEFINED__InterlockedCompareExchange
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(_InterlockedIncrement)
__LONG32 _InterlockedIncrement(__LONG32 volatile *Addend);
#if !__has_builtin(_InterlockedIncrement)
__INTRINSICS_USEINLINE
/* Atomically increments *Addend and returns the NEW value. */
__LONG32 _InterlockedIncrement(__LONG32 volatile *Addend) {
return __sync_add_and_fetch(Addend, 1);
}
#endif
#define __INTRINSIC_DEFINED__InterlockedIncrement
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(_InterlockedDecrement)
__LONG32 _InterlockedDecrement(__LONG32 volatile *Addend);
#if !__has_builtin(_InterlockedDecrement)
__INTRINSICS_USEINLINE
/* Atomically decrements *Addend and returns the NEW value. */
__LONG32 _InterlockedDecrement(__LONG32 volatile *Addend) {
return __sync_sub_and_fetch(Addend, 1);
}
#endif
#define __INTRINSIC_DEFINED__InterlockedDecrement
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(_InterlockedAdd)
__LONG32 _InterlockedAdd(__LONG32 volatile *Addend, __LONG32 Value);
#if !__has_builtin(_InterlockedAdd)
__INTRINSICS_USEINLINE
/* Atomically adds Value to *Addend and returns the NEW value (contrast
 * with _InterlockedExchangeAdd, which returns the old value). */
__LONG32 _InterlockedAdd(__LONG32 volatile *Addend, __LONG32 Value) {
return __sync_add_and_fetch(Addend, Value);
}
#endif
#define __INTRINSIC_DEFINED__InterlockedAdd
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(_InterlockedAdd64)
__MINGW_EXTENSION __int64 _InterlockedAdd64(__int64 volatile *Addend, __int64 Value);
#if !__has_builtin(_InterlockedAdd64)
__MINGW_EXTENSION __INTRINSICS_USEINLINE
/* 64-bit variant: atomically adds Value and returns the NEW value. */
__int64 _InterlockedAdd64(__int64 volatile *Addend, __int64 Value) {
return __sync_add_and_fetch(Addend, Value);
}
#endif
#define __INTRINSIC_DEFINED__InterlockedAdd64
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(_InterlockedExchange)
__LONG32 _InterlockedExchange(__LONG32 volatile *Target, __LONG32 Value);
#if !__has_builtin(_InterlockedExchange)
__INTRINSICS_USEINLINE
/* Atomically stores Value into *Target and returns the previous contents.
 * NOTE(review): __sync_lock_test_and_set is documented by GCC as an
 * acquire barrier only — confirm this matches the intended semantics. */
__LONG32 _InterlockedExchange(__LONG32 volatile *Target, __LONG32 Value) {
return __sync_lock_test_and_set(Target, Value);
}
#endif
#define __INTRINSIC_DEFINED__InterlockedExchange
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(_InterlockedCompareExchange64)
__MINGW_EXTENSION __int64 _InterlockedCompareExchange64(__int64 volatile *Destination, __int64 ExChange, __int64 Comperand);
#if !__has_builtin(_InterlockedCompareExchange64)
__MINGW_EXTENSION __INTRINSICS_USEINLINE
/* 64-bit atomic compare-and-swap: if *Destination equals Comperand, store
 * ExChange; always returns the initial value of *Destination. */
__int64 _InterlockedCompareExchange64(__int64 volatile *Destination, __int64 ExChange, __int64 Comperand) {
return __sync_val_compare_and_swap(Destination, Comperand, ExChange);
}
#endif
#define __INTRINSIC_DEFINED__InterlockedCompareExchange64
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(_InterlockedCompareExchangePointer)
void *_InterlockedCompareExchangePointer(void * volatile *Destination, void *ExChange, void *Comperand);
#if !__has_builtin(_InterlockedCompareExchangePointer)
__INTRINSICS_USEINLINE
/* Pointer-sized atomic compare-and-swap; returns the initial value of
 * *Destination. */
void *_InterlockedCompareExchangePointer(void *volatile *Destination, void *ExChange, void *Comperand) {
return __sync_val_compare_and_swap(Destination, Comperand, ExChange);
}
#endif
#define __INTRINSIC_DEFINED__InterlockedCompareExchangePointer
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(_InterlockedExchangePointer)
void *_InterlockedExchangePointer(void *volatile *Target,void *Value);
#if !__has_builtin(_InterlockedExchangePointer)
__INTRINSICS_USEINLINE
/* Atomically stores Value into *Target and returns the previous pointer. */
void *_InterlockedExchangePointer(void *volatile *Target,void *Value) {
return __sync_lock_test_and_set(Target, Value);
}
#endif
#define __INTRINSIC_DEFINED__InterlockedExchangePointer
#endif /* __INTRINSIC_PROLOG */
  1543. #endif /* defined(__x86_64__) || defined(_AMD64_) || defined(__i386__) || defined(_X86_) || defined(__arm__) || defined(_ARM_) || defined(__aarch64__) || defined(_ARM64_) */
  1544. #if defined(__x86_64__) || defined(_AMD64_) || defined(__i386__) || defined(_X86_)
#if __INTRINSIC_PROLOG(__int2c)
void __int2c(void);
#if !__has_builtin(__int2c)
__INTRINSICS_USEINLINE
/* Raises software interrupt 0x2c via the __buildint helper macro (defined
 * earlier in this file). */
void __int2c(void) {
__buildint(0x2c);
}
#endif
#define __INTRINSIC_DEFINED___int2c
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(__stosb)
/* Repeated-store intrinsics: each fallback is generated by the __buildstos
 * helper macro (defined earlier in this file); the string argument is
 * presumably the AT&T|Intel instruction-suffix pair — see the macro's
 * definition for the exact meaning. */
void __stosb(unsigned char *, unsigned char, size_t);
#if !__has_builtin(__stosb)
__INTRINSICS_USEINLINE
__buildstos(__stosb, unsigned char, "b|b")
#endif
#define __INTRINSIC_DEFINED___stosb
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(__stosw)
void __stosw(unsigned short *, unsigned short, size_t);
#if !__has_builtin(__stosw)
__INTRINSICS_USEINLINE
__buildstos(__stosw, unsigned short, "w|w")
#endif
#define __INTRINSIC_DEFINED___stosw
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(__stosd)
void __stosd(unsigned __LONG32 *, unsigned __LONG32, size_t);
#if !__has_builtin(__stosd)
__INTRINSICS_USEINLINE
__buildstos(__stosd, unsigned __LONG32, "l|d")
#endif
#define __INTRINSIC_DEFINED___stosd
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(_interlockedbittestandset)
/* Atomic bit-test intrinsics: each fallback is generated by the
 * __buildbittesti helper macro around a `lock bts/btr/btc` instruction.
 * The "{ ... | ... }" asm template carries AT&T and Intel dialect
 * alternatives, __FLAGSET captures the carry-flag result, and "I" is the
 * x86 constraint for an immediate in 0..31. */
unsigned char _interlockedbittestandset(__LONG32 volatile *a, __LONG32 b);
#if !__has_builtin(_interlockedbittestandset)
__INTRINSICS_USEINLINE
__buildbittesti(_interlockedbittestandset, __LONG32, "lock bts{l %[Offset],%[Base] | %[Base],%[Offset]}" __FLAGSET, "I")
#endif
#define __INTRINSIC_DEFINED__interlockedbittestandset
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(_interlockedbittestandreset)
unsigned char _interlockedbittestandreset(__LONG32 volatile *a, __LONG32 b);
#if !__has_builtin(_interlockedbittestandreset)
__INTRINSICS_USEINLINE
__buildbittesti(_interlockedbittestandreset, __LONG32, "lock btr{l %[Offset],%[Base] | %[Base],%[Offset]}" __FLAGSET, "I")
#endif
#define __INTRINSIC_DEFINED__interlockedbittestandreset
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(_interlockedbittestandcomplement)
unsigned char _interlockedbittestandcomplement(__LONG32 volatile *a, __LONG32 b);
#if !__has_builtin(_interlockedbittestandcomplement)
__INTRINSICS_USEINLINE
__buildbittesti(_interlockedbittestandcomplement, __LONG32, "lock btc{l %[Offset],%[Base] | %[Base],%[Offset]}" __FLAGSET, "I")
#endif
#define __INTRINSIC_DEFINED__interlockedbittestandcomplement
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(InterlockedBitTestAndSet)
/* CamelCase aliases expanded with identical asm templates. */
unsigned char InterlockedBitTestAndSet(volatile __LONG32 *a, __LONG32 b);
#if !__has_builtin(InterlockedBitTestAndSet)
__INTRINSICS_USEINLINE
__buildbittesti(InterlockedBitTestAndSet, __LONG32, "lock bts{l %[Offset],%[Base] | %[Base],%[Offset]}" __FLAGSET, "I")
#endif
#define __INTRINSIC_DEFINED_InterlockedBitTestAndSet
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(InterlockedBitTestAndReset)
unsigned char InterlockedBitTestAndReset(volatile __LONG32 *a, __LONG32 b);
#if !__has_builtin(InterlockedBitTestAndReset)
__INTRINSICS_USEINLINE
__buildbittesti(InterlockedBitTestAndReset, __LONG32, "lock btr{l %[Offset],%[Base] | %[Base],%[Offset]}" __FLAGSET, "I")
#endif
#define __INTRINSIC_DEFINED_InterlockedBitTestAndReset
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(InterlockedBitTestAndComplement)
unsigned char InterlockedBitTestAndComplement(volatile __LONG32 *a, __LONG32 b);
#if !__has_builtin(InterlockedBitTestAndComplement)
__INTRINSICS_USEINLINE
__buildbittesti(InterlockedBitTestAndComplement, __LONG32, "lock btc{l %[Offset],%[Base] | %[Base],%[Offset]}" __FLAGSET, "I")
#endif
#define __INTRINSIC_DEFINED_InterlockedBitTestAndComplement
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(_BitScanForward)
/* x86 bit-scan intrinsics: fallbacks generated by the __buildbitscan
 * helper macro around `bsf`/`bsr`, with AT&T|Intel dialect alternatives
 * in the asm template. */
unsigned char _BitScanForward(unsigned __LONG32 *Index, unsigned __LONG32 Mask);
#if !__has_builtin(_BitScanForward)
__INTRINSICS_USEINLINE
__buildbitscan(_BitScanForward, unsigned __LONG32, "bsf{l %[Mask],%[Index] | %[Index],%[Mask]}")
#endif
#define __INTRINSIC_DEFINED__BitScanForward
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(_BitScanReverse)
unsigned char _BitScanReverse(unsigned __LONG32 *Index, unsigned __LONG32 Mask);
#if !__has_builtin(_BitScanReverse)
__INTRINSICS_USEINLINE
__buildbitscan(_BitScanReverse, unsigned __LONG32, "bsr{l %[Mask],%[Index] | %[Index],%[Mask]}")
#endif
#define __INTRINSIC_DEFINED__BitScanReverse
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(_bittest)
/* Non-interlocked bit-test intrinsics: fallbacks generated by the
 * __buildbittest / __buildbittestand helper macros; "I" is the x86
 * constraint for an immediate in 0..31. */
unsigned char _bittest(__LONG32 const *a, __LONG32 b);
#if !__has_builtin(_bittest)
__INTRINSICS_USEINLINE
__buildbittest(_bittest, __LONG32, "l", "I")
#endif
#define __INTRINSIC_DEFINED__bittest
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(_bittestandset)
unsigned char _bittestandset(__LONG32 *a, __LONG32 b);
#if !__has_builtin(_bittestandset)
__INTRINSICS_USEINLINE
__buildbittestand(_bittestandset, __LONG32, "bts", "I", "l")
#endif
#define __INTRINSIC_DEFINED__bittestandset
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(_bittestandreset)
unsigned char _bittestandreset(__LONG32 *a, __LONG32 b);
#if !__has_builtin(_bittestandreset)
__INTRINSICS_USEINLINE
__buildbittestand(_bittestandreset, __LONG32, "btr", "I", "l")
#endif
#define __INTRINSIC_DEFINED__bittestandreset
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(_bittestandcomplement)
unsigned char _bittestandcomplement(__LONG32 *a, __LONG32 b);
#if !__has_builtin(_bittestandcomplement)
__INTRINSICS_USEINLINE
__buildbittestand(_bittestandcomplement, __LONG32, "btc", "I", "l")
#endif
#define __INTRINSIC_DEFINED__bittestandcomplement
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(__inbyte)
/* Port I/O intrinsics: fallbacks generated by the __build_inport /
 * __build_outport helper macros (defined earlier in this file); the string
 * argument selects the operand size variant. */
unsigned char __inbyte(unsigned short Port);
#if !__has_builtin(__inbyte)
__INTRINSICS_USEINLINE
__build_inport(__inbyte, unsigned char, "b")
#endif
#define __INTRINSIC_DEFINED___inbyte
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(__inword)
unsigned short __inword(unsigned short Port);
#if !__has_builtin(__inword)
__INTRINSICS_USEINLINE
__build_inport(__inword, unsigned short, "w")
#endif
#define __INTRINSIC_DEFINED___inword
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(__indword)
unsigned __LONG32 __indword(unsigned short Port);
#if !__has_builtin(__indword)
__INTRINSICS_USEINLINE
__build_inport(__indword, unsigned __LONG32, "l")
#endif
#define __INTRINSIC_DEFINED___indword
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(__outbyte)
void __outbyte(unsigned short Port, unsigned char Data);
#if !__has_builtin(__outbyte)
__INTRINSICS_USEINLINE
__build_outport(__outbyte, unsigned char, "b")
#endif
#define __INTRINSIC_DEFINED___outbyte
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(__outword)
void __outword(unsigned short Port, unsigned short Data);
#if !__has_builtin(__outword)
__INTRINSICS_USEINLINE
__build_outport(__outword, unsigned short, "w")
#endif
#define __INTRINSIC_DEFINED___outword
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(__outdword)
void __outdword(unsigned short Port, unsigned __LONG32 Data);
#if !__has_builtin(__outdword)
__INTRINSICS_USEINLINE
__build_outport(__outdword, unsigned __LONG32, "l")
#endif
#define __INTRINSIC_DEFINED___outdword
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(__inbytestring)
/* String (repeated) port I/O intrinsics: fallbacks generated by the
 * __build_inportstring / __build_outportstring helper macros; the two
 * string arguments presumably select the instruction suffix per asm
 * dialect — see the macro definitions earlier in this file. */
void __inbytestring(unsigned short Port, unsigned char *Buffer, unsigned __LONG32 Count);
#if !__has_builtin(__inbytestring)
__INTRINSICS_USEINLINE
__build_inportstring(__inbytestring, unsigned char, "b", "b")
#endif
#define __INTRINSIC_DEFINED___inbytestring
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(__inwordstring)
void __inwordstring(unsigned short Port, unsigned short *Buffer, unsigned __LONG32 Count);
#if !__has_builtin(__inwordstring)
__INTRINSICS_USEINLINE
__build_inportstring(__inwordstring, unsigned short, "w", "w")
#endif
#define __INTRINSIC_DEFINED___inwordstring
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(__indwordstring)
void __indwordstring(unsigned short Port, unsigned __LONG32 *Buffer, unsigned __LONG32 Count);
#if !__has_builtin(__indwordstring)
__INTRINSICS_USEINLINE
__build_inportstring(__indwordstring, unsigned __LONG32, "l", "d")
#endif
#define __INTRINSIC_DEFINED___indwordstring
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(__outbytestring)
void __outbytestring(unsigned short Port, unsigned char *Buffer, unsigned __LONG32 Count);
#if !__has_builtin(__outbytestring)
__INTRINSICS_USEINLINE
__build_outportstring(__outbytestring, unsigned char, "b", "b")
#endif
#define __INTRINSIC_DEFINED___outbytestring
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(__outwordstring)
void __outwordstring(unsigned short Port, unsigned short *Buffer, unsigned __LONG32 Count);
#if !__has_builtin(__outwordstring)
__INTRINSICS_USEINLINE
__build_outportstring(__outwordstring, unsigned short, "w", "w")
#endif
#define __INTRINSIC_DEFINED___outwordstring
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(__outdwordstring)
void __outdwordstring(unsigned short Port, unsigned __LONG32 *Buffer, unsigned __LONG32 Count);
#if !__has_builtin(__outdwordstring)
__INTRINSICS_USEINLINE
__build_outportstring(__outdwordstring, unsigned __LONG32, "l", "d")
#endif
#define __INTRINSIC_DEFINED___outdwordstring
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(__cpuid)
void __cpuid(int CPUInfo[4], int InfoType);
#if !__has_builtin(__cpuid)
__INTRINSICS_USEINLINE
/* Executes CPUID with eax = InfoType and stores eax/ebx/ecx/edx into
 * CPUInfo[0..3].  NOTE(review): ecx is not set before the instruction, so
 * leaves whose result depends on the subleaf should use __cpuidex. */
void __cpuid(int CPUInfo[4], int InfoType) {
__asm__ __volatile__ (
"cpuid"
: "=a" (CPUInfo [0]), "=b" (CPUInfo [1]), "=c" (CPUInfo [2]), "=d" (CPUInfo [3])
: "a" (InfoType));
}
#endif
#define __INTRINSIC_DEFINED___cpuid
#endif /* __INTRINSIC_PROLOG */
#if (!defined(__GNUC__) || __GNUC__ < 11)
/* Skipped for GCC >= 11 — presumably because newer GCC ships its own
 * __cpuidex; verify against the GCC cpuid.h header. */
#if __INTRINSIC_PROLOG(__cpuidex)
void __cpuidex(int CPUInfo[4], int, int);
#if !__has_builtin(__cpuidex)
__INTRINSICS_USEINLINE
/* Executes CPUID with eax = function_id and ecx = subfunction_id (the
 * subleaf), storing eax/ebx/ecx/edx into CPUInfo[0..3]. */
void __cpuidex(int CPUInfo[4], int function_id, int subfunction_id) {
__asm__ __volatile__ (
"cpuid"
: "=a" (CPUInfo [0]), "=b" (CPUInfo [1]), "=c" (CPUInfo [2]), "=d" (CPUInfo [3])
: "a" (function_id), "c" (subfunction_id));
}
#endif
#define __INTRINSIC_DEFINED___cpuidex
#endif /* __INTRINSIC_PROLOG */
#endif /* __GNUC__ < 11 */
#if __INTRINSIC_PROLOG(__readmsr)
__MINGW_EXTENSION unsigned __int64 __readmsr(unsigned __LONG32);
#if !__has_builtin(__readmsr)
__INTRINSICS_USEINLINE
/* Reads the model-specific register selected by ecx = msr.  RDMSR returns
 * the low half in eax (val1) and the high half in edx (val2); the two are
 * recombined into a 64-bit value below.  Privileged instruction. */
unsigned __int64 __readmsr(unsigned __LONG32 msr)
{
#if defined(__x86_64__) || defined(_AMD64_)
unsigned __int64 val1, val2;
#else
unsigned __LONG32 val1, val2;
#endif /* defined(__x86_64__) || defined(_AMD64_) */
__asm__ __volatile__(
"rdmsr"
: "=a" (val1), "=d" (val2)
: "c" (msr));
return ((unsigned __int64) val1) | (((unsigned __int64)val2) << 32);
}
#endif
#define __INTRINSIC_DEFINED___readmsr
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(__writemsr)
__MINGW_EXTENSION void __writemsr(unsigned __LONG32, unsigned __int64);
#if !__has_builtin(__writemsr)
__INTRINSICS_USEINLINE
/* Writes Value to the model-specific register selected by ecx = msr.
 * WRMSR takes the low half in eax (val1) and the high half in edx (val2).
 * Privileged instruction. */
void __writemsr(unsigned __LONG32 msr, unsigned __int64 Value)
{
unsigned __LONG32 val1 = Value, val2 = Value >> 32;
__asm__ __volatile__ (
"wrmsr"
:
: "c" (msr), "a" (val1), "d" (val2));
}
#endif
#define __INTRINSIC_DEFINED___writemsr
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(__movsb)
/* Repeated-move intrinsics: fallbacks generated by the __buildmov helper
 * macro (defined earlier in this file); the string argument selects the
 * operand size variant. */
void __movsb(unsigned char *Destination, unsigned char const *Source, size_t Count);
#if !__has_builtin(__movsb)
__INTRINSICS_USEINLINE
__buildmov(__movsb, unsigned char, "b")
#endif
#define __INTRINSIC_DEFINED___movsb
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(__movsw)
void __movsw(unsigned short *Dest, unsigned short const *Source, size_t Count);
#if !__has_builtin(__movsw)
__INTRINSICS_USEINLINE
__buildmov(__movsw, unsigned short, "w")
#endif
#define __INTRINSIC_DEFINED___movsw
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(__movsd)
void __movsd(unsigned __LONG32 *Dest, unsigned __LONG32 const *Source, size_t Count);
#if !__has_builtin(__movsd)
__INTRINSICS_USEINLINE
__buildmov(__movsd, unsigned __LONG32, "d")
#endif
#define __INTRINSIC_DEFINED___movsd
#endif /* __INTRINSIC_PROLOG */
/* GCC 8 has already defined _xgetbv, Clang 9 has _xgetbv defined as a macro
 * redirecting to the __builtin_ia32_xgetbv builtin. */
#if (!defined(__GNUC__) || __GNUC__ < 8) && !defined(_xgetbv)
/* NOTE: This should be in immintrin.h */
#if __INTRINSIC_PROLOG(_xgetbv)
unsigned __int64 _xgetbv(unsigned int);
#if !__has_builtin(_xgetbv)
__INTRINSICS_USEINLINE
/* Reads the extended control register selected by ecx = index (index 0 is
 * XCR0).  XGETBV returns the low half in eax (val1) and the high half in
 * edx (val2); they are recombined into a 64-bit value below. */
unsigned __int64 _xgetbv(unsigned int index)
{
#if defined(__x86_64__) || defined(_AMD64_)
unsigned __int64 val1, val2;
#else
unsigned __LONG32 val1, val2;
#endif /* defined(__x86_64__) || defined(_AMD64_) */
__asm__ __volatile__(
"xgetbv"
: "=a" (val1), "=d" (val2)
: "c" (index));
return (((unsigned __int64)val2) << 32) | val1;
}
#endif
#define __INTRINSIC_DEFINED__xgetbv
#endif /* __INTRINSIC_PROLOG */
#endif /* __GNUC__ < 8 */
  1883. #endif /* defined(__x86_64__) || defined(_AMD64_) || defined(__i386__) || defined(_X86_) */
  1884. /* ***************************************************** */
  1885. #if defined(__i386__) || defined(_X86_)
#if __INTRINSIC_PROLOG(__readfsbyte)
/* Read one byte from FS:[Offset].  __buildreadseg (defined earlier in
   this file) presumably emits the segment-relative load; the "fs"/"b"
   arguments select the segment and the byte operand width — confirm
   against the macro definition. */
unsigned char __readfsbyte(unsigned __LONG32 Offset);
#if !__has_builtin(__readfsbyte)
__INTRINSICS_USEINLINE
__buildreadseg(__readfsbyte, unsigned char, "fs", "b")
#endif
#define __INTRINSIC_DEFINED___readfsbyte
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(__readfsword)
/* Read one 16-bit word from FS:[Offset] ("w" operand width). */
unsigned short __readfsword(unsigned __LONG32 Offset);
#if !__has_builtin(__readfsword)
__INTRINSICS_USEINLINE
__buildreadseg(__readfsword, unsigned short, "fs", "w")
#endif
#define __INTRINSIC_DEFINED___readfsword
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(__readfsdword)
/* Read one 32-bit doubleword from FS:[Offset] ("l" operand width). */
unsigned __LONG32 __readfsdword(unsigned __LONG32 Offset);
#if !__has_builtin(__readfsdword)
__INTRINSICS_USEINLINE
__buildreadseg(__readfsdword, unsigned __LONG32, "fs", "l")
#endif
#define __INTRINSIC_DEFINED___readfsdword
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(__writefsbyte)
/* Write one byte of Data to FS:[Offset].  __buildwriteseg (defined
   earlier in this file) presumably emits the segment-relative store;
   the "fs"/size-suffix arguments select segment and operand width —
   confirm against the macro definition. */
void __writefsbyte(unsigned __LONG32 Offset,unsigned char Data);
#if !__has_builtin(__writefsbyte)
__INTRINSICS_USEINLINE
__buildwriteseg(__writefsbyte, unsigned char, "fs", "b")
#endif
#define __INTRINSIC_DEFINED___writefsbyte
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(__writefsword)
/* Write one 16-bit word of Data to FS:[Offset] ("w" operand width). */
void __writefsword(unsigned __LONG32 Offset,unsigned short Data);
#if !__has_builtin(__writefsword)
__INTRINSICS_USEINLINE
__buildwriteseg(__writefsword, unsigned short, "fs", "w")
#endif
#define __INTRINSIC_DEFINED___writefsword
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(__writefsdword)
/* Write one 32-bit doubleword of Data to FS:[Offset] ("l" operand width). */
void __writefsdword(unsigned __LONG32 Offset,unsigned __LONG32 Data);
#if !__has_builtin(__writefsdword)
__INTRINSICS_USEINLINE
__buildwriteseg(__writefsdword, unsigned __LONG32, "fs", "l")
#endif
#define __INTRINSIC_DEFINED___writefsdword
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(__readcr0)
/* Read control register CR0 (32-bit on this i386-only path).
   __build_readcr (defined earlier in this file) presumably emits the
   "mov from CRn" instruction, with the string argument naming the
   register number — confirm against the macro definition.  Control
   register access is privileged; these are for kernel-mode code. */
unsigned __LONG32 __readcr0(void);
#if !__has_builtin(__readcr0)
__INTRINSICS_USEINLINE
__build_readcr(__readcr0, unsigned __LONG32, "0")
#endif
#define __INTRINSIC_DEFINED___readcr0
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(__readcr2)
/* Read control register CR2. */
unsigned __LONG32 __readcr2(void);
#if !__has_builtin(__readcr2)
__INTRINSICS_USEINLINE
__build_readcr(__readcr2, unsigned __LONG32, "2")
#endif
#define __INTRINSIC_DEFINED___readcr2
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(__readcr3)
/* Read control register CR3. */
unsigned __LONG32 __readcr3(void);
#if !__has_builtin(__readcr3)
__INTRINSICS_USEINLINE
__build_readcr(__readcr3, unsigned __LONG32, "3")
#endif
#define __INTRINSIC_DEFINED___readcr3
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(__readcr4)
/* Read control register CR4. */
unsigned __LONG32 __readcr4(void);
#if !__has_builtin(__readcr4)
__INTRINSICS_USEINLINE
__build_readcr(__readcr4, unsigned __LONG32, "4")
#endif
#define __INTRINSIC_DEFINED___readcr4
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(__readcr8)
/* Read control register CR8.
   NOTE(review): plain "mov from CR8" is normally only encodable in
   64-bit mode, yet this lives in the i386-only section — presumably
   kept for MSVC API parity; confirm how __build_readcr encodes it. */
unsigned __LONG32 __readcr8(void);
#if !__has_builtin(__readcr8)
__INTRINSICS_USEINLINE
__build_readcr(__readcr8, unsigned __LONG32, "8")
#endif
#define __INTRINSIC_DEFINED___readcr8
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(__writecr0)
/* Write control register CR0 (32-bit on this i386-only path).
   __build_writecr (defined earlier in this file) presumably emits the
   "mov to CRn" instruction, with the string argument naming the
   register number — confirm against the macro definition.  Privileged;
   kernel-mode only.  Note there is deliberately no __writecr2 — it
   mirrors MSVC's intrinsic set. */
void __writecr0(unsigned __LONG32);
#if !__has_builtin(__writecr0)
__INTRINSICS_USEINLINE
__build_writecr(__writecr0, unsigned __LONG32, "0")
#endif
#define __INTRINSIC_DEFINED___writecr0
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(__writecr3)
/* Write control register CR3. */
void __writecr3(unsigned __LONG32);
#if !__has_builtin(__writecr3)
__INTRINSICS_USEINLINE
__build_writecr(__writecr3, unsigned __LONG32, "3")
#endif
#define __INTRINSIC_DEFINED___writecr3
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(__writecr4)
/* Write control register CR4. */
void __writecr4(unsigned __LONG32);
#if !__has_builtin(__writecr4)
__INTRINSICS_USEINLINE
__build_writecr(__writecr4, unsigned __LONG32, "4")
#endif
#define __INTRINSIC_DEFINED___writecr4
#endif /* __INTRINSIC_PROLOG */
#if __INTRINSIC_PROLOG(__writecr8)
/* Write control register CR8.
   NOTE(review): as with __readcr8, "mov to CR8" is normally 64-bit-mode
   only — confirm how __build_writecr encodes it on i386. */
void __writecr8(unsigned __LONG32);
#if !__has_builtin(__writecr8)
__INTRINSICS_USEINLINE
__build_writecr(__writecr8, unsigned __LONG32, "8")
#endif
#define __INTRINSIC_DEFINED___writecr8
#endif /* __INTRINSIC_PROLOG */
  2006. #endif /* defined(__i386__) || defined(_X86_) */
  2007. #ifdef __cplusplus
  2008. }
  2009. #endif
  2010. #undef __INTRINSIC_ONLYSPECIAL
  2011. #undef __INTRINSIC_PROLOG
  2012. #undef __INTRINSIC_EPILOG
  2013. #undef __INTRINSICS_USEINLINE
  2014. #undef __FLAGCONSTRAINT
  2015. #undef __FLAGSET
  2016. #undef __FLAGCLOBBER1
  2017. #undef __FLAGCLOBBER2
  2018. #pragma pop_macro("__has_builtin")
  2019. #endif /* __MINGW_INTRIN_INLINE */