extra_ops_gcc_x86.hpp
  1. /*
  2. * Distributed under the Boost Software License, Version 1.0.
  3. * (See accompanying file LICENSE_1_0.txt or copy at
  4. * http://www.boost.org/LICENSE_1_0.txt)
  5. *
  6. * Copyright (c) 2015 Andrey Semashev
  7. */
  8. /*!
  9. * \file atomic/detail/extra_ops_gcc_x86.hpp
  10. *
  11. * This header contains implementation of the extra atomic operations for x86.
  12. */
  13. #ifndef BOOST_ATOMIC_DETAIL_EXTRA_OPS_GCC_X86_HPP_INCLUDED_
  14. #define BOOST_ATOMIC_DETAIL_EXTRA_OPS_GCC_X86_HPP_INCLUDED_
  15. #include <cstddef>
  16. #include <boost/cstdint.hpp>
  17. #include <boost/memory_order.hpp>
  18. #include <boost/atomic/detail/config.hpp>
  19. #include <boost/atomic/detail/storage_traits.hpp>
  20. #include <boost/atomic/detail/extra_operations_fwd.hpp>
  21. #include <boost/atomic/detail/extra_ops_generic.hpp>
  22. #include <boost/atomic/capabilities.hpp>
  23. #ifdef BOOST_HAS_PRAGMA_ONCE
  24. #pragma once
  25. #endif
  26. namespace boost {
  27. namespace atomics {
  28. namespace detail {
//! Extra atomic operations for 8-bit storage on x86, implemented with gcc-style inline assembly.
template< typename Base, bool Signed >
struct extra_operations< Base, 1u, Signed, true > :
    public generic_extra_operations< Base, 1u, Signed >
{
    typedef generic_extra_operations< Base, 1u, Signed > base_type;
    typedef typename base_type::storage_type storage_type;
    // 32-bit scratch type: the byte value is zero-extended into a full register inside the CAS loops.
    typedef typename storage_traits< 4u >::type temp_storage_type;

    // Compare-and-swap retry loop for unary ops (neg/not). "original" must start as a copy of the
    // storage; it is tied to eax/al ("+a"), which cmpxchg reloads with the current value on failure,
    // so the loop simply retries until the exchange succeeds. On exit "original" holds the value
    // observed just before the successful op and "result" holds the newly stored value.
    // "result" is early-clobbered ("=&q") so it cannot be allocated on top of the other operands.
#define BOOST_ATOMIC_DETAIL_CAS_LOOP(op, original, result)\
    __asm__ __volatile__\
    (\
        ".align 16\n\t"\
        "1: movzbl %[orig], %2\n\t"\
        op " %b2\n\t"\
        "lock; cmpxchgb %b2, %[storage]\n\t"\
        "jne 1b"\
        : [orig] "+a" (original), [storage] "+m" (storage), "=&q" (result)\
        : \
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
    )

    //! Atomically negates the value; returns the value observed immediately before the negation.
    static BOOST_FORCEINLINE storage_type fetch_negate(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        temp_storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("negb", original, result);
        return original;
    }

    //! Atomically complements (bitwise NOT) the value; returns the value observed before the op.
    static BOOST_FORCEINLINE storage_type fetch_complement(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        temp_storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("notb", original, result);
        return original;
    }

    //! Atomically negates the value; returns the newly stored (negated) value.
    static BOOST_FORCEINLINE storage_type negate(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        temp_storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("negb", original, result);
        return static_cast< storage_type >(result);
    }

    //! Atomically complements the value; returns the newly stored (complemented) value.
    static BOOST_FORCEINLINE storage_type bitwise_complement(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        temp_storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("notb", original, result);
        return static_cast< storage_type >(result);
    }

#undef BOOST_ATOMIC_DETAIL_CAS_LOOP

    // Compare-and-swap retry loop for binary ops (and/or/xor). The argument is loaded into the
    // scratch register, combined with the current value in al ("op %%al, %b2"), then published
    // with lock cmpxchgb; same retry structure as the unary loop above. On exit "original" is the
    // previous value and "result" is the newly stored value.
#define BOOST_ATOMIC_DETAIL_CAS_LOOP(op, argument, original, result)\
    __asm__ __volatile__\
    (\
        ".align 16\n\t"\
        "1: mov %[arg], %2\n\t"\
        op " %%al, %b2\n\t"\
        "lock; cmpxchgb %b2, %[storage]\n\t"\
        "jne 1b"\
        : [orig] "+a" (original), [storage] "+m" (storage), "=&q" (result)\
        : [arg] "ir" ((temp_storage_type)argument)\
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
    )

    //! Atomically ANDs \a v into the value; returns the newly stored value.
    static BOOST_FORCEINLINE storage_type bitwise_and(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        temp_storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("andb", v, original, result);
        return static_cast< storage_type >(result);
    }

    //! Atomically ORs \a v into the value; returns the newly stored value.
    static BOOST_FORCEINLINE storage_type bitwise_or(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        temp_storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("orb", v, original, result);
        return static_cast< storage_type >(result);
    }

    //! Atomically XORs \a v into the value; returns the newly stored value.
    static BOOST_FORCEINLINE storage_type bitwise_xor(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        temp_storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("xorb", v, original, result);
        return static_cast< storage_type >(result);
    }

#undef BOOST_ATOMIC_DETAIL_CAS_LOOP

    //! Atomically negates the value; returns true if the resulting value is nonzero.
    static BOOST_FORCEINLINE bool negate_and_test(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        return !!negate(storage, order);
    }

    //! Atomically complements the value; returns true if the resulting value is nonzero.
    static BOOST_FORCEINLINE bool complement_and_test(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        return !!bitwise_complement(storage, order);
    }

    //! Atomically adds \a v, discarding the result. Uses "lock inc" when \a v is the constant 1.
    static BOOST_FORCEINLINE void opaque_add(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; incb %[storage]\n\t"
                : [storage] "+m" (storage)
                :
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; addb %[argument], %[storage]\n\t"
                : [storage] "+m" (storage)
                : [argument] "iq" (v)
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
    }

    //! Atomically subtracts \a v, discarding the result. Uses "lock dec" when \a v is the constant 1.
    static BOOST_FORCEINLINE void opaque_sub(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; decb %[storage]\n\t"
                : [storage] "+m" (storage)
                :
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; subb %[argument], %[storage]\n\t"
                : [storage] "+m" (storage)
                : [argument] "iq" (v)
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
    }

    //! Atomically negates the value, discarding the result.
    static BOOST_FORCEINLINE void opaque_negate(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
    {
        __asm__ __volatile__
        (
            "lock; negb %[storage]\n\t"
            : [storage] "+m" (storage)
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
    }

    //! Atomically ANDs \a v into the value, discarding the result.
    static BOOST_FORCEINLINE void opaque_and(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        __asm__ __volatile__
        (
            "lock; andb %[argument], %[storage]\n\t"
            : [storage] "+m" (storage)
            : [argument] "iq" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
    }

    //! Atomically ORs \a v into the value, discarding the result.
    static BOOST_FORCEINLINE void opaque_or(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        __asm__ __volatile__
        (
            "lock; orb %[argument], %[storage]\n\t"
            : [storage] "+m" (storage)
            : [argument] "iq" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
    }

    //! Atomically XORs \a v into the value, discarding the result.
    static BOOST_FORCEINLINE void opaque_xor(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        __asm__ __volatile__
        (
            "lock; xorb %[argument], %[storage]\n\t"
            : [storage] "+m" (storage)
            : [argument] "iq" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
    }

    //! Atomically complements the value, discarding the result.
    //! Note: the NOT instruction does not modify flags, hence no CC clobber here.
    static BOOST_FORCEINLINE void opaque_complement(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
    {
        __asm__ __volatile__
        (
            "lock; notb %[storage]\n\t"
            : [storage] "+m" (storage)
            :
            : "memory"
        );
    }

    //! Atomically adds \a v; returns true if the resulting value is nonzero.
    //! When the compiler supports asm flag outputs, ZF is captured directly ("=@ccnz");
    //! otherwise a setnz into a byte register is used. Constant 1 is special-cased as inc.
    static BOOST_FORCEINLINE bool add_and_test(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; incb %[storage]\n\t"
                : [storage] "+m" (storage), [result] "=@ccnz" (res)
                :
                : "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; addb %[argument], %[storage]\n\t"
                : [storage] "+m" (storage), [result] "=@ccnz" (res)
                : [argument] "iq" (v)
                : "memory"
            );
        }
#else
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; incb %[storage]\n\t"
                "setnz %[result]\n\t"
                : [storage] "+m" (storage), [result] "=q" (res)
                :
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; addb %[argument], %[storage]\n\t"
                "setnz %[result]\n\t"
                : [storage] "+m" (storage), [result] "=q" (res)
                : [argument] "iq" (v)
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
#endif
        return res;
    }

    //! Atomically subtracts \a v; returns true if the resulting value is nonzero.
    //! Constant 1 is special-cased as dec; flag capture as in add_and_test.
    static BOOST_FORCEINLINE bool sub_and_test(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; decb %[storage]\n\t"
                : [storage] "+m" (storage), [result] "=@ccnz" (res)
                :
                : "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; subb %[argument], %[storage]\n\t"
                : [storage] "+m" (storage), [result] "=@ccnz" (res)
                : [argument] "iq" (v)
                : "memory"
            );
        }
#else
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; decb %[storage]\n\t"
                "setnz %[result]\n\t"
                : [storage] "+m" (storage), [result] "=q" (res)
                :
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; subb %[argument], %[storage]\n\t"
                "setnz %[result]\n\t"
                : [storage] "+m" (storage), [result] "=q" (res)
                : [argument] "iq" (v)
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
#endif
        return res;
    }

    //! Atomically ANDs \a v; returns true if the resulting value is nonzero.
    static BOOST_FORCEINLINE bool and_and_test(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        __asm__ __volatile__
        (
            "lock; andb %[argument], %[storage]\n\t"
            : [storage] "+m" (storage), [result] "=@ccnz" (res)
            : [argument] "iq" (v)
            : "memory"
        );
#else
        __asm__ __volatile__
        (
            "lock; andb %[argument], %[storage]\n\t"
            "setnz %[result]\n\t"
            : [storage] "+m" (storage), [result] "=q" (res)
            : [argument] "iq" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
#endif
        return res;
    }

    //! Atomically ORs \a v; returns true if the resulting value is nonzero.
    static BOOST_FORCEINLINE bool or_and_test(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        __asm__ __volatile__
        (
            "lock; orb %[argument], %[storage]\n\t"
            : [storage] "+m" (storage), [result] "=@ccnz" (res)
            : [argument] "iq" (v)
            : "memory"
        );
#else
        __asm__ __volatile__
        (
            "lock; orb %[argument], %[storage]\n\t"
            "setnz %[result]\n\t"
            : [storage] "+m" (storage), [result] "=q" (res)
            : [argument] "iq" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
#endif
        return res;
    }

    //! Atomically XORs \a v; returns true if the resulting value is nonzero.
    static BOOST_FORCEINLINE bool xor_and_test(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        __asm__ __volatile__
        (
            "lock; xorb %[argument], %[storage]\n\t"
            : [storage] "+m" (storage), [result] "=@ccnz" (res)
            : [argument] "iq" (v)
            : "memory"
        );
#else
        __asm__ __volatile__
        (
            "lock; xorb %[argument], %[storage]\n\t"
            "setnz %[result]\n\t"
            : [storage] "+m" (storage), [result] "=q" (res)
            : [argument] "iq" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
#endif
        return res;
    }
};
//! Extra atomic operations for 16-bit storage on x86, implemented with gcc-style inline assembly.
template< typename Base, bool Signed >
struct extra_operations< Base, 2u, Signed, true > :
    public generic_extra_operations< Base, 2u, Signed >
{
    typedef generic_extra_operations< Base, 2u, Signed > base_type;
    typedef typename base_type::storage_type storage_type;
    // 32-bit scratch type: the 16-bit value is zero-extended into a full register inside the CAS loops.
    typedef typename storage_traits< 4u >::type temp_storage_type;

    // Compare-and-swap retry loop for unary ops (neg/not). "original" must start as a copy of the
    // storage; it is tied to eax/ax ("+a"), which cmpxchg reloads with the current value on failure,
    // so the loop retries until the exchange succeeds. On exit "original" holds the value observed
    // just before the successful op and "result" holds the newly stored value.
    // "result" is early-clobbered ("=&q") so it cannot be allocated on top of the other operands.
#define BOOST_ATOMIC_DETAIL_CAS_LOOP(op, original, result)\
    __asm__ __volatile__\
    (\
        ".align 16\n\t"\
        "1: movzwl %[orig], %2\n\t"\
        op " %w2\n\t"\
        "lock; cmpxchgw %w2, %[storage]\n\t"\
        "jne 1b"\
        : [orig] "+a" (original), [storage] "+m" (storage), "=&q" (result)\
        : \
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
    )

    //! Atomically negates the value; returns the value observed immediately before the negation.
    static BOOST_FORCEINLINE storage_type fetch_negate(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        temp_storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("negw", original, result);
        return original;
    }

    //! Atomically complements (bitwise NOT) the value; returns the value observed before the op.
    static BOOST_FORCEINLINE storage_type fetch_complement(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        temp_storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("notw", original, result);
        return original;
    }

    //! Atomically negates the value; returns the newly stored (negated) value.
    static BOOST_FORCEINLINE storage_type negate(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        temp_storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("negw", original, result);
        return static_cast< storage_type >(result);
    }

    //! Atomically complements the value; returns the newly stored (complemented) value.
    static BOOST_FORCEINLINE storage_type bitwise_complement(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        temp_storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("notw", original, result);
        return static_cast< storage_type >(result);
    }

#undef BOOST_ATOMIC_DETAIL_CAS_LOOP

    // Compare-and-swap retry loop for binary ops (and/or/xor). The argument is loaded into the
    // scratch register, combined with the current value in ax ("op %%ax, %w2"), then published
    // with lock cmpxchgw; same retry structure as the unary loop above. On exit "original" is the
    // previous value and "result" is the newly stored value.
#define BOOST_ATOMIC_DETAIL_CAS_LOOP(op, argument, original, result)\
    __asm__ __volatile__\
    (\
        ".align 16\n\t"\
        "1: mov %[arg], %2\n\t"\
        op " %%ax, %w2\n\t"\
        "lock; cmpxchgw %w2, %[storage]\n\t"\
        "jne 1b"\
        : [orig] "+a" (original), [storage] "+m" (storage), "=&q" (result)\
        : [arg] "ir" ((temp_storage_type)argument)\
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
    )

    //! Atomically ANDs \a v into the value; returns the newly stored value.
    static BOOST_FORCEINLINE storage_type bitwise_and(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        temp_storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("andw", v, original, result);
        return static_cast< storage_type >(result);
    }

    //! Atomically ORs \a v into the value; returns the newly stored value.
    static BOOST_FORCEINLINE storage_type bitwise_or(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        temp_storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("orw", v, original, result);
        return static_cast< storage_type >(result);
    }

    //! Atomically XORs \a v into the value; returns the newly stored value.
    static BOOST_FORCEINLINE storage_type bitwise_xor(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        temp_storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("xorw", v, original, result);
        return static_cast< storage_type >(result);
    }

#undef BOOST_ATOMIC_DETAIL_CAS_LOOP

    //! Atomically negates the value; returns true if the resulting value is nonzero.
    static BOOST_FORCEINLINE bool negate_and_test(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        return !!negate(storage, order);
    }

    //! Atomically complements the value; returns true if the resulting value is nonzero.
    static BOOST_FORCEINLINE bool complement_and_test(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        return !!bitwise_complement(storage, order);
    }

    //! Atomically adds \a v, discarding the result. Uses "lock inc" when \a v is the constant 1.
    static BOOST_FORCEINLINE void opaque_add(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; incw %[storage]\n\t"
                : [storage] "+m" (storage)
                :
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; addw %[argument], %[storage]\n\t"
                : [storage] "+m" (storage)
                : [argument] "iq" (v)
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
    }

    //! Atomically subtracts \a v, discarding the result. Uses "lock dec" when \a v is the constant 1.
    static BOOST_FORCEINLINE void opaque_sub(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; decw %[storage]\n\t"
                : [storage] "+m" (storage)
                :
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; subw %[argument], %[storage]\n\t"
                : [storage] "+m" (storage)
                : [argument] "iq" (v)
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
    }

    //! Atomically negates the value, discarding the result.
    static BOOST_FORCEINLINE void opaque_negate(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
    {
        __asm__ __volatile__
        (
            "lock; negw %[storage]\n\t"
            : [storage] "+m" (storage)
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
    }

    //! Atomically ANDs \a v into the value, discarding the result.
    static BOOST_FORCEINLINE void opaque_and(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        __asm__ __volatile__
        (
            "lock; andw %[argument], %[storage]\n\t"
            : [storage] "+m" (storage)
            : [argument] "iq" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
    }

    //! Atomically ORs \a v into the value, discarding the result.
    static BOOST_FORCEINLINE void opaque_or(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        __asm__ __volatile__
        (
            "lock; orw %[argument], %[storage]\n\t"
            : [storage] "+m" (storage)
            : [argument] "iq" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
    }

    //! Atomically XORs \a v into the value, discarding the result.
    static BOOST_FORCEINLINE void opaque_xor(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        __asm__ __volatile__
        (
            "lock; xorw %[argument], %[storage]\n\t"
            : [storage] "+m" (storage)
            : [argument] "iq" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
    }

    //! Atomically complements the value, discarding the result.
    //! Note: the NOT instruction does not modify flags, hence no CC clobber here.
    static BOOST_FORCEINLINE void opaque_complement(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
    {
        __asm__ __volatile__
        (
            "lock; notw %[storage]\n\t"
            : [storage] "+m" (storage)
            :
            : "memory"
        );
    }

    //! Atomically adds \a v; returns true if the resulting value is nonzero.
    //! When the compiler supports asm flag outputs, ZF is captured directly ("=@ccnz");
    //! otherwise a setnz into a byte register is used. Constant 1 is special-cased as inc.
    static BOOST_FORCEINLINE bool add_and_test(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; incw %[storage]\n\t"
                : [storage] "+m" (storage), [result] "=@ccnz" (res)
                :
                : "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; addw %[argument], %[storage]\n\t"
                : [storage] "+m" (storage), [result] "=@ccnz" (res)
                : [argument] "iq" (v)
                : "memory"
            );
        }
#else
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; incw %[storage]\n\t"
                "setnz %[result]\n\t"
                : [storage] "+m" (storage), [result] "=q" (res)
                :
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; addw %[argument], %[storage]\n\t"
                "setnz %[result]\n\t"
                : [storage] "+m" (storage), [result] "=q" (res)
                : [argument] "iq" (v)
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
#endif
        return res;
    }

    //! Atomically subtracts \a v; returns true if the resulting value is nonzero.
    //! Constant 1 is special-cased as dec; flag capture as in add_and_test.
    static BOOST_FORCEINLINE bool sub_and_test(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; decw %[storage]\n\t"
                : [storage] "+m" (storage), [result] "=@ccnz" (res)
                :
                : "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; subw %[argument], %[storage]\n\t"
                : [storage] "+m" (storage), [result] "=@ccnz" (res)
                : [argument] "iq" (v)
                : "memory"
            );
        }
#else
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; decw %[storage]\n\t"
                "setnz %[result]\n\t"
                : [storage] "+m" (storage), [result] "=q" (res)
                :
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; subw %[argument], %[storage]\n\t"
                "setnz %[result]\n\t"
                : [storage] "+m" (storage), [result] "=q" (res)
                : [argument] "iq" (v)
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
#endif
        return res;
    }

    //! Atomically ANDs \a v; returns true if the resulting value is nonzero.
    static BOOST_FORCEINLINE bool and_and_test(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        __asm__ __volatile__
        (
            "lock; andw %[argument], %[storage]\n\t"
            : [storage] "+m" (storage), [result] "=@ccnz" (res)
            : [argument] "iq" (v)
            : "memory"
        );
#else
        __asm__ __volatile__
        (
            "lock; andw %[argument], %[storage]\n\t"
            "setnz %[result]\n\t"
            : [storage] "+m" (storage), [result] "=q" (res)
            : [argument] "iq" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
#endif
        return res;
    }

    //! Atomically ORs \a v; returns true if the resulting value is nonzero.
    static BOOST_FORCEINLINE bool or_and_test(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        __asm__ __volatile__
        (
            "lock; orw %[argument], %[storage]\n\t"
            : [storage] "+m" (storage), [result] "=@ccnz" (res)
            : [argument] "iq" (v)
            : "memory"
        );
#else
        __asm__ __volatile__
        (
            "lock; orw %[argument], %[storage]\n\t"
            "setnz %[result]\n\t"
            : [storage] "+m" (storage), [result] "=q" (res)
            : [argument] "iq" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
#endif
        return res;
    }

    //! Atomically XORs \a v; returns true if the resulting value is nonzero.
    static BOOST_FORCEINLINE bool xor_and_test(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        __asm__ __volatile__
        (
            "lock; xorw %[argument], %[storage]\n\t"
            : [storage] "+m" (storage), [result] "=@ccnz" (res)
            : [argument] "iq" (v)
            : "memory"
        );
#else
        __asm__ __volatile__
        (
            "lock; xorw %[argument], %[storage]\n\t"
            "setnz %[result]\n\t"
            : [storage] "+m" (storage), [result] "=q" (res)
            : [argument] "iq" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
#endif
        return res;
    }

    //! Atomically sets the given bit; returns the previous value of that bit.
    //! bts places the bit's prior value in CF, captured via "=@ccc" or a setc fallback.
    static BOOST_FORCEINLINE bool bit_test_and_set(storage_type volatile& storage, unsigned int bit_number, memory_order) BOOST_NOEXCEPT
    {
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        __asm__ __volatile__
        (
            "lock; btsw %[bit_number], %[storage]\n\t"
            : [storage] "+m" (storage), [result] "=@ccc" (res)
            : [bit_number] "Kq" ((uint16_t)bit_number)
            : "memory"
        );
#else
        __asm__ __volatile__
        (
            "lock; btsw %[bit_number], %[storage]\n\t"
            "setc %[result]\n\t"
            : [storage] "+m" (storage), [result] "=q" (res)
            : [bit_number] "Kq" ((uint16_t)bit_number)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
#endif
        return res;
    }

    //! Atomically clears the given bit; returns the previous value of that bit (from CF).
    static BOOST_FORCEINLINE bool bit_test_and_reset(storage_type volatile& storage, unsigned int bit_number, memory_order) BOOST_NOEXCEPT
    {
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        __asm__ __volatile__
        (
            "lock; btrw %[bit_number], %[storage]\n\t"
            : [storage] "+m" (storage), [result] "=@ccc" (res)
            : [bit_number] "Kq" ((uint16_t)bit_number)
            : "memory"
        );
#else
        __asm__ __volatile__
        (
            "lock; btrw %[bit_number], %[storage]\n\t"
            "setc %[result]\n\t"
            : [storage] "+m" (storage), [result] "=q" (res)
            : [bit_number] "Kq" ((uint16_t)bit_number)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
#endif
        return res;
    }

    //! Atomically flips the given bit; returns the previous value of that bit (from CF).
    static BOOST_FORCEINLINE bool bit_test_and_complement(storage_type volatile& storage, unsigned int bit_number, memory_order) BOOST_NOEXCEPT
    {
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        __asm__ __volatile__
        (
            "lock; btcw %[bit_number], %[storage]\n\t"
            : [storage] "+m" (storage), [result] "=@ccc" (res)
            : [bit_number] "Kq" ((uint16_t)bit_number)
            : "memory"
        );
#else
        __asm__ __volatile__
        (
            "lock; btcw %[bit_number], %[storage]\n\t"
            "setc %[result]\n\t"
            : [storage] "+m" (storage), [result] "=q" (res)
            : [bit_number] "Kq" ((uint16_t)bit_number)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
#endif
        return res;
    }
};
  810. template< typename Base, bool Signed >
  811. struct extra_operations< Base, 4u, Signed, true > :
  812. public generic_extra_operations< Base, 4u, Signed >
  813. {
  814. typedef generic_extra_operations< Base, 4u, Signed > base_type;
  815. typedef typename base_type::storage_type storage_type;
  816. #define BOOST_ATOMIC_DETAIL_CAS_LOOP(op, original, result)\
  817. __asm__ __volatile__\
  818. (\
  819. ".align 16\n\t"\
  820. "1: mov %[orig], %[res]\n\t"\
  821. op " %[res]\n\t"\
  822. "lock; cmpxchgl %[res], %[storage]\n\t"\
  823. "jne 1b"\
  824. : [orig] "+a" (original), [storage] "+m" (storage), [res] "=&r" (result)\
  825. : \
  826. : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
  827. )
  828. static BOOST_FORCEINLINE storage_type fetch_negate(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
  829. {
  830. storage_type original = storage;
  831. storage_type result;
  832. BOOST_ATOMIC_DETAIL_CAS_LOOP("negl", original, result);
  833. return original;
  834. }
  835. static BOOST_FORCEINLINE storage_type fetch_complement(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
  836. {
  837. storage_type original = storage;
  838. storage_type result;
  839. BOOST_ATOMIC_DETAIL_CAS_LOOP("notl", original, result);
  840. return original;
  841. }
  842. static BOOST_FORCEINLINE storage_type negate(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
  843. {
  844. storage_type original = storage;
  845. storage_type result;
  846. BOOST_ATOMIC_DETAIL_CAS_LOOP("negl", original, result);
  847. return result;
  848. }
  849. static BOOST_FORCEINLINE storage_type bitwise_complement(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
  850. {
  851. storage_type original = storage;
  852. storage_type result;
  853. BOOST_ATOMIC_DETAIL_CAS_LOOP("notl", original, result);
  854. return result;
  855. }
  856. #undef BOOST_ATOMIC_DETAIL_CAS_LOOP
  857. #define BOOST_ATOMIC_DETAIL_CAS_LOOP(op, argument, original, result)\
  858. __asm__ __volatile__\
  859. (\
  860. ".align 16\n\t"\
  861. "1: mov %[arg], %[res]\n\t"\
  862. op " %%eax, %[res]\n\t"\
  863. "lock; cmpxchgl %[res], %[storage]\n\t"\
  864. "jne 1b"\
  865. : [orig] "+a" (original), [storage] "+m" (storage), [res] "=&r" (result)\
  866. : [arg] "ir" (argument)\
  867. : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
  868. )
  869. static BOOST_FORCEINLINE storage_type bitwise_and(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
  870. {
  871. storage_type original = storage;
  872. storage_type result;
  873. BOOST_ATOMIC_DETAIL_CAS_LOOP("andl", v, original, result);
  874. return static_cast< storage_type >(result);
  875. }
  876. static BOOST_FORCEINLINE storage_type bitwise_or(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
  877. {
  878. storage_type original = storage;
  879. storage_type result;
  880. BOOST_ATOMIC_DETAIL_CAS_LOOP("orl", v, original, result);
  881. return static_cast< storage_type >(result);
  882. }
  883. static BOOST_FORCEINLINE storage_type bitwise_xor(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
  884. {
  885. storage_type original = storage;
  886. storage_type result;
  887. BOOST_ATOMIC_DETAIL_CAS_LOOP("xorl", v, original, result);
  888. return static_cast< storage_type >(result);
  889. }
  890. #undef BOOST_ATOMIC_DETAIL_CAS_LOOP
  891. static BOOST_FORCEINLINE bool negate_and_test(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
  892. {
  893. return !!negate(storage, order);
  894. }
  895. static BOOST_FORCEINLINE bool complement_and_test(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
  896. {
  897. return !!bitwise_complement(storage, order);
  898. }
  899. static BOOST_FORCEINLINE void opaque_add(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
  900. {
  901. if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
  902. {
  903. __asm__ __volatile__
  904. (
  905. "lock; incl %[storage]\n\t"
  906. : [storage] "+m" (storage)
  907. :
  908. : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
  909. );
  910. }
  911. else
  912. {
  913. __asm__ __volatile__
  914. (
  915. "lock; addl %[argument], %[storage]\n\t"
  916. : [storage] "+m" (storage)
  917. : [argument] "ir" (v)
  918. : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
  919. );
  920. }
  921. }
  922. static BOOST_FORCEINLINE void opaque_sub(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
  923. {
  924. if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
  925. {
  926. __asm__ __volatile__
  927. (
  928. "lock; decl %[storage]\n\t"
  929. : [storage] "+m" (storage)
  930. :
  931. : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
  932. );
  933. }
  934. else
  935. {
  936. __asm__ __volatile__
  937. (
  938. "lock; subl %[argument], %[storage]\n\t"
  939. : [storage] "+m" (storage)
  940. : [argument] "ir" (v)
  941. : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
  942. );
  943. }
  944. }
  945. static BOOST_FORCEINLINE void opaque_negate(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
  946. {
  947. __asm__ __volatile__
  948. (
  949. "lock; negl %[storage]\n\t"
  950. : [storage] "+m" (storage)
  951. :
  952. : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
  953. );
  954. }
  955. static BOOST_FORCEINLINE void opaque_and(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
  956. {
  957. __asm__ __volatile__
  958. (
  959. "lock; andl %[argument], %[storage]\n\t"
  960. : [storage] "+m" (storage)
  961. : [argument] "ir" (v)
  962. : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
  963. );
  964. }
  965. static BOOST_FORCEINLINE void opaque_or(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
  966. {
  967. __asm__ __volatile__
  968. (
  969. "lock; orl %[argument], %[storage]\n\t"
  970. : [storage] "+m" (storage)
  971. : [argument] "ir" (v)
  972. : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
  973. );
  974. }
  975. static BOOST_FORCEINLINE void opaque_xor(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
  976. {
  977. __asm__ __volatile__
  978. (
  979. "lock; xorl %[argument], %[storage]\n\t"
  980. : [storage] "+m" (storage)
  981. : [argument] "ir" (v)
  982. : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
  983. );
  984. }
  985. static BOOST_FORCEINLINE void opaque_complement(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
  986. {
  987. __asm__ __volatile__
  988. (
  989. "lock; notl %[storage]\n\t"
  990. : [storage] "+m" (storage)
  991. :
  992. : "memory"
  993. );
  994. }
  995. static BOOST_FORCEINLINE bool add_and_test(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
  996. {
  997. bool res;
  998. #if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
  999. if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
  1000. {
  1001. __asm__ __volatile__
  1002. (
  1003. "lock; incl %[storage]\n\t"
  1004. : [storage] "+m" (storage), [result] "=@ccnz" (res)
  1005. :
  1006. : "memory"
  1007. );
  1008. }
  1009. else
  1010. {
  1011. __asm__ __volatile__
  1012. (
  1013. "lock; addl %[argument], %[storage]\n\t"
  1014. : [storage] "+m" (storage), [result] "=@ccnz" (res)
  1015. : [argument] "ir" (v)
  1016. : "memory"
  1017. );
  1018. }
  1019. #else
  1020. if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
  1021. {
  1022. __asm__ __volatile__
  1023. (
  1024. "lock; incl %[storage]\n\t"
  1025. "setnz %[result]\n\t"
  1026. : [storage] "+m" (storage), [result] "=q" (res)
  1027. :
  1028. : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
  1029. );
  1030. }
  1031. else
  1032. {
  1033. __asm__ __volatile__
  1034. (
  1035. "lock; addl %[argument], %[storage]\n\t"
  1036. "setnz %[result]\n\t"
  1037. : [storage] "+m" (storage), [result] "=q" (res)
  1038. : [argument] "ir" (v)
  1039. : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
  1040. );
  1041. }
  1042. #endif
  1043. return res;
  1044. }
  1045. static BOOST_FORCEINLINE bool sub_and_test(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
  1046. {
  1047. bool res;
  1048. #if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
  1049. if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
  1050. {
  1051. __asm__ __volatile__
  1052. (
  1053. "lock; decl %[storage]\n\t"
  1054. : [storage] "+m" (storage), [result] "=@ccnz" (res)
  1055. :
  1056. : "memory"
  1057. );
  1058. }
  1059. else
  1060. {
  1061. __asm__ __volatile__
  1062. (
  1063. "lock; subl %[argument], %[storage]\n\t"
  1064. : [storage] "+m" (storage), [result] "=@ccnz" (res)
  1065. : [argument] "ir" (v)
  1066. : "memory"
  1067. );
  1068. }
  1069. #else
  1070. if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
  1071. {
  1072. __asm__ __volatile__
  1073. (
  1074. "lock; decl %[storage]\n\t"
  1075. "setnz %[result]\n\t"
  1076. : [storage] "+m" (storage), [result] "=q" (res)
  1077. :
  1078. : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
  1079. );
  1080. }
  1081. else
  1082. {
  1083. __asm__ __volatile__
  1084. (
  1085. "lock; subl %[argument], %[storage]\n\t"
  1086. "setnz %[result]\n\t"
  1087. : [storage] "+m" (storage), [result] "=q" (res)
  1088. : [argument] "ir" (v)
  1089. : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
  1090. );
  1091. }
  1092. #endif
  1093. return res;
  1094. }
  1095. static BOOST_FORCEINLINE bool and_and_test(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
  1096. {
  1097. bool res;
  1098. #if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
  1099. __asm__ __volatile__
  1100. (
  1101. "lock; andl %[argument], %[storage]\n\t"
  1102. : [storage] "+m" (storage), [result] "=@ccnz" (res)
  1103. : [argument] "ir" (v)
  1104. : "memory"
  1105. );
  1106. #else
  1107. __asm__ __volatile__
  1108. (
  1109. "lock; andl %[argument], %[storage]\n\t"
  1110. "setnz %[result]\n\t"
  1111. : [storage] "+m" (storage), [result] "=q" (res)
  1112. : [argument] "ir" (v)
  1113. : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
  1114. );
  1115. #endif
  1116. return res;
  1117. }
  1118. static BOOST_FORCEINLINE bool or_and_test(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
  1119. {
  1120. bool res;
  1121. #if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
  1122. __asm__ __volatile__
  1123. (
  1124. "lock; orl %[argument], %[storage]\n\t"
  1125. : [storage] "+m" (storage), [result] "=@ccnz" (res)
  1126. : [argument] "ir" (v)
  1127. : "memory"
  1128. );
  1129. #else
  1130. __asm__ __volatile__
  1131. (
  1132. "lock; orl %[argument], %[storage]\n\t"
  1133. "setnz %[result]\n\t"
  1134. : [storage] "+m" (storage), [result] "=q" (res)
  1135. : [argument] "ir" (v)
  1136. : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
  1137. );
  1138. #endif
  1139. return res;
  1140. }
  1141. static BOOST_FORCEINLINE bool xor_and_test(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
  1142. {
  1143. bool res;
  1144. #if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
  1145. __asm__ __volatile__
  1146. (
  1147. "lock; xorl %[argument], %[storage]\n\t"
  1148. : [storage] "+m" (storage), [result] "=@ccnz" (res)
  1149. : [argument] "ir" (v)
  1150. : "memory"
  1151. );
  1152. #else
  1153. __asm__ __volatile__
  1154. (
  1155. "lock; xorl %[argument], %[storage]\n\t"
  1156. "setnz %[result]\n\t"
  1157. : [storage] "+m" (storage), [result] "=q" (res)
  1158. : [argument] "ir" (v)
  1159. : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
  1160. );
  1161. #endif
  1162. return res;
  1163. }
  1164. static BOOST_FORCEINLINE bool bit_test_and_set(storage_type volatile& storage, unsigned int bit_number, memory_order) BOOST_NOEXCEPT
  1165. {
  1166. bool res;
  1167. #if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
  1168. __asm__ __volatile__
  1169. (
  1170. "lock; btsl %[bit_number], %[storage]\n\t"
  1171. : [storage] "+m" (storage), [result] "=@ccc" (res)
  1172. : [bit_number] "Kr" ((uint32_t)bit_number)
  1173. : "memory"
  1174. );
  1175. #else
  1176. __asm__ __volatile__
  1177. (
  1178. "lock; btsl %[bit_number], %[storage]\n\t"
  1179. "setc %[result]\n\t"
  1180. : [storage] "+m" (storage), [result] "=q" (res)
  1181. : [bit_number] "Kr" ((uint32_t)bit_number)
  1182. : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
  1183. );
  1184. #endif
  1185. return res;
  1186. }
  1187. static BOOST_FORCEINLINE bool bit_test_and_reset(storage_type volatile& storage, unsigned int bit_number, memory_order) BOOST_NOEXCEPT
  1188. {
  1189. bool res;
  1190. #if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
  1191. __asm__ __volatile__
  1192. (
  1193. "lock; btrl %[bit_number], %[storage]\n\t"
  1194. : [storage] "+m" (storage), [result] "=@ccc" (res)
  1195. : [bit_number] "Kr" ((uint32_t)bit_number)
  1196. : "memory"
  1197. );
  1198. #else
  1199. __asm__ __volatile__
  1200. (
  1201. "lock; btrl %[bit_number], %[storage]\n\t"
  1202. "setc %[result]\n\t"
  1203. : [storage] "+m" (storage), [result] "=q" (res)
  1204. : [bit_number] "Kr" ((uint32_t)bit_number)
  1205. : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
  1206. );
  1207. #endif
  1208. return res;
  1209. }
  1210. static BOOST_FORCEINLINE bool bit_test_and_complement(storage_type volatile& storage, unsigned int bit_number, memory_order) BOOST_NOEXCEPT
  1211. {
  1212. bool res;
  1213. #if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
  1214. __asm__ __volatile__
  1215. (
  1216. "lock; btcl %[bit_number], %[storage]\n\t"
  1217. : [storage] "+m" (storage), [result] "=@ccc" (res)
  1218. : [bit_number] "Kr" ((uint32_t)bit_number)
  1219. : "memory"
  1220. );
  1221. #else
  1222. __asm__ __volatile__
  1223. (
  1224. "lock; btcl %[bit_number], %[storage]\n\t"
  1225. "setc %[result]\n\t"
  1226. : [storage] "+m" (storage), [result] "=q" (res)
  1227. : [bit_number] "Kr" ((uint32_t)bit_number)
  1228. : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
  1229. );
  1230. #endif
  1231. return res;
  1232. }
  1233. };
  1234. #if defined(__x86_64__)
  1235. template< typename Base, bool Signed >
  1236. struct extra_operations< Base, 8u, Signed, true > :
  1237. public generic_extra_operations< Base, 8u, Signed >
  1238. {
  1239. typedef generic_extra_operations< Base, 8u, Signed > base_type;
  1240. typedef typename base_type::storage_type storage_type;
  1241. #define BOOST_ATOMIC_DETAIL_CAS_LOOP(op, original, result)\
  1242. __asm__ __volatile__\
  1243. (\
  1244. ".align 16\n\t"\
  1245. "1: mov %[orig], %[res]\n\t"\
  1246. op " %[res]\n\t"\
  1247. "lock; cmpxchgq %[res], %[storage]\n\t"\
  1248. "jne 1b"\
  1249. : [orig] "+a" (original), [storage] "+m" (storage), [res] "=&r" (result)\
  1250. : \
  1251. : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
  1252. )
  1253. static BOOST_FORCEINLINE storage_type fetch_negate(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
  1254. {
  1255. storage_type original = storage;
  1256. storage_type result;
  1257. BOOST_ATOMIC_DETAIL_CAS_LOOP("negq", original, result);
  1258. return original;
  1259. }
  1260. static BOOST_FORCEINLINE storage_type fetch_complement(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
  1261. {
  1262. storage_type original = storage;
  1263. storage_type result;
  1264. BOOST_ATOMIC_DETAIL_CAS_LOOP("notq", original, result);
  1265. return original;
  1266. }
  1267. static BOOST_FORCEINLINE storage_type negate(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
  1268. {
  1269. storage_type original = storage;
  1270. storage_type result;
  1271. BOOST_ATOMIC_DETAIL_CAS_LOOP("negq", original, result);
  1272. return result;
  1273. }
  1274. static BOOST_FORCEINLINE storage_type bitwise_complement(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
  1275. {
  1276. storage_type original = storage;
  1277. storage_type result;
  1278. BOOST_ATOMIC_DETAIL_CAS_LOOP("notq", original, result);
  1279. return result;
  1280. }
  1281. #undef BOOST_ATOMIC_DETAIL_CAS_LOOP
  1282. #define BOOST_ATOMIC_DETAIL_CAS_LOOP(op, argument, original, result)\
  1283. __asm__ __volatile__\
  1284. (\
  1285. ".align 16\n\t"\
  1286. "1: mov %[arg], %[res]\n\t"\
  1287. op " %%rax, %[res]\n\t"\
  1288. "lock; cmpxchgq %[res], %[storage]\n\t"\
  1289. "jne 1b"\
  1290. : [orig] "+a" (original), [storage] "+m" (storage), [res] "=&r" (result)\
  1291. : [arg] "r" (argument)\
  1292. : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
  1293. )
  1294. static BOOST_FORCEINLINE storage_type bitwise_and(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
  1295. {
  1296. storage_type original = storage;
  1297. storage_type result;
  1298. BOOST_ATOMIC_DETAIL_CAS_LOOP("andq", v, original, result);
  1299. return static_cast< storage_type >(result);
  1300. }
  1301. static BOOST_FORCEINLINE storage_type bitwise_or(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
  1302. {
  1303. storage_type original = storage;
  1304. storage_type result;
  1305. BOOST_ATOMIC_DETAIL_CAS_LOOP("orq", v, original, result);
  1306. return static_cast< storage_type >(result);
  1307. }
  1308. static BOOST_FORCEINLINE storage_type bitwise_xor(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
  1309. {
  1310. storage_type original = storage;
  1311. storage_type result;
  1312. BOOST_ATOMIC_DETAIL_CAS_LOOP("xorq", v, original, result);
  1313. return static_cast< storage_type >(result);
  1314. }
  1315. #undef BOOST_ATOMIC_DETAIL_CAS_LOOP
  1316. static BOOST_FORCEINLINE bool negate_and_test(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
  1317. {
  1318. return !!negate(storage, order);
  1319. }
  1320. static BOOST_FORCEINLINE bool complement_and_test(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
  1321. {
  1322. return !!bitwise_complement(storage, order);
  1323. }
  1324. static BOOST_FORCEINLINE void opaque_add(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
  1325. {
  1326. if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
  1327. {
  1328. __asm__ __volatile__
  1329. (
  1330. "lock; incq %[storage]\n\t"
  1331. : [storage] "+m" (storage)
  1332. :
  1333. : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
  1334. );
  1335. }
  1336. else
  1337. {
  1338. __asm__ __volatile__
  1339. (
  1340. "lock; addq %[argument], %[storage]\n\t"
  1341. : [storage] "+m" (storage)
  1342. : [argument] "er" (v)
  1343. : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
  1344. );
  1345. }
  1346. }
  1347. static BOOST_FORCEINLINE void opaque_sub(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
  1348. {
  1349. if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
  1350. {
  1351. __asm__ __volatile__
  1352. (
  1353. "lock; decq %[storage]\n\t"
  1354. : [storage] "+m" (storage)
  1355. :
  1356. : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
  1357. );
  1358. }
  1359. else
  1360. {
  1361. __asm__ __volatile__
  1362. (
  1363. "lock; subq %[argument], %[storage]\n\t"
  1364. : [storage] "+m" (storage)
  1365. : [argument] "er" (v)
  1366. : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
  1367. );
  1368. }
  1369. }
  1370. static BOOST_FORCEINLINE void opaque_negate(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
  1371. {
  1372. __asm__ __volatile__
  1373. (
  1374. "lock; negq %[storage]\n\t"
  1375. : [storage] "+m" (storage)
  1376. :
  1377. : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
  1378. );
  1379. }
  1380. static BOOST_FORCEINLINE void opaque_and(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
  1381. {
  1382. __asm__ __volatile__
  1383. (
  1384. "lock; andq %[argument], %[storage]\n\t"
  1385. : [storage] "+m" (storage)
  1386. : [argument] "er" (v)
  1387. : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
  1388. );
  1389. }
  1390. static BOOST_FORCEINLINE void opaque_or(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
  1391. {
  1392. __asm__ __volatile__
  1393. (
  1394. "lock; orq %[argument], %[storage]\n\t"
  1395. : [storage] "+m" (storage)
  1396. : [argument] "er" (v)
  1397. : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
  1398. );
  1399. }
  1400. static BOOST_FORCEINLINE void opaque_xor(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
  1401. {
  1402. __asm__ __volatile__
  1403. (
  1404. "lock; xorq %[argument], %[storage]\n\t"
  1405. : [storage] "+m" (storage)
  1406. : [argument] "er" (v)
  1407. : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
  1408. );
  1409. }
  1410. static BOOST_FORCEINLINE void opaque_complement(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
  1411. {
  1412. __asm__ __volatile__
  1413. (
  1414. "lock; notq %[storage]\n\t"
  1415. : [storage] "+m" (storage)
  1416. :
  1417. : "memory"
  1418. );
  1419. }
  1420. static BOOST_FORCEINLINE bool add_and_test(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
  1421. {
  1422. bool res;
  1423. #if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
  1424. if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
  1425. {
  1426. __asm__ __volatile__
  1427. (
  1428. "lock; incq %[storage]\n\t"
  1429. : [storage] "+m" (storage), [result] "=@ccnz" (res)
  1430. :
  1431. : "memory"
  1432. );
  1433. }
  1434. else
  1435. {
  1436. __asm__ __volatile__
  1437. (
  1438. "lock; addq %[argument], %[storage]\n\t"
  1439. : [storage] "+m" (storage), [result] "=@ccnz" (res)
  1440. : [argument] "er" (v)
  1441. : "memory"
  1442. );
  1443. }
  1444. #else
  1445. if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
  1446. {
  1447. __asm__ __volatile__
  1448. (
  1449. "lock; incq %[storage]\n\t"
  1450. "setnz %[result]\n\t"
  1451. : [storage] "+m" (storage), [result] "=q" (res)
  1452. :
  1453. : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
  1454. );
  1455. }
  1456. else
  1457. {
  1458. __asm__ __volatile__
  1459. (
  1460. "lock; addq %[argument], %[storage]\n\t"
  1461. "setnz %[result]\n\t"
  1462. : [storage] "+m" (storage), [result] "=q" (res)
  1463. : [argument] "er" (v)
  1464. : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
  1465. );
  1466. }
  1467. #endif
  1468. return res;
  1469. }
  1470. static BOOST_FORCEINLINE bool sub_and_test(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
  1471. {
  1472. bool res;
  1473. #if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
  1474. if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
  1475. {
  1476. __asm__ __volatile__
  1477. (
  1478. "lock; decq %[storage]\n\t"
  1479. : [storage] "+m" (storage), [result] "=@ccnz" (res)
  1480. :
  1481. : "memory"
  1482. );
  1483. }
  1484. else
  1485. {
  1486. __asm__ __volatile__
  1487. (
  1488. "lock; subq %[argument], %[storage]\n\t"
  1489. : [storage] "+m" (storage), [result] "=@ccnz" (res)
  1490. : [argument] "er" (v)
  1491. : "memory"
  1492. );
  1493. }
  1494. #else
  1495. if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
  1496. {
  1497. __asm__ __volatile__
  1498. (
  1499. "lock; decq %[storage]\n\t"
  1500. "setnz %[result]\n\t"
  1501. : [storage] "+m" (storage), [result] "=q" (res)
  1502. :
  1503. : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
  1504. );
  1505. }
  1506. else
  1507. {
  1508. __asm__ __volatile__
  1509. (
  1510. "lock; subq %[argument], %[storage]\n\t"
  1511. "setnz %[result]\n\t"
  1512. : [storage] "+m" (storage), [result] "=q" (res)
  1513. : [argument] "er" (v)
  1514. : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
  1515. );
  1516. }
  1517. #endif
  1518. return res;
  1519. }
  1520. static BOOST_FORCEINLINE bool and_and_test(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
  1521. {
  1522. bool res;
  1523. #if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
  1524. __asm__ __volatile__
  1525. (
  1526. "lock; andq %[argument], %[storage]\n\t"
  1527. : [storage] "+m" (storage), [result] "=@ccnz" (res)
  1528. : [argument] "er" (v)
  1529. : "memory"
  1530. );
  1531. #else
  1532. __asm__ __volatile__
  1533. (
  1534. "lock; andq %[argument], %[storage]\n\t"
  1535. "setnz %[result]\n\t"
  1536. : [storage] "+m" (storage), [result] "=q" (res)
  1537. : [argument] "er" (v)
  1538. : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
  1539. );
  1540. #endif
  1541. return res;
  1542. }
  1543. static BOOST_FORCEINLINE bool or_and_test(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
  1544. {
  1545. bool res;
  1546. #if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
  1547. __asm__ __volatile__
  1548. (
  1549. "lock; orq %[argument], %[storage]\n\t"
  1550. : [storage] "+m" (storage), [result] "=@ccnz" (res)
  1551. : [argument] "er" (v)
  1552. : "memory"
  1553. );
  1554. #else
  1555. __asm__ __volatile__
  1556. (
  1557. "lock; orq %[argument], %[storage]\n\t"
  1558. "setnz %[result]\n\t"
  1559. : [storage] "+m" (storage), [result] "=q" (res)
  1560. : [argument] "er" (v)
  1561. : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
  1562. );
  1563. #endif
  1564. return res;
  1565. }
  1566. static BOOST_FORCEINLINE bool xor_and_test(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
  1567. {
  1568. bool res;
  1569. #if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
  1570. __asm__ __volatile__
  1571. (
  1572. "lock; xorq %[argument], %[storage]\n\t"
  1573. : [storage] "+m" (storage), [result] "=@ccnz" (res)
  1574. : [argument] "er" (v)
  1575. : "memory"
  1576. );
  1577. #else
  1578. __asm__ __volatile__
  1579. (
  1580. "lock; xorq %[argument], %[storage]\n\t"
  1581. "setnz %[result]\n\t"
  1582. : [storage] "+m" (storage), [result] "=q" (res)
  1583. : [argument] "er" (v)
  1584. : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
  1585. );
  1586. #endif
  1587. return res;
  1588. }
  1589. static BOOST_FORCEINLINE bool bit_test_and_set(storage_type volatile& storage, unsigned int bit_number, memory_order) BOOST_NOEXCEPT
  1590. {
  1591. bool res;
  1592. #if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
  1593. __asm__ __volatile__
  1594. (
  1595. "lock; btsq %[bit_number], %[storage]\n\t"
  1596. : [storage] "+m" (storage), [result] "=@ccc" (res)
  1597. : [bit_number] "Kr" ((uint64_t)bit_number)
  1598. : "memory"
  1599. );
  1600. #else
  1601. __asm__ __volatile__
  1602. (
  1603. "lock; btsq %[bit_number], %[storage]\n\t"
  1604. "setc %[result]\n\t"
  1605. : [storage] "+m" (storage), [result] "=q" (res)
  1606. : [bit_number] "Kr" ((uint64_t)bit_number)
  1607. : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
  1608. );
  1609. #endif
  1610. return res;
  1611. }
  1612. static BOOST_FORCEINLINE bool bit_test_and_reset(storage_type volatile& storage, unsigned int bit_number, memory_order) BOOST_NOEXCEPT
  1613. {
  1614. bool res;
  1615. #if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
  1616. __asm__ __volatile__
  1617. (
  1618. "lock; btrq %[bit_number], %[storage]\n\t"
  1619. : [storage] "+m" (storage), [result] "=@ccc" (res)
  1620. : [bit_number] "Kr" ((uint64_t)bit_number)
  1621. : "memory"
  1622. );
  1623. #else
  1624. __asm__ __volatile__
  1625. (
  1626. "lock; btrq %[bit_number], %[storage]\n\t"
  1627. "setc %[result]\n\t"
  1628. : [storage] "+m" (storage), [result] "=q" (res)
  1629. : [bit_number] "Kr" ((uint64_t)bit_number)
  1630. : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
  1631. );
  1632. #endif
  1633. return res;
  1634. }
  1635. static BOOST_FORCEINLINE bool bit_test_and_complement(storage_type volatile& storage, unsigned int bit_number, memory_order) BOOST_NOEXCEPT
  1636. {
  1637. bool res;
  1638. #if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
  1639. __asm__ __volatile__
  1640. (
  1641. "lock; btcq %[bit_number], %[storage]\n\t"
  1642. : [storage] "+m" (storage), [result] "=@ccc" (res)
  1643. : [bit_number] "Kr" ((uint64_t)bit_number)
  1644. : "memory"
  1645. );
  1646. #else
  1647. __asm__ __volatile__
  1648. (
  1649. "lock; btcq %[bit_number], %[storage]\n\t"
  1650. "setc %[result]\n\t"
  1651. : [storage] "+m" (storage), [result] "=q" (res)
  1652. : [bit_number] "Kr" ((uint64_t)bit_number)
  1653. : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
  1654. );
  1655. #endif
  1656. return res;
  1657. }
  1658. };
  1659. #endif // defined(__x86_64__)
} // namespace detail
} // namespace atomics
} // namespace boost

#endif // BOOST_ATOMIC_DETAIL_EXTRA_OPS_GCC_X86_HPP_INCLUDED_