asm-nseel-x86-gcc.c 44 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153
/* note: only EEL_F_SIZE=8 is now supported (no float EEL_F's) */
#ifndef AMD64ABI
#define X64_EXTRA_STACK_SPACE 32 // Windows x64 requires the caller to reserve "shadow space" for 4 parameters at 8 bytes each, even though we pass via register
#endif
/* JIT template: call a C function of type double(double).
 * The bytes between the FUNCTION_MARKERs are copied by the code generator,
 * which patches the 0xfefefefe placeholder to the real function address.
 * Input operand is on the x87 stack; result is left on the x87 stack.
 * %esi/%rsi points to scratch workspace memory. */
void nseel_asm_1pdd(void)
{
__asm__(
FUNCTION_MARKER
"movl $0xfefefefe, %edi\n" /* patched: target function address */
#ifdef TARGET_X64
"fstpl (%rsi)\n" /* x87 top -> memory -> xmm0 (first FP argument) */
"movq (%rsi), %xmm0\n"
#ifdef AMD64ABI
"movl %rsi, %r15\n" /* preserve workspace pointer across the call */
"call *%edi\n"
"movl %r15, %rsi\n"
#else
"subl X64_EXTRA_STACK_SPACE, %rsp\n" /* Windows x64 shadow space */
"call *%edi\n"
"addl X64_EXTRA_STACK_SPACE, %rsp\n"
#endif
/* NOTE(review): "xmm0" below lacks the % register prefix (matches upstream);
 * verify the target assembler accepts it on x64 builds */
"movq xmm0, (%rsi)\n" /* xmm0 return value -> memory -> x87 stack */
"fldl (%rsi)\n"
#else
"subl $16, %esp\n" /* 32-bit cdecl: pass the double on the stack (16 keeps alignment) */
"fstpl (%esp)\n"
"call *%edi\n"
"addl $16, %esp\n"
#endif
FUNCTION_MARKER
);
}
void nseel_asm_1pdd_end(void){}
/* JIT template: call a C function of type double(double,double).
 * Both operands are on the x87 stack (second operand on top); the patched
 * 0xfefefefe is the function address. Result is left on the x87 stack. */
void nseel_asm_2pdd(void)
{
__asm__(
FUNCTION_MARKER
"movl $0xfefefefe, %edi\n" /* patched: target function address */
#ifdef TARGET_X64
"fstpl 8(%rsi)\n" /* pop both x87 values to workspace, then load arg regs */
"fstpl (%rsi)\n"
"movq 8(%rsi), %xmm1\n" /* second argument */
"movq (%rsi), %xmm0\n" /* first argument */
#ifdef AMD64ABI
"movl %rsi, %r15\n" /* preserve workspace pointer across the call */
"call *%edi\n"
"movl %r15, %rsi\n"
#else
"subl X64_EXTRA_STACK_SPACE, %rsp\n" /* Windows x64 shadow space */
"call *%edi\n"
"addl X64_EXTRA_STACK_SPACE, %rsp\n"
#endif
"movq xmm0, (%rsi)\n" /* return value -> x87 stack */
"fldl (%rsi)\n"
#else
"subl $16, %esp\n" /* 32-bit cdecl: both doubles on the stack */
"fstpl 8(%esp)\n"
"fstpl (%esp)\n"
"call *%edi\n"
"addl $16, %esp\n"
#endif
FUNCTION_MARKER
);
}
void nseel_asm_2pdd_end(void){}
/* JIT template: call double(double,double) and store the result back into
 * the variable pointed to by %edi/%rdi ("s" = store form, used for ops like
 * pow_op). First argument comes from (%edi), second from the x87 stack.
 * Returns the destination pointer in %eax and flushes denormal results to 0. */
void nseel_asm_2pdds(void)
{
__asm__(
FUNCTION_MARKER
"movl $0xfefefefe, %eax\n" /* patched: target function address */
#ifdef TARGET_X64
"fstpl (%rsi)\n" /* x87 top -> workspace (second argument) */
"movq (%rdi), %xmm0\n" /* first argument: the destination variable's value */
"movq (%rsi), %xmm1\n"
#ifdef AMD64ABI
"movl %rsi, %r15\n" /* preserve workspace and destination across the call */
"movl %rdi, %r14\n"
"call *%eax\n"
"movl %r14, %rdi\n" /* restore thrashed rdi */
"movl %r15, %rsi\n"
"movl %r14, %rax\n" /* set return value */
"movq xmm0, (%r14)\n" /* store result into destination */
#else
"subl X64_EXTRA_STACK_SPACE, %rsp\n" /* Windows x64 shadow space */
"call *%eax\n"
"movq xmm0, (%edi)\n" /* store result into destination */
"movl %edi, %eax\n" /* set return value */
"addl X64_EXTRA_STACK_SPACE, %rsp\n"
#endif
#else
"subl $8, %esp\n"
"fstpl (%esp)\n" /* second argument on the stack */
"pushl 4(%edi)\n" /* push parameter (high dword of the destination's value) */
"pushl (%edi)\n" /* push the rest of the parameter */
"call *%eax\n"
"addl $16, %esp\n"
"fstpl (%edi)\n" /* store result */
"movl %edi, %eax\n" /* set return value */
#endif
// denormal-fix result (this is only currently used for pow_op, so we want this!)
// exponent test: after +0x00100000, a biased exponent of 0 (denormal/zero)
// or 0x7ff (inf/nan) fails the > 0x00200000 check and the value is zeroed
"movl 4(%edi), %edx\n"
"addl $0x00100000, %edx\n"
"andl $0x7FF00000, %edx\n"
"cmpl $0x00200000, %edx\n"
"jg 0f\n"
"subl %edx, %edx\n" /* edx = 0 */
#ifdef TARGET_X64
"movll %rdx, (%rdi)\n"
#else
"movl %edx, (%edi)\n"
"movl %edx, 4(%edi)\n"
#endif
"0:\n"
FUNCTION_MARKER
);
}
void nseel_asm_2pdds_end(void) {}
//---------------------------------------------------------------------------------------------------------------
// do nothing, eh
/* JIT template: empty opcode — emits zero instructions between the markers. */
void nseel_asm_exec2(void)
{
__asm__(
FUNCTION_MARKER
""
FUNCTION_MARKER
);
}
void nseel_asm_exec2_end(void) { }
/* JIT template: fast inverse square root (the classic 0x5f3759df bit trick
 * plus one Newton-Raphson step). Input on the x87 stack, result on the x87
 * stack. The 0xfefefefe placeholders are patched to addresses of the
 * floating-point constants used by the refinement step. */
void nseel_asm_invsqrt(void)
{
__asm__(
FUNCTION_MARKER
"movl $0x5f3759df, %edx\n" /* magic constant for the initial estimate */
"fsts (%esi)\n" /* store x as single precision to reinterpret its bits */
#ifdef TARGET_X64
"movl 0xfefefefe, %rax\n" /* patched: pointer to constant (x * 0.5 factor) */
"fmul" EEL_F_SUFFIX " (%rax)\n"
"movsxl (%esi), %rcx\n"
#else
"fmul" EEL_F_SUFFIX " (0xfefefefe)\n" /* patched: constant address */
"movl (%esi), %ecx\n"
#endif
"sarl $1, %ecx\n" /* estimate = magic - (bits >> 1) */
"subl %ecx, %edx\n"
"movl %edx, (%esi)\n"
"fmuls (%esi)\n" /* Newton-Raphson: y * (1.5 - 0.5*x*y*y) */
"fmuls (%esi)\n"
#ifdef TARGET_X64
"movl 0xfefefefe, %rax\n" /* patched: pointer to constant (1.5) */
"fadd" EEL_F_SUFFIX " (%rax)\n"
#else
"fadd" EEL_F_SUFFIX " (0xfefefefe)\n" /* patched: constant address */
#endif
"fmuls (%esi)\n"
FUNCTION_MARKER
);
}
void nseel_asm_invsqrt_end(void) {}
/* JIT template (debug): replace the value on top of the x87 stack with the
 * current CPU stack pointer, loaded as an integer. */
void nseel_asm_dbg_getstackptr(void)
{
__asm__(
FUNCTION_MARKER
#ifdef __clang__
"ffree %st(0)\n" /* discard current top of x87 stack */
#else
"fstpl %st(0)\n"
#endif
"movl %esp, (%esi)\n" /* esp -> workspace -> x87 stack */
"fildl (%esi)\n"
FUNCTION_MARKER
);
}
void nseel_asm_dbg_getstackptr_end(void) {}
//---------------------------------------------------------------------------------------------------------------
/* JIT templates for unary math ops: each operates in place on the value on
 * top of the x87 stack. */
void nseel_asm_sin(void)
{
__asm__(
FUNCTION_MARKER
"fsin\n"
FUNCTION_MARKER
);
}
void nseel_asm_sin_end(void) {}
//---------------------------------------------------------------------------------------------------------------
void nseel_asm_cos(void)
{
__asm__(
FUNCTION_MARKER
"fcos\n"
FUNCTION_MARKER
);
}
void nseel_asm_cos_end(void) {}
//---------------------------------------------------------------------------------------------------------------
void nseel_asm_tan(void)
{
__asm__(
FUNCTION_MARKER
"fptan\n"
"fstp %st(0)\n" /* fptan pushes 1.0 after tan(x); pop it */
FUNCTION_MARKER
);
}
void nseel_asm_tan_end(void) {}
//---------------------------------------------------------------------------------------------------------------
void nseel_asm_sqr(void)
{
__asm__(
FUNCTION_MARKER
"fmul %st(0), %st(0)\n" /* x*x */
FUNCTION_MARKER
);
}
void nseel_asm_sqr_end(void) {}
//---------------------------------------------------------------------------------------------------------------
void nseel_asm_sqrt(void)
{
__asm__(
FUNCTION_MARKER
"fabs\n" /* sqrt of |x| — avoids NaN for negative input */
"fsqrt\n"
FUNCTION_MARKER
);
}
void nseel_asm_sqrt_end(void) {}
//---------------------------------------------------------------------------------------------------------------
void nseel_asm_log(void)
{
__asm__(
FUNCTION_MARKER
"fldln2\n" /* ln(x) = ln(2) * log2(x) via fyl2x */
"fxch\n"
"fyl2x\n"
FUNCTION_MARKER
);
}
void nseel_asm_log_end(void) {}
//---------------------------------------------------------------------------------------------------------------
void nseel_asm_log10(void)
{
__asm__(
FUNCTION_MARKER
"fldlg2\n" /* log10(x) = log10(2) * log2(x) via fyl2x */
"fxch\n"
"fyl2x\n"
FUNCTION_MARKER
);
}
void nseel_asm_log10_end(void) {}
//---------------------------------------------------------------------------------------------------------------
void nseel_asm_abs(void)
{
__asm__(
FUNCTION_MARKER
"fabs\n"
FUNCTION_MARKER
);
}
void nseel_asm_abs_end(void) {}
//---------------------------------------------------------------------------------------------------------------
/* JIT template: assignment. Copies the 8-byte EEL_F at (%eax) to (%edi),
 * flushing denormals (and values with all-ones exponent after the bias trick)
 * to zero. Returns the destination pointer in %eax. */
void nseel_asm_assign(void)
{
#ifdef TARGET_X64
__asm__(
FUNCTION_MARKER
"movll (%rax), %rdx\n" /* load full 64-bit value */
"movll %rdx, %rcx\n"
"shrl $32, %rdx\n" /* edx = high dword (sign+exponent) */
"addl $0x00100000, %edx\n"
"andl $0x7FF00000, %edx\n"
"cmpl $0x00200000, %edx\n"
"movll %rdi, %rax\n" /* return destination pointer */
"jg 0f\n"
"subl %ecx, %ecx\n" /* denormal: store 0 instead */
"0:\n"
"movll %rcx, (%edi)\n"
FUNCTION_MARKER
);
#else
__asm__(
FUNCTION_MARKER
"movl (%eax), %ecx\n" /* low dword */
"movl 4(%eax), %edx\n" /* high dword (sign+exponent) */
"movl %edx, %eax\n"
"addl $0x00100000, %eax\n" // if exponent is zero, make exponent 0x7ff, if 7ff, make 7fe
"andl $0x7ff00000, %eax\n"
"cmpl $0x00200000, %eax\n"
"jg 0f\n"
"subl %ecx, %ecx\n" /* denormal: store 0 instead */
"subl %edx, %edx\n"
"0:\n"
"movl %edi, %eax\n" /* return destination pointer */
"movl %ecx, (%edi)\n"
"movl %edx, 4(%edi)\n"
FUNCTION_MARKER
);
#endif
}
void nseel_asm_assign_end(void) {}
//---------------------------------------------------------------------------------------------------------------
/* JIT template: store the x87 top-of-stack into (%edi) with denormal
 * flush-to-zero; returns the destination pointer in %eax. */
void nseel_asm_assign_fromfp(void)
{
__asm__(
FUNCTION_MARKER
"fstpl (%edi)\n"
"movl 4(%edi), %edx\n" /* exponent check as in nseel_asm_assign */
"addl $0x00100000, %edx\n"
"andl $0x7FF00000, %edx\n"
"cmpl $0x00200000, %edx\n"
"movl %edi, %eax\n"
"jg 0f\n"
"subl %edx, %edx\n"
#ifdef TARGET_X64
"movll %rdx, (%rdi)\n"
#else
"movl %edx, (%edi)\n"
"movl %edx, 4(%edi)\n"
#endif
"0:\n"
FUNCTION_MARKER
);
}
void nseel_asm_assign_fromfp_end(void) {}
//---------------------------------------------------------------------------------------------------------------
/* JIT template: fast store of x87 top-of-stack into (%edi); no denormal fix. */
void nseel_asm_assign_fast_fromfp(void)
{
__asm__(
FUNCTION_MARKER
"movl %edi, %eax\n"
"fstpl (%edi)\n"
FUNCTION_MARKER
);
}
void nseel_asm_assign_fast_fromfp_end(void) {}
//---------------------------------------------------------------------------------------------------------------
/* JIT template: fast 8-byte copy from (%eax) to (%edi); no denormal fix.
 * Returns the destination pointer in %eax. */
void nseel_asm_assign_fast(void)
{
#ifdef TARGET_X64
__asm__(
FUNCTION_MARKER
"movll (%rax), %rdx\n"
"movll %rdx, (%edi)\n"
"movll %rdi, %rax\n"
FUNCTION_MARKER
);
#else
__asm__(
FUNCTION_MARKER
"movl (%eax), %ecx\n"
"movl %ecx, (%edi)\n"
"movl 4(%eax), %ecx\n"
"movl %edi, %eax\n"
"movl %ecx, 4(%edi)\n"
FUNCTION_MARKER
);
#endif
}
void nseel_asm_assign_fast_end(void) {}
//---------------------------------------------------------------------------------------------------------------
/* JIT template: add the top two x87 stack values, leaving the sum on top. */
void nseel_asm_add(void)
{
__asm__(
FUNCTION_MARKER
#ifdef __clang__
"faddp %st(1)\n" /* clang's integrated assembler wants the explicit form */
#else
"fadd\n"
#endif
FUNCTION_MARKER
);
}
void nseel_asm_add_end(void) {}
/* JIT template: in-place "+=" on the variable at (%edi): adds the x87 top to
 * it, stores back with denormal flush-to-zero, returns the pointer in %eax. */
void nseel_asm_add_op(void)
{
__asm__(
FUNCTION_MARKER
"fadd" EEL_F_SUFFIX " (%edi)\n"
"movl %edi, %eax\n"
"fstp" EEL_F_SUFFIX " (%edi)\n"
"movl 4(%edi), %edx\n" /* denormal flush (see nseel_asm_assign) */
"addl $0x00100000, %edx\n"
"andl $0x7FF00000, %edx\n"
"cmpl $0x00200000, %edx\n"
"jg 0f\n"
"subl %edx, %edx\n"
#ifdef TARGET_X64
"movll %rdx, (%rdi)\n"
#else
"movl %edx, (%edi)\n"
"movl %edx, 4(%edi)\n"
#endif
"0:\n"
FUNCTION_MARKER
);
}
void nseel_asm_add_op_end(void) {}
/* JIT template: "+=" without the denormal fix. */
void nseel_asm_add_op_fast(void)
{
__asm__(
FUNCTION_MARKER
"fadd" EEL_F_SUFFIX " (%edi)\n"
"movl %edi, %eax\n"
"fstp" EEL_F_SUFFIX " (%edi)\n"
FUNCTION_MARKER
);
}
void nseel_asm_add_op_fast_end(void) {}
//---------------------------------------------------------------------------------------------------------------
/* JIT template: subtract x87 top from the value beneath it (second - first). */
void nseel_asm_sub(void)
{
__asm__(
FUNCTION_MARKER
#ifdef __clang__
"fsubrp %st(0), %st(1)\n"
#else
#ifdef __GNUC__
#ifdef __INTEL_COMPILER
"fsub\n"
#else
"fsubr\n" // gnuc has fsub/fsubr backwards, ack
#endif
#else
"fsub\n"
#endif
#endif
FUNCTION_MARKER
);
}
void nseel_asm_sub_end(void) {}
/* JIT template: in-place "-=" on (%edi) with denormal flush-to-zero;
 * returns the pointer in %eax. */
void nseel_asm_sub_op(void)
{
__asm__(
FUNCTION_MARKER
"fsubr" EEL_F_SUFFIX " (%edi)\n" /* (%edi) - st(0) (reversed per gcc quirk above) */
"movl %edi, %eax\n"
"fstp" EEL_F_SUFFIX " (%edi)\n"
"movl 4(%edi), %edx\n" /* denormal flush (see nseel_asm_assign) */
"addl $0x00100000, %edx\n"
"andl $0x7FF00000, %edx\n"
"cmpl $0x00200000, %edx\n"
"jg 0f\n"
"subl %edx, %edx\n"
#ifdef TARGET_X64
"movll %rdx, (%rdi)\n"
#else
"movl %edx, (%edi)\n"
"movl %edx, 4(%edi)\n"
#endif
"0:\n"
FUNCTION_MARKER
);
}
void nseel_asm_sub_op_end(void) {}
/* JIT template: "-=" without the denormal fix. */
void nseel_asm_sub_op_fast(void)
{
__asm__(
FUNCTION_MARKER
"fsubr" EEL_F_SUFFIX " (%edi)\n"
"movl %edi, %eax\n"
"fstp" EEL_F_SUFFIX " (%edi)\n"
FUNCTION_MARKER
);
}
void nseel_asm_sub_op_fast_end(void) {}
//---------------------------------------------------------------------------------------------------------------
/* JIT template: multiply the top two x87 stack values. */
void nseel_asm_mul(void)
{
__asm__(
FUNCTION_MARKER
#ifdef __clang__
"fmulp %st(0), %st(1)\n"
#else
"fmul\n"
#endif
FUNCTION_MARKER
);
}
void nseel_asm_mul_end(void) {}
/* JIT template: in-place "*=" on (%edi) with denormal flush-to-zero;
 * returns the pointer in %eax. */
void nseel_asm_mul_op(void)
{
__asm__(
FUNCTION_MARKER
"fmul" EEL_F_SUFFIX " (%edi)\n"
"movl %edi, %eax\n"
"fstp" EEL_F_SUFFIX " (%edi)\n"
"movl 4(%edi), %edx\n" /* denormal flush (see nseel_asm_assign) */
"addl $0x00100000, %edx\n"
"andl $0x7FF00000, %edx\n"
"cmpl $0x00200000, %edx\n"
"jg 0f\n"
"subl %edx, %edx\n"
#ifdef TARGET_X64
"movll %rdx, (%rdi)\n"
#else
"movl %edx, (%edi)\n"
"movl %edx, 4(%edi)\n"
#endif
"0:\n"
FUNCTION_MARKER
);
}
void nseel_asm_mul_op_end(void) {}
/* JIT template: "*=" without the denormal fix. */
void nseel_asm_mul_op_fast(void)
{
__asm__(
FUNCTION_MARKER
"fmul" EEL_F_SUFFIX " (%edi)\n"
"movl %edi, %eax\n"
"fstp" EEL_F_SUFFIX " (%edi)\n"
FUNCTION_MARKER
);
}
void nseel_asm_mul_op_fast_end(void) {}
//---------------------------------------------------------------------------------------------------------------
/* JIT template: divide second x87 stack value by the top value. */
void nseel_asm_div(void)
{
__asm__(
FUNCTION_MARKER
#ifdef __clang__
"fdivrp %st(1)\n"
#else
#ifdef __GNUC__
#ifdef __INTEL_COMPILER
"fdiv\n"
#else
"fdivr\n" // gcc inline asm seems to have fdiv/fdivr backwards
#endif
#else
"fdiv\n"
#endif
#endif
FUNCTION_MARKER
);
}
void nseel_asm_div_end(void) {}
/* JIT template: in-place "/=" on (%edi) with denormal flush-to-zero;
 * returns the pointer in %eax. */
void nseel_asm_div_op(void)
{
__asm__(
FUNCTION_MARKER
"fld" EEL_F_SUFFIX " (%edi)\n" /* (%edi) / st -> stored back to (%edi) */
#ifdef __clang__
"fdivp %st(1)\n"
#else
#ifndef __GNUC__
"fdivr\n"
#else
#ifdef __INTEL_COMPILER
"fdivp %st(1)\n"
#else
"fdiv\n" /* reversed mnemonic per the gcc quirk noted above */
#endif
#endif
#endif
"movl %edi, %eax\n"
"fstp" EEL_F_SUFFIX " (%edi)\n"
"movl 4(%edi), %edx\n" /* denormal flush (see nseel_asm_assign) */
"addl $0x00100000, %edx\n"
"andl $0x7FF00000, %edx\n"
"cmpl $0x00200000, %edx\n"
"jg 0f\n"
"subl %edx, %edx\n"
#ifdef TARGET_X64
"movll %rdx, (%rdi)\n"
#else
"movl %edx, (%edi)\n"
"movl %edx, 4(%edi)\n"
#endif
"0:\n"
FUNCTION_MARKER
);
}
void nseel_asm_div_op_end(void) {}
/* JIT template: "/=" without the denormal fix. */
void nseel_asm_div_op_fast(void)
{
__asm__(
FUNCTION_MARKER
"fld" EEL_F_SUFFIX " (%edi)\n"
#ifdef __clang__
"fdivp %st(1)\n"
#else
#ifndef __GNUC__
"fdivr\n"
#else
#ifdef __INTEL_COMPILER
"fdivp %st(1)\n"
#else
"fdiv\n"
#endif
#endif
#endif
"movl %edi, %eax\n"
"fstp" EEL_F_SUFFIX " (%edi)\n"
FUNCTION_MARKER
);
}
void nseel_asm_div_op_fast_end(void) {}
//---------------------------------------------------------------------------------------------------------------
/* JIT template: integer modulo. Both x87 operands are converted to unsigned
 * 32-bit integers (via fabs + fistpl); divide-by-zero yields 0. Result is
 * pushed back onto the x87 stack. */
void nseel_asm_mod(void)
{
__asm__(
FUNCTION_MARKER
"fabs\n"
"fistpl (%esi)\n" /* divisor -> int */
"fabs\n"
"fistpl 4(%esi)\n" /* dividend -> int */
"xorl %edx, %edx\n"
"cmpl $0, (%esi)\n"
"je 0f\n" // skip divide, set return to 0
"movl 4(%esi), %eax\n"
"divl (%esi)\n" /* edx = edx:eax % divisor */
"0:\n"
"movl %edx, (%esi)\n"
"fildl (%esi)\n"
FUNCTION_MARKER
);
}
void nseel_asm_mod_end(void) {}
/* JIT template: integer shift-left: second operand << first operand
 * (both truncated to 32-bit ints); result back on the x87 stack. */
void nseel_asm_shl(void)
{
__asm__(
FUNCTION_MARKER
"fistpl (%esi)\n" /* shift count */
"fistpl 4(%esi)\n" /* value to shift */
"movl (%esi), %ecx\n"
"movl 4(%esi), %eax\n"
"shll %cl, %eax\n"
"movl %eax, (%esi)\n"
"fildl (%esi)\n"
FUNCTION_MARKER
);
}
void nseel_asm_shl_end(void) {}
/* JIT template: arithmetic (sign-preserving) shift-right, as above. */
void nseel_asm_shr(void)
{
__asm__(
FUNCTION_MARKER
"fistpl (%esi)\n" /* shift count */
"fistpl 4(%esi)\n" /* value to shift */
"movl (%esi), %ecx\n"
"movl 4(%esi), %eax\n"
"sarl %cl, %eax\n"
"movl %eax, (%esi)\n"
"fildl (%esi)\n"
FUNCTION_MARKER
);
}
void nseel_asm_shr_end(void) {}
/* JIT template: in-place "%=" on the variable at (%edi); unsigned integer
 * modulo with divide-by-zero yielding 0. Returns the pointer in %eax. */
void nseel_asm_mod_op(void)
{
__asm__(
FUNCTION_MARKER
"fld" EEL_F_SUFFIX " (%edi)\n"
"fxch\n"
"fabs\n"
"fistpl (%edi)\n" /* divisor -> int */
"fabs\n"
"fistpl (%esi)\n" /* dividend -> int */
"xorl %edx, %edx\n"
"cmpl $0, (%edi)\n"
"je 0f\n" // skip divide, set return to 0
"movl (%esi), %eax\n"
"divl (%edi)\n"
"0:\n"
"movl %edx, (%edi)\n"
"fildl (%edi)\n"
"movl %edi, %eax\n"
"fstp" EEL_F_SUFFIX " (%edi)\n"
FUNCTION_MARKER
);
}
void nseel_asm_mod_op_end(void) {}
//---------------------------------------------------------------------------------------------------------------
/* JIT template: bitwise OR. Both x87 operands are converted to signed 64-bit
 * integers (fistpll), OR'd, and the result reloaded onto the x87 stack. */
void nseel_asm_or(void)
{
__asm__(
FUNCTION_MARKER
"fistpll (%esi)\n"
"fistpll 8(%esi)\n"
#ifdef TARGET_X64
"movll 8(%rsi), %rdi\n"
"orll %rdi, (%rsi)\n"
#else
"movl 8(%esi), %edi\n" /* 32-bit: OR the two halves separately */
"movl 12(%esi), %ecx\n"
"orl %edi, (%esi)\n"
"orl %ecx, 4(%esi)\n"
#endif
"fildll (%esi)\n"
FUNCTION_MARKER
);
}
void nseel_asm_or_end(void) {}
/* JIT template: OR with 0 — just truncates the value to a 64-bit integer. */
void nseel_asm_or0(void)
{
__asm__(
FUNCTION_MARKER
"fistpll (%esi)\n"
"fildll (%esi)\n"
FUNCTION_MARKER
);
}
void nseel_asm_or0_end(void) {}
/* JIT template: in-place "|=" on the variable at (%edi); integer OR of the
 * converted operands, result stored back; returns the pointer in %eax. */
void nseel_asm_or_op(void)
{
__asm__(
FUNCTION_MARKER
"fld" EEL_F_SUFFIX " (%edi)\n"
"fxch\n"
"fistpll (%edi)\n"
"fistpll (%esi)\n"
#ifdef TARGET_X64
"movll (%rsi), %rax\n"
"orll %rax, (%rdi)\n"
#else
"movl (%esi), %eax\n"
"movl 4(%esi), %ecx\n"
"orl %eax, (%edi)\n"
"orl %ecx, 4(%edi)\n"
#endif
"fildll (%edi)\n"
"movl %edi, %eax\n"
"fstp" EEL_F_SUFFIX " (%edi)\n"
FUNCTION_MARKER
);
}
void nseel_asm_or_op_end(void) {}
/* JIT template: bitwise XOR of the top two x87 values as 64-bit integers. */
void nseel_asm_xor(void)
{
__asm__(
FUNCTION_MARKER
"fistpll (%esi)\n"
"fistpll 8(%esi)\n"
#ifdef TARGET_X64
"movll 8(%rsi), %rdi\n"
"xorll %rdi, (%rsi)\n"
#else
"movl 8(%esi), %edi\n"
"movl 12(%esi), %ecx\n"
"xorl %edi, (%esi)\n"
"xorl %ecx, 4(%esi)\n"
#endif
"fildll (%esi)\n"
FUNCTION_MARKER
);
}
void nseel_asm_xor_end(void) {}
/* JIT template: in-place "^=" on (%edi), as or_op but with XOR. */
void nseel_asm_xor_op(void)
{
__asm__(
FUNCTION_MARKER
"fld" EEL_F_SUFFIX " (%edi)\n"
"fxch\n"
"fistpll (%edi)\n"
"fistpll (%esi)\n"
#ifdef TARGET_X64
"movll (%rsi), %rax\n"
"xorll %rax, (%rdi)\n"
#else
"movl (%esi), %eax\n"
"movl 4(%esi), %ecx\n"
"xorl %eax, (%edi)\n"
"xorl %ecx, 4(%edi)\n"
#endif
"fildll (%edi)\n"
"movl %edi, %eax\n"
"fstp" EEL_F_SUFFIX " (%edi)\n"
FUNCTION_MARKER
);
}
void nseel_asm_xor_op_end(void) {}
//---------------------------------------------------------------------------------------------------------------
/* JIT template: bitwise AND of the top two x87 values as 64-bit integers. */
void nseel_asm_and(void)
{
__asm__(
FUNCTION_MARKER
"fistpll (%esi)\n"
"fistpll 8(%esi)\n"
#ifdef TARGET_X64
"movll 8(%rsi), %rdi\n"
"andll %rdi, (%rsi)\n"
#else
"movl 8(%esi), %edi\n" /* 32-bit: AND the two halves separately */
"movl 12(%esi), %ecx\n"
"andl %edi, (%esi)\n"
"andl %ecx, 4(%esi)\n"
#endif
"fildll (%esi)\n"
FUNCTION_MARKER
);
}
void nseel_asm_and_end(void) {}
/* JIT template: in-place "&=" on (%edi); returns the pointer in %eax. */
void nseel_asm_and_op(void)
{
__asm__(
FUNCTION_MARKER
"fld" EEL_F_SUFFIX " (%edi)\n"
"fxch\n"
"fistpll (%edi)\n"
"fistpll (%esi)\n"
#ifdef TARGET_X64
"movll (%rsi), %rax\n"
"andll %rax, (%rdi)\n"
#else
"movl (%esi), %eax\n"
"movl 4(%esi), %ecx\n"
"andl %eax, (%edi)\n"
"andl %ecx, 4(%edi)\n"
#endif
"fildll (%edi)\n"
"movl %edi, %eax\n"
"fstp" EEL_F_SUFFIX " (%edi)\n"
FUNCTION_MARKER
);
}
void nseel_asm_and_op_end(void) {}
//---------------------------------------------------------------------------------------------------------------
void nseel_asm_uplus(void) // this is the same as doing nothing, it seems
{
__asm__(
FUNCTION_MARKER
""
FUNCTION_MARKER
);
}
void nseel_asm_uplus_end(void) {}
//---------------------------------------------------------------------------------------------------------------
/* JIT template: unary minus on the x87 top-of-stack. */
void nseel_asm_uminus(void)
{
__asm__(
FUNCTION_MARKER
"fchs\n"
FUNCTION_MARKER
);
}
void nseel_asm_uminus_end(void) {}
//---------------------------------------------------------------------------------------------------------------
/* JIT template: sign(x) — replaces x87 top with +1.0 or -1.0, except that
 * (+/-)0 is returned unchanged. The sign is taken from the stored value's
 * sign bit; the magnitude bits are tested first to detect zero. */
void nseel_asm_sign(void)
{
__asm__(
FUNCTION_MARKER
#ifdef TARGET_X64
"fst" EEL_F_SUFFIX " (%rsi)\n"
"mov" EEL_F_SUFFIX " (%rsi), %rdx\n"
"movll $0x7FFFFFFFFFFFFFFF, %rcx\n" /* magnitude mask */
"testll %rcx, %rdx\n"
"jz 0f\n" // zero zero, return the value passed directly
// calculate sign
"incll %rcx\n" // rcx becomes 0x80000...
"fstp %st(0)\n"
"fld1\n"
"testl %rcx, %rdx\n" /* sign bit set? */
"jz 0f\n"
"fchs\n"
"0:\n"
#else
"fsts (%esi)\n" /* store as single precision; only the bits are inspected */
"movl (%esi), %ecx\n"
"movl $0x7FFFFFFF, %edx\n" /* magnitude mask */
"testl %edx, %ecx\n"
"jz 0f\n" // zero zero, return the value passed directly
// calculate sign
"incl %edx\n" // edx becomes 0x8000...
"fstp %st(0)\n"
"fld1\n"
"testl %edx, %ecx\n" /* sign bit set? */
"jz 0f\n"
"fchs\n"
"0:\n"
#endif
FUNCTION_MARKER
);
}
void nseel_asm_sign_end(void) {}
//---------------------------------------------------------------------------------------------------------------
/* JIT template: boolean NOT of the integer flag in %eax: eax = (eax==0). */
void nseel_asm_bnot(void)
{
__asm__(
FUNCTION_MARKER
"testl %eax, %eax\n"
"setz %al\n"
"andl $0xff, %eax\n" /* clear the upper bytes left over in eax */
FUNCTION_MARKER
);
}
void nseel_asm_bnot_end(void) {}
  886. //---------------------------------------------------------------------------------------------------------------
  887. void nseel_asm_fcall(void)
  888. {
  889. __asm__(
  890. FUNCTION_MARKER
  891. "movl $0xfefefefe, %edx\n"
  892. #ifdef TARGET_X64
  893. "subl $8, %esp\n"
  894. "call *%edx\n"
  895. "addl $8, %esp\n"
  896. #else
  897. "subl $12, %esp\n" /* keep stack 16 byte aligned, 4 bytes for return address */
  898. "call *%edx\n"
  899. "addl $12, %esp\n"
  900. #endif
  901. FUNCTION_MARKER
  902. );
  903. }
  904. void nseel_asm_fcall_end(void) {}
// Short-circuit logical AND: if the boolean in %eax is zero the result is
// already false, so the patched-in code that evaluates the right-hand side
// is skipped; otherwise it is called (address patched over 0xfefefefe).
void nseel_asm_band(void)
{
__asm__(
FUNCTION_MARKER
"testl %eax, %eax\n"
"jz 0f\n" // lhs false -> skip evaluating rhs
"movl $0xfefefefe, %ecx\n" // patched: address of rhs evaluation code
#ifdef TARGET_X64
"subl $8, %rsp\n"
#else
"subl $12, %esp\n"
#endif
"call *%ecx\n"
#ifdef TARGET_X64
"addl $8, %rsp\n"
#else
"addl $12, %esp\n"
#endif
"0:\n"
FUNCTION_MARKER
);
}
void nseel_asm_band_end(void) {} // delimiter: end of template
// Short-circuit logical OR: if the boolean in %eax is nonzero the result is
// already true, so the patched-in right-hand-side code is skipped; otherwise
// it is called (address patched over 0xfefefefe).
void nseel_asm_bor(void)
{
__asm__(
FUNCTION_MARKER
"testl %eax, %eax\n"
"jnz 0f\n" // lhs true -> skip evaluating rhs
"movl $0xfefefefe, %ecx\n" // patched: address of rhs evaluation code
#ifdef TARGET_X64
"subl $8, %rsp\n"
#else
"subl $12, %esp\n"
#endif
"call *%ecx\n"
#ifdef TARGET_X64
"addl $8, %rsp\n"
#else
"addl $12, %esp\n"
#endif
"0:\n"
FUNCTION_MARKER
);
}
void nseel_asm_bor_end(void) {} // delimiter: end of template
//---------------------------------------------------------------------------------------------------------------
// Fuzzy equality of the two values on the x87 stack: subtracts them, takes
// the absolute difference and compares it against g_closefact (stored 8
// bytes below the base pointer in %ebx / %r12). Leaves a boolean in %eax:
// 256 (true) when within g_closefact, 0 otherwise.
void nseel_asm_equal(void)
{
__asm__(
FUNCTION_MARKER
#ifdef __clang__
"fsubp %st(1)\n"
#else
"fsub\n"
#endif
"fabs\n"
#ifdef TARGET_X64
"fcomp" EEL_F_SUFFIX " -8(%r12)\n" //[g_closefact]
#else
"fcomp" EEL_F_SUFFIX " -8(%ebx)\n" //[g_closefact]
#endif
"fstsw %ax\n" // copy x87 condition codes into %ax
"andl $256, %eax\n" // old behavior: if 256 set, true (NaN means true)
FUNCTION_MARKER
);
}
void nseel_asm_equal_end(void) {} // delimiter: end of template
//
//---------------------------------------------------------------------------------------------------------------
// Exact (bitwise-FP) equality of the two values on the x87 stack.
// fcompp pops both and sets C0/C2/C3; after masking bits 8/10/14,
// %eax == 16384 (C3 only) iff the values compared equal, else %eax = 0.
void nseel_asm_equal_exact(void)
{
__asm__(
FUNCTION_MARKER
"fcompp\n"
"fstsw %ax\n" // for equal 256 and 1024 should be clear, 16384 should be set
"andl $17664, %eax\n" // mask C4/C3/C1, bits 8/10/14, 16384|256|1024 -- if equals 16384, then equality
"cmp $16384, %eax\n"
"je 0f\n"
"subl %eax, %eax\n" // not exactly equal (or unordered) -> 0
"0:\n"
FUNCTION_MARKER
);
}
void nseel_asm_equal_exact_end(void) {} // delimiter: end of template
// Exact inequality: same comparison as nseel_asm_equal_exact, with the
// final xor flipping the boolean (0 <-> 16384).
void nseel_asm_notequal_exact(void)
{
__asm__(
FUNCTION_MARKER
"fcompp\n"
"fstsw %ax\n" // for equal 256 and 1024 should be clear, 16384 should be set
"andl $17664, %eax\n" // mask C4/C3/C1, bits 8/10/14, 16384|256|1024 -- if equals 16384, then equality
"cmp $16384, %eax\n"
"je 0f\n"
"subl %eax, %eax\n" // not exactly equal (or unordered) -> 0
"0:\n"
"xorl $16384, %eax\n" // flip the result
FUNCTION_MARKER
);
}
void nseel_asm_notequal_exact_end(void) {} // delimiter: end of template
//
//---------------------------------------------------------------------------------------------------------------
// Fuzzy inequality: same |a-b| vs g_closefact test as nseel_asm_equal, with
// the sense inverted by the final xor. %eax = 256 (true) when the values
// differ by at least g_closefact, else 0.
void nseel_asm_notequal(void)
{
__asm__(
FUNCTION_MARKER
#ifdef __clang__
"fsubp %st(1)\n"
#else
"fsub\n"
#endif
"fabs\n"
#ifdef TARGET_X64
"fcomp" EEL_F_SUFFIX " -8(%r12)\n" //[g_closefact]
#else
"fcomp" EEL_F_SUFFIX " -8(%ebx)\n" //[g_closefact]
#endif
"fstsw %ax\n" // copy x87 condition codes into %ax
"andl $256, %eax\n"
"xorl $256, %eax\n" // old behavior: if 256 set, FALSE (NaN makes for false)
FUNCTION_MARKER
);
}
void nseel_asm_notequal_end(void) {} // delimiter: end of template
//---------------------------------------------------------------------------------------------------------------
// Greater-than comparison of the two values on the x87 stack (pops both).
// Leaves a nonzero boolean in %eax via the C0/C2 condition-code bits.
void nseel_asm_above(void)
{
__asm__(
FUNCTION_MARKER
"fcompp\n"
"fstsw %ax\n"
"andl $1280, %eax\n" // (1024+256) old behavior: NaN would mean 1, preserve that
FUNCTION_MARKER
);
}
void nseel_asm_above_end(void) {} // delimiter: end of template
//---------------------------------------------------------------------------------------------------------------
// Below-or-equal comparison of the two values on the x87 stack (pops both).
// Masks the C0 bit and inverts it, leaving %eax = 256 (true) or 0.
void nseel_asm_beloweq(void)
{
__asm__(
FUNCTION_MARKER
"fcompp\n"
"fstsw %ax\n"
"andl $256, %eax\n" // old behavior: NaN would be 0 (ugh)
"xorl $256, %eax\n"
FUNCTION_MARKER
);
}
void nseel_asm_beloweq_end(void) {} // delimiter: end of template
// Convert the boolean in %eax to a floating point value on the x87 stack:
// pushes 1.0 if %eax is nonzero, 0.0 otherwise.
void nseel_asm_booltofp(void)
{
__asm__(
FUNCTION_MARKER
"testl %eax, %eax\n"
"jz 0f\n"
"fld1\n" // true -> 1.0
"jmp 1f\n"
"0:\n"
"fldz\n" // false -> 0.0
"1:\n"
FUNCTION_MARKER
);
}
void nseel_asm_booltofp_end(void) {} // delimiter: end of template
// Convert the FP value in st(0) to a boolean in %eax: compares |value|
// against g_closefact (8 bytes below the %ebx / %r12 base) and leaves
// 256 (true) when the magnitude is not below that threshold, else 0.
void nseel_asm_fptobool(void)
{
__asm__(
FUNCTION_MARKER
"fabs\n"
#ifdef TARGET_X64
"fcomp" EEL_F_SUFFIX " -8(%r12)\n" //[g_closefact]
#else
"fcomp" EEL_F_SUFFIX " -8(%ebx)\n" //[g_closefact]
#endif
"fstsw %ax\n"
"andl $256, %eax\n"
"xorl $256, %eax\n" // invert C0 so "not close to zero" reads as true
FUNCTION_MARKER
);
}
void nseel_asm_fptobool_end(void) {} // delimiter: end of template
// Like nseel_asm_fptobool but with the opposite sense (no final xor):
// %eax = 256 when |st(0)| is below g_closefact, else 0.
void nseel_asm_fptobool_rev(void)
{
__asm__(
FUNCTION_MARKER
"fabs\n"
#ifdef TARGET_X64
"fcomp" EEL_F_SUFFIX " -8(%r12)\n" //[g_closefact]
#else
"fcomp" EEL_F_SUFFIX " -8(%ebx)\n" //[g_closefact]
#endif
"fstsw %ax\n"
"andl $256, %eax\n"
FUNCTION_MARKER
);
}
void nseel_asm_fptobool_rev_end(void) {} // delimiter: end of template
// min() by reference: %edi and %eax point at EEL_F values; leaves in %eax
// the pointer to the smaller of the two (C0 set means *(edi) < *(eax)).
void nseel_asm_min(void)
{
__asm__(
FUNCTION_MARKER
"fld" EEL_F_SUFFIX " (%edi)\n"
"fcomp" EEL_F_SUFFIX " (%eax)\n"
"movl %eax, %ecx\n" // save %eax; fstsw clobbers %ax
"fstsw %ax\n"
"testl $256, %eax\n" // C0: *(edi) < *(eax) ?
"movl %ecx, %eax\n" // restore default result (%eax operand)
"jz 0f\n"
"movl %edi, %eax\n" // *(edi) was smaller -> return that pointer
"0:\n"
FUNCTION_MARKER
);
}
void nseel_asm_min_end(void) {} // delimiter: end of template
// max() by reference: %edi and %eax point at EEL_F values; leaves in %eax
// the pointer to the larger of the two (inverse branch of nseel_asm_min).
void nseel_asm_max(void)
{
__asm__(
FUNCTION_MARKER
"fld" EEL_F_SUFFIX " (%edi)\n"
"fcomp" EEL_F_SUFFIX " (%eax)\n"
"movl %eax, %ecx\n" // save %eax; fstsw clobbers %ax
"fstsw %ax\n"
"testl $256, %eax\n" // C0: *(edi) < *(eax) ?
"movl %ecx, %eax\n" // restore default result (%eax operand)
"jnz 0f\n"
"movl %edi, %eax\n" // *(edi) was not smaller -> return that pointer
"0:\n"
FUNCTION_MARKER
);
}
void nseel_asm_max_end(void) {} // delimiter: end of template
// min() on the x87 stack: compares st(0) with st(1), swaps if needed, and
// pops so that the minimum of the two remains in st(0).
void nseel_asm_min_fp(void)
{
__asm__(
FUNCTION_MARKER
"fcom\n"
"fstsw %ax\n"
"testl $256, %eax\n" // C0: st(0) < st(1) ?
"jz 0f\n"
"fxch\n" // keep the smaller value on top
"0:\n"
"fstp %st(0)\n" // discard the larger value
FUNCTION_MARKER
);
}
void nseel_asm_min_fp_end(void) {} // delimiter: end of template
// max() on the x87 stack: same as nseel_asm_min_fp with the branch sense
// inverted, leaving the maximum of st(0)/st(1) in st(0).
void nseel_asm_max_fp(void)
{
__asm__(
FUNCTION_MARKER
"fcom\n"
"fstsw %ax\n"
"testl $256, %eax\n" // C0: st(0) < st(1) ?
"jnz 0f\n"
"fxch\n" // keep the larger value on top
"0:\n"
"fstp %st(0)\n" // discard the smaller value
FUNCTION_MARKER
);
}
void nseel_asm_max_fp_end(void) {} // delimiter: end of template
// just generic functions left, yay
// Generic glue: call a C function taking (context, parm1, parm2, parm3).
// The context pointer and callee address are patched over the 0xfefefefe
// placeholders; the three parameters arrive in %ecx, %edi and %eax and are
// marshalled into the platform's argument registers (or the 32-bit stack).
void _asm_generic3parm(void)
{
__asm__(
FUNCTION_MARKER
#ifdef TARGET_X64
#ifdef AMD64ABI
"movl %rsi, %r15\n" // preserve rsi (EEL worktable ptr) across the call
"movl %rdi, %rdx\n" // third parameter = parm
"movl $0xfefefefe, %rdi\n" // first parameter= context
"movl %ecx, %rsi\n" // second parameter = parm
"movl %rax, %rcx\n" // fourth parameter = parm
"movl $0xfefefefe, %rax\n" // call function
"call *%rax\n"
"movl %r15, %rsi\n"
#else
"movl %ecx, %edx\n" // second parameter = parm
"movl $0xfefefefe, %ecx\n" // first parameter= context
"movl %rdi, %r8\n" // third parameter = parm
"movl %rax, %r9\n" // fourth parameter = parm
"movl $0xfefefefe, %edi\n" // call function
"subl X64_EXTRA_STACK_SPACE, %rsp\n"
"call *%edi\n"
"addl X64_EXTRA_STACK_SPACE, %rsp\n"
#endif
#else
"movl $0xfefefefe, %edx\n"
"pushl %eax\n" // push parameter
"pushl %edi\n" // push parameter
"movl $0xfefefefe, %edi\n"
"pushl %ecx\n" // push parameter
"pushl %edx\n" // push context pointer
"call *%edi\n"
"addl $16, %esp\n"
#endif
FUNCTION_MARKER
);
}
void _asm_generic3parm_end(void) {} // delimiter: end of template
// Like _asm_generic3parm, but for a callee returning a double: on x64 the
// %xmm0 result is stored to scratch memory and reloaded onto the x87 stack
// (EEL returns values in st(0)); on x86 the cdecl callee returns in st(0)
// already.
void _asm_generic3parm_retd(void)
{
__asm__(
FUNCTION_MARKER
#ifdef TARGET_X64
#ifdef AMD64ABI
"movl %rsi, %r15\n" // preserve rsi across the call
"movl %rdi, %rdx\n" // third parameter = parm
"movl $0xfefefefe, %rdi\n" // first parameter= context
"movl %ecx, %rsi\n" // second parameter = parm
"movl %rax, %rcx\n" // fourth parameter = parm
"movl $0xfefefefe, %rax\n" // call function
"call *%rax\n"
"movl %r15, %rsi\n"
"movq xmm0, (%r15)\n" // spill double result
"fldl (%r15)\n" // ...and reload it into st(0)
#else
"movl %ecx, %edx\n" // second parameter = parm
"movl $0xfefefefe, %ecx\n" // first parameter= context
"movl %rdi, %r8\n" // third parameter = parm
"movl %rax, %r9\n" // fourth parameter = parm
"movl $0xfefefefe, %edi\n" // call function
"subl X64_EXTRA_STACK_SPACE, %rsp\n"
"call *%edi\n"
"addl X64_EXTRA_STACK_SPACE, %rsp\n"
"movq xmm0, (%rsi)\n" // spill double result
"fldl (%rsi)\n" // ...and reload it into st(0)
#endif
#else
"subl $16, %esp\n"
"movl $0xfefefefe, %edx\n" // patched: context pointer
"movl %edi, 8(%esp)\n"
"movl $0xfefefefe, %edi\n" // patched: function address
"movl %eax, 12(%esp)\n"
"movl %ecx, 4(%esp)\n"
"movl %edx, (%esp)\n"
"call *%edi\n"
"addl $16, %esp\n"
#endif
FUNCTION_MARKER
);
}
void _asm_generic3parm_retd_end(void) {} // delimiter: end of template
// Generic glue: call a C function taking (context, parm1, parm2), with the
// parameters arriving in %edi and %eax. Context and callee addresses are
// patched over the 0xfefefefe placeholders.
void _asm_generic2parm(void) // this prob neds to be fixed for ppc
{
__asm__(
FUNCTION_MARKER
#ifdef TARGET_X64
#ifdef AMD64ABI
"movl %rsi, %r15\n" // preserve rsi across the call
"movl %edi, %esi\n" // second parameter = parm
"movl $0xfefefefe, %edi\n" // first parameter= context
"movl %rax, %rdx\n" // third parameter = parm
"movl $0xfefefefe, %rcx\n" // call function
"call *%rcx\n"
"movl %r15, %rsi\n"
#else
"movl $0xfefefefe, %ecx\n" // first parameter= context
"movl %edi, %edx\n" // second parameter = parm
"movl %rax, %r8\n" // third parameter = parm
"movl $0xfefefefe, %edi\n" // call function
"subl X64_EXTRA_STACK_SPACE, %rsp\n"
"call *%edi\n"
"addl X64_EXTRA_STACK_SPACE, %rsp\n"
#endif
#else
"movl $0xfefefefe, %edx\n" // patched: context pointer
"movl $0xfefefefe, %ecx\n" // patched: function address
"subl $4, %esp\n" // keep stack aligned
"pushl %eax\n" // push parameter
"pushl %edi\n" // push parameter
"pushl %edx\n" // push context pointer
"call *%ecx\n"
"addl $16, %esp\n"
#endif
FUNCTION_MARKER
);
}
void _asm_generic2parm_end(void) {} // delimiter: end of template
// Like _asm_generic2parm, but for a callee returning a double: on x64 the
// %xmm0 result is stored to scratch memory and reloaded onto the x87 stack.
void _asm_generic2parm_retd(void)
{
__asm__(
FUNCTION_MARKER
#ifdef TARGET_X64
#ifdef AMD64ABI
"movl %rsi, %r15\n" // preserve rsi across the call
"movl %rdi, %rsi\n" // second parameter = parm
"movl $0xfefefefe, %rdi\n" // first parameter= context
"movl $0xfefefefe, %rcx\n" // call function
"movl %rax, %rdx\n" // third parameter = parm
"call *%rcx\n"
"movl %r15, %rsi\n"
"movq xmm0, (%r15)\n" // spill double result
"fldl (%r15)\n" // ...and reload it into st(0)
#else
"movl %rdi, %rdx\n" // second parameter = parm
"movl $0xfefefefe, %rcx\n" // first parameter= context
"movl $0xfefefefe, %rdi\n" // call function
"movl %rax, %r8\n" // third parameter = parm
"subl X64_EXTRA_STACK_SPACE, %rsp\n"
"call *%edi\n"
"addl X64_EXTRA_STACK_SPACE, %rsp\n"
"movq xmm0, (%rsi)\n" // spill double result
"fldl (%rsi)\n" // ...and reload it into st(0)
#endif
#else
"subl $16, %esp\n"
"movl $0xfefefefe, %edx\n" // patched: context pointer
"movl $0xfefefefe, %ecx\n" // patched: function address
"movl %edx, (%esp)\n"
"movl %edi, 4(%esp)\n"
"movl %eax, 8(%esp)\n"
"call *%ecx\n"
"addl $16, %esp\n"
#endif
FUNCTION_MARKER
);
}
void _asm_generic2parm_retd_end(void) {} // delimiter: end of template
// Generic glue: call a C function taking (context, parm), the parameter
// arriving in %eax. Context and callee addresses are patched over the
// 0xfefefefe placeholders.
void _asm_generic1parm(void)
{
__asm__(
FUNCTION_MARKER
#ifdef TARGET_X64
#ifdef AMD64ABI
"movl $0xfefefefe, %rdi\n" // first parameter= context
"movl %rsi, %r15\n" // preserve rsi across the call
"movl %eax, %rsi\n" // second parameter = parm
"movl $0xfefefefe, %rcx\n" // call function
"call *%rcx\n"
"movl %r15, %rsi\n"
#else
"movl $0xfefefefe, %ecx\n" // first parameter= context
"movl %eax, %edx\n" // second parameter = parm
"movl $0xfefefefe, %edi\n" // call function
"subl X64_EXTRA_STACK_SPACE, %rsp\n"
"call *%edi\n"
"addl X64_EXTRA_STACK_SPACE, %rsp\n"
#endif
#else
"movl $0xfefefefe, %edx\n" // patched: context pointer
"subl $8, %esp\n" // keep stack aligned
"movl $0xfefefefe, %ecx\n" // patched: function address
"pushl %eax\n" // push parameter
"pushl %edx\n" // push context pointer
"call *%ecx\n"
"addl $16, %esp\n"
#endif
FUNCTION_MARKER
);
}
void _asm_generic1parm_end(void) {} // delimiter: end of template
// Like _asm_generic1parm, but for a callee returning a double: on x64 the
// %xmm0 result is stored to scratch memory and reloaded onto the x87 stack.
void _asm_generic1parm_retd(void) // 1 parameter returning double
{
__asm__(
FUNCTION_MARKER
#ifdef TARGET_X64
#ifdef AMD64ABI
"movl $0xfefefefe, %rdi\n" // first parameter = context pointer
"movl $0xfefefefe, %rcx\n" // function address
"movl %rsi, %r15\n" // save rsi
"movl %rax, %rsi\n" // second parameter = parameter
"call *%rcx\n"
"movl %r15, %rsi\n"
"movq xmm0, (%r15)\n" // spill double result
"fldl (%r15)\n" // ...and reload it into st(0)
#else
"movl $0xfefefefe, %ecx\n" // first parameter= context
"movl $0xfefefefe, %edi\n" // call function
"movl %rax, %rdx\n" // second parameter = parm
"subl X64_EXTRA_STACK_SPACE, %rsp\n"
"call *%edi\n"
"addl X64_EXTRA_STACK_SPACE, %rsp\n"
"movq xmm0, (%rsi)\n" // spill double result
"fldl (%rsi)\n" // ...and reload it into st(0)
#endif
#else
"movl $0xfefefefe, %edx\n" // context pointer
"movl $0xfefefefe, %ecx\n" // func-addr
"subl $16, %esp\n"
"movl %eax, 4(%esp)\n" // push parameter
"movl %edx, (%esp)\n" // push context pointer
"call *%ecx\n"
"addl $16, %esp\n"
#endif
FUNCTION_MARKER
);
}
void _asm_generic1parm_retd_end(void) {} // delimiter: end of template
// this gets its own stub because it's pretty crucial for performance :/
// RAM (megabuf) access template, performance critical. Converts st(0) to an
// integer index (after adding the bias value stored at -8 off the base
// pointer), then takes an inline fast path: if the index is within
// NSEEL_RAM_BLOCKS*NSEEL_RAM_ITEMSPERBLOCK and the containing block is
// already allocated, the slot address is computed directly; otherwise the
// patched-in fallback function is called with (ctx, index). The %N operands
// with //REPLACE= comments are rewritten with the real constants by the
// code generator. Result: %eax (or %rax) = address of the EEL_F slot.
void _asm_megabuf(void)
{
__asm__(
FUNCTION_MARKER
#ifdef TARGET_X64
#ifdef AMD64ABI
"fadd" EEL_F_SUFFIX " -8(%r12)\n" // add rounding bias before int conversion
"fistpl (%rsi)\n"
// check if (%rsi) is in range, and buffer available, otherwise call function
"movl (%rsi), %edx\n"
"cmpl %1, %rdx\n" //REPLACE=((NSEEL_RAM_BLOCKS*NSEEL_RAM_ITEMSPERBLOCK))
"jae 0f\n"
"movll %rdx, %rax\n"
"shrll %2, %rax\n" //REPLACE=(NSEEL_RAM_ITEMSPERBLOCK_LOG2 - 3/*log2(sizeof(void *))*/ )
"andll %3, %rax\n" //REPLACE=((NSEEL_RAM_BLOCKS-1)*8 /*sizeof(void*)*/ )
"movll (%r12, %rax), %rax\n" // fetch block pointer from the table at %r12
"testl %rax, %rax\n"
"jnz 1f\n" // block allocated -> fast path
"0:\n"
"movl $0xfefefefe, %rax\n" // patched: fallback function address
"movl %r12, %rdi\n" // set first parm to ctx
"movl %rsi, %r15\n" // save rsi
"movl %rdx, %esi\n" // esi becomes second parameter (edi is first, context pointer)
"call *%rax\n"
"movl %r15, %rsi\n" // restore rsi
"jmp 2f\n"
"1:\n"
"andll %4, %rdx\n" //REPLACE=(NSEEL_RAM_ITEMSPERBLOCK-1)
"shlll $3, %rdx\n" // 3 is log2(sizeof(EEL_F))
"addll %rdx, %rax\n" // rax = block base + slot offset
"2:\n"
#else
"fadd" EEL_F_SUFFIX " -8(%r12)\n" // add rounding bias before int conversion
"fistpl (%rsi)\n"
// check if (%rsi) is in range...
"movl (%rsi), %edi\n"
"cmpl %1, %edi\n" //REPLACE=((NSEEL_RAM_BLOCKS*NSEEL_RAM_ITEMSPERBLOCK))
"jae 0f\n"
"movll %rdi, %rax\n"
"shrll %2, %rax\n" //REPLACE=(NSEEL_RAM_ITEMSPERBLOCK_LOG2 - 3/*log2(sizeof(void *))*/ )
"andll %3, %rax\n" //REPLACE=((NSEEL_RAM_BLOCKS-1)*8 /*sizeof(void*)*/ )
"movll (%r12, %rax), %rax\n" // fetch block pointer from the table at %r12
"testl %rax, %rax\n"
"jnz 1f\n" // block allocated -> fast path
"0:\n"
"movl $0xfefefefe, %rax\n" // function ptr
"movl %r12, %rcx\n" // set first parm to ctx
"movl %rdi, %rdx\n" // rdx is second parameter (rcx is first)
"subl X64_EXTRA_STACK_SPACE, %rsp\n"
"call *%rax\n"
"addl X64_EXTRA_STACK_SPACE, %rsp\n"
"jmp 2f\n"
"1:\n"
"andll %4, %rdi\n" //REPLACE=(NSEEL_RAM_ITEMSPERBLOCK-1)
"shlll $3, %rdi\n" // 3 is log2(sizeof(EEL_F))
"addll %rdi, %rax\n" // rax = block base + slot offset
"2:\n"
#endif
FUNCTION_MARKER
#else
"fadd" EEL_F_SUFFIX " -8(%%ebx)\n" // add rounding bias before int conversion
"fistpl (%%esi)\n"
// check if (%esi) is in range, and buffer available, otherwise call function
"movl (%%esi), %%edi\n"
"cmpl %0, %%edi\n" //REPLACE=((NSEEL_RAM_BLOCKS*NSEEL_RAM_ITEMSPERBLOCK))
"jae 0f\n"
"movl %%edi, %%eax\n"
"shrl %1, %%eax\n" //REPLACE=(NSEEL_RAM_ITEMSPERBLOCK_LOG2 - 2/*log2(sizeof(void *))*/ )
"andl %2, %%eax\n" //REPLACE=((NSEEL_RAM_BLOCKS-1)*4 /*sizeof(void*)*/ )
"movl (%%ebx, %%eax), %%eax\n" // fetch block pointer from the table at %ebx
"testl %%eax, %%eax\n"
"jnz 1f\n" // block allocated -> fast path
"0:\n"
"subl $8, %%esp\n" // keep stack aligned
"movl $0xfefefefe, %%ecx\n" // patched: fallback function address
"pushl %%edi\n" // parameter
"pushl %%ebx\n" // push context pointer
"call *%%ecx\n"
"addl $16, %%esp\n"
"jmp 2f\n"
"1:\n"
"andl %3, %%edi\n" //REPLACE=(NSEEL_RAM_ITEMSPERBLOCK-1)
"shll $3, %%edi\n" // 3 is log2(sizeof(EEL_F))
"addl %%edi, %%eax\n" // eax = block base + slot offset
"2:"
FUNCTION_MARKER
#ifndef _MSC_VER
:: "i" (((NSEEL_RAM_BLOCKS*NSEEL_RAM_ITEMSPERBLOCK))),
"i" ((NSEEL_RAM_ITEMSPERBLOCK_LOG2 - 2/*log2(sizeof(void *))*/ )),
"i" (((NSEEL_RAM_BLOCKS-1)*4 /*sizeof(void*)*/ )),
"i" ((NSEEL_RAM_ITEMSPERBLOCK-1 ))
#endif
#endif
);
}
void _asm_megabuf_end(void) {} // delimiter: end of template
// Global RAM (gmegabuf) access template: converts st(0) to an integer index
// (after adding the bias at -8 off the base pointer) and always calls the
// patched-in function with (context, index). No inline fast path here.
void _asm_gmegabuf(void)
{
__asm__(
FUNCTION_MARKER
#ifdef TARGET_X64
#ifdef AMD64ABI
"movl %rsi, %r15\n" // preserve rsi across the call
"fadd" EEL_F_SUFFIX " -8(%r12)\n" // add rounding bias before int conversion
"movl $0xfefefefe, %rdi\n" // first parameter = context pointer
"fistpl (%rsi)\n"
"movl $0xfefefefe, %edx\n" // patched: function address
"movl (%rsi), %esi\n" // second parameter = index
"call *%rdx\n"
"movl %r15, %rsi\n"
#else
"fadd" EEL_F_SUFFIX " -8(%r12)\n" // add rounding bias before int conversion
"movl $0xfefefefe, %rcx\n" // first parameter = context pointer
"fistpl (%rsi)\n"
"movl $0xfefefefe, %rdi\n" // patched: function address
"movl (%rsi), %edx\n" // second parameter = index
"subl X64_EXTRA_STACK_SPACE, %rsp\n"
"call *%rdi\n"
"addl X64_EXTRA_STACK_SPACE, %rsp\n"
#endif
#else
"subl $16, %esp\n" // keep stack aligned
"movl $0xfefefefe, (%esp)\n" // first parameter = context pointer
"fadd" EEL_F_SUFFIX " -8(%ebx)\n" // add rounding bias before int conversion
"movl $0xfefefefe, %edi\n" // patched: function address
"fistpl 4(%esp)\n" // second parameter = index
"call *%edi\n"
"addl $16, %esp\n"
#endif
FUNCTION_MARKER
);
}
void _asm_gmegabuf_end(void) {} // delimiter: end of template
// Push *(%eax) onto the EEL user stack. The patched %edi/%rdi placeholder is
// the address of the stack-top pointer; the patched AND/OR masks wrap the
// advanced pointer back into the stack's address range.
void nseel_asm_stack_push(void)
{
#ifdef TARGET_X64
__asm__(
FUNCTION_MARKER
"movl $0xfefefefe, %rdi\n" // patched: &stack-top pointer
"movll (%rax), %rcx\n" // value to push
"movll (%rdi), %rax\n"
"addll $8, %rax\n" // advance top by sizeof(EEL_F)
"movl $0xFEFEFEFE, %rdx\n" // patched: wrap mask (AND)
"andll %rdx, %rax\n"
"movl $0xFEFEFEFE, %rdx\n" // patched: base bits (OR)
"orll %rdx, %rax\n"
"movll %rcx, (%rax)\n" // store the value at the new top
"movll %rax, (%rdi)\n" // write back the new top pointer
FUNCTION_MARKER
);
#else
__asm__(
FUNCTION_MARKER
"movl $0xfefefefe, %edi\n" // patched: &stack-top pointer
"movl (%eax), %ecx\n" // value to push (low dword)
"movl 4(%eax), %edx\n" // value to push (high dword)
"movl (%edi), %eax\n"
"addl $8, %eax\n" // advance top by sizeof(EEL_F)
"andl $0xfefefefe, %eax\n" // patched: wrap mask
"orl $0xfefefefe, %eax\n" // patched: base bits
"movl %ecx, (%eax)\n"
"movl %edx, 4(%eax)\n"
"movl %eax, (%edi)\n" // write back the new top pointer
FUNCTION_MARKER
);
#endif
}
void nseel_asm_stack_push_end(void) {} // delimiter: end of template
// Pop the top of the EEL user stack into *(%eax), retreating the stack-top
// pointer by 8 and wrapping it with the patched AND/OR masks.
void nseel_asm_stack_pop(void)
{
#ifdef TARGET_X64
__asm__(
FUNCTION_MARKER
"movl $0xfefefefe, %rdi\n" // patched: &stack-top pointer
"movll (%rdi), %rcx\n"
"movq (%rcx), %xmm0\n" // read the value at the current top
"subll $8, %rcx\n" // retreat top by sizeof(EEL_F)
"movl $0xFEFEFEFE, %rdx\n" // patched: wrap mask (AND)
"andll %rdx, %rcx\n"
"movl $0xFEFEFEFE, %rdx\n" // patched: base bits (OR)
"orll %rdx, %rcx\n"
"movll %rcx, (%rdi)\n" // write back the new top pointer
"movq %xmm0, (%eax)\n" // store popped value at destination
FUNCTION_MARKER
);
#else
__asm__(
FUNCTION_MARKER
"movl $0xfefefefe, %edi\n" // patched: &stack-top pointer
"movl (%edi), %ecx\n"
"fld" EEL_F_SUFFIX " (%ecx)\n" // read the value at the current top
"subl $8, %ecx\n" // retreat top by sizeof(EEL_F)
"andl $0xfefefefe, %ecx\n" // patched: wrap mask
"orl $0xfefefefe, %ecx\n" // patched: base bits
"movl %ecx, (%edi)\n" // write back the new top pointer
"fstp" EEL_F_SUFFIX " (%eax)\n" // store popped value at destination
FUNCTION_MARKER
);
#endif
}
void nseel_asm_stack_pop_end(void) {} // delimiter: end of template
// Fast pop: returns the old top-of-stack address in %eax/%rax (no value
// copy) and retreats the stack-top pointer with the patched wrap masks.
void nseel_asm_stack_pop_fast(void)
{
#ifdef TARGET_X64
__asm__(
FUNCTION_MARKER
"movl $0xfefefefe, %rdi\n" // patched: &stack-top pointer
"movll (%rdi), %rcx\n"
"movll %rcx, %rax\n" // result = old top address
"subll $8, %rcx\n" // retreat top by sizeof(EEL_F)
"movl $0xFEFEFEFE, %rdx\n" // patched: wrap mask (AND)
"andll %rdx, %rcx\n"
"movl $0xFEFEFEFE, %rdx\n" // patched: base bits (OR)
"orll %rdx, %rcx\n"
"movll %rcx, (%rdi)\n" // write back the new top pointer
FUNCTION_MARKER
);
#else
__asm__(
FUNCTION_MARKER
"movl $0xfefefefe, %edi\n" // patched: &stack-top pointer
"movl (%edi), %ecx\n"
"movl %ecx, %eax\n" // result = old top address
"subl $8, %ecx\n" // retreat top by sizeof(EEL_F)
"andl $0xfefefefe, %ecx\n" // patched: wrap mask
"orl $0xfefefefe, %ecx\n" // patched: base bits
"movl %ecx, (%edi)\n" // write back the new top pointer
FUNCTION_MARKER
);
#endif
}
void nseel_asm_stack_pop_fast_end(void) {} // delimiter: end of template
// Peek at a fixed offset below the stack top: %eax = (top - patched byte
// offset), wrapped into the stack range by the patched AND/OR masks. The
// stack-top pointer itself is not modified.
void nseel_asm_stack_peek_int(void)
{
#ifdef TARGET_X64
__asm__(
FUNCTION_MARKER
"movll $0xfefefefe, %rdi\n" // patched: &stack-top pointer
"movll (%rdi), %rax\n"
"movl $0xfefefefe, %rdx\n" // patched: byte offset below top
"subll %rdx, %rax\n"
"movl $0xFEFEFEFE, %rdx\n" // patched: wrap mask (AND)
"andll %rdx, %rax\n"
"movl $0xFEFEFEFE, %rdx\n" // patched: base bits (OR)
"orll %rdx, %rax\n"
FUNCTION_MARKER
);
#else
__asm__(
FUNCTION_MARKER
"movl $0xfefefefe, %edi\n" // patched: &stack-top pointer
"movl (%edi), %eax\n"
"movl $0xfefefefe, %edx\n" // patched: byte offset below top
"subl %edx, %eax\n"
"andl $0xfefefefe, %eax\n" // patched: wrap mask
"orl $0xfefefefe, %eax\n" // patched: base bits
FUNCTION_MARKER
);
#endif
}
void nseel_asm_stack_peek_int_end(void) {} // delimiter: end of template
// Peek at a dynamic index below the stack top: converts st(0) to an integer,
// scales it by sizeof(EEL_F) (shl 3), subtracts from the top pointer and
// wraps with the patched masks. Result address in %eax/%rax; the stack-top
// pointer is not modified.
void nseel_asm_stack_peek(void)
{
#ifdef TARGET_X64
__asm__(
FUNCTION_MARKER
"movll $0xfefefefe, %rdi\n" // patched: &stack-top pointer
"fistpl (%rsi)\n" // index = (int)st(0)
"movll (%rdi), %rax\n"
"movll (%rsi), %rdx\n"
"shll $3, %rdx\n" // log2(sizeof(EEL_F))
"subl %rdx, %rax\n"
"movl $0xFEFEFEFE, %rdx\n" // patched: wrap mask (AND)
"andll %rdx, %rax\n"
"movl $0xFEFEFEFE, %rdx\n" // patched: base bits (OR)
"orll %rdx, %rax\n"
FUNCTION_MARKER
);
#else
__asm__(
FUNCTION_MARKER
"movl $0xfefefefe, %edi\n" // patched: &stack-top pointer
"fistpl (%esi)\n" // index = (int)st(0)
"movl (%edi), %eax\n"
"movl (%esi), %edx\n"
"shll $3, %edx\n" // log2(sizeof(EEL_F))
"subl %edx, %eax\n"
"andl $0xfefefefe, %eax\n" // patched: wrap mask
"orl $0xfefefefe, %eax\n" // patched: base bits
FUNCTION_MARKER
);
#endif
}
void nseel_asm_stack_peek_end(void) {} // delimiter: end of template
// Return the current top-of-stack address in %eax/%rax without modifying
// the stack-top pointer.
void nseel_asm_stack_peek_top(void)
{
#ifdef TARGET_X64
__asm__(
FUNCTION_MARKER
"movll $0xfefefefe, %rdi\n" // patched: &stack-top pointer
"movll (%rdi), %rax\n"
FUNCTION_MARKER
);
#else
__asm__(
FUNCTION_MARKER
"movl $0xfefefefe, %edi\n" // patched: &stack-top pointer
"movl (%edi), %eax\n"
FUNCTION_MARKER
);
#endif
}
void nseel_asm_stack_peek_top_end(void) {} // delimiter: end of template
// Exchange the value at the top of the EEL user stack with the value at
// (%eax). The stack-top pointer is not modified.
void nseel_asm_stack_exch(void)
{
#ifdef TARGET_X64
__asm__(
FUNCTION_MARKER
"movll $0xfefefefe, %rdi\n" // patched: &stack-top pointer
"movll (%rdi), %rcx\n"
"movq (%rcx), %xmm0\n" // xmm0 = *top
"movq (%rax), %xmm1\n" // xmm1 = *eax
"movq %xmm0, (%rax)\n"
"movq %xmm1, (%rcx)\n"
FUNCTION_MARKER
);
#else
__asm__(
FUNCTION_MARKER
"movl $0xfefefefe, %edi\n" // patched: &stack-top pointer
"movl (%edi), %ecx\n"
"fld" EEL_F_SUFFIX " (%ecx)\n" // st(1) = *top
"fld" EEL_F_SUFFIX " (%eax)\n" // st(0) = *eax
"fstp" EEL_F_SUFFIX " (%ecx)\n" // *top = old *eax
"fstp" EEL_F_SUFFIX " (%eax)\n" // *eax = old *top
FUNCTION_MARKER
);
#endif
}
void nseel_asm_stack_exch_end(void) {} // delimiter: end of template
  1736. #ifdef TARGET_X64
// Entry trampoline from C into generated EEL code (x64). Optionally saves
// and reprograms the x87 control word (precision, truncation, all
// exceptions masked), preserves all callee-saved GPRs, loads the ram-blocks
// table pointer (second C argument) into %r12 — the base register the
// generated code expects — then calls the code pointer (first C argument).
// Clears pending FPU exceptions with fclex before restoring state.
void eel_callcode64()
{
__asm__(
#ifndef EEL_X64_NO_CHANGE_FPFLAGS
"subl $16, %rsp\n"
"fnstcw (%rsp)\n" // save caller's FP control word
"mov (%rsp), %ax\n"
"or $0xE3F, %ax\n" // 53 or 64 bit precision, trunc, and masking all exceptions
"mov %ax, 4(%rsp)\n"
"fldcw 4(%rsp)\n" // activate the modified control word
#endif
"push %rbx\n"
"push %rbp\n"
"push %r12\n"
"push %r13\n"
"push %r14\n"
"push %r15\n"
#ifdef AMD64ABI
"movll %rsi, %r12\n" // second parameter is ram-blocks pointer
"call %rdi\n" // first parameter is the code to run
#else
"push %rdi\n" // rdi/rsi are callee-saved on Win64
"push %rsi\n"
"movll %rdx, %r12\n" // second parameter is ram-blocks pointer
"call %rcx\n" // first parameter is the code to run
"pop %rsi\n"
"pop %rdi\n"
#endif
"fclex\n" // clear any pending FPU exceptions
"pop %r15\n"
"pop %r14\n"
"pop %r13\n"
"pop %r12\n"
"pop %rbp\n"
"pop %rbx\n"
#ifndef EEL_X64_NO_CHANGE_FPFLAGS
"fldcw (%rsp)\n" // restore caller's FP control word
"addl $16, %rsp\n"
#endif
"ret\n"
);
}
// Like eel_callcode64 but without touching the FP control word or clearing
// FPU exceptions — the caller is expected to have set FP state already
// (see eel_enterfp/eel_leavefp).
void eel_callcode64_fast()
{
__asm__(
"push %rbx\n"
"push %rbp\n"
"push %r12\n"
"push %r13\n"
"push %r14\n"
"push %r15\n"
#ifdef AMD64ABI
"movll %rsi, %r12\n" // second parameter is ram-blocks pointer
"call %rdi\n" // first parameter is the code to run
#else
"push %rdi\n" // rdi/rsi are callee-saved on Win64
"push %rsi\n"
"movll %rdx, %r12\n" // second parameter is ram-blocks pointer
"call %rcx\n" // first parameter is the code to run
"pop %rsi\n"
"pop %rdi\n"
#endif
"pop %r15\n"
"pop %r14\n"
"pop %r13\n"
"pop %r12\n"
"pop %rbp\n"
"pop %rbx\n"
"ret\n"
);
}
// Set the x87 rounding-control bits to round-to-nearest (clears RC bits
// 10-11 of the control word). No-op if EEL_X64_NO_CHANGE_FPFLAGS is defined.
void eel_setfp_round()
{
__asm__(
#ifndef EEL_X64_NO_CHANGE_FPFLAGS
"subl $16, %rsp\n"
"fnstcw (%rsp)\n" // read current control word
"mov (%rsp), %ax\n"
"and $0xF3FF, %ax\n" // set round to nearest
"mov %ax, 4(%rsp)\n"
"fldcw 4(%rsp)\n" // load modified control word
"addl $16, %rsp\n"
#endif
"ret\n"
);
}
// Set the x87 rounding-control bits to truncate (sets RC bits 10-11 of the
// control word). No-op if EEL_X64_NO_CHANGE_FPFLAGS is defined.
void eel_setfp_trunc()
{
__asm__(
#ifndef EEL_X64_NO_CHANGE_FPFLAGS
"subl $16, %rsp\n"
"fnstcw (%rsp)\n" // read current control word
"mov (%rsp), %ax\n"
"or $0xC00, %ax\n" // set to truncate
"mov %ax, 4(%rsp)\n"
"fldcw 4(%rsp)\n" // load modified control word
"addl $16, %rsp\n"
#endif
"ret\n"
);
}
// Save the current x87 control word into s[0] and activate the EEL control
// word (precision/truncation, all exceptions masked), storing the modified
// value in s[1]. The pointer arrives in %rdi (SysV) or %rcx (Win64).
// Pair with eel_leavefp to restore.
void eel_enterfp(int s[2])
{
__asm__(
#ifdef AMD64ABI
"fnstcw (%rdi)\n" // s[0] = caller's control word
"mov (%rdi), %ax\n"
"or $0xE3F, %ax\n" // 53 or 64 bit precision, trunc, and masking all exceptions
"mov %ax, 4(%rdi)\n" // s[1] = modified control word
"fldcw 4(%rdi)\n"
#else
"fnstcw (%rcx)\n" // s[0] = caller's control word
"mov (%rcx), %ax\n"
"or $0xE3F, %ax\n" // 53 or 64 bit precision, trunc, and masking all exceptions
"mov %ax, 4(%rcx)\n" // s[1] = modified control word
"fldcw 4(%rcx)\n"
#endif
"ret\n"
);
}
  1857. void eel_leavefp(int s[2])
  1858. {
  1859. __asm__(
  1860. #ifdef AMD64ABI
  1861. "fldcw (%rdi)\n"
  1862. #else
  1863. "fldcw (%rcx)\n"
  1864. #endif
  1865. "ret\n";
  1866. );
  1867. }
  1868. #endif