12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153 |
- /* note: only EEL_F_SIZE=8 is now supported (no float EEL_F's) */
- #ifndef AMD64ABI
- #define X64_EXTRA_STACK_SPACE 32 // win32 requires allocating space for 4 parameters at 8 bytes each, even though we pass via register
- #endif
- // Template: call a C function taking one double and returning a double.
- // The argument is popped from the x87 stack; the result is left on it.
- // NOTE(review): the 0xfefefefe immediates throughout this file are
- // placeholders, presumably patched with real addresses when this code is
- // copied at JIT time -- TODO confirm against the code generator.  The
- // 32-bit mnemonics used on 64-bit registers (e.g. "movl %rsi, %r15")
- // suggest a build-time rewrite of this asm for x64 -- confirm in build.
- void nseel_asm_1pdd(void)
- {
- __asm__(
- FUNCTION_MARKER
- 
- "movl $0xfefefefe, %edi\n"
- #ifdef TARGET_X64
- // x64: spill st(0) to worktable memory, reload into xmm0 for the ABI
- "fstpl (%rsi)\n"
- "movq (%rsi), %xmm0\n"
- #ifdef AMD64ABI
- "movl %rsi, %r15\n"
- "call *%edi\n"
- "movl %r15, %rsi\n"  // restore worktable pointer clobbered by the callee
- #else
- "subl X64_EXTRA_STACK_SPACE, %rsp\n"  // win64 shadow space
- "call *%edi\n"
- "addl X64_EXTRA_STACK_SPACE, %rsp\n"
- #endif
- // move the xmm0 return value back onto the x87 stack
- "movq xmm0, (%rsi)\n"
- "fldl (%rsi)\n"
- #else
- // x86: pass the double on the stack (16 bytes keeps alignment)
- "subl $16, %esp\n"
- "fstpl (%esp)\n"
- "call *%edi\n"
- "addl $16, %esp\n"
- #endif
- FUNCTION_MARKER
- 
- );
- }
- void nseel_asm_1pdd_end(void){}
- // Template: call a C function taking two doubles (both popped from the
- // x87 stack) and returning a double, which is left on the x87 stack.
- void nseel_asm_2pdd(void)
- {
- __asm__(
- FUNCTION_MARKER
- 
- "movl $0xfefefefe, %edi\n"  // placeholder: patched function address
- #ifdef TARGET_X64
- // spill both operands, load into xmm1/xmm0 per the x64 ABI
- "fstpl 8(%rsi)\n"
- "fstpl (%rsi)\n"
- "movq 8(%rsi), %xmm1\n"
- "movq (%rsi), %xmm0\n"
- #ifdef AMD64ABI
- "movl %rsi, %r15\n"
- "call *%edi\n"
- "movl %r15, %rsi\n"  // restore worktable pointer
- #else
- "subl X64_EXTRA_STACK_SPACE, %rsp\n"
- "call *%edi\n"
- "addl X64_EXTRA_STACK_SPACE, %rsp\n"
- #endif
- "movq xmm0, (%rsi)\n"  // return value back onto the x87 stack
- "fldl (%rsi)\n"
- #else
- // x86: both doubles passed on the stack
- "subl $16, %esp\n"
- "fstpl 8(%esp)\n"
- "fstpl (%esp)\n"
- "call *%edi\n"
- "addl $16, %esp\n"
- #endif
- 
- FUNCTION_MARKER
- );
- }
- void nseel_asm_2pdd_end(void){}
- // Template: two-operand op via C function, storing the double result back
- // to the destination variable at (%edi) and returning that pointer in eax.
- // The stored result is then flushed to zero if its exponent indicates a
- // denormal/near-zero value (see comment below).
- void nseel_asm_2pdds(void)
- {
- __asm__(
- FUNCTION_MARKER
- 
- "movl $0xfefefefe, %eax\n"  // placeholder: patched function address
- #ifdef TARGET_X64
- "fstpl (%rsi)\n"
- "movq (%rdi), %xmm0\n"  // first parameter: destination's current value
- "movq (%rsi), %xmm1\n"  // second parameter: popped x87 operand
- #ifdef AMD64ABI
- "movl %rsi, %r15\n"
- "movl %rdi, %r14\n"
- "call *%eax\n"
- "movl %r14, %rdi\n" /* restore thrashed rdi */
- "movl %r15, %rsi\n"
- "movl %r14, %rax\n" /* set return value */
- "movq xmm0, (%r14)\n"
- #else
- "subl X64_EXTRA_STACK_SPACE, %rsp\n"
- "call *%eax\n"
- "movq xmm0, (%edi)\n"
- "movl %edi, %eax\n" /* set return value */
- "addl X64_EXTRA_STACK_SPACE, %rsp\n"
- #endif
- #else
- "subl $8, %esp\n"
- "fstpl (%esp)\n"
- "pushl 4(%edi)\n" /* push parameter */
- "pushl (%edi)\n" /* push the rest of the parameter */
- "call *%eax\n"
- "addl $16, %esp\n"
- "fstpl (%edi)\n" /* store result */
- "movl %edi, %eax\n" /* set return value */
- #endif
- // denormal-fix result (this is only currently used for pow_op, so we want this!)
- // bias the IEEE-754 exponent by one and mask: values whose biased exponent
- // lands at or below 0x00200000 are (near-)denormal, so zero them out
- "movl 4(%edi), %edx\n"
- "addl $0x00100000, %edx\n"
- "andl $0x7FF00000, %edx\n"
- "cmpl $0x00200000, %edx\n"
- "jg 0f\n"
- "subl %edx, %edx\n"
- #ifdef TARGET_X64
- "movll %rdx, (%rdi)\n"
- #else
- "movl %edx, (%edi)\n"
- "movl %edx, 4(%edi)\n"
- #endif
- "0:\n"
- FUNCTION_MARKER
- 
- );
- }
- void nseel_asm_2pdds_end(void){}
- //---------------------------------------------------------------------------------------------------------------
- // do nothing, eh
- // Intentionally empty template: emits only the function markers.
- void nseel_asm_exec2(void)
- {
- __asm__(
- FUNCTION_MARKER
- ""
- FUNCTION_MARKER
- );
- }
- void nseel_asm_exec2_end(void) { }
- // Fast inverse square root: the classic 0x5f3759df integer bit-hack on a
- // single-precision spill, followed by one Newton-Raphson refinement step
- // using two constants loaded from patched addresses (presumably 0.5*x and
- // 1.5 -- TODO confirm the patched operands against the code generator).
- void nseel_asm_invsqrt(void)
- {
- __asm__(
- FUNCTION_MARKER
- "movl $0x5f3759df, %edx\n"  // magic initial-estimate constant
- "fsts (%esi)\n"             // spill st(0) as float for the bit trick
- #ifdef TARGET_X64
- "movl 0xfefefefe, %rax\n"
- "fmul" EEL_F_SUFFIX " (%rax)\n"
- "movsxl (%esi), %rcx\n"
- #else
- "fmul" EEL_F_SUFFIX " (0xfefefefe)\n"
- "movl (%esi), %ecx\n"
- #endif
- "sarl $1, %ecx\n"           // estimate = magic - (bits >> 1)
- "subl %ecx, %edx\n"
- "movl %edx, (%esi)\n"
- "fmuls (%esi)\n"            // Newton step: y * (1.5 - 0.5*x*y*y)
- "fmuls (%esi)\n"
- #ifdef TARGET_X64
- "movl 0xfefefefe, %rax\n"
- "fadd" EEL_F_SUFFIX " (%rax)\n"
- #else
- "fadd" EEL_F_SUFFIX " (0xfefefefe)\n"
- #endif
- "fmuls (%esi)\n"
- FUNCTION_MARKER
- );
- }
- void nseel_asm_invsqrt_end(void) {}
- // Debug aid: discard st(0) and replace it with the current CPU stack
- // pointer (stored to the worktable, then reloaded as an integer).
- void nseel_asm_dbg_getstackptr(void)
- {
- __asm__(
- FUNCTION_MARKER
- #ifdef __clang__
- "ffree %st(0)\n"   // clang's integrated assembler prefers ffree here
- #else
- "fstpl %st(0)\n"
- #endif
- "movl %esp, (%esi)\n"
- "fildl (%esi)\n"
- FUNCTION_MARKER
- );
- }
- void nseel_asm_dbg_getstackptr_end(void) {}
- //---------------------------------------------------------------------------------------------------------------
- // st(0) = sin(st(0))
- void nseel_asm_sin(void)
- {
- __asm__(
- FUNCTION_MARKER
- "fsin\n"
- FUNCTION_MARKER
- );
- }
- void nseel_asm_sin_end(void) {}
- //---------------------------------------------------------------------------------------------------------------
- // st(0) = cos(st(0))
- void nseel_asm_cos(void)
- {
- __asm__(
- FUNCTION_MARKER
- "fcos\n"
- FUNCTION_MARKER
- );
- }
- void nseel_asm_cos_end(void) {}
- //---------------------------------------------------------------------------------------------------------------
- // st(0) = tan(st(0)); fptan pushes a 1.0 on top, which is discarded.
- void nseel_asm_tan(void)
- {
- __asm__(
- FUNCTION_MARKER
- "fptan\n"
- "fstp %st(0)\n"
- FUNCTION_MARKER
- );
- }
- void nseel_asm_tan_end(void) {}
- //---------------------------------------------------------------------------------------------------------------
- // st(0) = st(0) * st(0)
- void nseel_asm_sqr(void)
- {
- __asm__(
- FUNCTION_MARKER
- "fmul %st(0), %st(0)\n"
- FUNCTION_MARKER
- );
- }
- void nseel_asm_sqr_end(void) {}
- //---------------------------------------------------------------------------------------------------------------
- // st(0) = sqrt(|st(0)|); fabs first so negative inputs don't yield NaN.
- void nseel_asm_sqrt(void)
- {
- __asm__(
- FUNCTION_MARKER
- "fabs\n"
- "fsqrt\n"
- FUNCTION_MARKER
- );
- }
- void nseel_asm_sqrt_end(void) {}
- //---------------------------------------------------------------------------------------------------------------
- // st(0) = ln(st(0)): fyl2x computes ln(2) * log2(x).
- void nseel_asm_log(void)
- {
- __asm__(
- FUNCTION_MARKER
- "fldln2\n"
- "fxch\n"
- "fyl2x\n"
- FUNCTION_MARKER
- );
- }
- void nseel_asm_log_end(void) {}
- //---------------------------------------------------------------------------------------------------------------
- // st(0) = log10(st(0)): fyl2x computes log10(2) * log2(x).
- void nseel_asm_log10(void)
- {
- __asm__(
- FUNCTION_MARKER
- "fldlg2\n"
- "fxch\n"
- "fyl2x\n"
- 
- FUNCTION_MARKER
- );
- }
- void nseel_asm_log10_end(void) {}
- //---------------------------------------------------------------------------------------------------------------
- // st(0) = |st(0)|
- void nseel_asm_abs(void)
- {
- __asm__(
- FUNCTION_MARKER
- "fabs\n"
- FUNCTION_MARKER
- );
- }
- void nseel_asm_abs_end(void) {}
- //---------------------------------------------------------------------------------------------------------------
- // Assignment: copy the 8-byte value at (%eax) to (%edi), flushing
- // denormal/near-zero values to 0 via the exponent-bias test; the
- // destination pointer is returned in eax.
- void nseel_asm_assign(void)
- {
- #ifdef TARGET_X64
- __asm__(
- FUNCTION_MARKER
- "movll (%rax), %rdx\n"
- "movll %rdx, %rcx\n"
- "shrl $32, %rdx\n"      // isolate the high dword (sign/exponent)
- "addl $0x00100000, %edx\n"
- "andl $0x7FF00000, %edx\n"
- "cmpl $0x00200000, %edx\n"
- "movll %rdi, %rax\n"    // return destination pointer
- "jg 0f\n"
- "subl %ecx, %ecx\n"     // denormal: store zero instead
- "0:\n"
- "movll %rcx, (%edi)\n"
- FUNCTION_MARKER
- );
- #else
- __asm__(
- FUNCTION_MARKER
- "movl (%eax), %ecx\n"
- "movl 4(%eax), %edx\n"
- "movl %edx, %eax\n"
- "addl $0x00100000, %eax\n" // if exponent is zero, make exponent 0x7ff, if 7ff, make 7fe
- "andl $0x7ff00000, %eax\n"
- "cmpl $0x00200000, %eax\n"
- "jg 0f\n"
- "subl %ecx, %ecx\n"     // denormal: store zero instead
- "subl %edx, %edx\n"
- "0:\n"
- "movl %edi, %eax\n"     // return destination pointer
- "movl %ecx, (%edi)\n"
- "movl %edx, 4(%edi)\n"
- FUNCTION_MARKER
- );
- #endif
- }
- void nseel_asm_assign_end(void) {}
- //---------------------------------------------------------------------------------------------------------------
- // Assignment from the FPU: pop st(0) into (%edi) with denormal
- // flush-to-zero; destination pointer returned in eax.
- void nseel_asm_assign_fromfp(void)
- {
- __asm__(
- FUNCTION_MARKER
- "fstpl (%edi)\n"
- "movl 4(%edi), %edx\n"      // high dword: sign/exponent
- "addl $0x00100000, %edx\n"
- "andl $0x7FF00000, %edx\n"
- "cmpl $0x00200000, %edx\n"
- "movl %edi, %eax\n"         // return destination pointer
- "jg 0f\n"
- "subl %edx, %edx\n"         // denormal: overwrite with zero
- #ifdef TARGET_X64
- "movll %rdx, (%rdi)\n"
- #else
- "movl %edx, (%edi)\n"
- "movl %edx, 4(%edi)\n"
- #endif
- "0:\n"
- FUNCTION_MARKER
- );
- }
- void nseel_asm_assign_fromfp_end(void) {}
- //---------------------------------------------------------------------------------------------------------------
- // Fast assignment from the FPU: pop st(0) into (%edi) with no denormal
- // fix; destination pointer returned in eax.
- void nseel_asm_assign_fast_fromfp(void)
- {
- __asm__(
- FUNCTION_MARKER
- "movl %edi, %eax\n"
- "fstpl (%edi)\n"
- FUNCTION_MARKER
- );
- }
- void nseel_asm_assign_fast_fromfp_end(void) {}
- //---------------------------------------------------------------------------------------------------------------
- // Fast assignment: raw 8-byte copy from (%eax) to (%edi) with no
- // denormal fix; destination pointer returned in eax.
- void nseel_asm_assign_fast(void)
- {
- #ifdef TARGET_X64
- __asm__(
- FUNCTION_MARKER
- "movll (%rax), %rdx\n"
- "movll %rdx, (%edi)\n"
- "movll %rdi, %rax\n"
- FUNCTION_MARKER
- );
- #else
- __asm__(
- FUNCTION_MARKER
- "movl (%eax), %ecx\n"
- "movl %ecx, (%edi)\n"
- "movl 4(%eax), %ecx\n"
- "movl %edi, %eax\n"
- "movl %ecx, 4(%edi)\n"
- FUNCTION_MARKER
- );
- #endif
- }
- void nseel_asm_assign_fast_end(void) {}
- //---------------------------------------------------------------------------------------------------------------
- // st(1) += st(0), pop: same instruction, spelled per-assembler (clang's
- // integrated assembler wants the explicit faddp form).
- void nseel_asm_add(void)
- {
- __asm__(
- FUNCTION_MARKER
- #ifdef __clang__
- "faddp %st(1)\n"
- #else
- "fadd\n"
- #endif
- FUNCTION_MARKER
- );
- }
- void nseel_asm_add_end(void) {}
- // In-place add: *edi += st(0) (popped), with denormal flush-to-zero on
- // the stored result; destination pointer returned in eax.
- void nseel_asm_add_op(void)
- {
- __asm__(
- FUNCTION_MARKER
- "fadd" EEL_F_SUFFIX " (%edi)\n"
- "movl %edi, %eax\n"
- "fstp" EEL_F_SUFFIX " (%edi)\n"
- // denormal check on the stored result's exponent
- "movl 4(%edi), %edx\n"
- "addl $0x00100000, %edx\n"
- "andl $0x7FF00000, %edx\n"
- "cmpl $0x00200000, %edx\n"
- "jg 0f\n"
- "subl %edx, %edx\n"
- #ifdef TARGET_X64
- "movll %rdx, (%rdi)\n"
- #else
- "movl %edx, (%edi)\n"
- "movl %edx, 4(%edi)\n"
- #endif
- "0:\n"
- FUNCTION_MARKER
- );
- }
- void nseel_asm_add_op_end(void) {}
- // In-place add, fast variant: *edi += st(0) (popped), no denormal fix.
- void nseel_asm_add_op_fast(void)
- {
- __asm__(
- FUNCTION_MARKER
- "fadd" EEL_F_SUFFIX " (%edi)\n"
- "movl %edi, %eax\n"
- "fstp" EEL_F_SUFFIX " (%edi)\n"
- FUNCTION_MARKER
- );
- }
- void nseel_asm_add_op_fast_end(void) {}
- //---------------------------------------------------------------------------------------------------------------
- // st(1) = st(1) - st(0), pop.  The mnemonic differs per assembler because
- // GNU as historically swapped fsub/fsubr for the no-operand forms.
- void nseel_asm_sub(void)
- {
- __asm__(
- FUNCTION_MARKER
- #ifdef __clang__
- "fsubrp %st(0), %st(1)\n"
- #else
- #ifdef __GNUC__
- #ifdef __INTEL_COMPILER
- "fsub\n"
- #else
- "fsubr\n" // gnuc has fsub/fsubr backwards, ack
- #endif
- #else
- "fsub\n"
- #endif
- #endif
- FUNCTION_MARKER
- );
- }
- void nseel_asm_sub_end(void) {}
- // In-place subtract: *edi -= st(0) (popped), with denormal flush-to-zero;
- // destination pointer returned in eax.
- void nseel_asm_sub_op(void)
- {
- __asm__(
- FUNCTION_MARKER
- "fsubr" EEL_F_SUFFIX " (%edi)\n"
- "movl %edi, %eax\n"
- "fstp" EEL_F_SUFFIX " (%edi)\n"
- // denormal check on the stored result's exponent
- "movl 4(%edi), %edx\n"
- "addl $0x00100000, %edx\n"
- "andl $0x7FF00000, %edx\n"
- "cmpl $0x00200000, %edx\n"
- "jg 0f\n"
- "subl %edx, %edx\n"
- #ifdef TARGET_X64
- "movll %rdx, (%rdi)\n"
- #else
- "movl %edx, (%edi)\n"
- "movl %edx, 4(%edi)\n"
- #endif
- "0:\n"
- FUNCTION_MARKER
- );
- }
- void nseel_asm_sub_op_end(void) {}
- // In-place subtract, fast variant: *edi -= st(0) (popped), no denormal fix.
- void nseel_asm_sub_op_fast(void)
- {
- __asm__(
- FUNCTION_MARKER
- "fsubr" EEL_F_SUFFIX " (%edi)\n"
- "movl %edi, %eax\n"
- "fstp" EEL_F_SUFFIX " (%edi)\n"
- FUNCTION_MARKER
- );
- }
- void nseel_asm_sub_op_fast_end(void) {}
- //---------------------------------------------------------------------------------------------------------------
- // st(1) *= st(0), pop (clang spelling differs, same operation).
- void nseel_asm_mul(void)
- {
- __asm__(
- FUNCTION_MARKER
- #ifdef __clang__
- "fmulp %st(0), %st(1)\n"
- #else
- "fmul\n"
- #endif
- FUNCTION_MARKER
- );
- }
- void nseel_asm_mul_end(void) {}
- // In-place multiply: *edi *= st(0) (popped), with denormal flush-to-zero;
- // destination pointer returned in eax.
- void nseel_asm_mul_op(void)
- {
- __asm__(
- FUNCTION_MARKER
- "fmul" EEL_F_SUFFIX " (%edi)\n"
- "movl %edi, %eax\n"
- "fstp" EEL_F_SUFFIX " (%edi)\n"
- // denormal check on the stored result's exponent
- "movl 4(%edi), %edx\n"
- "addl $0x00100000, %edx\n"
- "andl $0x7FF00000, %edx\n"
- "cmpl $0x00200000, %edx\n"
- "jg 0f\n"
- "subl %edx, %edx\n"
- #ifdef TARGET_X64
- "movll %rdx, (%rdi)\n"
- #else
- "movl %edx, (%edi)\n"
- "movl %edx, 4(%edi)\n"
- #endif
- "0:\n"
- FUNCTION_MARKER
- );
- }
- void nseel_asm_mul_op_end(void) {}
- // In-place multiply, fast variant: *edi *= st(0) (popped), no denormal fix.
- void nseel_asm_mul_op_fast(void)
- {
- __asm__(
- FUNCTION_MARKER
- "fmul" EEL_F_SUFFIX " (%edi)\n"
- "movl %edi, %eax\n"
- "fstp" EEL_F_SUFFIX " (%edi)\n"
- FUNCTION_MARKER
- );
- }
- void nseel_asm_mul_op_fast_end(void) {}
- //---------------------------------------------------------------------------------------------------------------
- // st(1) /= st(0), pop.  Spelled per-assembler due to the historical
- // fdiv/fdivr swap in GNU as no-operand forms.
- void nseel_asm_div(void)
- {
- __asm__(
- FUNCTION_MARKER
- #ifdef __clang__
- "fdivrp %st(1)\n"
- #else
- #ifdef __GNUC__
- #ifdef __INTEL_COMPILER
- "fdiv\n"
- #else
- "fdivr\n" // gcc inline asm seems to have fdiv/fdivr backwards
- #endif
- #else
- "fdiv\n"
- #endif
- #endif
- FUNCTION_MARKER
- );
- }
- void nseel_asm_div_end(void) {}
- // In-place divide: *edi /= st(0) (popped), with denormal flush-to-zero;
- // destination pointer returned in eax.  Division mnemonic varies per
- // assembler (see nseel_asm_div).
- void nseel_asm_div_op(void)
- {
- __asm__(
- FUNCTION_MARKER
- "fld" EEL_F_SUFFIX " (%edi)\n"
- #ifdef __clang__
- "fdivp %st(1)\n"
- #else
- #ifndef __GNUC__
- "fdivr\n"
- #else
- #ifdef __INTEL_COMPILER
- "fdivp %st(1)\n"
- #else
- "fdiv\n"
- #endif
- #endif
- #endif
- "movl %edi, %eax\n"
- "fstp" EEL_F_SUFFIX " (%edi)\n"
- // denormal check on the stored result's exponent
- "movl 4(%edi), %edx\n"
- "addl $0x00100000, %edx\n"
- "andl $0x7FF00000, %edx\n"
- "cmpl $0x00200000, %edx\n"
- "jg 0f\n"
- "subl %edx, %edx\n"
- #ifdef TARGET_X64
- "movll %rdx, (%rdi)\n"
- #else
- "movl %edx, (%edi)\n"
- "movl %edx, 4(%edi)\n"
- #endif
- "0:\n"
- FUNCTION_MARKER
- );
- }
- void nseel_asm_div_op_end(void) {}
- // In-place divide, fast variant: *edi /= st(0) (popped), no denormal fix.
- void nseel_asm_div_op_fast(void)
- {
- __asm__(
- FUNCTION_MARKER
- "fld" EEL_F_SUFFIX " (%edi)\n"
- #ifdef __clang__
- "fdivp %st(1)\n"
- #else
- #ifndef __GNUC__
- "fdivr\n"
- #else
- #ifdef __INTEL_COMPILER
- "fdivp %st(1)\n"
- #else
- "fdiv\n"
- #endif
- #endif
- #endif
- "movl %edi, %eax\n"
- "fstp" EEL_F_SUFFIX " (%edi)\n"
- FUNCTION_MARKER
- );
- }
- void nseel_asm_div_op_fast_end(void) {}
- //---------------------------------------------------------------------------------------------------------------
- // Integer modulus: both operands are converted to non-negative 32-bit
- // integers (fabs + fistpl), then remainder via divl; a zero divisor
- // yields 0 instead of faulting.
- void nseel_asm_mod(void)
- {
- __asm__(
- FUNCTION_MARKER
- "fabs\n"
- "fistpl (%esi)\n"
- "fabs\n"
- "fistpl 4(%esi)\n"
- "xorl %edx, %edx\n"
- "cmpl $0, (%esi)\n"
- "je 0f\n" // skip divide, set return to 0
- "movl 4(%esi), %eax\n"
- "divl (%esi)\n"       // remainder lands in edx
- "0:\n"
- "movl %edx, (%esi)\n"
- "fildl (%esi)\n"
- FUNCTION_MARKER
- );
- }
- void nseel_asm_mod_end(void) {}
- // Integer shift-left: both operands rounded to 32-bit ints; result is
- // value << count, pushed back as a float.
- void nseel_asm_shl(void)
- {
- __asm__(
- FUNCTION_MARKER
- "fistpl (%esi)\n"    // shift count
- "fistpl 4(%esi)\n"   // value
- "movl (%esi), %ecx\n"
- "movl 4(%esi), %eax\n"
- "shll %cl, %eax\n"
- "movl %eax, (%esi)\n"
- "fildl (%esi)\n"
- FUNCTION_MARKER
- );
- }
- void nseel_asm_shl_end(void) {}
- // Integer shift-right (arithmetic, sign-preserving via sarl): both
- // operands rounded to 32-bit ints; result pushed back as a float.
- void nseel_asm_shr(void)
- {
- __asm__(
- FUNCTION_MARKER
- "fistpl (%esi)\n"    // shift count
- "fistpl 4(%esi)\n"   // value
- "movl (%esi), %ecx\n"
- "movl 4(%esi), %eax\n"
- "sarl %cl, %eax\n"
- "movl %eax, (%esi)\n"
- "fildl (%esi)\n"
- FUNCTION_MARKER
- );
- }
- void nseel_asm_shr_end(void) {}
- // In-place integer modulus: *edi = |st(0)| % |*edi| (32-bit), result
- // stored back to (%edi) as a double and that pointer returned in eax;
- // zero divisor yields 0.
- void nseel_asm_mod_op(void)
- {
- __asm__(
- FUNCTION_MARKER
- "fld" EEL_F_SUFFIX " (%edi)\n"
- "fxch\n"
- "fabs\n"
- "fistpl (%edi)\n"    // divisor
- "fabs\n"
- "fistpl (%esi)\n"    // dividend
- "xorl %edx, %edx\n"
- "cmpl $0, (%edi)\n"
- "je 0f\n" // skip divide, set return to 0
- "movl (%esi), %eax\n"
- "divl (%edi)\n"      // remainder lands in edx
- "0:\n"
- "movl %edx, (%edi)\n"
- "fildl (%edi)\n"
- "movl %edi, %eax\n"
- "fstp" EEL_F_SUFFIX " (%edi)\n"
- FUNCTION_MARKER
- );
- }
- void nseel_asm_mod_op_end(void) {}
- //---------------------------------------------------------------------------------------------------------------
- // Bitwise OR: both operands rounded to signed 64-bit ints (fistpll),
- // OR'd in memory, result reloaded onto the x87 stack.
- void nseel_asm_or(void)
- {
- __asm__(
- FUNCTION_MARKER
- "fistpll (%esi)\n"
- "fistpll 8(%esi)\n"
- #ifdef TARGET_X64
- "movll 8(%rsi), %rdi\n"
- "orll %rdi, (%rsi)\n"
- #else
- // 32-bit: combine the 64-bit values one dword at a time
- "movl 8(%esi), %edi\n"
- "movl 12(%esi), %ecx\n"
- "orl %edi, (%esi)\n"
- "orl %ecx, 4(%esi)\n"
- #endif
- "fildll (%esi)\n"
- FUNCTION_MARKER
- );
- }
- void nseel_asm_or_end(void) {}
- // OR with 0, i.e. truncate to a 64-bit integer: int64 round-trip only.
- void nseel_asm_or0(void)
- {
- __asm__(
- FUNCTION_MARKER
- "fistpll (%esi)\n"
- "fildll (%esi)\n"
- FUNCTION_MARKER
- );
- }
- void nseel_asm_or0_end(void) {}
- // In-place bitwise OR: *edi = (int64)*edi | (int64)st(0), stored back as
- // a double; destination pointer returned in eax.
- void nseel_asm_or_op(void)
- {
- __asm__(
- FUNCTION_MARKER
- "fld" EEL_F_SUFFIX " (%edi)\n"
- "fxch\n"
- "fistpll (%edi)\n"
- "fistpll (%esi)\n"
- #ifdef TARGET_X64
- "movll (%rsi), %rax\n"
- "orll %rax, (%rdi)\n"
- #else
- "movl (%esi), %eax\n"
- "movl 4(%esi), %ecx\n"
- "orl %eax, (%edi)\n"
- "orl %ecx, 4(%edi)\n"
- #endif
- "fildll (%edi)\n"
- "movl %edi, %eax\n"
- "fstp" EEL_F_SUFFIX " (%edi)\n"
- FUNCTION_MARKER
- );
- }
- void nseel_asm_or_op_end(void) {}
- // Bitwise XOR: both operands rounded to signed 64-bit ints, XOR'd in
- // memory, result reloaded onto the x87 stack.
- void nseel_asm_xor(void)
- {
- __asm__(
- FUNCTION_MARKER
- "fistpll (%esi)\n"
- "fistpll 8(%esi)\n"
- #ifdef TARGET_X64
- "movll 8(%rsi), %rdi\n"
- "xorll %rdi, (%rsi)\n"
- #else
- "movl 8(%esi), %edi\n"
- "movl 12(%esi), %ecx\n"
- "xorl %edi, (%esi)\n"
- "xorl %ecx, 4(%esi)\n"
- #endif
- "fildll (%esi)\n"
- FUNCTION_MARKER
- );
- }
- void nseel_asm_xor_end(void) {}
- // In-place bitwise XOR: *edi = (int64)*edi ^ (int64)st(0), stored back as
- // a double; destination pointer returned in eax.
- void nseel_asm_xor_op(void)
- {
- __asm__(
- FUNCTION_MARKER
- "fld" EEL_F_SUFFIX " (%edi)\n"
- "fxch\n"
- "fistpll (%edi)\n"
- "fistpll (%esi)\n"
- #ifdef TARGET_X64
- "movll (%rsi), %rax\n"
- "xorll %rax, (%rdi)\n"
- #else
- "movl (%esi), %eax\n"
- "movl 4(%esi), %ecx\n"
- "xorl %eax, (%edi)\n"
- "xorl %ecx, 4(%edi)\n"
- #endif
- "fildll (%edi)\n"
- "movl %edi, %eax\n"
- "fstp" EEL_F_SUFFIX " (%edi)\n"
- FUNCTION_MARKER
- );
- }
- void nseel_asm_xor_op_end(void) {}
- //---------------------------------------------------------------------------------------------------------------
- // Bitwise AND: both operands rounded to signed 64-bit ints, AND'd in
- // memory, result reloaded onto the x87 stack.
- void nseel_asm_and(void)
- {
- __asm__(
- FUNCTION_MARKER
- "fistpll (%esi)\n"
- "fistpll 8(%esi)\n"
- #ifdef TARGET_X64
- "movll 8(%rsi), %rdi\n"
- "andll %rdi, (%rsi)\n"
- #else
- "movl 8(%esi), %edi\n"
- "movl 12(%esi), %ecx\n"
- "andl %edi, (%esi)\n"
- "andl %ecx, 4(%esi)\n"
- #endif
- "fildll (%esi)\n"
- FUNCTION_MARKER
- );
- }
- void nseel_asm_and_end(void) {}
- // In-place bitwise AND: *edi = (int64)*edi & (int64)st(0), stored back as
- // a double; destination pointer returned in eax.
- void nseel_asm_and_op(void)
- {
- __asm__(
- FUNCTION_MARKER
- "fld" EEL_F_SUFFIX " (%edi)\n"
- "fxch\n"
- "fistpll (%edi)\n"
- "fistpll (%esi)\n"
- #ifdef TARGET_X64
- "movll (%rsi), %rax\n"
- "andll %rax, (%rdi)\n"
- #else
- "movl (%esi), %eax\n"
- "movl 4(%esi), %ecx\n"
- "andl %eax, (%edi)\n"
- "andl %ecx, 4(%edi)\n"
- #endif
- "fildll (%edi)\n"
- "movl %edi, %eax\n"
- "fstp" EEL_F_SUFFIX " (%edi)\n"
- FUNCTION_MARKER
- );
- }
- void nseel_asm_and_op_end(void) {}
- //---------------------------------------------------------------------------------------------------------------
- void nseel_asm_uplus(void) // this is the same as doing nothing, it seems
- {
- __asm__(
- FUNCTION_MARKER
- ""
- FUNCTION_MARKER
- );
- }
- void nseel_asm_uplus_end(void) {}
- //---------------------------------------------------------------------------------------------------------------
- // Unary minus: st(0) = -st(0)
- void nseel_asm_uminus(void)
- {
- __asm__(
- FUNCTION_MARKER
- "fchs\n"
- FUNCTION_MARKER
- );
- }
- void nseel_asm_uminus_end(void) {}
- //---------------------------------------------------------------------------------------------------------------
- // Sign function: replaces st(0) with +1.0 or -1.0 according to its sign
- // bit; if all non-sign bits are zero (i.e. +/-0) the value is returned
- // unchanged.  The x64 path tests the full 64-bit pattern, the x86 path a
- // single-precision spill.
- void nseel_asm_sign(void)
- {
- __asm__(
- FUNCTION_MARKER
- #ifdef TARGET_X64
- "fst" EEL_F_SUFFIX " (%rsi)\n"
- "mov" EEL_F_SUFFIX " (%rsi), %rdx\n"
- "movll $0x7FFFFFFFFFFFFFFF, %rcx\n"  // mask of everything but the sign bit
- "testll %rcx, %rdx\n"
- "jz 0f\n" // zero zero, return the value passed directly
- // calculate sign
- "incll %rcx\n" // rcx becomes 0x80000...
- "fstp %st(0)\n"
- "fld1\n"
- "testl %rcx, %rdx\n"  // sign bit set? then negate the 1.0
- "jz 0f\n"
- "fchs\n"
- "0:\n"
- #else
- "fsts (%esi)\n"
- "movl (%esi), %ecx\n"
- "movl $0x7FFFFFFF, %edx\n"  // mask of everything but the sign bit
- "testl %edx, %ecx\n"
- "jz 0f\n" // zero zero, return the value passed directly
- // calculate sign
- "incl %edx\n" // edx becomes 0x8000...
- "fstp %st(0)\n"
- "fld1\n"
- "testl %edx, %ecx\n"  // sign bit set? then negate the 1.0
- "jz 0f\n"
- "fchs\n"
- "0:\n"
- 
- #endif
- FUNCTION_MARKER
- );
- }
- void nseel_asm_sign_end(void) {}
- //---------------------------------------------------------------------------------------------------------------
- // Boolean NOT of the flag value in eax: eax = (eax == 0) ? 1 : 0.
- void nseel_asm_bnot(void)
- {
- __asm__(
- FUNCTION_MARKER
- "testl %eax, %eax\n"
- "setz %al\n"
- "andl $0xff, %eax\n"  // clear the upper bytes left over in eax
- FUNCTION_MARKER
- );
- }
- void nseel_asm_bnot_end(void) {}
- //---------------------------------------------------------------------------------------------------------------
- // Call a generated subfunction (address patched over 0xfefefefe), padding
- // the stack so it stays 16-byte aligned across the call.
- void nseel_asm_fcall(void)
- {
- __asm__(
- FUNCTION_MARKER
- "movl $0xfefefefe, %edx\n"
- #ifdef TARGET_X64
- "subl $8, %esp\n"   // 8 pad + 8 return address = 16
- "call *%edx\n"
- "addl $8, %esp\n"
- #else
- "subl $12, %esp\n" /* keep stack 16 byte aligned, 4 bytes for return address */
- "call *%edx\n"
- "addl $12, %esp\n"
- #endif
- FUNCTION_MARKER
- );
- }
- void nseel_asm_fcall_end(void) {}
- // Short-circuit logical AND: if the first operand's flag (eax) is zero,
- // skip the patched call that evaluates the second operand.
- void nseel_asm_band(void)
- {
- __asm__(
- FUNCTION_MARKER
- "testl %eax, %eax\n"
- "jz 0f\n"
- "movl $0xfefefefe, %ecx\n"  // placeholder: second-operand code address
- #ifdef TARGET_X64
- "subl $8, %rsp\n"   // keep stack 16-byte aligned across the call
- #else
- "subl $12, %esp\n"
- #endif
- "call *%ecx\n"
- #ifdef TARGET_X64
- "addl $8, %rsp\n"
- #else
- "addl $12, %esp\n"
- #endif
- "0:\n"
- FUNCTION_MARKER
- );
- }
- void nseel_asm_band_end(void) {}
- // Short-circuit logical OR: if the first operand's flag (eax) is nonzero,
- // skip the patched call that evaluates the second operand.
- void nseel_asm_bor(void)
- {
- __asm__(
- FUNCTION_MARKER
- "testl %eax, %eax\n"
- "jnz 0f\n"
- "movl $0xfefefefe, %ecx\n"  // placeholder: second-operand code address
- #ifdef TARGET_X64
- "subl $8, %rsp\n"   // keep stack 16-byte aligned across the call
- #else
- "subl $12, %esp\n"
- #endif
- "call *%ecx\n"
- #ifdef TARGET_X64
- "addl $8, %rsp\n"
- #else
- "addl $12, %esp\n"
- #endif
- "0:\n"
- FUNCTION_MARKER
- );
- }
- void nseel_asm_bor_end(void) {}
- //---------------------------------------------------------------------------------------------------------------
- // Fuzzy equality: true when |a - b| < g_closefact (epsilon stored just
- // below the worktable base register).  Result flag left in eax (C0 of the
- // FPU status word, bit 8).
- void nseel_asm_equal(void)
- {
- __asm__(
- FUNCTION_MARKER
- #ifdef __clang__
- "fsubp %st(1)\n"
- #else
- "fsub\n"
- #endif
- "fabs\n"
- #ifdef TARGET_X64
- "fcomp" EEL_F_SUFFIX " -8(%r12)\n" //[g_closefact]
- #else
- "fcomp" EEL_F_SUFFIX " -8(%ebx)\n" //[g_closefact]
- #endif
- "fstsw %ax\n"
- "andl $256, %eax\n" // old behavior: if 256 set, true (NaN means true)
- FUNCTION_MARKER
- );
- }
- void nseel_asm_equal_end(void) {}
- //
- //---------------------------------------------------------------------------------------------------------------
- // Exact equality via fcompp: eax is nonzero (16384) iff the x87 compare
- // reported strict equality (C3 set, C0/C2 clear).
- void nseel_asm_equal_exact(void)
- {
- __asm__(
- FUNCTION_MARKER
- "fcompp\n"
- "fstsw %ax\n" // for equal 256 and 1024 should be clear, 16384 should be set
- "andl $17664, %eax\n" // mask C4/C3/C1, bits 8/10/14, 16384|256|1024 -- if equals 16384, then equality
- "cmp $16384, %eax\n"
- "je 0f\n"
- "subl %eax, %eax\n"
- "0:\n"
- FUNCTION_MARKER
- );
- }
- void nseel_asm_equal_exact_end(void) {}
- // Exact inequality: same status-word test as nseel_asm_equal_exact,
- // with the boolean result inverted at the end.
- void nseel_asm_notequal_exact(void)
- {
- __asm__(
- FUNCTION_MARKER
- "fcompp\n"
- "fstsw %ax\n" // for equal 256 and 1024 should be clear, 16384 should be set
- "andl $17664, %eax\n" // mask C4/C3/C1, bits 8/10/14, 16384|256|1024 -- if equals 16384, then equality
- "cmp $16384, %eax\n"
- "je 0f\n"
- "subl %eax, %eax\n"
- "0:\n"
- "xorl $16384, %eax\n" // flip the result
- FUNCTION_MARKER
- );
- }
- void nseel_asm_notequal_exact_end(void) {}
- //
- //---------------------------------------------------------------------------------------------------------------
- // Fuzzy inequality: complement of nseel_asm_equal -- true when
- // |a - b| >= g_closefact.
- void nseel_asm_notequal(void)
- {
- __asm__(
- FUNCTION_MARKER
- #ifdef __clang__
- "fsubp %st(1)\n"
- #else
- "fsub\n"
- #endif
- "fabs\n"
- #ifdef TARGET_X64
- "fcomp" EEL_F_SUFFIX " -8(%r12)\n" //[g_closefact]
- #else
- "fcomp" EEL_F_SUFFIX " -8(%ebx)\n" //[g_closefact]
- #endif
- "fstsw %ax\n"
- "andl $256, %eax\n"
- "xorl $256, %eax\n" // old behavior: if 256 set, FALSE (NaN makes for false)
- FUNCTION_MARKER
- );
- }
- void nseel_asm_notequal_end(void) {}
- //---------------------------------------------------------------------------------------------------------------
- // Greater-than compare of the two x87 operands; boolean flag in eax from
- // the C0/C2 bits of the status word.
- void nseel_asm_above(void)
- {
- __asm__(
- FUNCTION_MARKER
- "fcompp\n"
- "fstsw %ax\n"
- "andl $1280, %eax\n" // (1024+256) old behavior: NaN would mean 1, preserve that
- FUNCTION_MARKER
- );
- }
- void nseel_asm_above_end(void) {}
- //---------------------------------------------------------------------------------------------------------------
- // Less-than-or-equal compare of the two x87 operands; boolean in eax
- // (C0 inverted).
- void nseel_asm_beloweq(void)
- {
- __asm__(
- FUNCTION_MARKER
- "fcompp\n"
- "fstsw %ax\n"
- "andl $256, %eax\n" // old behavior: NaN would be 0 (ugh)
- "xorl $256, %eax\n"
- FUNCTION_MARKER
- );
- }
- void nseel_asm_beloweq_end(void) {}
- // Convert the boolean flag in eax to an FPU value: 1.0 if nonzero, else 0.0.
- void nseel_asm_booltofp(void)
- {
- __asm__(
- FUNCTION_MARKER
- "testl %eax, %eax\n"
- "jz 0f\n"
- "fld1\n"
- "jmp 1f\n"
- "0:\n"
- "fldz\n"
- "1:\n"
- FUNCTION_MARKER
- );
- }
- void nseel_asm_booltofp_end(void) {}
- // Convert st(0) to a boolean in eax: true when |x| >= g_closefact.
- void nseel_asm_fptobool(void)
- {
- __asm__(
- FUNCTION_MARKER
- "fabs\n"
- #ifdef TARGET_X64
- "fcomp" EEL_F_SUFFIX " -8(%r12)\n" //[g_closefact]
- #else
- "fcomp" EEL_F_SUFFIX " -8(%ebx)\n" //[g_closefact]
- #endif
- "fstsw %ax\n"
- "andl $256, %eax\n"
- "xorl $256, %eax\n"
- FUNCTION_MARKER
- );
- }
- void nseel_asm_fptobool_end(void) {}
- // Convert st(0) to an inverted boolean in eax: true when |x| < g_closefact.
- void nseel_asm_fptobool_rev(void)
- {
- __asm__(
- FUNCTION_MARKER
- "fabs\n"
- #ifdef TARGET_X64
- "fcomp" EEL_F_SUFFIX " -8(%r12)\n" //[g_closefact]
- #else
- "fcomp" EEL_F_SUFFIX " -8(%ebx)\n" //[g_closefact]
- #endif
- "fstsw %ax\n"
- "andl $256, %eax\n"
- FUNCTION_MARKER
- );
- }
- void nseel_asm_fptobool_rev_end(void) {}
- // min by reference: compare *edi with *eax and leave in eax a pointer to
- // the smaller value.
- void nseel_asm_min(void)
- {
- __asm__(
- FUNCTION_MARKER
- "fld" EEL_F_SUFFIX " (%edi)\n"
- "fcomp" EEL_F_SUFFIX " (%eax)\n"
- "movl %eax, %ecx\n"  // save eax: fstsw clobbers ax
- "fstsw %ax\n"
- "testl $256, %eax\n" // C0 set => *edi < *eax
- "movl %ecx, %eax\n"
- "jz 0f\n"
- "movl %edi, %eax\n"
- "0:\n"
- FUNCTION_MARKER
- );
- }
- void nseel_asm_min_end(void) {}
- // max by reference: compare *edi with *eax and leave in eax a pointer to
- // the larger value.
- void nseel_asm_max(void)
- {
- __asm__(
- FUNCTION_MARKER
- "fld" EEL_F_SUFFIX " (%edi)\n"
- "fcomp" EEL_F_SUFFIX " (%eax)\n"
- "movl %eax, %ecx\n"  // save eax: fstsw clobbers ax
- "fstsw %ax\n"
- "testl $256, %eax\n" // C0 set => *edi < *eax
- "movl %ecx, %eax\n"
- "jnz 0f\n"
- "movl %edi, %eax\n"
- "0:\n"
- FUNCTION_MARKER
- );
- }
- void nseel_asm_max_end(void) {}
// JIT stub for min() on FPU-stack operands: compares st(0) with st(1),
// swaps when needed, then pops so the minimum remains in st(0).
void nseel_asm_min_fp(void)
{
__asm__(
FUNCTION_MARKER
"fcom\n"              // compare st(0) with st(1), no pop
"fstsw %ax\n"
"testl $256, %eax\n"  // C0 set -> st(0) < st(1)
"jz 0f\n"
"fxch\n"              // bring the larger value to st(0) so it gets discarded
"0:\n"
"fstp %st(0)\n"       // pop, leaving the minimum
FUNCTION_MARKER
);
}
void nseel_asm_min_fp_end(void) {}
// JIT stub for max() on FPU-stack operands: mirror of nseel_asm_min_fp,
// leaving the maximum in st(0).
void nseel_asm_max_fp(void)
{
__asm__(
FUNCTION_MARKER
"fcom\n"              // compare st(0) with st(1), no pop
"fstsw %ax\n"
"testl $256, %eax\n"  // C0 set -> st(0) < st(1)
"jnz 0f\n"
"fxch\n"              // bring the smaller value to st(0) so it gets discarded
"0:\n"
"fstp %st(0)\n"       // pop, leaving the maximum
FUNCTION_MARKER
);
}
void nseel_asm_max_fp_end(void) {}
// just generic functions left, yay
// JIT stub: calls a C function taking (context, parm1, parm2, parm3).
// The two $0xfefefefe immediates are patch slots filled in by the code
// generator (context pointer and function address). Register inputs are the
// EEL work registers; the three ABI variants marshal them into the platform
// calling convention (SysV x64 / Win64 / cdecl x86).
void _asm_generic3parm(void)
{
__asm__(
FUNCTION_MARKER
#ifdef TARGET_X64
#ifdef AMD64ABI
"movl %rsi, %r15\n"        // preserve rsi (EEL worktable pointer) across the call
"movl %rdi, %rdx\n" // third parameter = parm
"movl $0xfefefefe, %rdi\n" // first parameter= context
"movl %ecx, %rsi\n" // second parameter = parm
"movl %rax, %rcx\n" // fourth parameter = parm
"movl $0xfefefefe, %rax\n" // call function
"call *%rax\n"
"movl %r15, %rsi\n"
#else
"movl %ecx, %edx\n" // second parameter = parm
"movl $0xfefefefe, %ecx\n" // first parameter= context
"movl %rdi, %r8\n" // third parameter = parm
"movl %rax, %r9\n" // fourth parameter = parm
"movl $0xfefefefe, %edi\n" // call function
"subl X64_EXTRA_STACK_SPACE, %rsp\n"  // Win64 shadow space
"call *%edi\n"
"addl X64_EXTRA_STACK_SPACE, %rsp\n"
#endif
#else

"movl $0xfefefefe, %edx\n"
"pushl %eax\n" // push parameter
"pushl %edi\n" // push parameter
"movl $0xfefefefe, %edi\n"
"pushl %ecx\n" // push parameter
"pushl %edx\n" // push context pointer
"call *%edi\n"
"addl $16, %esp\n"

#endif
FUNCTION_MARKER
);
}
void _asm_generic3parm_end(void) {}
// JIT stub: like _asm_generic3parm, but the callee returns a double. On x64
// the return value comes back in xmm0 and is spilled through (%rsi) to get it
// onto the x87 stack (EEL's value representation); on x86 the cdecl callee
// already returns in st(0).
void _asm_generic3parm_retd(void)
{
__asm__(
FUNCTION_MARKER
#ifdef TARGET_X64
#ifdef AMD64ABI
"movl %rsi, %r15\n"        // preserve rsi across the call
"movl %rdi, %rdx\n" // third parameter = parm
"movl $0xfefefefe, %rdi\n" // first parameter= context
"movl %ecx, %rsi\n" // second parameter = parm
"movl %rax, %rcx\n" // fourth parameter = parm
"movl $0xfefefefe, %rax\n" // call function
"call *%rax\n"
"movl %r15, %rsi\n"
"movq xmm0, (%r15)\n"      // spill double result to scratch memory
"fldl (%r15)\n"            // and reload it onto the x87 stack
#else
"movl %ecx, %edx\n" // second parameter = parm
"movl $0xfefefefe, %ecx\n" // first parameter= context
"movl %rdi, %r8\n" // third parameter = parm
"movl %rax, %r9\n" // fourth parameter = parm
"movl $0xfefefefe, %edi\n" // call function
"subl X64_EXTRA_STACK_SPACE, %rsp\n"
"call *%edi\n"
"addl X64_EXTRA_STACK_SPACE, %rsp\n"
"movq xmm0, (%rsi)\n"      // spill double result to scratch memory
"fldl (%rsi)\n"            // and reload it onto the x87 stack
#endif
#else

"subl $16, %esp\n"         // build the 4-arg frame directly, keeping esp aligned
"movl $0xfefefefe, %edx\n"
"movl %edi, 8(%esp)\n"
"movl $0xfefefefe, %edi\n"
"movl %eax, 12(%esp)\n"
"movl %ecx, 4(%esp)\n"
"movl %edx, (%esp)\n"
"call *%edi\n"
"addl $16, %esp\n"

#endif
FUNCTION_MARKER
);
}
void _asm_generic3parm_retd_end(void) {}
// JIT stub: calls a C function taking (context, parm1, parm2). Patch slots and
// register conventions as in _asm_generic3parm.
void _asm_generic2parm(void) // this prob neds to be fixed for ppc
{
__asm__(
FUNCTION_MARKER
#ifdef TARGET_X64
#ifdef AMD64ABI
"movl %rsi, %r15\n"        // preserve rsi across the call
"movl %edi, %esi\n" // second parameter = parm
"movl $0xfefefefe, %edi\n" // first parameter= context
"movl %rax, %rdx\n" // third parameter = parm
"movl $0xfefefefe, %rcx\n" // call function
"call *%rcx\n"
"movl %r15, %rsi\n"
#else
"movl $0xfefefefe, %ecx\n" // first parameter= context
"movl %edi, %edx\n" // second parameter = parm
"movl %rax, %r8\n" // third parameter = parm
"movl $0xfefefefe, %edi\n" // call function
"subl X64_EXTRA_STACK_SPACE, %rsp\n"
"call *%edi\n"
"addl X64_EXTRA_STACK_SPACE, %rsp\n"
#endif
#else

"movl $0xfefefefe, %edx\n"
"movl $0xfefefefe, %ecx\n"
"subl $4, %esp\n" // keep stack aligned
"pushl %eax\n" // push parameter
"pushl %edi\n" // push parameter
"pushl %edx\n" // push context pointer
"call *%ecx\n"
"addl $16, %esp\n"

#endif
FUNCTION_MARKER
);
}
void _asm_generic2parm_end(void) {}
// JIT stub: calls a C function taking (context, parm1, parm2) that returns a
// double; on x64 the xmm0 result is spilled through (%rsi) onto the x87 stack.
void _asm_generic2parm_retd(void)
{
__asm__(
FUNCTION_MARKER
#ifdef TARGET_X64
#ifdef AMD64ABI
"movl %rsi, %r15\n"        // preserve rsi across the call
"movl %rdi, %rsi\n" // second parameter = parm
"movl $0xfefefefe, %rdi\n" // first parameter= context
"movl $0xfefefefe, %rcx\n" // call function
"movl %rax, %rdx\n" // third parameter = parm
"call *%rcx\n"
"movl %r15, %rsi\n"
"movq xmm0, (%r15)\n"      // spill double result
"fldl (%r15)\n"            // reload onto x87 stack
#else
"movl %rdi, %rdx\n" // second parameter = parm
"movl $0xfefefefe, %rcx\n" // first parameter= context
"movl $0xfefefefe, %rdi\n" // call function
"movl %rax, %r8\n" // third parameter = parm
"subl X64_EXTRA_STACK_SPACE, %rsp\n"
"call *%edi\n"
"addl X64_EXTRA_STACK_SPACE, %rsp\n"
"movq xmm0, (%rsi)\n"      // spill double result
"fldl (%rsi)\n"            // reload onto x87 stack
#endif
#else

"subl $16, %esp\n"         // build 3-arg frame, keeping esp aligned
"movl $0xfefefefe, %edx\n"
"movl $0xfefefefe, %ecx\n"
"movl %edx, (%esp)\n"
"movl %edi, 4(%esp)\n"
"movl %eax, 8(%esp)\n"
"call *%ecx\n"
"addl $16, %esp\n"

#endif
FUNCTION_MARKER
);
}
void _asm_generic2parm_retd_end(void) {}
// JIT stub: calls a C function taking (context, parm). Patch slots and
// register conventions as in _asm_generic3parm.
void _asm_generic1parm(void)
{
__asm__(
FUNCTION_MARKER
#ifdef TARGET_X64
#ifdef AMD64ABI
"movl $0xfefefefe, %rdi\n" // first parameter= context
"movl %rsi, %r15\n"        // preserve rsi across the call
"movl %eax, %rsi\n" // second parameter = parm
"movl $0xfefefefe, %rcx\n" // call function
"call *%rcx\n"
"movl %r15, %rsi\n"
#else
"movl $0xfefefefe, %ecx\n" // first parameter= context
"movl %eax, %edx\n" // second parameter = parm
"movl $0xfefefefe, %edi\n" // call function
"subl X64_EXTRA_STACK_SPACE, %rsp\n"
"call *%edi\n"
"addl X64_EXTRA_STACK_SPACE, %rsp\n"
#endif
#else

"movl $0xfefefefe, %edx\n"
"subl $8, %esp\n" // keep stack aligned
"movl $0xfefefefe, %ecx\n"
"pushl %eax\n" // push parameter
"pushl %edx\n" // push context pointer
"call *%ecx\n"
"addl $16, %esp\n"

#endif
FUNCTION_MARKER
);
}
void _asm_generic1parm_end(void) {}
// JIT stub: calls a C function taking (context, parm) that returns a double;
// on x64 the xmm0 result is spilled through scratch memory onto the x87 stack.
void _asm_generic1parm_retd(void) // 1 parameter returning double
{
__asm__(
FUNCTION_MARKER
#ifdef TARGET_X64
#ifdef AMD64ABI
"movl $0xfefefefe, %rdi\n" // first parameter = context pointer
"movl $0xfefefefe, %rcx\n" // function address
"movl %rsi, %r15\n" // save rsi
"movl %rax, %rsi\n" // second parameter = parameter
"call *%rcx\n"

"movl %r15, %rsi\n"
"movq xmm0, (%r15)\n"      // spill double result
"fldl (%r15)\n"            // reload onto x87 stack
#else
"movl $0xfefefefe, %ecx\n" // first parameter= context
"movl $0xfefefefe, %edi\n" // call function
"movl %rax, %rdx\n" // second parameter = parm
"subl X64_EXTRA_STACK_SPACE, %rsp\n"
"call *%edi\n"
"addl X64_EXTRA_STACK_SPACE, %rsp\n"
"movq xmm0, (%rsi)\n"      // spill double result
"fldl (%rsi)\n"            // reload onto x87 stack
#endif
#else

"movl $0xfefefefe, %edx\n" // context pointer
"movl $0xfefefefe, %ecx\n" // func-addr
"subl $16, %esp\n"
"movl %eax, 4(%esp)\n" // push parameter
"movl %edx, (%esp)\n" // push context pointer
"call *%ecx\n"
"addl $16, %esp\n"

#endif
FUNCTION_MARKER
);
}
void _asm_generic1parm_retd_end(void) {}
// this gets its own stub because it's pretty crucial for performance :/
// JIT stub for megabuf[] access: converts the x87 index (plus the g_closefact
// rounding bias at -8 off the RAM-table base) to an integer, then resolves it
// to an EEL_F* via the block table kept in %ebx/%r12. In-range indices with an
// allocated block are resolved inline (block = table[idx >> ITEMSPERBLOCK_LOG2],
// addr = block + (idx & (ITEMSPERBLOCK-1))*sizeof(EEL_F)); otherwise it falls
// back to the patched-in C helper. The "REPLACE=" comments tell the code
// generator what constant to patch over the corresponding operand.
void _asm_megabuf(void)
{
__asm__(
FUNCTION_MARKER
#ifdef TARGET_X64
#ifdef AMD64ABI
"fadd" EEL_F_SUFFIX " -8(%r12)\n"
"fistpl (%rsi)\n"
// check if (%rsi) is in range, and buffer available, otherwise call function
"movl (%rsi), %edx\n"
"cmpl %1, %rdx\n" //REPLACE=((NSEEL_RAM_BLOCKS*NSEEL_RAM_ITEMSPERBLOCK))
"jae 0f\n"
"movll %rdx, %rax\n"
"shrll %2, %rax\n" //REPLACE=(NSEEL_RAM_ITEMSPERBLOCK_LOG2 - 3/*log2(sizeof(void *))*/ )
"andll %3, %rax\n" //REPLACE=((NSEEL_RAM_BLOCKS-1)*8 /*sizeof(void*)*/ )
"movll (%r12, %rax), %rax\n"
"testl %rax, %rax\n"
"jnz 1f\n"
"0:\n"
"movl $0xfefefefe, %rax\n"   // patched: fallback helper address
"movl %r12, %rdi\n" // set first parm to ctx
"movl %rsi, %r15\n" // save rsi
"movl %rdx, %esi\n" // esi becomes second parameter (edi is first, context pointer)
"call *%rax\n"
"movl %r15, %rsi\n" // restore rsi
"jmp 2f\n"
"1:\n"
"andll %4, %rdx\n" //REPLACE=(NSEEL_RAM_ITEMSPERBLOCK-1)
"shlll $3, %rdx\n" // 3 is log2(sizeof(EEL_F))
"addll %rdx, %rax\n"
"2:\n"
#else
"fadd" EEL_F_SUFFIX " -8(%r12)\n"
"fistpl (%rsi)\n"
// check if (%rsi) is in range...
"movl (%rsi), %edi\n"
"cmpl %1, %edi\n" //REPLACE=((NSEEL_RAM_BLOCKS*NSEEL_RAM_ITEMSPERBLOCK))
"jae 0f\n"
"movll %rdi, %rax\n"
"shrll %2, %rax\n" //REPLACE=(NSEEL_RAM_ITEMSPERBLOCK_LOG2 - 3/*log2(sizeof(void *))*/ )
"andll %3, %rax\n" //REPLACE=((NSEEL_RAM_BLOCKS-1)*8 /*sizeof(void*)*/ )
"movll (%r12, %rax), %rax\n"
"testl %rax, %rax\n"
"jnz 1f\n"
"0:\n"
"movl $0xfefefefe, %rax\n" // function ptr
"movl %r12, %rcx\n" // set first parm to ctx
"movl %rdi, %rdx\n" // rdx is second parameter (rcx is first)
"subl X64_EXTRA_STACK_SPACE, %rsp\n"
"call *%rax\n"
"addl X64_EXTRA_STACK_SPACE, %rsp\n"
"jmp 2f\n"
"1:\n"
"andll %4, %rdi\n" //REPLACE=(NSEEL_RAM_ITEMSPERBLOCK-1)
"shlll $3, %rdi\n" // 3 is log2(sizeof(EEL_F))
"addll %rdi, %rax\n"
"2:\n"
#endif
FUNCTION_MARKER
#else
// 32-bit path: same structure, with real "i" constraints supplying the
// constants (the REPLACE comments double as documentation of their values).
"fadd" EEL_F_SUFFIX " -8(%%ebx)\n"
"fistpl (%%esi)\n"
// check if (%esi) is in range, and buffer available, otherwise call function
"movl (%%esi), %%edi\n"
"cmpl %0, %%edi\n" //REPLACE=((NSEEL_RAM_BLOCKS*NSEEL_RAM_ITEMSPERBLOCK))
"jae 0f\n"
"movl %%edi, %%eax\n"
"shrl %1, %%eax\n" //REPLACE=(NSEEL_RAM_ITEMSPERBLOCK_LOG2 - 2/*log2(sizeof(void *))*/ )
"andl %2, %%eax\n" //REPLACE=((NSEEL_RAM_BLOCKS-1)*4 /*sizeof(void*)*/ )
"movl (%%ebx, %%eax), %%eax\n"
"testl %%eax, %%eax\n"
"jnz 1f\n"
"0:\n"
"subl $8, %%esp\n" // keep stack aligned
"movl $0xfefefefe, %%ecx\n"
"pushl %%edi\n" // parameter
"pushl %%ebx\n" // push context pointer
"call *%%ecx\n"
"addl $16, %%esp\n"
"jmp 2f\n"
"1:\n"
"andl %3, %%edi\n" //REPLACE=(NSEEL_RAM_ITEMSPERBLOCK-1)
"shll $3, %%edi\n" // 3 is log2(sizeof(EEL_F))
"addl %%edi, %%eax\n"
"2:"
FUNCTION_MARKER
#ifndef _MSC_VER
:: "i" (((NSEEL_RAM_BLOCKS*NSEEL_RAM_ITEMSPERBLOCK))),
"i" ((NSEEL_RAM_ITEMSPERBLOCK_LOG2 - 2/*log2(sizeof(void *))*/ )),
"i" (((NSEEL_RAM_BLOCKS-1)*4 /*sizeof(void*)*/ )),
"i" ((NSEEL_RAM_ITEMSPERBLOCK-1 ))
#endif
#endif
);
}
void _asm_megabuf_end(void) {}
// JIT stub for gmegabuf[] access: always goes through the patched-in C helper
// (context, integer index) — no inline fast path, unlike _asm_megabuf.
void _asm_gmegabuf(void)
{
__asm__(
FUNCTION_MARKER
#ifdef TARGET_X64
#ifdef AMD64ABI
"movl %rsi, %r15\n"            // preserve rsi across the call
"fadd" EEL_F_SUFFIX " -8(%r12)\n"  // add rounding bias (g_closefact)
"movl $0xfefefefe, %rdi\n" // first parameter = context pointer
"fistpl (%rsi)\n"              // store integer index to scratch
"movl $0xfefefefe, %edx\n"     // patched: helper address
"movl (%rsi), %esi\n"          // second parameter = index
"call *%rdx\n"
"movl %r15, %rsi\n"
#else
"fadd" EEL_F_SUFFIX " -8(%r12)\n"  // add rounding bias (g_closefact)
"movl $0xfefefefe, %rcx\n" // first parameter = context pointer
"fistpl (%rsi)\n"
"movl $0xfefefefe, %rdi\n"     // patched: helper address
"movl (%rsi), %edx\n"          // second parameter = index
"subl X64_EXTRA_STACK_SPACE, %rsp\n"
"call *%rdi\n"
"addl X64_EXTRA_STACK_SPACE, %rsp\n"
#endif
#else
"subl $16, %esp\n" // keep stack aligned
"movl $0xfefefefe, (%esp)\n"   // arg0 = context pointer (patched)
"fadd" EEL_F_SUFFIX " -8(%ebx)\n"  // add rounding bias (g_closefact)
"movl $0xfefefefe, %edi\n"     // patched: helper address
"fistpl 4(%esp)\n"             // arg1 = integer index, stored directly
"call *%edi\n"
"addl $16, %esp\n"
#endif
FUNCTION_MARKER
);
}
void _asm_gmegabuf_end(void) {}
// JIT stub: pushes the 8-byte EEL_F at (%eax/%rax) onto the user stack. The
// patched %edi/%rdi slot holds the address of the stack-top pointer; the
// and/or immediate pair wraps the advanced pointer into the stack region
// (values patched by the code generator).
void nseel_asm_stack_push(void)
{
#ifdef TARGET_X64
__asm__(
FUNCTION_MARKER
"movl $0xfefefefe, %rdi\n"   // patched: &stack-top pointer
"movll (%rax), %rcx\n"       // load the 8-byte value to push
"movll (%rdi), %rax\n"       // load current stack top
"addll $8, %rax\n"           // advance by one EEL_F
"movl $0xFEFEFEFE, %rdx\n"
"andll %rdx, %rax\n"         // wrap: mask to stack size...
"movl $0xFEFEFEFE, %rdx\n"
"orll %rdx, %rax\n"          // ...then OR in the stack base
"movll %rcx, (%rax)\n"       // store value at new top
"movll %rax, (%rdi)\n"       // publish new top
FUNCTION_MARKER
);
#else
__asm__(
FUNCTION_MARKER
"movl $0xfefefefe, %edi\n"   // patched: &stack-top pointer

"movl (%eax), %ecx\n"        // load the 8-byte value as two dwords
"movl 4(%eax), %edx\n"
"movl (%edi), %eax\n"        // load current stack top
"addl $8, %eax\n"            // advance by one EEL_F
"andl $0xfefefefe, %eax\n"   // wrap: mask to stack size (patched)...
"orl $0xfefefefe, %eax\n"    // ...then OR in the stack base (patched)

"movl %ecx, (%eax)\n"        // store value at new top
"movl %edx, 4(%eax)\n"
"movl %eax, (%edi)\n"        // publish new top
FUNCTION_MARKER
);
#endif
}
void nseel_asm_stack_push_end(void) {}
// JIT stub: pops the top EEL_F from the user stack into the location pointed
// to by %eax/%rax, retreating and wrapping the stack-top pointer (and/or
// immediates patched as in nseel_asm_stack_push).
void nseel_asm_stack_pop(void)
{
#ifdef TARGET_X64
__asm__(
FUNCTION_MARKER
"movl $0xfefefefe, %rdi\n"   // patched: &stack-top pointer
"movll (%rdi), %rcx\n"       // load current stack top
"movq (%rcx), %xmm0\n"       // grab the value being popped
"subll $8, %rcx\n"           // retreat by one EEL_F
"movl $0xFEFEFEFE, %rdx\n"
"andll %rdx, %rcx\n"         // wrap: mask...
"movl $0xFEFEFEFE, %rdx\n"
"orll %rdx, %rcx\n"          // ...then OR in the base
"movll %rcx, (%rdi)\n"       // publish new top
"movq %xmm0, (%eax)\n"       // NOTE(review): 32-bit base (%eax) in 64-bit code — presumably intended as (%rax); confirm against the assembler/translator this file targets
FUNCTION_MARKER
);
#else
__asm__(
FUNCTION_MARKER
"movl $0xfefefefe, %edi\n"   // patched: &stack-top pointer
"movl (%edi), %ecx\n"
"fld" EEL_F_SUFFIX " (%ecx)\n"   // grab the value being popped
"subl $8, %ecx\n"            // retreat by one EEL_F
"andl $0xfefefefe, %ecx\n"   // wrap: mask (patched)...
"orl $0xfefefefe, %ecx\n"    // ...then OR in the base (patched)
"movl %ecx, (%edi)\n"        // publish new top
"fstp" EEL_F_SUFFIX " (%eax)\n"  // store popped value to destination
FUNCTION_MARKER
);
#endif
}
void nseel_asm_stack_pop_end(void) {}
// JIT stub: pops the user stack WITHOUT copying the value — returns the old
// top address in %eax/%rax and retreats/wraps the stack-top pointer.
void nseel_asm_stack_pop_fast(void)
{
#ifdef TARGET_X64
__asm__(
FUNCTION_MARKER
"movl $0xfefefefe, %rdi\n"   // patched: &stack-top pointer
"movll (%rdi), %rcx\n"       // current top
"movll %rcx, %rax\n"         // return old top address
"subll $8, %rcx\n"           // retreat by one EEL_F
"movl $0xFEFEFEFE, %rdx\n"
"andll %rdx, %rcx\n"         // wrap: mask...
"movl $0xFEFEFEFE, %rdx\n"
"orll %rdx, %rcx\n"          // ...then OR in the base
"movll %rcx, (%rdi)\n"       // publish new top
FUNCTION_MARKER
);
#else
__asm__(
FUNCTION_MARKER
"movl $0xfefefefe, %edi\n"   // patched: &stack-top pointer
"movl (%edi), %ecx\n"        // current top
"movl %ecx, %eax\n"          // return old top address
"subl $8, %ecx\n"            // retreat by one EEL_F
"andl $0xfefefefe, %ecx\n"   // wrap: mask (patched)...
"orl $0xfefefefe, %ecx\n"    // ...then OR in the base (patched)
"movl %ecx, (%edi)\n"        // publish new top
FUNCTION_MARKER
);
#endif
}
void nseel_asm_stack_pop_fast_end(void) {}
// JIT stub: returns in %eax/%rax the address of the stack entry at a FIXED
// byte offset below the top (offset is the patched %edx immediate), wrapping
// into the stack region.
void nseel_asm_stack_peek_int(void)
{
#ifdef TARGET_X64
__asm__(
FUNCTION_MARKER
"movll $0xfefefefe, %rdi\n"  // patched: &stack-top pointer
"movll (%rdi), %rax\n"       // current top
"movl $0xfefefefe, %rdx\n"   // patched: constant byte offset
"subll %rdx, %rax\n"
"movl $0xFEFEFEFE, %rdx\n"
"andll %rdx, %rax\n"         // wrap: mask...
"movl $0xFEFEFEFE, %rdx\n"
"orll %rdx, %rax\n"          // ...then OR in the base
FUNCTION_MARKER
);
#else
__asm__(
FUNCTION_MARKER
"movl $0xfefefefe, %edi\n"   // patched: &stack-top pointer
"movl (%edi), %eax\n"        // current top
"movl $0xfefefefe, %edx\n"   // patched: constant byte offset
"subl %edx, %eax\n"
"andl $0xfefefefe, %eax\n"   // wrap: mask (patched)...
"orl $0xfefefefe, %eax\n"    // ...then OR in the base (patched)
FUNCTION_MARKER
);
#endif
}
void nseel_asm_stack_peek_int_end(void) {}
// JIT stub: like nseel_asm_stack_peek_int, but the entry index comes from the
// x87 stack at runtime — it is converted to an integer, scaled by
// sizeof(EEL_F), subtracted from the top, and wrapped.
void nseel_asm_stack_peek(void)
{
#ifdef TARGET_X64
__asm__(
FUNCTION_MARKER
"movll $0xfefefefe, %rdi\n"  // patched: &stack-top pointer
"fistpl (%rsi)\n"            // index -> integer in scratch
"movll (%rdi), %rax\n"       // current top
"movll (%rsi), %rdx\n"
"shll $3, %rdx\n" // log2(sizeof(EEL_F))
"subl %rdx, %rax\n"
"movl $0xFEFEFEFE, %rdx\n"
"andll %rdx, %rax\n"         // wrap: mask...
"movl $0xFEFEFEFE, %rdx\n"
"orll %rdx, %rax\n"          // ...then OR in the base
FUNCTION_MARKER
);
#else
__asm__(
FUNCTION_MARKER
"movl $0xfefefefe, %edi\n"   // patched: &stack-top pointer
"fistpl (%esi)\n"            // index -> integer in scratch
"movl (%edi), %eax\n"        // current top
"movl (%esi), %edx\n"
"shll $3, %edx\n" // log2(sizeof(EEL_F))
"subl %edx, %eax\n"
"andl $0xfefefefe, %eax\n"   // wrap: mask (patched)...
"orl $0xfefefefe, %eax\n"    // ...then OR in the base (patched)
FUNCTION_MARKER
);
#endif
}
void nseel_asm_stack_peek_end(void) {}
// JIT stub: returns the current stack-top address in %eax/%rax (no index
// arithmetic, no wrapping needed).
void nseel_asm_stack_peek_top(void)
{
#ifdef TARGET_X64
__asm__(
FUNCTION_MARKER
"movll $0xfefefefe, %rdi\n"  // patched: &stack-top pointer
"movll (%rdi), %rax\n"
FUNCTION_MARKER
);
#else
__asm__(
FUNCTION_MARKER
"movl $0xfefefefe, %edi\n"   // patched: &stack-top pointer
"movl (%edi), %eax\n"
FUNCTION_MARKER
);
#endif
}
void nseel_asm_stack_peek_top_end(void) {}
// JIT stub: swaps the 8-byte value at the stack top with the value pointed to
// by %eax/%rax (via xmm registers on x64, via the x87 stack on x86).
void nseel_asm_stack_exch(void)
{
#ifdef TARGET_X64
__asm__(
FUNCTION_MARKER
"movll $0xfefefefe, %rdi\n"  // patched: &stack-top pointer
"movll (%rdi), %rcx\n"
"movq (%rcx), %xmm0\n"
"movq (%rax), %xmm1\n"
"movq %xmm0, (%rax)\n"
"movq %xmm1, (%rcx)\n"
FUNCTION_MARKER
);
#else
__asm__(
FUNCTION_MARKER
"movl $0xfefefefe, %edi\n"   // patched: &stack-top pointer
"movl (%edi), %ecx\n"
"fld" EEL_F_SUFFIX " (%ecx)\n"   // load both values...
"fld" EEL_F_SUFFIX " (%eax)\n"
"fstp" EEL_F_SUFFIX " (%ecx)\n"  // ...and store them back crossed over
"fstp" EEL_F_SUFFIX " (%eax)\n"
FUNCTION_MARKER
);
#endif
}
void nseel_asm_stack_exch_end(void) {}
#ifdef TARGET_X64
// Entry trampoline (x64): called from C to run JIT-compiled EEL code.
// Optionally forces the x87 control word to 53/64-bit precision + truncation
// with all exceptions masked, saves the callee-saved registers the generated
// code clobbers, stashes the RAM-blocks pointer in r12, and calls the code
// pointer (rdi/rsi on SysV, rcx/rdx on Win64). fclex clears any pending FPU
// exceptions the EEL code may have raised before returning.
void eel_callcode64()
{
__asm__(
#ifndef EEL_X64_NO_CHANGE_FPFLAGS
"subl $16, %rsp\n"
"fnstcw (%rsp)\n"            // save caller's FPU control word at (%rsp)
"mov (%rsp), %ax\n"
"or $0xE3F, %ax\n" // 53 or 64 bit precision, trunc, and masking all exceptions
"mov %ax, 4(%rsp)\n"
"fldcw 4(%rsp)\n"            // install the EEL control word
#endif
"push %rbx\n"
"push %rbp\n"
"push %r12\n"
"push %r13\n"
"push %r14\n"
"push %r15\n"
#ifdef AMD64ABI
"movll %rsi, %r12\n" // second parameter is ram-blocks pointer
"call %rdi\n"
#else
"push %rdi\n"
"push %rsi\n"
"movll %rdx, %r12\n" // second parameter is ram-blocks pointer
"call %rcx\n"
"pop %rsi\n"
"pop %rdi\n"
#endif
"fclex\n"                    // discard pending FPU exceptions
"pop %r15\n"
"pop %r14\n"
"pop %r13\n"
"pop %r12\n"
"pop %rbp\n"
"pop %rbx\n"
#ifndef EEL_X64_NO_CHANGE_FPFLAGS
"fldcw (%rsp)\n"             // restore caller's FPU control word
"addl $16, %rsp\n"
#endif
"ret\n"
);
}
// Entry trampoline (x64), fast variant: same as eel_callcode64 but without
// touching the FPU control word or clearing exceptions — the caller is
// expected to have set FPU state via eel_enterfp()/eel_setfp_*().
void eel_callcode64_fast()
{
__asm__(
"push %rbx\n"
"push %rbp\n"
"push %r12\n"
"push %r13\n"
"push %r14\n"
"push %r15\n"
#ifdef AMD64ABI
"movll %rsi, %r12\n" // second parameter is ram-blocks pointer
"call %rdi\n"
#else
"push %rdi\n"
"push %rsi\n"
"movll %rdx, %r12\n" // second parameter is ram-blocks pointer
"call %rcx\n"
"pop %rsi\n"
"pop %rdi\n"
#endif
"pop %r15\n"
"pop %r14\n"
"pop %r13\n"
"pop %r12\n"
"pop %rbp\n"
"pop %rbx\n"
"ret\n"
);
}
// Sets the x87 rounding mode to round-to-nearest (clears RC bits 10-11 of the
// FPU control word). No-op when EEL_X64_NO_CHANGE_FPFLAGS is defined.
void eel_setfp_round()
{
__asm__(
#ifndef EEL_X64_NO_CHANGE_FPFLAGS
"subl $16, %rsp\n"
"fnstcw (%rsp)\n"
"mov (%rsp), %ax\n"
"and $0xF3FF, %ax\n" // set round to nearest
"mov %ax, 4(%rsp)\n"
"fldcw 4(%rsp)\n"
"addl $16, %rsp\n"
#endif
"ret\n"
);
}
// Sets the x87 rounding mode to truncate (sets RC bits 10-11 of the FPU
// control word), which EEL relies on for fistpl-based float->int conversion.
// No-op when EEL_X64_NO_CHANGE_FPFLAGS is defined.
void eel_setfp_trunc()
{
__asm__(
#ifndef EEL_X64_NO_CHANGE_FPFLAGS
"subl $16, %rsp\n"
"fnstcw (%rsp)\n"
"mov (%rsp), %ax\n"
"or $0xC00, %ax\n" // set to truncate
"mov %ax, 4(%rsp)\n"
"fldcw 4(%rsp)\n"
"addl $16, %rsp\n"
#endif
"ret\n"
);
}
// Saves the caller's FPU control word into s[0] and installs the EEL control
// word (53/64-bit precision, truncation, all exceptions masked) via s[1].
// The argument pointer arrives in %rdi (SysV) or %rcx (Win64); pair with
// eel_leavefp() to restore.
void eel_enterfp(int s[2])
{
__asm__(
#ifdef AMD64ABI
"fnstcw (%rdi)\n"            // s[0] = current control word
"mov (%rdi), %ax\n"
"or $0xE3F, %ax\n" // 53 or 64 bit precision, trunc, and masking all exceptions
"mov %ax, 4(%rdi)\n"         // s[1] = modified control word
"fldcw 4(%rdi)\n"
#else
"fnstcw (%rcx)\n"            // s[0] = current control word
"mov (%rcx), %ax\n"
"or $0xE3F, %ax\n" // 53 or 64 bit precision, trunc, and masking all exceptions
"mov %ax, 4(%rcx)\n"         // s[1] = modified control word
"fldcw 4(%rcx)\n"
#endif
"ret\n"
);
}
- void eel_leavefp(int s[2])
- {
- __asm__(
- #ifdef AMD64ABI
- "fldcw (%rdi)\n"
- #else
- "fldcw (%rcx)\n"
- #endif
- "ret\n";
- );
- }
- #endif
|