/sysdeps/x86_64/
  wcschr.S
      34  movdqu (%rdi), %xmm0
      35  pcmpeqd %xmm0, %xmm2
      37  pcmpeqd %xmm1, %xmm0
      39  pmovmskb %xmm0, %rax
      45  movdqa (%rdi), %xmm0
      46  pcmpeqd %xmm0, %xmm2
      48  pcmpeqd %xmm1, %xmm0
      50  pmovmskb %xmm0, %rax
      59  movdqa (%rdi), %xmm0
      60  pcmpeqd %xmm0, %xmm2
      [all …]
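The wcschr.S hits show the usual SSE2 wide-character scan: each 16-byte block (four wchar_t) is compared lane-wise against zero and against the target character, and the pmovmskb masks tell which comes first. A minimal C-intrinsics sketch of one such step, assuming 4-byte wchar_t (hypothetical helper, not glibc's code):

    #include <emmintrin.h>
    #include <wchar.h>

    /* Nonzero if this 16-byte block contains the target wide char or L'\0'. */
    static int wcschr_block_mask(const wchar_t *p, wchar_t c)
    {
        __m128i data   = _mm_loadu_si128((const __m128i *)p);          /* movdqu  */
        __m128i eq_c   = _mm_cmpeq_epi32(data, _mm_set1_epi32((int)c)); /* pcmpeqd */
        __m128i eq_nul = _mm_cmpeq_epi32(data, _mm_setzero_si128());
        /* Each matching 32-bit lane contributes four bits to the byte mask. */
        return _mm_movemask_epi8(_mm_or_si128(eq_c, eq_nul));          /* pmovmskb */
    }

The real routine keeps the two masks separate so it can tell whether the terminator appears before the match; the OR here only shows the detection step.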
  memset.S
      32  movd d, %xmm0; \
      34  punpcklbw %xmm0, %xmm0; \
      35  punpcklwd %xmm0, %xmm0; \
      36  pshufd $0, %xmm0, %xmm0
      39  movd d, %xmm0; \
      41  pshufd $0, %xmm0, %xmm0
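The memset.S hits are the classic SSE2 byte broadcast: a single byte is splatted across all 16 lanes before the store loop (the shorter movd/pshufd variant broadcasts a 32-bit pattern the same way). A sketch of the byte case with intrinsics; on plain SSE2 this is essentially what _mm_set1_epi8() expands to:

    #include <emmintrin.h>
    #include <stdint.h>

    static __m128i broadcast_byte(uint8_t d)
    {
        __m128i v = _mm_cvtsi32_si128(d);   /* movd: byte in lane 0              */
        v = _mm_unpacklo_epi8(v, v);        /* punpcklbw: d d in the low word    */
        v = _mm_unpacklo_epi16(v, v);       /* punpcklwd: d d d d in low dword   */
        return _mm_shuffle_epi32(v, 0);     /* pshufd $0: dword 0 to all lanes   */
    }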
  memcmp.S
      103  pcmpeqb %xmm0, %xmm1
      196  pmovmskb %xmm0, %edx
      210  pmovmskb %xmm0, %edx
      217  pmovmskb %xmm0, %edx
      231  pmovmskb %xmm0, %edx
      238  pmovmskb %xmm0, %edx
      245  pmovmskb %xmm0, %edx
      252  pmovmskb %xmm0, %edx
      269  pmovmskb %xmm0, %edx
      276  pmovmskb %xmm0, %edx
      [all …]
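The memcmp.S hits all revolve around the same pcmpeqb/pmovmskb pair: compare 16 bytes at a time, turn byte-wise equality into a 16-bit mask, and locate the first mismatch from that mask. A hedged sketch of one block (hypothetical helper; GCC/Clang builtin used for the bit scan):

    #include <emmintrin.h>

    /* Offset of the first differing byte in two 16-byte blocks, or 16 if equal. */
    static int first_diff(const void *a, const void *b)
    {
        __m128i va = _mm_loadu_si128((const __m128i *)a);
        __m128i vb = _mm_loadu_si128((const __m128i *)b);
        unsigned eq  = (unsigned)_mm_movemask_epi8(_mm_cmpeq_epi8(va, vb));
        unsigned neq = ~eq & 0xffff;            /* set bits mark mismatching bytes */
        return neq ? __builtin_ctz(neq) : 16;
    }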
  strchr.S
      32  movdqu (%rdi), %xmm0
      34  movdqa %xmm0, %xmm4
      35  pcmpeqb %xmm1, %xmm0
      37  por %xmm4, %xmm0
      55  movdqa %xmm0, %xmm4
      58  por %xmm4, %xmm0
      61  movdqa %xmm0, %xmm4
      65  por %xmm4, %xmm0
      72  por %xmm3, %xmm0
      112  por %xmm0, %xmm5
      [all …]
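strchr.S follows the same stop-on-match-or-terminator idea as wcschr.S, only byte-wise: compare the block against the broadcast target byte and against zero, OR the two results, and scan the combined mask. A minimal sketch of one 16-byte step (hypothetical helper, not glibc's code):

    #include <emmintrin.h>

    /* Nonzero mask if this block contains c or the terminating '\0'. */
    static int strchr_block_mask(const char *p, char c)
    {
        __m128i data   = _mm_loadu_si128((const __m128i *)p);      /* movdqu  */
        __m128i eq_c   = _mm_cmpeq_epi8(data, _mm_set1_epi8(c));   /* pcmpeqb */
        __m128i eq_nul = _mm_cmpeq_epi8(data, _mm_setzero_si128());
        return _mm_movemask_epi8(_mm_or_si128(eq_c, eq_nul));      /* por + pmovmskb */
    }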
  strcmp.S
      314  pxor %xmm0, %xmm0
      330  pxor %xmm0, %xmm0
      422  pxor %xmm0, %xmm0
      444  pxor %xmm0, %xmm0
      460  pxor %xmm0, %xmm0
      550  pxor %xmm0, %xmm0
      568  pxor %xmm0, %xmm0
      585  pxor %xmm0, %xmm0
      675  pxor %xmm0, %xmm0
      693  pxor %xmm0, %xmm0
      [all …]
  strrchr.S
      32  movdqu (%rdi), %xmm0
      34  movdqa %xmm0, %xmm3
      35  pcmpeqb %xmm1, %xmm0
      37  pmovmskb %xmm0, %ecx
      61  pcmpeqb %xmm0, %xmm2
      110  movdqa %xmm3, %xmm0
      112  pminub %xmm2, %xmm0
      114  pminub %xmm4, %xmm0
      115  pminub %xmm5, %xmm0
      118  movdqa %xmm5, %xmm0
      [all …]
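The pminub chain in the strrchr.S hits is the usual trick for checking several vectors for a zero byte at once: the byte-wise minimum of the loaded blocks has a zero byte exactly when at least one of them does, so a whole 64-byte chunk needs only one compare against zero. A sketch of that idea (hypothetical helper, unaligned loads for simplicity):

    #include <emmintrin.h>

    static int block64_has_nul(const char *p)
    {
        __m128i a = _mm_loadu_si128((const __m128i *)p);
        __m128i b = _mm_loadu_si128((const __m128i *)(p + 16));
        __m128i c = _mm_loadu_si128((const __m128i *)(p + 32));
        __m128i d = _mm_loadu_si128((const __m128i *)(p + 48));
        __m128i m = _mm_min_epu8(_mm_min_epu8(a, b), _mm_min_epu8(c, d)); /* pminub */
        return _mm_movemask_epi8(_mm_cmpeq_epi8(m, _mm_setzero_si128())) != 0;
    }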
/sysdeps/x86_64/fpu/
  svml_s_sincosf16_core.S
      181  vmovss %xmm0, (%eax)
      184  vmovss %xmm0, (%eax)
      187  vmovss %xmm0, (%eax)
      190  vmovss %xmm0, (%eax)
      193  vmovss %xmm0, (%eax)
      196  vmovss %xmm0, (%eax)
      199  vmovss %xmm0, (%eax)
      202  vmovss %xmm0, (%eax)
      205  vmovss %xmm0, (%eax)
      208  vmovss %xmm0, (%eax)
      [all …]
  svml_s_wrapper_impl.h
      23  movaps %xmm0, (%rsp)
      26  movss 4(%rsp), %xmm0
      39  unpcklps %xmm0, %xmm2
      41  movaps %xmm3, %xmm0
      71  unpcklps %xmm0, %xmm2
      73  movaps %xmm3, %xmm0
      93  movaps %xmm0, (%rsp)
      101  movss %xmm0, (%rbx)
      102  movaps %xmm1, %xmm0
      112  movaps %xmm1, %xmm0
      [all …]
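The svml_s_wrapper_impl.h hits (and the matching svml_d_wrapper_impl.h entries further down) show the libmvec wrapper pattern: spill the packed argument to the stack, run a scalar routine on each lane, then repack the results with unpcklps/unpcklpd. A rough C equivalent of that flow, with a hypothetical callback type rather than glibc's internal macros:

    #include <emmintrin.h>

    /* Apply a scalar float function to each lane of a 4-float vector. */
    static __m128 apply_scalar_per_lane(__m128 x, float (*fn)(float))
    {
        float in[4], out[4];
        _mm_storeu_ps(in, x);            /* movaps %xmm0, (%rsp)          */
        for (int i = 0; i < 4; i++)      /* movss i*4(%rsp), %xmm0; call  */
            out[i] = fn(in[i]);
        return _mm_loadu_ps(out);        /* unpcklps / movaps repacking   */
    }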
  svml_d_sincos8_core.S
      131  vmovsd %xmm0, (%eax)
      134  vmovsd %xmm0, (%eax)
      137  vmovsd %xmm0, (%eax)
      140  vmovsd %xmm0, (%eax)
      143  vmovsd %xmm0, (%eax)
      146  vmovsd %xmm0, (%eax)
      149  vmovsd %xmm0, (%eax)
      152  vmovsd %xmm0, (%eax)
      155  vmovsd %xmm0, (%eax)
      158  vmovsd %xmm0, (%eax)
      [all …]
  svml_s_sincosf8_core_avx.S
      136  vmovss %xmm0, (%eax)
      139  vmovss %xmm0, (%eax)
      142  vmovss %xmm0, (%eax)
      146  vmovss %xmm0, (%eax)
      149  vmovss %xmm0, (%eax)
      152  vmovss %xmm0, (%eax)
      155  vmovss %xmm0, (%eax)
      158  vmovss %xmm0, (%eax)
      161  vmovss %xmm0, (%eax)
      164  vmovss %xmm0, (%eax)
      [all …]
  svml_s_sincosf8_core.S
      136  vmovss %xmm0, (%eax)
      139  vmovss %xmm0, (%eax)
      142  vmovss %xmm0, (%eax)
      145  vmovss %xmm0, (%eax)
      148  vmovss %xmm0, (%eax)
      151  vmovss %xmm0, (%eax)
      154  vmovss %xmm0, (%eax)
      157  vmovss %xmm0, (%eax)
      160  vmovss %xmm0, (%eax)
      163  vmovss %xmm0, (%eax)
      [all …]
  svml_s_sincosf4_core.S
      34  movaps %xmm0, 96(%rsp)
      96  movaps %xmm0, 32(%esp)
      98  movups 36(%esp), %xmm0
      114  movss %xmm0, (%eax)
      117  movss %xmm0, (%eax)
      120  movss %xmm0, (%eax)
      123  movss %xmm0, (%eax)
      126  movss %xmm0, (%eax)
      129  movss %xmm0, (%eax)
      132  movss %xmm0, (%eax)
      [all …]
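The long runs of vmovss/vmovsd/movss stores in the sincos core files above appear to be the element-by-element path: each lane's sine or cosine is written out to the caller-supplied output pointers one scalar at a time. Functionally that path amounts to a loop over the scalar sincosf() (a GNU extension), roughly:

    #define _GNU_SOURCE
    #include <math.h>

    /* Hypothetical fallback: one scalar call and two scalar stores per lane. */
    static void sincosf_lanes(const float *x, float *s, float *c, int n)
    {
        for (int i = 0; i < n; i++)
            sincosf(x[i], &s[i], &c[i]);   /* each store is one movss above */
    }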
  s_fminf.S
      24  ucomiss %xmm0, %xmm1
      26  minss %xmm1, %xmm0
      32  movss %xmm0, -4(%rsp)
      35  movss %xmm1, %xmm0 // otherwise return xmm1
      39  ucomiss %xmm0, %xmm0
      48  addss %xmm1, %xmm0
  s_fmax.S
      24  ucomisd %xmm0, %xmm1
      26  maxsd %xmm1, %xmm0
      32  movsd %xmm0, -8(%rsp)
      35  movsd %xmm1, %xmm0 // otherwise return xmm1
      39  ucomisd %xmm0, %xmm0
      48  addsd %xmm1, %xmm0
  s_fmaxf.S
      24  ucomiss %xmm0, %xmm1
      26  maxss %xmm1, %xmm0
      32  movss %xmm0, -4(%rsp)
      35  movss %xmm1, %xmm0 // otherwise return xmm1
      39  ucomiss %xmm0, %xmm0
      48  addss %xmm1, %xmm0
  s_fmin.S
      24  ucomisd %xmm0, %xmm1
      26  minsd %xmm1, %xmm0
      32  movsd %xmm0, -8(%rsp)
      35  movsd %xmm1, %xmm0 // otherwise return xmm1
      39  ucomisd %xmm0, %xmm0
      48  addsd %xmm1, %xmm0
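The four fmin/fmax entries above (s_fminf.S, s_fmax.S, s_fmaxf.S, s_fmin.S) share one structure: minss/maxss alone return the second (source) operand whenever a NaN is involved, so the code first does an unordered compare (ucomiss/ucomisd), takes the min/max on the fast path, and otherwise returns the non-NaN operand, using an add to propagate the NaN (and raise the invalid exception for a signaling NaN) when it must. In C terms the logic is roughly:

    #include <math.h>

    /* Sketch of the fmax() branch logic above; fmin() mirrors it. */
    static double fmax_sketch(double x, double y)
    {
        if (isnan(x))
            return isnan(y) ? x + y : y;   /* x + y propagates/signals the NaN */
        if (isnan(y))
            return x;
        return x >= y ? x : y;             /* the maxsd fast path */
    }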
  svml_d_wrapper_impl.h
      23  movaps %xmm0, (%rsp)
      26  movsd 8(%rsp), %xmm0
      30  unpcklpd %xmm0, %xmm1
      31  movaps %xmm1, %xmm0
      41  movaps %xmm0, (%rsp)
      50  unpcklpd %xmm0, %xmm1
      51  movaps %xmm1, %xmm0
      71  movaps %xmm0, (%rsp)
      80  movsd %xmm0, (%rbx)
      81  movapd %xmm1, %xmm0
      [all …]
/sysdeps/i386/i686/multiarch/
  memset-sse2.S
      232  pxor %xmm0, %xmm0
      234  movd %eax, %xmm0
      235  pshufd $0, %xmm0, %xmm0
      241  movdqu %xmm0, (%edx)
      247  movd %xmm0, %eax
      297  movdqa %xmm0, (%edx)
      310  movdqa %xmm0, (%edx)
      330  movdqa %xmm0, (%edx)
      353  movdqa %xmm0, (%edx)
      369  movntdq %xmm0, (%edx)
      [all …]
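Besides the broadcast and the aligned movdqa stores, the i386 memset-sse2.S hits include a movntdq store: for fills much larger than the cache, the data is written with non-temporal stores that bypass the cache hierarchy, followed by a store fence. A hedged sketch of that path (hypothetical helper; assumes a 16-byte-aligned destination and a size that is a multiple of 16):

    #include <emmintrin.h>
    #include <stddef.h>

    static void fill_nontemporal(void *dst, char value, size_t nbytes)
    {
        __m128i v = _mm_set1_epi8(value);
        char *p = (char *)dst;
        for (size_t i = 0; i < nbytes; i += 16)
            _mm_stream_si128((__m128i *)(p + i), v);   /* movntdq             */
        _mm_sfence();                                  /* order the NT stores */
    }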
  memset-sse2-rep.S
      232  pxor %xmm0, %xmm0
      234  movd %eax, %xmm0
      235  pshufd $0, %xmm0, %xmm0
      241  movdqu %xmm0, (%edx)
      247  movd %xmm0, %eax
      288  movdqa %xmm0, (%edx)
      301  movdqa %xmm0, (%edx)
      652  movq %xmm0, -8(%edx)
      672  movq %xmm0, -9(%edx)
      693  movq %xmm0, -10(%edx)
      [all …]
  strchr-sse2-bsf.S
      59  movdqa (%edi), %xmm0
      60  pcmpeqb %xmm0, %xmm2
      61  pcmpeqb %xmm1, %xmm0
      65  pmovmskb %xmm0, %eax
      96  movdqa (%edi), %xmm0
      97  pcmpeqb %xmm0, %xmm2
      99  pcmpeqb %xmm1, %xmm0
      101  pmovmskb %xmm0, %eax
      105  movdqa (%edi), %xmm0
      106  pcmpeqb %xmm0, %xmm2
      [all …]
  wcschr-sse2.S
      52  movdqu (%ecx), %xmm0
      53  pcmpeqd %xmm0, %xmm2
      54  pcmpeqd %xmm1, %xmm0
      56  pmovmskb %xmm0, %eax
      69  movdqa (%edi), %xmm0
      70  pcmpeqd %xmm0, %xmm2
      71  pcmpeqd %xmm1, %xmm0
      73  pmovmskb %xmm0, %eax
      110  movdqa (%ecx), %xmm0
      111  pcmpeqd %xmm0, %xmm2
      [all …]
/sysdeps/i386/i686/fpu/multiarch/
  s_sincosf-sse2.S
      127  mulsd %xmm0, %xmm0 /* y=t^2 */
      129  unpcklpd %xmm0, %xmm0 /* y|y */
      132  mulpd %xmm0, %xmm0 /* z=t^4|z=t^4 */
      168  shufps $1, %xmm0, %xmm0 /* move cos(x) to xmm0[0] */
      186  shufps $1, %xmm0, %xmm0 /* move sin(x) to xmm0[0] */
      289  mulsd %xmm0, %xmm0 /* DP y=x^2 */
      290  unpcklpd %xmm0, %xmm0 /* DP y|y */
      292  mulpd %xmm0, %xmm0 /* z=x^4|z=x^4 */
      312  shufps $1, %xmm0, %xmm0 /* move cos(x) to xmm0[0] */
      325  mulsd %xmm0, %xmm0 /* DP x^2 */
      [all …]
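The comments in these hits give away the trick: the argument is squared once, duplicated into both double lanes (unpcklpd), and the sine and cosine polynomials are then evaluated side by side with packed arithmetic before shufps extracts each result. A small sketch of the same idea, using plain truncated Taylor coefficients purely for illustration (glibc's tables use different, tuned constants):

    #include <emmintrin.h>

    static void sincosf_sketch(float xf, float *sinp, float *cosp)
    {
        double x = (double)xf, y = x * x;          /* y = x^2               */
        __m128d yy = _mm_set1_pd(y);               /* y | y  (unpcklpd)     */
        /* Lane 0 accumulates the cos series, lane 1 the sin series.        */
        __m128d p = _mm_set_pd(1.0 / 120.0, 1.0 / 24.0);
        p = _mm_add_pd(_mm_mul_pd(p, yy), _mm_set_pd(-1.0 / 6.0, -0.5));
        p = _mm_add_pd(_mm_mul_pd(p, yy), _mm_set1_pd(1.0));
        double r[2];
        _mm_storeu_pd(r, p);
        *cosp = (float)r[0];                       /* 1 - y/2 + y^2/24       */
        *sinp = (float)(x * r[1]);                 /* x*(1 - y/6 + y^2/120)  */
    }

This is only accurate for small arguments; the real routine also performs range reduction, which is not shown here.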
/sysdeps/x86_64/multiarch/
  strcmp-sse42.S
      447  pcmpistri $0x3a,%xmm0, %xmm0
      535  pcmpistri $0x3a,%xmm0, %xmm0
      623  pcmpistri $0x3a,%xmm0, %xmm0
      712  pcmpistri $0x3a,%xmm0, %xmm0
      802  pcmpistri $0x3a,%xmm0, %xmm0
      891  pcmpistri $0x3a,%xmm0, %xmm0
      980  pcmpistri $0x3a,%xmm0, %xmm0
      1069  pcmpistri $0x3a,%xmm0, %xmm0
      1159  pcmpistri $0x3a,%xmm0, %xmm0
      1248  pcmpistri $0x3a,%xmm0, %xmm0
      [all …]
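pcmpistri $0x3a with a register compared against itself is the SSE4.2 idiom for finding the terminating NUL: 0x3a selects signed-byte elements, "equal each" aggregation, and masked-negative polarity, so ECX ends up holding the index of the first zero byte in the block (16 if there is none). With intrinsics (requires -msse4.2):

    #include <nmmintrin.h>

    /* Index of the first '\0' byte in a 16-byte block, or 16 if none. */
    static int first_nul_index(__m128i block)
    {
        return _mm_cmpistri(block, block,
                            _SIDD_SBYTE_OPS | _SIDD_CMP_EQUAL_EACH |
                            _SIDD_MASKED_NEGATIVE_POLARITY);   /* imm8 = 0x3a */
    }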
  memcpy-ssse3-back.S
      110  movdqu (%rsi), %xmm0
      200  movdqu %xmm0, (%r8)
      231  movdqu %xmm0, (%r8)
      268  movdqu %xmm0, (%r8)
      314  movdqu %xmm0, (%r8)
      351  movdqu %xmm0, (%r8)
      397  movdqu %xmm0, (%r8)
      434  movdqu %xmm0, (%r8)
      480  movdqu %xmm0, (%r8)
      517  movdqu %xmm0, (%r8)
      [all …]
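The memcpy-ssse3-back.S lines matched here are plain unaligned 16-byte loads and stores through %xmm0; in isolation they amount to the following chunk-copy pattern (a sketch, not the file's actual dispatch logic):

    #include <emmintrin.h>
    #include <stddef.h>

    /* Copy whole 16-byte chunks with unaligned SSE moves; tails omitted. */
    static void copy16_chunks(void *dst, const void *src, size_t nbytes)
    {
        char *d = (char *)dst;
        const char *s = (const char *)src;
        for (size_t i = 0; i + 16 <= nbytes; i += 16) {
            __m128i v = _mm_loadu_si128((const __m128i *)(s + i)); /* movdqu load  */
            _mm_storeu_si128((__m128i *)(d + i), v);               /* movdqu store */
        }
    }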
  strlen-vec.S
      85  pxor %xmm0, %xmm0
      127  PCMPEQ %xmm0, %xmm4
      185  movdqa (%rax), %xmm0
      189  PCMPEQ %xmm3, %xmm0
      190  pmovmskb %xmm0, %edx
      199  pxor %xmm0, %xmm0
      212  pxor %xmm0, %xmm0
      231  PCMPEQ %xmm3, %xmm0
      232  pmovmskb %xmm0, %edx
      242  PCMPEQ %xmm3, %xmm0
      [all …]
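strlen-vec.S zeroes an xmm register (pxor), compares each 16-byte block against it (PCMPEQ is the file's macro for the packed compare-equal instruction), and scans the pmovmskb mask for the terminator. One step of that loop, sketched with intrinsics (hypothetical helper; GCC/Clang builtin for the bit scan):

    #include <emmintrin.h>
    #include <stddef.h>

    /* Offset of '\0' within this aligned 16-byte block, or 16 if absent. */
    static size_t nul_offset_in_block(const char *p)
    {
        __m128i zero = _mm_setzero_si128();                        /* pxor     */
        __m128i data = _mm_load_si128((const __m128i *)p);         /* movdqa   */
        int mask = _mm_movemask_epi8(_mm_cmpeq_epi8(data, zero));  /* pmovmskb */
        return mask ? (size_t)__builtin_ctz(mask) : 16;
    }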