Lines matching refs:ecx — every line that references the %ecx register; the leading number is the line's position in the original source file. The file appears to be an i386 SSE2 memcpy/memmove implementation in the style of glibc's multiarch routines: judging from the listing itself, %eax holds the source pointer, %edx the destination, and %ecx the length. Because only %ecx-referencing lines match, many loads and stores appear without their counterparts.

# entry: load the byte count from the stack argument
69 movl LEN(%esp), %ecx
# 16..32 bytes: after a head copy (filtered out, it does not touch
# %ecx), the last 16 bytes are finished with a possibly overlapping
# unaligned pair
80 cmp $16, %ecx
83 cmpl $32, %ecx
88 movdqu -16(%eax, %ecx), %xmm1
90 movdqu %xmm1, -16(%edx, %ecx)
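The -16(%eax, %ecx) addressing is the standard overlapping-tail trick: one vector covers the first 16 bytes, a second covers the last 16, and for sizes under 32 the two simply overlap. A minimal C sketch with SSE2 intrinsics (the function name and the assumption 16 <= n <= 32 are mine, not the file's):

#include <emmintrin.h>
#include <stddef.h>

/* Hypothetical sketch: n is assumed to be in [16, 32]. */
static void copy_16_to_32(unsigned char *dst, const unsigned char *src,
                          size_t n)
{
    __m128i head = _mm_loadu_si128((const __m128i *)src);
    __m128i tail = _mm_loadu_si128((const __m128i *)(src + n - 16)); /* movdqu -16(%eax, %ecx), %xmm1 */
    _mm_storeu_si128((__m128i *)dst, head);
    _mm_storeu_si128((__m128i *)(dst + n - 16), tail);               /* movdqu %xmm1, -16(%edx, %ecx) */
}

This is why none of these size ladders needs a byte loop: every size class is covered by a constant number of vector moves.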
# 32..64 bytes: the last 32 as two unaligned vectors
94 cmpl $64, %ecx
100 movdqu -16(%eax, %ecx), %xmm2
101 movdqu -32(%eax, %ecx), %xmm3
104 movdqu %xmm2, -16(%edx, %ecx)
105 movdqu %xmm3, -32(%edx, %ecx)

# 64..128 bytes: the last 64 as four unaligned vectors
109 cmpl $128, %ecx
117 movdqu -64(%eax, %ecx), %xmm4
118 movdqu -48(%eax, %ecx), %xmm5
119 movdqu -32(%eax, %ecx), %xmm6
120 movdqu -16(%eax, %ecx), %xmm7
125 movdqu %xmm4, -64(%edx, %ecx)
126 movdqu %xmm5, -48(%edx, %ecx)
127 movdqu %xmm6, -32(%edx, %ecx)
128 movdqu %xmm7, -16(%edx, %ecx)
# setup for one of the copy paths: advance the source past the block
132 add %ecx, %eax

# backward-path setup: destination end in %esi, the (apparent) last 16
# source bytes in %xmm0, length saved in %edi, then the destination
# end rounded down to a 16-byte boundary (leal (%ecx), %ebx is just a
# register copy)
145 leal (%edx, %ecx), %esi
146 movdqu -16(%eax, %ecx), %xmm0
149 mov %ecx, %edi
150 movl %esi, %ecx
151 andl $-16, %ecx
152 leal (%ecx), %ebx
# descending aligned-store loop body: four 16-byte aligned stores,
# then the cursor in %ecx steps down by 64 (the matching loads do not
# reference %ecx and are filtered out)
181 movaps %xmm0, -64(%ecx)
183 movaps %xmm1, -48(%ecx)
184 movaps %xmm2, -32(%ecx)
185 movaps %xmm3, -16(%ecx)
186 subl $64, %ecx
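Lines 181-186 read as the body of a descending copy loop: %ecx is a 16-byte-aligned destination cursor moving down 64 bytes per iteration. A hedged C rendering (names and the simplified loop bound are mine; the loads are reconstructed, since only the stores reference %ecx):

#include <emmintrin.h>

/* Hypothetical sketch: copy downward in 64-byte blocks; p must be
   16-byte aligned, s is the matching source cursor, stop the lowest
   address this loop may write. */
static void copy_down_64(unsigned char *p, const unsigned char *s,
                         unsigned char *stop)
{
    while (p - 64 >= stop) {
        __m128i x0 = _mm_loadu_si128((const __m128i *)(s - 64));
        __m128i x1 = _mm_loadu_si128((const __m128i *)(s - 48));
        __m128i x2 = _mm_loadu_si128((const __m128i *)(s - 32));
        __m128i x3 = _mm_loadu_si128((const __m128i *)(s - 16));
        _mm_store_si128((__m128i *)(p - 64), x0);  /* movaps %xmm0, -64(%ecx) */
        _mm_store_si128((__m128i *)(p - 48), x1);
        _mm_store_si128((__m128i *)(p - 32), x2);
        _mm_store_si128((__m128i *)(p - 16), x3);
        p -= 64;                                   /* subl $64, %ecx */
        s -= 64;
    }
}

Copying from the top of the block downward is what makes the overlapping memmove case (destination above source) safe.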
# tails below 16 bytes: 1-, 2-, 8- and 4-byte moves, all addressed
# from the end of the block (-k(%eax,%ecx)); the 8-byte case adjusts
# %ecx with subl $8
206 testl %ecx, %ecx

212 movzbl -1(%eax,%ecx), %ebx
214 movb %bl, -1(%edx,%ecx)

219 movzwl -2(%eax,%ecx), %ebx
221 movw %bx, -2(%edx,%ecx)

227 movl -4(%eax,%ecx), %ebx
228 movl -8(%eax,%ecx), %esi
229 movl %ebx, -4(%edx,%ecx)
230 movl %esi, -8(%edx,%ecx)
231 subl $8, %ecx

237 movl -4(%eax,%ecx), %eax
239 movl %eax, -4(%edx,%ecx)
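Lines 206-239 finish copies shorter than one vector. Every move is addressed from the end of the block, the C analogue of -k(%eax,%ecx) being src + n - k. A simplified, hypothetical rendering (the real code branches on the exact size; fixed-size memcpy calls stand in for the byte/word/dword moves and compile to single instructions):

#include <string.h>
#include <stddef.h>

/* Hypothetical sketch of the sub-16-byte tails. */
static void copy_short_tail(unsigned char *dst, const unsigned char *src,
                            size_t n)
{
    if (n == 0)
        return;                                    /* testl %ecx, %ecx */
    while (n >= 8) {                               /* dword pair + subl $8, %ecx */
        memcpy(dst + n - 8, src + n - 8, 8);
        n -= 8;
    }
    if (n >= 4) { memcpy(dst + n - 4, src + n - 4, 4); n -= 4; }
    if (n >= 2) { memcpy(dst + n - 2, src + n - 2, 2); n -= 2; }
    if (n == 1)
        dst[0] = src[0];                           /* movzbl/movb pair */
}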
# non-temporal variant of the descending loop: movntdq stores bypass
# the cache for very large copies
249 movntdq %xmm0, -64(%ecx)
251 movntdq %xmm1, -48(%ecx)
252 movntdq %xmm2, -32(%ecx)
253 movntdq %xmm3, -16(%ecx)
254 subl $64, %ecx
# overlap check at what appears to be a second entry point: compare
# dst + len against the source, then reload the length
269 add %edx, %ecx
270 cmp %eax, %ecx
271 movl LEN(%esp), %ecx
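Lines 269-270 are the memmove-style direction test: dst + len is compared against the source pointer before the length is reloaded. The classic C formulation collapses this into one unsigned comparison (a sketch under my reading of these three lines):

#include <stdint.h>
#include <stddef.h>

/* Hypothetical sketch: a forward copy never reads a byte it has
   already written iff dst >= src + n, or (via unsigned wraparound)
   dst < src; both cases make this comparison true. */
static int forward_copy_is_safe(uintptr_t dst, uintptr_t src, size_t n)
{
    return dst - src >= n;
}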
# the same small-size ladders, second routine: finish 16..32, 32..64
# and 64..128 byte copies from the end with unaligned vectors
276 cmp $16, %ecx
279 cmpl $32, %ecx
284 movdqu -16(%eax, %ecx), %xmm1
286 movdqu %xmm1, -16(%edx, %ecx)

290 cmpl $64, %ecx
296 movdqu -16(%eax, %ecx), %xmm2
297 movdqu -32(%eax, %ecx), %xmm3
300 movdqu %xmm2, -16(%edx, %ecx)
301 movdqu %xmm3, -32(%edx, %ecx)

305 cmpl $128, %ecx
313 movdqu -64(%eax, %ecx), %xmm4
314 movdqu -48(%eax, %ecx), %xmm5
315 movdqu -32(%eax, %ecx), %xmm6
316 movdqu -16(%eax, %ecx), %xmm7
321 movdqu %xmm4, -64(%edx, %ecx)
322 movdqu %xmm5, -48(%edx, %ecx)
323 movdqu %xmm6, -32(%edx, %ecx)
324 movdqu %xmm7, -16(%edx, %ecx)
# forward-path setup: park the last 64 source bytes in %xmm4-7 to be
# stored after the main loop, save the length, and round the
# destination up to the next 16-byte boundary (the intervening %ebx
# arithmetic does not reference %ecx and is filtered out)
333 movdqu -16(%eax, %ecx), %xmm4
334 movdqu -32(%eax, %ecx), %xmm5
335 movdqu -48(%eax, %ecx), %xmm6
336 movdqu -64(%eax, %ecx), %xmm7
337 leal (%edx, %ecx), %esi
341 mov %ecx, %edi
342 leal 16(%edx), %ecx
343 andl $-16, %ecx
344 movl %ecx, %ebx
348 subl %ecx, %ebx
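Lines 342-343 compute the first 16-byte-aligned store address above the destination. Rounding up from dst + 16 rather than dst + 15 always skips at least one byte, which is fine here because a separate unaligned store covers the head. As arithmetic (hypothetical helper name):

#include <stdint.h>

/* Hypothetical sketch of: leal 16(%edx), %ecx; andl $-16, %ecx. */
static uintptr_t first_aligned_store(uintptr_t dst)
{
    return (dst + 16) & ~(uintptr_t)15;
}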
# ascending aligned-store loop body; movdqa and movaps store the same
# thing, movaps just has a shorter encoding
375 movdqa %xmm0, (%ecx)
377 movaps %xmm1, 16(%ecx)
378 movaps %xmm2, 32(%ecx)
379 movaps %xmm3, 48(%ecx)
380 addl $64, %ecx
# forward-path tails: 1-, 2-, 4-byte integer moves and an 8-byte move
# through %xmm1, again addressed from the end of the block
399 testl %ecx, %ecx

405 movzbl -1(%eax,%ecx), %ebx
407 movb %bl, -1(%edx,%ecx)

412 movzwl -2(%eax,%ecx), %ebx
414 movw %bx, -2(%edx,%ecx)

420 movl -4(%eax,%ecx), %eax
422 movl %eax, -4(%edx,%ecx)

427 movq -8(%eax, %ecx), %xmm1
429 movq %xmm1, -8(%edx, %ecx)
# non-temporal variant of the ascending loop
445 movntdq %xmm0, (%ecx)
447 movntdq %xmm1, 16(%ecx)
448 movntdq %xmm2, 32(%ecx)
449 movntdq %xmm3, 48(%ecx)
450 addl $64, %ecx
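The movntdq loops differ from the movaps loops only in the store instruction: non-temporal stores go around the cache, so a huge copy does not evict the caller's working set. A hedged C sketch of the ascending variant at lines 445-450 (names and bounds are mine; the closing _mm_sfence is standard practice for streaming stores and would not show up in a %ecx-filtered listing anyway):

#include <emmintrin.h>

/* Hypothetical sketch: ascending 64-byte blocks with streaming
   stores; p must be 16-byte aligned. */
static void stream_up_64(unsigned char *p, const unsigned char *s,
                         unsigned char *end)
{
    while (p + 64 <= end) {
        __m128i x0 = _mm_loadu_si128((const __m128i *)(s +  0));
        __m128i x1 = _mm_loadu_si128((const __m128i *)(s + 16));
        __m128i x2 = _mm_loadu_si128((const __m128i *)(s + 32));
        __m128i x3 = _mm_loadu_si128((const __m128i *)(s + 48));
        _mm_stream_si128((__m128i *)(p +  0), x0); /* movntdq %xmm0, (%ecx) */
        _mm_stream_si128((__m128i *)(p + 16), x1);
        _mm_stream_si128((__m128i *)(p + 32), x2);
        _mm_stream_si128((__m128i *)(p + 48), x3);
        p += 64;                                   /* addl $64, %ecx */
        s += 64;
    }
    _mm_sfence();  /* order the streaming stores before returning */
}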
# third routine: size check, then the non-temporal threshold, i.e.
# the same comparison against half the shared cache size in its three
# build variants (compile-time constant, PIC via GOTOFF, absolute
# symbol)
466 cmp $16, %ecx
470 cmp $SHARED_CACHE_SIZE_HALF, %ecx
475 cmp __x86_shared_cache_size_half@GOTOFF(%ebx), %ecx
477 cmp __x86_shared_cache_size_half, %ecx
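The threshold logic: only copies larger than half the shared cache take the non-temporal path, since smaller ones plausibly fit in cache and benefit from staying there. A one-line sketch (the threshold is passed as a parameter because __x86_shared_cache_size_half is internal to glibc, and the comparison direction is my assumption):

#include <stddef.h>

/* Hypothetical sketch of the decision behind lines 470-477. */
static int use_nontemporal_stores(size_t n, size_t shared_cache_half)
{
    return n > shared_cache_half;
}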
# large-copy prologue: copy the last 16/32/64 bytes up front,
# branching out early when the size is below 32/64/128
483 movdqu -16(%eax, %ecx), %xmm1
484 cmpl $32, %ecx
486 movdqu %xmm1, -16(%edx, %ecx)

490 movdqu -32(%eax, %ecx), %xmm1
491 cmpl $64, %ecx
493 movdqu %xmm1, -32(%edx, %ecx)

498 movdqu -48(%eax, %ecx), %xmm2
499 movdqu -64(%eax, %ecx), %xmm3
500 cmpl $128, %ecx
503 movdqu %xmm2, -48(%edx, %ecx)
504 movdqu %xmm3, -64(%edx, %ecx)
# round dst + len down to a 64-byte boundary and walk the cursor down
# in 64-byte steps against the bound in %ebx
511 addl %edx, %ecx
512 andl $-64, %ecx
518 subl $64, %ecx
519 cmpl %ebx, %ecx
522 subl $64, %ecx
523 cmpl %ebx, %ecx

# another loop bound check against %ebx
540 cmpl %ebx, %ecx
# 128 bytes per iteration, as two 64-byte halves of unaligned loads
# and stores
578 movdqu -64(%eax, %ecx), %xmm4
579 movdqu -48(%eax, %ecx), %xmm5
580 movdqu -32(%eax, %ecx), %xmm6
581 movdqu -16(%eax, %ecx), %xmm7
586 movdqu %xmm4, -64(%edx, %ecx)
587 movdqu %xmm5, -48(%edx, %ecx)
588 movdqu %xmm6, -32(%edx, %ecx)
589 movdqu %xmm7, -16(%edx, %ecx)

595 movdqu -128(%eax, %ecx), %xmm4
596 movdqu -112(%eax, %ecx), %xmm5
597 movdqu -96(%eax, %ecx), %xmm6
598 movdqu -80(%eax, %ecx), %xmm7
603 movdqu %xmm4, -128(%edx, %ecx)
604 movdqu %xmm5, -112(%edx, %ecx)
605 movdqu %xmm6, -96(%edx, %ecx)
606 movdqu %xmm7, -80(%edx, %ecx)
# round dst + len down to a 128-byte boundary for the block loop
613 addl %edx, %ecx
614 andl $-128, %ecx

# block-loop condition against the bound in %ebx
637 cmpl %ebx, %ecx
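Lines 613-614 round dst + len down to a 128-byte boundary so the block loop at lines 578-606 only ever handles whole blocks; the ragged tail above the boundary is covered by the unaligned copies in the ladder at lines 483-504. The arithmetic (hypothetical helper name):

#include <stdint.h>

/* Hypothetical sketch of: addl %edx, %ecx; andl $-128, %ecx. */
static uintptr_t last_full_block(uintptr_t dst, uintptr_t len)
{
    return (dst + len) & ~(uintptr_t)127;
}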
# closing tails: 2-, 8- (through %xmm1) and 4-byte moves addressed
# from the end
648 testl %ecx, %ecx

655 movzwl -2(%eax,%ecx), %ebx
656 movw %bx, -2(%edx,%ecx)

661 movq -8(%eax, %ecx), %xmm1
663 movq %xmm1, -8(%edx, %ecx)

669 movl -4(%eax,%ecx), %ebx
670 movl %ebx, -4(%edx,%ecx)
# apparently one more entry point: reload the length and advance the
# source past the block
675 movl LEN(%esp), %ecx
676 add %ecx, %eax