gemv_n_sse2.s
	addpd	%xmm15, %xmm5

	movapd	%xmm4, 0 * SIZE(Y1)
	movapd	%xmm5, 2 * SIZE(Y1)

	addq	$4 * SIZE, A1
	addq	$4 * SIZE, A2
	addq	$4 * SIZE, Y1
	ALIGN_3

.L17:
	/* handle 2 remaining rows of the current 4-column block */
	testq	$2, MM
	je	.L18

	movapd	0 * SIZE(Y1), %xmm4

	movapd	0 * SIZE(A1), %xmm8
	movapd	0 * SIZE(A2), %xmm10
	movapd	0 * SIZE(A1, LDA, 2), %xmm12
	movapd	0 * SIZE(A2, LDA, 2), %xmm14

	mulpd	%xmm0, %xmm8
	mulpd	%xmm1, %xmm10
	mulpd	%xmm2, %xmm12
	mulpd	%xmm3, %xmm14

	addpd	%xmm8, %xmm4
	addpd	%xmm10, %xmm4
	addpd	%xmm12, %xmm4
	addpd	%xmm14, %xmm4

	movapd	%xmm4, 0 * SIZE(Y1)

	addq	$2 * SIZE, A1
	addq	$2 * SIZE, A2
	addq	$2 * SIZE, Y1
	ALIGN_3

.L18:
	/* handle the last remaining row */
	testq	$1, MM
	je	.L19

	movsd	0 * SIZE(Y1), %xmm4

	movsd	0 * SIZE(A1), %xmm8
	movsd	0 * SIZE(A2), %xmm9
	movsd	0 * SIZE(A1, LDA, 2), %xmm10
	movsd	0 * SIZE(A2, LDA, 2), %xmm11

	mulsd	%xmm0, %xmm8
	mulsd	%xmm1, %xmm9
	mulsd	%xmm2, %xmm10
	mulsd	%xmm3, %xmm11

	addsd	%xmm8, %xmm4
	addsd	%xmm9, %xmm4
	addsd	%xmm10, %xmm4
	addsd	%xmm11, %xmm4

	movsd	%xmm4, 0 * SIZE(Y1)

	addq	$1 * SIZE, A1
	addq	$1 * SIZE, A2
	addq	$1 * SIZE, Y1
	ALIGN_3

.L19:
	/* move on to the next block of 4 columns */
	decq	J
	jg	.L11
	ALIGN_3

.L20:
	/* N remainder: two columns left */
	testq	$2, N
	je	.L30

	movq	BUFFER, Y1

	movq	A,  A1
	leaq	(A, LDA, 1), A2
	leaq	(A, LDA, 2), A

	movsd	(X), %xmm0
	addq	INCX, X
	movsd	(X), %xmm1
	addq	INCX, X

	mulsd	STACK_ALPHA, %xmm0
	mulsd	STACK_ALPHA, %xmm1

	unpcklpd %xmm0, %xmm0
	unpcklpd %xmm1, %xmm1

	testq	$SIZE, A
	je	.L22

	movsd	(Y1), %xmm4

	movsd	(A1), %xmm8
	movsd	(A2), %xmm9

	mulsd	%xmm0, %xmm8
	mulsd	%xmm1, %xmm9

	addsd	%xmm8, %xmm4
	addsd	%xmm9, %xmm4

	movsd	%xmm4, 0 * SIZE(Y1)

	addq	$1 * SIZE, A1
	addq	$1 * SIZE, A2
	addq	$1 * SIZE, Y1
	ALIGN_3

.L22:
	movq	MM, I
	sarq	$4, I
	jle	.L25

	movapd	0 * SIZE(A1), %xmm8
	movapd	2 * SIZE(A1), %xmm9
	movapd	4 * SIZE(A1), %xmm10
	movapd	6 * SIZE(A1), %xmm11

	movapd	0 * SIZE(A2), %xmm12
	movapd	2 * SIZE(A2), %xmm13
	movapd	4 * SIZE(A2), %xmm14
	movapd	6 * SIZE(A2), %xmm15

	movapd	0 * SIZE(Y1), %xmm4
	movapd	2 * SIZE(Y1), %xmm5
	movapd	4 * SIZE(Y1), %xmm6
	movapd	6 * SIZE(Y1), %xmm7

	mulpd	%xmm0, %xmm8
	mulpd	%xmm0, %xmm9
	mulpd	%xmm0, %xmm10
	mulpd	%xmm0, %xmm11

	decq	I
	jle	.L24
	ALIGN_3

.L23:
	/* main loop: 16 rows per iteration over the two columns */
	PREFETCH	PREFETCHSIZE * SIZE(A1)

	addpd	%xmm8, %xmm4
	movapd	8 * SIZE(A1), %xmm8
	mulpd	%xmm1, %xmm12
	addpd	%xmm9, %xmm5
	movapd	10 * SIZE(A1), %xmm9
	mulpd	%xmm1, %xmm13
	addpd	%xmm10, %xmm6
	movapd	12 * SIZE(A1), %xmm10
	mulpd	%xmm1, %xmm14
	addpd	%xmm11, %xmm7
	movapd	14 * SIZE(A1), %xmm11
	mulpd	%xmm1, %xmm15

	PREFETCH	PREFETCHSIZE * SIZE(Y1)

	addpd	%xmm12, %xmm4
	movapd	8 * SIZE(A2), %xmm12
	mulpd	%xmm0, %xmm8
	addpd	%xmm13, %xmm5
	movapd	10 * SIZE(A2), %xmm13
	mulpd	%xmm0, %xmm9
	addpd	%xmm14, %xmm6
	movapd	12 * SIZE(A2), %xmm14
	mulpd	%xmm0, %xmm10
	addpd	%xmm15, %xmm7
	movapd	14 * SIZE(A2), %xmm15
	mulpd	%xmm0, %xmm11

	movapd	%xmm4, 0 * SIZE(Y1)
	movapd	8 * SIZE(Y1), %xmm4
	movapd	%xmm5, 2 * SIZE(Y1)
	movapd	10 * SIZE(Y1), %xmm5
	movapd	%xmm6, 4 * SIZE(Y1)
	movapd	12 * SIZE(Y1), %xmm6
	movapd	%xmm7, 6 * SIZE(Y1)
	movapd	14 * SIZE(Y1), %xmm7

	PREFETCH	PREFETCHSIZE * SIZE(A2)

	addpd	%xmm8, %xmm4
	movapd	16 * SIZE(A1), %xmm8
	mulpd	%xmm1, %xmm12
	addpd	%xmm9, %xmm5
	movapd	18 * SIZE(A1), %xmm9
	mulpd	%xmm1, %xmm13
	addpd	%xmm10, %xmm6
	movapd	20 * SIZE(A1), %xmm10
	mulpd	%xmm1, %xmm14
	addpd	%xmm11, %xmm7
	movapd	22 * SIZE(A1), %xmm11
	mulpd	%xmm1, %xmm15

	addpd	%xmm12, %xmm4
	movapd	16 * SIZE(A2), %xmm12
	mulpd	%xmm0, %xmm8
	addpd	%xmm13, %xmm5
	movapd	18 * SIZE(A2), %xmm13
	mulpd	%xmm0, %xmm9
	addpd	%xmm14, %xmm6
	movapd	20 * SIZE(A2), %xmm14
	mulpd	%xmm0, %xmm10
	addpd	%xmm15, %xmm7
	movapd	22 * SIZE(A2), %xmm15
	mulpd	%xmm0, %xmm11

	movapd	%xmm4, 8 * SIZE(Y1)
	movapd	16 * SIZE(Y1), %xmm4
	movapd	%xmm5, 10 * SIZE(Y1)
	movapd	18 * SIZE(Y1), %xmm5
	movapd	%xmm6, 12 * SIZE(Y1)
	movapd	20 * SIZE(Y1), %xmm6
	movapd	%xmm7, 14 * SIZE(Y1)
	movapd	22 * SIZE(Y1), %xmm7

	addq	$16 * SIZE, A1
	addq	$16 * SIZE, A2
	addq	$16 * SIZE, Y1

	decq	I
	jg	.L23
	ALIGN_3

.L24:
	addpd	%xmm8, %xmm4
	movapd	8 * SIZE(A1), %xmm8
	mulpd	%xmm1, %xmm12
	addpd	%xmm9, %xmm5
	movapd	10 * SIZE(A1), %xmm9
	mulpd	%xmm1, %xmm13
	addpd	%xmm10, %xmm6
	movapd	12 * SIZE(A1), %xmm10
	mulpd	%xmm1, %xmm14
	addpd	%xmm11, %xmm7
	movapd	14 * SIZE(A1), %xmm11
	mulpd	%xmm1, %xmm15

	addpd	%xmm12, %xmm4
	movapd	8 * SIZE(A2), %xmm12
	mulpd	%xmm0, %xmm8
	addpd	%xmm13, %xmm5
	movapd	10 * SIZE(A2), %xmm13
	mulpd	%xmm0, %xmm9
	addpd	%xmm14, %xmm6
	movapd	12 * SIZE(A2), %xmm14
	mulpd	%xmm0, %xmm10
	addpd	%xmm15, %xmm7
	movapd	14 * SIZE(A2), %xmm15
	mulpd	%xmm0, %xmm11

	movapd	%xmm4, 0 * SIZE(Y1)
	movapd	8 * SIZE(Y1), %xmm4
	movapd	%xmm5, 2 * SIZE(Y1)
	movapd	10 * SIZE(Y1), %xmm5
	movapd	%xmm6, 4 * SIZE(Y1)
	movapd	12 * SIZE(Y1), %xmm6
	movapd	%xmm7, 6 * SIZE(Y1)
	movapd	14 * SIZE(Y1), %xmm7

	addpd	%xmm8, %xmm4
	mulpd	%xmm1, %xmm12
	addpd	%xmm9, %xmm5
	mulpd	%xmm1, %xmm13
	addpd	%xmm10, %xmm6
	mulpd	%xmm1, %xmm14
	addpd	%xmm11, %xmm7
	mulpd	%xmm1, %xmm15

	addpd	%xmm12, %xmm4
	addpd	%xmm13, %xmm5
	addpd	%xmm14, %xmm6
	addpd	%xmm15, %xmm7

	movapd	%xmm4, 8 * SIZE(Y1)
	movapd	%xmm5, 10 * SIZE(Y1)
	movapd	%xmm6, 12 * SIZE(Y1)
	movapd	%xmm7, 14 * SIZE(Y1)

	addq	$16 * SIZE, A1
	addq	$16 * SIZE, A2
	addq	$16 * SIZE, Y1
	ALIGN_3

.L25:
	testq	$8, MM
	je	.L26

	movapd	0 * SIZE(Y1), %xmm4
	movapd	2 * SIZE(Y1), %xmm5
	movapd	4 * SIZE(Y1), %xmm6
	movapd	6 * SIZE(Y1), %xmm7

	movapd	0 * SIZE(A1), %xmm8
	movapd	2 * SIZE(A1), %xmm9
	movapd	4 * SIZE(A1), %xmm10
	movapd	6 * SIZE(A1), %xmm11

	movapd	0 * SIZE(A2), %xmm12
	movapd	2 * SIZE(A2), %xmm13
	movapd	4 * SIZE(A2), %xmm14
	movapd	6 * SIZE(A2), %xmm15

	mulpd	%xmm0, %xmm8
	mulpd	%xmm0, %xmm9
	mulpd	%xmm0, %xmm10
	mulpd	%xmm0, %xmm11

	addpd	%xmm8, %xmm4
	mulpd	%xmm1, %xmm12
	addpd	%xmm9, %xmm5
	mulpd	%xmm1, %xmm13
	addpd	%xmm10, %xmm6
	mulpd	%xmm1, %xmm14
	addpd	%xmm11, %xmm7
	mulpd	%xmm1, %xmm15

	addpd	%xmm12, %xmm4
	addpd	%xmm13, %xmm5
	addpd	%xmm14, %xmm6
	addpd	%xmm15, %xmm7

	movapd	%xmm4, 0 * SIZE(Y1)
	movapd	%xmm5, 2 * SIZE(Y1)
	movapd	%xmm6, 4 * SIZE(Y1)
	movapd	%xmm7, 6 * SIZE(Y1)

	addq	$8 * SIZE, A1
	addq	$8 * SIZE, A2
	addq	$8 * SIZE, Y1
	ALIGN_3

.L26:
	testq	$4, MM
	je	.L27

	movapd	0 * SIZE(Y1), %xmm4
	movapd	2 * SIZE(Y1), %xmm5

	movapd	0 * SIZE(A1), %xmm8
	movapd	2 * SIZE(A1), %xmm9
	movapd	0 * SIZE(A2), %xmm10
	movapd	2 * SIZE(A2), %xmm11

	mulpd	%xmm0, %xmm8
	mulpd	%xmm0, %xmm9
	mulpd	%xmm1, %xmm10
	mulpd	%xmm1, %xmm11

	addpd	%xmm8, %xmm4
	addpd	%xmm9, %xmm5
	addpd	%xmm10, %xmm4
	addpd	%xmm11, %xmm5

	movapd	%xmm4, 0 * SIZE(Y1)
	movapd	%xmm5, 2 * SIZE(Y1)

	addq	$4 * SIZE, A1
	addq	$4 * SIZE, A2
	addq	$4 * SIZE, Y1
	ALIGN_3

.L27:
	testq	$2, MM
	je	.L28

	movapd	0 * SIZE(Y1), %xmm4

	movapd	0 * SIZE(A1), %xmm8
	movapd	0 * SIZE(A2), %xmm10

	mulpd	%xmm0, %xmm8
	mulpd	%xmm1, %xmm10

	addpd	%xmm8, %xmm4
	addpd	%xmm10, %xmm4

	movapd	%xmm4, 0 * SIZE(Y1)

	addq	$2 * SIZE, A1
	addq	$2 * SIZE, A2
	addq	$2 * SIZE, Y1
	ALIGN_3

.L28:
	testq	$1, MM
	je	.L30

	movsd	0 * SIZE(Y1), %xmm4

	movsd	0 * SIZE(A1), %xmm8
	movsd	0 * SIZE(A2), %xmm9

	mulsd	%xmm0, %xmm8
	mulsd	%xmm1, %xmm9

	addsd	%xmm8, %xmm4
	addsd	%xmm9, %xmm4

	movsd	%xmm4, 0 * SIZE(Y1)

	addq	$1 * SIZE, A1
	addq	$1 * SIZE, A2
	addq	$1 * SIZE, Y1
	ALIGN_3

.L30:
	/* N remainder: one column left */
	testq	$1, N
	je	.L990

	movq	BUFFER, Y1
	movq	A,  A1

	movsd	(X), %xmm0
	mulsd	STACK_ALPHA, %xmm0
	unpcklpd %xmm0, %xmm0

	testq	$SIZE, A
	je	.L32

	movsd	(Y1), %xmm4
	movsd	(A1), %xmm8

	mulsd	%xmm0, %xmm8
	addsd	%xmm8, %xmm4

	movsd	%xmm4, (Y1)

	addq	$1 * SIZE, A1
	addq	$1 * SIZE, Y1
	ALIGN_3

.L32:
	movq	MM, I
	sarq	$4, I
	jle	.L35

	movapd	0 * SIZE(A1), %xmm8
	movapd	2 * SIZE(A1), %xmm9
	movapd	4 * SIZE(A1), %xmm10
	movapd	6 * SIZE(A1), %xmm11

	movapd	8 * SIZE(A1), %xmm12
	mulpd	%xmm0, %xmm8
	movapd	10 * SIZE(A1), %xmm13
	mulpd	%xmm0, %xmm9
	movapd	12 * SIZE(A1), %xmm14
	mulpd	%xmm0, %xmm10
	movapd	14 * SIZE(A1), %xmm15
	mulpd	%xmm0, %xmm11

	movapd	0 * SIZE(Y1), %xmm4
	movapd	2 * SIZE(Y1), %xmm5
	movapd	4 * SIZE(Y1), %xmm6
	movapd	6 * SIZE(Y1), %xmm7

	decq	I
	jle	.L34
	ALIGN_3

.L33:
	/* main loop: 16 rows per iteration over the single column */
	PREFETCH	PREFETCHSIZE * SIZE(A1)

	addpd	%xmm8, %xmm4
	movapd	16 * SIZE(A1), %xmm8
	mulpd	%xmm0, %xmm12
	addpd	%xmm9, %xmm5
	movapd	18 * SIZE(A1), %xmm9
	mulpd	%xmm0, %xmm13
	addpd	%xmm10, %xmm6
	movapd	20 * SIZE(A1), %xmm10
	mulpd	%xmm0, %xmm14
	addpd	%xmm11, %xmm7
	movapd	22 * SIZE(A1), %xmm11
	mulpd	%xmm0, %xmm15

	movapd	%xmm4, 0 * SIZE(Y1)
	movapd	8 * SIZE(Y1), %xmm4
	movapd	%xmm5, 2 * SIZE(Y1)
	movapd	10 * SIZE(Y1), %xmm5
	movapd	%xmm6, 4 * SIZE(Y1)
	movapd	12 * SIZE(Y1), %xmm6
	movapd	%xmm7, 6 * SIZE(Y1)
	movapd	14 * SIZE(Y1), %xmm7

	PREFETCH	PREFETCHSIZE * SIZE(Y1)

	addpd	%xmm12, %xmm4
	movapd	24 * SIZE(A1), %xmm12
	mulpd	%xmm0, %xmm8
	addpd	%xmm13, %xmm5
	movapd	26 * SIZE(A1), %xmm13
	mulpd	%xmm0, %xmm9
	addpd	%xmm14, %xmm6
	movapd	28 * SIZE(A1), %xmm14
	mulpd	%xmm0, %xmm10
	addpd	%xmm15, %xmm7
	movapd	30 * SIZE(A1), %xmm15
	mulpd	%xmm0, %xmm11

	movapd	%xmm4, 8 * SIZE(Y1)
	movapd	16 * SIZE(Y1), %xmm4
	movapd	%xmm5, 10 * SIZE(Y1)
	movapd	18 * SIZE(Y1), %xmm5
	movapd	%xmm6, 12 * SIZE(Y1)
	movapd	20 * SIZE(Y1), %xmm6
	movapd	%xmm7, 14 * SIZE(Y1)
	movapd	22 * SIZE(Y1), %xmm7

	addq	$16 * SIZE, A1
	addq	$16 * SIZE, Y1

	decq	I
	jg	.L33
	ALIGN_3

.L34:
	addpd	%xmm8, %xmm4
	mulpd	%xmm0, %xmm12
	addpd	%xmm9, %xmm5
	mulpd	%xmm0, %xmm13
	addpd	%xmm10, %xmm6
	mulpd	%xmm0, %xmm14
	addpd	%xmm11, %xmm7
	mulpd	%xmm0, %xmm15

	movapd	%xmm4, 0 * SIZE(Y1)
	movapd	8 * SIZE(Y1), %xmm4
	movapd	%xmm5, 2 * SIZE(Y1)
	movapd	10 * SIZE(Y1), %xmm5
	movapd	%xmm6, 4 * SIZE(Y1)
	movapd	12 * SIZE(Y1), %xmm6
	movapd	%xmm7, 6 * SIZE(Y1)
	movapd	14 * SIZE(Y1), %xmm7

	addpd	%xmm12, %xmm4
	addpd	%xmm13, %xmm5
	addpd	%xmm14, %xmm6
	addpd	%xmm15, %xmm7

	movapd	%xmm4, 8 * SIZE(Y1)
	movapd	%xmm5, 10 * SIZE(Y1)
	movapd	%xmm6, 12 * SIZE(Y1)
	movapd	%xmm7, 14 * SIZE(Y1)

	addq	$16 * SIZE, A1
	addq	$16 * SIZE, A2
	addq	$16 * SIZE, Y1
	ALIGN_3

.L35:
	testq	$8, MM
	je	.L36

	movapd	0 * SIZE(Y1), %xmm4
	movapd	2 * SIZE(Y1), %xmm5
	movapd	4 * SIZE(Y1), %xmm6
	movapd	6 * SIZE(Y1), %xmm7

	movapd	0 * SIZE(A1), %xmm8
	movapd	2 * SIZE(A1), %xmm9
	movapd	4 * SIZE(A1), %xmm10
	movapd	6 * SIZE(A1), %xmm11

	mulpd	%xmm0, %xmm8
	mulpd	%xmm0, %xmm9
	mulpd	%xmm0, %xmm10
	mulpd	%xmm0, %xmm11

	addpd	%xmm8, %xmm4
	addpd	%xmm9, %xmm5
	addpd	%xmm10, %xmm6
	addpd	%xmm11, %xmm7

	movapd	%xmm4, 0 * SIZE(Y1)
	movapd	%xmm5, 2 * SIZE(Y1)
	movapd	%xmm6, 4 * SIZE(Y1)
	movapd	%xmm7, 6 * SIZE(Y1)

	addq	$8 * SIZE, A1
	addq	$8 * SIZE, Y1
	ALIGN_3

.L36:
	testq	$4, MM
	je	.L37

	movapd	0 * SIZE(Y1), %xmm4
	movapd	2 * SIZE(Y1), %xmm5

	movapd	0 * SIZE(A1), %xmm8
	movapd	2 * SIZE(A1), %xmm9

	mulpd	%xmm0, %xmm8
	mulpd	%xmm0, %xmm9

	addpd	%xmm8, %xmm4
	addpd	%xmm9, %xmm5

	movapd	%xmm4, 0 * SIZE(Y1)
	movapd	%xmm5, 2 * SIZE(Y1)

	addq	$4 * SIZE, A1
	addq	$4 * SIZE, Y1
	ALIGN_3

.L37:
	testq	$2, MM
	je	.L38

	movapd	0 * SIZE(Y1), %xmm4
	movapd	0 * SIZE(A1), %xmm8

	mulpd	%xmm0, %xmm8
	addpd	%xmm8, %xmm4

	movapd	%xmm4, 0 * SIZE(Y1)

	addq	$2 * SIZE, A1
	addq	$2 * SIZE, Y1
	ALIGN_3

.L38:
	testq	$1, MM
	je	.L990

	movsd	0 * SIZE(Y1), %xmm4
	movsd	0 * SIZE(A1), %xmm8

	mulsd	%xmm0, %xmm8
	addsd	%xmm8, %xmm4

	movsd	%xmm4, 0 * SIZE(Y1)
	jmp	.L990
	ALIGN_3

.L40:
	/* second path: again walk the columns in blocks of 4 */
	movq	N, J
	sarq	$2, J
	jle	.L50
	ALIGN_3

.L41:
	movq	BUFFER, Y1

	movq	A,  A1
	leaq	(A, LDA, 1), A2
	leaq	(A, LDA, 4), A

	movsd	(X), %xmm0
	addq	INCX, X
	movsd	(X), %xmm1
	addq	INCX, X
	movsd	(X), %xmm2
	addq	INCX, X
	movsd	(X), %xmm3
	addq	INCX, X
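
/*
 * Note (not part of the kernel): for orientation, the C sketch below shows
 * the computation these blocked loops perform.  The function name
 * dgemv_n_ref and the parameter names are illustrative assumptions; the
 * sketch assumes column-major A with leading dimension lda, double
 * precision (SIZE = 8 bytes), and accumulation into an aligned buffer, as
 * the surrounding code suggests.  The real kernel blocks the columns in
 * groups of 4, 2, and 1 (labels .L11/.L20/.L30 and .L41) and unrolls the
 * row loop by 16, 8, 4, 2, and 1.
 */
#if 0
#include <stddef.h>

static void dgemv_n_ref(size_t m, size_t n, double alpha,
                        const double *a, size_t lda,
                        const double *x, size_t incx,
                        double *buffer)
{
    for (size_t j = 0; j < n; j++) {            /* one column per pass     */
        double xj = alpha * x[j * incx];        /* mulsd STACK_ALPHA, ...  */
        const double *col = a + j * lda;        /* A1/A2 column pointers   */
        for (size_t i = 0; i < m; i++)          /* unrolled in the kernel  */
            buffer[i] += xj * col[i];           /* mulpd + addpd pairs     */
    }
}
#endif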