
📄 gemv_n_sse2_core2.s

📁 Optimized GotoBLAS libraries
💻 Assembly (.s)
📖 Page 1 of 4
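This listing is the double-precision GEMV kernel for the non-transposed case (y := alpha*A*x + y), hand-scheduled with SSE2 for the Intel Core 2. As a reading aid, the C sketch below shows the operation the assembly computes for a column-major m-by-n matrix A with leading dimension lda. It is a minimal reference, not the GotoBLAS entry point: the function name is ours, and it assumes contiguous x and y, whereas the kernel also handles strided vectors and accumulates through a 16-byte-aligned scratch buffer.

#include <stddef.h>

/* Reference sketch only -- hypothetical name, unit strides assumed. */
static void dgemv_n_ref(size_t m, size_t n, double alpha,
                        const double *a, size_t lda,
                        const double *x, double *y)
{
    for (size_t j = 0; j < n; j++) {
        double ax = alpha * x[j];         /* scale x[j] once per column */
        for (size_t i = 0; i < m; i++)
            y[i] += ax * a[i + j * lda];  /* axpy along column j */
    }
}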
/*********************************************************************/
/*                                                                   */
/*             Optimized BLAS libraries                              */
/*                     By Kazushige Goto <kgoto@tacc.utexas.edu>     */
/*                                                                   */
/* Copyright (c) The University of Texas, 2005. All rights reserved. */
/* UNIVERSITY EXPRESSLY DISCLAIMS ANY AND ALL WARRANTIES CONCERNING  */
/* THIS SOFTWARE AND DOCUMENTATION, INCLUDING ANY WARRANTIES OF      */
/* MERCHANTABILITY, FITNESS FOR ANY PARTICULAR PURPOSE,              */
/* NON-INFRINGEMENT AND WARRANTIES OF PERFORMANCE, AND ANY WARRANTY  */
/* THAT MIGHT OTHERWISE ARISE FROM COURSE OF DEALING OR USAGE OF     */
/* TRADE. NO WARRANTY IS EITHER EXPRESS OR IMPLIED WITH RESPECT TO   */
/* THE USE OF THE SOFTWARE OR DOCUMENTATION.                         */
/* Under no circumstances shall University be liable for incidental, */
/* special, indirect, direct or consequential damages or loss of     */
/* profits, interruption of business, or related expenses which may  */
/* arise from use of Software or Documentation, including but not    */
/* limited to those resulting from defects in Software and/or        */
/* Documentation, or loss or inaccuracy of data of any kind.         */
/*********************************************************************/

#define ASSEMBLER
#include "common.h"

#define PREFETCH	prefetch
#define PREFETCHW	prefetchw
#define PREFETCHSIZE	32

#ifndef WINDOWS_ABI

#define STACKSIZE	64

#define OLD_M		%rdi
#define OLD_N		%rsi
#define OLD_LDA		%r8
#define STACK_INCX	 8 + STACKSIZE(%rsp)
#define STACK_Y		16 + STACKSIZE(%rsp)
#define STACK_INCY	24 + STACKSIZE(%rsp)
#define STACK_BUFFER	32 + STACKSIZE(%rsp)
#define STACK_ALPHA	48(%rsp)

#else

#define STACKSIZE	256

#define OLD_M		%rcx
#define OLD_N		%rdx
#define OLD_A		 40 + STACKSIZE(%rsp)
#define OLD_LDA		 48 + STACKSIZE(%rsp)
#define OLD_X		 56 + STACKSIZE(%rsp)
#define STACK_INCX	 64 + STACKSIZE(%rsp)
#define STACK_Y		 72 + STACKSIZE(%rsp)
#define STACK_INCY	 80 + STACKSIZE(%rsp)
#define STACK_BUFFER	 88 + STACKSIZE(%rsp)
#define STACK_ALPHA	224(%rsp)

#endif

#define M	%r12
#define N	%r13
#define A	%rcx
#define LDA	%rbx
#define X	%r9
#define INCX	%rdx
#define Y	%r14
#define INCY	%r10

#define TEMP	%rax
#define I	%rax
#define J	%r11
#define A1	%rdi
#define A2	%rsi
#define Y1	%rbp
#define BUFFER	%r15
#define MM	%r8

	PROLOGUE
	PROFCODE

	subq	$STACKSIZE, %rsp
	movq	%rbx,  0(%rsp)
	movq	%rbp,  8(%rsp)
	movq	%r12, 16(%rsp)
	movq	%r13, 24(%rsp)
	movq	%r14, 32(%rsp)
	movq	%r15, 40(%rsp)

#ifdef WINDOWS_ABI
	movq	%rdi,    48(%rsp)
	movq	%rsi,    56(%rsp)
	movups	%xmm6,   64(%rsp)
	movups	%xmm7,   80(%rsp)
	movups	%xmm8,   96(%rsp)
	movups	%xmm9,  112(%rsp)
	movups	%xmm10, 128(%rsp)
	movups	%xmm11, 144(%rsp)
	movups	%xmm12, 160(%rsp)
	movups	%xmm13, 176(%rsp)
	movups	%xmm14, 192(%rsp)
	movups	%xmm15, 208(%rsp)

	movq	OLD_M,   M
	movq	OLD_N,   N
	movq	OLD_A,   A
	movq	OLD_LDA, LDA
	movq	OLD_X,   X
#else
	movq	OLD_M,   M
	movq	OLD_N,   N
	movq	OLD_LDA, LDA
#endif

	movq	STACK_INCX,   INCX
	movq	STACK_Y,      Y
	movq	STACK_INCY,   INCY
	movq	STACK_BUFFER, BUFFER

#ifndef WINDOWS_ABI
	movsd	%xmm0, STACK_ALPHA
#else
	movsd	%xmm3, STACK_ALPHA
#endif

	leaq	(,INCX, SIZE), INCX	# convert strides from elements to bytes
	leaq	(,INCY, SIZE), INCY
	leaq	(,LDA,  SIZE), LDA

	testq	N, N		# if n <= 0 goto END
	jle	.L999
	testq	M, M		# if m <= 0 goto END
	jle	.L999

	movq	BUFFER, Y1

	/* If A is misaligned by one element, shift the buffer by one
	   element as well so the aligned vector loads below line up. */
	movq	M, MM
	movq	BUFFER, %rax
	decq	MM
	addq	$1 * SIZE, BUFFER

	testq	$SIZE, A
	cmoveq	M, MM			# A already aligned: undo the shift
	cmoveq	%rax, BUFFER

	/* Zero the accumulation buffer, eight doubles per iteration. */
	pxor	%xmm4, %xmm4

	movq	M,  %rax
	addq	$8, %rax
	sarq	$3, %rax
	ALIGN_3

.L01:
	movapd	%xmm4, 0 * SIZE(Y1)
	movapd	%xmm4, 2 * SIZE(Y1)
	movapd	%xmm4, 4 * SIZE(Y1)
	movapd	%xmm4, 6 * SIZE(Y1)
	addq	$8 * SIZE, Y1
	decq	%rax
	jg	.L01
	ALIGN_3

.L10:
	subq	$-16 * SIZE, A		# bias A so offsets -16..14 are usable

	testq	$SIZE, LDA
	jne	.L40			# odd LDA takes the unaligned path

	movq	N,  J
	sarq	$2, J			# four columns per iteration
	jle	.L20
	ALIGN_3

.L11:
	leaq	16 * SIZE(BUFFER), Y1

	movq	A,  A1
	leaq	(A,  LDA, 1), A2
	leaq	(A,  LDA, 4), A

	/* Load x[0..3], scale by alpha, broadcast to both lanes. */
	movsd	(X), %xmm12
	addq	INCX, X
	movsd	(X), %xmm13
	addq	INCX, X
	movsd	(X), %xmm14
	addq	INCX, X
	movsd	(X), %xmm15
	addq	INCX, X

	mulsd	STACK_ALPHA, %xmm12
	mulsd	STACK_ALPHA, %xmm13
	mulsd	STACK_ALPHA, %xmm14
	mulsd	STACK_ALPHA, %xmm15

	unpcklpd %xmm12, %xmm12
	unpcklpd %xmm13, %xmm13
	unpcklpd %xmm14, %xmm14
	unpcklpd %xmm15, %xmm15
	ALIGN_3

	/* Peel one scalar element if the columns are misaligned. */
	testq	$SIZE, A
	je	.L12

	movsd	-16 * SIZE(Y1), %xmm0
	movsd	-16 * SIZE(A1), %xmm4
	movsd	-16 * SIZE(A2), %xmm5
	movsd	-16 * SIZE(A1, LDA, 2), %xmm6
	movsd	-16 * SIZE(A2, LDA, 2), %xmm7

	mulsd	%xmm12, %xmm4
	mulsd	%xmm13, %xmm5
	mulsd	%xmm14, %xmm6
	mulsd	%xmm15, %xmm7

	addsd	%xmm4, %xmm0
	addsd	%xmm5, %xmm0
	addsd	%xmm6, %xmm0
	addsd	%xmm7, %xmm0

	movsd	%xmm0, -16 * SIZE(Y1)

	addq	$1 * SIZE, A1
	addq	$1 * SIZE, A2
	addq	$1 * SIZE, Y1
	ALIGN_3

.L12:
	movq	MM,  I
	sarq	$4, I			# sixteen y elements per iteration
	jle	.L15

	movapd	-16 * SIZE(A1), %xmm8
	movapd	-14 * SIZE(A1), %xmm9
	movapd	-12 * SIZE(A1), %xmm10
	movapd	-10 * SIZE(A1), %xmm11

	movapd	-16 * SIZE(Y1), %xmm0
	movapd	-14 * SIZE(Y1), %xmm1
	movapd	-12 * SIZE(Y1), %xmm2
	movapd	-10 * SIZE(Y1), %xmm3
	movapd	 -8 * SIZE(Y1), %xmm4
	movapd	 -6 * SIZE(Y1), %xmm5
	movapd	 -4 * SIZE(Y1), %xmm6
	movapd	 -2 * SIZE(Y1), %xmm7

	mulpd	%xmm12, %xmm8
	mulpd	%xmm12, %xmm9
	mulpd	%xmm12, %xmm10
	mulpd	%xmm12, %xmm11

	decq	I
	jle	.L14
	ALIGN_3

/* Software-pipelined main loop: accumulate four columns into
   sixteen elements of the buffer per iteration. */
.L13:
	addpd	%xmm8,  %xmm0
	addpd	%xmm9,  %xmm1
	addpd	%xmm10, %xmm2
	addpd	%xmm11, %xmm3

	movapd	-8 * SIZE(A1), %xmm8
	movapd	-6 * SIZE(A1), %xmm9
	movapd	-4 * SIZE(A1), %xmm10
	movapd	-2 * SIZE(A1), %xmm11

	mulpd	%xmm12, %xmm8
	mulpd	%xmm12, %xmm9
	mulpd	%xmm12, %xmm10
	mulpd	%xmm12, %xmm11

	addpd	%xmm8,  %xmm4
	addpd	%xmm9,  %xmm5
	addpd	%xmm10, %xmm6
	addpd	%xmm11, %xmm7

	movapd	-16 * SIZE(A2), %xmm8
	movapd	-14 * SIZE(A2), %xmm9
	movapd	-12 * SIZE(A2), %xmm10
	movapd	-10 * SIZE(A2), %xmm11

	mulpd	%xmm13, %xmm8
	mulpd	%xmm13, %xmm9
	mulpd	%xmm13, %xmm10
	mulpd	%xmm13, %xmm11

	addpd	%xmm8,  %xmm0
	addpd	%xmm9,  %xmm1
	addpd	%xmm10, %xmm2
	addpd	%xmm11, %xmm3

	movapd	-8 * SIZE(A2), %xmm8
	movapd	-6 * SIZE(A2), %xmm9
	movapd	-4 * SIZE(A2), %xmm10
	movapd	-2 * SIZE(A2), %xmm11

	mulpd	%xmm13, %xmm8
	mulpd	%xmm13, %xmm9
	mulpd	%xmm13, %xmm10
	mulpd	%xmm13, %xmm11

	addpd	%xmm8,  %xmm4
	addpd	%xmm9,  %xmm5
	addpd	%xmm10, %xmm6
	addpd	%xmm11, %xmm7

	movapd	-16 * SIZE(A1, LDA, 2), %xmm8
	movapd	-14 * SIZE(A1, LDA, 2), %xmm9
	movapd	-12 * SIZE(A1, LDA, 2), %xmm10
	movapd	-10 * SIZE(A1, LDA, 2), %xmm11

	mulpd	%xmm14, %xmm8
	mulpd	%xmm14, %xmm9
	mulpd	%xmm14, %xmm10
	mulpd	%xmm14, %xmm11

	addpd	%xmm8,  %xmm0
	addpd	%xmm9,  %xmm1
	addpd	%xmm10, %xmm2
	addpd	%xmm11, %xmm3

	movapd	-8 * SIZE(A1, LDA, 2), %xmm8
	movapd	-6 * SIZE(A1, LDA, 2), %xmm9
	movapd	-4 * SIZE(A1, LDA, 2), %xmm10
	movapd	-2 * SIZE(A1, LDA, 2), %xmm11

	mulpd	%xmm14, %xmm8
	mulpd	%xmm14, %xmm9
	mulpd	%xmm14, %xmm10
	mulpd	%xmm14, %xmm11

	addpd	%xmm8,  %xmm4
	addpd	%xmm9,  %xmm5
	addpd	%xmm10, %xmm6
	addpd	%xmm11, %xmm7

	movapd	-16 * SIZE(A2, LDA, 2), %xmm8
	movapd	-14 * SIZE(A2, LDA, 2), %xmm9
	movapd	-12 * SIZE(A2, LDA, 2), %xmm10
	movapd	-10 * SIZE(A2, LDA, 2), %xmm11

	mulpd	%xmm15, %xmm8
	mulpd	%xmm15, %xmm9
	mulpd	%xmm15, %xmm10
	mulpd	%xmm15, %xmm11

	addpd	%xmm8,  %xmm0
	addpd	%xmm9,  %xmm1
	addpd	%xmm10, %xmm2
	addpd	%xmm11, %xmm3

	movapd	-8 * SIZE(A2, LDA, 2), %xmm8
	movapd	-6 * SIZE(A2, LDA, 2), %xmm9
	movapd	-4 * SIZE(A2, LDA, 2), %xmm10
	movapd	-2 * SIZE(A2, LDA, 2), %xmm11

	mulpd	%xmm15, %xmm8
	mulpd	%xmm15, %xmm9
	mulpd	%xmm15, %xmm10
	mulpd	%xmm15, %xmm11

	addpd	%xmm8,  %xmm4
	addpd	%xmm9,  %xmm5
	addpd	%xmm10, %xmm6
	addpd	%xmm11, %xmm7

	movapd	%xmm0, -16 * SIZE(Y1)
	movapd	%xmm1, -14 * SIZE(Y1)
	movapd	%xmm2, -12 * SIZE(Y1)
	movapd	%xmm3, -10 * SIZE(Y1)

	movapd	 0 * SIZE(Y1), %xmm0
	movapd	 2 * SIZE(Y1), %xmm1
	movapd	 4 * SIZE(Y1), %xmm2
	movapd	 6 * SIZE(Y1), %xmm3

	movapd	%xmm4, -8 * SIZE(Y1)
	movapd	%xmm5, -6 * SIZE(Y1)
	movapd	%xmm6, -4 * SIZE(Y1)
	movapd	%xmm7, -2 * SIZE(Y1)

	movapd	 8 * SIZE(Y1), %xmm4
	movapd	10 * SIZE(Y1), %xmm5
	movapd	12 * SIZE(Y1), %xmm6
	movapd	14 * SIZE(Y1), %xmm7

	movapd	 0 * SIZE(A1), %xmm8
	movapd	 2 * SIZE(A1), %xmm9
	movapd	 4 * SIZE(A1), %xmm10
	movapd	 6 * SIZE(A1), %xmm11

	mulpd	%xmm12, %xmm8
	mulpd	%xmm12, %xmm9
	mulpd	%xmm12, %xmm10
	mulpd	%xmm12, %xmm11

	subq	$-16 * SIZE, A1
	subq	$-16 * SIZE, A2
	subq	$-16 * SIZE, Y1

	subq	$1, I
	jg,pt	.L13
	ALIGN_3

/* Pipeline epilogue: finish the final 16-element block. */
.L14:
	addpd	%xmm8,  %xmm0
	addpd	%xmm9,  %xmm1
	addpd	%xmm10, %xmm2
	addpd	%xmm11, %xmm3

	movapd	-8 * SIZE(A1), %xmm8
	movapd	-6 * SIZE(A1), %xmm9
	movapd	-4 * SIZE(A1), %xmm10
	movapd	-2 * SIZE(A1), %xmm11

	mulpd	%xmm12, %xmm8
	mulpd	%xmm12, %xmm9
	mulpd	%xmm12, %xmm10
	mulpd	%xmm12, %xmm11

	addpd	%xmm8,  %xmm4
	addpd	%xmm9,  %xmm5
	addpd	%xmm10, %xmm6
	addpd	%xmm11, %xmm7

	movapd	-16 * SIZE(A2), %xmm8
	movapd	-14 * SIZE(A2), %xmm9
	movapd	-12 * SIZE(A2), %xmm10
	movapd	-10 * SIZE(A2), %xmm11

	mulpd	%xmm13, %xmm8
	mulpd	%xmm13, %xmm9
	mulpd	%xmm13, %xmm10
	mulpd	%xmm13, %xmm11

	addpd	%xmm8,  %xmm0
	addpd	%xmm9,  %xmm1
	addpd	%xmm10, %xmm2
	addpd	%xmm11, %xmm3

	movapd	-8 * SIZE(A2), %xmm8
	movapd	-6 * SIZE(A2), %xmm9
	movapd	-4 * SIZE(A2), %xmm10
	movapd	-2 * SIZE(A2), %xmm11

	mulpd	%xmm13, %xmm8
	mulpd	%xmm13, %xmm9
	mulpd	%xmm13, %xmm10
	mulpd	%xmm13, %xmm11

	addpd	%xmm8,  %xmm4
	addpd	%xmm9,  %xmm5
	addpd	%xmm10, %xmm6
	addpd	%xmm11, %xmm7

	movapd	-16 * SIZE(A1, LDA, 2), %xmm8
	movapd	-14 * SIZE(A1, LDA, 2), %xmm9
	movapd	-12 * SIZE(A1, LDA, 2), %xmm10
	movapd	-10 * SIZE(A1, LDA, 2), %xmm11

	mulpd	%xmm14, %xmm8
	mulpd	%xmm14, %xmm9
	mulpd	%xmm14, %xmm10
	mulpd	%xmm14, %xmm11

	addpd	%xmm8,  %xmm0
	addpd	%xmm9,  %xmm1
	addpd	%xmm10, %xmm2
	addpd	%xmm11, %xmm3

	movapd	-8 * SIZE(A1, LDA, 2), %xmm8
	movapd	-6 * SIZE(A1, LDA, 2), %xmm9
	movapd	-4 * SIZE(A1, LDA, 2), %xmm10
	movapd	-2 * SIZE(A1, LDA, 2), %xmm11

	mulpd	%xmm14, %xmm8
	mulpd	%xmm14, %xmm9
	mulpd	%xmm14, %xmm10
	mulpd	%xmm14, %xmm11

	addpd	%xmm8,  %xmm4
	addpd	%xmm9,  %xmm5
	addpd	%xmm10, %xmm6
	addpd	%xmm11, %xmm7

	movapd	-16 * SIZE(A2, LDA, 2), %xmm8
	movapd	-14 * SIZE(A2, LDA, 2), %xmm9
	movapd	-12 * SIZE(A2, LDA, 2), %xmm10
	movapd	-10 * SIZE(A2, LDA, 2), %xmm11

	mulpd	%xmm15, %xmm8
	mulpd	%xmm15, %xmm9
	mulpd	%xmm15, %xmm10
	mulpd	%xmm15, %xmm11

	addpd	%xmm8,  %xmm0
	addpd	%xmm9,  %xmm1
	addpd	%xmm10, %xmm2
	addpd	%xmm11, %xmm3

	movapd	-8 * SIZE(A2, LDA, 2), %xmm8
	movapd	-6 * SIZE(A2, LDA, 2), %xmm9
	movapd	-4 * SIZE(A2, LDA, 2), %xmm10
	movapd	-2 * SIZE(A2, LDA, 2), %xmm11

	mulpd	%xmm15, %xmm8
	mulpd	%xmm15, %xmm9
	mulpd	%xmm15, %xmm10
	mulpd	%xmm15, %xmm11

	addpd	%xmm8,  %xmm4
	addpd	%xmm9,  %xmm5
	addpd	%xmm10, %xmm6
	addpd	%xmm11, %xmm7

	movapd	%xmm0, -16 * SIZE(Y1)
	movapd	%xmm1, -14 * SIZE(Y1)
	movapd	%xmm2, -12 * SIZE(Y1)
	movapd	%xmm3, -10 * SIZE(Y1)
	movapd	%xmm4,  -8 * SIZE(Y1)
	movapd	%xmm5,  -6 * SIZE(Y1)
	movapd	%xmm6,  -4 * SIZE(Y1)
	movapd	%xmm7,  -2 * SIZE(Y1)

	subq	$-16 * SIZE, A1
	subq	$-16 * SIZE, A2
	subq	$-16 * SIZE, Y1
	ALIGN_3

/* Remainder: eight elements. */
.L15:
	testq	$8, MM
	je	.L16

	movapd	-16 * SIZE(Y1), %xmm0
	movapd	-14 * SIZE(Y1), %xmm1
	movapd	-12 * SIZE(Y1), %xmm2
	movapd	-10 * SIZE(Y1), %xmm3

	movapd	-16 * SIZE(A1), %xmm8
	movapd	-14 * SIZE(A1), %xmm9
	movapd	-12 * SIZE(A1), %xmm10
	movapd	-10 * SIZE(A1), %xmm11

	mulpd	%xmm12, %xmm8
	mulpd	%xmm12, %xmm9
	mulpd	%xmm12, %xmm10
	mulpd	%xmm12, %xmm11

	addpd	%xmm8,  %xmm0
	addpd	%xmm9,  %xmm1
	addpd	%xmm10, %xmm2
	addpd	%xmm11, %xmm3

	movapd	-16 * SIZE(A2), %xmm8
	movapd	-14 * SIZE(A2), %xmm9
	movapd	-12 * SIZE(A2), %xmm10
	movapd	-10 * SIZE(A2), %xmm11

	mulpd	%xmm13, %xmm8
	mulpd	%xmm13, %xmm9
	mulpd	%xmm13, %xmm10
	mulpd	%xmm13, %xmm11

	addpd	%xmm8,  %xmm0
	addpd	%xmm9,  %xmm1
	addpd	%xmm10, %xmm2
	addpd	%xmm11, %xmm3

	movapd	-16 * SIZE(A1, LDA, 2), %xmm8
	movapd	-14 * SIZE(A1, LDA, 2), %xmm9
	movapd	-12 * SIZE(A1, LDA, 2), %xmm10
	movapd	-10 * SIZE(A1, LDA, 2), %xmm11

	mulpd	%xmm14, %xmm8
	mulpd	%xmm14, %xmm9
	mulpd	%xmm14, %xmm10
	mulpd	%xmm14, %xmm11

	addpd	%xmm8,  %xmm0
	addpd	%xmm9,  %xmm1
	addpd	%xmm10, %xmm2
	addpd	%xmm11, %xmm3

	movapd	-16 * SIZE(A2, LDA, 2), %xmm8
	movapd	-14 * SIZE(A2, LDA, 2), %xmm9
	movapd	-12 * SIZE(A2, LDA, 2), %xmm10
	movapd	-10 * SIZE(A2, LDA, 2), %xmm11

	mulpd	%xmm15, %xmm8
	mulpd	%xmm15, %xmm9
	mulpd	%xmm15, %xmm10
	mulpd	%xmm15, %xmm11

	addpd	%xmm8,  %xmm0
	addpd	%xmm9,  %xmm1
	addpd	%xmm10, %xmm2
	addpd	%xmm11, %xmm3

	movapd	%xmm0, -16 * SIZE(Y1)
	movapd	%xmm1, -14 * SIZE(Y1)
	movapd	%xmm2, -12 * SIZE(Y1)
	movapd	%xmm3, -10 * SIZE(Y1)

	addq	$8 * SIZE, A1
	addq	$8 * SIZE, A2
	addq	$8 * SIZE, Y1
	ALIGN_3

/* Remainder: four elements. */
.L16:
	testq	$4, MM
	je	.L17

	movapd	-16 * SIZE(Y1), %xmm0
	movapd	-14 * SIZE(Y1), %xmm1

	movapd	-16 * SIZE(A1), %xmm8
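The page breaks off inside the .L16 block; pages 2 to 4, not shown here, continue with the remaining remainder blocks and the paths at .L17, .L20, .L40, and .L999 that the visible code branches to. For orientation, the main path above processes four columns per outer iteration (.L11) and sixteen elements of y per software-pipelined inner iteration (.L13), with 8- and 4-element tails at .L15 and .L16. The C sketch below mirrors that blocking under the same illustrative assumptions as the reference above; alignment peeling and the pipelined scheduling are deliberately omitted.

#include <stddef.h>

/* Hypothetical sketch of the blocking used above: four columns at a
 * time (.L11), sixteen y elements per inner pass (.L13), scalar loops
 * standing in for the .L15/.L16 tails. */
static void dgemv_n_blocked(size_t m, size_t n, double alpha,
                            const double *a, size_t lda,
                            const double *x, double *y)
{
    size_t j = 0;
    for (; j + 4 <= n; j += 4) {              /* .L11: next four columns */
        const double *a0 = a + (j + 0) * lda;
        const double *a1 = a + (j + 1) * lda;
        const double *a2 = a + (j + 2) * lda;
        const double *a3 = a + (j + 3) * lda;
        double x0 = alpha * x[j + 0], x1 = alpha * x[j + 1];
        double x2 = alpha * x[j + 2], x3 = alpha * x[j + 3];
        size_t i = 0;
        for (; i + 16 <= m; i += 16)          /* .L13: 16 elements per pass */
            for (size_t k = i; k < i + 16; k++)
                y[k] += x0 * a0[k] + x1 * a1[k] + x2 * a2[k] + x3 * a3[k];
        for (; i < m; i++)                    /* cf. the .L15/.L16 tails */
            y[i] += x0 * a0[i] + x1 * a1[i] + x2 * a2[i] + x3 * a3[i];
    }
    for (; j < n; j++)                        /* leftover columns (.L20 path) */
        for (size_t i = 0; i < m; i++)
            y[i] += alpha * x[j] * a[i + j * lda];
}

Note the design choice the sketch mirrors: each group of four x elements is scaled by alpha once per column block, exactly as the mulsd/unpcklpd sequence in .L11 does, and the kernel accumulates into a 16-byte-aligned bounce buffer so every movapd load and store in the inner loop is aligned.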
