ztrsm_kernel_hummer_ln.s
/* (excerpt begins mid-kernel: tail of the unrolled k loop for the 2-row strip) */
        FXCSMADD f2, B3, A5, f2
        FXCPMADD f1, B3, A6, f1
        FXCSMADD f3, B3, A6, f3
        FXCPMADD f0, B4, A7, f0
        FXCSMADD f2, B4, A7, f2
        FXCPMADD f1, B4, A8, f1
        FXCSMADD f3, B4, A8, f3
        .align 4

.L64:   /* k-loop remainder (k mod 4) for the 2-row strip */
#if defined(LT) || defined(RN)
        andi.   r0, KK, 3
        mtspr   CTR, r0
        ble+    .L68
#else
        andi.   r0, TEMP, 3
        mtspr   CTR, r0
        ble+    .L68
#endif

        LFPDUX  A1, AO, INC2
        LFPDUX  B1, BO, INC2
        LFPDUX  A2, AO, INC2
        bdz-    .L67
        .align 4

.L66:
        FXCPMADD f0, B1, A1, f0
        FXCSMADD f2, B1, A1, f2
        LFPDUX  A1, AO, INC2
        FXCPMADD f1, B1, A2, f1
        FXCSMADD f3, B1, A2, f3
        LFPDUX  B1, BO, INC2
        LFPDUX  A2, AO, INC2
        bdnz+   .L66
        .align 4

.L67:
        FXCPMADD f0, B1, A1, f0
        FXCSMADD f2, B1, A1, f2
        FXCPMADD f1, B1, A2, f1
        FXCSMADD f3, B1, A2, f3
        .align 4

.L68:   /* merge the two accumulator streams, then solve */
        fpadd   f0, f0, f2
        fpadd   f1, f1, f3

#if defined(LN) || defined(RT)
#ifdef LN
        subi    r0, KK, 2
#else
        subi    r0, KK, 1
#endif
        slwi    TEMP, r0, 1 + ZBASE_SHIFT
        slwi    r0,   r0, 0 + ZBASE_SHIFT
        add     AO, AORIG, TEMP
        add     BO, B,     r0
        addi    BO, BO, - 2 * SIZE
#endif

#if defined(LN) || defined(LT)
        LFPDUX  f16, BO, INC2
        LFPDUX  f17, BO, INC2
        subi    BO, BO, 4 * SIZE
#else
        LFPDUX  f16, AO, INC2
        LFPDUX  f17, AO, INC2
        subi    AO, AO, 4 * SIZE
#endif

        fpsub   f0, f16, f0     /* rhs - accumulated product */
        fpsub   f1, f17, f1

#ifdef LN
        /* 2x2 lower-triangular diagonal block, solved bottom-up
           (diagonal entries stored pre-inverted) */
        LFPDUX  A1, AO, INC2
        add     AO, AO, INC2
        LFPDUX  A2, AO, INC2
        LFPDUX  A3, AO, INC2
        subi    AO, AO, 8 * SIZE

        fxpmul    f4, A3, f1
        FXCXNPMA  f1, A3, f1, f4
        fxcpnmsub f0, A2, f1, f0
        FXCXNSMA  f0, A2, f1, f0
        fxpmul    f4, A1, f0
        FXCXNPMA  f0, A1, f0, f4
#endif

#ifdef LT
        /* 2x2 block, solved top-down */
        LFPDUX  A1, AO, INC2
        LFPDUX  A2, AO, INC2
        add     AO, AO, INC2
        LFPDUX  A3, AO, INC2
        subi    AO, AO, 8 * SIZE

        fxpmul    f4, A1, f0
        FXCXNPMA  f0, A1, f0, f4
        fxcpnmsub f1, A2, f0, f1
        FXCXNSMA  f1, A2, f0, f1
        fxpmul    f6, A3, f1
        FXCXNPMA  f1, A3, f1, f6
#endif

#ifdef RN
        /* 1x1 diagonal of B: scale both rows */
        LFPDX   A1, BO, INC2
        fxpmul   f4, A1, f0
        fxpmul   f5, A1, f1
        FXCXNPMA f0, A1, f0, f4
        FXCXNPMA f1, A1, f1, f5
#endif

#ifdef RT
        LFPDX   A1, BO, INC2
        fxpmul   f4, A1, f0
        fxpmul   f5, A1, f1
        FXCXNPMA f0, A1, f0, f4
        FXCXNPMA f1, A1, f1, f5
#endif

#ifdef LN
        subi    CO1, CO1, 4 * SIZE
#endif

        /* write the solved block back to the packed buffer ... */
#if defined(LN) || defined(LT)
        STFPDUX f0, BO, INC2
        STFPDUX f1, BO, INC2
        subi    BO, BO, 4 * SIZE
#else
        STFPDUX f0, AO, INC2
        STFPDUX f1, AO, INC2
        subi    AO, AO, 4 * SIZE
#endif

        /* ... and to C */
        STFDUX  f0, CO1, INC
        STFSDUX f0, CO1, INC
        STFDUX  f1, CO1, INC
        STFSDUX f1, CO1, INC

#ifdef LN
        subi    CO1, CO1, 4 * SIZE
#endif

#ifdef RT
        slwi    r0, K, 1 + ZBASE_SHIFT
        add     AORIG, AORIG, r0
#endif

#if defined(LT) || defined(RN)
        sub     TEMP, K, KK
        slwi    r0,   TEMP, 1 + ZBASE_SHIFT
        slwi    TEMP, TEMP, 0 + ZBASE_SHIFT
        add     AO, AO, r0
        add     BO, BO, TEMP
#endif

#ifdef LT
        addi    KK, KK, 2
#endif

#ifdef LN
        subi    KK, KK, 2
#endif

        li      r0, FZERO
        lfpsx   f0, SP, r0
        .align 4

.L70:   /* main loop over 4-row strips (I = M >> 2) */
        srawi.  I, M, 2
        ble     .L89
        .align 4

.L51:
#if defined(LT) || defined(RN)
        fpmr    f4, f0
        addi    BO, B, - 2 * SIZE
        fpmr    f1, f0
        fpmr    f5, f0
        fpmr    f2, f0
        fpmr    f6, f0
        fpmr    f3, f0
        fpmr    f7, f0
        srawi.  r0, KK, 2
        mtspr   CTR, r0
        ble     .L54
#else
#ifdef LN
        slwi    r0, K, 2 + ZBASE_SHIFT
        sub     AORIG, AORIG, r0
#endif
        slwi    r0,   KK, 2 + ZBASE_SHIFT
        slwi    TEMP, KK, 0 + ZBASE_SHIFT
        add     AO, AORIG, r0
        add     BO, B,     TEMP
        sub     TEMP, K, KK

        fpmr    f4, f0
        addi    BO, BO, - 2 * SIZE
        fpmr    f1, f0
        fpmr    f5, f0
        fpmr    f2, f0
        fpmr    f6, f0
        fpmr    f3, f0
        fpmr    f7, f0
        srawi.  r0, TEMP, 2
        mtspr   CTR, r0
        ble     .L54
#endif

        LFPDUX  B1, BO, INC2
        LFPDUX  A1, AO, INC2
        LFPDUX  A2, AO, INC2
        LFPDUX  B2, BO, INC2
        LFPDUX  A3, AO, INC2
        LFPDUX  A4, AO, INC2
        LFPDUX  B3, BO, INC2
        LFPDUX  A5, AO, INC2
        LFPDUX  A6, AO, INC2
        LFPDUX  A7, AO, INC2
        LFPDUX  A8, AO, INC2
        bdz-    .L53
        .align 4

.L52:   /* k loop unrolled by 4, loads software-pipelined between FMAs */
        FXCPMADD f0, B1, A1, f0
        LFPDUX  B4, BO, INC2
        FXCSMADD f4, B1, A1, f4
        LFPDUX  A1, AO, INC2
        FXCPMADD f1, B1, A2, f1
        nop
        FXCSMADD f5, B1, A2, f5
        LFPDUX  A2, AO, INC2
        FXCPMADD f2, B1, A3, f2
        nop
        FXCSMADD f6, B1, A3, f6
        LFPDUX  A3, AO, INC2
        FXCPMADD f3, B1, A4, f3
        nop
        FXCSMADD f7, B1, A4, f7
        LFPDUX  A4, AO, INC2

        FXCPMADD f0, B2, A5, f0
        LFPDUX  B1, BO, INC2
        FXCSMADD f4, B2, A5, f4
        LFPDUX  A5, AO, INC2
        FXCPMADD f1, B2, A6, f1
        nop
        FXCSMADD f5, B2, A6, f5
        LFPDUX  A6, AO, INC2
        FXCPMADD f2, B2, A7, f2
        nop
        FXCSMADD f6, B2, A7, f6
        LFPDUX  A7, AO, INC2
        FXCPMADD f3, B2, A8, f3
        nop
        FXCSMADD f7, B2, A8, f7
        LFPDUX  A8, AO, INC2

        FXCPMADD f0, B3, A1, f0
        LFPDUX  B2, BO, INC2
        FXCSMADD f4, B3, A1, f4
        LFPDUX  A1, AO, INC2
        FXCPMADD f1, B3, A2, f1
        nop
        FXCSMADD f5, B3, A2, f5
        LFPDUX  A2, AO, INC2
        FXCPMADD f2, B3, A3, f2
        nop
        FXCSMADD f6, B3, A3, f6
        LFPDUX  A3, AO, INC2
        FXCPMADD f3, B3, A4, f3
        nop
        FXCSMADD f7, B3, A4, f7
        LFPDUX  A4, AO, INC2

        FXCPMADD f0, B4, A5, f0
        LFPDUX  B3, BO, INC2
        FXCSMADD f4, B4, A5, f4
        LFPDUX  A5, AO, INC2
        FXCPMADD f1, B4, A6, f1
        nop
        FXCSMADD f5, B4, A6, f5
        LFPDUX  A6, AO, INC2
        FXCPMADD f2, B4, A7, f2
        nop
        FXCSMADD f6, B4, A7, f6
        LFPDUX  A7, AO, INC2
        FXCPMADD f3, B4, A8, f3
        nop
        FXCSMADD f7, B4, A8, f7
        LFPDUX  A8, AO, INC2
        bdnz+   .L52
        .align 4

.L53:   /* drain of the pipelined k loop */
        FXCPMADD f0, B1, A1, f0
        LFPDUX  B4, BO, INC2
        FXCSMADD f4, B1, A1, f4
        LFPDUX  A1, AO, INC2
        FXCPMADD f1, B1, A2, f1
        nop
        FXCSMADD f5, B1, A2, f5
        LFPDUX  A2, AO, INC2
        FXCPMADD f2, B1, A3, f2
        nop
        FXCSMADD f6, B1, A3, f6
        LFPDUX  A3, AO, INC2
        FXCPMADD f3, B1, A4, f3
        nop
        FXCSMADD f7, B1, A4, f7
        LFPDUX  A4, AO, INC2

        FXCPMADD f0, B2, A5, f0
        nop
        FXCSMADD f4, B2, A5, f4
        LFPDUX  A5, AO, INC2
        FXCPMADD f1, B2, A6, f1
        nop
        FXCSMADD f5, B2, A6, f5
        LFPDUX  A6, AO, INC2
        FXCPMADD f2, B2, A7, f2
        nop
        FXCSMADD f6, B2, A7, f6
        LFPDUX  A7, AO, INC2
        FXCPMADD f3, B2, A8, f3
        nop
        FXCSMADD f7, B2, A8, f7
        LFPDUX  A8, AO, INC2

        FXCPMADD f0, B3, A1, f0
        FXCSMADD f4, B3, A1, f4
        FXCPMADD f1, B3, A2, f1
        FXCSMADD f5, B3, A2, f5
        FXCPMADD f2, B3, A3, f2
        FXCSMADD f6, B3, A3, f6
        FXCPMADD f3, B3, A4, f3
        FXCSMADD f7, B3, A4, f7

        FXCPMADD f0, B4, A5, f0
        FXCSMADD f4, B4, A5, f4
        FXCPMADD f1, B4, A6, f1
        FXCSMADD f5, B4, A6, f5
        FXCPMADD f2, B4, A7, f2
        FXCSMADD f6, B4, A7, f6
        FXCPMADD f3, B4, A8, f3
        FXCSMADD f7, B4, A8, f7
        .align 4

.L54:   /* k-loop remainder (k mod 4) for the 4-row strip */
#if defined(LT) || defined(RN)
        andi.   r0, KK, 3
        mtspr   CTR, r0
        ble+    .L58
#else
        andi.   r0, TEMP, 3
        mtspr   CTR, r0
        ble+    .L58
#endif

        LFPDUX  A1, AO, INC2
        LFPDUX  B1, BO, INC2
        LFPDUX  A2, AO, INC2
        LFPDUX  A3, AO, INC2
        LFPDUX  A4, AO, INC2
        bdz-    .L57
        .align 4

.L56:
        FXCPMADD f0, B1, A1, f0
        FXCSMADD f4, B1, A1, f4
        LFPDUX  A1, AO, INC2
        FXCPMADD f1, B1, A2, f1
        FXCSMADD f5, B1, A2, f5
        LFPDUX  A2, AO, INC2
        FXCPMADD f2, B1, A3, f2
        FXCSMADD f6, B1, A3, f6
        LFPDUX  A3, AO, INC2
        FXCPMADD f3, B1, A4, f3
        FXCSMADD f7, B1, A4, f7
        LFPDUX  A4, AO, INC2
        LFPDUX  B1, BO, INC2
        bdnz+   .L56
        .align 4

.L57:
        FXCPMADD f0, B1, A1, f0
        FXCSMADD f4, B1, A1, f4
        FXCPMADD f1, B1, A2, f1
        FXCSMADD f5, B1, A2, f5
        FXCPMADD f2, B1, A3, f2
        FXCSMADD f6, B1, A3, f6
        FXCPMADD f3, B1, A4, f3
        FXCSMADD f7, B1, A4, f7
        .align 4

.L58:   /* merge accumulators, subtract from the stored RHS, solve */
        fpadd   f0, f0, f4
        fpadd   f1, f1, f5
        fpadd   f2, f2, f6
        fpadd   f3, f3, f7

#if defined(LN) || defined(RT)
#ifdef LN
        subi    r0, KK, 4
#else
        subi    r0, KK, 1
#endif
        slwi    TEMP, r0, 2 + ZBASE_SHIFT
        slwi    r0,   r0, 0 + ZBASE_SHIFT
        add     AO, AORIG, TEMP
        add     BO, B,     r0
        addi    BO, BO, - 2 * SIZE
#endif

#if defined(LN) || defined(LT)
        LFPDUX  f16, BO, INC2
        LFPDUX  f17, BO, INC2
        LFPDUX  f18, BO, INC2
        LFPDUX  f19, BO, INC2
        subi    BO, BO, 8 * SIZE
#else
        LFPDUX  f16, AO, INC2
        LFPDUX  f17, AO, INC2
        LFPDUX  f18, AO, INC2
        LFPDUX  f19, AO, INC2
        subi    AO, AO, 8 * SIZE
#endif

        fpsub   f0, f16, f0
        fpsub   f1, f17, f1
        fpsub   f2, f18, f2
        fpsub   f3, f19, f3

#ifdef LN
        /* 4x4 lower-triangular block: load the triangle, solve bottom-up */
        LFPDUX  A1, AO, INC2
        add     AO, AO, INC2
        add     AO, AO, INC2
        add     AO, AO, INC2
        LFPDUX  A2, AO, INC2
        LFPDUX  A3, AO, INC2
        add     AO, AO, INC2
        add     AO, AO, INC2
        LFPDUX  A4, AO, INC2
        LFPDUX  A5, AO, INC2
        LFPDUX  A6, AO, INC2
        add     AO, AO, INC2
        LFPDUX  A7, AO, INC2
        LFPDUX  A8, AO, INC2
        LFPDUX  A9, AO, INC2
        LFPDUX  A10, AO, INC2
        subi    AO, AO, 32 * SIZE

        fxpmul    f4, A10, f3
        FXCXNPMA  f3, A10, f3, f4
        fxcpnmsub f2, A9, f3, f2
        FXCXNSMA  f2, A9, f3, f2
        fxcpnmsub f1, A8, f3, f1
        FXCXNSMA  f1, A8, f3, f1
        fxcpnmsub f0, A7, f3, f0
        FXCXNSMA  f0, A7, f3, f0

        fxpmul    f4, A6, f2
        FXCXNPMA  f2, A6, f2, f4
        fxcpnmsub f1, A5, f2, f1
        FXCXNSMA  f1, A5, f2, f1
        fxcpnmsub f0, A4, f2, f0
        FXCXNSMA  f0, A4, f2, f0

        fxpmul    f4, A3, f1
        FXCXNPMA  f1, A3, f1, f4
        fxcpnmsub f0, A2, f1, f0
        FXCXNSMA  f0, A2, f1, f0

        fxpmul    f4, A1, f0
        FXCXNPMA  f0, A1, f0, f4
#endif

#ifdef LT
        /* 4x4 block: solve top-down */
        LFPDUX  A1, AO, INC2
        LFPDUX  A2, AO, INC2
        LFPDUX  A3, AO, INC2
        LFPDUX  A4, AO, INC2
        add     AO, AO, INC2
        LFPDUX  A5, AO, INC2
        LFPDUX  A6, AO, INC2
        LFPDUX  A7, AO, INC2
        add     AO, AO, INC2
        add     AO, AO, INC2
        LFPDUX  A8, AO, INC2
        LFPDUX  A9, AO, INC2
        add     AO, AO, INC2
        add     AO, AO, INC2
        add     AO, AO, INC2
        LFPDUX  A10, AO, INC2
        subi    AO, AO, 32 * SIZE

        fxpmul    f4, A1, f0
        FXCXNPMA  f0, A1, f0, f4
        fxcpnmsub f1, A2, f0, f1
        FXCXNSMA  f1, A2, f0, f1
        fxcpnmsub f2, A3, f0, f2
        FXCXNSMA  f2, A3, f0, f2
        fxcpnmsub f3, A4, f0, f3
        FXCXNSMA  f3, A4, f0, f3

        fxpmul    f6, A5, f1
        FXCXNPMA  f1, A5, f1, f6
        fxcpnmsub f2, A6, f1, f2
        FXCXNSMA  f2, A6, f1, f2
        fxcpnmsub f3, A7, f1, f3
        FXCXNSMA  f3, A7, f1, f3

        fxpmul    f4, A8, f2
        FXCXNPMA  f2, A8, f2, f4
        fxcpnmsub f3, A9, f2, f3
        FXCXNSMA  f3, A9, f2, f3

        fxpmul    f6, A10, f3
        FXCXNPMA  f3, A10, f3, f6
#endif

#ifdef RN
        LFPDX   A1, BO, INC2
        fxpmul   f4, A1, f0
        fxpmul   f5, A1, f1
        fxpmul   f6, A1, f2
        fxpmul   f7, A1, f3
        FXCXNPMA f0, A1, f0, f4
        FXCXNPMA f1, A1, f1, f5
        FXCXNPMA f2, A1, f2, f6
        FXCXNPMA f3, A1, f3, f7
#endif

#ifdef RT
        LFPDX   A1, BO, INC2
        fxpmul   f4, A1, f0
        fxpmul   f5, A1, f1
        fxpmul   f6, A1, f2
        fxpmul   f7, A1, f3
        FXCXNPMA f0, A1, f0, f4
        FXCXNPMA f1, A1, f1, f5
        FXCXNPMA f2, A1, f2, f6
        FXCXNPMA f3, A1, f3, f7
#endif

#ifdef LN
        subi    CO1, CO1, 8 * SIZE
#endif

#if defined(LN) || defined(LT)
        STFPDUX f0, BO, INC2
        STFPDUX f1, BO, INC2
        STFPDUX f2, BO, INC2
        STFPDUX f3, BO, INC2
        subi    BO, BO, 8 * SIZE
#else
        STFPDUX f0, AO, INC2
        STFPDUX f1, AO, INC2
        STFPDUX f2, AO, INC2
        STFPDUX f3, AO, INC2
        subi    AO, AO, 8 * SIZE
#endif

        STFDUX  f0, CO1, INC
        STFSDUX f0, CO1, INC
        STFDUX  f1, CO1, INC
        STFSDUX f1, CO1, INC
        STFDUX  f2, CO1, INC
        STFSDUX f2, CO1, INC
        STFDUX  f3, CO1, INC
        STFSDUX f3, CO1, INC

#ifdef LN
        subi    CO1, CO1, 8 * SIZE
#endif

#ifdef RT
        slwi    r0, K, 2 + ZBASE_SHIFT
        add     AORIG, AORIG, r0
#endif

#if defined(LT) || defined(RN)
        sub     TEMP, K, KK
        slwi    r0,   TEMP, 2 + ZBASE_SHIFT
        slwi    TEMP, TEMP, 0 + ZBASE_SHIFT
        add     AO, AO, r0
        add     BO, BO, TEMP
#endif

#ifdef LT
        addi    KK, KK, 4
#endif

#ifdef LN
        subi    KK, KK, 4
#endif

        addic.  I, I, -1
        li      r0, FZERO
        lfpsx   f0, SP, r0
        bgt+    .L51
        .align 4

.L89:   /* after the M sweep: advance B and KK for the next panel */
#ifdef LN
        slwi    r0, K, 0 + ZBASE_SHIFT
        add     B, B, r0
#endif

#if defined(LT) || defined(RN)
        addi    B, BO, 2 * SIZE
#endif

#ifdef RN
        addi    KK, KK, 1
#endif

#ifdef RT
        subi    KK, KK, 1
#endif
        .align 4

.L999:  /* epilogue: restore callee-saved GPRs r14-r31 and FPR pairs f14-f31 */
        addi    SP, SP, 20
        lwzu    r14, 4(SP)
        lwzu    r15, 4(SP)
        lwzu    r16, 4(SP)
        lwzu    r17, 4(SP)
        lwzu    r18, 4(SP)
        lwzu    r19, 4(SP)
        lwzu    r20, 4(SP)
        lwzu    r21, 4(SP)
        lwzu    r22, 4(SP)
        lwzu    r23, 4(SP)
        lwzu    r24, 4(SP)
        lwzu    r25, 4(SP)
        lwzu    r26, 4(SP)
        lwzu    r27, 4(SP)
        lwzu    r28, 4(SP)
        lwzu    r29, 4(SP)
        lwzu    r30, 4(SP)
        lwzu    r31, 4(SP)
        subi    SP, SP, 12

        li      r0, 16
        lfpdux  f31, SP, r0
        lfpdux  f30, SP, r0
        lfpdux  f29, SP, r0
        lfpdux  f28, SP, r0
        lfpdux  f27, SP, r0
        lfpdux  f26, SP, r0
        lfpdux  f25, SP, r0
        lfpdux  f24, SP, r0
        lfpdux  f23, SP, r0
        lfpdux  f22, SP, r0
        lfpdux  f21, SP, r0
        lfpdux  f20, SP, r0
        lfpdux  f19, SP, r0
        lfpdux  f18, SP, r0
        lfpdux  f17, SP, r0
        lfpdux  f16, SP, r0
        lfpdux  f15, SP, r0
        lfpdux  f14, SP, r0
        addi    SP, SP, 16
        blr
        .align 4

        EPILOGUE
#endif
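Editor's note: the inner loops above (.L52/.L56/.L66) only accumulate a running complex dot product over the already-processed part of the panel (loop count KK or K-KK, depending on the variant); the actual solve happens after the fpadd/fpsub sequence. A minimal C sketch of that accumulate-then-subtract step for the 2-row strip, assuming double-complex data; acc_sub_2x1 and its parameter names are illustrative, not from the source:

    #include <complex.h>

    /* Illustrative sketch (not from the source): the accumulate-then-
     * subtract step of the 2-row strip.  a is the packed A panel (two
     * complex values per k), b the packed B column (one per k), rhs the
     * stored right-hand side (f16/f17), x what the diagonal solve gets. */
    static void acc_sub_2x1(int kk,
                            const double complex *a,
                            const double complex *b,
                            const double complex *rhs,
                            double complex x[2])
    {
        double complex s0 = 0.0, s1 = 0.0;
        for (int k = 0; k < kk; k++) {   /* FXCPMADD/FXCSMADD pairs */
            s0 += a[2 * k]     * b[k];
            s1 += a[2 * k + 1] * b[k];
        }
        x[0] = rhs[0] - s0;              /* fpsub f0, f16, f0 */
        x[1] = rhs[1] - s1;              /* fpsub f1, f17, f1 */
    }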
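The #ifdef LN branch then applies the 2x2 (or, in the .L51 path, 4x4) lower-triangular diagonal block bottom-up. A sketch of the 2x2 case, assuming, as is conventional for these trsm kernels, that the packed diagonal entries are stored as reciprocals so the solve needs only multiplies; solve_ln_2x1 and its parameter names are illustrative, with inv_d0 corresponding to A1, l10 to A2, and inv_d1 to A3 in the listing:

    #include <complex.h>

    /* Illustrative sketch (not from the source) of the #ifdef LN solve
     * of a 2x2 lower-triangular diagonal block, assuming the packed
     * diagonal is stored pre-inverted. */
    static void solve_ln_2x1(double complex inv_d0, double complex l10,
                             double complex inv_d1, double complex x[2])
    {
        x[1] = inv_d1 * x[1];   /* fxpmul + FXCXNPMA: complex multiply   */
        x[0] -= l10 * x[1];     /* fxcpnmsub + FXCXNSMA: eliminate x[1]  */
        x[0] = inv_d0 * x[0];   /* scale by the remaining diagonal entry */
    }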