📄 ztrsm_kernel_hummer_lt.s
字号:
/* ------------------------------------------------------------------
 * ZTRSM micro-kernel chunk — PowerPC 440 FP2 "Double Hummer" paired
 * FPU (BlueGene).  This is the middle of a larger file: it begins with
 * the write-back/epilogue of the preceding 2x2 (M%4==2, N=2) block and
 * ends mid-way through the LT solve of the 4x1 block (the trailing
 * "#ifdef LT" continues past this chunk).
 *
 * Conventions visible in this code:
 *   AO/AO2, BO/BO2 : walking pointers into the packed A and B panels
 *                    (the *2 variants run 2*SIZE ahead for paired loads)
 *   CO1, CO2       : current C column pointers
 *   K, KK, TEMP,
 *   AORIG, OFFSET  : trsm panel bookkeeping (KK = solved depth)
 *   ZBASE_SHIFT    : log2 of a complex element's byte size
 *   LN/LT/RN/RT    : cpp-selected trsm variant (side x transpose)
 *   LFPDUX/STFPDUX : paired (primary+secondary) FP load/store w/ update
 *   FXCPMADD/FXCSMADD, fxpmul, fxcpnmsub, FXCXNPMA, FXCXNSMA :
 *                    FP2 cross/paired multiply-add forms used to build
 *                    complex multiply-accumulate (per PPC440 FP2 ISA)
 * ------------------------------------------------------------------ */

/* ---- epilogue of the preceding block: write back solved results ---- */
#ifdef LN
	subi	CO1, CO1, 4 * SIZE	/* LN walks C backwards: rewind before store */
	subi	CO2, CO2, 4 * SIZE
#endif

#if defined(LN) || defined(LT)
	/* left-side solve: results go back into the packed B panel */
	STFPDUX	f0, BO,  INC4
	STFPDUX	f8, BO2, INC4
	STFPDUX	f1, BO,  INC4
	STFPDUX	f9, BO2, INC4
	subi	BO,  BO,  8 * SIZE	/* undo the post-increments */
	subi	BO2, BO2, 8 * SIZE
#else
	/* right-side solve: results go back into the packed A panel */
	STFPDUX	f0, AO,  INC4
	STFPDUX	f1, AO2, INC4
	STFPDUX	f8, AO,  INC4
	STFPDUX	f9, AO2, INC4
	subi	AO,  AO,  8 * SIZE
	subi	AO2, AO2, 8 * SIZE
#endif

	/* scatter the two complex results per column into C
	   (STFDUX = real part, STFSDUX = imaginary/secondary part) */
	STFDUX	f0, CO1, INC
	STFSDUX	f0, CO1, INC
	STFDUX	f1, CO1, INC
	STFSDUX	f1, CO1, INC

	STFDUX	f8, CO2, INC
	STFSDUX	f8, CO2, INC
	STFDUX	f9, CO2, INC
	STFSDUX	f9, CO2, INC

#ifdef LN
	subi	CO1, CO1, 4 * SIZE	/* rewind again so CO points at the block start */
	subi	CO2, CO2, 4 * SIZE
#endif

#ifdef RT
	slwi	r0, K, 1 + ZBASE_SHIFT	/* r0 = K * 2 complex elements */
	add	AORIG, AORIG, r0
#endif

#if defined(LT) || defined(RN)
	/* skip the K-KK untouched rows of both panels */
	sub	TEMP, K, KK
	slwi	r0, TEMP, 1 + ZBASE_SHIFT
	add	AO, AO, r0
	add	BO, BO, r0
#endif

#ifdef LT
	addi	KK, KK, 2		/* two more rows solved */
#endif

#ifdef LN
	subi	KK, KK, 2
#endif

	li	r0, FZERO
	lfpsx	f0, SP, r0		/* reload paired zero into f0 for the next block */
	.align 4

/* ==== .L30: M%2 == 1 remainder block for the 2-column sweep ==== */
.L30:
	andi.	I, M, 1
	beq	.L49			/* no odd row -> finish the N=2 columns */

#if defined(LT) || defined(RN)
	addi	AO2, AO, 2 * SIZE
	fpmr	f1, f0			/* clear accumulators f1..f3 (f0 is zero) */
	addi	BO,  B, - 4 * SIZE
	fpmr	f2, f0
	addi	BO2, B, - 2 * SIZE
	fpmr	f3, f0

	srawi.	r0, KK, 2		/* unrolled-by-4 trip count */
	mtspr	CTR, r0
	ble	.L34
#else
#ifdef LN
	slwi	r0, K, 0 + ZBASE_SHIFT
	sub	AORIG, AORIG, r0	/* step the A origin back one row */
#endif

	/* position AO/BO at the diagonal for the backward variants */
	slwi	r0  , KK, 0 + ZBASE_SHIFT
	slwi	TEMP, KK, 1 + ZBASE_SHIFT
	add	AO, AORIG, r0
	add	BO, B,     TEMP
	sub	TEMP, K, KK

	addi	AO2, AO, 2 * SIZE
	fpmr	f1, f0
	addi	BO,  BO, - 4 * SIZE
	fpmr	f2, f0
	addi	BO2, BO,   2 * SIZE
	fpmr	f3, f0

	srawi.	r0, TEMP, 2
	mtspr	CTR, r0
	ble	.L34
#endif

	/* software-pipeline preload: 4 k-iterations of A (1 row) and B (2 cols) */
	LFPDUX	A1, AO,  INC4
	LFPDUX	B1, BO,  INC4
	LFPDUX	B2, BO2, INC4
	LFPDUX	A2, AO2, INC4

	LFPDUX	B3, BO,  INC4
	LFPDUX	B4, BO2, INC4
	LFPDUX	A3, AO,  INC4

	LFPDUX	A5, BO,  INC4
	LFPDUX	A6, BO2, INC4
	LFPDUX	A4, AO2, INC4
	LFPDUX	A7, BO,  INC4
	LFPDUX	A8, BO2, INC4
	bdz-	.L33
	.align 4

/* ---- .L32: 1x2 main loop, 4 k-steps per iteration, loads overlapped ---- */
.L32:
	FXCPMADD	f0, B1, A1, f0	/* f0 += re(B1)*A1; f1 += im part (cross) */
	FXCSMADD	f1, B1, A1, f1
	LFPDUX	B1, BO,  INC4
	FXCPMADD	f2, B2, A1, f2
	FXCSMADD	f3, B2, A1, f3
	LFPDUX	B2, BO2, INC4
	LFPDUX	A1, AO,  INC4

	FXCPMADD	f0, B3, A2, f0
	FXCSMADD	f1, B3, A2, f1
	LFPDUX	B3, BO,  INC4
	FXCPMADD	f2, B4, A2, f2
	FXCSMADD	f3, B4, A2, f3
	LFPDUX	B4, BO2, INC4
	LFPDUX	A2, AO2, INC4

	FXCPMADD	f0, A5, A3, f0	/* A5..A8 carry B-panel values here (register reuse) */
	FXCSMADD	f1, A5, A3, f1
	LFPDUX	A5, BO,  INC4
	FXCPMADD	f2, A6, A3, f2
	FXCSMADD	f3, A6, A3, f3
	LFPDUX	A6, BO2, INC4
	LFPDUX	A3, AO,  INC4

	FXCPMADD	f0, A7, A4, f0
	FXCSMADD	f1, A7, A4, f1
	LFPDUX	A7, BO,  INC4
	FXCPMADD	f2, A8, A4, f2
	FXCSMADD	f3, A8, A4, f3
	LFPDUX	A8, BO2, INC4
	LFPDUX	A4, AO2, INC4
	bdnz+	.L32
	.align 4

/* ---- .L33: drain the pipeline (same 4 k-steps, no further loads) ---- */
.L33:
	FXCPMADD	f0, B1, A1, f0
	FXCSMADD	f1, B1, A1, f1
	FXCPMADD	f2, B2, A1, f2
	FXCSMADD	f3, B2, A1, f3

	FXCPMADD	f0, B3, A2, f0
	FXCSMADD	f1, B3, A2, f1
	FXCPMADD	f2, B4, A2, f2
	FXCSMADD	f3, B4, A2, f3

	FXCPMADD	f0, A5, A3, f0
	FXCSMADD	f1, A5, A3, f1
	FXCPMADD	f2, A6, A3, f2
	FXCSMADD	f3, A6, A3, f3

	FXCPMADD	f0, A7, A4, f0
	FXCSMADD	f1, A7, A4, f1
	FXCPMADD	f2, A8, A4, f2
	FXCSMADD	f3, A8, A4, f3
	.align 4

/* ---- .L34: handle K%4 leftover k-iterations ---- */
.L34:
#if defined(LT) || defined(RN)
	andi.	r0, KK, 3
	mtspr	CTR, r0
	ble+	.L38
#else
	andi.	r0, TEMP, 3
	mtspr	CTR, r0
	ble+	.L38
#endif

	LFPDX	A1, AO,  INC4		/* LFPDX: no pointer update; stepped via INC2 below */
	LFPDUX	B1, BO,  INC4
	LFPDUX	B2, BO2, INC4
	add	AO, AO, INC2
	bdz-	.L37
	.align 4

.L36:
	FXCPMADD	f0, B1, A1, f0
	FXCSMADD	f1, B1, A1, f1
	LFPDUX	B1, BO, INC4
	FXCPMADD	f2, B2, A1, f2
	FXCSMADD	f3, B2, A1, f3
	LFPDX	A1, AO, INC4
	LFPDUX	B2, BO2, INC4
	add	AO, AO, INC2
	bdnz+	.L36
	.align 4

.L37:
	FXCPMADD	f0, B1, A1, f0
	FXCSMADD	f1, B1, A1, f1
	FXCPMADD	f2, B2, A1, f2
	FXCSMADD	f3, B2, A1, f3
	.align 4

/* ---- .L38: fold cross-accumulators, then the 1x2 triangular solve ---- */
.L38:
	fpadd	f0, f0, f1		/* combine primary/secondary partial sums */
	fpadd	f2, f2, f3

#if defined(LN) || defined(RT)
#ifdef LN
	subi	r0, KK, 1
#else
	subi	r0, KK, 2
#endif
	slwi	TEMP, r0, 0 + ZBASE_SHIFT
	slwi	r0,   r0, 1 + ZBASE_SHIFT
	add	AO, AORIG, TEMP
	add	BO, B,     r0
	addi	BO, BO, - 4 * SIZE
#endif

	addi	AO2, AO, 2 * SIZE
	addi	BO2, BO, 2 * SIZE

	/* rhs = packed value - accumulated product */
#if defined(LN) || defined(LT)
	LFPDX	f16, BO,  INC4
	LFPDX	f17, BO2, INC4
#else
	LFPDX	f16, AO,  INC4
	LFPDX	f17, AO2, INC4
#endif

	fpsub	f0, f16, f0
	fpsub	f2, f17, f2

#ifdef LN
	/* 1x1 diagonal: complex multiply by the (inverted) diagonal element */
	LFPDX	A1, AO, INC4
	fxpmul	f4, A1, f0
	fxpmul	f5, A1, f2
	FXCXNPMA	f0, A1, f0, f4
	FXCXNPMA	f2, A1, f2, f5
#endif

#ifdef LT
	LFPDX	A1, AO, INC4
	fxpmul	f4, A1, f0
	fxpmul	f5, A1, f2
	FXCXNPMA	f0, A1, f0, f4
	FXCXNPMA	f2, A1, f2, f5
#endif

#ifdef RN
	/* 2x2 upper-triangular forward solve on the B side */
	LFPDUX	A1, BO,  INC4
	LFPDUX	A2, BO2, INC4
	add	BO, BO, INC4
	LFPDUX	A3, BO2, INC4
	subi	BO,  BO,  8 * SIZE
	subi	BO2, BO2, 8 * SIZE

	fxpmul	f4, A1, f0
	FXCXNPMA	f0, A1, f0, f4	/* x0 *= 1/b00 (complex) */

	fxcpnmsub	f2, A2, f0, f2	/* x1 -= b01 * x0 */
	FXCXNSMA	f2, A2, f0, f2

	fxpmul	f4, A3, f2
	FXCXNPMA	f2, A3, f2, f4	/* x1 *= 1/b11 */
#endif

#ifdef RT
	/* 2x2 lower-triangular backward solve on the B side */
	LFPDUX	A1, BO, INC4
	add	BO2, BO2, INC4
	LFPDUX	A2, BO,  INC4
	LFPDUX	A3, BO2, INC4
	subi	BO,  BO,  8 * SIZE
	subi	BO2, BO2, 8 * SIZE

	fxpmul	f4, A3, f2
	FXCXNPMA	f2, A3, f2, f4	/* x1 *= 1/b11 */

	fxcpnmsub	f0, A2, f2, f0	/* x0 -= b10 * x1 */
	FXCXNSMA	f0, A2, f2, f0

	fxpmul	f4, A1, f0
	FXCXNPMA	f0, A1, f0, f4	/* x0 *= 1/b00 */
#endif

#ifdef LN
	subi	CO1, CO1, 2 * SIZE
	subi	CO2, CO2, 2 * SIZE
#endif

	/* write the solved 1x2 result back to the packed panel and to C */
#if defined(LN) || defined(LT)
	STFPDX	f0, BO,  INC4
	STFPDX	f2, BO2, INC4
#else
	STFPDX	f0, AO,  INC4
	STFPDX	f2, AO2, INC4
#endif

	STFDUX	f0, CO1, INC
	STFSDUX	f0, CO1, INC
	STFDUX	f2, CO2, INC
	STFSDUX	f2, CO2, INC

#ifdef LN
	subi	CO1, CO1, 2 * SIZE
	subi	CO2, CO2, 2 * SIZE
#endif

#ifdef RT
	slwi	r0, K, 0 + ZBASE_SHIFT
	add	AORIG, AORIG, r0
#endif

#if defined(LT) || defined(RN)
	sub	TEMP, K, KK
	slwi	r0,   TEMP, 0 + ZBASE_SHIFT
	slwi	TEMP, TEMP, 1 + ZBASE_SHIFT
	add	AO, AO, r0
	add	BO, BO, TEMP
#endif

#ifdef LT
	addi	KK, KK, 1
#endif

#ifdef LN
	subi	KK, KK, 1
#endif

	li	r0, FZERO
	lfpsx	f0, SP, r0
	.align 4

/* ==== .L49: end of one N=2 column pair; advance B/KK and loop on J ==== */
.L49:
#ifdef LN
	slwi	r0, K, 1 + ZBASE_SHIFT
	add	B, B, r0
#endif

#if defined(LT) || defined(RN)
	addi	B, BO, 4 * SIZE		/* BO finished the panel; rebase B from it */
#endif

#ifdef RN
	addi	KK, KK, 2
#endif

#ifdef RT
	subi	KK, KK, 2
#endif

	addic.	J, J, -1
	bgt+	.L10			/* next pair of columns (label outside this chunk) */
	.align 4

/* ==== .L50: N%2 == 1 — single remaining column ==== */
.L50:
	andi.	J, N, 1
	beq	.L999			/* all columns done (label outside this chunk) */

#ifdef RT
	slwi	r0, K, 0 + ZBASE_SHIFT
	sub	B, B, r0
	sub	C, C, LDC
#endif

	mr	CO1, C

#ifdef LN
	add	KK, M, OFFSET
#endif

#ifdef LT
	mr	KK, OFFSET
#endif

#if defined(LN) || defined(RT)
	addi	AORIG, A, -2 * SIZE
#else
	addi	AO, A, -2 * SIZE
#endif

#ifndef RT
	add	C, CO2, LDC	/* NOTE(review): only CO1 is set in this N=1 path;
				   CO2 here looks like it should be CO1 — verify
				   against the upstream kernel before relying on it */
#endif

	li	r0, FZERO
	lfpsx	f0, SP, r0

	srawi.	I, M, 2			/* 4-row blocks of the single column */
	ble	.L60
	.align 4

/* ==== .L51: 4x1 block — accumulate, then (below) solve ==== */
.L51:
#if defined(LT) || defined(RN)
	fpmr	f4, f0			/* clear 8 accumulators: f0..f3 + cross f4..f7 */
	addi	BO, B, - 2 * SIZE
	fpmr	f1, f0
	fpmr	f5, f0
	fpmr	f2, f0
	fpmr	f6, f0
	fpmr	f3, f0
	fpmr	f7, f0

	srawi.	r0, KK, 2
	mtspr	CTR, r0
	ble	.L54
#else
#ifdef LN
	slwi	r0, K, 2 + ZBASE_SHIFT
	sub	AORIG, AORIG, r0
#endif

	slwi	r0  , KK, 2 + ZBASE_SHIFT
	slwi	TEMP, KK, 0 + ZBASE_SHIFT
	add	AO, AORIG, r0
	add	BO, B,     TEMP
	sub	TEMP, K, KK

	fpmr	f4, f0
	addi	BO, BO, - 2 * SIZE
	fpmr	f1, f0
	fpmr	f5, f0
	fpmr	f2, f0
	fpmr	f6, f0
	fpmr	f3, f0
	fpmr	f7, f0

	srawi.	r0, TEMP, 2
	mtspr	CTR, r0
	ble	.L54
#endif

	/* preload 2 k-steps: one B value, four A rows each */
	LFPDUX	B1, BO, INC2
	LFPDUX	A1, AO, INC2
	LFPDUX	A2, AO, INC2
	LFPDUX	B2, BO, INC2
	LFPDUX	A3, AO, INC2
	LFPDUX	A4, AO, INC2

	LFPDUX	B3, BO, INC2
	LFPDUX	A5, AO, INC2
	LFPDUX	A6, AO, INC2
	LFPDUX	A7, AO, INC2
	LFPDUX	A8, AO, INC2
	bdz-	.L53
	.align 4

/* ---- .L52: 4x1 main loop, 4 k-steps/iter; nops pad the dual-issue slots ---- */
.L52:
	FXCPMADD	f0, B1, A1, f0
	LFPDUX	B4, BO, INC2
	FXCSMADD	f4, B1, A1, f4
	LFPDUX	A1, AO, INC2
	FXCPMADD	f1, B1, A2, f1
	nop
	FXCSMADD	f5, B1, A2, f5
	LFPDUX	A2, AO, INC2

	FXCPMADD	f2, B1, A3, f2
	nop
	FXCSMADD	f6, B1, A3, f6
	LFPDUX	A3, AO, INC2
	FXCPMADD	f3, B1, A4, f3
	nop
	FXCSMADD	f7, B1, A4, f7
	LFPDUX	A4, AO, INC2

	FXCPMADD	f0, B2, A5, f0
	LFPDUX	B1, BO, INC2
	FXCSMADD	f4, B2, A5, f4
	LFPDUX	A5, AO, INC2
	FXCPMADD	f1, B2, A6, f1
	nop
	FXCSMADD	f5, B2, A6, f5
	LFPDUX	A6, AO, INC2

	FXCPMADD	f2, B2, A7, f2
	nop
	FXCSMADD	f6, B2, A7, f6
	LFPDUX	A7, AO, INC2
	FXCPMADD	f3, B2, A8, f3
	nop
	FXCSMADD	f7, B2, A8, f7
	LFPDUX	A8, AO, INC2

	FXCPMADD	f0, B3, A1, f0
	LFPDUX	B2, BO, INC2
	FXCSMADD	f4, B3, A1, f4
	LFPDUX	A1, AO, INC2
	FXCPMADD	f1, B3, A2, f1
	nop
	FXCSMADD	f5, B3, A2, f5
	LFPDUX	A2, AO, INC2

	FXCPMADD	f2, B3, A3, f2
	nop
	FXCSMADD	f6, B3, A3, f6
	LFPDUX	A3, AO, INC2
	FXCPMADD	f3, B3, A4, f3
	nop
	FXCSMADD	f7, B3, A4, f7
	LFPDUX	A4, AO, INC2

	FXCPMADD	f0, B4, A5, f0
	LFPDUX	B3, BO, INC2
	FXCSMADD	f4, B4, A5, f4
	LFPDUX	A5, AO, INC2
	FXCPMADD	f1, B4, A6, f1
	nop
	FXCSMADD	f5, B4, A6, f5
	LFPDUX	A6, AO, INC2

	FXCPMADD	f2, B4, A7, f2
	nop
	FXCSMADD	f6, B4, A7, f6
	LFPDUX	A7, AO, INC2
	FXCPMADD	f3, B4, A8, f3
	nop
	FXCSMADD	f7, B4, A8, f7
	LFPDUX	A8, AO, INC2
	bdnz+	.L52
	.align 4

/* ---- .L53: drain — last 4 k-steps, loads only where still needed ---- */
.L53:
	FXCPMADD	f0, B1, A1, f0
	LFPDUX	B4, BO, INC2
	FXCSMADD	f4, B1, A1, f4
	LFPDUX	A1, AO, INC2
	FXCPMADD	f1, B1, A2, f1
	nop
	FXCSMADD	f5, B1, A2, f5
	LFPDUX	A2, AO, INC2

	FXCPMADD	f2, B1, A3, f2
	nop
	FXCSMADD	f6, B1, A3, f6
	LFPDUX	A3, AO, INC2
	FXCPMADD	f3, B1, A4, f3
	nop
	FXCSMADD	f7, B1, A4, f7
	LFPDUX	A4, AO, INC2

	FXCPMADD	f0, B2, A5, f0
	nop
	FXCSMADD	f4, B2, A5, f4
	LFPDUX	A5, AO, INC2
	FXCPMADD	f1, B2, A6, f1
	nop
	FXCSMADD	f5, B2, A6, f5
	LFPDUX	A6, AO, INC2

	FXCPMADD	f2, B2, A7, f2
	nop
	FXCSMADD	f6, B2, A7, f6
	LFPDUX	A7, AO, INC2
	FXCPMADD	f3, B2, A8, f3
	nop
	FXCSMADD	f7, B2, A8, f7
	LFPDUX	A8, AO, INC2

	FXCPMADD	f0, B3, A1, f0
	FXCSMADD	f4, B3, A1, f4
	FXCPMADD	f1, B3, A2, f1
	FXCSMADD	f5, B3, A2, f5

	FXCPMADD	f2, B3, A3, f2
	FXCSMADD	f6, B3, A3, f6
	FXCPMADD	f3, B3, A4, f3
	FXCSMADD	f7, B3, A4, f7

	FXCPMADD	f0, B4, A5, f0
	FXCSMADD	f4, B4, A5, f4
	FXCPMADD	f1, B4, A6, f1
	FXCSMADD	f5, B4, A6, f5

	FXCPMADD	f2, B4, A7, f2
	FXCSMADD	f6, B4, A7, f6
	FXCPMADD	f3, B4, A8, f3
	FXCSMADD	f7, B4, A8, f7
	.align 4

/* ---- .L54: K%4 leftover k-iterations for the 4x1 block ---- */
.L54:
#if defined(LT) || defined(RN)
	andi.	r0, KK, 3
	mtspr	CTR, r0
	ble+	.L58
#else
	andi.	r0, TEMP, 3
	mtspr	CTR, r0
	ble+	.L58
#endif

	LFPDUX	A1, AO, INC2
	LFPDUX	B1, BO, INC2
	LFPDUX	A2, AO, INC2
	LFPDUX	A3, AO, INC2
	LFPDUX	A4, AO, INC2
	bdz-	.L57
	.align 4

.L56:
	FXCPMADD	f0, B1, A1, f0
	FXCSMADD	f4, B1, A1, f4
	LFPDUX	A1, AO, INC2
	FXCPMADD	f1, B1, A2, f1
	FXCSMADD	f5, B1, A2, f5
	LFPDUX	A2, AO, INC2

	FXCPMADD	f2, B1, A3, f2
	FXCSMADD	f6, B1, A3, f6
	LFPDUX	A3, AO, INC2
	FXCPMADD	f3, B1, A4, f3
	FXCSMADD	f7, B1, A4, f7
	LFPDUX	A4, AO, INC2
	LFPDUX	B1, BO, INC2
	bdnz+	.L56
	.align 4

.L57:
	FXCPMADD	f0, B1, A1, f0
	FXCSMADD	f4, B1, A1, f4
	FXCPMADD	f1, B1, A2, f1
	FXCSMADD	f5, B1, A2, f5

	FXCPMADD	f2, B1, A3, f2
	FXCSMADD	f6, B1, A3, f6
	FXCPMADD	f3, B1, A4, f3
	FXCSMADD	f7, B1, A4, f7
	.align 4

/* ---- .L58: fold cross-accumulators, then the 4x1 triangular solve ---- */
.L58:
	fpadd	f0, f0, f4
	fpadd	f1, f1, f5
	fpadd	f2, f2, f6
	fpadd	f3, f3, f7

#if defined(LN) || defined(RT)
#ifdef LN
	subi	r0, KK, 4
#else
	subi	r0, KK, 1
#endif
	slwi	TEMP, r0, 2 + ZBASE_SHIFT
	slwi	r0,   r0, 0 + ZBASE_SHIFT
	add	AO, AORIG, TEMP
	add	BO, B,     r0
	addi	BO, BO, - 2 * SIZE
#endif

	/* rhs = packed values - accumulated products */
#if defined(LN) || defined(LT)
	LFPDUX	f16, BO, INC2
	LFPDUX	f17, BO, INC2
	LFPDUX	f18, BO, INC2
	LFPDUX	f19, BO, INC2
	subi	BO, BO, 8 * SIZE
#else
	LFPDUX	f16, AO, INC2
	LFPDUX	f17, AO, INC2
	LFPDUX	f18, AO, INC2
	LFPDUX	f19, AO, INC2
	subi	AO, AO, 8 * SIZE
#endif

	fpsub	f0, f16, f0
	fpsub	f1, f17, f1
	fpsub	f2, f18, f2
	fpsub	f3, f19, f3

#ifdef LN
	/* backward solve with the 4x4 lower-triangular A block:
	   load the 10 non-zero entries (A1 = a00 ... A10 = a33),
	   skipping the upper-triangle slots with plain pointer bumps */
	LFPDUX	A1, AO, INC2
	add	AO, AO, INC2
	add	AO, AO, INC2
	add	AO, AO, INC2

	LFPDUX	A2, AO, INC2
	LFPDUX	A3, AO, INC2
	add	AO, AO, INC2
	add	AO, AO, INC2

	LFPDUX	A4, AO, INC2
	LFPDUX	A5, AO, INC2
	LFPDUX	A6, AO, INC2
	add	AO, AO, INC2

	LFPDUX	A7, AO, INC2
	LFPDUX	A8, AO, INC2
	LFPDUX	A9, AO, INC2
	LFPDUX	A10, AO, INC2

	subi	AO, AO, 32 * SIZE	/* rewind past the 4x4 block */

	fxpmul	f4, A10, f3
	FXCXNPMA	f3, A10, f3, f4	/* x3 *= 1/a33 (complex) */

	fxcpnmsub	f2, A9, f3, f2	/* x2 -= a32 * x3 */
	FXCXNSMA	f2, A9, f3, f2
	fxcpnmsub	f1, A8, f3, f1	/* x1 -= a31 * x3 */
	FXCXNSMA	f1, A8, f3, f1
	fxcpnmsub	f0, A7, f3, f0	/* x0 -= a30 * x3 */
	FXCXNSMA	f0, A7, f3, f0

	fxpmul	f4, A6, f2
	FXCXNPMA	f2, A6, f2, f4	/* x2 *= 1/a22 */

	fxcpnmsub	f1, A5, f2, f1	/* x1 -= a21 * x2 */
	FXCXNSMA	f1, A5, f2, f1
	fxcpnmsub	f0, A4, f2, f0	/* x0 -= a20 * x2 */
	FXCXNSMA	f0, A4, f2, f0

	fxpmul	f4, A3, f1
	FXCXNPMA	f1, A3, f1, f4	/* x1 *= 1/a11 */

	fxcpnmsub	f0, A2, f1, f0	/* x0 -= a10 * x1 */
	FXCXNSMA	f0, A2, f1, f0

	fxpmul	f4, A1, f0
	FXCXNPMA	f0, A1, f0, f4	/* x0 *= 1/a00 */
#endif

/* LT solve of the 4x1 block continues beyond this chunk */
#ifdef LT
⌨️ 快捷键说明
复制代码
Ctrl + C
搜索代码
Ctrl + F
全屏模式
F11
切换主题
Ctrl + Shift + D
显示快捷键
?
增大字号
Ctrl + =
减小字号
Ctrl + -