sparc.md
(match_operand:QI 1 "reg_or_0_operand" "rJ")) (clobber (match_scratch:SI 2 "=&r"))] "(reload_completed || reload_in_progress) && ! TARGET_PTR64" "sethi %%hi(%a0),%2\;stb %r1,[%2+%%lo(%a0)]" [(set_attr "type" "store") (set_attr "length" "2")])(define_expand "movhi" [(set (match_operand:HI 0 "general_operand" "") (match_operand:HI 1 "general_operand" ""))] "" "{ if (emit_move_sequence (operands, HImode)) DONE;}")(define_insn "*movhi_insn" [(set (match_operand:HI 0 "reg_or_nonsymb_mem_operand" "=r,r,r,Q") (match_operand:HI 1 "move_operand" "rI,K,Q,rJ"))] "register_operand (operands[0], HImode) || register_operand (operands[1], HImode) || operands[1] == const0_rtx" "@ mov %1,%0 sethi %%hi(%a1),%0 lduh %1,%0 sth %r1,%0" [(set_attr "type" "move,move,load,store") (set_attr "length" "*,1,*,1")])(define_insn "*lo_sum_hi" [(set (match_operand:HI 0 "register_operand" "=r") (lo_sum:HI (match_operand:HI 1 "register_operand" "r") (match_operand 2 "immediate_operand" "in")))] "" "or %1,%%lo(%a2),%0" [(set_attr "length" "1")])(define_insn "*store_hi" [(set (mem:HI (match_operand:SI 0 "symbolic_operand" "")) (match_operand:HI 1 "reg_or_0_operand" "rJ")) (clobber (match_scratch:SI 2 "=&r"))] "(reload_completed || reload_in_progress) && ! TARGET_PTR64" "sethi %%hi(%a0),%2\;sth %r1,[%2+%%lo(%a0)]" [(set_attr "type" "store") (set_attr "length" "2")])(define_expand "movsi" [(set (match_operand:SI 0 "general_operand" "") (match_operand:SI 1 "general_operand" ""))] "" "{ if (emit_move_sequence (operands, SImode)) DONE;}");; We must support both 'r' and 'f' registers here, because combine may;; convert SFmode hard registers to SImode hard registers when simplifying;; subreg sets.;; We cannot combine the similar 'r' and 'f' constraints, because it causes;; problems with register allocation. Reload might try to put an integer;; in an fp register, or an fp number is an integer register.(define_insn "*movsi_insn" [(set (match_operand:SI 0 "reg_or_nonsymb_mem_operand" "=r,f,r,r,f,Q,Q") (match_operand:SI 1 "move_operand" "rI,!f,K,Q,!Q,rJ,!f"))] "register_operand (operands[0], SImode) || register_operand (operands[1], SImode) || operands[1] == const0_rtx" "@ mov %1,%0 fmovs %1,%0 sethi %%hi(%a1),%0 ld %1,%0 ld %1,%0 st %r1,%0 st %r1,%0" [(set_attr "type" "move,fp,move,load,load,store,store") (set_attr "length" "*,*,1,*,*,*,*")])(define_insn "*store_si" [(set (mem:SI (match_operand:SI 0 "symbolic_operand" "")) (match_operand:SI 1 "reg_or_0_operand" "rJ")) (clobber (match_scratch:SI 2 "=&r"))] "(reload_completed || reload_in_progress) && ! TARGET_PTR64" "sethi %%hi(%a0),%2\;st %r1,[%2+%%lo(%a0)]" [(set_attr "type" "store") (set_attr "length" "2")])(define_expand "movdi" [(set (match_operand:DI 0 "reg_or_nonsymb_mem_operand" "") (match_operand:DI 1 "general_operand" ""))] "" "{ if (emit_move_sequence (operands, DImode)) DONE;}")(define_insn "*movdi_sp32_insn" [(set (match_operand:DI 0 "reg_or_nonsymb_mem_operand" "=r,T,U,Q,r,r,?f,?f,?Q") (match_operand:DI 1 "general_operand" "r,U,T,r,Q,i,f,Q,f"))] "! TARGET_V9 && (register_operand (operands[0], DImode) || register_operand (operands[1], DImode) || operands[1] == const0_rtx)" "*{ if (FP_REG_P (operands[0]) || FP_REG_P (operands[1])) return output_fp_move_double (operands); return output_move_double (operands);}" [(set_attr "type" "move,store,load,store,load,multi,fp,fpload,fpstore") (set_attr "length" "2,1,1,3,3,3,2,3,3")]);;; ??? The trick used below can be extended to load any negative 32 bit;;; constant in two instructions. 
;;; ??? The trick used below can be extended to load any negative 32 bit
;;; constant in two instructions.  Currently the compiler will use HIGH/LO_SUM
;;; for anything not matching the HIK constraints, which results in 5
;;; instructions.  Positive 32 bit constants can be loaded in the obvious way
;;; with sethi/ori.  To extend the trick, in the xor instruction, use
;;;	xor %o0, ((op1 & 0x3ff) | -0x400), %o0
;;; This needs the original value of operands[1], not the inverted value.

(define_insn "*movdi_sp64_insn"
  [(set (match_operand:DI 0 "reg_or_nonsymb_mem_operand" "=r,r,r,Q,?f,?f,?Q")
	(match_operand:DI 1 "move_operand" "rI,K,Q,rJ,f,Q,f"))]
  "TARGET_V9
   && (register_operand (operands[0], DImode)
       || register_operand (operands[1], DImode)
       || operands[1] == const0_rtx)"
  "*
{
  switch (which_alternative)
    {
    case 0:
      return \"mov %1,%0\";
    case 1:
      /* Sethi does not sign extend, so we must use a little trickery
	 to use it for negative numbers.  Invert the constant before
	 loading it in, then use a xor immediate to invert the loaded bits
	 (along with the upper 32 bits) to the desired constant.  This
	 works because the sethi and immediate fields overlap.  */
      if ((INTVAL (operands[1]) & 0x80000000) == 0)
	return \"sethi %%hi(%a1),%0\";
      else
	{
	  operands[1] = gen_rtx (CONST_INT, VOIDmode,
				 ~ INTVAL (operands[1]));
	  output_asm_insn (\"sethi %%hi(%a1),%0\", operands);
	  /* The low 10 bits are already zero, but invert the rest.
	     Assemblers don't accept 0x1c00, so use -0x400 instead.  */
	  return \"xor %0,-0x400,%0\";
	}
    case 2:
      return \"ldx %1,%0\";
    case 3:
      return \"stx %r1,%0\";
    case 4:
      return \"mov %1,%0\";
    case 5:
      return \"ldd %1,%0\";
    case 6:
      return \"std %1,%0\";
    }
}"
  [(set_attr "type" "move,move,load,store,fp,fpload,fpstore")
   (set_attr "length" "1,2,1,1,1,1,1")])
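;; Worked example of the sethi/xor trick above (an illustration, not part of
;; the original comments; %o0 stands in for whatever register is allocated):
;; to load -4096, a constant whose low 10 bits are already zero, the pattern
;; inverts it to 0xfff and emits
;;	sethi %hi(0xfff),%o0	! %o0 = 0x00000c00, upper bits cleared
;;	xor %o0,-0x400,%o0	! immediate sign extends to ...fffffc00
;; The xor flips bits 63..10 and leaves the low 10 bits alone, so %o0 ends
;; up as 0xfffffffffffff000, which is -4096 sign extended to 64 bits, in
;; two instructions.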
;; ??? There's no symbolic (set (mem:DI ...) ...).
;; Experimentation with v9 suggested one isn't needed.

;; Block move insns.

;; ??? We get better code without it.  See output_block_move in sparc.c.

;; The definition of this insn does not really explain what it does,
;; but it should suffice
;; that anything generated as this insn will be recognized as one
;; and that it will not successfully combine with anything.
;(define_expand "movstrsi"
;  [(parallel [(set (mem:BLK (match_operand:BLK 0 "general_operand" ""))
;		   (mem:BLK (match_operand:BLK 1 "general_operand" "")))
;	      (use (match_operand:SI 2 "nonmemory_operand" ""))
;	      (use (match_operand:SI 3 "immediate_operand" ""))
;	      (clobber (match_dup 0))
;	      (clobber (match_dup 1))
;	      (clobber (match_scratch:SI 4 ""))
;	      (clobber (reg:SI 0))
;	      (clobber (reg:SI 1))])]
;  ""
;  "
;{
;  /* If the size isn't known, don't emit inline code.  output_block_move
;     would output code that's much slower than the library function.
;     Also don't output code for large blocks.  */
;  if (GET_CODE (operands[2]) != CONST_INT
;      || GET_CODE (operands[3]) != CONST_INT
;      || INTVAL (operands[2]) / INTVAL (operands[3]) > 16)
;    FAIL;
;
;  operands[0] = copy_to_mode_reg (Pmode, XEXP (operands[0], 0));
;  operands[1] = copy_to_mode_reg (Pmode, XEXP (operands[1], 0));
;  operands[2] = force_not_mem (operands[2]);
;}")

;(define_insn "*block_move_insn"
;  [(set (mem:BLK (match_operand:SI 0 "register_operand" "+r"))
;	(mem:BLK (match_operand:SI 1 "register_operand" "+r")))
;   (use (match_operand:SI 2 "nonmemory_operand" "rn"))
;   (use (match_operand:SI 3 "immediate_operand" "i"))
;   (clobber (match_dup 0))
;   (clobber (match_dup 1))
;   (clobber (match_scratch:SI 4 "=&r"))
;   (clobber (reg:SI 0))
;   (clobber (reg:SI 1))]
;  ""
;  "* return output_block_move (operands);"
;  [(set_attr "type" "multi")
;   (set_attr "length" "6")])

;; Floating point move insns

;; This pattern forces (set (reg:SF ...) (const_double ...))
;; to be reloaded by putting the constant into memory.
;; It must come before the more general movsf pattern.

(define_insn "*movsf_const_insn"
  [(set (match_operand:SF 0 "general_operand" "=?r,f,m")
	(match_operand:SF 1 "" "?F,m,G"))]
  "TARGET_FPU && GET_CODE (operands[1]) == CONST_DOUBLE"
  "*
{
  switch (which_alternative)
    {
    case 0:
      return singlemove_string (operands);
    case 1:
      return \"ld %1,%0\";
    case 2:
      return \"st %%g0,%0\";
    }
}"
  [(set_attr "type" "load,fpload,store")
   (set_attr "length" "2,1,1")])

(define_expand "movsf"
  [(set (match_operand:SF 0 "general_operand" "")
	(match_operand:SF 1 "general_operand" ""))]
  ""
  "
{
  if (emit_move_sequence (operands, SFmode))
    DONE;
}")

(define_insn "*movsf_insn"
  [(set (match_operand:SF 0 "reg_or_nonsymb_mem_operand" "=f,r,f,r,Q,Q")
	(match_operand:SF 1 "reg_or_nonsymb_mem_operand" "f,r,Q,Q,f,r"))]
  "TARGET_FPU
   && (register_operand (operands[0], SFmode)
       || register_operand (operands[1], SFmode))"
  "@
   fmovs %1,%0
   mov %1,%0
   ld %1,%0
   ld %1,%0
   st %r1,%0
   st %r1,%0"
  [(set_attr "type" "fp,move,fpload,load,fpstore,store")])

;; Exactly the same as above, except that all `f' cases are deleted.
;; This is necessary to prevent reload from ever trying to use a `f' reg
;; when -mno-fpu.

(define_insn "*movsf_no_f_insn"
  [(set (match_operand:SF 0 "reg_or_nonsymb_mem_operand" "=r,r,Q")
	(match_operand:SF 1 "reg_or_nonsymb_mem_operand" "r,Q,r"))]
  "! TARGET_FPU
   && (register_operand (operands[0], SFmode)
       || register_operand (operands[1], SFmode))"
  "@
   mov %1,%0
   ld %1,%0
   st %r1,%0"
  [(set_attr "type" "move,load,store")])

(define_insn "*store_sf"
  [(set (mem:SF (match_operand:SI 0 "symbolic_operand" "i"))
	(match_operand:SF 1 "reg_or_0_operand" "rfG"))
   (clobber (match_scratch:SI 2 "=&r"))]
  "(reload_completed || reload_in_progress) && ! TARGET_PTR64"
  "sethi %%hi(%a0),%2\;st %r1,[%2+%%lo(%a0)]"
  [(set_attr "type" "store")
   (set_attr "length" "2")])
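;; Illustrative expansion (a sketch, not from the original sources) of the
;; symbolic store patterns such as *store_sf above and *store_hi/*store_si
;; earlier: storing %f0 to a global `_x' becomes, with %g1 standing in for
;; whatever scratch register reload assigns to operand 2,
;;	sethi %hi(_x),%g1
;;	st %f0,[%g1+%lo(_x)]
;; %hi supplies the upper 22 bits of the address and %lo the remaining low
;; 10 bits, so the pair forms a full 32-bit address, which is why these
;; patterns are only enabled when ! TARGET_PTR64.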
;; This pattern forces (set (reg:DF ...) (const_double ...))
;; to be reloaded by putting the constant into memory.
;; It must come before the more general movdf pattern.

(define_insn "*movdf_const_insn"
  [(set (match_operand:DF 0 "general_operand" "=?r,e,o")
	(match_operand:DF 1 "" "?F,m,G"))]
  "TARGET_FPU && GET_CODE (operands[1]) == CONST_DOUBLE"
  "*
{
  switch (which_alternative)
    {
    case 0:
      return output_move_double (operands);
    case 1:
      return output_fp_move_double (operands);
    case 2:
      if (TARGET_V9)
	{
	  return \"stx %%g0,%0\";
	}
      else
	{
	  operands[1] = adj_offsettable_operand (operands[0], 4);
	  return \"st %%g0,%0\;st %%g0,%1\";
	}
    }
}"
  [(set_attr "type" "load,fpload,store")
   (set_attr "length" "3,3,3")])

(define_expand "movdf"
  [(set (match_operand:DF 0 "general_operand" "")
	(match_operand:DF 1 "general_operand" ""))]
  ""
  "
{
  if (emit_move_sequence (operands, DFmode))
    DONE;
}")

(define_insn "*movdf_insn"
  [(set (match_operand:DF 0 "reg_or_nonsymb_mem_operand" "=T,U,e,r,Q,Q,e,r")
	(match_operand:DF 1 "reg_or_nonsymb_mem_operand" "U,T,e,r,e,r,Q,Q"))]
  "TARGET_FPU
   && (register_operand (operands[0], DFmode)
       || register_operand (operands[1], DFmode))"
  "*
{
  if (FP_REG_P (operands[0]) || FP_REG_P (operands[1]))
    return output_fp_move_double (operands);
  return output_move_double (operands);
}"
  [(set_attr "type" "fpstore,fpload,fp,move,fpstore,store,fpload,load")
   (set_attr "length" "1,1,2,2,3,3,3,3")])

;; Exactly the same as above, except that all `e' cases are deleted.
;; This is necessary to prevent reload from ever trying to use a `e' reg
;; when -mno-fpu.

(define_insn "*movdf_no_e_insn"
  [(set (match_operand:DF 0 "reg_or_nonsymb_mem_operand" "=T,U,r,Q,&r")
	(match_operand:DF 1 "reg_or_nonsymb_mem_operand" "U,T,r,r,Q"))]
  "! TARGET_FPU
   && (register_operand (operands[0], DFmode)
       || register_operand (operands[1], DFmode))"
  "* return output_move_double (operands);"
  [(set_attr "type" "store,load,move,store,load")
   (set_attr "length" "1,1,2,3,3")])

;; Must handle overlapping registers here, since parameters can be unaligned
;; in registers.
;; ??? Do we need a v9 version of this?

(define_split
  [(set (match_operand:DF 0 "register_operand" "")
	(match_operand:DF 1 "register_operand" ""))]
  "! TARGET_V9 && reload_completed"
  [(set (match_dup 2) (match_dup 3))
   (set (match_dup 4) (match_dup 5))]
  "
{
  rtx first_set = operand_subword (operands[0], 0, 0, DFmode);
  rtx second_use = operand_subword (operands[1], 1, 0, DFmode);

  if (REGNO (first_set) == REGNO (second_use))
    {
      operands[2] = operand_subword (operands[0], 1, 0, DFmode);
      operands[3] = second_use;
      operands[4] = first_set;
      operands[5] = operand_subword (operands[1], 0, 0, DFmode);
    }
  else
    {
      operands[2] = first_set;
      operands[3] = operand_subword (operands[1], 0, 0, DFmode);
      operands[4] = operand_subword (operands[0], 1, 0, DFmode);
      operands[5] = second_use;
    }
}")

(define_insn "*store_df"
  [(set (mem:DF (match_operand:SI 0 "symbolic_operand" "i,i"))
	(match_operand:DF 1 "reg_or_0_operand" "re,G"))
   (clobber (match_scratch:SI 2 "=&r,&r"))]
  "(reload_completed || reload_in_progress) && ! TARGET_PTR64"
  "*
{
  output_asm_insn (\"sethi %%hi(%a0),%2\", operands);
  if (which_alternative == 0)
    return \"std %1,[%2+%%lo(%a0)]\";
  else
    return \"st %%g0,[%2+%%lo(%a0)]\;st %%g0,[%2+%%lo(%a0+4)]\";
}"
  [(set_attr "type" "store")
   (set_attr "length" "3")])

;; This pattern forces (set (reg:TF ...) (const_double ...))
;; to be reloaded by putting the constant into memory.
;; It must come before the more general movtf pattern.

(define_insn "*movtf_const_insn"
  [(set (match_operand:TF 0 "general_operand" "=