sparc.md
(define_insn "*inverted_branch"
  [(set (pc)
	(if_then_else (match_operator 0 "noov_compare_op"
				      [(reg 100) (const_int 0)])
		      (pc)
		      (label_ref (match_operand 1 "" ""))))]
  ""
  "*
{
  return output_cbranch (operands[0], 1, 1,
			 final_sequence && INSN_ANNULLED_BRANCH_P (insn),
			 ! final_sequence, insn);
}"
  [(set_attr "type" "branch")])

;; XXX fpcmp nop braindamage
(define_insn "*normal_fp_branch"
  [(set (pc)
	(if_then_else (match_operator 1 "comparison_operator"
				      [(match_operand:CCFP 0 "fcc_reg_operand" "c")
				       (const_int 0)])
		      (label_ref (match_operand 2 "" ""))
		      (pc)))]
  ""
  "*
{
  return output_cbranch (operands[1], 2, 0,
			 final_sequence && INSN_ANNULLED_BRANCH_P (insn),
			 ! final_sequence, insn);
}"
  [(set_attr "type" "branch")])

;; XXX fpcmp nop braindamage
(define_insn "*inverted_fp_branch"
  [(set (pc)
	(if_then_else (match_operator 1 "comparison_operator"
				      [(match_operand:CCFP 0 "fcc_reg_operand" "c")
				       (const_int 0)])
		      (pc)
		      (label_ref (match_operand 2 "" ""))))]
  ""
  "*
{
  return output_cbranch (operands[1], 2, 1,
			 final_sequence && INSN_ANNULLED_BRANCH_P (insn),
			 ! final_sequence, insn);
}"
  [(set_attr "type" "branch")])

;; XXX fpcmp nop braindamage
(define_insn "*normal_fpe_branch"
  [(set (pc)
	(if_then_else (match_operator 1 "comparison_operator"
				      [(match_operand:CCFPE 0 "fcc_reg_operand" "c")
				       (const_int 0)])
		      (label_ref (match_operand 2 "" ""))
		      (pc)))]
  ""
  "*
{
  return output_cbranch (operands[1], 2, 0,
			 final_sequence && INSN_ANNULLED_BRANCH_P (insn),
			 ! final_sequence, insn);
}"
  [(set_attr "type" "branch")])

;; XXX fpcmp nop braindamage
(define_insn "*inverted_fpe_branch"
  [(set (pc)
	(if_then_else (match_operator 1 "comparison_operator"
				      [(match_operand:CCFPE 0 "fcc_reg_operand" "c")
				       (const_int 0)])
		      (pc)
		      (label_ref (match_operand 2 "" ""))))]
  ""
  "*
{
  return output_cbranch (operands[1], 2, 1,
			 final_sequence && INSN_ANNULLED_BRANCH_P (insn),
			 ! final_sequence, insn);
}"
  [(set_attr "type" "branch")])

;; Sparc V9-specific jump insns.  None of these are guaranteed to be
;; in the architecture.

;; There are no 32 bit brreg insns.

;; XXX
(define_insn "*normal_int_branch_sp64"
  [(set (pc)
	(if_then_else (match_operator 0 "v9_regcmp_op"
				      [(match_operand:DI 1 "register_operand" "r")
				       (const_int 0)])
		      (label_ref (match_operand 2 "" ""))
		      (pc)))]
  "TARGET_ARCH64"
  "*
{
  return output_v9branch (operands[0], 1, 2, 0,
			  final_sequence && INSN_ANNULLED_BRANCH_P (insn),
			  ! final_sequence, insn);
}"
  [(set_attr "type" "branch")])

;; XXX
(define_insn "*inverted_int_branch_sp64"
  [(set (pc)
	(if_then_else (match_operator 0 "v9_regcmp_op"
				      [(match_operand:DI 1 "register_operand" "r")
				       (const_int 0)])
		      (pc)
		      (label_ref (match_operand 2 "" ""))))]
  "TARGET_ARCH64"
  "*
{
  return output_v9branch (operands[0], 1, 2, 1,
			  final_sequence && INSN_ANNULLED_BRANCH_P (insn),
			  ! final_sequence, insn);
}"
  [(set_attr "type" "branch")])

;; Load program counter insns.

(define_insn "get_pc"
  [(clobber (reg:SI 15))
   (set (match_operand 0 "register_operand" "=r")
	(unspec [(match_operand 1 "" "") (match_operand 2 "" "")] 2))]
  "flag_pic && REGNO (operands[0]) == 23"
  "sethi\\t%%hi(%a1-4), %0\\n\\tcall\\t%a2\\n\\tadd\\t%0, %%lo(%a1+4), %0"
  [(set_attr "length" "3")])

;; Currently unused...
;; (define_insn "get_pc_via_rdpc"
;;   [(set (match_operand 0 "register_operand" "=r") (pc))]
;;   "TARGET_V9"
;;   "rd\\t%%pc, %0"
;;   [(set_attr "type" "move")])
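;; For illustration only (not part of the port): given RTL matched by the
;; branch patterns above, output_cbranch is expected to emit a conditional
;; branch on the condition codes ((reg 100) is the integer condition-code
;; register in this port's numbering), setting the ",a" annul bit when the
;; delay slot insn should execute only on the taken path.  A sketch, with
;; hypothetical registers and label:
;;
;;	cmp	%o0, 0		! set the integer condition codes
;;	be,a	.LL3		! annulled: delay insn runs only if taken
;;	 mov	1, %o1		! branch delay slot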
;; Move instructions

(define_expand "movqi"
  [(set (match_operand:QI 0 "general_operand" "")
	(match_operand:QI 1 "general_operand" ""))]
  ""
  "
{
  /* Working with CONST_INTs is easier, so convert
     a double if needed.  */
  if (GET_CODE (operands[1]) == CONST_DOUBLE)
    {
      operands[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]) & 0xff);
    }
  else if (GET_CODE (operands[1]) == CONST_INT)
    {
      /* And further, we know for all QI cases that only the
	 low byte is significant, which we can always process
	 in a single insn.  So mask it now.  */
      operands[1] = GEN_INT (INTVAL (operands[1]) & 0xff);
    }

  /* Handle sets of MEM first.  */
  if (GET_CODE (operands[0]) == MEM)
    {
      /* This checks TARGET_LIVE_G0 for us.  */
      if (reg_or_0_operand (operands[1], QImode))
	goto movqi_is_ok;

      if (! reload_in_progress)
	{
	  operands[0] = validize_mem (operands[0]);
	  operands[1] = force_reg (QImode, operands[1]);
	}
    }

  /* Fixup PIC cases.  */
  if (flag_pic)
    {
      if (CONSTANT_P (operands[1])
	  && pic_address_needs_scratch (operands[1]))
	operands[1] = legitimize_pic_address (operands[1], QImode, 0);

      if (symbolic_operand (operands[1], QImode))
	{
	  operands[1] = legitimize_pic_address (operands[1], QImode,
						(reload_in_progress ?
						 operands[0] :
						 NULL_RTX));
	  goto movqi_is_ok;
	}
    }

  /* All QI constants require only one insn, so proceed.  */

 movqi_is_ok:
  ;
}")

(define_insn "*movqi_insn"
  [(set (match_operand:QI 0 "general_operand" "=r,r,m")
	(match_operand:QI 1 "input_operand" "rI,m,rJ"))]
  "(register_operand (operands[0], QImode)
    || reg_or_0_operand (operands[1], QImode))"
  "@
   mov\\t%1, %0
   ldub\\t%1, %0
   stb\\t%r1, %0"
  [(set_attr "type" "move,load,store")
   (set_attr "length" "1")])

(define_expand "movhi"
  [(set (match_operand:HI 0 "general_operand" "")
	(match_operand:HI 1 "general_operand" ""))]
  ""
  "
{
  /* Working with CONST_INTs is easier, so convert
     a double if needed.  */
  if (GET_CODE (operands[1]) == CONST_DOUBLE)
    operands[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));

  /* Handle sets of MEM first.  */
  if (GET_CODE (operands[0]) == MEM)
    {
      /* This checks TARGET_LIVE_G0 for us.  */
      if (reg_or_0_operand (operands[1], HImode))
	goto movhi_is_ok;

      if (! reload_in_progress)
	{
	  operands[0] = validize_mem (operands[0]);
	  operands[1] = force_reg (HImode, operands[1]);
	}
    }

  /* Fixup PIC cases.  */
  if (flag_pic)
    {
      if (CONSTANT_P (operands[1])
	  && pic_address_needs_scratch (operands[1]))
	operands[1] = legitimize_pic_address (operands[1], HImode, 0);

      if (symbolic_operand (operands[1], HImode))
	{
	  operands[1] = legitimize_pic_address (operands[1], HImode,
						(reload_in_progress ?
						 operands[0] :
						 NULL_RTX));
	  goto movhi_is_ok;
	}
    }

  /* This makes sure we will not get rematched due to splittage.  */
  if (! CONSTANT_P (operands[1]) || input_operand (operands[1], HImode))
    ;
  else if (CONSTANT_P (operands[1])
	   && GET_CODE (operands[1]) != HIGH
	   && GET_CODE (operands[1]) != LO_SUM)
    {
      sparc_emit_set_const32 (operands[0], operands[1]);
      DONE;
    }

 movhi_is_ok:
  ;
}")
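;; For illustration only (not part of the port): when the expanders above
;; see a constant that is neither a valid immediate nor already a
;; HIGH/LO_SUM pair, sparc_emit_set_const32 synthesizes it with the
;; classic two-insn sequence, e.g. for 0x12345678 (register hypothetical):
;;
;;	sethi	%hi(0x12345678), %o0		! upper 22 bits
;;	or	%o0, %lo(0x12345678), %o0	! lower 10 bits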
(define_insn "*movhi_const64_special"
  [(set (match_operand:HI 0 "register_operand" "=r")
	(match_operand:HI 1 "const64_high_operand" ""))]
  "TARGET_ARCH64"
  "sethi\\t%%hi(%a1), %0"
  [(set_attr "type" "move")
   (set_attr "length" "1")])

(define_insn "*movhi_insn"
  [(set (match_operand:HI 0 "general_operand" "=r,r,r,m")
	(match_operand:HI 1 "input_operand" "rI,K,m,rJ"))]
  "(register_operand (operands[0], HImode)
    || reg_or_0_operand (operands[1], HImode))"
  "@
   mov\\t%1, %0
   sethi\\t%%hi(%a1), %0
   lduh\\t%1, %0
   sth\\t%r1, %0"
  [(set_attr "type" "move,move,load,store")
   (set_attr "length" "1")])

;; We always work with constants here.
(define_insn "*movhi_lo_sum"
  [(set (match_operand:HI 0 "register_operand" "=r")
	(ior:HI (match_operand:HI 1 "arith_operand" "%r")
		(match_operand:HI 2 "arith_operand" "I")))]
  ""
  "or\\t%1, %2, %0"
  [(set_attr "type" "ialu")
   (set_attr "length" "1")])

(define_expand "movsi"
  [(set (match_operand:SI 0 "general_operand" "")
	(match_operand:SI 1 "general_operand" ""))]
  ""
  "
{
  /* Working with CONST_INTs is easier, so convert
     a double if needed.  */
  if (GET_CODE (operands[1]) == CONST_DOUBLE)
    operands[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));

  /* Handle sets of MEM first.  */
  if (GET_CODE (operands[0]) == MEM)
    {
      /* This checks TARGET_LIVE_G0 for us.  */
      if (reg_or_0_operand (operands[1], SImode))
	goto movsi_is_ok;

      if (! reload_in_progress)
	{
	  operands[0] = validize_mem (operands[0]);
	  operands[1] = force_reg (SImode, operands[1]);
	}
    }

  /* Fixup PIC cases.  */
  if (flag_pic)
    {
      if (CONSTANT_P (operands[1])
	  && pic_address_needs_scratch (operands[1]))
	operands[1] = legitimize_pic_address (operands[1], SImode, 0);

      if (GET_CODE (operands[1]) == LABEL_REF)
	{
	  /* Label references get a dedicated PIC pattern.  */
	  emit_insn (gen_movsi_pic_label_ref (operands[0], operands[1]));
	  DONE;
	}

      if (symbolic_operand (operands[1], SImode))
	{
	  operands[1] = legitimize_pic_address (operands[1], SImode,
						(reload_in_progress ?
						 operands[0] :
						 NULL_RTX));
	  goto movsi_is_ok;
	}
    }

  /* If we are trying to toss an integer constant into the
     FPU registers, force it into memory.  */
  if (GET_CODE (operands[0]) == REG
      && REGNO (operands[0]) >= SPARC_FIRST_FP_REG
      && REGNO (operands[0]) <= SPARC_LAST_V9_FP_REG
      && CONSTANT_P (operands[1]))
    operands[1] = validize_mem (force_const_mem (GET_MODE (operands[0]),
						 operands[1]));

  /* This makes sure we will not get rematched due to splittage.  */
  if (! CONSTANT_P (operands[1]) || input_operand (operands[1], SImode))
    ;
  else if (CONSTANT_P (operands[1])
	   && GET_CODE (operands[1]) != HIGH
	   && GET_CODE (operands[1]) != LO_SUM)
    {
      sparc_emit_set_const32 (operands[0], operands[1]);
      DONE;
    }

 movsi_is_ok:
  ;
}")
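;; For illustration only (not part of the port): sethi/or can only build
;; constants in integer registers, so when the movsi expander above sees
;; an integer constant destined for an FPU register it forces the value
;; into the constant pool and loads it from memory instead (pool label
;; and registers hypothetical):
;;
;;	sethi	%hi(.LC0), %g1
;;	ld	[%g1+%lo(.LC0)], %f0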
;; Special LIVE_G0 pattern to obtain zero in a register.
(define_insn "*movsi_zero_liveg0"
  [(set (match_operand:SI 0 "register_operand" "=r")
	(match_operand:SI 1 "zero_operand" "J"))]
  "TARGET_LIVE_G0"
  "and\\t%0, 0, %0"
  [(set_attr "type" "binary")
   (set_attr "length" "1")])

;; This is needed to show CSE exactly which bits are set
;; in a 64-bit register by sethi instructions.
(define_insn "*movsi_const64_special"
  [(set (match_operand:SI 0 "register_operand" "=r")
	(match_operand:SI 1 "const64_high_operand" ""))]
  "TARGET_ARCH64"
  "sethi\\t%%hi(%a1), %0"
  [(set_attr "type" "move")
   (set_attr "length" "1")])

(define_insn "*movsi_insn"
  [(set (match_operand:SI 0 "general_operand" "=r,f,r,r,r,f,m,m,d")
	(match_operand:SI 1 "input_operand" "rI,!f,K,J,m,!m,rJ,!f,J"))]
  "(register_operand (operands[0], SImode)
    || reg_or_0_operand (operands[1], SImode))"
  "@
   mov\\t%1, %0
   fmovs\\t%1, %0
   sethi\\t%%hi(%a1), %0
   clr\\t%0
   ld\\t%1, %0
   ld\\t%1, %0
   st\\t%r1, %0
   st\\t%1, %0
   fzeros\\t%0"
  [(set_attr "type" "move,fpmove,move,move,load,fpload,store,fpstore,fpmove")
   (set_attr "length" "1")])

(define_insn "*movsi_lo_sum"
  [(set (match_operand:SI 0 "register_operand" "=r")
	(lo_sum:SI (match_operand:SI 1 "register_operand" "r")
		   (match_operand:SI 2 "immediate_operand" "in")))]
  ""
  "or\\t%1, %%lo(%a2), %0"
  [(set_attr "type" "ialu")
   (set_attr "length" "1")])

(define_insn "*movsi_high"
  [(set (match_operand:SI 0 "register_operand" "=r")
	(high:SI (match_operand:SI 1 "immediate_operand" "in")))]
  ""
  "sethi\\t%%hi(%a1), %0"
  [(set_attr "type" "move")
   (set_attr "length" "1")])

;; The next two patterns must wrap the SYMBOL_REF in an UNSPEC
;; so that CSE won't optimize the address computation away.
(define_insn "movsi_lo_sum_pic"
  [(set (match_operand:SI 0 "register_operand" "=r")
	(lo_sum:SI (match_operand:SI 1 "register_operand" "r")
		   (unspec:SI [(match_operand:SI 2 "immediate_operand" "in")] 0)))]
  "flag_pic"
  "or\\t%1, %%lo(%a2), %0"
  [(set_attr "type" "ialu")
   (set_attr "length" "1")])

(define_insn "movsi_high_pic"
  [(set (match_operand:SI 0 "register_operand" "=r")
	(high:SI (unspec:SI [(match_operand 1 "" "")] 0)))]
  "flag_pic && check_pic (1)"
  "sethi\\t%%hi(%a1), %0"
  [(set_attr "type" "move")
   (set_attr "length" "1")])
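;; For illustration only (not part of the port): movsi_high_pic and
;; movsi_lo_sum_pic together rebuild the usual sethi/or address
;; computation under PIC; the UNSPEC wrapper emits no code of its own and
;; merely stops CSE from folding the address computation.  A sketch, with
;; a hypothetical symbol, where %l7 (reg 23, set up by "get_pc" above)
;; holds the PIC base:
;;
;;	sethi	%hi(sym), %o1		! movsi_high_pic
;;	or	%o1, %lo(sym), %o1	! movsi_lo_sum_pic
;;	ld	[%l7+%o1], %o0		! load sym's address from the GOT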