sparc.md
来自「GCC编译器源代码」· GCC 机器描述文件（.md 指 machine description，并非 Markdown）· 共 2,069 行 · 第 1/5 页
MD
2,069 行
}")(define_expand "blt" [(set (pc) (if_then_else (lt (match_dup 1) (const_int 0)) (label_ref (match_operand 0 "" "")) (pc)))] "" "{ if (TARGET_ARCH64 && sparc_compare_op1 == const0_rtx && GET_CODE (sparc_compare_op0) == REG && GET_MODE (sparc_compare_op0) == DImode) { emit_v9_brxx_insn (LT, sparc_compare_op0, operands[0]); DONE; } else if (GET_MODE (sparc_compare_op0) == TFmode && ! TARGET_HARD_QUAD) { emit_float_lib_cmp (sparc_compare_op0, sparc_compare_op1, LT); emit_jump_insn (gen_bne (operands[0])); DONE; } operands[1] = gen_compare_reg (LT, sparc_compare_op0, sparc_compare_op1);}")(define_expand "bltu" [(set (pc) (if_then_else (ltu (match_dup 1) (const_int 0)) (label_ref (match_operand 0 "" "")) (pc)))] "" "{ operands[1] = gen_compare_reg (LTU, sparc_compare_op0, sparc_compare_op1);}")(define_expand "bge" [(set (pc) (if_then_else (ge (match_dup 1) (const_int 0)) (label_ref (match_operand 0 "" "")) (pc)))] "" "{ if (TARGET_ARCH64 && sparc_compare_op1 == const0_rtx && GET_CODE (sparc_compare_op0) == REG && GET_MODE (sparc_compare_op0) == DImode) { emit_v9_brxx_insn (GE, sparc_compare_op0, operands[0]); DONE; } else if (GET_MODE (sparc_compare_op0) == TFmode && ! TARGET_HARD_QUAD) { emit_float_lib_cmp (sparc_compare_op0, sparc_compare_op1, GE); emit_jump_insn (gen_bne (operands[0])); DONE; } operands[1] = gen_compare_reg (GE, sparc_compare_op0, sparc_compare_op1);}")(define_expand "bgeu" [(set (pc) (if_then_else (geu (match_dup 1) (const_int 0)) (label_ref (match_operand 0 "" "")) (pc)))] "" "{ operands[1] = gen_compare_reg (GEU, sparc_compare_op0, sparc_compare_op1);}")(define_expand "ble" [(set (pc) (if_then_else (le (match_dup 1) (const_int 0)) (label_ref (match_operand 0 "" "")) (pc)))] "" "{ if (TARGET_ARCH64 && sparc_compare_op1 == const0_rtx && GET_CODE (sparc_compare_op0) == REG && GET_MODE (sparc_compare_op0) == DImode) { emit_v9_brxx_insn (LE, sparc_compare_op0, operands[0]); DONE; } else if (GET_MODE (sparc_compare_op0) == TFmode && ! 
TARGET_HARD_QUAD) { emit_float_lib_cmp (sparc_compare_op0, sparc_compare_op1, LE); emit_jump_insn (gen_bne (operands[0])); DONE; } operands[1] = gen_compare_reg (LE, sparc_compare_op0, sparc_compare_op1);}")(define_expand "bleu" [(set (pc) (if_then_else (leu (match_dup 1) (const_int 0)) (label_ref (match_operand 0 "" "")) (pc)))] "" "{ operands[1] = gen_compare_reg (LEU, sparc_compare_op0, sparc_compare_op1);}");; Now match both normal and inverted jump.(define_insn "*normal_branch" [(set (pc) (if_then_else (match_operator 0 "noov_compare_op" [(reg 100) (const_int 0)]) (label_ref (match_operand 1 "" "")) (pc)))] "" "*{ return output_cbranch (operands[0], 1, 0, final_sequence && INSN_ANNULLED_BRANCH_P (insn), ! final_sequence);}" [(set_attr "type" "branch")])(define_insn "*inverted_branch" [(set (pc) (if_then_else (match_operator 0 "noov_compare_op" [(reg 100) (const_int 0)]) (pc) (label_ref (match_operand 1 "" ""))))] "" "*{ return output_cbranch (operands[0], 1, 1, final_sequence && INSN_ANNULLED_BRANCH_P (insn), ! final_sequence);}" [(set_attr "type" "branch")])(define_insn "*normal_fp_branch" [(set (pc) (if_then_else (match_operator 1 "comparison_operator" [(match_operand:CCFP 0 "fcc_reg_operand" "c") (const_int 0)]) (label_ref (match_operand 2 "" "")) (pc)))] "" "*{ return output_cbranch (operands[1], 2, 0, final_sequence && INSN_ANNULLED_BRANCH_P (insn), ! final_sequence);}" [(set_attr "type" "branch")])(define_insn "*inverted_fp_branch" [(set (pc) (if_then_else (match_operator 1 "comparison_operator" [(match_operand:CCFP 0 "fcc_reg_operand" "c") (const_int 0)]) (pc) (label_ref (match_operand 2 "" ""))))] "" "*{ return output_cbranch (operands[1], 2, 1, final_sequence && INSN_ANNULLED_BRANCH_P (insn), ! 
final_sequence);}" [(set_attr "type" "branch")])(define_insn "*normal_fpe_branch" [(set (pc) (if_then_else (match_operator 1 "comparison_operator" [(match_operand:CCFPE 0 "fcc_reg_operand" "c") (const_int 0)]) (label_ref (match_operand 2 "" "")) (pc)))] "" "*{ return output_cbranch (operands[1], 2, 0, final_sequence && INSN_ANNULLED_BRANCH_P (insn), ! final_sequence);}" [(set_attr "type" "branch")])(define_insn "*inverted_fpe_branch" [(set (pc) (if_then_else (match_operator 1 "comparison_operator" [(match_operand:CCFPE 0 "fcc_reg_operand" "c") (const_int 0)]) (pc) (label_ref (match_operand 2 "" ""))))] "" "*{ return output_cbranch (operands[1], 2, 1, final_sequence && INSN_ANNULLED_BRANCH_P (insn), ! final_sequence);}" [(set_attr "type" "branch")]);; Sparc V9-specific jump insns. None of these are guaranteed to be;; in the architecture.;; There are no 32 bit brreg insns.(define_insn "*normal_int_branch_sp64" [(set (pc) (if_then_else (match_operator 0 "v9_regcmp_op" [(match_operand:DI 1 "register_operand" "r") (const_int 0)]) (label_ref (match_operand 2 "" "")) (pc)))] "TARGET_ARCH64" "*{ return output_v9branch (operands[0], 1, 2, 0, final_sequence && INSN_ANNULLED_BRANCH_P (insn), ! final_sequence);}" [(set_attr "type" "branch")])(define_insn "*inverted_int_branch_sp64" [(set (pc) (if_then_else (match_operator 0 "v9_regcmp_op" [(match_operand:DI 1 "register_operand" "r") (const_int 0)]) (pc) (label_ref (match_operand 2 "" ""))))] "TARGET_ARCH64" "*{ return output_v9branch (operands[0], 1, 2, 1, final_sequence && INSN_ANNULLED_BRANCH_P (insn), ! final_sequence);}" [(set_attr "type" "branch")]);; Esoteric move insns (lo_sum, high, pic).(define_insn "*lo_sum_si" [(set (match_operand:SI 0 "register_operand" "=r") (lo_sum:SI (match_operand:SI 1 "register_operand" "r") (match_operand:SI 2 "immediate_operand" "in")))] "" ;; V9 needs "add" because of the code models. We still use "or" for v8 ;; so we can compare the old compiler with the new. "* return TARGET_ARCH64 ? 
\"add %1,%%lo(%a2),%0\" : \"or %1,%%lo(%a2),%0\";" ;; Need to set length for this arith insn because operand2 ;; is not an "arith_operand". [(set_attr "length" "1")]);; For PIC, symbol_refs are put inside unspec so that the optimizer will not;; confuse them with real addresses.(define_insn "pic_lo_sum_si" [(set (match_operand:SI 0 "register_operand" "=r") (lo_sum:SI (match_operand:SI 1 "register_operand" "r") (unspec:SI [(match_operand:SI 2 "immediate_operand" "in")] 0)))] "flag_pic" ;; V9 needs "add" because of the code models. We still use "or" for v8 ;; so we can compare the old compiler with the new. "* return TARGET_ARCH64 ? \"add %1,%%lo(%a2),%0\" : \"or %1,%%lo(%a2),%0\";" ;; Need to set length for this arith insn because operand2 ;; is not an "arith_operand". [(set_attr "length" "1")]);; The PIC version of sethi must appear before the non-pic case so that;; the unspec will not be matched as part of the operand.;; For PIC, symbol_refs are put inside unspec so that the optimizer will not;; confuse them with real addresses.(define_insn "pic_sethi_si" [(set (match_operand:SI 0 "register_operand" "=r") (high:SI (unspec:SI [(match_operand 1 "" "")] 0)))] "flag_pic && check_pic (1)" "sethi %%hi(%a1),%0" [(set_attr "type" "move") (set_attr "length" "1")])(define_insn "pic_lo_sum_di" [(set (match_operand:DI 0 "register_operand" "=r") (lo_sum:SI (match_operand:DI 1 "register_operand" "r") (unspec:SI [(match_operand:DI 2 "immediate_operand" "in")] 0)))] "TARGET_ARCH64 && flag_pic" "add %1,%%lo(%a2),%0" [(set_attr "length" "1")])(define_insn "pic_sethi_di" [(set (match_operand:DI 0 "register_operand" "=r") (high:SI (unspec:SI [(match_operand 1 "" "")] 0)))] "TARGET_ARCH64 && flag_pic && check_pic (1)" "sethi %%hi(%a1),%0" [(set_attr "type" "move") (set_attr "length" "1")])(define_insn "get_pc_via_call" [(set (pc) (label_ref (match_operand 0 "" ""))) (set (reg:SI 15) (label_ref (match_operand 1 "" "")))] "" "call %l0%#" [(set_attr "type" "uncond_branch")])(define_insn 
"get_pc_via_rdpc" [(set (match_operand:DI 0 "register_operand" "=r") (pc))] "TARGET_PTR64" "rd %%pc,%0" [(set_attr "type" "move")]);; Special pic pattern, for loading the address of a label into a register.;; It clobbers o7 because the call puts the return address (i.e. pc value);; there. The pic tablejump pattern also uses this.(define_insn "move_pic_label_si" [(set (match_operand:SI 0 "register_operand" "=r") ; This was previously (label_ref:SI (match_operand 1 "" "")) but that ; loses the volatil and other flags of the original label_ref. (match_operand:SI 1 "label_ref_operand" "")) (set (reg:SI 15) (pc))] "flag_pic" "*{ if (get_attr_length (insn) == 2) return \"\\n1:\;call 2f\;add %%o7,%%lo(%l1-1b),%0\\n2:\"; else return \"\\n1:\;call 2f\;sethi %%hi(%l1-1b),%0\\n2:\\tor %0,%%lo(%l1-1b),%0\;add %0,%%o7,%0\";}" [(set_attr "type" "multi") ; 960 = 4096 bytes / 4 bytes/insn - 64 (for not always perfect length calcs) (set (attr "length") (if_then_else (ltu (minus (match_dup 1) (pc)) (const_int 960)) (const_int 2) (const_int 4)))]);; Special sparc64 pattern for loading the address of a label into a register.;; The pic and non-pic cases are the same since it's the most efficient way.;;;; ??? The non-pic case doesn't need to use %o7, we could use a scratch;; instead. But the pic case doesn't need to use %o7 either. We handle them;; both here so that when this is fixed, they can both be fixed together.;; Don't forget that the pic jump table stuff uses %o7 (that will need to be;; changed too).(define_insn "move_label_di" [(set (match_operand:DI 0 "register_operand" "=r") ; This was previously (label_ref:DI (match_operand 1 "" "")) but that ; loses the volatil and other flags of the original label_ref. 
(match_operand:DI 1 "label_ref_operand" "")) (set (reg:DI 15) (pc))] "TARGET_ARCH64" "*{ if (get_attr_length (insn) == 2) return \"\\n1:\;rd %%pc,%%o7\;add %%o7,%l1-1b,%0\"; else return \"\\n1:\;rd %%pc,%%o7\;sethi %%hi(%l1-1b),%0\;add %0,%%lo(%l1-1b),%0\;sra %0,0,%0\;add %0,%%o7,%0\";}" [(set_attr "type" "multi") ; 960 = 4096 bytes / 4 bytes/insn - 64 (for not always perfect length calcs) (set (attr "length") (if_then_else (ltu (minus (match_dup 1) (pc)) (const_int 960)) (const_int 2) (const_int 5)))])(define_insn "*sethi_hi" [(set (match_operand:HI 0 "register_operand" "=r") (high:HI (match_operand 1 "" "")))] "check_pic (1)" "sethi %%hi(%a1),%0" [(set_attr "type" "move") (set_attr "length" "1")]);; This must appear after the PIC sethi so that the PIC unspec will not;; be matched as part of the operand.(define_insn "*sethi_si" [(set (match_operand:SI 0 "register_operand" "=r") (high:SI (match_operand 1 "" "")))] "check_pic (1)" "sethi %%hi(%a1),%0" [(set_attr "type" "move") (set_attr "length" "1")])(define_insn "*lo_sum_di_sp32" [(set (match_operand:DI 0 "register_operand" "=r") (lo_sum:DI (match_operand:DI 1 "register_operand" "0") (match_operand:DI 2 "immediate_operand" "in")))] "! TARGET_ARCH64" "*{ /* Don't output a 64 bit constant, since we can't trust the assembler to handle it correctly. */ if (GET_CODE (operands[2]) == CONST_DOUBLE) operands[2] = GEN_INT (CONST_DOUBLE_LOW (operands[2])); else if (GET_CODE (operands[2]) == CONST_INT && HOST_BITS_PER_WIDE_INT > 32 && INTVAL (operands[2]) > 0xffffffff) operands[2] = GEN_INT (INTVAL (operands[2]) & 0xffffffff); return \"or %L1,%%lo(%a2),%L0\";}" ;; Need to set length for this arith insn because operand2 ;; is not an "arith_operand". [(set_attr "length" "1")]);; ??? Optimizer does not handle "or %o1,%lo(0),%o1". 
How about add?(define_insn "*lo_sum_di_sp64" [(set (match_operand:DI 0 "register_operand" "=r") (lo_sum:DI (match_operand:DI 1 "register_operand" "0") (match_operand:DI 2 "immediate_operand" "in")))] "TARGET_ARCH64" "*{ /* Don't output a 64 bit constant, since we can't trust the assembler to handle it correctly. */ if (GET_CODE (operands[2]) == CONST_DOUBLE) operands[2] = GEN_INT (CONST_DOUBLE_LOW (operands[2]));
⌨️ 快捷键说明
复制代码Ctrl + C
搜索代码Ctrl + F
全屏模式F11
增大字号Ctrl + =
减小字号Ctrl + -
显示快捷键?