# mpc6xx_lo.s
# This routine accepts a pointer to the data structure containing
# the user register set, and restores its context.
# Call from C:
#
# void asm_switch_context(&context);
#
asm_switch_context:
# Entry: r3 = pointer to the saved user register context.
# Keep the context pointer in r31; r3 is needed as scratch below.
addi r31,r3,0 # r31 = &context (arg in r3)
#
# Processor specific registers: read the PVR, keep the upper halfword
# (the processor version field), and branch to the matching SPR
# restore code.  Unrecognized versions skip the model-specific SPRs.
#
mfspr r3,spr_pvr
rlwinm r3,r3,16,16,31 # r3 = PVR >> 16 (processor version)
cmpli cr0,0,r3,PVR_602
bc 12,2,restore_spr_602 # beq
cmpli cr0,0,r3,PVR_603
bc 12,2,restore_spr_603 # beq
cmpli cr0,0,r3,PVR_603E
bc 12,2,restore_spr_603E # beq
cmpli cr0,0,r3,PVR_603EV
bc 12,2,restore_spr_603EV # beq
cmpli cr0,0,r3,PVR_604
bc 12,2,restore_spr_604 # beq
cmpli cr0,0,r3,PVR_604E
bc 12,2,restore_spr_604E # beq
cmpli cr0,0,r3,PVR_MACH5
bc 12,2,restore_spr_MACH5 # beq
cmpli cr0,0,r3,PVR_750
bc 12,2,restore_spr_750 # beq
b restore_spr_done # unknown processor: no model-specific SPRs
restore_spr_602:
# MPC602: no model-specific SPRs are restored.
b restore_spr_done
restore_spr_603:
# MPC603: only the IABR is restored here; the other model SPRs
# (HID0, software TLB-miss registers, EAR) are left commented out
# and are not part of the switched context.
# lwz r10,o_603_hid0(r31)
# lwz r11,o_603_dmiss(r31)
# lwz r12,o_603_dcmp(r31)
# lwz r13,o_603_hash1(r31)
# lwz r14,o_603_hash2(r31)
# lwz r15,o_603_imiss(r31)
# lwz r16,o_603_icmp(r31)
# lwz r17,o_603_rpa(r31)
lwz r18,o_603_iabr(r31)
# lwz r19,o_603_ear(r31)
# mtspr spr_603_hid0,r10
# mtspr spr_603_dmiss,r11
# mtspr spr_603_dcmp,r12
# mtspr spr_603_hash1,r13
# mtspr spr_603_hash2,r14
# mtspr spr_603_imiss,r15
# mtspr spr_603_icmp,r16
# mtspr spr_603_rpa,r17
mtspr spr_603_iabr,r18
# mtspr spr_603_ear,r19
b restore_spr_done
restore_spr_603E:
restore_spr_603EV:
# MPC603e/603ev (shared path): only the IABR is restored; the other
# model SPRs are left commented out.
# lwz r10,o_603e_hid0(r31)
# lwz r11,o_603e_hid1(r31)
# lwz r12,o_603e_dmiss(r31)
# lwz r13,o_603e_dcmp(r31)
# lwz r14,o_603e_hash1(r31)
# lwz r15,o_603e_hash2(r31)
# lwz r16,o_603e_imiss(r31)
# lwz r17,o_603e_icmp(r31)
# lwz r18,o_603e_rpa(r31)
lwz r19,o_603e_iabr(r31)
# lwz r20,o_603e_ear(r31)
# mtspr spr_603e_hid0,r10
# mtspr spr_603e_hid1,r11
# mtspr spr_603e_dmiss,r12
# mtspr spr_603e_dcmp,r13
# mtspr spr_603e_hash1,r14
# mtspr spr_603e_hash2,r15
# mtspr spr_603e_imiss,r16
# mtspr spr_603e_icmp,r17
# mtspr spr_603e_rpa,r18
mtspr spr_603e_iabr,r19
# mtspr spr_603e_ear,r20
b restore_spr_done
restore_spr_604:
# MPC604: only the breakpoint registers (IABR, DABR) are restored;
# the other model SPRs are left commented out.
# lwz r10,o_604_hid0(r31)
# lwz r11,o_604_pmc1(r31)
# lwz r12,o_604_pmc2(r31)
# lwz r13,o_604_mmcr0(r31)
# lwz r14,o_604_sda(r31)
# lwz r15,o_604_sia(r31)
lwz r16,o_604_iabr(r31)
lwz r17,o_604_dabr(r31)
# lwz r18,o_604_ear(r31)
# lwz r19,o_604_pir(r31)
# mtspr spr_604_hid0,r10
# mtspr spr_604_pmc1,r11
# mtspr spr_604_pmc2,r12
# mtspr spr_604_mmcr0,r13
# mtspr spr_604_sda,r14
# mtspr spr_604_sia,r15
mtspr spr_604_iabr,r16
mtspr spr_604_dabr,r17
# mtspr spr_604_ear,r18
# mtspr spr_604_pir,r19
b restore_spr_done
restore_spr_604E:
restore_spr_MACH5:
# MPC604e/MACH5 (shared path): only the breakpoint registers
# (IABR, DABR) are restored; the other model SPRs are left
# commented out.
# lwz r10,o_604e_hid0(r31)
# lwz r11,o_604e_hid1(r31)
# lwz r12,o_604e_pmc1(r31)
# lwz r13,o_604e_pmc2(r31)
# lwz r14,o_604e_pmc3(r31)
# lwz r15,o_604e_pmc4(r31)
# lwz r16,o_604e_mmcr0(r31)
# lwz r17,o_604e_mmcr1(r31)
# lwz r18,o_604e_sda(r31)
# lwz r19,o_604e_sia(r31)
lwz r20,o_604e_iabr(r31)
lwz r21,o_604e_dabr(r31)
# lwz r22,o_604e_ear(r31)
# lwz r23,o_604e_pir(r31)
# mtspr spr_604e_hid0,r10
# mtspr spr_604e_hid1,r11
# mtspr spr_604e_pmc1,r12
# mtspr spr_604e_pmc2,r13
# mtspr spr_604e_pmc3,r14
# mtspr spr_604e_pmc4,r15
# mtspr spr_604e_mmcr0,r16
# mtspr spr_604e_mmcr1,r17
# mtspr spr_604e_sda,r18
# mtspr spr_604e_sia,r19
mtspr spr_604e_iabr,r20
mtspr spr_604e_dabr,r21
# mtspr spr_604e_ear,r22
# mtspr spr_604e_pir,r23
b restore_spr_done
restore_spr_750:
# MPC750: every model-specific SPR restore is commented out, so this
# path currently restores nothing and falls through to the common code.
# lwz r10,o_750_hid0(r31)
# lwz r11,o_750_hid1(r31)
# lwz r12,o_750_pmc1(r31)
# lwz r13,o_750_pmc2(r31)
# lwz r14,o_750_pmc3(r31)
# lwz r15,o_750_pmc4(r31)
# lwz r16,o_750_mmcr0(r31)
# lwz r17,o_750_mmcr1(r31)
# lwz r18,o_750_sia(r31)
# lwz r19,o_750_thrm1(r31)
# lwz r20,o_750_thrm2(r31)
# lwz r21,o_750_thrm3(r31)
# lwz r22,o_750_ictc(r31)
# lwz r23,o_750_l2cr(r31)
# lwz r24,o_750_iabr(r31)
# lwz r25,o_750_dabr(r31)
# lwz r26,o_750_ear(r31)
# mtspr spr_750_hid0,r10
# mtspr spr_750_hid1,r11
# mtspr spr_750_pmc1,r12
# mtspr spr_750_pmc2,r13
# mtspr spr_750_pmc3,r14
# mtspr spr_750_pmc4,r15
# mtspr spr_750_mmcr0,r16
# mtspr spr_750_mmcr1,r17
# mtspr spr_750_sia,r18
# mtspr spr_750_thrm1,r19
# mtspr spr_750_thrm2,r20
# mtspr spr_750_thrm3,r21
# mtspr spr_750_ictc,r22
# mtspr spr_750_l2cr,r23
# mtspr spr_750_iabr,r24
# mtspr spr_750_dabr,r25
# mtspr spr_750_ear,r26
b restore_spr_done
restore_spr_done:
sync
isync
#
# Restore common Special Purpose Registers.
# sprg0/sprg1 are skipped: the exception path stashes the interrupted
# r31 and LR in them (see asm_isr_handler), so writing them here would
# clobber that state.  DSISR/DAR (fault status) are also left alone;
# their loads are kept so the register numbering stays consistent.
#
lwz r10,o_dec(r31)
lwz r11,o_sprg0(r31)
lwz r12,o_sprg1(r31)
lwz r13,o_sprg2(r31)
lwz r14,o_sprg3(r31)
lwz r15,o_dsisr(r31)
lwz r16,o_dar(r31)
lwz r17,o_sdr1(r31)
lwz r18,o_ibat0u(r31)
lwz r19,o_ibat0l(r31)
lwz r20,o_ibat1u(r31)
lwz r21,o_ibat1l(r31)
lwz r22,o_ibat2u(r31)
lwz r23,o_ibat2l(r31)
lwz r24,o_ibat3u(r31)
lwz r25,o_ibat3l(r31)
mtspr spr_dec,r10
# mtspr spr_sprg0,r11
# mtspr spr_sprg1,r12
mtspr spr_sprg2,r13
mtspr spr_sprg3,r14
# mtspr spr_dsisr,r15
# mtspr spr_dar,r16
mtspr spr_sdr1,r17
# Instruction BATs 0-3 (upper/lower pairs).
mtspr spr_ibat0u,r18
mtspr spr_ibat0l,r19
mtspr spr_ibat1u,r20
mtspr spr_ibat1l,r21
mtspr spr_ibat2u,r22
mtspr spr_ibat2l,r23
mtspr spr_ibat3u,r24
mtspr spr_ibat3l,r25
sync
# Data BATs 0-3 (upper/lower pairs).
lwz r10,o_dbat0u(r31)
lwz r11,o_dbat0l(r31)
lwz r12,o_dbat1u(r31)
lwz r13,o_dbat1l(r31)
lwz r14,o_dbat2u(r31)
lwz r15,o_dbat2l(r31)
lwz r16,o_dbat3u(r31)
lwz r17,o_dbat3l(r31)
mtspr spr_dbat0u,r10
mtspr spr_dbat0l,r11
mtspr spr_dbat1u,r12
mtspr spr_dbat1l,r13
mtspr spr_dbat2u,r14
mtspr spr_dbat2l,r15
mtspr spr_dbat3u,r16
mtspr spr_dbat3l,r17
sync
# Restore all 16 segment registers from the context.
lwz r10,o_sr0(r31)
lwz r11,o_sr1(r31)
lwz r12,o_sr2(r31)
lwz r13,o_sr3(r31)
lwz r14,o_sr4(r31)
lwz r15,o_sr5(r31)
lwz r16,o_sr6(r31)
lwz r17,o_sr7(r31)
lwz r18,o_sr8(r31)
lwz r19,o_sr9(r31)
lwz r20,o_sr10(r31)
lwz r21,o_sr11(r31)
lwz r22,o_sr12(r31)
lwz r23,o_sr13(r31)
lwz r24,o_sr14(r31)
lwz r25,o_sr15(r31)
mtsr sr0,r10
mtsr sr1,r11
mtsr sr2,r12
mtsr sr3,r13
mtsr sr4,r14
mtsr sr5,r15
mtsr sr6,r16
mtsr sr7,r17
mtsr sr8,r18
mtsr sr9,r19
mtsr sr10,r20
mtsr sr11,r21
mtsr sr12,r22
mtsr sr13,r23
mtsr sr14,r24
mtsr sr15,r25
sync
# Restore XER/LR/CTR.  The time base (TBL/TBU) loads are kept but the
# writes stay commented out, so the running time base is not disturbed.
lwz r10,o_xer(r31)
lwz r11,o_lr(r31)
lwz r12,o_ctr(r31)
lwz r13,o_tbl(r31)
lwz r14,o_tbu(r31)
mtspr spr_xer,r10
mtspr spr_lr,r11
mtspr spr_ctr,r12
# mtspr 284,r13 # tbl
# mtspr 285,r14 # tbu
sync
# Restore all FPRs.
# Temporarily enable MSR[FP] so the FP loads below do not trap;
# the caller's original MSR (saved in r4) is put back afterwards.
mfmsr r4
isync
ori r3,r4,0x2000 # MSR[FP]=1 (ori, not addi: addi would carry into
                 # bit 0x4000 and corrupt the MSR if FP was already set)
mtmsr r3
sync
# NOTE(review): lfs loads single-precision values and converts them;
# the full 64-bit FPR state is only preserved if the save side used
# stfs into matching single-precision slots -- confirm the context
# layout (same applies to o_fpscr below).
lfs f0,o_fpscr(r31)
mtfsf 0xFF,f0 # restore FPSCR from f0
lfs f0,o_f0(r31)
lfs f1,o_f1(r31)
lfs f2,o_f2(r31)
lfs f3,o_f3(r31)
lfs f4,o_f4(r31)
lfs f5,o_f5(r31)
lfs f6,o_f6(r31)
lfs f7,o_f7(r31)
lfs f8,o_f8(r31)
lfs f9,o_f9(r31)
lfs f10,o_f10(r31)
lfs f11,o_f11(r31)
lfs f12,o_f12(r31)
lfs f13,o_f13(r31)
lfs f14,o_f14(r31)
lfs f15,o_f15(r31)
lfs f16,o_f16(r31)
lfs f17,o_f17(r31)
lfs f18,o_f18(r31)
lfs f19,o_f19(r31)
lfs f20,o_f20(r31)
lfs f21,o_f21(r31)
lfs f22,o_f22(r31)
lfs f23,o_f23(r31)
lfs f24,o_f24(r31)
lfs f25,o_f25(r31)
lfs f26,o_f26(r31)
lfs f27,o_f27(r31)
lfs f28,o_f28(r31)
lfs f29,o_f29(r31)
lfs f30,o_f30(r31)
lfs f31,o_f31(r31)
mtmsr r4 # put back the caller's MSR (MSR[FP] as it was on entry)
sync
# Restore CR, and load SRR0/SRR1 so the final rfi installs the saved
# MSR and resumes at the saved instruction pointer.
lwz r10,o_cr(r31) # load cr
lwz r11,o_msr(r31) # load msr
lwz r12,o_srr0(r31) # load ip
mtcrf 0xFF,r10 # restore all eight CR fields
mtspr spr_srr1,r11 # rfi copies SRR1 -> MSR
mtspr spr_srr0,r12 # rfi jumps to SRR0
sync
isync
# Restore all GPRs.  r31 is the context pointer, so it is loaded last.
lwz r0,o_r0(r31) # load r0
lwz r1,o_r1(r31) # load r1
lwz r2,o_r2(r31) # load r2
lwz r3,o_r3(r31) # load r3
lwz r4,o_r4(r31) # load r4
lwz r5,o_r5(r31) # load r5
lwz r6,o_r6(r31) # load r6
lwz r7,o_r7(r31) # load r7
lwz r8,o_r8(r31) # load r8
lwz r9,o_r9(r31) # load r9
lwz r10,o_r10(r31) # load r10
lwz r11,o_r11(r31) # load r11
lwz r12,o_r12(r31) # load r12
lwz r13,o_r13(r31) # load r13
lwz r14,o_r14(r31) # load r14
lwz r15,o_r15(r31) # load r15
lwz r16,o_r16(r31) # load r16
lwz r17,o_r17(r31) # load r17
lwz r18,o_r18(r31) # load r18
lwz r19,o_r19(r31) # load r19
lwz r20,o_r20(r31) # load r20
lwz r21,o_r21(r31) # load r21
lwz r22,o_r22(r31) # load r22
lwz r23,o_r23(r31) # load r23
lwz r24,o_r24(r31) # load r24
lwz r25,o_r25(r31) # load r25
lwz r26,o_r26(r31) # load r26
lwz r27,o_r27(r31) # load r27
lwz r28,o_r28(r31) # load r28
lwz r29,o_r29(r31) # load r29
lwz r30,o_r30(r31) # load r30
lwz r31,o_r31(r31) # load r31 # must be last !!!
sync
isync
# Resume the restored context.
rfi
######################################################################
#
# This is the Interrupt Service Routine for External Interrupts. This
# routine saves off a few registers that may be modified ala EABI, and
# then calls the higher level C routine to determine and dispatch the
# correct interrupt handler.
#
.equ ISRSZ,128 # stack frame size for the ISR register save area
asm_isr_handler:
# External-interrupt service routine.  Entered from the low-level
# exception header with:
#   sprg0 = interrupted r31, sprg1 = interrupted LR,
#   LR[16-23] = exception number, SRR0/SRR1 = interrupted IP/MSR.
# Saves the EABI-volatile registers, calls the C dispatcher
# isr_execute_handler(exception_number), then returns with rfi if
# handled, or chains to asm_exception_body if not.
# Save volatile registers, as per EABI ; NEED FPRs ???
#
stwu r1,-ISRSZ(r1)
stw r0,40(r1) # Save R0
stw r2,44(r1) # Save R2
stw r3,48(r1) # Save R3
stw r4,52(r1) # Save R4
stw r5,56(r1) # Save R5
stw r6,60(r1) # Save R6
stw r7,64(r1) # Save R7
stw r8,68(r1) # Save R8
stw r9,72(r1) # Save R9
stw r10,76(r1) # Save R10
stw r11,80(r1) # Save R11
stw r12,84(r1) # Save R12
stw r13,88(r1) # Save R13
mfspr r0,spr_sprg1
stw r0,92(r1) # Save interrupted LR (stashed in sprg1 by the header)
mfcr r0
stw r0,96(r1) # Save CR
mfspr r0,spr_xer
stw r0,100(r1) # Save XER
mfspr r0,spr_ctr
stw r0,104(r1) # Save CTR
mfspr r0,spr_sprg0
stw r0,108(r1) # Save interrupted R31 (stashed in sprg0 by the header)
mfspr r0,spr_srr0
stw r0,112(r1) # Save IP
mfspr r0,spr_srr1
stw r0,116(r1) # Save MSR
sync
isync
# Call the higher level interrupt handler
#
mfspr r4,spr_lr # LR contains exception number
rlwinm r3,r4,0,16,23 # r3 = lr & 0x0000FF00 (exception number arg)
bl isr_execute_handler
# Dispatch on the C handler's result:
# r3 == 0 if IRQ not handled, r3 == 1 if IRQ handled
cmpi cr0,0,r3,0x0000
beq cr0,nothandled
handled:
# IRQ was handled: restore the full interrupted state and return to
# the interrupted code directly via rfi.
#
lwz r2,44(r1) # Restore R2
lwz r3,48(r1) # Restore R3
lwz r4,52(r1) # Restore R4
lwz r5,56(r1) # Restore R5
lwz r6,60(r1) # Restore R6
lwz r7,64(r1) # Restore R7
lwz r8,68(r1) # Restore R8
lwz r9,72(r1) # Restore R9
lwz r10,76(r1) # Restore R10
lwz r11,80(r1) # Restore R11
lwz r12,84(r1) # Restore R12
lwz r13,88(r1) # Restore R13
lwz r0,92(r1) #
mtspr spr_lr,r0 # Restore LR
lwz r0,96(r1) #
mtcrf 0xFF,r0 # Restore CR
lwz r0,100(r1) #
mtspr spr_xer,r0 # Restore XER
lwz r0,104(r1) #
mtspr spr_ctr,r0 # Restore CTR
lwz r31,108(r1) # Restore R31
lwz r0,112(r1)
mtspr spr_srr0,r0 # Restore IP (rfi target)
lwz r0,116(r1)
mtspr spr_srr1,r0 # Restore MSR (installed by rfi)
lwz r0,40(r1) # Restore R0 - Must be done last!
addi r1,r1,ISRSZ # pop the ISR frame
sync
isync
# All done!
rfi
nothandled:
# IRQ was not handled: restore the saved state, then re-create the
# exception-header environment (sprg0 = r31, sprg1 = LR,
# LR = exception number) and chain to the generic exception body.
#
lwz r2,44(r1) # Restore R2
lwz r3,48(r1) # Restore R3
lwz r4,52(r1) # Restore R4
lwz r5,56(r1) # Restore R5
lwz r6,60(r1) # Restore R6
lwz r7,64(r1) # Restore R7
lwz r8,68(r1) # Restore R8
lwz r9,72(r1) # Restore R9
lwz r10,76(r1) # Restore R10
lwz r11,80(r1) # Restore R11
lwz r12,84(r1) # Restore R12
lwz r13,88(r1) # Restore R13
lwz r0,92(r1) #
mtspr spr_sprg1,r0 # Restore LR (for exception body)
lwz r0,96(r1) #
mtcrf 0xFF,r0 # Restore CR
lwz r0,100(r1) #
mtspr spr_xer,r0 # Restore XER
lwz r0,104(r1) #
mtspr spr_ctr,r0 # Restore CTR
lwz r31,108(r1) # Restore R31
mtspr spr_sprg0,r31 # Restore r31 (for exception body)
lwz r0,112(r1)
mtspr spr_srr0,r0 # Restore IP
lwz r0,116(r1)
mtspr spr_srr1,r0 # Restore MSR
lwz r0,40(r1) # Restore R0 - Must be done last!
addi r1,r1,ISRSZ # pop the ISR frame
sync
isync
# Jump to generic exception handler. Prep just like header.
# sprg0 == r31
# sprg1 == LR
# LR[16-23] == exception number
addi r31,r0,0x0500 # r31 = 0x0500 (rA=r0 reads as literal 0 in addi)
mtspr spr_lr,r31 # LR = external-interrupt vector number
b asm_exception_body
######################################################################
#
# This routine flushes and invalidates the caches. MPC6xx has
# 32-byte block size.
#
# To flush D-cache requires a three step process:
#
# 1) Perform a Read of memory starting at 0x00000000 thru __CACHE_SIZE
# 2) Perform DCBF of memory starting at 0x00000000 thru __CACHE_SIZE
# 3) Disable and invalidate the D-cache.
#
# The value of __CACHE_SIZE is the sum of all caches associated with the
# processor: on-chip L1, MPC10x L2, MPC7xx back-side L2, etc.
#
# In Step 1) above, only one word from each 32-byte cache block needs to be
# read. By reading memory from 0x00000000 thru __CACHE_SIZE, any dirty cache
# blocks above physical address __CACHE_SIZE are committed back to memory in
# order to be replaced by memory below __CACHE_SIZE.
#
# In Step 2) above, all dirty cache blocks below __CACHE_SIZE are
# committed back to memory. This completes the flush of the cache, as all
# physical memory is now coherent.
#
# The value __CACHE_SIZE is a linker-defined symbol so that it can be
# tuned for the particular implementation at hand. Currently, a safe
# value to use which will work with all known MPC6xx and MPC7xx parts to
# date, 1998, is 0x00280000, 2.5M. A value of __CACHE_SIZE less than the
# cache size(s) will not guarantee flushing of the entire cache. A value
# larger than the cache size(s) simply executes thru the flush loop a few
# more times than necessary, but otherwise is valid as well.
#
# All MPC6xx have HID0[DCE,DCFI] to disable data cache.
# All MPC6xx have HID0[ICE,ICFI] to disable instruction cache.
#
# NOTE: THIS CODE REQUIRES THE MMU BE DISABLED!!!
#
.equ BLOCK_SIZE,32
.extern __CACHE_SIZE
#
# void cpu_cache_flush(void)
# Flush dirty D-cache blocks back to memory, then disable and flash-
# invalidate both caches via HID0; on MPC750, also disable the
# back-side L2.  Requires the MMU to be disabled (physical addressing
# from 0).  Clobbers r3-r7 and cr0.
#
cpu_cache_flush:
#
# Only flush if the D-cache is enabled (HID0[DCE]=1); otherwise there
# can be no dirty blocks and we go straight to the invalidate step.
#
mfspr r6,spr_603_hid0
andi. r5,r6,0x4000 # HID0[DCE]
cmpli cr0,0,r5,0x0000
bc 12,2,cache_invalidate # beq -- D-cache disabled, nothing to flush
#
# Flush D-cache. FIX!!!! Turn on I-cache for better performance.
#
# Start one block below 0 so the lwzu pre-increment makes the first
# access hit address 0x00000000.
addis r5,0,(~(BLOCK_SIZE-1))@h # Initial addr for lwzu/dcbf
ori r5,r5,(~(BLOCK_SIZE-1))@l # (named register; was bare "5")
addis r6,0,(__CACHE_SIZE)@h # Load r6 with __CACHE_SIZE
ori r6,r6,(__CACHE_SIZE)@l # (named register; was bare "6")
addi r7,0,BLOCK_SIZE # Load r7 with BLOCK_SIZE
addi r4,r5,0 # r4 = running address
# Step 1: touch one word per cache block over [0, __CACHE_SIZE) so
# dirty blocks above __CACHE_SIZE are evicted by replacement.
step1loop:
lwzu r3,BLOCK_SIZE(r4)
cmpl 0,0,r4,r6
bc 12,0,step1loop # if (r4 < r6) goto step1loop
sync
addi r4,r5,0 # rewind the address for step 2
# Step 2: dcbf each block below __CACHE_SIZE to push the remaining
# dirty data back to memory.  EA = r4 + r7, matching the lwzu
# pre-increment addressing above.
step2loop:
dcbf r4,r7
addi r4,r4,BLOCK_SIZE
cmpl 0,0,r4,r6
bc 12,0,step2loop # if (r4 < r6) goto step2loop
sync
cache_invalidate:
#
# Now invalidate the caches with HID0[ICFI,DCFI]
#
mfspr r6,spr_603_hid0
isync
addis r5,0,(~0x0000CC00)@h # mask clearing ICE,DCE,ICFI,DCFI
ori r5,r5,(~0x0000CC00)@l
and r5,r5,r6 # r5 = HID0[ICE=0,DCE=0,ICFI=0,DCFI=0]
ori r4,r5,0x0C00 # r4 = HID0[ICE=0,DCE=0,ICFI=1,DCFI=1]
isync
sync
mtspr spr_603_hid0,r5 # caches off
mtspr spr_603_hid0,r4 # pulse the flash-invalidate bits
mtspr spr_603_hid0,r5 # leave caches off, invalidate bits clear
isync
#
# For MPC750, disable on-chip L2 -- FIX !!! This is a cheat
# because we've used the dcbfs to flush L1 thru L2 to memory.
# This just simply turns it off
#
mfspr r3,spr_pvr
rlwinm r3,r3,16,16,31 # processor version field
cmpli cr0,0,r3,PVR_750
bc 4,2,cache_done # bne -- not a 750
mfspr r4,spr_750_l2cr
addis r5,0,0x8000 # L2CR[L2E] bit
nor r5,r5,r5 # r5 = ~0x80000000
and r4,r4,r5 # clear L2CR[L2E]
sync
mtspr spr_750_l2cr,r4
sync
cache_done:
blr
######################################################################
######################################################################
#
# These routines perform I/O to memory mapped I/O peripherals.
#
# uint32 cpu_iord_8 (void *addr);
# uint32 cpu_iord_16(void *addr);
# uint32 cpu_iord_32(void *addr);
# void cpu_iowr_8 (void *addr, uint32 data);
# void cpu_iowr_16(void *addr, uint32 data);
# void cpu_iowr_32(void *addr, uint32 data);
#
# Under EABI, addr is in r3, and data is in r4. Return value also in r3
#
# Each accessor brackets the access with eieio so MMIO reads/writes
# stay ordered with respect to other loads/stores.
cpu_iord_8:
# uint32 cpu_iord_8(void *addr): 8-bit MMIO read, zero-extended.
eieio
lbz r3,0(r3)
eieio
blr
cpu_iord_16:
# uint32 cpu_iord_16(void *addr): 16-bit MMIO read, zero-extended.
eieio
lhz r3,0(r3)
eieio
blr
cpu_iord_32:
# uint32 cpu_iord_32(void *addr): 32-bit MMIO read.
eieio
lwz r3,0(r3)
eieio
blr
cpu_iowr_8:
# void cpu_iowr_8(void *addr, uint32 data): 8-bit MMIO write.
eieio
stb r4,0(r3)
eieio
blr
cpu_iowr_16:
# void cpu_iowr_16(void *addr, uint32 data): 16-bit MMIO write.
eieio
sth r4,0(r3)
eieio
blr
cpu_iowr_32:
# void cpu_iowr_32(void *addr, uint32 data): 32-bit MMIO write.
eieio
stw r4,0(r3)
eieio
blr
# Byte-reversed (little-endian) MMIO accessors, same eieio fencing as
# the plain accessors above.
cpu_iord_16r:
# uint32 cpu_iord_16r(void *addr): byte-reversed 16-bit MMIO read.
eieio
lhbrx r3,0,r3
eieio
blr
cpu_iord_32r:
# uint32 cpu_iord_32r(void *addr): byte-reversed 32-bit MMIO read.
eieio
lwbrx r3,0,r3
eieio
blr
cpu_iowd_16r:
# void cpu_iowd_16r(void *addr, uint32 data): byte-reversed 16-bit write.
# NOTE(review): the "iowd" spelling is inconsistent with the "iowr"
# family above; kept as-is because external callers may use it.
eieio
sthbrx r4,0,r3
eieio
blr
cpu_iowd_32r:
# void cpu_iowd_32r(void *addr, uint32 data): byte-reversed 32-bit write.
eieio
stwbrx r4,0,r3
eieio # was missing: trailing fence and return restored so the
blr   # routine does not fall off the end of the file