memory.c
		}while(pa % MB);
		}
		else{
			nvalid[MemUPA] += MB/BY2PG;
			mapfree(&rmapupa, pa, MB);
			*pte = 0;
			pa += MB;
		}

		/*
		 * Done with this 4MB chunk, review the options:
		 * 1) not physical memory and >=16MB - invalidate the PDB entry;
		 * 2) physical memory - use the 4MB page extension if possible;
		 * 3) not physical memory and <16MB - use the 4MB page extension
		 *    if possible;
		 * 4) mixed or no 4MB page extension - commit the already
		 *    initialised space for the page table.
		 */
		if(pa%(4*MB) == 0 && pa >= 32*MB && nvalid[MemUPA] == (4*MB)/BY2PG){
			/*
			 * If we encounter a 4MB chunk of missing memory
			 * at a sufficiently high offset, call it the end of
			 * memory.  Otherwise we run the risk of thinking
			 * that video memory is real RAM.
			 */
			break;
		}
		if(pa <= maxkpa && pa%(4*MB) == 0){
			table = &m->pdb[PDX(KADDR(pa - 4*MB))];
			if(nvalid[MemUPA] == (4*MB)/BY2PG)
				*table = 0;
			else if(nvalid[MemRAM] == (4*MB)/BY2PG && (m->cpuiddx & 0x08))
				*table = (pa - 4*MB)|PTESIZE|PTEWRITE|PTEVALID;
			else if(nvalid[MemUMB] == (4*MB)/BY2PG && (m->cpuiddx & 0x08))
				*table = (pa - 4*MB)|PTESIZE|PTEWRITE|PTEUNCACHED|PTEVALID;
			else{
				*table = map|PTEWRITE|PTEVALID;
				map = 0;
			}
		}
		mmuflushtlb(PADDR(m->pdb));
		x += 0x3141526;
	}

	/*
	 * If we didn't reach the end of the 4MB chunk, that part won't
	 * be mapped.  Commit the already initialised space for the page table.
	 */
	if(pa % (4*MB) && pa <= maxkpa){
		m->pdb[PDX(KADDR(pa))] = map|PTEWRITE|PTEVALID;
		map = 0;
	}
	if(map)
		mapfree(&rmapram, map, BY2PG);

	m->pdb[PDX(vbase)] = 0;
	mmuflushtlb(PADDR(m->pdb));

	mapfree(&rmapupa, pa, (u32int)-pa);
	*k0 = kzero;
}

/*
 * BIOS Int 0x15 E820 memory map.
 */
enum
{
	SMAP = ('S'<<24)|('M'<<16)|('A'<<8)|'P',
	Ememory = 1,
	Ereserved = 2,
	Carry = 1,
};

typedef struct Emap Emap;
struct Emap
{
	uvlong base;
	uvlong len;
	ulong type;
};
static Emap emap[16];
int nemap;

static char *etypes[] =
{
	"type=0",
	"memory",
	"reserved",
	"acpi reclaim",
	"acpi nvs",
};

static int
emapcmp(const void *va, const void *vb)
{
	Emap *a, *b;

	a = (Emap*)va;
	b = (Emap*)vb;
	if(a->base < b->base)
		return -1;
	if(a->base > b->base)
		return 1;
	if(a->len < b->len)
		return -1;
	if(a->len > b->len)
		return 1;
	return a->type - b->type;
}
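/*
 * Aside: a standalone, hosted-C sketch of how emapcmp orders E820
 * entries under qsort: by base, then length, then type.  uint64_t
 * and uint32_t stand in for the kernel's uvlong/ulong, and the
 * sample entries are invented for illustration; compile it
 * separately, it is not part of memory.c.
 */
#include <stdio.h>
#include <stdlib.h>
#include <stdint.h>

typedef struct Emap Emap;
struct Emap
{
	uint64_t base;
	uint64_t len;
	uint32_t type;
};

static int
emapcmp(const void *va, const void *vb)
{
	const Emap *a = va, *b = vb;

	if(a->base < b->base)
		return -1;
	if(a->base > b->base)
		return 1;
	if(a->len < b->len)
		return -1;
	if(a->len > b->len)
		return 1;
	return (int)a->type - (int)b->type;
}

int
main(void)
{
	/* deliberately out of order: reserved hole, extended RAM, base RAM */
	Emap emap[] = {
		{ 0xF0000ULL, 0x10000ULL, 2 },
		{ 0x100000ULL, 0x3FF00000ULL, 1 },
		{ 0ULL, 0x9FC00ULL, 1 },
	};
	int i, n = sizeof emap / sizeof emap[0];

	qsort(emap, n, sizeof emap[0], emapcmp);
	for(i = 0; i < n; i++)
		printf("%.8llx %.8llx type=%u\n",
			(unsigned long long)emap[i].base,
			(unsigned long long)(emap[i].base + emap[i].len),
			(unsigned)emap[i].type);
	return 0;
}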
static void
map(ulong base, ulong len, int type)
{
	ulong e, n;
	ulong *table, flags, maxkpa;

	/*
	 * Split any call crossing 4*MB to make below simpler.
	 */
	if(base < 4*MB && len > 4*MB-base){
		n = 4*MB - base;
		map(base, n, type);
		map(4*MB, len-n, type);
	}

	/*
	 * Let lowraminit and umbscan hash out the low 4MB.
	 */
	if(base < 4*MB)
		return;

	/*
	 * Any non-memory below 16*MB is used as upper mem blocks.
	 */
	if(type == MemUPA && base < 16*MB && base+len > 16*MB){
		map(base, 16*MB-base, MemUMB);
		map(16*MB, len-(16*MB-base), MemUPA);
		return;
	}

	/*
	 * Memory below CPU0MACH is reserved for the kernel
	 * and already mapped.
	 */
	if(base < PADDR(CPU0MACH)+BY2PG){
		n = PADDR(CPU0MACH)+BY2PG - base;
		if(len <= n)
			return;
		map(PADDR(CPU0MACH), len-n, type);
		return;
	}

	/*
	 * Memory between KTZERO and end is the kernel itself
	 * and is already mapped.
	 */
	if(base < PADDR(KTZERO) && base+len > PADDR(KTZERO)){
		map(base, PADDR(KTZERO)-base, type);
		return;
	}
	if(PADDR(KTZERO) < base && base < PADDR(PGROUND((ulong)end))){
		n = PADDR(PGROUND((ulong)end));
		if(len <= n)
			return;
		map(PADDR(PGROUND((ulong)end)), len-n, type);
		return;
	}

	/*
	 * Now we have a simple case.
	 */
	// print("map %.8lux %.8lux %d\n", base, base+len, type);
	switch(type){
	case MemRAM:
		mapfree(&rmapram, base, len);
		flags = PTEWRITE|PTEVALID;
		break;
	case MemUMB:
		mapfree(&rmapumb, base, len);
		flags = PTEWRITE|PTEUNCACHED|PTEVALID;
		break;
	case MemUPA:
		mapfree(&rmapupa, base, len);
		flags = 0;
		break;
	default:
	case MemReserved:
		flags = 0;
		break;
	}

	/*
	 * bottom 4MB is already mapped - just twiddle flags.
	 * (not currently used - see above)
	 */
	if(base < 4*MB){
		table = KADDR(PPN(m->pdb[PDX(base)]));
		e = base+len;
		base = PPN(base);
		for(; base<e; base+=BY2PG)
			table[PTX(base)] |= flags;
		return;
	}

	/*
	 * Only map from KZERO to 2^32.
	 */
	if(flags){
		maxkpa = -KZERO;
		if(base >= maxkpa)
			return;
		if(len > maxkpa-base)
			len = maxkpa - base;
		pdbmap(m->pdb, base|flags, base+KZERO, len);
	}
}

static int
e820scan(void)
{
	int i;
	Ureg u;
	ulong cont, base, len;
	uvlong last;
	Emap *e;

	if(getconf("*norealmode") || getconf("*noe820scan"))
		return -1;

	cont = 0;
	for(i=0; i<nelem(emap); i++){
		memset(&u, 0, sizeof u);
		u.ax = 0xE820;
		u.bx = cont;
		u.cx = 20;
		u.dx = SMAP;
		u.es = (PADDR(RMBUF)>>4)&0xF000;
		u.di = PADDR(RMBUF)&0xFFFF;
		u.trap = 0x15;
		realmode(&u);
		cont = u.bx;
		if((u.flags&Carry) || u.ax != SMAP || u.cx != 20)
			break;
		e = &emap[nemap++];
		*e = *(Emap*)RMBUF;
		if(u.bx == 0)
			break;
	}
	if(nemap == 0)
		return -1;
	qsort(emap, nemap, sizeof emap[0], emapcmp);

	for(i=0; i<nemap; i++){
		e = &emap[i];
		print("E820: %.8llux %.8llux ", e->base, e->base+e->len);
		if(e->type < nelem(etypes))
			print("%s\n", etypes[e->type]);
		else
			print("type=%lud\n", e->type);
	}

	last = 0;
	for(i=0; i<nemap; i++){
		e = &emap[i];
		/*
		 * pull out the info but only about the low 32 bits...
		 */
		if(e->base >= (1LL<<32))
			break;
		base = e->base;
		if(base+e->len > (1LL<<32))
			len = -base;
		else
			len = e->len;
		/*
		 * If the map skips addresses, mark them available.
		 */
		if(last < e->base)
			map(last, e->base-last, MemUPA);
		last = base+len;
		if(e->type == Ememory)
			map(base, len, MemRAM);
		else
			map(base, len, MemReserved);
	}
	if(last < (1LL<<32))
		map(last, (u32int)-last, MemUPA);
	return 0;
}
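/*
 * Aside: a userspace sketch of map()'s splitting discipline above.
 * The kernel carve-outs (CPU0MACH, KTZERO) are omitted and filing a
 * region into a resource map is replaced by a print.  MB and the
 * Mem* names mirror the kernel's ordering, but everything here is
 * invented for illustration; compile it separately, it is not part
 * of memory.c.
 */
#include <stdio.h>

enum { MB = 1024*1024, MemUPA = 0, MemRAM = 1, MemUMB = 2 };
static char *tname[] = { "upa", "ram", "umb" };

static void
map(unsigned long base, unsigned long len, int type)
{
	unsigned long n;

	/* split any call crossing 4*MB, as the kernel does */
	if(base < 4*MB && len > 4*MB-base){
		n = 4*MB - base;
		map(base, n, type);
		map(4*MB, len-n, type);
		return;
	}
	/* the low 4MB is handled elsewhere (lowraminit/umbscan) */
	if(base < 4*MB)
		return;
	/* non-memory below 16*MB becomes upper memory blocks */
	if(type == MemUPA && base < 16*MB && base+len > 16*MB){
		map(base, 16*MB-base, MemUMB);
		map(16*MB, len-(16*MB-base), MemUPA);
		return;
	}
	printf("file %.8lx-%.8lx as %s\n", base, base+len, tname[type]);
}

int
main(void)
{
	/* a hole spanning 2MB..20MB: the part below 4MB is skipped,
	 * the slice from 4MB to 16MB becomes UMB, the rest stays UPA */
	map(2*MB, 18*MB, MemUPA);
	return 0;
}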
void
meminit(void)
{
	int i;
	Map *mp;
	Confmem *cm;
	ulong pa, *pte;
	ulong maxmem, lost;
	char *p;

	if(p = getconf("*maxmem"))
		maxmem = strtoul(p, 0, 0);
	else
		maxmem = 0;

	/*
	 * Set special attributes for memory between 640KB and 1MB:
	 *   VGA memory is writethrough;
	 *   BIOS ROM's/UMB's are uncached;
	 * then scan for useful memory.
	 */
	for(pa = 0xA0000; pa < 0xC0000; pa += BY2PG){
		pte = mmuwalk(m->pdb, (ulong)KADDR(pa), 2, 0);
		*pte |= PTEWT;
	}
	for(pa = 0xC0000; pa < 0x100000; pa += BY2PG){
		pte = mmuwalk(m->pdb, (ulong)KADDR(pa), 2, 0);
		*pte |= PTEUNCACHED;
	}
	mmuflushtlb(PADDR(m->pdb));

	umbscan();
	lowraminit();
	if(e820scan() < 0)
		ramscan(maxmem);

	/*
	 * Set the conf entries describing banks of allocatable memory.
	 */
	for(i=0; i<nelem(mapram) && i<nelem(conf.mem); i++){
		mp = &rmapram.map[i];
		cm = &conf.mem[i];
		cm->base = mp->addr;
		cm->npage = mp->size/BY2PG;
	}

	lost = 0;
	for(; i<nelem(mapram); i++)
		lost += rmapram.map[i].size;
	if(lost)
		print("meminit - lost %lud bytes\n", lost);

	if(MEMDEBUG)
		memdebug();
}

/*
 * Allocate memory from the upper memory blocks.
 */
ulong
umbmalloc(ulong addr, int size, int align)
{
	ulong a;

	if(a = mapalloc(&rmapumb, addr, size, align))
		return (ulong)KADDR(a);

	return 0;
}

void
umbfree(ulong addr, int size)
{
	mapfree(&rmapumb, PADDR(addr), size);
}

ulong
umbrwmalloc(ulong addr, int size, int align)
{
	ulong a;
	uchar *p;

	if(a = mapalloc(&rmapumbrw, addr, size, align))
		return (ulong)KADDR(a);

	/*
	 * Perhaps the memory wasn't visible before
	 * the interface was initialised, so try again.
	 */
	if((a = umbmalloc(addr, size, align)) == 0)
		return 0;
	p = (uchar*)a;
	p[0] = 0xCC;
	p[size-1] = 0xCC;
	if(p[0] == 0xCC && p[size-1] == 0xCC)
		return a;
	umbfree(a, size);
	return 0;
}

void
umbrwfree(ulong addr, int size)
{
	mapfree(&rmapumbrw, PADDR(addr), size);
}

/*
 * Give out otherwise-unused physical address space
 * for use in configuring devices.  Note that unlike upamalloc
 * before it, upaalloc does not map the physical address
 * into virtual memory.  Call vmap to do that.
 */
ulong
upaalloc(int size, int align)
{
	ulong a;

	a = mapalloc(&rmapupa, 0, size, align);
	if(a == 0){
		print("out of physical address space allocating %d\n", size);
		mapprint(&rmapupa);
	}
	return a;
}

void
upafree(ulong pa, int size)
{
	mapfree(&rmapupa, pa, size);
}

void
upareserve(ulong pa, int size)
{
	ulong a;

	a = mapalloc(&rmapupa, pa, size, 0);
	if(a != pa){
		/*
		 * This can happen when we're using the E820
		 * map, which might have already reserved some
		 * of the regions claimed by the pci devices.
		 */
		// print("upareserve: cannot reserve pa=%#.8lux size=%d\n", pa, size);
		if(a != 0)
			mapfree(&rmapupa, a, size);
	}
}

void
memorysummary(void)
{
	memdebug();
}
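/*
 * Aside: umbrwmalloc's fallback above decides whether an upper memory
 * block is RAM-backed by writing a 0xCC sentinel at both ends and
 * reading it back; ROM or unbacked addresses won't hold the value.
 * Below is a minimal hosted sketch of the same probe.  The name
 * rwprobe is ours, and volatile stands in for the kernel's uncached
 * mapping to keep the compiler from optimising the test away; the
 * demo runs it on an ordinary heap buffer, where it trivially
 * succeeds.  Compile separately, it is not part of memory.c.
 */
#include <stdio.h>
#include <stdlib.h>

static int
rwprobe(volatile unsigned char *p, int size)
{
	/* store the sentinel at both ends, then read it back */
	p[0] = 0xCC;
	p[size-1] = 0xCC;
	return p[0] == 0xCC && p[size-1] == 0xCC;
}

int
main(void)
{
	unsigned char *buf;

	buf = malloc(64);
	if(buf == NULL)
		return 1;
	printf("heap buffer writable: %s\n", rwprobe(buf, 64) ? "yes" : "no");
	free(buf);
	return 0;
}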