📄 gmem.c
字号:
/* NOTE(review): the statement below is the tail of a statistics-printing
 * function whose opening lines are outside this view; left untouched. */
local_mc_allocs, local_mc_frees, ((gdouble) local_mc_frees) / local_mc_allocs * 100.0, local_mc_allocs - local_mc_frees);
}

/* Try to allocate n_bytes through the profiling allocator.  Every block is
 * prefixed with a two-gulong header: [0] = free count (for double-free
 * detection), [1] = requested length.  Returns a pointer just past the
 * header, or NULL if the underlying malloc failed.  Both outcomes are
 * recorded via profiler_log(). */
static gpointer
profiler_try_malloc (gsize n_bytes)
{
  gulong *p;

#ifdef G_ENABLE_DEBUG
  /* optional debugger trap on a specific allocation size */
  if (g_trap_malloc_size == n_bytes)
    G_BREAKPOINT ();
#endif /* G_ENABLE_DEBUG */

  /* NOTE(review): "sizeof (gulong) * 2 + n_bytes" can wrap for very large
   * n_bytes — presumably tolerable in a debug-only code path; confirm. */
  p = standard_malloc (sizeof (gulong) * 2 + n_bytes);

  if (p)
    {
      p[0] = 0;                 /* free count */
      p[1] = n_bytes;           /* length */
      profiler_log (PROFILER_ALLOC, n_bytes, TRUE);
      p += 2;                   /* hide the header from the caller */
    }
  else
    profiler_log (PROFILER_ALLOC, n_bytes, FALSE);

  return p;
}

/* malloc() replacement: like profiler_try_malloc(), but dumps the current
 * memory profile when the allocation fails. */
static gpointer
profiler_malloc (gsize n_bytes)
{
  gpointer mem = profiler_try_malloc (n_bytes);

  if (!mem)
    g_mem_profile ();

  return mem;
}

/* calloc() replacement: zero-initialized, header-prefixed allocation of
 * n_blocks * n_block_bytes bytes; logs the allocation and dumps the
 * profile on failure. */
static gpointer
profiler_calloc (gsize n_blocks, gsize n_block_bytes)
{
  /* NOTE(review): this product can overflow gsize before the header size is
   * added; standard_calloc is handed the already-multiplied value, so it
   * cannot re-check — verify callers keep sizes small. */
  gsize l = n_blocks * n_block_bytes;
  gulong *p;

#ifdef G_ENABLE_DEBUG
  if (g_trap_malloc_size == l)
    G_BREAKPOINT ();
#endif /* G_ENABLE_DEBUG */

  p = standard_calloc (1, sizeof (gulong) * 2 + l);

  if (p)
    {
      p[0] = 0;                 /* free count */
      p[1] = l;                 /* length */
      profiler_log (PROFILER_ALLOC | PROFILER_ZINIT, l, TRUE);
      p += 2;
    }
  else
    {
      profiler_log (PROFILER_ALLOC | PROFILER_ZINIT, l, FALSE);
      g_mem_profile ();
    }

  return p;
}

/* free() replacement: never returns memory to the system.  Instead it bumps
 * the header's free count so repeated frees of the same block can be
 * diagnosed, poisons the payload with 0xaa, and logs the event. */
static void
profiler_free (gpointer mem)
{
  gulong *p = mem;

  p -= 2;                       /* step back to the [free count, length] header */

  if (p[0])                     /* free count */
    {
      g_warning ("free(%p): memory has been freed %lu times already", p + 2, p[0]);
      profiler_log (PROFILER_FREE, p[1], /* length */ FALSE);
    }
  else
    {
#ifdef G_ENABLE_DEBUG
      if (g_trap_free_size == p[1])
        G_BREAKPOINT ();
#endif /* G_ENABLE_DEBUG */

      profiler_log (PROFILER_FREE, p[1], /* length */ TRUE);
      memset (p + 2, 0xaa, p[1]);       /* poison the freed payload */

      /* for all those that miss standard_free (p); in this place, yes,
       * we do leak all memory when profiling, and that is intentional
       * to catch double frees.  patch submissions are futile.
       */
    }
  p[0] += 1;                    /* record this free in the header */
}

/* realloc() replacement.  Refuses (returns NULL) if the block was already
 * freed; otherwise reallocates including the header, logs the implied
 * free+alloc pair, and returns a pointer past the new header. */
static gpointer
profiler_try_realloc (gpointer mem, gsize n_bytes)
{
  gulong *p = mem;

  p -= 2;                       /* header precedes the user pointer; only
                                 * dereferenced below when mem != NULL */

#ifdef G_ENABLE_DEBUG
  if (g_trap_realloc_size == n_bytes)
    G_BREAKPOINT ();
#endif /* G_ENABLE_DEBUG */

  if (mem && p[0])              /* free count */
    {
      g_warning ("realloc(%p, %lu): memory has been freed %lu times already", p + 2, (gulong)n_bytes, p[0]);
      profiler_log (PROFILER_ALLOC | PROFILER_RELOC, n_bytes, FALSE);

      return NULL;
    }
  else
    {
      p = standard_realloc (mem ? p : NULL, sizeof (gulong) * 2 + n_bytes);

      if (p)
        {
          if (mem)
            /* log the release of the old length stored in the header */
            profiler_log (PROFILER_FREE | PROFILER_RELOC, p[1], TRUE);
          p[0] = 0;
          p[1] = n_bytes;
          profiler_log (PROFILER_ALLOC | PROFILER_RELOC, p[1], TRUE);
          p += 2;
        }
      else
        profiler_log (PROFILER_ALLOC | PROFILER_RELOC, n_bytes, FALSE);

      return p;
    }
}

/* realloc() replacement that dumps the memory profile on failure. */
static gpointer
profiler_realloc (gpointer mem, gsize n_bytes)
{
  mem = profiler_try_realloc (mem, n_bytes);

  if (!mem)
    g_mem_profile ();

  return mem;
}

/* vtable wiring the profiling allocator into g_malloc()/g_free() and
 * friends (installed via g_mem_set_vtable (glib_mem_profiler_table)). */
static GMemVTable profiler_table = {
  profiler_malloc,
  profiler_realloc,
  profiler_free,
  profiler_calloc,
  profiler_try_malloc,
  profiler_try_realloc,
};
GMemVTable *glib_mem_profiler_table = &profiler_table;

#endif /* !G_DISABLE_CHECKS */

/* --- MemChunks --- */

typedef struct _GFreeAtom GFreeAtom;
typedef struct _GMemArea GMemArea;

/* A freed atom doubles as a singly-linked free-list node. */
struct _GFreeAtom
{
  GFreeAtom *next;
};

/* One contiguous slab from which fixed-size atoms are handed out. */
struct _GMemArea
{
  GMemArea *next;               /* the next mem area */
  GMemArea *prev;               /* the previous mem area */
  gulong index;                 /* the current index into the "mem" array */
  gulong free;                  /* the number of free bytes in this mem area */
  gulong allocated;             /* the number of atoms allocated from this area */
  gulong mark;                  /* is this mem area marked for deletion */
  gchar mem[MEM_AREA_SIZE];     /* the mem array from which atoms get allocated
                                 * the actual size of this array is determined by
                                 * the mem chunk "area_size". ANSI says that it
                                 * must be declared to be the maximum size it
                                 * can possibly be (even though the actual size
                                 * may be less).
*/};struct _GMemChunk{ const gchar *name; /* name of this MemChunk...used for debugging output */ gint type; /* the type of MemChunk: ALLOC_ONLY or ALLOC_AND_FREE */ gint num_mem_areas; /* the number of memory areas */ gint num_marked_areas; /* the number of areas marked for deletion */ guint atom_size; /* the size of an atom */ gulong area_size; /* the size of a memory area */ GMemArea *mem_area; /* the current memory area */ GMemArea *mem_areas; /* a list of all the mem areas owned by this chunk */ GMemArea *free_mem_area; /* the free area...which is about to be destroyed */ GFreeAtom *free_atoms; /* the free atoms list */ GTree *mem_tree; /* tree of mem areas sorted by memory address */ GMemChunk *next; /* pointer to the next chunk */ GMemChunk *prev; /* pointer to the previous chunk */};#ifndef DISABLE_MEM_POOLSstatic gulong g_mem_chunk_compute_size (gulong size, gulong min_size) G_GNUC_CONST;static gint g_mem_chunk_area_compare (GMemArea *a, GMemArea *b);static gint g_mem_chunk_area_search (GMemArea *a, gchar *addr);/* here we can't use StaticMutexes, as they depend upon a working * g_malloc, the same holds true for StaticPrivate */static GMutex *mem_chunks_lock = NULL;static GMemChunk *mem_chunks = NULL;GMemChunk*g_mem_chunk_new (const gchar *name, gint atom_size, gulong area_size, gint type){ GMemChunk *mem_chunk; gulong rarea_size; g_return_val_if_fail (atom_size > 0, NULL); g_return_val_if_fail (area_size >= atom_size, NULL); ENTER_MEM_CHUNK_ROUTINE (); area_size = (area_size + atom_size - 1) / atom_size; area_size *= atom_size; mem_chunk = g_new (GMemChunk, 1); mem_chunk->name = name; mem_chunk->type = type; mem_chunk->num_mem_areas = 0; mem_chunk->num_marked_areas = 0; mem_chunk->mem_area = NULL; mem_chunk->free_mem_area = NULL; mem_chunk->free_atoms = NULL; mem_chunk->mem_tree = NULL; mem_chunk->mem_areas = NULL; mem_chunk->atom_size = atom_size; if (mem_chunk->type == G_ALLOC_AND_FREE) mem_chunk->mem_tree = g_tree_new ((GCompareFunc) 
g_mem_chunk_area_compare); if (mem_chunk->atom_size % G_MEM_ALIGN) mem_chunk->atom_size += G_MEM_ALIGN - (mem_chunk->atom_size % G_MEM_ALIGN); rarea_size = area_size + sizeof (GMemArea) - MEM_AREA_SIZE; rarea_size = g_mem_chunk_compute_size (rarea_size, atom_size + sizeof (GMemArea) - MEM_AREA_SIZE); mem_chunk->area_size = rarea_size - (sizeof (GMemArea) - MEM_AREA_SIZE); g_mutex_lock (mem_chunks_lock); mem_chunk->next = mem_chunks; mem_chunk->prev = NULL; if (mem_chunks) mem_chunks->prev = mem_chunk; mem_chunks = mem_chunk; g_mutex_unlock (mem_chunks_lock); LEAVE_MEM_CHUNK_ROUTINE (); return mem_chunk;}voidg_mem_chunk_destroy (GMemChunk *mem_chunk){ GMemArea *mem_areas; GMemArea *temp_area; g_return_if_fail (mem_chunk != NULL); ENTER_MEM_CHUNK_ROUTINE (); mem_areas = mem_chunk->mem_areas; while (mem_areas) { temp_area = mem_areas; mem_areas = mem_areas->next; g_free (temp_area); } if (mem_chunk->next) mem_chunk->next->prev = mem_chunk->prev; if (mem_chunk->prev) mem_chunk->prev->next = mem_chunk->next; g_mutex_lock (mem_chunks_lock); if (mem_chunk == mem_chunks) mem_chunks = mem_chunks->next; g_mutex_unlock (mem_chunks_lock); if (mem_chunk->type == G_ALLOC_AND_FREE) g_tree_destroy (mem_chunk->mem_tree); g_free (mem_chunk); LEAVE_MEM_CHUNK_ROUTINE ();}gpointerg_mem_chunk_alloc (GMemChunk *mem_chunk){ GMemArea *temp_area; gpointer mem; ENTER_MEM_CHUNK_ROUTINE (); g_return_val_if_fail (mem_chunk != NULL, NULL); while (mem_chunk->free_atoms) { /* Get the first piece of memory on the "free_atoms" list. * We can go ahead and destroy the list node we used to keep * track of it with and to update the "free_atoms" list to * point to its next element. */ mem = mem_chunk->free_atoms; mem_chunk->free_atoms = mem_chunk->free_atoms->next; /* Determine which area this piece of memory is allocated from */ temp_area = g_tree_search (mem_chunk->mem_tree, (GCompareFunc) g_mem_chunk_area_search, mem); /* If the area has been marked, then it is being destroyed. 
       * (ie marked to be destroyed).
       * We check to see if all of the segments on the free list that
       * reference this area have been removed. This occurs when
       * the amount of free memory is less than the allocatable size.
       * If the chunk should be freed, then we place it in the "free_mem_area".
       * This is so we make sure not to free the mem area here and then
       * allocate it again a few lines down.
       * If we don't allocate a chunk a few lines down then the "free_mem_area"
       * will be freed.
       * If there is already a "free_mem_area" then we'll just free this mem area.
       */
      if (temp_area->mark)
        {
          /* Update the "free" memory available in that area */
          temp_area->free += mem_chunk->atom_size;

          /* Area is now entirely free: either cache it as free_mem_area,
           * or — if one is already cached — unlink and release this one. */
          if (temp_area->free == mem_chunk->area_size)
            {
              if (temp_area == mem_chunk->mem_area)
                mem_chunk->mem_area = NULL;

              if (mem_chunk->free_mem_area)
                {
                  mem_chunk->num_mem_areas -= 1;

                  /* unlink the area from the chunk's area list */
                  if (temp_area->next)
                    temp_area->next->prev = temp_area->prev;
                  if (temp_area->prev)
                    temp_area->prev->next = temp_area->next;
                  if (temp_area == mem_chunk->mem_areas)
                    mem_chunk->mem_areas = mem_chunk->mem_areas->next;

                  if (mem_chunk->type == G_ALLOC_AND_FREE)
                    g_tree_remove (mem_chunk->mem_tree, temp_area);
                  g_free (temp_area);
                }
              else
                mem_chunk->free_mem_area = temp_area;

              mem_chunk->num_marked_areas -= 1;
            }
        }
      else
        {
          /* Update the number of allocated atoms count. */
          temp_area->allocated += 1;

          /* The area wasn't marked...return the memory */
          goto outa_here;
        }
    }

  /* If there isn't a current mem area or the current mem area is out of space
   * then allocate a new mem area. We'll first check and see if we can use
   * the "free_mem_area". Otherwise we'll just malloc the mem area.
   */
  if ((!mem_chunk->mem_area) ||
      ((mem_chunk->mem_area->index + mem_chunk->atom_size) > mem_chunk->area_size))
    {
      if (mem_chunk->free_mem_area)
        {
          /* reuse the cached fully-free area instead of allocating anew */
          mem_chunk->mem_area = mem_chunk->free_mem_area;
          mem_chunk->free_mem_area = NULL;
        }
      else
        {
#ifdef ENABLE_GC_FRIENDLY
          /* zero-filled so conservative GCs see no stale pointer values */
          mem_chunk->mem_area = (GMemArea*) g_malloc0 (sizeof (GMemArea) -
                                                       MEM_AREA_SIZE +
                                                       mem_chunk->area_size);
#else /* !ENABLE_GC_FRIENDLY */
          mem_chunk->mem_area = (GMemArea*) g_malloc (sizeof (GMemArea) -
                                                      MEM_AREA_SIZE +
                                                      mem_chunk->area_size);
#endif /* ENABLE_GC_FRIENDLY */

          mem_chunk->num_mem_areas += 1;
          mem_chunk->mem_area->next = mem_chunk->mem_areas;
          mem_chunk->mem_area->prev = NULL;
⌨️ 快捷键说明
复制代码
Ctrl + C
搜索代码
Ctrl + F
全屏模式
F11
切换主题
Ctrl + Shift + D
显示快捷键
?
增大字号
Ctrl + =
减小字号
Ctrl + -