From ed60b3ae46e50f3ad214d1e2591d1afa7da651a6 Mon Sep 17 00:00:00 2001
From: Paul Eggert
Date: Fri, 13 Dec 2019 13:45:10 -0800
Subject: [PATCH] Simplify use of mallopt

Do not mess with mallopt advice except when necessary.
The advice we were giving was so old it was surely wrong now.
This change also should fix some trivial performance issues.
* src/alloc.c (MMAP_MAX_AREAS): Remove.  All uses changed to
glibc default.
(pointers_fit_in_lispobj_p, mmap_lisp_allowed_p): Define
if M_MMAP_MAX, not if DOUG_LEA_MALLOC.
(maybe_disable_malloc_mmap, reenable_malloc_mmap): New functions,
to simplify callers.  Use glibc default when reenabling.
(lisp_align_malloc, allocate_string_data, allocate_vectorlike):
Use these functions.
(lisp_align_malloc): Fix bug where mmap continued to be disabled
when memory was exhausted, on platforms using unexec.
(allocate_vectorlike): Do not disable and then reenable mmap
when calling allocate_vector_from_block, as it allocates only
small blocks.
(init_alloc_once_for_pdumper): Do not alter M_MMAP_THRESHOLD
or M_MMAP_MAX; the defaults are fine.
* src/ralloc.c (r_alloc_init) [DOUG_LEA_MALLOC]: Do not alter
M_TOP_PAD.
---
 src/alloc.c  | 70 +++++++++++++++++++---------------------------------
 src/ralloc.c | 18 +++-----------
 2 files changed, 28 insertions(+), 60 deletions(-)

diff --git a/src/alloc.c b/src/alloc.c
index 9fbd0d0573..4e8d7d9721 100644
--- a/src/alloc.c
+++ b/src/alloc.c
@@ -106,11 +106,6 @@ Copyright (C) 1985-1986, 1988, 1993-1995, 1997-2019 Free Software
 
 #ifdef DOUG_LEA_MALLOC
 
-/* Specify maximum number of areas to mmap.  It would be nice to use a
-   value that explicitly means "no limit".  */
-
-# define MMAP_MAX_AREAS 100000000
-
 /* A pointer to the memory allocated that copies that static data
    inside glibc's malloc.  */
 static void *malloc_state_ptr;
@@ -552,7 +547,8 @@ tally_consing (ptrdiff_t nbytes)
   consing_until_gc -= nbytes;
 }
 
-#ifdef DOUG_LEA_MALLOC
+#ifdef M_MMAP_MAX
+
 static bool
 pointers_fit_in_lispobj_p (void)
 {
@@ -569,6 +565,23 @@ mmap_lisp_allowed_p (void)
      regions.  */
   return pointers_fit_in_lispobj_p () && !will_dump_with_unexec_p ();
 }
+
+/* Temporarily disable and then reenable malloc's use of mmap.  */
+static void
+maybe_disable_malloc_mmap (void)
+{
+  if (!mmap_lisp_allowed_p ())
+    mallopt (M_MMAP_MAX, 0);
+}
+static void
+reenable_malloc_mmap (void)
+{
+  if (!mmap_lisp_allowed_p ())
+    mallopt (M_MMAP_MAX, 1 << 16 /* glibc default */);
+}
+#else
+static void maybe_disable_malloc_mmap (void) {}
+static void reenable_malloc_mmap (void) {}
 #endif
 
 /* Head of a circularly-linked list of extant finalizers.  */
@@ -1133,11 +1146,7 @@ lisp_align_malloc (size_t nbytes, enum mem_type type)
       int i;
       bool aligned;
 
-#ifdef DOUG_LEA_MALLOC
-      if (!mmap_lisp_allowed_p ())
-        mallopt (M_MMAP_MAX, 0);
-#endif
-
+      maybe_disable_malloc_mmap ();
 #ifdef USE_ALIGNED_ALLOC
       verify (ABLOCKS_BYTES % BLOCK_ALIGN == 0);
       abase = base = aligned_alloc (BLOCK_ALIGN, ABLOCKS_BYTES);
@@ -1145,6 +1154,7 @@ lisp_align_malloc (size_t nbytes, enum mem_type type)
       base = malloc (ABLOCKS_BYTES);
       abase = pointer_align (base, BLOCK_ALIGN);
 #endif
+      reenable_malloc_mmap ();
 
       if (base == 0)
         {
@@ -1156,11 +1166,6 @@ lisp_align_malloc (size_t nbytes, enum mem_type type)
       if (!aligned)
         ((void **) abase)[-1] = base;
 
-#ifdef DOUG_LEA_MALLOC
-      if (!mmap_lisp_allowed_p ())
-        mallopt (M_MMAP_MAX, MMAP_MAX_AREAS);
-#endif
-
 #if ! USE_LSB_TAG
       /* If the memory just allocated cannot be addressed thru a Lisp
          object's pointer, and it needs to be, that's equivalent to
@@ -1807,18 +1812,9 @@ allocate_string_data (struct Lisp_String *s,
   if (nbytes > LARGE_STRING_BYTES)
     {
       size_t size = FLEXSIZEOF (struct sblock, data, needed);
-
-#ifdef DOUG_LEA_MALLOC
-      if (!mmap_lisp_allowed_p ())
-        mallopt (M_MMAP_MAX, 0);
-#endif
-
+      maybe_disable_malloc_mmap ();
       b = lisp_malloc (size + GC_STRING_EXTRA, MEM_TYPE_NON_LISP);
-
-#ifdef DOUG_LEA_MALLOC
-      if (!mmap_lisp_allowed_p ())
-        mallopt (M_MMAP_MAX, MMAP_MAX_AREAS);
-#endif
+      reenable_malloc_mmap ();
 
       data = b->data;
       b->next = large_sblocks;
@@ -3145,27 +3141,19 @@ allocate_vectorlike (ptrdiff_t len)
 
   MALLOC_BLOCK_INPUT;
 
-#ifdef DOUG_LEA_MALLOC
-  if (!mmap_lisp_allowed_p ())
-    mallopt (M_MMAP_MAX, 0);
-#endif
-
   if (nbytes <= VBLOCK_BYTES_MAX)
     p = allocate_vector_from_block (vroundup (nbytes));
   else
     {
+      maybe_disable_malloc_mmap ();
       struct large_vector *lv = lisp_malloc (large_vector_offset + nbytes,
                                              MEM_TYPE_VECTORLIKE);
+      reenable_malloc_mmap ();
       lv->next = large_vectors;
       large_vectors = lv;
       p = large_vector_vec (lv);
     }
 
-#ifdef DOUG_LEA_MALLOC
-  if (!mmap_lisp_allowed_p ())
-    mallopt (M_MMAP_MAX, MMAP_MAX_AREAS);
-#endif
-
   if (find_suspicious_object_in_range (p, (char *) p + nbytes))
     emacs_abort ();
 
@@ -7302,14 +7290,6 @@ init_alloc_once_for_pdumper (void)
   purebeg = PUREBEG;
   pure_size = PURESIZE;
   mem_init ();
-
-#ifdef DOUG_LEA_MALLOC
-  mallopt (M_TRIM_THRESHOLD, 128 * 1024); /* Trim threshold.  */
-  mallopt (M_MMAP_THRESHOLD, 64 * 1024);  /* Mmap threshold.  */
-  mallopt (M_MMAP_MAX, MMAP_MAX_AREAS);   /* Max. number of mmap'ed areas.  */
-#endif
-
-
   init_finalizer_list (&finalizers);
   init_finalizer_list (&doomed_finalizers);
   refill_memory_reserve ();
diff --git a/src/ralloc.c b/src/ralloc.c
index 66ea2ec411..857702825b 100644
--- a/src/ralloc.c
+++ b/src/ralloc.c
@@ -1176,23 +1176,11 @@ r_alloc_init (void)
   extra_bytes = PAGE_ROUNDUP (50000);
 #endif
 
-#ifdef DOUG_LEA_MALLOC
-  block_input ();
-  mallopt (M_TOP_PAD, 64 * 4096);
-  unblock_input ();
-#else
-#if !defined SYSTEM_MALLOC && !defined HYBRID_MALLOC
-  /* Give GNU malloc's morecore some hysteresis so that we move all
-     the relocatable blocks much less often.  The number used to be
-     64, but alloc.c would override that with 32 in code that was
-     removed when SYNC_INPUT became the only input handling mode.
-     That code was conditioned on !DOUG_LEA_MALLOC, so the call to
-     mallopt above is left unchanged.  (Actually, I think there's no
-     system nowadays that uses DOUG_LEA_MALLOC and also uses
-     REL_ALLOC.)  */
+#if !defined DOUG_LEA_MALLOC && !defined HYBRID_MALLOC && !defined SYSTEM_MALLOC
+  /* Give gmalloc's morecore some hysteresis so that we move all
+     the relocatable blocks much less often.  */
   __malloc_extra_blocks = 32;
 #endif
-#endif
 
 #if !defined SYSTEM_MALLOC && !defined HYBRID_MALLOC
   first_heap->end = (void *) PAGE_ROUNDUP (first_heap->start);
-- 
2.23.0