From: Jeremy Harris
Date: Tue, 28 Jan 2020 21:10:17 +0000 (+0000)
Subject: Taint: slow-mode checking only
X-Git-Tag: exim-4_94_RC0~122
X-Git-Url: https://git.exim.org/exim.git/commitdiff_plain/4381d60bc96bed88d96e8cc6b534dd0dcd48163f

Taint: slow-mode checking only
---

diff --git a/doc/doc-txt/ChangeLog b/doc/doc-txt/ChangeLog
index 43b306d3b..1b38268b4 100644
--- a/doc/doc-txt/ChangeLog
+++ b/doc/doc-txt/ChangeLog
@@ -100,13 +100,9 @@ JH/21 Bug 2501: Fix init call in the heimdal authenticator.  Previously it
       buffer was in use at the time.  Change to a compile-time increase in
       the buffer size, when this authenticator is compiled into exim.
 
-JH/22 Taint checking: move to a hybrid approach for checking.  Previously, one
-      of two ways was used, depending on a build-time flag.  The fast method
-      relied on assumptions about the OS and libc malloc, which were known to
-      not hold for the BSD-derived platforms, and discovered to not hold for
-      32-bit Linux either.  In fact the glibc documentation describes cases
-      where these assumptions do not hold.  The new implementation tests for
-      the situation arising and actively switches over from fast to safe mode.
+JH/22 Taint-checking: move to safe-mode taint checking on all platforms.  The
+      previous fast-mode was untenable in the face of glibc using mmap to
+      support larger malloc requests.
 
 PP/01 Update the openssl_options possible values through OpenSSL 1.1.1c.
       New values supported, if defined on system where compiled:
diff --git a/src/src/functions.h b/src/src/functions.h
index 473fb8759..8b04d587d 100644
--- a/src/src/functions.h
+++ b/src/src/functions.h
@@ -620,10 +620,7 @@ return FALSE;
 
 #else
 extern BOOL is_tainted_fn(const void *);
-extern void * tainted_base, * tainted_top;
-
-return f.taint_check_slow
-  ? is_tainted_fn(p) : p >= tainted_base && p < tainted_top;
+return is_tainted_fn(p);
 #endif
 }
 
diff --git a/src/src/store.c b/src/src/store.c
index 3192b9774..8e776568a 100644
--- a/src/src/store.c
+++ b/src/src/store.c
@@ -126,13 +126,6 @@ static storeblock *current_block[NPOOLS];
 static void *next_yield[NPOOLS];
 static int yield_length[NPOOLS] = { -1, -1, -1, -1, -1, -1 };
 
-/* The limits of the tainted pools.  Tracking these on new allocations enables
-a fast is_tainted implementation.  We assume the kernel only allocates mmaps using
-one side or the other of data+heap, not both. */
-
-void * tainted_base = (void *)-1;
-void * tainted_top = (void *)0;
-
 /* pool_malloc holds the amount of memory used by the store pools; this goes up
 and down as store is reset or released. nonpool_malloc is the total got by
 malloc from other calls; this doesn't go down because it is just freed by
@@ -224,36 +217,6 @@ log_write(0, LOG_MAIN|LOG_PANIC_DIE, "Taint mismatch, %s: %s %d\n",
 	msg, func, line);
 }
 
-static void
-use_slow_taint_check(const uschar * why)
-{
-#ifndef COMPILE_UTILITY
-DEBUG(D_any)
-  debug_printf("switching to slow-mode taint checking (after %s) "
-	      "taint bounds %p %p\n", why, tainted_base, tainted_top);
-#endif
-f.taint_check_slow = TRUE;
-}
-
-/* If the creation of a new tainted region results in any of the
-untainted regions appearing to be tainted, using the fast-mode test,
-we need to switch to safe-but-slow mode. */
-
-static void
-verify_all_untainted(void)
-{
-for (int pool = 0; pool < POOL_TAINT_BASE; pool++)
-  for (storeblock * b = chainbase[pool]; b; b = b->next)
-    {
-    uschar * bc = US b + ALIGNED_SIZEOF_STOREBLOCK;
-    if (is_tainted(bc))
-      {
-      use_slow_taint_check(US"mmap");
-      return;
-      }
-    }
-}
-
 
 
 /*************************************************
@@ -845,10 +808,6 @@ if (!(yield = mmap(NULL, (size_t)size,
   log_write(0, LOG_MAIN|LOG_PANIC_DIE, "failed to mmap %d bytes of memory: "
     "called from line %d of %s", size, line, func);
 
-if (yield < tainted_base) tainted_base = yield;
-if ((top = US yield + size) > tainted_top) tainted_top = top;
-if (!f.taint_check_slow) verify_all_untainted();
-
 return store_alloc_tail(yield, size, func, line, US"Mmap");
 }
 
@@ -879,14 +838,6 @@ if (!(yield = malloc((size_t)size)))
   log_write(0, LOG_MAIN|LOG_PANIC_DIE, "failed to malloc %d bytes of memory: "
     "called from line %d in %s", size, linenumber, func);
 
-/* If malloc ever returns apparently tainted memory, which glibc
-malloc will as it uses mmap for larger requests, we must switch to
-the slower checking for tainting (checking an address against all
-the tainted pool block spans, rather than just the mmap span) */
-
-if (!f.taint_check_slow && is_tainted(yield))
-  use_slow_taint_check(US"malloc");
-
 return store_alloc_tail(yield, size, func, linenumber, US"Malloc");
 }
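
As background for the change above, a minimal sketch (not Exim code) of what
safe-mode taint checking amounts to: instead of comparing an address against a
single [tainted_base, tainted_top) range, the pointer is tested against every
block of the tainted store pools, which is roughly what is_tainted_fn() in
src/src/store.c does. The names taint_block, tainted_chain, TAINT_POOLS and
is_tainted_slow below are hypothetical stand-ins, not Exim identifiers.

#include <stdbool.h>
#include <stddef.h>

/* Hypothetical stand-in for a store-pool block: a header followed by
"length" usable bytes.  Not Exim's storeblock. */
typedef struct taint_block {
  struct taint_block * next;     /* next block in the same pool chain */
  size_t               length;   /* usable bytes following the header */
} taint_block;

#define TAINT_POOLS 3                              /* illustrative pool count */
static taint_block * tainted_chain[TAINT_POOLS];   /* one chain per tainted pool */

/* Safe-mode check: p counts as tainted only if it lies inside some block of a
tainted pool.  Cost is linear in the number of tainted blocks, but it makes no
assumption about where malloc or mmap place their memory, which is exactly the
assumption that mmap-backed large malloc requests break. */
bool
is_tainted_slow(const void * p)
{
for (int pool = 0; pool < TAINT_POOLS; pool++)
  for (taint_block * b = tainted_chain[pool]; b; b = b->next)
    {
    const char * start = (const char *)(b + 1);    /* data follows the header */
    if ((const char *)p >= start && (const char *)p < start + b->length)
      return true;
    }
return false;
}

The trade-off is the expected one: classifying an address now walks the tainted
block chains rather than doing two pointer comparisons, which is why the fast
mode existed in the first place.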