recopy a string being built into a tainted allocation if it meets a %s for a
tainted argument. Any intermediate-layer function that (can) return a new
allocation should behave this way, returning a tainted result if any tainted
- content is used. Users of functions that modify existing allocations should
- check if a tainted source and an untainted destination is used, and fail instead
- (sprintf() being the classic case).
+ content is used. Intermediate-layer functions (e.g. Ustrncpy) that modify
+ existing allocations fail if tainted data is written into an untainted area.
+ Users of functions that modify existing allocations should check if a tainted
+ source and an untainted destination is used, and fail instead (sprintf() being
+ the classic case).
*/
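/* An illustrative sketch (not part of this change) of the caller-side rule
described above: before copying a possibly-tainted string into a fixed,
untainted buffer, check the source and fail hard rather than let sprintf()
write tainted data into untainted store.  "taint_safe_copy" and "src" are
hypothetical names; is_tainted() and die_tainted() are the checks used
elsewhere in this file. */

static void
taint_safe_copy(const uschar * src, const uschar * func, int line)
{
uschar buf[256];                      /* untainted, fixed-size destination */
if (is_tainted(src))                  /* tainted source: refuse to copy */
  die_tainted(US"sprintf into untainted buffer", func, line);
sprintf(CS buf, "%.255s", CS src);    /* safe: both sides untainted */
}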
/******************************************************************************/
-#ifndef TAINT_CHECK_FAST
/* Test if a pointer refers to tainted memory.
Slower version of the check, for use when the platform intermixes malloc and
mmap areas.  Returns TRUE iff the pointer lies within a tainted-pool block. */

BOOL
is_tainted_fn(const void * p)
{
storeblock * b;
-int pool;
-for (pool = POOL_TAINT_BASE; pool < nelem(chainbase); pool++)
+for (int pool = POOL_TAINT_BASE; pool < nelem(chainbase); pool++)
if ((b = current_block[pool]))
{
- char * bc = CS b + ALIGNED_SIZEOF_STOREBLOCK;
- if (CS p >= bc && CS p <= bc + b->length) return TRUE;
+ uschar * bc = US b + ALIGNED_SIZEOF_STOREBLOCK;
+ if (US p >= bc && US p <= bc + b->length) return TRUE;
}
-for (pool = POOL_TAINT_BASE; pool < nelem(chainbase); pool++)
+for (int pool = POOL_TAINT_BASE; pool < nelem(chainbase); pool++)
for (b = chainbase[pool]; b; b = b->next)
{
- char * bc = CS b + ALIGNED_SIZEOF_STOREBLOCK;
- if (CS p >= bc && CS p <= bc + b->length) return TRUE;
+ uschar * bc = US b + ALIGNED_SIZEOF_STOREBLOCK;
+ if (US p >= bc && US p <= bc + b->length) return TRUE;
}
return FALSE;
}
-#endif
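/* For contrast, a minimal sketch (not part of this change) of the fast-mode
test this slow scan backs up, assuming, as the mmap path below suggests, that
fast mode means "all tainted store lies in the single span from tainted_base
up to tainted_top".  The real fast-path check lives outside this hunk, and
"is_tainted_sketch" is a hypothetical name. */

static BOOL
is_tainted_sketch(const void * p)
{
if (!f.taint_check_slow)              /* fast mode: one span compare */
  return US p >= tainted_base && US p < tainted_top;
return is_tainted_fn(p);              /* slow mode: scan the tainted pools */
}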
void
die_tainted(const uschar * msg, const uschar * func, int line)
{
log_write(0, LOG_MAIN|LOG_PANIC_DIE, "Taint mismatch, %s: %s %d\n",
  msg, func, line);
}
+static void
+use_slow_taint_check(void)
+{
+#ifndef COMPILE_UTILITY
+DEBUG(D_any) debug_printf("switching to slow-mode taint checking\n");
+#endif
+f.taint_check_slow = TRUE;
+}
+
+static void
+verify_all_untainted(void)
+{
+for (int pool = 0; pool < POOL_TAINT_BASE; pool++)
+ for (storeblock * b = chainbase[pool]; b; b = b->next)
+ {
+ uschar * bc = US b + ALIGNED_SIZEOF_STOREBLOCK;
+ if (is_tainted(bc))
+ {
+ use_slow_taint_check();
+ return;
+ }
+ }
+}
+
+
/*************************************************
* Get a block from the current pool *
BOOL release_ok = !tainted && store_last_get[pool] == block;
uschar * newtext;
-#ifndef MACRO_PREDEF
+#if !defined(MACRO_PREDEF) && !defined(COMPILE_UTILITY)
if (is_tainted(block) != tainted)
die_tainted(US"store_newblock", CUS func, linenumber);
#endif
if (yield < tainted_base) tainted_base = yield;
if ((top = US yield + size) > tainted_top) tainted_top = top;
+if (!f.taint_check_slow) use_slow_taint_check();
return store_alloc_tail(yield, size, func, line, US"Mmap");
}
log_write(0, LOG_MAIN|LOG_PANIC_DIE, "failed to malloc %d bytes of memory: "
"called from line %d in %s", size, linenumber, func);
+/* If malloc ever returns apparently tainted memory, which glibc
+malloc will as it uses mmap for larger requests, we must switch to
+the slower checking for tainting (checking an address against all
+the tainted pool block spans, rather than just the mmap span) */
+
+if (!f.taint_check_slow && is_tainted(yield))
+ use_slow_taint_check();
+
return store_alloc_tail(yield, size, func, linenumber, US"Malloc");
}
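/* Context for the switch above, as a self-contained sketch (not Exim code):
glibc serves allocation requests above its mmap threshold, 128 KiB by default
and tunable with mallopt(M_MMAP_THRESHOLD, ...), directly from mmap(), so a
large malloc() result can land in the same address region as the tainted mmap
pools and trip the single-span fast check. */

#include <stdlib.h>
#include <malloc.h>           /* glibc-specific: mallopt(), M_MMAP_THRESHOLD */

int main(void)
{
mallopt(M_MMAP_THRESHOLD, 128 * 1024);   /* glibc's initial default value */
void * heap_chunk = malloc(64);          /* normally satisfied from the heap */
void * mmap_chunk = malloc(256 * 1024);  /* above threshold: a fresh mmap() */
free(mmap_chunk);                        /* unmapped, returned to the kernel */
free(heap_chunk);
return 0;
}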