string_copy_taint_trc((s), tainted, __FUNCTION__, __LINE__)
static inline uschar *
-string_copy(const uschar * s)
+string_copy_trc(const uschar * s, const char * func, int line)
{
-return string_copy_taint((s), is_tainted(s));
+return string_copy_taint_trc((s), is_tainted(s), func, line);
}
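+
+/* The macro below passes the caller's __FUNCTION__ and __LINE__ through to
+string_copy_trc(), so the trace arguments identify the call site rather than
+this wrapper. */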
+#define string_copy(s) \
+ string_copy_trc((s), __FUNCTION__, __LINE__)
+
/*************************************************
* Copy, lowercase and save string *
is_tainted_fn(const void * p)
{
storeblock * b;
-int pool;
-for (pool = POOL_TAINT_BASE; pool < nelem(chainbase); pool++)
+for (int pool = POOL_TAINT_BASE; pool < nelem(chainbase); pool++)
if ((b = current_block[pool]))
{
uschar * bc = US b + ALIGNED_SIZEOF_STOREBLOCK;
if (US p >= bc && US p <= bc + b->length) return TRUE;
}
-for (pool = POOL_TAINT_BASE; pool < nelem(chainbase); pool++)
+for (int pool = POOL_TAINT_BASE; pool < nelem(chainbase); pool++)
for (b = chainbase[pool]; b; b = b->next)
{
uschar * bc = US b + ALIGNED_SIZEOF_STOREBLOCK;
static void
use_slow_taint_check(void)
{
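+/* Omit the debug output in COMPILE_UTILITY builds */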
+#ifndef COMPILE_UTILITY
DEBUG(D_any) debug_printf("switching to slow-mode taint checking\n");
+#endif
f.taint_check_slow = TRUE;
}
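+
+/* Check that every block in the untainted pools is indeed classified as
+untainted by the taint test.  If any block is (mis)classified as tainted,
+the quick check cannot be trusted on this platform, so switch to slow-mode
+taint checking. */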
+static void
+verify_all_untainted(void)
+{
+for (int pool = 0; pool < POOL_TAINT_BASE; pool++)
+ for (storeblock * b = chainbase[pool]; b; b = b->next)
+ {
+ uschar * bc = US b + ALIGNED_SIZEOF_STOREBLOCK;
+ if (is_tainted(bc))
+ {
+ use_slow_taint_check();
+ return;
+ }
+ }
+}
+
+
/*************************************************
* Get a block from the current pool *
BOOL release_ok = !tainted && store_last_get[pool] == block;
uschar * newtext;
-#ifndef MACRO_PREDEF
+#if !defined(MACRO_PREDEF) && !defined(COMPILE_UTILITY)
if (is_tainted(block) != tainted)
die_tainted(US"store_newblock", CUS func, linenumber);
#endif
if (yield < tainted_base) tainted_base = yield;
if ((top = US yield + size) > tainted_top) tainted_top = top;
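+
+/* An mmap'd block can lie anywhere relative to other allocations, so the
+tainted_base/tainted_top bounds check is no longer reliable on its own:
+make sure slow-mode taint checking is in use from here on. */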
+if (!f.taint_check_slow) use_slow_taint_check();
return store_alloc_tail(yield, size, func, line, US"Mmap");
}