Modified: trunk/Source/WTF/wtf/FastMalloc.cpp (143995 => 143996)
--- trunk/Source/WTF/wtf/FastMalloc.cpp 2013-02-26 02:10:53 UTC (rev 143995)
+++ trunk/Source/WTF/wtf/FastMalloc.cpp 2013-02-26 02:13:31 UTC (rev 143996)
@@ -512,19 +512,6 @@
#define MESSAGE LOG_ERROR
#define CHECK_CONDITION ASSERT
-#if ENABLE(TCMALLOC_HARDENING)
-/*
- * To make use-after-free bugs harder to exploit, we mask the
- * addresses we put into our linked lists with the address of
- * kLLHardeningMask. Due to ASLR, the address of kLLHardeningMask
- * should be sufficiently randomized to make direct freelist
- * manipulation much more difficult.
- */
-static const char kLLHardeningMask = 0;
-enum {
- MaskKeyShift = 13
-};
-
template <unsigned> struct EntropySource;
template <> struct EntropySource<4> {
static uint32_t value()
@@ -544,9 +531,22 @@
}
};
+#if ENABLE(TCMALLOC_HARDENING)
+/*
+ * To make use-after-free bugs harder to exploit, we mask the
+ * addresses we put into our linked lists with the address of
+ * kLLHardeningMask. Due to ASLR, the address of kLLHardeningMask
+ * should be sufficiently randomized to make direct freelist
+ * manipulation much more difficult.
+ */
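+/*
+ * A rough sketch of the scheme (the actual helper is the
+ * XOR_MASK_PTR_WITH_KEY macro defined later in this file):
+ *   stored = ptr ^ rotate(uintptr_t(&kLLHardeningMask), MaskKeyShift) ^ entropy
+ */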
+static const char kLLHardeningMask = 0;
+enum {
+ MaskKeyShift = 13
+};
+
static ALWAYS_INLINE uintptr_t internalEntropyValue()
{
- static uintptr_t value = EntropySource<sizeof(uintptr_t)>::value();
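+ // Forcing the low bit on guarantees a nonzero value even if EntropySource
+ // happens to return 0, so the ASSERT below always holds.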
+ static uintptr_t value = EntropySource<sizeof(uintptr_t)>::value() | 1;
ASSERT(value);
return value;
}
@@ -573,23 +573,27 @@
#define PTR_TO_UINT32(ptr) static_cast<uint32_t>(reinterpret_cast<uintptr_t>(ptr))
#define END_POISON_INDEX(allocationSize) (((allocationSize) - sizeof(uint32_t)) / sizeof(uint32_t))
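+// The 0xbadbeefN poison constants below are all odd, so they can never be
+// mistaken for aligned object pointers, and each poisoning site writes a
+// distinct value, so a crash dump can show which path last wrote the memory.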
#define POISON_ALLOCATION(allocation, allocationSize) do { \
- reinterpret_cast<uint32_t*>(allocation)[0] = 1; \
- reinterpret_cast<uint32_t*>(allocation)[1] = 1; \
- if (allocationSize < 4 * sizeof(uint32_t)) \
+ ASSERT((allocationSize) >= 2 * sizeof(uint32_t)); \
+ reinterpret_cast<uint32_t*>(allocation)[0] = 0xbadbeef1; \
+ reinterpret_cast<uint32_t*>(allocation)[1] = 0xbadbeef3; \
+ if ((allocationSize) < 4 * sizeof(uint32_t)) \
break; \
- reinterpret_cast<uint32_t*>(allocation)[2] = 1; \
- reinterpret_cast<uint32_t*>(allocation)[END_POISON_INDEX(allocationSize)] = 1; \
+ reinterpret_cast<uint32_t*>(allocation)[2] = 0xbadbeef5; \
+ reinterpret_cast<uint32_t*>(allocation)[END_POISON_INDEX(allocationSize)] = 0xbadbeef7; \
} while (false)
#define POISON_DEALLOCATION_EXPLICIT(allocation, allocationSize, startPoison, endPoison) do { \
- if (allocationSize < 4 * sizeof(uint32_t)) \
+ ASSERT((allocationSize) >= 2 * sizeof(uint32_t)); \
+ reinterpret_cast<uint32_t*>(allocation)[0] = 0xbadbeef9; \
+ reinterpret_cast<uint32_t*>(allocation)[1] = 0xbadbeefb; \
+ if ((allocationSize) < 4 * sizeof(uint32_t)) \
break; \
reinterpret_cast<uint32_t*>(allocation)[2] = (startPoison) ^ PTR_TO_UINT32(allocation); \
reinterpret_cast<uint32_t*>(allocation)[END_POISON_INDEX(allocationSize)] = (endPoison) ^ PTR_TO_UINT32(allocation); \
} while (false)
#define POISON_DEALLOCATION(allocation, allocationSize) \
- POISON_DEALLOCATION_EXPLICIT(allocation, allocationSize, freedObjectStartPoison(), freedObjectEndPoison())
+ POISON_DEALLOCATION_EXPLICIT(allocation, (allocationSize), freedObjectStartPoison(), freedObjectEndPoison())
#define MAY_BE_POISONED(allocation, allocationSize) (((allocationSize) >= 4 * sizeof(uint32_t)) && ( \
(reinterpret_cast<uint32_t*>(allocation)[2] == (freedObjectStartPoison() ^ PTR_TO_UINT32(allocation))) || \
@@ -1150,6 +1154,20 @@
}
}
+enum {
+ kSpanCookieBits = 10,
+ kSpanCookieMask = (1 << kSpanCookieBits) - 1,
+ kSpanThisShift = 7
+};
+
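+// A process-global random cookie used to validate Span metadata. Note that
+// spanInitializerCookie() fills in spanValidationCookie as a side effect, so
+// validation only becomes meaningful once the first Span has been created.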
+static uint32_t spanValidationCookie;
+static uint32_t spanInitializerCookie()
+{
+ static uint32_t value = EntropySource<sizeof(uint32_t)>::value() & kSpanCookieMask;
+ spanValidationCookie = value;
+ return value;
+}
+
// Information kept for a span (a contiguous run of pages).
struct Span {
PageID start; // Starting page number
@@ -1172,6 +1190,17 @@
unsigned int sizeclass : 8; // Size-class for small objects (or 0)
unsigned int refcount : 11; // Number of non-free objects
bool decommitted : 1;
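+ // The cookie mixes this Span's own address with the global random cookie,
+ // so isValid() only holds for a Span whose initCookie() ran at this address.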
+ void initCookie()
+ {
+ m_cookie = ((reinterpret_cast<uintptr_t>(this) >> kSpanThisShift) & kSpanCookieMask) ^ spanInitializerCookie();
+ }
+ void clearCookie() { m_cookie = 0; }
+ bool isValid() const
+ {
+ return (((reinterpret_cast<uintptr_t>(this) >> kSpanThisShift) & kSpanCookieMask) ^ m_cookie) == spanValidationCookie;
+ }
+private:
+ uint32_t m_cookie : kSpanCookieBits;
#undef SPAN_HISTORY
#ifdef SPAN_HISTORY
@@ -1202,6 +1231,7 @@
memset(result, 0, sizeof(*result));
result->start = p;
result->length = len;
+ result->initCookie();
#ifdef SPAN_HISTORY
result->nexthistory = 0;
#endif
@@ -1209,10 +1239,12 @@
}
static inline void DeleteSpan(Span* span) {
+ RELEASE_ASSERT(span->isValid());
#ifndef NDEBUG
// In debug mode, trash the contents of deleted Spans
memset(span, 0x3f, sizeof(*span));
#endif
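+ // Clearing the cookie makes any later use of this freed Span, including a
+ // second DeleteSpan(), fail isValid() even in release builds where the
+ // memset above is compiled out.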
+ span->clearCookie();
span_allocator.Delete(span);
}
@@ -2587,10 +2619,12 @@
// Walks the linked list to verify that it can be
// traversed safely, and ensures that 'missing'
// is not present
- NEVER_INLINE void Validate(HardenedSLL missing) {
+ NEVER_INLINE void Validate(HardenedSLL missing, size_t size) {
HardenedSLL node = list_;
+ UNUSED_PARAM(size);
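+ // 'size' is only inspected by IS_DEFINITELY_POISONED below; UNUSED_PARAM
+ // keeps builds without TCMALLOC_HARDENING, where the macro ignores its
+ // arguments, free of unused-parameter warnings.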
while (node) {
RELEASE_ASSERT(node != missing);
+ RELEASE_ASSERT(IS_DEFINITELY_POISONED(node.value(), size));
node = SLL_Next(node, entropy_);
}
}
@@ -3121,7 +3155,15 @@
}
ASSERT(ptr == start);
ASSERT(ptr == head.value());
- POISON_DEALLOCATION_EXPLICIT(ptr, size, startPoison, endPoison);
+#ifndef NDEBUG
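+ // Debug-only check: re-walk the freelist just threaded through this span
+ // and verify that every node still carries the freed-object poison.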
+ {
+ HardenedSLL node = head;
+ while (node) {
+ ASSERT(IS_DEFINITELY_POISONED(node.value(), size));
+ node = SLL_Next(node, entropy_);
+ }
+ }
+#endif
span->objects = head;
ASSERT(span->objects.value() == head.value());
span->refcount = 0; // No sub-object in use yet
@@ -3200,7 +3242,7 @@
size_ += allocationSize;
FreeList* list = &list_[cl];
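+ // A block that already carries free-poison may be a double free: Validate()
+ // checks that it is not already on the list and that every existing entry is
+ // still fully poisoned.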
if (MAY_BE_POISONED(ptr.value(), allocationSize))
- list->Validate(ptr);
+ list->Validate(ptr, allocationSize);
POISON_DEALLOCATION(ptr.value(), allocationSize);
list->Push(ptr);
@@ -3954,6 +3996,7 @@
if (cl == 0) {
span = pageheap->GetDescriptor(p);
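+ // Check the span cookie before trusting the descriptor's sizeclass;
+ // a stale or forged Span fails here even in release builds.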
+ RELEASE_ASSERT(span->isValid());
cl = span->sizeclass;
pageheap->CacheSizeClass(p, cl);
}
@@ -3966,8 +4009,7 @@
heap->Deallocate(HardenedSLL::create(ptr), cl);
} else {
// Delete directly into central cache
- size_t allocationSize = ByteSizeForClass(cl);
- POISON_DEALLOCATION(ptr, allocationSize);
+ POISON_DEALLOCATION(ptr, ByteSizeForClass(cl));
SLL_SetNext(HardenedSLL::create(ptr), HardenedSLL::null(), central_cache[cl].entropy());
central_cache[cl].InsertRange(HardenedSLL::create(ptr), HardenedSLL::create(ptr), 1);
}