Changeset 143488 in webkit
- Timestamp: Feb 20, 2013 1:09:16 PM (11 years ago)
- Location: trunk/Source/WTF
- Files: 2 edited
Legend:
- Unmodified
- Added
- Removed
trunk/Source/WTF/ChangeLog
r143424 r143488 1 2013-02-20 Oliver Hunt <oliver@apple.com> 2 3 Moar hardening 4 https://bugs.webkit.org/show_bug.cgi?id=110275 5 6 Reviewed by Mark Hahnenberg. 7 8 We now poison objects when they get freed, and verify that 9 any object that is being freed is not poisoned. If the 10 object looks like it's poisoned we validate the freelist, 11 and ensure the object is not already present. If it is 12 we crash. 13 14 On allocation, we ensure that the object being allocated 15 is poisoned, then clear the poisoning fields. 16 17 * wtf/FastMalloc.cpp: 18 (WTF::internalEntropyValue): 19 (WTF): 20 (WTF::freedObjectStartPoison): 21 (WTF::freedObjectEndPoison): 22 (TCMalloc_ThreadCache_FreeList): 23 (WTF::TCMalloc_ThreadCache_FreeList::Validate): 24 (WTF::TCMalloc_Central_FreeList::Populate): 25 (WTF::TCMalloc_ThreadCache::Allocate): 26 (WTF::TCMalloc_ThreadCache::Deallocate): 27 (WTF::TCMalloc_ThreadCache::CreateCacheIfNecessary): 28 1 29 2013-02-19 Sheriff Bot <webkit.review.bot@gmail.com> 2 30 -
trunk/Source/WTF/wtf/FastMalloc.cpp
r143424 r143488 545 545 }; 546 546 547 static ALWAYS_INLINE uintptr_t internalEntropyValue() { 547 static ALWAYS_INLINE uintptr_t internalEntropyValue() 548 { 548 549 static uintptr_t value = EntropySource<sizeof(uintptr_t)>::value(); 549 550 ASSERT(value); … … 555 556 #define XOR_MASK_PTR_WITH_KEY(ptr, key, entropy) (reinterpret_cast<typeof(ptr)>(reinterpret_cast<uintptr_t>(ptr)^(ROTATE_VALUE(reinterpret_cast<uintptr_t>(key), MaskKeyShift)^entropy))) 556 557 558 559 static ALWAYS_INLINE uint32_t freedObjectStartPoison() 560 { 561 static uint32_t value = EntropySource<sizeof(uint32_t)>::value() | 1; 562 ASSERT(value); 563 return value; 564 } 565 566 static ALWAYS_INLINE uint32_t freedObjectEndPoison() 567 { 568 static uint32_t value = EntropySource<sizeof(uint32_t)>::value() | 1; 569 ASSERT(value); 570 return value; 571 } 572 573 #define PTR_TO_UINT32(ptr) static_cast<uint32_t>(reinterpret_cast<uintptr_t>(ptr)) 574 #define END_POISON_INDEX(allocationSize) (((allocationSize) - sizeof(uint32_t)) / sizeof(uint32_t)) 575 #define POISON_ALLOCATION(allocation, allocationSize) do { \ 576 reinterpret_cast<uint32_t*>(allocation)[0] = 1; \ 577 reinterpret_cast<uint32_t*>(allocation)[1] = 1; \ 578 if (allocationSize < 4 * sizeof(uint32_t)) \ 579 break; \ 580 reinterpret_cast<uint32_t*>(allocation)[2] = 1; \ 581 reinterpret_cast<uint32_t*>(allocation)[END_POISON_INDEX(allocationSize)] = 1; \ 582 } while (false); 583 584 #define POISON_DEALLOCATION_EXPLICIT(allocation, allocationSize, startPoison, endPoison) do { \ 585 if (allocationSize < 4 * sizeof(uint32_t)) \ 586 break; \ 587 reinterpret_cast<uint32_t*>(allocation)[2] = (startPoison) ^ PTR_TO_UINT32(allocation); \ 588 reinterpret_cast<uint32_t*>(allocation)[END_POISON_INDEX(allocationSize)] = (endPoison) ^ PTR_TO_UINT32(allocation); \ 589 } while (false) 590 591 #define POISON_DEALLOCATION(allocation, allocationSize) \ 592 POISON_DEALLOCATION_EXPLICIT(allocation, allocationSize, freedObjectStartPoison(), 
freedObjectEndPoison()) 593 594 #define MAY_BE_POISONED(allocation, allocationSize) (((allocationSize) >= 4 * sizeof(uint32_t)) && ( \ 595 (reinterpret_cast<uint32_t*>(allocation)[2] == (freedObjectStartPoison() ^ PTR_TO_UINT32(allocation))) || \ 596 (reinterpret_cast<uint32_t*>(allocation)[END_POISON_INDEX(allocationSize)] == (freedObjectEndPoison() ^ PTR_TO_UINT32(allocation))) \ 597 )) 598 599 #define IS_DEFINITELY_POISONED(allocation, allocationSize) (((allocationSize) < 4 * sizeof(uint32_t)) || ( \ 600 (reinterpret_cast<uint32_t*>(allocation)[2] == (freedObjectStartPoison() ^ PTR_TO_UINT32(allocation))) && \ 601 (reinterpret_cast<uint32_t*>(allocation)[END_POISON_INDEX(allocationSize)] == (freedObjectEndPoison() ^ PTR_TO_UINT32(allocation))) \ 602 )) 603 557 604 #else 605 606 #define POISON_ALLOCATION(allocation, allocationSize) 607 #define POISON_DEALLOCATION(allocation, allocationSize) 608 #define POISON_DEALLOCATION_EXPLICIT(allocation, allocationSize, startPoison, endPoison) 609 #define MAY_BE_POISONED(allocation, allocationSize) (false) 610 #define IS_DEFINITELY_POISONED(allocation, allocationSize) (true) 558 611 #define XOR_MASK_PTR_WITH_KEY(ptr, key, entropy) (((void)entropy), ((void)key), ptr) 612 559 613 #define HARDENING_ENTROPY 0 560 #endif 561 614 615 #endif 562 616 563 617 //------------------------------------------------------------------- … … 2531 2585 } 2532 2586 2587 // Runs through the linked list to ensure that 2588 // we can do that, and ensures that 'missing' 2589 // is not present 2590 NEVER_INLINE void Validate(HardenedSLL missing) { 2591 HardenedSLL node = list_; 2592 while (node) { 2593 RELEASE_ASSERT(node != missing); 2594 node = SLL_Next(node, entropy_); 2595 } 2596 } 2597 2533 2598 #ifdef WTF_CHANGES 2534 2599 template <class Finder, class Reader> … … 3042 3107 char* ptr = start + (npages << kPageShift) - ((npages << kPageShift) % size); 3043 3108 int num = 0; 3109 #if ENABLE(TCMALLOC_HARDENING) 3110 uint32_t startPoison = 
freedObjectStartPoison(); 3111 uint32_t endPoison = freedObjectEndPoison(); 3112 #endif 3113 3044 3114 while (ptr > start) { 3045 3115 ptr -= size; 3046 3116 HardenedSLL node = HardenedSLL::create(ptr); 3117 POISON_DEALLOCATION_EXPLICIT(ptr, size, startPoison, endPoison); 3047 3118 SLL_SetNext(node, head, entropy_); 3048 3119 head = node; … … 3051 3122 ASSERT(ptr == start); 3052 3123 ASSERT(ptr == head.value()); 3124 POISON_DEALLOCATION_EXPLICIT(ptr, size, startPoison, endPoison); 3053 3125 span->objects = head; 3054 3126 ASSERT(span->objects.value() == head.value()); … … 3116 3188 } 3117 3189 size_ -= allocationSize; 3118 return list->Pop(); 3190 void* result = list->Pop(); 3191 if (!result) 3192 return 0; 3193 RELEASE_ASSERT(IS_DEFINITELY_POISONED(result, allocationSize)); 3194 POISON_ALLOCATION(result, allocationSize); 3195 return result; 3119 3196 } 3120 3197 3121 3198 inline void TCMalloc_ThreadCache::Deallocate(HardenedSLL ptr, size_t cl) { 3122 size_ += ByteSizeForClass(cl); 3199 size_t allocationSize = ByteSizeForClass(cl); 3200 size_ += allocationSize; 3123 3201 FreeList* list = &list_[cl]; 3202 if (MAY_BE_POISONED(ptr.value(), allocationSize)) 3203 list->Validate(ptr); 3204 3205 POISON_DEALLOCATION(ptr.value(), allocationSize); 3124 3206 list->Push(ptr); 3125 3207 // If enough data is free, put back into central cache … … 3816 3898 ASSERT_SPAN_COMMITTED(span); 3817 3899 pageheap->CacheSizeClass(span->start, 0); 3818 return 3819 CheckedMallocResult(reinterpret_cast<void*>(span->start << kPageShift)); 3900 void* result = reinterpret_cast<void*>(span->start << kPageShift); 3901 POISON_ALLOCATION(result, span->length << kPageShift); 3902 return CheckedMallocResult(result); 3820 3903 } 3821 3904 … … 3884 3967 } else { 3885 3968 // Delete directly into central cache 3969 size_t allocationSize = ByteSizeForClass(cl); 3970 POISON_DEALLOCATION(ptr, allocationSize); 3886 3971 SLL_SetNext(HardenedSLL::create(ptr), HardenedSLL::null(), central_cache[cl].entropy()); 
3887 3972 central_cache[cl].InsertRange(HardenedSLL::create(ptr), HardenedSLL::create(ptr), 1); … … 3898 3983 } 3899 3984 #endif 3985 3986 POISON_DEALLOCATION(ptr, span->length << kPageShift); 3900 3987 pageheap->Delete(span); 3901 3988 }
Note: See TracChangeset for help on using the changeset viewer.