1/*
2 * Copyright (C) 2011-2018 Apple Inc. All rights reserved.
3 *
4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions
6 * are met:
7 *
8 * 1. Redistributions of source code must retain the above copyright
9 * notice, this list of conditions and the following disclaimer.
10 * 2. Redistributions in binary form must reproduce the above copyright
11 * notice, this list of conditions and the following disclaimer in the
12 * documentation and/or other materials provided with the distribution.
13 * 3. Neither the name of Apple Inc. ("Apple") nor the names of
14 * its contributors may be used to endorse or promote products derived
15 * from this software without specific prior written permission.
16 *
17 * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
18 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
19 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
20 * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
21 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
22 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
23 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
24 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
26 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 */
28
29#include "config.h"
30#include <stdarg.h>
31#include <wtf/MetaAllocator.h>
32#include <wtf/Vector.h>
33
34#if OS(WINDOWS)
35#undef small
36#endif
37
38namespace TestWebKitAPI {
39
40using namespace WTF;
41
// Test fixture that wraps a MetaAllocator over a *fake* address range (no
// memory is actually mapped) and mirrors the allocator's expected state in
// two bool vectors: one flag per byte ("allocated?") and one flag per page
// ("committed?"). Every helper cross-checks the allocator against this mirror.
class MetaAllocatorTest: public testing::Test {
public:
    // Whether helpers should run sanityCheck() after each alloc/free. Tests
    // that perform very many operations skip it, since it scans the whole heap.
    enum SanityCheckMode { RunSanityCheck, DontRunSanityCheck };

    // Controls how SimpleTestAllocator::allocateNewSpace() answers a request
    // for more memory: refuse, grant pages adjacent to the existing heap
    // (coalescible), or grant pages one page beyond it (non-coalescible).
    enum HeapGrowthMode { DontGrowHeap, ForTestDemandAllocCoalesce, ForTestDemandAllocDontCoalesce };

    HeapGrowthMode currentHeapGrowthMode;
    size_t allowAllocatePages; // Page budget for the next demand-alloc; 0 means growth is disallowed.
    size_t requestedNumPages; // Records how many pages the allocator actually asked for.

    // MetaAllocator subclass that hands out addresses starting at
    // basePage * pageSize() and reports page transitions back to the parent
    // fixture's mirror instead of touching real memory.
    class SimpleTestAllocator: public MetaAllocator {
    public:
        SimpleTestAllocator(MetaAllocatorTest* parent)
            : MetaAllocator(32) // 32 is the allocation granularity passed to MetaAllocator.
            , m_parent(parent)
        {
            // Seed the allocator with the fixture's default fake heap range.
            addFreshFreeSpace(reinterpret_cast<void*>(basePage * pageSize()), defaultPagesInHeap * pageSize());
        }

        virtual ~SimpleTestAllocator()
        {
            // Must be destroyed exactly once; TearDown() asserts this flag.
            EXPECT_TRUE(!m_parent->allocatorDestroyed);
            m_parent->allocatorDestroyed = true;
        }

        virtual FreeSpacePtr allocateNewSpace(size_t& numPages)
        {
            switch (m_parent->currentHeapGrowthMode) {
            case DontGrowHeap:
                return nullptr;

            case ForTestDemandAllocCoalesce:
            case ForTestDemandAllocDontCoalesce: {
                // The test must have pre-authorized at least this many pages.
                EXPECT_TRUE(m_parent->allowAllocatePages);
                EXPECT_TRUE(m_parent->allowAllocatePages >= numPages);
                m_parent->requestedNumPages = numPages;
                numPages = m_parent->allowAllocatePages;

                // In don't-coalesce mode, skip one page so the new space is
                // not contiguous with the existing heap.
                unsigned offset;
                if (m_parent->currentHeapGrowthMode == ForTestDemandAllocCoalesce)
                    offset = 0;
                else
                    offset = 1;

                void* result = reinterpret_cast<void*>((basePage + defaultPagesInHeap + offset) * pageSize());

                // Growth is single-shot: reset so any further growth request
                // falls into the DontGrowHeap case (and TearDown's checks).
                m_parent->allowAllocatePages = 0;
                m_parent->currentHeapGrowthMode = DontGrowHeap;

                // Extend the mirrored page/byte state to cover the new pages,
                // including the skipped gap page when offset == 1.
                for (size_t counter = 0; counter < numPages + offset; ++counter) {
                    m_parent->pageMap->append(false);
                    for (unsigned byteCounter = 0; byteCounter < pageSize(); ++byteCounter)
                        m_parent->memoryMap->append(false);
                }

                m_parent->additionalPagesInHeap += numPages;

                return FreeSpacePtr(result);
            }

            default:
                CRASH();
                return nullptr;
            }
        }

        virtual void notifyNeedPage(void* page, size_t count)
        {
            // the page should be both free and unmapped.
            for (size_t i = 0; i < count; ++i)
                EXPECT_TRUE(!m_parent->pageState(reinterpret_cast<uintptr_t>(page) / pageSize() + i));
            for (uintptr_t address = reinterpret_cast<uintptr_t>(page); address < reinterpret_cast<uintptr_t>(page) + pageSize() * count; ++address)
                EXPECT_TRUE(!m_parent->byteState(reinterpret_cast<void*>(address)));
            // Record the pages as committed in the mirror.
            for (size_t i = 0; i < count; ++i)
                m_parent->pageState(reinterpret_cast<uintptr_t>(page) / pageSize() + i) = true;
        }

        virtual void notifyPageIsFree(void* page, size_t count)
        {
            // the page should be free of objects at this point, but it should still
            // be mapped.
            for (size_t i = 0; i < count; ++i)
                EXPECT_TRUE(m_parent->pageState(reinterpret_cast<uintptr_t>(page) / pageSize() + i));
            for (uintptr_t address = reinterpret_cast<uintptr_t>(page); address < reinterpret_cast<uintptr_t>(page) + pageSize() * count; ++address)
                EXPECT_TRUE(!m_parent->byteState(reinterpret_cast<void*>(address)));
            // Record the pages as decommitted in the mirror.
            for (size_t i = 0; i < count; ++i)
                m_parent->pageState(reinterpret_cast<uintptr_t>(page) / pageSize() + i) = false;
        }

    private:
        MetaAllocatorTest* m_parent;
    };

    // The fake heap starts one page above address zero and spans 100 pages.
    static const unsigned basePage = 1;
    static const unsigned defaultPagesInHeap = 100;

    unsigned additionalPagesInHeap; // Pages added later via demand-alloc growth.

    // Mirror state, indexed from basePage: one bool per byte and one per page.
    Vector<bool, 0>* memoryMap;
    Vector<bool, 0>* pageMap;
    bool allocatorDestroyed;

    SimpleTestAllocator* allocator;

    // Builds a fresh mirror and allocator before every test.
    virtual void SetUp()
    {
        memoryMap = new Vector<bool, 0>();
        pageMap = new Vector<bool, 0>();

        for (unsigned page = basePage; page < basePage + defaultPagesInHeap; ++page) {
            pageMap->append(false);
            for (unsigned byteInPage = 0; byteInPage < pageSize(); ++byteInPage)
                memoryMap->append(false);
        }

        allocatorDestroyed = false;

        currentHeapGrowthMode = DontGrowHeap;
        allowAllocatePages = 0;
        additionalPagesInHeap = 0;
        requestedNumPages = 0;

        allocator = new SimpleTestAllocator(this);
    }

    // Verifies the test left the heap fully free, then tears everything down.
    virtual void TearDown()
    {
        EXPECT_TRUE(currentHeapGrowthMode == DontGrowHeap);
        EXPECT_EQ(allowAllocatePages, static_cast<size_t>(0));
        EXPECT_EQ(requestedNumPages, static_cast<size_t>(0));

        // memory should be free.
        for (unsigned page = basePage; page < basePage + defaultPagesInHeap; ++page) {
            EXPECT_TRUE(!pageState(page));
            for (unsigned byteInPage = 0; byteInPage < pageSize(); ++byteInPage)
                EXPECT_TRUE(!byteState(page * pageSize() + byteInPage));
        }

        // NOTE: this automatically tests that the allocator did not leak
        // memory, so long as these tests are running with !defined(NDEBUG).
        // See MetaAllocator::m_mallocBalance.
        delete allocator;

        EXPECT_TRUE(allocatorDestroyed);

        delete memoryMap;
        delete pageMap;
    }

    // Allocates sizeInBytes from the allocator, marks the returned range as
    // allocated in the mirror, and checks its pages are committed. The handle
    // is deliberately leakRef()'d; free() below balances it with deref().
    MetaAllocatorHandle* allocate(size_t sizeInBytes, SanityCheckMode sanityCheckMode = RunSanityCheck)
    {
        MetaAllocatorHandle* handle = allocator->allocate(sizeInBytes, 0).leakRef();
        EXPECT_TRUE(handle);
        EXPECT_EQ(handle->sizeInBytes(), sizeInBytes);

        uintptr_t startByte = handle->start().untaggedPtr<uintptr_t>();
        uintptr_t endByte = handle->end().untaggedPtr<uintptr_t>();
        for (uintptr_t currentByte = startByte; currentByte < endByte; ++currentByte) {
            EXPECT_TRUE(!byteState(currentByte));
            byteState(currentByte) = true;
            EXPECT_TRUE(pageState(currentByte / pageSize()));
        }

        if (sanityCheckMode == RunSanityCheck)
            sanityCheck();

        return handle;
    }

    // Releases a handle obtained from allocate(): clears the mirror bytes,
    // then drops the ref that allocate() leaked.
    void free(MetaAllocatorHandle* handle, SanityCheckMode sanityCheckMode = RunSanityCheck)
    {
        EXPECT_TRUE(handle);

        notifyFree(handle->start().untaggedPtr(), handle->sizeInBytes());
        handle->deref();

        if (sanityCheckMode == RunSanityCheck)
            sanityCheck();
    }

    // Marks a byte range as no-longer-allocated in the mirror, asserting each
    // byte was previously allocated. Also used by testShrink() for the tail
    // a shrink gives back.
    void notifyFree(void* start, size_t sizeInBytes)
    {
        uintptr_t startByte = reinterpret_cast<uintptr_t>(start);
        uintptr_t endByte = startByte + sizeInBytes;
        for (uintptr_t currentByte = startByte; currentByte < endByte; ++currentByte) {
            EXPECT_TRUE(byteState(currentByte));
            byteState(currentByte) = false;
        }
    }

    // Cross-checks the allocator's accounting against the mirror.
    void sanityCheck()
    {
#ifndef NDEBUG
        EXPECT_EQ(allocator->bytesReserved() - allocator->bytesAllocated(), allocator->debugFreeSpaceSize());
#endif
        EXPECT_EQ(allocator->bytesReserved(), (defaultPagesInHeap + additionalPagesInHeap) * pageSize());
        EXPECT_EQ(allocator->bytesAllocated(), bytesAllocated());
        EXPECT_EQ(allocator->bytesCommitted(), bytesCommitted());
    }

    // Asserts that every byte covered by the handle is allocated in the mirror.
    void confirm(MetaAllocatorHandle* handle)
    {
        uintptr_t startByte = handle->start().untaggedPtr<uintptr_t>();
        confirm(startByte, startByte + handle->sizeInBytes(), true);
    }

    // Asserts that everything from the handle's end to the end of the default
    // heap is free, i.e. the handle is the highest allocation.
    void confirmHighWatermark(MetaAllocatorHandle* handle)
    {
        confirm(handle->end().untaggedPtr<uintptr_t>(), (basePage + defaultPagesInHeap) * pageSize(), false);
    }

    // Asserts every byte in [startByte, endByte) has the given allocation
    // state. For an allocated range, the covering pages must be committed;
    // for a free range, pages wholly inside it must be decommitted.
    void confirm(uintptr_t startByte, uintptr_t endByte, bool value)
    {
        for (uintptr_t currentByte = startByte; currentByte < endByte; ++currentByte) {
            EXPECT_EQ(byteState(currentByte), value);
            if (value)
                EXPECT_TRUE(pageState(currentByte / pageSize()));
        }
        if (!value) {
            // Only pages entirely within the free range are required to be
            // decommitted; partially-covered boundary pages may stay mapped.
            uintptr_t firstFreePage = (startByte + pageSize() - 1) / pageSize();
            uintptr_t lastFreePage = (endByte - pageSize()) / pageSize();
            for (uintptr_t currentPage = firstFreePage; currentPage <= lastFreePage; ++currentPage)
                EXPECT_TRUE(!pageState(currentPage));
        }
    }

    // Total allocated bytes according to the mirror.
    size_t bytesAllocated()
    {
        size_t result = 0;
        for (unsigned index = 0; index < memoryMap->size(); ++index) {
            if (memoryMap->at(index))
                result++;
        }
        return result;
    }

    // Total committed bytes according to the mirror (whole pages).
    size_t bytesCommitted()
    {
        size_t result = 0;
        for (unsigned index = 0; index < pageMap->size(); ++index) {
            if (pageMap->at(index))
                result++;
        }
        return result * pageSize();
    }

    bool& byteState(void* address)
    {
        return byteState(reinterpret_cast<uintptr_t>(address));
    }

    // Maps a fake heap address to its per-byte mirror flag.
    bool& byteState(uintptr_t address)
    {
        uintptr_t byteIndex = address - basePage * pageSize();
        return memoryMap->at(byteIndex);
    }

    // Maps a fake page number to its per-page mirror flag.
    bool& pageState(uintptr_t page)
    {
        uintptr_t pageIndex = page - basePage;
        return pageMap->at(pageIndex);
    }

    // Test helpers

    void testOneAlloc(size_t size)
    {
        // Tests the most basic behavior: allocate one thing and free it. Also
        // verifies that the state of pages is correct.

        MetaAllocatorHandle* handle = allocate(size);
        EXPECT_EQ(handle->start().untaggedPtr(), reinterpret_cast<void*>(basePage * pageSize()));
        EXPECT_EQ(handle->sizeInBytes(), size);
        EXPECT_TRUE(pageState(basePage));

        confirm(handle);
        confirmHighWatermark(handle);

        free(handle);
    }

    // Varargs: sizes are passed as ints and terminated by a 0 sentinel.
    void testRepeatAllocFree(size_t firstSize, ...)
    {
        // Tests right-coalescing by repeatedly allocating and freeing. The idea
        // is that if you allocate something and then free it, then the heap should
        // look identical to what it was before the allocation due to a right-coalesce
        // of the freed chunk and the already-free memory, and so subsequent
        // allocations should behave the same as the first one.

        MetaAllocatorHandle* handle = allocate(firstSize);
        EXPECT_EQ(handle->start().untaggedPtr(), reinterpret_cast<void*>(basePage * pageSize()));
        EXPECT_EQ(handle->sizeInBytes(), firstSize);

        confirm(handle);
        confirmHighWatermark(handle);

        free(handle);

        va_list argList;
        va_start(argList, firstSize);
        while (size_t sizeInBytes = va_arg(argList, int)) {
            handle = allocate(sizeInBytes);
            // Every allocation should land at the heap base again.
            EXPECT_EQ(handle->start().untaggedPtr(), reinterpret_cast<void*>(basePage * pageSize()));
            EXPECT_EQ(handle->sizeInBytes(), sizeInBytes);

            confirm(handle);
            confirmHighWatermark(handle);

            free(handle);
        }
        va_end(argList);
    }

    void testSimpleFullCoalesce(size_t firstSize, size_t secondSize, size_t thirdSize)
    {
        // Allocates something of size firstSize, then something of size secondSize, and then
        // frees the first allocation, and then the second, and then attempts to allocate the
        // third, asserting that it allocated at the base address of the heap.

        // Note that this test may cause right-allocation, which will cause the test to fail.
        // Thus the correct way of running this test is to ensure that secondSize is
        // picked in such a way that it never straddles a page.

        MetaAllocatorHandle* firstHandle = allocate(firstSize);
        EXPECT_EQ(firstHandle->start().untaggedPtr(), reinterpret_cast<void*>(basePage * pageSize()));
        EXPECT_EQ(firstHandle->sizeInBytes(), firstSize);

        confirm(firstHandle);
        confirmHighWatermark(firstHandle);

        MetaAllocatorHandle* secondHandle = allocate(secondSize);
        EXPECT_EQ(secondHandle->start().untaggedPtr(), reinterpret_cast<void*>(basePage * pageSize() + firstSize));
        EXPECT_EQ(secondHandle->sizeInBytes(), secondSize);

        confirm(firstHandle);
        confirm(secondHandle);
        confirmHighWatermark(secondHandle);

        free(firstHandle);

        confirm(secondHandle);
        confirmHighWatermark(secondHandle);

        free(secondHandle);

        // After both frees the whole heap must be one free run again.
        confirm(basePage * pageSize(), (basePage + defaultPagesInHeap) * pageSize(), false);

        MetaAllocatorHandle* thirdHandle = allocate(thirdSize);
        EXPECT_EQ(thirdHandle->start().untaggedPtr(), reinterpret_cast<void*>(basePage * pageSize()));
        EXPECT_EQ(thirdHandle->sizeInBytes(), thirdSize);

        confirm(thirdHandle);
        confirmHighWatermark(thirdHandle);

        free(thirdHandle);
    }

    enum class TestFIFOAllocMode { FillAtEnd, EagerFill };
    // Varargs: sizes are passed as ints and terminated by a 0 sentinel.
    void testFIFOAlloc(TestFIFOAllocMode mode, ...)
    {
        // This will test the simple case of no-coalesce (freeing the left-most
        // chunk in memory when the chunk to the right of it is allocated) and
        // fully exercise left-coalescing and full-coalescing. In EagerFill
        // mode, this also tests perfect-fit allocation and no-coalescing free.

        size_t totalSize = 0;

        Vector<MetaAllocatorHandle*, 0> handles;

        va_list argList;
        va_start(argList, mode);
        while (size_t sizeInBytes = va_arg(argList, int)) {
            MetaAllocatorHandle* handle = allocate(sizeInBytes);
            // Allocations must be laid out consecutively from the heap base.
            EXPECT_EQ(handle->start().untaggedPtr(), reinterpret_cast<void*>(basePage * pageSize() + totalSize));
            EXPECT_EQ(handle->sizeInBytes(), sizeInBytes);

            confirm(handle);
            confirmHighWatermark(handle);

            handles.append(handle);
            totalSize += sizeInBytes;
        }
        va_end(argList);

        for (unsigned index = 0; index < handles.size(); ++index)
            confirm(handles.at(index));

        // Free in FIFO order; in EagerFill mode, immediately re-fill the
        // freed prefix with one perfect-fit allocation and free it again.
        size_t sizeSoFar = 0;
        for (unsigned index = 0; index < handles.size(); ++index) {
            sizeSoFar += handles.at(index)->sizeInBytes();
            free(handles.at(index));
            if (mode == TestFIFOAllocMode::EagerFill) {
                MetaAllocatorHandle* handle = allocate(sizeSoFar);
                EXPECT_EQ(handle->start().untaggedPtr(), reinterpret_cast<void*>(basePage * pageSize()));
                EXPECT_EQ(handle->sizeInBytes(), sizeSoFar);

                confirm(basePage * pageSize(), basePage * pageSize() + totalSize, true);
                if (index < handles.size() - 1)
                    confirmHighWatermark(handles.last());
                else
                    confirmHighWatermark(handle);

                free(handle);

                confirm(basePage * pageSize(), basePage * pageSize() + sizeSoFar, false);
            }
        }

        ASSERT(sizeSoFar == totalSize);

        confirm(basePage * pageSize(), (basePage + defaultPagesInHeap) * pageSize(), false);

        if (mode == TestFIFOAllocMode::FillAtEnd) {
            // One allocation of the combined size proves full coalescing.
            MetaAllocatorHandle* finalHandle = allocate(totalSize);
            EXPECT_EQ(finalHandle->start().untaggedPtr(), reinterpret_cast<void*>(basePage * pageSize()));
            EXPECT_EQ(finalHandle->sizeInBytes(), totalSize);

            confirm(finalHandle);
            confirmHighWatermark(finalHandle);

            free(finalHandle);
        }
    }

    // Fills the heap with numAllocations blocks of sizeInBytes, verifies one
    // more allocation fails, then frees everything. Sanity checks are skipped
    // per-operation (too slow) and run once before and after.
    void testFillHeap(size_t sizeInBytes, size_t numAllocations)
    {
        Vector<MetaAllocatorHandle*, 0> handles;

        for (size_t index = 0; index < numAllocations; ++index)
            handles.append(allocate(sizeInBytes, DontRunSanityCheck));

        sanityCheck();

        // Heap is full: the next allocation must fail.
        EXPECT_TRUE(!allocator->allocate(sizeInBytes, 0));

        for (size_t index = 0; index < numAllocations; ++index)
            free(handles.at(index), DontRunSanityCheck);

        sanityCheck();
    }

    // Exercises right-allocation: small blocks go to the low end of the heap
    // while page-multiple blocks are placed against the high end. Finishes by
    // allocating the whole heap to prove everything coalesced back.
    void testRightAllocation(size_t firstLeftSize, size_t firstRightSize, size_t secondLeftSize, size_t secondRightSize)
    {
        MetaAllocatorHandle* firstLeft = allocate(firstLeftSize);
        EXPECT_EQ(firstLeft->start().untaggedPtr(), reinterpret_cast<void*>(basePage * pageSize()));

        MetaAllocatorHandle* firstRight = allocate(firstRightSize);
        EXPECT_EQ(firstRight->end().untaggedPtr(), reinterpret_cast<void*>((basePage + defaultPagesInHeap) * pageSize()));

        MetaAllocatorHandle* secondLeft = allocate(secondLeftSize);
        EXPECT_EQ(secondLeft->start().untaggedPtr(), reinterpret_cast<void*>(basePage * pageSize() + firstLeft->sizeInBytes()));

        MetaAllocatorHandle* secondRight = allocate(secondRightSize);
        EXPECT_EQ(secondRight->end().untaggedPtr(), reinterpret_cast<void*>((basePage + defaultPagesInHeap) * pageSize() - firstRight->sizeInBytes()));

        free(firstLeft);
        free(firstRight);
        free(secondLeft);
        free(secondRight);

        MetaAllocatorHandle* final = allocate(defaultPagesInHeap * pageSize());
        EXPECT_EQ(final->start().untaggedPtr(), reinterpret_cast<void*>(basePage * pageSize()));

        free(final);
    }

    // Builds numSlots free holes of increasing size (firstSize, firstSize +
    // step, ...), each separated by a live 32-byte "fragger" allocation so the
    // holes cannot coalesce. Then allocates slightly-smaller blocks and checks
    // each lands inside the matching hole, i.e. best-fit placement.
    void testBestFit(size_t firstSize, size_t step, unsigned numSlots, SanityCheckMode sanityCheckMode)
    {
        Vector<MetaAllocatorHandle*, 0> handlesToFree;
        Vector<MetaAllocatorHandle*, 0> handles;
        Vector<void*, 0> locations;

        size_t size = firstSize;
        for (unsigned index = 0; index < numSlots; ++index) {
            MetaAllocatorHandle* toFree = allocate(size, sanityCheckMode);
            if (!handles.isEmpty()) {
                // Keep allocating until we get a block directly after the last
                // fragger, so the layout is exactly hole/fragger/hole/fragger...
                while (toFree->start().untaggedPtr() != handles.last()->end().untaggedPtr()) {
                    handlesToFree.append(toFree);
                    toFree = allocate(size, sanityCheckMode);
                }
            }

            MetaAllocatorHandle* fragger = allocate(32, sanityCheckMode);
            EXPECT_EQ(fragger->start().untaggedPtr(), toFree->end().untaggedPtr());

            locations.append(toFree->start().untaggedPtr());

            handlesToFree.append(toFree);
            handles.append(fragger);

            size += step;
        }

        ASSERT(locations.size() == numSlots);

        // Punch the holes: free everything except the fraggers.
        for (unsigned index = 0; index < handlesToFree.size(); ++index)
            free(handlesToFree.at(index), sanityCheckMode);

        size = firstSize;
        for (unsigned index = 0; index < numSlots; ++index) {
            // size - 32 should best-fit into the hole of size `size`; it may
            // be placed at either end of that hole.
            MetaAllocatorHandle* bestFit = allocate(size - 32, sanityCheckMode);

            EXPECT_TRUE(bestFit->start().untaggedPtr() == locations.at(index)
                || bestFit->end().untaggedPtr() == reinterpret_cast<void*>(reinterpret_cast<uintptr_t>(locations.at(index)) + size));

            // The 32-byte remainder of the hole should fit exactly next to it.
            MetaAllocatorHandle* small = allocate(32, sanityCheckMode);
            if (bestFit->start().untaggedPtr() == locations.at(index))
                EXPECT_EQ(small->start().untaggedPtr(), bestFit->end().untaggedPtr());
            else
                EXPECT_EQ(small->end().untaggedPtr(), bestFit->start().untaggedPtr());

            free(bestFit, sanityCheckMode);
            free(small, sanityCheckMode);

            size += step;
        }

        for (unsigned index = 0; index < numSlots; ++index)
            free(handles.at(index), sanityCheckMode);

        // Whole-heap allocation proves everything coalesced back.
        MetaAllocatorHandle* final = allocate(defaultPagesInHeap * pageSize(), sanityCheckMode);
        EXPECT_EQ(final->start().untaggedPtr(), reinterpret_cast<void*>(basePage * pageSize()));

        free(final, sanityCheckMode);
    }

    // Allocates firstSize, shrinks the handle to secondSize, and verifies the
    // reclaimed tail becomes allocatable again.
    void testShrink(size_t firstSize, size_t secondSize)
    {
        // Allocate the thing that will be shrunk
        MetaAllocatorHandle* handle = allocate(firstSize);

        // Shrink it, and make sure that our state reflects the shrinkage.
        notifyFree(reinterpret_cast<void*>(handle->start().untaggedPtr<uintptr_t>() + secondSize), firstSize - secondSize);

        handle->shrink(secondSize);
        EXPECT_EQ(handle->sizeInBytes(), secondSize);

        sanityCheck();

        // Assert that the heap is not empty.
        EXPECT_TRUE(!allocator->allocate(defaultPagesInHeap * pageSize(), 0));

        // Allocate the remainder of the heap.
        MetaAllocatorHandle* remainder = allocate(defaultPagesInHeap * pageSize() - secondSize);
        EXPECT_EQ(remainder->start().untaggedPtr(), handle->end().untaggedPtr());

        free(remainder);
        free(handle);

        // Assert that the heap is empty and finish up.
        MetaAllocatorHandle* final = allocate(defaultPagesInHeap * pageSize());
        EXPECT_EQ(final->start().untaggedPtr(), reinterpret_cast<void*>(basePage * pageSize()));

        free(final);
    }

    // Verifies that demand-allocated pages adjacent to the heap coalesce with
    // it: after growth, the combined heap can be allocated as one block.
    void testDemandAllocCoalesce(size_t firstSize, size_t numPages, size_t secondSize)
    {
        EXPECT_TRUE(!allocator->allocate((defaultPagesInHeap + numPages) * pageSize(), 0));

        MetaAllocatorHandle* firstHandle = allocate(firstSize);

        // Growth is still disallowed, so these must fail.
        EXPECT_TRUE(!allocator->allocate(secondSize, 0));
        EXPECT_TRUE(!allocator->allocate((defaultPagesInHeap + numPages) * pageSize(), 0));

        // Authorize a single growth of numPages.
        currentHeapGrowthMode = ForTestDemandAllocCoalesce;
        allowAllocatePages = numPages;

        MetaAllocatorHandle* secondHandle = allocate(secondSize);

        EXPECT_TRUE(currentHeapGrowthMode == DontGrowHeap);
        EXPECT_EQ(allowAllocatePages, static_cast<size_t>(0));
        EXPECT_EQ(requestedNumPages, (secondSize + pageSize() - 1) / pageSize());
        // The new allocation begins exactly where the old heap ended.
        EXPECT_EQ(secondHandle->start().untaggedPtr(), reinterpret_cast<void*>((basePage + defaultPagesInHeap) * pageSize()));

        requestedNumPages = 0;

        free(firstHandle);
        free(secondHandle);

        // Old and new space coalesced: one allocation can span both.
        free(allocate((defaultPagesInHeap + numPages) * pageSize()));
    }

    // Verifies that demand-allocated pages separated from the heap by a gap
    // page do NOT coalesce with it, and that allocations keep landing in the
    // separate regions afterwards.
    void testDemandAllocDontCoalesce(size_t firstSize, size_t numPages, size_t secondSize)
    {
        free(allocate(firstSize));
        free(allocate(defaultPagesInHeap * pageSize()));
        EXPECT_TRUE(!allocator->allocate((defaultPagesInHeap + numPages) * pageSize(), 0));

        MetaAllocatorHandle* firstHandle = allocate(firstSize);

        EXPECT_TRUE(!allocator->allocate(secondSize, 0));
        EXPECT_TRUE(!allocator->allocate((defaultPagesInHeap + numPages) * pageSize(), 0));

        // Authorize a single non-contiguous growth of numPages.
        currentHeapGrowthMode = ForTestDemandAllocDontCoalesce;
        allowAllocatePages = numPages;

        MetaAllocatorHandle* secondHandle = allocate(secondSize);

        EXPECT_TRUE(currentHeapGrowthMode == DontGrowHeap);
        EXPECT_EQ(allowAllocatePages, static_cast<size_t>(0));
        EXPECT_EQ(requestedNumPages, (secondSize + pageSize() - 1) / pageSize());
        // "+ 1" for the gap page the allocator deliberately skipped.
        EXPECT_EQ(secondHandle->start().untaggedPtr(), reinterpret_cast<void*>((basePage + defaultPagesInHeap + 1) * pageSize()));

        requestedNumPages = 0;

        EXPECT_TRUE(!allocator->allocate((defaultPagesInHeap + numPages) * pageSize(), 0));

        free(firstHandle);
        free(secondHandle);

        // Even fully free, the two regions must not have coalesced.
        EXPECT_TRUE(!allocator->allocate((defaultPagesInHeap + numPages) * pageSize(), 0));

        firstHandle = allocate(firstSize);
        secondHandle = allocate(secondSize);
        EXPECT_EQ(firstHandle->start().untaggedPtr(), reinterpret_cast<void*>(basePage * pageSize()));
        EXPECT_EQ(secondHandle->start().untaggedPtr(), reinterpret_cast<void*>((basePage + defaultPagesInHeap + 1) * pageSize()));
        free(firstHandle);
        free(secondHandle);
    }
};
663
TEST_F(MetaAllocatorTest, Empty)
{
    // Tests that creating and destroying an allocator works.
    // (SetUp/TearDown do all the work, including leak and free-state checks.)
}
668
TEST_F(MetaAllocatorTest, AllocZero)
{
    // Tests that allocating a zero-length block returns 0 and
    // does not change anything in memory.

    ASSERT(!allocator->allocate(0, 0));

    // The whole heap must still be allocatable in one piece.
    MetaAllocatorHandle* final = allocate(defaultPagesInHeap * pageSize());
    EXPECT_EQ(final->start().untaggedPtr(), reinterpret_cast<void*>(basePage * pageSize()));
    free(final);
}
680
// Single alloc/free round trips at sub-page and multi-page sizes.
TEST_F(MetaAllocatorTest, OneAlloc32)
{
    testOneAlloc(32);
}

TEST_F(MetaAllocatorTest, OneAlloc64)
{
    testOneAlloc(64);
}

TEST_F(MetaAllocatorTest, OneAllocTwoPages)
{
    testOneAlloc(pageSize() * 2);
}
695
// Right-coalescing: each varargs list is a sequence of int sizes ending in
// the 0 sentinel that testRepeatAllocFree() uses to stop reading.
TEST_F(MetaAllocatorTest, RepeatAllocFree32Twice)
{
    testRepeatAllocFree(32, 32, 0);
}

TEST_F(MetaAllocatorTest, RepeatAllocFree32Then64)
{
    testRepeatAllocFree(32, 64, 0);
}

TEST_F(MetaAllocatorTest, RepeatAllocFree64Then32)
{
    testRepeatAllocFree(64, 32, 0);
}

TEST_F(MetaAllocatorTest, RepeatAllocFree32TwiceThen64)
{
    testRepeatAllocFree(32, 32, 64, 0);
}

TEST_F(MetaAllocatorTest, RepeatAllocFree32Then64Twice)
{
    testRepeatAllocFree(32, 64, 64, 0);
}

TEST_F(MetaAllocatorTest, RepeatAllocFree64Then32Then64)
{
    testRepeatAllocFree(64, 32, 64, 0);
}

TEST_F(MetaAllocatorTest, RepeatAllocFree32Thrice)
{
    testRepeatAllocFree(32, 32, 32, 0);
}
730
731TEST_F(MetaAllocatorTest, RepeatAllocFree32Then64Then32)
732{
733 testRepeatAllocFree(32, 32, 32, 0);
734}
735
// More right-coalescing sequences; page-sized entries are cast to int because
// testRepeatAllocFree() reads its varargs with va_arg(argList, int).
TEST_F(MetaAllocatorTest, RepeatAllocFree64Then32Twice)
{
    testRepeatAllocFree(64, 32, 32, 0);
}

TEST_F(MetaAllocatorTest, RepeatAllocFreeTwoPagesThen32)
{
    testRepeatAllocFree(static_cast<int>(pageSize() * 2), 32, 0);
}

TEST_F(MetaAllocatorTest, RepeatAllocFree32ThenTwoPages)
{
    testRepeatAllocFree(32, static_cast<int>(pageSize() * 2), 0);
}

TEST_F(MetaAllocatorTest, RepeatAllocFreePageThenTwoPages)
{
    testRepeatAllocFree(static_cast<int>(pageSize()), static_cast<int>(pageSize() * 2), 0);
}

TEST_F(MetaAllocatorTest, RepeatAllocFreeTwoPagesThenPage)
{
    testRepeatAllocFree(static_cast<int>(pageSize() * 2), static_cast<int>(pageSize()), 0);
}
760
// Full coalescing: two adjacent allocations are freed (left first), then a
// third allocation spanning both must land at the heap base.
TEST_F(MetaAllocatorTest, SimpleFullCoalesce32Plus32Then128)
{
    testSimpleFullCoalesce(32, 32, 128);
}

TEST_F(MetaAllocatorTest, SimpleFullCoalesce32Plus64Then128)
{
    testSimpleFullCoalesce(32, 64, 128);
}

TEST_F(MetaAllocatorTest, SimpleFullCoalesce64Plus32Then128)
{
    testSimpleFullCoalesce(64, 32, 128);
}

TEST_F(MetaAllocatorTest, SimpleFullCoalesce32PlusPageLess32ThenPage)
{
    testSimpleFullCoalesce(32, pageSize() - 32, pageSize());
}

TEST_F(MetaAllocatorTest, SimpleFullCoalesce32PlusPageLess32ThenTwoPages)
{
    testSimpleFullCoalesce(32, pageSize() - 32, pageSize() * 2);
}

TEST_F(MetaAllocatorTest, SimpleFullCoalescePagePlus32ThenTwoPages)
{
    testSimpleFullCoalesce(pageSize(), 32, pageSize() * 2);
}

TEST_F(MetaAllocatorTest, SimpleFullCoalescePagePlusPageThenTwoPages)
{
    testSimpleFullCoalesce(pageSize(), pageSize(), pageSize() * 2);
}
795
// FIFO free-order tests (sizes are int varargs ending in the 0 sentinel).
// FillAtEnd checks coalescing once at the end; EagerFill re-fills the freed
// prefix after every free, also exercising perfect-fit allocation.
TEST_F(MetaAllocatorTest, FIFOAllocFillAtEnd32Twice)
{
    testFIFOAlloc(TestFIFOAllocMode::FillAtEnd, 32, 32, 0);
}

TEST_F(MetaAllocatorTest, FIFOAllocFillAtEnd32Thrice)
{
    testFIFOAlloc(TestFIFOAllocMode::FillAtEnd, 32, 32, 32, 0);
}

TEST_F(MetaAllocatorTest, FIFOAllocFillAtEnd32FourTimes)
{
    testFIFOAlloc(TestFIFOAllocMode::FillAtEnd, 32, 32, 32, 32, 0);
}

TEST_F(MetaAllocatorTest, FIFOAllocFillAtEndPageLess32Then32ThenPageLess64Then64)
{
    testFIFOAlloc(TestFIFOAllocMode::FillAtEnd, static_cast<int>(pageSize() - 32), 32, static_cast<int>(pageSize() - 64), 64, 0);
}

TEST_F(MetaAllocatorTest, FIFOAllocEagerFill32Twice)
{
    testFIFOAlloc(TestFIFOAllocMode::EagerFill, 32, 32, 0);
}

TEST_F(MetaAllocatorTest, FIFOAllocEagerFill32Thrice)
{
    testFIFOAlloc(TestFIFOAllocMode::EagerFill, 32, 32, 32, 0);
}

TEST_F(MetaAllocatorTest, FIFOAllocEagerFill32FourTimes)
{
    testFIFOAlloc(TestFIFOAllocMode::EagerFill, 32, 32, 32, 32, 0);
}

TEST_F(MetaAllocatorTest, FIFOAllocEagerFillPageLess32Then32ThenPageLess64Then64)
{
    testFIFOAlloc(TestFIFOAllocMode::EagerFill, static_cast<int>(pageSize() - 32), 32, static_cast<int>(pageSize() - 64), 64, 0);
}
835
// Fill the entire heap with equal-size blocks, confirm exhaustion, free all.
TEST_F(MetaAllocatorTest, FillHeap32)
{
    testFillHeap(32, defaultPagesInHeap * pageSize() / 32);
}

TEST_F(MetaAllocatorTest, FillHeapPage)
{
    testFillHeap(pageSize(), defaultPagesInHeap);
}

TEST_F(MetaAllocatorTest, FillHeapTwoPages)
{
    testFillHeap(pageSize() * 2, defaultPagesInHeap / 2);
}
850
// Small blocks placed at the low end, page-multiple blocks at the high end.
TEST_F(MetaAllocatorTest, RightAllocation32ThenPageThen32ThenPage)
{
    testRightAllocation(32, pageSize(), 32, pageSize());
}

TEST_F(MetaAllocatorTest, RightAllocationQuarterPageThenPageThenQuarterPageThenPage)
{
    testRightAllocation(pageSize() / 4, pageSize(), pageSize() / 4, pageSize());
}
860
// Best-fit placement into fragmented holes. The larger repetitions skip the
// per-operation sanity check because it scans the whole heap each time.
TEST_F(MetaAllocatorTest, BestFit64Plus64Thrice)
{
    testBestFit(64, 64, 3, RunSanityCheck);
}

TEST_F(MetaAllocatorTest, BestFit64Plus64TenTimes)
{
    testBestFit(64, 64, 10, DontRunSanityCheck);
}

TEST_F(MetaAllocatorTest, BestFit64Plus64HundredTimes)
{
    testBestFit(64, 64, 100, DontRunSanityCheck);
}

TEST_F(MetaAllocatorTest, BestFit96Plus64Thrice)
{
    testBestFit(96, 64, 3, RunSanityCheck);
}

TEST_F(MetaAllocatorTest, BestFit96Plus64TenTimes)
{
    testBestFit(96, 64, 10, DontRunSanityCheck);
}

TEST_F(MetaAllocatorTest, BestFit96Plus64HundredTimes)
{
    testBestFit(96, 64, 100, DontRunSanityCheck);
}

TEST_F(MetaAllocatorTest, BestFit96Plus96Thrice)
{
    testBestFit(96, 96, 3, RunSanityCheck);
}

TEST_F(MetaAllocatorTest, BestFit96Plus96TenTimes)
{
    testBestFit(96, 96, 10, DontRunSanityCheck);
}

TEST_F(MetaAllocatorTest, BestFit96Plus96EightyTimes)
{
    testBestFit(96, 96, 80, DontRunSanityCheck);
}
905
// Shrink an allocation in place and verify the reclaimed tail is reusable,
// across page-boundary-crossing and non-crossing size combinations.
TEST_F(MetaAllocatorTest, Shrink64To32)
{
    testShrink(64, 32);
}

TEST_F(MetaAllocatorTest, ShrinkPageTo32)
{
    testShrink(pageSize(), 32);
}

TEST_F(MetaAllocatorTest, ShrinkPageToPageLess32)
{
    testShrink(pageSize(), pageSize() - 32);
}

TEST_F(MetaAllocatorTest, ShrinkTwoPagesTo32)
{
    testShrink(pageSize() * 2, 32);
}

TEST_F(MetaAllocatorTest, ShrinkTwoPagesToPagePlus32)
{
    testShrink(pageSize() * 2, pageSize() + 32);
}

TEST_F(MetaAllocatorTest, ShrinkTwoPagesToPage)
{
    testShrink(pageSize() * 2, pageSize());
}

TEST_F(MetaAllocatorTest, ShrinkTwoPagesToPageLess32)
{
    testShrink(pageSize() * 2, pageSize() - 32);
}

TEST_F(MetaAllocatorTest, ShrinkTwoPagesToTwoPagesLess32)
{
    testShrink(pageSize() * 2, pageSize() * 2 - 32);
}
945
// Demand-growth: the heap asks allocateNewSpace() for more pages when a
// request cannot be satisfied; new space either coalesces with the heap or
// stays a separate region (when a gap page is left in between).
TEST_F(MetaAllocatorTest, DemandAllocCoalescePageThenDoubleHeap)
{
    testDemandAllocCoalesce(pageSize(), defaultPagesInHeap, defaultPagesInHeap * pageSize());
}

TEST_F(MetaAllocatorTest, DemandAllocCoalescePageThenTripleHeap)
{
    testDemandAllocCoalesce(pageSize(), defaultPagesInHeap * 2, defaultPagesInHeap * pageSize());
}

TEST_F(MetaAllocatorTest, DemandAllocDontCoalescePageThenDoubleHeap)
{
    testDemandAllocDontCoalesce(pageSize(), defaultPagesInHeap, defaultPagesInHeap * pageSize());
}
960
961} // namespace TestWebKitAPI
962
963#if USE(POINTER_PROFILING)
964
965namespace WTF {
966
// Stub for pointer-profiling builds: every pointer gets the same opaque tag.
const char* tagForPtr(const void*)
{
    return "<unknown>";
}
968const char* ptrTagName(PtrTag) { return "<unknown>"; }
969
970} // namespace WTF
971
972#endif // USE(POINTER_PROFILING)
973