@@ -1332,13 +1332,15 @@ STATIC_ASSERT(static_cast<ObjectSpace>(1 << AllocationSpace::MAP_SPACE) ==
13321332
// Registers |observer| to be notified of allocations in this space (via
// AllocationStep below).
void Space::AddAllocationObserver(AllocationObserver* observer) {
  allocation_observers_.push_back(observer);
  // Recompute the inline allocation step/limit so the new observer takes
  // effect immediately rather than after the current linear area is used up.
  StartNextInlineAllocationStep();
}
13361337
// Unregisters |observer|. The observer must currently be registered.
void Space::RemoveAllocationObserver(AllocationObserver* observer) {
  auto it = std::find(allocation_observers_.begin(),
                      allocation_observers_.end(), observer);
  DCHECK(allocation_observers_.end() != it);  // Must have been added before.
  allocation_observers_.erase(it);
  // Recompute the inline allocation step/limit now that one fewer observer
  // needs to be notified.
  StartNextInlineAllocationStep();
}
13431345
// Suppresses observer notifications: while paused, AllocationStep is a no-op.
void Space::PauseAllocationObservers() { allocation_observers_paused_ = true; }
@@ -1347,11 +1349,12 @@ void Space::ResumeAllocationObservers() {
13471349 allocation_observers_paused_ = false ;
13481350}
13491351
1350- void Space::AllocationStep (Address soon_object, int size) {
1352+ void Space::AllocationStep (int bytes_since_last, Address soon_object,
1353+ int size) {
13511354 if (!allocation_observers_paused_) {
13521355 heap ()->CreateFillerObjectAt (soon_object, size, ClearRecordedSlots::kNo );
13531356 for (AllocationObserver* observer : allocation_observers_) {
1354- observer->AllocationStep (size , soon_object, size);
1357+ observer->AllocationStep (bytes_since_last , soon_object, size);
13551358 }
13561359 }
13571360}
@@ -1371,7 +1374,8 @@ PagedSpace::PagedSpace(Heap* heap, AllocationSpace space,
13711374 : Space (heap, space, executable),
13721375 anchor_ (this ),
13731376 free_list_ (this ),
1374- locked_page_ (nullptr ) {
1377+ locked_page_ (nullptr ),
1378+ top_on_previous_step_ (0 ) {
13751379 area_size_ = MemoryAllocator::PageAreaSize (space);
13761380 accounting_stats_.Clear ();
13771381
@@ -1600,6 +1604,48 @@ void PagedSpace::SetAllocationInfo(Address top, Address limit) {
16001604 }
16011605}
16021606
1607+ void PagedSpace::DecreaseLimit (Address new_limit) {
1608+ Address old_limit = limit ();
1609+ DCHECK_LE (top (), new_limit);
1610+ DCHECK_GE (old_limit, new_limit);
1611+ if (new_limit != old_limit) {
1612+ SetTopAndLimit (top (), new_limit);
1613+ Free (new_limit, old_limit - new_limit);
1614+ if (heap ()->incremental_marking ()->black_allocation ()) {
1615+ Page::FromAllocationAreaAddress (new_limit)->DestroyBlackArea (new_limit,
1616+ old_limit);
1617+ }
1618+ }
1619+ }
1620+
1621+ Address PagedSpace::ComputeLimit (Address start, Address end,
1622+ size_t size_in_bytes) {
1623+ DCHECK_GE (end - start, size_in_bytes);
1624+
1625+ if (heap ()->inline_allocation_disabled ()) {
1626+ // Keep the linear allocation area to fit exactly the requested size.
1627+ return start + size_in_bytes;
1628+ } else if (!allocation_observers_paused_ && !allocation_observers_.empty () &&
1629+ identity () == OLD_SPACE && !is_local ()) {
1630+ // Generated code may allocate inline from the linear allocation area for
1631+ // Old Space. To make sure we can observe these allocations, we use a lower
1632+ // limit.
1633+ size_t step = RoundSizeDownToObjectAlignment (
1634+ static_cast <int >(GetNextInlineAllocationStepSize ()));
1635+ return Max (start + size_in_bytes, Min (start + step, end));
1636+ } else {
1637+ // The entire node can be used as the linear allocation area.
1638+ return end;
1639+ }
1640+ }
1641+
1642+ void PagedSpace::StartNextInlineAllocationStep () {
1643+ if (!allocation_observers_paused_ && SupportsInlineAllocation ()) {
1644+ top_on_previous_step_ = allocation_observers_.empty () ? 0 : top ();
1645+ DecreaseLimit (ComputeLimit (top (), limit (), 0 ));
1646+ }
1647+ }
1648+
16031649void PagedSpace::MarkAllocationInfoBlack () {
16041650 DCHECK (heap ()->incremental_marking ()->black_allocation ());
16051651 Address current_top = top ();
@@ -1645,6 +1691,12 @@ void PagedSpace::EmptyAllocationInfo() {
16451691 }
16461692 }
16471693
1694+ if (top_on_previous_step_) {
1695+ DCHECK (current_top >= top_on_previous_step_);
1696+ AllocationStep (static_cast <int >(current_top - top_on_previous_step_),
1697+ nullptr , 0 );
1698+ top_on_previous_step_ = 0 ;
1699+ }
16481700 SetTopAndLimit (NULL , NULL );
16491701 DCHECK_GE (current_limit, current_top);
16501702 Free (current_top, current_limit - current_top);
@@ -2087,16 +2139,6 @@ void NewSpace::StartNextInlineAllocationStep() {
20872139 }
20882140}
20892141
2090- void NewSpace::AddAllocationObserver (AllocationObserver* observer) {
2091- Space::AddAllocationObserver (observer);
2092- StartNextInlineAllocationStep ();
2093- }
2094-
2095- void NewSpace::RemoveAllocationObserver (AllocationObserver* observer) {
2096- Space::RemoveAllocationObserver (observer);
2097- StartNextInlineAllocationStep ();
2098- }
2099-
21002142void NewSpace::PauseAllocationObservers () {
21012143 // Do a step to account for memory allocated so far.
21022144 InlineAllocationStep (top (), top (), nullptr , 0 );
@@ -2105,12 +2147,28 @@ void NewSpace::PauseAllocationObservers() {
21052147 UpdateInlineAllocationLimit (0 );
21062148}
21072149
2150+ void PagedSpace::PauseAllocationObservers () {
2151+ // Do a step to account for memory allocated so far.
2152+ if (top_on_previous_step_) {
2153+ int bytes_allocated = static_cast <int >(top () - top_on_previous_step_);
2154+ AllocationStep (bytes_allocated, nullptr , 0 );
2155+ }
2156+ Space::PauseAllocationObservers ();
2157+ top_on_previous_step_ = 0 ;
2158+ }
2159+
// Re-enables observer notifications and re-arms the next inline allocation
// step.
void NewSpace::ResumeAllocationObservers() {
  // Pausing must already have accounted for (and cleared) any pending step.
  DCHECK(top_on_previous_step_ == 0);
  Space::ResumeAllocationObservers();
  StartNextInlineAllocationStep();
}
21132165
// TODO(ofrobots): refactor into SpaceWithLinearArea
// Re-enables observer notifications and re-arms the next inline allocation
// step. Mirrors NewSpace::ResumeAllocationObservers.
void PagedSpace::ResumeAllocationObservers() {
  // Pausing must already have accounted for (and cleared) any pending step.
  DCHECK(top_on_previous_step_ == 0);
  Space::ResumeAllocationObservers();
  StartNextInlineAllocationStep();
}
21142172
21152173void NewSpace::InlineAllocationStep (Address top, Address new_top,
21162174 Address soon_object, size_t size) {
@@ -2885,7 +2943,6 @@ bool FreeList::Allocate(size_t size_in_bytes) {
28852943 if (new_node == nullptr ) return false ;
28862944
28872945 DCHECK_GE (new_node_size, size_in_bytes);
2888- size_t bytes_left = new_node_size - size_in_bytes;
28892946
28902947#ifdef DEBUG
28912948 for (size_t i = 0 ; i < size_in_bytes / kPointerSize ; i++) {
@@ -2899,38 +2956,21 @@ bool FreeList::Allocate(size_t size_in_bytes) {
28992956 // candidate.
29002957 DCHECK (!MarkCompactCollector::IsOnEvacuationCandidate (new_node));
29012958
2902- const size_t kThreshold = IncrementalMarking::kAllocatedThreshold ;
2903-
29042959 // Memory in the linear allocation area is counted as allocated. We may free
29052960 // a little of this again immediately - see below.
29062961 owner_->IncreaseAllocatedBytes (new_node_size,
29072962 Page::FromAddress (new_node->address ()));
29082963
2909- if (owner_->heap ()->inline_allocation_disabled ()) {
2910- // Keep the linear allocation area to fit exactly the requested size.
2911- // Return the rest to the free list.
2912- owner_->Free (new_node->address () + size_in_bytes, bytes_left);
2913- owner_->SetAllocationInfo (new_node->address (),
2914- new_node->address () + size_in_bytes);
2915- } else if (bytes_left > kThreshold &&
2916- owner_->heap ()->incremental_marking ()->IsMarkingIncomplete () &&
2917- FLAG_incremental_marking &&
2918- !owner_->is_local ()) { // Not needed on CompactionSpaces.
2919- size_t linear_size = owner_->RoundSizeDownToObjectAlignment (kThreshold );
2920- // We don't want to give too large linear areas to the allocator while
2921- // incremental marking is going on, because we won't check again whether
2922- // we want to do another increment until the linear area is used up.
2923- DCHECK_GE (new_node_size, size_in_bytes + linear_size);
2924- owner_->Free (new_node->address () + size_in_bytes + linear_size,
2925- new_node_size - size_in_bytes - linear_size);
2926- owner_->SetAllocationInfo (
2927- new_node->address (), new_node->address () + size_in_bytes + linear_size);
2928- } else {
2929- // Normally we give the rest of the node to the allocator as its new
2930- // linear allocation area.
2931- owner_->SetAllocationInfo (new_node->address (),
2932- new_node->address () + new_node_size);
2964+ Address start = new_node->address ();
2965+ Address end = new_node->address () + new_node_size;
2966+ Address limit = owner_->ComputeLimit (start, end, size_in_bytes);
2967+ DCHECK_LE (limit, end);
2968+ DCHECK_LE (size_in_bytes, limit - start);
2969+ if (limit != end) {
2970+ owner_->Free (limit, end - limit);
29332971 }
2972+ owner_->SetAllocationInfo (start, limit);
2973+
29342974 return true ;
29352975}
29362976
@@ -3318,7 +3358,7 @@ AllocationResult LargeObjectSpace::AllocateRaw(int object_size,
33183358 if (heap ()->incremental_marking ()->black_allocation ()) {
33193359 heap ()->incremental_marking ()->marking_state ()->WhiteToBlack (object);
33203360 }
3321- AllocationStep (object->address (), object_size);
3361+ AllocationStep (object_size, object->address (), object_size);
33223362 DCHECK_IMPLIES (
33233363 heap ()->incremental_marking ()->black_allocation (),
33243364 heap ()->incremental_marking ()->marking_state ()->IsBlack (object));
0 commit comments