Commit da49dc4a authored by Kevin Modzelewski's avatar Kevin Modzelewski

Merge pull request #646 from kmod/class_freeing

Temporary fix for freeing-classes-before-instances bug
parents f6b3c256 e0bd51eb
...@@ -375,10 +375,10 @@ void markPhase() { ...@@ -375,10 +375,10 @@ void markPhase() {
#endif #endif
} }
static void sweepPhase(std::vector<Box*>& weakly_referenced) { static void sweepPhase(std::vector<Box*>& weakly_referenced, std::vector<BoxedClass*>& classes_to_free) {
// we need to use the allocator here because these objects are referenced only here, and calling the weakref // we need to use the allocator here because these objects are referenced only here, and calling the weakref
// callbacks could start another gc // callbacks could start another gc
global_heap.freeUnmarked(weakly_referenced); global_heap.freeUnmarked(weakly_referenced, classes_to_free);
} }
static bool gc_enabled = true; static bool gc_enabled = true;
...@@ -433,7 +433,17 @@ void runCollection() { ...@@ -433,7 +433,17 @@ void runCollection() {
// since the deallocation of other objects (namely, the weakref objects themselves) can affect // since the deallocation of other objects (namely, the weakref objects themselves) can affect
// those lists, and we want to see the final versions. // those lists, and we want to see the final versions.
std::vector<Box*> weakly_referenced; std::vector<Box*> weakly_referenced;
sweepPhase(weakly_referenced);
// Temporary solution to the "we can't free classes before their instances": the sweep phase
// will avoid freeing classes, and will instead put them into this list for us to free at the end.
// XXX there are still corner cases with it:
// - there is no ordering enforced between different class objects, ie if you free a class and a metaclass
// in the same collection we have the same issue
// - if there are weakreferences to the class, it gets freed slightly earlier
// These could both be fixed but I think the full fix will come with rudi's larger finalization changes.
std::vector<BoxedClass*> classes_to_free;
sweepPhase(weakly_referenced, classes_to_free);
// Handle weakrefs in two passes: // Handle weakrefs in two passes:
// - first, find all of the weakref objects whose callbacks we need to call. we need to iterate // - first, find all of the weakref objects whose callbacks we need to call. we need to iterate
...@@ -458,7 +468,11 @@ void runCollection() { ...@@ -458,7 +468,11 @@ void runCollection() {
weak_references.push_back(head); weak_references.push_back(head);
} }
} }
global_heap.free(GCAllocation::fromUserData(o)); global_heap._setFree(GCAllocation::fromUserData(o));
}
for (auto b : classes_to_free) {
global_heap._setFree(GCAllocation::fromUserData(b));
} }
should_not_reenter_gc = false; // end non-reentrant section should_not_reenter_gc = false; // end non-reentrant section
......
...@@ -43,7 +43,7 @@ template <> void return_temporary_buffer<pyston::Box*>(pyston::Box** p) { ...@@ -43,7 +43,7 @@ template <> void return_temporary_buffer<pyston::Box*>(pyston::Box** p) {
namespace pyston { namespace pyston {
namespace gc { namespace gc {
bool _doFree(GCAllocation* al, std::vector<Box*>* weakly_referenced); bool _doFree(GCAllocation* al, std::vector<Box*>* weakly_referenced, std::vector<BoxedClass*>* classes_to_free);
// lots of linked lists around here, so let's just use template functions for operations on them. // lots of linked lists around here, so let's just use template functions for operations on them.
template <class ListT> inline void nullNextPrev(ListT* node) { template <class ListT> inline void nullNextPrev(ListT* node) {
...@@ -86,7 +86,8 @@ template <class ListT, typename Func> inline void forEach(ListT* list, Func func ...@@ -86,7 +86,8 @@ template <class ListT, typename Func> inline void forEach(ListT* list, Func func
} }
template <class ListT, typename Free> template <class ListT, typename Free>
inline void sweepList(ListT* head, std::vector<Box*>& weakly_referenced, Free free_func) { inline void sweepList(ListT* head, std::vector<Box*>& weakly_referenced, std::vector<BoxedClass*>& classes_to_free,
Free free_func) {
auto cur = head; auto cur = head;
while (cur) { while (cur) {
GCAllocation* al = cur->data; GCAllocation* al = cur->data;
...@@ -94,7 +95,7 @@ inline void sweepList(ListT* head, std::vector<Box*>& weakly_referenced, Free fr ...@@ -94,7 +95,7 @@ inline void sweepList(ListT* head, std::vector<Box*>& weakly_referenced, Free fr
clearMark(al); clearMark(al);
cur = cur->next; cur = cur->next;
} else { } else {
if (_doFree(al, &weakly_referenced)) { if (_doFree(al, &weakly_referenced, &classes_to_free)) {
removeFromLL(cur); removeFromLL(cur);
auto to_free = cur; auto to_free = cur;
...@@ -122,7 +123,8 @@ void _bytesAllocatedTripped() { ...@@ -122,7 +123,8 @@ void _bytesAllocatedTripped() {
Heap global_heap; Heap global_heap;
__attribute__((always_inline)) bool _doFree(GCAllocation* al, std::vector<Box*>* weakly_referenced) { __attribute__((always_inline)) bool _doFree(GCAllocation* al, std::vector<Box*>* weakly_referenced,
std::vector<BoxedClass*>* classes_to_free) {
#ifndef NVALGRIND #ifndef NVALGRIND
VALGRIND_DISABLE_ERROR_REPORTING; VALGRIND_DISABLE_ERROR_REPORTING;
#endif #endif
...@@ -149,6 +151,13 @@ __attribute__((always_inline)) bool _doFree(GCAllocation* al, std::vector<Box*>* ...@@ -149,6 +151,13 @@ __attribute__((always_inline)) bool _doFree(GCAllocation* al, std::vector<Box*>*
} }
} }
// Note: do this check after the weakrefs check.
if (PyType_Check(b)) {
assert(classes_to_free);
classes_to_free->push_back(static_cast<BoxedClass*>(b));
return false;
}
// XXX: we are currently ignoring destructors (tp_dealloc) for extension objects, since we have // XXX: we are currently ignoring destructors (tp_dealloc) for extension objects, since we have
// historically done that (whoops) and there are too many to be worth changing for now as long // historically done that (whoops) and there are too many to be worth changing for now as long
// as we can get real destructor support soon. // as we can get real destructor support soon.
...@@ -161,7 +170,7 @@ __attribute__((always_inline)) bool _doFree(GCAllocation* al, std::vector<Box*>* ...@@ -161,7 +170,7 @@ __attribute__((always_inline)) bool _doFree(GCAllocation* al, std::vector<Box*>*
} }
void Heap::destructContents(GCAllocation* al) { void Heap::destructContents(GCAllocation* al) {
_doFree(al, NULL); _doFree(al, NULL, NULL);
} }
struct HeapStatistics { struct HeapStatistics {
...@@ -369,8 +378,8 @@ GCAllocation* SmallArena::allocationFrom(void* ptr) { ...@@ -369,8 +378,8 @@ GCAllocation* SmallArena::allocationFrom(void* ptr) {
return reinterpret_cast<GCAllocation*>(&b->atoms[atom_idx]); return reinterpret_cast<GCAllocation*>(&b->atoms[atom_idx]);
} }
void SmallArena::freeUnmarked(std::vector<Box*>& weakly_referenced) { void SmallArena::freeUnmarked(std::vector<Box*>& weakly_referenced, std::vector<BoxedClass*>& classes_to_free) {
thread_caches.forEachValue([this, &weakly_referenced](ThreadBlockCache* cache) { thread_caches.forEachValue([this, &weakly_referenced, &classes_to_free](ThreadBlockCache* cache) {
for (int bidx = 0; bidx < NUM_BUCKETS; bidx++) { for (int bidx = 0; bidx < NUM_BUCKETS; bidx++) {
Block* h = cache->cache_free_heads[bidx]; Block* h = cache->cache_free_heads[bidx];
// Try to limit the amount of unused memory a thread can hold onto; // Try to limit the amount of unused memory a thread can hold onto;
...@@ -390,8 +399,8 @@ void SmallArena::freeUnmarked(std::vector<Box*>& weakly_referenced) { ...@@ -390,8 +399,8 @@ void SmallArena::freeUnmarked(std::vector<Box*>& weakly_referenced) {
insertIntoLL(&heads[bidx], h); insertIntoLL(&heads[bidx], h);
} }
Block** chain_end = _freeChain(&cache->cache_free_heads[bidx], weakly_referenced); Block** chain_end = _freeChain(&cache->cache_free_heads[bidx], weakly_referenced, classes_to_free);
_freeChain(&cache->cache_full_heads[bidx], weakly_referenced); _freeChain(&cache->cache_full_heads[bidx], weakly_referenced, classes_to_free);
while (Block* b = cache->cache_full_heads[bidx]) { while (Block* b = cache->cache_full_heads[bidx]) {
removeFromLLAndNull(b); removeFromLLAndNull(b);
...@@ -401,8 +410,8 @@ void SmallArena::freeUnmarked(std::vector<Box*>& weakly_referenced) { ...@@ -401,8 +410,8 @@ void SmallArena::freeUnmarked(std::vector<Box*>& weakly_referenced) {
}); });
for (int bidx = 0; bidx < NUM_BUCKETS; bidx++) { for (int bidx = 0; bidx < NUM_BUCKETS; bidx++) {
Block** chain_end = _freeChain(&heads[bidx], weakly_referenced); Block** chain_end = _freeChain(&heads[bidx], weakly_referenced, classes_to_free);
_freeChain(&full_heads[bidx], weakly_referenced); _freeChain(&full_heads[bidx], weakly_referenced, classes_to_free);
while (Block* b = full_heads[bidx]) { while (Block* b = full_heads[bidx]) {
removeFromLLAndNull(b); removeFromLLAndNull(b);
...@@ -429,7 +438,8 @@ void SmallArena::getStatistics(HeapStatistics* stats) { ...@@ -429,7 +438,8 @@ void SmallArena::getStatistics(HeapStatistics* stats) {
} }
SmallArena::Block** SmallArena::_freeChain(Block** head, std::vector<Box*>& weakly_referenced) { SmallArena::Block** SmallArena::_freeChain(Block** head, std::vector<Box*>& weakly_referenced,
std::vector<BoxedClass*>& classes_to_free) {
while (Block* b = *head) { while (Block* b = *head) {
int num_objects = b->numObjects(); int num_objects = b->numObjects();
int first_obj = b->minObjIndex(); int first_obj = b->minObjIndex();
...@@ -453,7 +463,7 @@ SmallArena::Block** SmallArena::_freeChain(Block** head, std::vector<Box*>& weak ...@@ -453,7 +463,7 @@ SmallArena::Block** SmallArena::_freeChain(Block** head, std::vector<Box*>& weak
if (isMarked(al)) { if (isMarked(al)) {
clearMark(al); clearMark(al);
} else { } else {
if (_doFree(al, &weakly_referenced)) { if (_doFree(al, &weakly_referenced, &classes_to_free)) {
b->isfree.set(atom_idx); b->isfree.set(atom_idx);
#ifndef NDEBUG #ifndef NDEBUG
memset(al->user_data, 0xbb, b->size - sizeof(GCAllocation)); memset(al->user_data, 0xbb, b->size - sizeof(GCAllocation));
...@@ -664,8 +674,8 @@ GCAllocation* LargeArena::allocationFrom(void* ptr) { ...@@ -664,8 +674,8 @@ GCAllocation* LargeArena::allocationFrom(void* ptr) {
return NULL; return NULL;
} }
void LargeArena::freeUnmarked(std::vector<Box*>& weakly_referenced) { void LargeArena::freeUnmarked(std::vector<Box*>& weakly_referenced, std::vector<BoxedClass*>& classes_to_free) {
sweepList(head, weakly_referenced, [this](LargeObj* ptr) { _freeLargeObj(ptr); }); sweepList(head, weakly_referenced, classes_to_free, [this](LargeObj* ptr) { _freeLargeObj(ptr); });
} }
void LargeArena::getStatistics(HeapStatistics* stats) { void LargeArena::getStatistics(HeapStatistics* stats) {
...@@ -857,8 +867,8 @@ GCAllocation* HugeArena::allocationFrom(void* ptr) { ...@@ -857,8 +867,8 @@ GCAllocation* HugeArena::allocationFrom(void* ptr) {
return NULL; return NULL;
} }
void HugeArena::freeUnmarked(std::vector<Box*>& weakly_referenced) { void HugeArena::freeUnmarked(std::vector<Box*>& weakly_referenced, std::vector<BoxedClass*>& classes_to_free) {
sweepList(head, weakly_referenced, [this](HugeObj* ptr) { _freeHugeObj(ptr); }); sweepList(head, weakly_referenced, classes_to_free, [this](HugeObj* ptr) { _freeHugeObj(ptr); });
} }
void HugeArena::getStatistics(HeapStatistics* stats) { void HugeArena::getStatistics(HeapStatistics* stats) {
......
...@@ -223,7 +223,7 @@ public: ...@@ -223,7 +223,7 @@ public:
void free(GCAllocation* al); void free(GCAllocation* al);
GCAllocation* allocationFrom(void* ptr); GCAllocation* allocationFrom(void* ptr);
void freeUnmarked(std::vector<Box*>& weakly_referenced); void freeUnmarked(std::vector<Box*>& weakly_referenced, std::vector<BoxedClass*>& classes_to_free);
void getStatistics(HeapStatistics* stats); void getStatistics(HeapStatistics* stats);
...@@ -355,7 +355,7 @@ private: ...@@ -355,7 +355,7 @@ private:
Block* _allocBlock(uint64_t size, Block** prev); Block* _allocBlock(uint64_t size, Block** prev);
GCAllocation* _allocFromBlock(Block* b); GCAllocation* _allocFromBlock(Block* b);
Block* _claimBlock(size_t rounded_size, Block** free_head); Block* _claimBlock(size_t rounded_size, Block** free_head);
Block** _freeChain(Block** head, std::vector<Box*>& weakly_referenced); Block** _freeChain(Block** head, std::vector<Box*>& weakly_referenced, std::vector<BoxedClass*>& classes_to_free);
void _getChainStatistics(HeapStatistics* stats, Block** head); void _getChainStatistics(HeapStatistics* stats, Block** head);
GCAllocation* __attribute__((__malloc__)) _alloc(size_t bytes, int bucket_idx); GCAllocation* __attribute__((__malloc__)) _alloc(size_t bytes, int bucket_idx);
...@@ -428,7 +428,7 @@ public: ...@@ -428,7 +428,7 @@ public:
void free(GCAllocation* alloc); void free(GCAllocation* alloc);
GCAllocation* allocationFrom(void* ptr); GCAllocation* allocationFrom(void* ptr);
void freeUnmarked(std::vector<Box*>& weakly_referenced); void freeUnmarked(std::vector<Box*>& weakly_referenced, std::vector<BoxedClass*>& classes_to_free);
void getStatistics(HeapStatistics* stats); void getStatistics(HeapStatistics* stats);
}; };
...@@ -446,7 +446,7 @@ public: ...@@ -446,7 +446,7 @@ public:
void free(GCAllocation* alloc); void free(GCAllocation* alloc);
GCAllocation* allocationFrom(void* ptr); GCAllocation* allocationFrom(void* ptr);
void freeUnmarked(std::vector<Box*>& weakly_referenced); void freeUnmarked(std::vector<Box*>& weakly_referenced, std::vector<BoxedClass*>& classes_to_free);
void getStatistics(HeapStatistics* stats); void getStatistics(HeapStatistics* stats);
...@@ -529,18 +529,7 @@ public: ...@@ -529,18 +529,7 @@ public:
void free(GCAllocation* alloc) { void free(GCAllocation* alloc) {
destructContents(alloc); destructContents(alloc);
if (large_arena.contains(alloc)) { _setFree(alloc);
large_arena.free(alloc);
return;
}
if (huge_arena.contains(alloc)) {
huge_arena.free(alloc);
return;
}
assert(small_arena.contains(alloc));
small_arena.free(alloc);
} }
// not thread safe: // not thread safe:
...@@ -557,15 +546,34 @@ public: ...@@ -557,15 +546,34 @@ public:
} }
// not thread safe: // not thread safe:
void freeUnmarked(std::vector<Box*>& weakly_referenced) { void freeUnmarked(std::vector<Box*>& weakly_referenced, std::vector<BoxedClass*>& classes_to_free) {
small_arena.freeUnmarked(weakly_referenced); small_arena.freeUnmarked(weakly_referenced, classes_to_free);
large_arena.freeUnmarked(weakly_referenced); large_arena.freeUnmarked(weakly_referenced, classes_to_free);
huge_arena.freeUnmarked(weakly_referenced); huge_arena.freeUnmarked(weakly_referenced, classes_to_free);
} }
void dumpHeapStatistics(int level); void dumpHeapStatistics(int level);
private:
// Internal function that just marks the allocation as being freed, without doing any
// Python-semantics on it.
// Marks an allocation as free in whichever arena owns it, without running any
// Python-level finalization (no weakref callbacks, no destructors) — callers
// such as runCollection() are expected to have handled those semantics already.
// NOTE(review): arena membership is tested by address range via contains();
// an allocation not owned by any of the three arenas is a logic error (assert).
void _setFree(GCAllocation* alloc) {
if (large_arena.contains(alloc)) {
large_arena.free(alloc);
return;
}
if (huge_arena.contains(alloc)) {
huge_arena.free(alloc);
return;
}
// Not large or huge, so it must come from the small-object arena.
assert(small_arena.contains(alloc));
small_arena.free(alloc);
}
friend void markPhase(); friend void markPhase();
friend void runCollection();
}; };
extern Heap global_heap; extern Heap global_heap;
......
# Since gc-related metadata is (currently) placed on classes,
# it's tricky for us if a class object becomes garbage in the same
# collection as some of its instances. If we try to collect the class
# first, we will not have the metadata any more on how to collect the instances.
def f():
    # Each call defines a fresh class object; once f returns, the class is
    # unreachable and becomes garbage along with any per-call allocations.
    class C(object):
        pass
    # Do a bit of looping per call so allocations/collections can interleave
    # while C is still live.
    for _ in xrange(1000):
        pass


# Create many short-lived class objects to make it likely that a class and
# related garbage die in the same collection cycle.
for _ in xrange(1000):
    f()

import gc

# Force a final collection to exercise the class-freeing path directly.
gc.collect()
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment