Commit 7c96b62e authored by Kevin Modzelewski

Merge pull request #874 from rudi-c/redundantvisit

Notion of redundant visits to slowly move towards scanning everything
parents 0abd6919 592f854c
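The idea behind "redundant" visits, as a rough sketch: in a mark-and-sweep collector, a pointer that is already scanned through some other object can safely be skipped, but a moving collector has to observe every reference so it can update the slot when the target object is relocated. The example below illustrates the intended call pattern; the Node type and its fields are hypothetical, not part of this commit:

    // Hypothetical example (assumes #include "gc/gc.h"): an object whose
    // 'key' is also stored, and scanned, in a parent map, so visiting it
    // again here would be redundant for a mark-and-sweep collector.
    struct Node {
        void* key;   // duplicated in the parent's map
        void* value; // only reachable through this Node

        void gc_visit(pyston::gc::GCVisitor* visitor) {
            visitor->visit(value);        // must be marked here
            visitor->visitRedundant(key); // no-op for mark-and-sweep;
                                          // a moving GC would override it
        }
    };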
@@ -318,10 +318,6 @@ GCRootHandle::~GCRootHandle() {
     getRootHandles()->erase(this);
 }
 
-bool GCVisitor::isValid(void* p) {
-    return global_heap.getAllocationFromInteriorPointer(p) != NULL;
-}
-
 void GCVisitor::visit(void* p) {
     if ((uintptr_t)p < SMALL_ARENA_START || (uintptr_t)p >= HUGE_ARENA_START + ARENA_SIZE) {
         ASSERT(!p || isNonheapRoot(p), "%p", p);
......
@@ -15,6 +15,8 @@
 #ifndef PYSTON_GC_COLLECTOR_H
 #define PYSTON_GC_COLLECTOR_H
 
+#include "gc/gc.h"
+
 namespace pyston {
 class Box;
@@ -58,6 +60,16 @@ void invalidateOrderedFinalizerList();
 // assert rather than delaying of the next GC.
 void startGCUnexpectedRegion();
 void endGCUnexpectedRegion();
 
+class GCVisitorNoRedundancy : public GCVisitor {
+public:
+    GCVisitorNoRedundancy(TraceStack* stack) : GCVisitor(stack) {}
+    virtual ~GCVisitorNoRedundancy() {}
+
+    virtual void visitRedundant(void* p) { visit(p); }
+    virtual void visitRedundantRange(void** start, void** end) { visitRange(start, end); }
+    virtual void visitPotentialRedundant(void* p) { visitPotential(p); }
+    virtual void visitPotentialRangeRedundant(void* const* start, void* const* end) {
+        visitPotentialRange(start, end);
+    }
+
+    virtual bool shouldVisitRedundants() { return true; }
+};
 }
 }
......
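A sketch of how GCVisitorNoRedundancy might be used; the helper below and its call site are assumptions for illustration, not part of this diff. Because the class forwards every *Redundant call to the corresponding normal visit method, nothing is skipped, which is useful for checking that edges marked as redundant really are reachable some other way:

    // Hypothetical debug helper, assuming #include "gc/collector.h" plus the
    // runtime headers that define Box/BoxedClass, and that a TraceStack is
    // available at the call site.
    void debugScanEverything(pyston::gc::TraceStack* stack, pyston::Box* b) {
        pyston::gc::GCVisitorNoRedundancy visitor(stack);
        // Assumes the object's class exposes a gc_visit hook of this shape;
        // with GCVisitorNoRedundancy, visitRedundant() behaves like visit().
        b->cls->gc_visit(&visitor, b);
    }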
@@ -16,10 +16,16 @@
 #define PYSTON_GC_GC_H
 
 #include <deque>
+#include <memory>
+#include <stddef.h>
 
 // Files outside of the gc/ folder should only import gc.h or gc_alloc.h
 // which are the "public" memory management interface.
 
+// Some code is only useful towards an effort to implement a moving GC;
+// gate it behind this flag for now.
+#define MOVING_GC 0
+
 #define GC_KEEP_ALIVE(t) asm volatile("" : : "X"(t))
 
 #define TRACE_GC_MARKING 0
@@ -42,11 +48,11 @@ namespace gc {
 class TraceStack;
 
 class GCVisitor {
 private:
-    bool isValid(void* p);
+    TraceStack* stack;
 
 public:
-    TraceStack* stack;
-
     GCVisitor(TraceStack* stack) : stack(stack) {}
+    virtual ~GCVisitor() {}
 
     // These all work on *user* pointers, ie pointers to the user_data section of GCAllocations
     void visitIf(void* p) {
@@ -57,6 +63,20 @@ public:
     void visitRange(void* const* start, void* const* end);
     void visitPotential(void* p);
     void visitPotentialRange(void* const* start, void* const* end);
 
+    // Some objects have fields with pointers to Pyston heap objects that we are confident are
+    // already being scanned elsewhere.
+    //
+    // In a mark-and-sweep collector, scanning those fields again would be redundant, since the
+    // mark phase only needs to visit each object once, so doing so would be a performance hit.
+    //
+    // In a moving collector, every reference needs to be visited, since the pointer value could
+    // change. We don't have a moving collector yet, but it's good practice to call visit on every
+    // pointer value, so these hooks exist; for now they are no-ops to avoid the performance hit
+    // in the mark-and-sweep case.
+    virtual void visitRedundant(void* p) {}
+    virtual void visitRedundantRange(void** start, void** end) {}
+    virtual void visitPotentialRedundant(void* p) {}
+    virtual void visitPotentialRangeRedundant(void* const* start, void* const* end) {}
 };
 
 enum class GCKind : uint8_t {
......
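To recap the visitor entry points that gc.h now exposes, here is a minimal sketch of a gc_visit implementation exercising each of them; the Example type and its fields are made up for illustration:

    // Hypothetical, assumes #include "gc/gc.h" (which pulls in stddef.h).
    struct Example {
        void* obj;    // known GC pointer
        void* maybe;  // word that may or may not point into the GC heap
        void** elts;  // array of n known GC pointers
        size_t n;
        void* cached; // also stored in, and scanned via, another object
    };

    void gc_visit_example(pyston::gc::GCVisitor* v, Example* e) {
        v->visit(e->obj);                       // definitely mark
        v->visitPotential(e->maybe);            // conservative check first
        v->visitRange(e->elts, e->elts + e->n); // contiguous run of pointers
        v->visitRedundant(e->cached);           // no-op until a moving GC needs it
    }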
@@ -32,13 +32,23 @@ void HiddenClass::gc_visit(GCVisitor* visitor) {
     visitor->visitRange((void* const*)&children.vector()[0], (void* const*)&children.vector()[children.size()]);
     visitor->visit(attrwrapper_child);
 
-    // We don't need to visit the keys of the 'children' map, since the children should have those as entries
-    // in the attr_offsets map.
-    // Also, if we have any children, we can skip scanning our attr_offsets map, since it will be a subset
-    // of our child's map.
-    if (children.empty())
+    if (children.empty()) {
         for (auto p : attr_offsets)
             visitor->visit(p.first);
+    } else {
+#if MOVING_GC
+        // If we have any children, our attr_offsets map will be a subset of each child's map.
+        for (const auto& p : attr_offsets)
+            visitor->visitRedundant(p.first);
+#endif
+    }
+
+#if MOVING_GC
+    // The children should have the keys of the 'children' map as entries in their own
+    // attr_offsets maps.
+    for (const auto& p : children) {
+        visitor->visitRedundant(p.first);
+    }
+#endif
 }
 
 void HiddenClass::appendAttribute(BoxedString* attr) {
......