Commit 97d43471 authored by Kevin Modzelewski

Switch the attributes array to be a PRECISE array

Previously it was UNTRACKED and the object's gcHandler would scan
it if necessary.  This worked OK most of the time, but caused issues
when we subclass from an extension class: the memory for those classes
is allocated conservatively, which meant that no gc handler was
called.

One potential approach would be to allocate extension objects as PYTHON
allocations but with a gcHandler that both scans conservatively and also
knows about the attrs array.  For now, though, it seems easier to allocate the
attrs array as a PRECISE array (which means the collector scans it directly)
rather than UNTRACKED, since it automatically gets picked up; this also gives
better gc safety in general and removes one of the odd gc cases.
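
As a rough sketch of the distinction (a simplified, hand-written model, not the
actual definitions in the gc headers; the scanPrecise helper and the enum layout
below are illustrative only): a PRECISE allocation is one whose payload the mark
phase itself walks as an array of pointers, while an UNTRACKED allocation is
invisible to the collector unless some owning object's gcHandler visits its
contents.

```cpp
// Simplified model of the GC-kind distinction; names and layout are
// illustrative only, not the real heap definitions.
#include <cstdint>
#include <vector>

enum class GCKind : uint8_t { UNTRACKED, CONSERVATIVE, PRECISE, PYTHON };

struct GCAllocation {
    GCKind kind_id;
    uint32_t kind_data; // for PRECISE/CONSERVATIVE: payload size in bytes
};

// During the mark phase, every pointer-sized slot of a PRECISE allocation's
// payload gets pushed onto the mark stack, no matter who owns the array:
void scanPrecise(GCAllocation* al, void* payload, std::vector<void*>& mark_stack) {
    void** start = (void**)payload;
    void** end = (void**)((char*)payload + al->kind_data);
    for (void** cur = start; cur < end; cur++) {
        if (*cur)
            mark_stack.push_back(*cur);
    }
}

// An UNTRACKED allocation, by contrast, contributes nothing here; its contents
// stay alive only if some other object's gcHandler happens to visit them --
// which is exactly what didn't happen for conservatively-allocated
// extension-class subclasses.
```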

I didn't realize that we weren't using PRECISE at all and that it wasn't
completely working, so this change also required setting kind_data for
PRECISE allocations and fixing the way we keep that value updated during
gc_realloc.
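
The kind_data bookkeeping works roughly like this (a minimal sketch of the
rounding gc_realloc now does in the diff below; roundedKindData is a
hypothetical helper, not a function in the tree): the payload size is rounded
up to a whole number of pointer widths so the mark phase can scan the
allocation as an integer number of void* slots.

```cpp
#include <cassert>
#include <cstdint>
#include <cstdio>

// Hypothetical helper mirroring the rounding done by gc_realloc below:
// round the payload size up to a multiple of sizeof(void*) so the
// allocation can be scanned as a whole number of pointer slots.
static uint32_t roundedKindData(size_t bytes) {
    size_t rounded = (bytes + sizeof(void*) - 1) & ~(sizeof(void*) - 1);
    assert(rounded < (1UL << 31)); // kind_data is a 32-bit field
    return (uint32_t)rounded;
}

int main() {
    // On a 64-bit build, a 20-byte payload rounds up to 24 bytes,
    // i.e. three precisely-scanned pointer slots.
    printf("%u\n", roundedKindData(20));
    return 0;
}
```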
parent 00b4c8ec
@@ -190,7 +190,7 @@ void GCVisitor::visit(void* p) {
     if (isNonheapRoot(p)) {
         return;
     } else {
-        assert(global_heap.getAllocationFromInteriorPointer(p)->user_data == p);
+        ASSERT(global_heap.getAllocationFromInteriorPointer(p)->user_data == p, "%p", p);
         stack->push(p);
     }
 }
...
@@ -272,12 +272,18 @@ void markPhase() {
             if (DEBUG >= 2) {
                 if (global_heap.small_arena.contains(p)) {
                     SmallArena::Block* b = SmallArena::Block::forPointer(p);
-                    assert(b->size >= bytes);
+                    assert(b->size >= bytes + sizeof(GCAllocation));
                 }
             }
             visitor.visitPotentialRange((void**)p, (void**)((char*)p + bytes));
         } else if (kind_id == GCKind::PRECISE) {
             uint32_t bytes = al->kind_data;
+            if (DEBUG >= 2) {
+                if (global_heap.small_arena.contains(p)) {
+                    SmallArena::Block* b = SmallArena::Block::forPointer(p);
+                    assert(b->size >= bytes + sizeof(GCAllocation));
+                }
+            }
             visitor.visitRange((void**)p, (void**)((char*)p + bytes));
         } else if (kind_id == GCKind::PYTHON) {
             Box* b = reinterpret_cast<Box*>(p);
...
@@ -52,7 +52,7 @@ extern "C" inline void* gc_alloc(size_t bytes, GCKind kind_id) {
     alloc->kind_id = kind_id;
     alloc->gc_flags = 0;

-    if (kind_id == GCKind::CONSERVATIVE) {
+    if (kind_id == GCKind::CONSERVATIVE || kind_id == GCKind::PRECISE) {
         // Round the size up to the nearest multiple of the pointer width, so that
         // we have an integer number of pointers to scan.
         // TODO We can probably this better; we could round down when we scan, or even
...
@@ -99,24 +99,36 @@ extern "C" inline void* gc_realloc(void* ptr, size_t bytes) {
     // Normal realloc() supports receiving a NULL pointer, but we need to know what the GCKind is:
     assert(ptr);

-    bytes += sizeof(GCAllocation);
+    size_t alloc_bytes = bytes + sizeof(GCAllocation);

-#ifndef NVALGRIND
+    GCAllocation* alloc;
     void* rtn;
+
+#ifndef NVALGRIND
     if (ENABLE_REDZONES) {
         void* base = (char*)ptr - REDZONE_SIZE;
-        void* rtn_base = global_heap.realloc(GCAllocation::fromUserData(base), bytes + 2 * REDZONE_SIZE)->user_data;
+        alloc = global_heap.realloc(GCAllocation::fromUserData(base), alloc_bytes + 2 * REDZONE_SIZE);
+        void* rtn_base = alloc->user_data;
         rtn = (char*)rtn_base + REDZONE_SIZE;
     } else {
-        rtn = global_heap.realloc(GCAllocation::fromUserData(ptr), bytes)->user_data;
+        alloc = global_heap.realloc(GCAllocation::fromUserData(ptr), alloc_bytes);
+        rtn = alloc->user_data;
     }

     VALGRIND_FREELIKE_BLOCK(ptr, REDZONE_SIZE);
-    VALGRIND_MALLOCLIKE_BLOCK(rtn, bytes, REDZONE_SIZE, true);
-    return rtn;
+    VALGRIND_MALLOCLIKE_BLOCK(rtn, alloc_bytes, REDZONE_SIZE, true);
 #else
-    return global_heap.realloc(GCAllocation::fromUserData(ptr), bytes)->user_data;
+    alloc = global_heap.realloc(GCAllocation::fromUserData(ptr), alloc_bytes);
+    rtn = alloc->user_data;
 #endif
+
+    if (alloc->kind_id == GCKind::CONSERVATIVE || alloc->kind_id == GCKind::PRECISE) {
+        bytes = (bytes + sizeof(void*) - 1) & (~(sizeof(void*) - 1));
+        assert(bytes < (1 << 31));
+        alloc->kind_data = bytes;
+    }
+
+    return rtn;
 }

 extern "C" inline void gc_free(void* ptr) {
...
@@ -484,13 +484,6 @@ public:
             rtn = small_arena.realloc(alloc, bytes);
         }

-        // We keep track of the size of conservative objects in the "kind_data" field,
-        // so with a realloc we have to update that:
-        if (rtn->kind_id == GCKind::CONSERVATIVE) {
-            // Round up to a multiple of sizeof(void*):
-            rtn->kind_data = (bytes + sizeof(void*) - 1) & (~(sizeof(void*) - 1));
-        }
-
         return rtn;
     }
...
@@ -724,11 +724,10 @@ void Box::setattr(const std::string& attr, Box* val, SetattrRewriteArgs* rewrite_args) {
         RewriterVar* r_new_array2 = NULL;
         int new_size = sizeof(HCAttrs::AttrList) + sizeof(Box*) * (numattrs + 1);
         if (numattrs == 0) {
-            attrs->attr_list = (HCAttrs::AttrList*)gc_alloc(new_size, gc::GCKind::UNTRACKED);
+            attrs->attr_list = (HCAttrs::AttrList*)gc_alloc(new_size, gc::GCKind::PRECISE);
             if (rewrite_args) {
                 RewriterVar* r_newsize = rewrite_args->rewriter->loadConst(new_size, Location::forArg(0));
-                RewriterVar* r_kind
-                    = rewrite_args->rewriter->loadConst((int)gc::GCKind::UNTRACKED, Location::forArg(1));
+                RewriterVar* r_kind = rewrite_args->rewriter->loadConst((int)gc::GCKind::PRECISE, Location::forArg(1));
                 r_new_array2 = rewrite_args->rewriter->call(true, (void*)gc::gc_alloc, r_newsize, r_kind);
             }
         } else {
...
@@ -2231,6 +2230,11 @@ extern "C" void dump(void* p) {
         return;
     }

+    if (al->kind_id == gc::GCKind::PRECISE) {
+        printf("precise gc array\n");
+        return;
+    }
+
     if (al->kind_id == gc::GCKind::CONSERVATIVE) {
         printf("conservatively-scanned object object\n");
         return;
...
@@ -410,24 +410,8 @@ extern "C" void boxGCHandler(GCVisitor* v, Box* b) {
         HCAttrs* attrs = b->getHCAttrsPtr();
         v->visit(attrs->hcls);

-        switch (attrs->hcls->type) {
-            case HiddenClass::NORMAL: {
-                int nattrs = attrs->hcls->getAttrOffsets().size();
-                if (nattrs) {
-                    HCAttrs::AttrList* attr_list = attrs->attr_list;
-                    assert(attr_list);
-                    v->visit(attr_list);
-                    v->visitRange((void**)&attr_list->attrs[0], (void**)&attr_list->attrs[nattrs]);
-                }
-                break;
-            }
-            case HiddenClass::DICT_BACKED: {
-                HCAttrs::AttrList* attr_list = attrs->attr_list;
-                v->visit(attrs->attr_list);
-                v->visit(attrs->attr_list->attrs[0]);
-                break;
-            }
-        }
+        if (attrs->attr_list)
+            v->visit(attrs->attr_list);
     }

     if (b->cls->instancesHaveDictAttrs()) {
@@ -480,7 +464,7 @@ static void typeSetDict(Box* obj, Box* val, void* context) {
     RELEASE_ASSERT(val->cls == dict_cls || val->cls == attrwrapper_cls, "");

     auto new_attr_list
-        = (HCAttrs::AttrList*)gc_alloc(sizeof(HCAttrs::AttrList) + sizeof(Box*), gc::GCKind::UNTRACKED);
+        = (HCAttrs::AttrList*)gc_alloc(sizeof(HCAttrs::AttrList) + sizeof(Box*), gc::GCKind::PRECISE);
     new_attr_list->attrs[0] = val;

     HCAttrs* hcattrs = obj->getHCAttrsPtr();
...