Commit 8497b615 authored by Kevin Modzelewski's avatar Kevin Modzelewski

Merge pull request #1154 from kmod/refcounting

Trying to reduce allocations in refcounting branch
parents d3b6df9b 8001ad74
......@@ -821,6 +821,7 @@ Box* ASTInterpreter::doOSR(AST_Jump* node) {
OSRExit exit(found_entry);
std::vector<Box*> arg_array;
arg_array.reserve(sorted_symbol_table.size());
for (auto& it : sorted_symbol_table) {
arg_array.push_back(it.second);
}
......@@ -1107,10 +1108,14 @@ Value ASTInterpreter::createFunction(AST* node, AST_arguments* args, const std::
std::vector<Box*> defaults;
llvm::SmallVector<RewriterVar*, 4> defaults_vars;
defaults.reserve(args->defaults.size());
RewriterVar* defaults_var = NULL;
if (jit)
if (jit) {
defaults_var = args->defaults.size() ? jit->allocate(args->defaults.size()) : jit->imm(0ul);
defaults_vars.reserve(args->defaults.size());
}
int i = 0;
for (AST_expr* d : args->defaults) {
Value v = visit_expr(d);
......@@ -1198,6 +1203,7 @@ Value ASTInterpreter::visit_makeFunction(AST_MakeFunction* mkfn) {
AST_arguments* args = node->args;
std::vector<Value> decorators;
decorators.reserve(node->decorator_list.size());
for (AST_expr* d : node->decorator_list)
decorators.push_back(visit_expr(d));
......@@ -1228,6 +1234,7 @@ Value ASTInterpreter::visit_makeClass(AST_MakeClass* mkclass) {
}
std::vector<Box*> decorators;
decorators.reserve(node->decorator_list.size());
for (AST_expr* d : node->decorator_list)
decorators.push_back(visit_expr(d).o);
......@@ -1510,6 +1517,9 @@ Value ASTInterpreter::visit_call(AST_Call* node) {
std::vector<Box*> args;
llvm::SmallVector<RewriterVar*, 8> args_vars;
args.reserve(node->args.size());
args_vars.reserve(node->args.size());
for (AST_expr* e : node->args) {
Value v = visit_expr(e);
args.push_back(v.o);
......
......@@ -591,20 +591,70 @@ public:
Box* operator*() { return impl->getValue(); }
};
// A unique_ptr-like owner that constructs the object inline in a fixed-size
// internal buffer instead of on the heap, avoiding one allocation per use.
// T is the base type handed out to users; the concrete (derived) type is
// chosen at emplace() time and must fit in N bytes.
template <typename T, int N> class SmallUniquePtr {
private:
    char _data[N];
    bool owned;
#ifndef NDEBUG
    // Set once the buffer address escapes via operator T*; moving the owner
    // after that would silently invalidate the outstanding pointer.
    bool address_taken = false;
#endif

    template <typename ConcreteType, typename... Args> SmallUniquePtr(ConcreteType* dummy, Args... args) {
        static_assert(sizeof(ConcreteType) <= N, "SmallUniquePtr not large enough to contain this object");
        new (_data) ConcreteType(std::forward<Args>(args)...);
        owned = true;
    }

public:
    // Constructs a ConcreteType (a subtype of T) in place inside the buffer.
    template <typename ConcreteType, typename... Args> static SmallUniquePtr emplace(Args... args) {
        // std::move avoids a second copy of the by-value args on the way to
        // the private constructor.
        return SmallUniquePtr<T, N>((ConcreteType*)nullptr, std::move(args)...);
    }
    SmallUniquePtr(const SmallUniquePtr&) = delete;
    SmallUniquePtr(SmallUniquePtr&& rhs) { *this = std::move(rhs); }
    void operator=(const SmallUniquePtr&) = delete;
    void operator=(SmallUniquePtr&& rhs) {
        assert(!rhs.address_taken && "Invalid copy after being converted to a pointer");
        // Byte-swap the storage; only safe while the object's address has not
        // been handed out (checked above in debug builds).
        std::swap(_data, rhs._data);
        owned = false;
        std::swap(owned, rhs.owned);
    }
    ~SmallUniquePtr() {
        if (owned) {
            // Destroy through _data, not `this`: `((T*)this)->~T()` was only
            // correct by accident when _data happened to be at offset 0.
            reinterpret_cast<T*>(_data)->~T();
        }
    }
    operator T*() {
#ifndef NDEBUG
        address_taken = true;
#endif
        return reinterpret_cast<T*>(_data);
    }
};
// A custom "range" container that helps manage lifetimes. We need to free the underlying Impl object
// when the range loop is done; previously we had the iterator itself handle this, but that started
// to get complicated since they get copied around, and the management of the begin() and end() iterators
// is slightly different.
// So to simplify, have the range object take care of it.
//
// Note: be careful when explicitly calling begin(). The returned iterator points into this BoxIteratorRange
// object, so once you call begin() it is a bug to move/copy this BoxIteratorRange object (the SmallUniquePtr
// should complain).
class BoxIteratorRange {
private:
std::unique_ptr<BoxIteratorImpl> begin_impl;
// std::unique_ptr<BoxIteratorImpl> begin_impl;
// char _data[32];
typedef SmallUniquePtr<BoxIteratorImpl, 32> UniquePtr;
UniquePtr begin_impl;
BoxIteratorImpl* end_impl;
public:
BoxIteratorRange(std::unique_ptr<BoxIteratorImpl> begin, BoxIteratorImpl* end)
: begin_impl(std::move(begin)), end_impl(end) {}
BoxIterator begin() { return BoxIterator(begin_impl.get()); }
template <typename ImplType, typename T>
BoxIteratorRange(BoxIteratorImpl* end, T&& arg, ImplType* dummy)
: begin_impl(UniquePtr::emplace<ImplType, T>(arg)), end_impl(end) {}
BoxIterator begin() { return BoxIterator(begin_impl); }
BoxIterator end() { return BoxIterator(end_impl); }
int traverse(visitproc visit, void* arg) {
......
......@@ -807,11 +807,15 @@ Box* map(Box* f, BoxedTuple* args) {
std::vector<BoxIterator> args_it;
std::vector<BoxIterator> args_end;
ranges.reserve(args->size());
args_it.reserve(args->size());
args_end.reserve(args->size());
for (auto e : *args) {
auto range = e->pyElements();
args_it.emplace_back(range.begin());
args_end.emplace_back(range.end());
ranges.push_back(std::move(range));
args_it.emplace_back(ranges.back().begin());
args_end.emplace_back(ranges.back().end());
}
assert(args_it.size() == num_iterable);
assert(args_end.size() == num_iterable);
......@@ -1221,11 +1225,13 @@ Box* zip(BoxedTuple* containers) {
return incref(rtn);
std::vector<BoxIteratorRange> ranges;
ranges.reserve(containers->size());
for (auto container : *containers) {
ranges.push_back(std::move(container->pyElements()));
ranges.push_back(container->pyElements());
}
std::vector<BoxIterator> iterators;
iterators.reserve(containers->size());
for (auto&& range : ranges) {
iterators.push_back(std::move(range.begin()));
}
......
......@@ -45,7 +45,7 @@ static std::deque<uint64_t> available_addrs;
#define STACK_REDZONE_SIZE PAGE_SIZE
#define MAX_STACK_SIZE (4 * 1024 * 1024)
// Maps a generator's stack address to its BoxedGenerator.  llvm::DenseMap is
// used instead of std::unordered_map to avoid a node allocation per entry.
// Note: duplicate pre-change declaration (std::unordered_map) removed; the
// rendered diff had left both definitions of s_generator_map in place.
static llvm::DenseMap<void*, BoxedGenerator*> s_generator_map;
static_assert(THREADING_USE_GIL, "have to make the generator map thread safe!");
class RegisterHelper {
......
......@@ -165,23 +165,19 @@ public:
// Returns a BoxIteratorRange over this object's elements, dispatching on the
// object's class.  The concrete iterator impl is constructed inline inside the
// range (via SmallUniquePtr) rather than heap-allocated; the (ImplType*)nullptr
// argument only selects which impl type the range should emplace.
// Note: the rendered diff had left the removed unique_ptr-based returns in
// place ahead of the new ones, making the new returns unreachable; the dead
// pre-change lines are dropped here.
BoxIteratorRange Box::pyElements() {
    if (this->cls == list_cls) {
        using BoxIteratorList = BoxIteratorIndex<BoxedList>;
        BoxIteratorImpl* end = BoxIteratorList::end();
        return BoxIteratorRange(end, (BoxedList*)this, (BoxIteratorList*)nullptr);
    } else if (this->cls == tuple_cls) {
        using BoxIteratorTuple = BoxIteratorIndex<BoxedTuple>;
        BoxIteratorImpl* end = BoxIteratorTuple::end();
        return BoxIteratorRange(end, (BoxedTuple*)this, (BoxIteratorTuple*)nullptr);
    } else if (this->cls == str_cls) {
        using BoxIteratorString = BoxIteratorIndex<BoxedString>;
        BoxIteratorImpl* end = BoxIteratorString::end();
        return BoxIteratorRange(end, (BoxedString*)this, (BoxIteratorString*)nullptr);
    } else {
        // Fallback for arbitrary iterables: generic protocol-based iteration.
        BoxIteratorImpl* end = BoxIteratorGeneric::end();
        return BoxIteratorRange(end, this, (BoxIteratorGeneric*)nullptr);
    }
}
}
......@@ -657,13 +657,13 @@ public:
// Allocates the GCdArray header plus `capacity` Box* slots in one chunk.
// Uses PyObject_Malloc (pymalloc arena) rather than PyMem_MALLOC; the
// rendered diff had left both return statements, the first making the
// second unreachable — the dead pre-change line is dropped here.
void* operator new(size_t size, int capacity) {
    assert(size == sizeof(GCdArray));
    return PyObject_Malloc(capacity * sizeof(Box*) + size);
}
// Must pair with the PyObject_Malloc/PyObject_Realloc used for allocation.
// The rendered diff had left two definitions (PyMem_FREE and PyObject_Free);
// only the post-change PyObject_Free version is kept.
void operator delete(void* p) { PyObject_Free(p); }
// Reallocates the array to hold `capacity` elements, preserving contents.
// PyObject_Realloc matches the PyObject_Malloc in operator new; the dead
// pre-change PyMem_REALLOC return left by the rendered diff is dropped.
static GCdArray* grow(GCdArray* array, int capacity) {
    return (GCdArray*)PyObject_Realloc(array, capacity * sizeof(Box*) + sizeof(GCdArray));
}
};
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment