Commit 9cd8e75e authored by Marius Wachtler, committed by GitHub

Merge pull request #1377 from undingen/new_bst4

BST: convert all nodes to directly operate on vregs instead of names
parents 4e10a4f0 c0a273ff
......@@ -55,7 +55,7 @@ private:
VRegMap<Status> statuses;
LivenessAnalysis* analysis;
void _doLoad(int vreg, BST_Name* node) {
void _doLoad(int vreg) {
Status& status = statuses[vreg];
status.addUsage(Status::USED);
}
......@@ -70,50 +70,28 @@ private:
public:
LivenessBBVisitor(LivenessAnalysis* analysis)
: statuses(analysis->cfg->getVRegInfo().getTotalNumOfVRegs()), analysis(analysis) {}
: NoopBSTVisitor(true /* skip child CFG nodes */),
statuses(analysis->cfg->getVRegInfo().getTotalNumOfVRegs()),
analysis(analysis) {}
bool firstIsUse(int vreg) const { return getStatusFirst(vreg) == Status::USED; }
bool firstIsDef(int vreg) const { return getStatusFirst(vreg) == Status::DEFINED; }
bool isKilledAt(BST_Name* node, bool is_live_at_end) { return node->is_kill; }
bool visit_classdef(BST_ClassDef* node) {
for (auto e : node->bases)
e->accept(this);
for (auto e : node->decorator_list)
e->accept(this);
return true;
bool visit_vreg(int* vreg, bool is_dst) override {
if (*vreg >= 0) {
if (is_dst)
_doStore(*vreg);
else
_doLoad(*vreg);
}
bool visit_functiondef(BST_FunctionDef* node) {
for (auto* d : node->decorator_list)
d->accept(this);
for (auto* d : node->args->defaults)
d->accept(this);
return true;
}
bool visit_name(BST_Name* node) {
if (node->vreg == -1)
return true;
if (node->ctx_type == AST_TYPE::Load)
_doLoad(node->vreg, node);
else if (node->ctx_type == AST_TYPE::Del) {
// Hack: we don't have a bytecode for temporary-kills:
if (node->vreg >= analysis->cfg->getVRegInfo().getNumOfUserVisibleVRegs())
bool visit_deletename(BST_DeleteName* node) override {
if (node->vreg < 0 || node->vreg >= analysis->cfg->getVRegInfo().getNumOfUserVisibleVRegs())
return true;
_doLoad(node->vreg, node);
_doLoad(node->vreg);
_doStore(node->vreg);
} else if (node->ctx_type == AST_TYPE::Store || node->ctx_type == AST_TYPE::Param)
_doStore(node->vreg);
else {
ASSERT(0, "%d", node->ctx_type);
abort();
}
return true;
}
};
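The interesting change in this hunk is structural: instead of one visit_* override per node kind, every node now reports each operand through a single visit_vreg(vreg, is_dst) hook. A minimal standalone sketch of that shape, using stand-in types rather than the real Pyston visitor and CFG classes:

```cpp
// Simplified sketch of a visit_vreg-based liveness visitor; the types here are
// stand-ins, not the actual Pyston LivenessBBVisitor/Status classes.
#include <cstdio>
#include <vector>

enum class Status { Unseen, Used, Defined };

struct LivenessSketch {
    std::vector<Status> statuses; // first event observed per vreg in the block

    explicit LivenessSketch(int num_vregs) : statuses(num_vregs, Status::Unseen) {}

    // Every node reports each operand through this single hook instead of
    // having one visit_* method per node kind.
    bool visit_vreg(int* vreg, bool is_dst) {
        if (*vreg < 0) // undefined vreg: operand is not vreg-allocated
            return false;
        if (statuses[*vreg] == Status::Unseen)
            statuses[*vreg] = is_dst ? Status::Defined : Status::Used;
        return false;
    }

    bool firstIsUse(int vreg) const { return statuses[vreg] == Status::Used; }
    bool firstIsDef(int vreg) const { return statuses[vreg] == Status::Defined; }
};

int main() {
    LivenessSketch v(4);
    int src = 1, dst = 2;
    v.visit_vreg(&src, /*is_dst=*/false); // a load of vreg 1
    v.visit_vreg(&dst, /*is_dst=*/true);  // a store to vreg 2
    std::printf("use(1)=%d def(2)=%d\n", v.firstIsUse(1), v.firstIsDef(2)); // 1 1
}
```

The real visitor additionally skips child CFG nodes and keeps the temporary-kill special case shown in visit_deletename above.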
......@@ -136,13 +114,6 @@ LivenessAnalysis::LivenessAnalysis(CFG* cfg) : cfg(cfg), result_cache(cfg->getVR
LivenessAnalysis::~LivenessAnalysis() {
}
bool LivenessAnalysis::isKill(BST_Name* node, CFGBlock* parent_block) {
if (node->id.s()[0] != '#')
return false;
return liveness_cache[parent_block]->isKilledAt(node, isLiveAtEnd(node->vreg, parent_block));
}
bool LivenessAnalysis::isLiveAtEnd(int vreg, CFGBlock* block) {
// Is a user-visible name, always live:
if (vreg < block->cfg->getVRegInfo().getNumOfUserVisibleVRegs())
......@@ -228,102 +199,55 @@ public:
virtual void processBB(Map& starting, CFGBlock* block) const;
};
class DefinednessVisitor : public BSTVisitor {
class DefinednessVisitor : public NoopBSTVisitor {
private:
typedef DefinednessBBAnalyzer::Map Map;
Map& state;
void _doSet(int vreg) {
if (vreg == VREG_UNDEFINED)
return;
assert(vreg >= 0 && vreg < state.numVregs());
state[vreg] = DefinednessAnalysis::Defined;
}
void _doSet(BST* t) {
switch (t->type) {
case BST_TYPE::Attribute:
// doesn't affect definedness (yet?)
break;
case BST_TYPE::Name: {
auto name = bst_cast<BST_Name>(t);
if (name->lookup_type == ScopeInfo::VarScopeType::FAST
|| name->lookup_type == ScopeInfo::VarScopeType::CLOSURE) {
assert(name->vreg != -1);
_doSet(name->vreg);
} else if (name->lookup_type == ScopeInfo::VarScopeType::GLOBAL
|| name->lookup_type == ScopeInfo::VarScopeType::NAME) {
assert(name->vreg == -1);
// skip
} else {
RELEASE_ASSERT(0, "%d", static_cast<int>(name->lookup_type));
}
break;
}
case BST_TYPE::Subscript:
break;
case BST_TYPE::Tuple: {
BST_Tuple* tt = bst_cast<BST_Tuple>(t);
for (int i = 0; i < tt->elts.size(); i++) {
_doSet(tt->elts[i]);
}
break;
}
default:
ASSERT(0, "Unknown type for DefinednessVisitor: %d", t->type);
}
}
public:
DefinednessVisitor(Map& state) : state(state) {}
virtual bool visit_assert(BST_Assert* node) { return true; }
virtual bool visit_branch(BST_Branch* node) { return true; }
virtual bool visit_expr(BST_Expr* node) { return true; }
virtual bool visit_invoke(BST_Invoke* node) { return false; }
virtual bool visit_jump(BST_Jump* node) { return true; }
virtual bool visit_print(BST_Print* node) { return true; }
virtual bool visit_raise(BST_Raise* node) { return true; }
virtual bool visit_return(BST_Return* node) { return true; }
virtual bool visit_delete(BST_Delete* node) {
auto t = node->target;
if (t->type == BST_TYPE::Name) {
BST_Name* name = bst_cast<BST_Name>(t);
if (name->lookup_type != ScopeInfo::VarScopeType::GLOBAL
&& name->lookup_type != ScopeInfo::VarScopeType::NAME) {
assert(name->vreg != -1);
state[name->vreg] = DefinednessAnalysis::Undefined;
} else
assert(name->vreg == -1);
} else {
// The CFG pass should reduce all deletes to the "basic" deletes on names/attributes/subscripts.
// If not, probably the best way to do this would be to just do a full BST traversal
// and look for BST_Name's with a ctx of Del
assert(t->type == BST_TYPE::Attribute || t->type == BST_TYPE::Subscript);
}
return true;
DefinednessVisitor(Map& state) : NoopBSTVisitor(true /* skip child CFG nodes */), state(state) {}
bool visit_vreg(int* vreg, bool is_dest) override {
if (*vreg < 0)
return false;
if (is_dest)
state[*vreg] = DefinednessAnalysis::Defined;
else
state[*vreg] = DefinednessAnalysis::Undefined;
return false;
}
virtual bool visit_classdef(BST_ClassDef* node) {
assert(0 && "I think this isn't needed");
//_doSet(node->name);
bool visit_deletename(BST_DeleteName* node) override {
if (node->lookup_type != ScopeInfo::VarScopeType::GLOBAL
&& node->lookup_type != ScopeInfo::VarScopeType::NAME) {
assert(node->vreg >= 0);
state[node->vreg] = DefinednessAnalysis::Undefined;
} else
assert(node->vreg == VREG_UNDEFINED);
return true;
}
virtual bool visit_functiondef(BST_FunctionDef* node) {
assert(0 && "I think this isn't needed");
//_doSet(node->name);
bool visit_copyvreg(BST_CopyVReg* node) override {
// don't visit the vreg; it will never get killed
// visit_vreg(&node->vreg_src, false);
_doSet(node->vreg_dst);
return true;
}
virtual bool visit_assign(BST_Assign* node) {
_doSet(node->target);
bool visit_loadname(BST_LoadName* node) override {
// don't visit the vreg; it will never get killed
// visit_vreg(&node->vreg, false);
_doSet(node->vreg_dst);
return true;
}
virtual bool visit_arguments(BST_arguments* node) { RELEASE_ASSERT(0, "this shouldn't get hit"); }
virtual bool visit_exec(BST_Exec* node) { return true; }
friend class DefinednessBBAnalyzer;
};
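For definedness the same hook carries the whole transfer function: writing a vreg defines it, reading a temporary consumes it, which is why BST_CopyVReg and BST_LoadName deliberately do not report their source operand. A hedged sketch of that rule with placeholder types:

```cpp
// Simplified sketch of the per-statement definedness transfer function in the
// vreg world; DefState and this visitor are stand-ins, not the Pyston types.
#include <cassert>
#include <vector>

enum class DefState { Undefined, Defined };

struct DefinednessSketch {
    std::vector<DefState>& state; // one entry per vreg

    explicit DefinednessSketch(std::vector<DefState>& state) : state(state) {}

    // Generic rule: a destination vreg becomes Defined, a source (temporary)
    // vreg is consumed and becomes Undefined. Nodes whose source must stay
    // live (e.g. a copy or load) simply don't report it through this hook.
    bool visit_vreg(int* vreg, bool is_dest) {
        if (*vreg < 0) // not vreg-allocated
            return false;
        state[*vreg] = is_dest ? DefState::Defined : DefState::Undefined;
        return false;
    }
};

int main() {
    std::vector<DefState> state(3, DefState::Undefined);
    DefinednessSketch v(state);
    int t0 = 0;
    v.visit_vreg(&t0, /*is_dest=*/true);  // "%0 = ..." -> defined
    assert(state[0] == DefState::Defined);
    v.visit_vreg(&t0, /*is_dest=*/false); // "... = %0" -> temporary consumed
    assert(state[0] == DefState::Undefined);
}
```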
......@@ -460,11 +384,13 @@ const VRegSet& PhiAnalysis::getAllRequiredFor(CFGBlock* block) {
}
bool PhiAnalysis::isRequired(int vreg, CFGBlock* block) {
assert(vreg >= 0);
assert(required_phis.count(block));
return required_phis.find(block)->second[vreg];
}
bool PhiAnalysis::isRequiredAfter(int vreg, CFGBlock* block) {
assert(vreg >= 0);
// If there are multiple successors, then none of them are allowed
// to require any phi nodes
if (block->successors.size() != 1)
......@@ -475,6 +401,7 @@ bool PhiAnalysis::isRequiredAfter(int vreg, CFGBlock* block) {
}
bool PhiAnalysis::isPotentiallyUndefinedAfter(int vreg, CFGBlock* block) {
assert(vreg >= 0);
for (auto b : block->successors) {
if (isPotentiallyUndefinedAt(vreg, b))
return true;
......@@ -483,6 +410,7 @@ bool PhiAnalysis::isPotentiallyUndefinedAfter(int vreg, CFGBlock* block) {
}
bool PhiAnalysis::isPotentiallyUndefinedAt(int vreg, CFGBlock* block) {
assert(vreg >= 0);
assert(definedness.defined_at_beginning.count(block));
return definedness.defined_at_beginning.find(block)->second[vreg] != DefinednessAnalysis::Defined;
}
......
......@@ -26,9 +26,7 @@
namespace pyston {
class BST_arguments;
class BST_Jump;
class BST_Name;
class CFG;
class CFGBlock;
class LivenessBBVisitor;
......@@ -47,9 +45,6 @@ public:
LivenessAnalysis(CFG* cfg);
~LivenessAnalysis();
// we don't keep track of node->parent_block relationships, so you have to pass both:
bool isKill(BST_Name* node, CFGBlock* parent_block);
bool isLiveAtEnd(int vreg, CFGBlock* block);
};
......
......@@ -36,16 +36,11 @@ ScopingResults::ScopingResults(ScopeInfo* scope_info, bool globals_from_module)
deref_info = scope_info->getAllDerefVarsAndInfo();
}
DerefInfo ScopingResults::getDerefInfo(BST_Name* node) const {
DerefInfo ScopingResults::getDerefInfo(BST_LoadName* node) const {
assert(node->lookup_type == ScopeInfo::VarScopeType::DEREF);
assert(node->deref_info.offset != INT_MAX);
return node->deref_info;
}
size_t ScopingResults::getClosureOffset(BST_Name* node) const {
assert(node->lookup_type == ScopeInfo::VarScopeType::CLOSURE);
assert(node->closure_offset != -1);
return node->closure_offset;
}
class YieldVisitor : public NoopASTVisitor {
public:
......
This diff is collapsed.
......@@ -24,9 +24,9 @@
namespace pyston {
class CFGBlock;
class ConstantVRegInfo;
class BoxedClass;
class BST_expr;
class BST_slice;
class BST_stmt_with_dest;
class OSREntryDescriptor;
class TypeAnalysis {
......@@ -40,15 +40,14 @@ public:
virtual ConcreteCompilerType* getTypeAtBlockStart(int vreg, CFGBlock* block) = 0;
virtual ConcreteCompilerType* getTypeAtBlockEnd(int vreg, CFGBlock* block) = 0;
virtual BoxedClass* speculatedExprClass(BST_expr*) = 0;
virtual BoxedClass* speculatedExprClass(BST_slice*) = 0;
virtual BoxedClass* speculatedExprClass(BST_stmt_with_dest*) = 0;
};
TypeAnalysis* doTypeAnalysis(CFG* cfg, const ParamNames& param_names,
const std::vector<ConcreteCompilerType*>& arg_types, EffortLevel effort,
TypeAnalysis::SpeculationLevel speculation);
TypeAnalysis::SpeculationLevel speculation, const ConstantVRegInfo& constant_vregs);
TypeAnalysis* doTypeAnalysis(const OSREntryDescriptor* entry_descriptor, EffortLevel effort,
TypeAnalysis::SpeculationLevel speculation);
TypeAnalysis::SpeculationLevel speculation, const ConstantVRegInfo& constant_vregs);
}
#endif
......@@ -341,7 +341,7 @@ ICSlotInfo* ICInfo::pickEntryForRewrite(const char* debug_name) {
}
static llvm::DenseMap<void*, ICInfo*> ics_by_return_addr;
static llvm::DenseMap<BST*, ICInfo*> ics_by_ast_node;
static llvm::DenseMap<BST_stmt*, ICInfo*> ics_by_ast_node;
ICInfo::ICInfo(void* start_addr, void* slowpath_rtn_addr, void* continue_addr, StackInfo stack_info, int size,
llvm::CallingConv::ID calling_conv, LiveOutSet _live_outs, assembler::GenericRegister return_register,
......@@ -485,13 +485,13 @@ bool ICInfo::isMegamorphic() {
return times_rewritten >= IC_MEGAMORPHIC_THRESHOLD;
}
ICInfo* ICInfo::getICInfoForNode(BST* node) {
ICInfo* ICInfo::getICInfoForNode(BST_stmt* node) {
auto&& it = ics_by_ast_node.find(node);
if (it != ics_by_ast_node.end())
return it->second;
return NULL;
}
void ICInfo::associateNodeWithICInfo(BST* node, std::unique_ptr<TypeRecorder> type_recorder) {
void ICInfo::associateNodeWithICInfo(BST_stmt* node, std::unique_ptr<TypeRecorder> type_recorder) {
assert(!this->node);
this->node = node;
this->type_recorder = std::move(type_recorder);
......
......@@ -105,7 +105,7 @@ private:
std::vector<Location> ic_global_decref_locations;
// associated BST node for this IC
BST* node;
BST_stmt* node;
// for ICSlotRewrite:
ICSlotInfo* pickEntryForRewrite(const char* debug_name);
......@@ -145,8 +145,8 @@ public:
friend class ICSlotRewrite;
static ICInfo* getICInfoForNode(BST* node);
void associateNodeWithICInfo(BST* node, std::unique_ptr<TypeRecorder> type_recorder);
static ICInfo* getICInfoForNode(BST_stmt* node);
void associateNodeWithICInfo(BST_stmt* node, std::unique_ptr<TypeRecorder> type_recorder);
void appendDecrefInfosTo(std::vector<DecrefInfo>& dest_decref_infos);
};
......
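Both the registry and its accessors now key on BST_stmt* instead of the removed BST base type. The underlying idiom is a pointer-keyed side table; the sketch below uses std::unordered_map as a stand-in for llvm::DenseMap and placeholder Stmt/ICInfo types, with free functions instead of the real member functions:

```cpp
// Sketch of a statement-keyed IC registry; std::unordered_map stands in for
// llvm::DenseMap, and Stmt/ICInfo are placeholders, not the Pyston classes.
#include <cassert>
#include <unordered_map>

struct Stmt {};
struct ICInfo { const Stmt* node = nullptr; };

static std::unordered_map<const Stmt*, ICInfo*> ics_by_node;

ICInfo* getICInfoForNode(const Stmt* node) {
    auto it = ics_by_node.find(node);
    return it != ics_by_node.end() ? it->second : nullptr;
}

void associateNodeWithICInfo(ICInfo* ic, const Stmt* node) {
    assert(!ic->node); // each IC is tied to at most one statement
    ic->node = node;
    ics_by_node[node] = ic;
}

int main() {
    Stmt s;
    ICInfo ic;
    associateNodeWithICInfo(&ic, &s);
    assert(getICInfoForNode(&s) == &ic);
}
```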
This diff is collapsed.
......@@ -23,7 +23,6 @@ namespace gc {
class GCVisitor;
}
class BST_expr;
class BST_stmt;
class BST_Jump;
class Box;
......@@ -46,11 +45,11 @@ struct ASTInterpreterJitInterface {
static int getGlobalsOffset();
static void delNameHelper(void* _interpreter, InternedString name);
static Box* derefHelper(void* interp, BST_Name* node);
static Box* derefHelper(void* interp, BST_LoadName* node);
static Box* landingpadHelper(void* interp);
static void pendingCallsCheckHelper();
static void setExcInfoHelper(void* interp, STOLEN(Box*) type, STOLEN(Box*) value, STOLEN(Box*) traceback);
static void setLocalClosureHelper(void* interp, BST_Name* name, Box* v);
static void setLocalClosureHelper(void* interp, int vreg, int closure_offset, Box* v);
static void uncacheExcInfoHelper(void* interp);
static void raise0Helper(void* interp) __attribute__((noreturn));
static Box* yieldHelper(void* interp, STOLEN(Box*) value);
......@@ -79,7 +78,7 @@ Box* astInterpretFunction(BoxedCode* f, Box* closure, Box* generator, Box* globa
Box** args);
Box* astInterpretFunctionEval(BoxedCode* cf, Box* globals, Box* boxedLocals);
// this function is implemented in the src/codegen/ast_interpreter_exec.S assembler file
extern "C" Box* astInterpretDeopt(BoxedCode* cf, BST_expr* after_expr, BST_stmt* enclosing_stmt, Box* expr_val,
extern "C" Box* astInterpretDeopt(BoxedCode* cf, BST_stmt* enclosing_stmt, Box* expr_val,
STOLEN(FrameStackState) frame_state);
struct FrameInfo;
......
......@@ -43,7 +43,7 @@ executeInnerAndSetupFrame:
// Our unwinder must be able to detect deopt frames, and by writing this wrapper in assembler we can be sure to correctly
// detect the frame independently of compiler optimizations, because this function will always appear in the call stack.
//
// Box* astInterpretDeopt(FunctionMetadata* cf, AST_expr* after_expr, AST_stmt* enclosing_stmt, Box* expr_val,
// Box* astInterpretDeopt(FunctionMetadata* cf, AST_stmt* enclosing_stmt, Box* expr_val,
// FrameStackState frame_state);
.text
.globl astInterpretDeopt
......
This diff is collapsed.
......@@ -231,7 +231,7 @@ private:
JitCodeBlock& code_block;
RewriterVar* interp;
RewriterVar* vregs_array;
llvm::DenseMap<InternedString, RewriterVar*> local_syms;
llvm::DenseMap<int /*vreg*/, RewriterVar*> local_syms;
// keeps track of which non-block-local vregs are known to have a non-NULL value
llvm::DenseSet<int> known_non_null_vregs;
......@@ -249,7 +249,7 @@ private:
uint8_t* end_addr;
std::unique_ptr<ICSetupInfo> ic;
StackInfo stack_info;
BST* node;
BST_stmt* node;
std::vector<Location> decref_infos;
std::unique_ptr<TypeRecorder> type_recorder;
};
......@@ -266,39 +266,41 @@ public:
RewriterVar* imm(uint64_t val);
RewriterVar* imm(void* val);
RewriterVar* emitAugbinop(BST_expr* node, RewriterVar* lhs, RewriterVar* rhs, int op_type);
RewriterVar* emitAugbinop(BST_stmt* node, RewriterVar* lhs, RewriterVar* rhs, int op_type);
RewriterVar* emitApplySlice(RewriterVar* target, RewriterVar* lower, RewriterVar* upper);
RewriterVar* emitBinop(BST_expr* node, RewriterVar* lhs, RewriterVar* rhs, int op_type);
RewriterVar* emitCallattr(BST_expr* node, RewriterVar* obj, BoxedString* attr, CallattrFlags flags,
RewriterVar* emitBinop(BST_stmt* node, RewriterVar* lhs, RewriterVar* rhs, int op_type);
RewriterVar* emitCallattr(BST_stmt* node, RewriterVar* obj, BoxedString* attr, CallattrFlags flags,
const llvm::ArrayRef<RewriterVar*> args, std::vector<BoxedString*>* keyword_names);
RewriterVar* emitCompare(BST_expr* node, RewriterVar* lhs, RewriterVar* rhs, int op_type);
RewriterVar* emitCompare(BST_stmt* node, RewriterVar* lhs, RewriterVar* rhs, int op_type);
RewriterVar* emitCreateDict();
void emitDictSet(RewriterVar* dict, RewriterVar* k, RewriterVar* v);
RewriterVar* emitCreateList(const llvm::ArrayRef<STOLEN(RewriterVar*)> values);
RewriterVar* emitCreateSet(const llvm::ArrayRef<RewriterVar*> values);
RewriterVar* emitCreateSlice(RewriterVar* start, RewriterVar* stop, RewriterVar* step);
RewriterVar* emitCreateTuple(const llvm::ArrayRef<RewriterVar*> values);
RewriterVar* emitDeref(BST_Name* name);
RewriterVar* emitDeref(BST_LoadName* name);
RewriterVar* emitExceptionMatches(RewriterVar* v, RewriterVar* cls);
RewriterVar* emitGetAttr(RewriterVar* obj, BoxedString* s, BST_expr* node);
RewriterVar* emitGetBlockLocal(BST_Name* name);
void emitKillTemporary(BST_Name* name);
RewriterVar* emitGetAttr(BST_stmt* node, RewriterVar* obj, BoxedString* s);
RewriterVar* emitGetBlockLocal(InternedString name, int vreg);
RewriterVar* emitGetBlockLocalMustExist(int vreg);
void emitKillTemporary(int vreg);
RewriterVar* emitGetBoxedLocal(BoxedString* s);
RewriterVar* emitGetBoxedLocals();
RewriterVar* emitGetClsAttr(RewriterVar* obj, BoxedString* s);
RewriterVar* emitGetGlobal(BoxedString* s);
RewriterVar* emitGetItem(BST_expr* node, RewriterVar* value, RewriterVar* slice);
RewriterVar* emitGetLocal(BST_Name* name);
RewriterVar* emitGetItem(BST_stmt* node, RewriterVar* value, RewriterVar* slice);
RewriterVar* emitGetLocal(InternedString name, int vreg);
RewriterVar* emitGetLocalMustExist(int vreg);
RewriterVar* emitGetPystonIter(RewriterVar* v);
RewriterVar* emitHasnext(RewriterVar* v);
RewriterVar* emitImportFrom(RewriterVar* module, BoxedString* name);
RewriterVar* emitImportName(int level, RewriterVar* from_imports, llvm::StringRef module_name);
RewriterVar* emitImportFrom(RewriterVar* module, RewriterVar* name);
RewriterVar* emitImportName(int level, RewriterVar* from_imports, RewriterVar* module_name);
RewriterVar* emitImportStar(RewriterVar* module);
RewriterVar* emitLandingpad();
RewriterVar* emitNonzero(RewriterVar* v);
RewriterVar* emitNotNonzero(RewriterVar* v);
RewriterVar* emitRepr(RewriterVar* v);
RewriterVar* emitRuntimeCall(BST_expr* node, RewriterVar* obj, ArgPassSpec argspec,
RewriterVar* emitRuntimeCall(BST_stmt* node, RewriterVar* obj, ArgPassSpec argspec,
const llvm::ArrayRef<RewriterVar*> args, std::vector<BoxedString*>* keyword_names);
RewriterVar* emitUnaryop(RewriterVar* v, int op_type);
std::vector<RewriterVar*> emitUnpackIntoArray(RewriterVar* v, uint64_t num);
......@@ -317,14 +319,15 @@ public:
void emitRaise0();
void emitRaise3(RewriterVar* arg0, RewriterVar* arg1, RewriterVar* arg2);
void emitReturn(RewriterVar* v);
void emitSetAttr(BST_expr* node, RewriterVar* obj, BoxedString* s, STOLEN(RewriterVar*) attr);
void emitSetBlockLocal(BST_Name* name, STOLEN(RewriterVar*) v);
void emitSetAttr(BST_stmt* node, RewriterVar* obj, BoxedString* s, STOLEN(RewriterVar*) attr);
void emitSetBlockLocal(int vreg, STOLEN(RewriterVar*) v);
void emitSetCurrentInst(BST_stmt* node);
void emitSetExcInfo(RewriterVar* type, RewriterVar* value, RewriterVar* traceback);
void emitSetGlobal(BoxedString* s, STOLEN(RewriterVar*) v, bool are_globals_from_module);
void emitSetItemName(BoxedString* s, RewriterVar* v);
void emitSetItem(RewriterVar* target, RewriterVar* slice, RewriterVar* value);
void emitSetLocal(BST_Name* name, bool set_closure, STOLEN(RewriterVar*) v);
void emitSetLocal(int vreg, STOLEN(RewriterVar*) v);
void emitSetLocalClosure(BST_StoreName* name, STOLEN(RewriterVar*) v);
// emitSideExit steals a full ref from v, not just a vref
void emitSideExit(STOLEN(RewriterVar*) v, Box* cmp_value, CFGBlock* next_block);
void emitUncacheExcInfo();
......@@ -351,7 +354,7 @@ private:
const llvm::ArrayRef<RewriterVar*> additional_uses);
std::pair<RewriterVar*, RewriterAction*> emitPPCall(void* func_addr, llvm::ArrayRef<RewriterVar*> args,
unsigned short pp_size, bool should_record_type = false,
BST* bst_node = NULL,
BST_stmt* bst_node = NULL,
llvm::ArrayRef<RewriterVar*> additional_uses = {});
static void assertNameDefinedHelper(const char* id);
......@@ -371,7 +374,7 @@ private:
void _emitJump(CFGBlock* b, RewriterVar* block_next, ExitInfo& exit_info);
void _emitOSRPoint();
void _emitPPCall(RewriterVar* result, void* func_addr, llvm::ArrayRef<RewriterVar*> args, unsigned short pp_size,
BST* bst_node, llvm::ArrayRef<RewriterVar*> vars_to_bump);
BST_stmt* bst_node, llvm::ArrayRef<RewriterVar*> vars_to_bump);
void _emitRecordType(RewriterVar* obj_cls_var);
void _emitReturn(RewriterVar* v);
void _emitSideExit(STOLEN(RewriterVar*) var, RewriterVar* val_constant, CFGBlock* next_block,
......
......@@ -73,7 +73,7 @@ struct GlobalState {
llvm::Type* llvm_value_type, *llvm_value_type_ptr, *llvm_value_type_ptr_ptr;
llvm::Type* llvm_class_type, *llvm_class_type_ptr;
llvm::Type* llvm_opaque_type;
llvm::Type* llvm_boxedstring_type_ptr, *llvm_dict_type_ptr, *llvm_bststmt_type_ptr, *llvm_bstexpr_type_ptr;
llvm::Type* llvm_boxedstring_type_ptr, *llvm_dict_type_ptr, *llvm_bststmt_type_ptr;
llvm::Type* llvm_frame_info_type;
llvm::Type* llvm_code_type_ptr, *llvm_closure_type_ptr, *llvm_generator_type_ptr;
llvm::Type* llvm_module_type_ptr, *llvm_bool_type_ptr;
......
......@@ -2762,6 +2762,15 @@ public:
return new ConcreteCompilerVariable(SLICE, rtn);
}
CompilerVariable* dup(VAR* v, DupCache& cache) override {
// TODO copied from UnknownType
auto& rtn = cache[v];
if (rtn == NULL) {
rtn = new VAR(this, v->getValue());
}
return rtn;
}
} _UNBOXED_SLICE;
CompilerType* UNBOXED_SLICE = &_UNBOXED_SLICE;
......
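The added dup() override is the usual get-or-create-through-a-cache idiom (the TODO notes it was copied from UnknownType). A standalone sketch of the same pattern with placeholder types:

```cpp
// Get-or-create clone via a cache map, mirroring the dup()/DupCache idiom above.
// Var and DupCache here are placeholders, not the Pyston compiler types.
#include <cassert>
#include <unordered_map>

struct Var { int value; };
using DupCache = std::unordered_map<const Var*, Var*>;

Var* dup(const Var* v, DupCache& cache) {
    // operator[] inserts a null entry on first lookup, so holding a reference
    // to the slot lets us test and fill it without a second hash lookup.
    Var*& rtn = cache[v];
    if (rtn == nullptr)
        rtn = new Var{ v->value };
    return rtn;
}

int main() {
    DupCache cache;
    Var v{ 42 };
    Var* a = dup(&v, cache);
    Var* b = dup(&v, cache); // second call hits the cache
    assert(a == b && a->value == 42);
    delete a;
}
```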
......@@ -337,6 +337,38 @@ llvm::Value* handlePotentiallyUndefined(ConcreteCompilerVariable* is_defined_var
return phi;
}
// This is used to filter out any destination vregs set by an invoke statement at the end of the previous block, if
// we're in the unwind path.
class SymTableDstVRegDeleter : NoopBSTVisitor {
private:
SymbolTable* sym_table;
bool created_new_sym_table;
SymTableDstVRegDeleter(SymbolTable* sym_table)
: NoopBSTVisitor(true /* skip child CFG nodes */), sym_table(sym_table), created_new_sym_table(false) {}
protected:
bool visit_vreg(int* vreg, bool is_dst = false) override {
if (!is_dst || *vreg == VREG_UNDEFINED || !(*sym_table)[*vreg])
return false;
if (!created_new_sym_table) {
sym_table = new SymbolTable(*sym_table);
created_new_sym_table = true;
}
(*sym_table)[*vreg] = NULL;
return false;
}
public:
static std::pair<SymbolTable*, bool /* created_new_sym_table */> removeDestVRegsFromSymTable(SymbolTable* sym_table,
BST_Invoke* stmt) {
SymTableDstVRegDeleter visitor(sym_table);
stmt->accept(&visitor);
return std::make_pair(visitor.sym_table, visitor.created_new_sym_table);
}
};
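SymTableDstVRegDeleter copies the symbol table lazily: the copy happens only when the first destination vreg actually has to be cleared. A standalone sketch of that copy-on-write filtering, with a plain std::map standing in for the real SymbolTable:

```cpp
// Copy-on-write removal of entries from a shared table, as in
// SymTableDstVRegDeleter; std::map stands in for the real SymbolTable.
#include <cassert>
#include <map>
#include <utility>
#include <vector>

using SymTable = std::map<int, const char*>; // vreg -> symbol

// Returns the (possibly new) table and whether a copy was made.
std::pair<SymTable*, bool> removeVRegs(SymTable* table, const std::vector<int>& dst_vregs) {
    bool copied = false;
    for (int vreg : dst_vregs) {
        if (vreg < 0 || !table->count(vreg))
            continue;
        if (!copied) {
            table = new SymTable(*table); // copy only when the first removal happens
            copied = true;
        }
        table->erase(vreg);
    }
    return { table, copied };
}

int main() {
    SymTable base = { { 0, "#t0" }, { 1, "x" } };
    auto [filtered, copied] = removeVRegs(&base, { 0 });
    assert(copied && !filtered->count(0) && base.count(0)); // original untouched
    if (copied)
        delete filtered;
}
```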
static void emitBBs(IRGenState* irstate, TypeAnalysis* types, const OSREntryDescriptor* entry_descriptor,
const BlockSet& blocks) {
SourceInfo* source = irstate->getSourceInfo();
......@@ -551,7 +583,7 @@ static void emitBBs(IRGenState* irstate, TypeAnalysis* types, const OSREntryDesc
if (block == cfg->getStartingBlock()) {
assert(entry_descriptor == NULL);
if (ENABLE_REOPT && effort < EffortLevel::MAXIMAL && source->ast_type != BST_TYPE::Module) {
if (ENABLE_REOPT && effort < EffortLevel::MAXIMAL && source->ast_type != AST_TYPE::Module) {
llvm::BasicBlock* preentry_bb = llvm::BasicBlock::Create(
g.context, "pre_entry", irstate->getLLVMFunction(), llvm_entry_blocks[cfg->getStartingBlock()]);
llvm::BasicBlock* reopt_bb = llvm::BasicBlock::Create(g.context, "reopt", irstate->getLLVMFunction());
......@@ -695,16 +727,6 @@ static void emitBBs(IRGenState* irstate, TypeAnalysis* types, const OSREntryDesc
// analysis frameworks can't (yet) support the idea of a block flowing differently to its different
// successors.
//
// There are four kinds of BST statements which can set a name:
// - Assign
// - ClassDef
// - FunctionDef
// - Import, ImportFrom
//
// However, all of these get translated away into Assigns, so we only need to worry about those. Also,
// as an invariant, all assigns that can fail assign to a temporary rather than a python name. This
// ensures that we interoperate properly with definedness analysis.
//
// We only need to do this in the case that we have exactly one predecessor, because:
// - a block ending in an invoke will have multiple successors
// - critical edges (block with multiple successors -> block with multiple predecessors)
......@@ -715,38 +737,9 @@ static void emitBBs(IRGenState* irstate, TypeAnalysis* types, const OSREntryDesc
SymbolTable* sym_table = ending_symbol_tables[pred];
bool created_new_sym_table = false;
if (last_inst->type == BST_TYPE::Invoke && bst_cast<BST_Invoke>(last_inst)->exc_dest == block) {
BST_stmt* stmt = bst_cast<BST_Invoke>(last_inst)->stmt;
// The CFG pass translates away these statements, so we should never encounter them.
// If we did, we'd need to remove a name here.
assert(stmt->type != BST_TYPE::ClassDef);
assert(stmt->type != BST_TYPE::FunctionDef);
assert(stmt->type != BST_TYPE::Import);
assert(stmt->type != BST_TYPE::ImportFrom);
if (stmt->type == BST_TYPE::Assign) {
auto asgn = bst_cast<BST_Assign>(stmt);
if (asgn->target->type == BST_TYPE::Name) {
auto asname = bst_cast<BST_Name>(asgn->target);
assert(asname->lookup_type != ScopeInfo::VarScopeType::UNKNOWN);
InternedString name = asname->id;
int vreg = bst_cast<BST_Name>(asgn->target)->vreg;
assert(name.c_str()[0] == '#'); // it must be a temporary
// You might think I need to check whether `name' is being assigned globally or locally,
// since a global assign doesn't affect the symbol table. However, the CFG pass only
// generates invoke-assigns to temporary variables. Just to be sure, we assert:
assert(asname->lookup_type != ScopeInfo::VarScopeType::GLOBAL);
// TODO: inefficient
sym_table = new SymbolTable(*sym_table);
ASSERT((*sym_table)[vreg] != NULL, "%d %s\n", block->idx, name.c_str());
(*sym_table)[vreg] = NULL;
created_new_sym_table = true;
}
}
}
if (last_inst->type == BST_TYPE::Invoke && bst_cast<BST_Invoke>(last_inst)->exc_dest == block)
std::tie(sym_table, created_new_sym_table) = SymTableDstVRegDeleter::removeDestVRegsFromSymTable(
sym_table, bst_cast<BST_Invoke>(last_inst));
generator->copySymbolsFrom(sym_table);
for (auto&& p : *definedness_tables[pred]) {
......@@ -1034,7 +1027,7 @@ std::pair<CompiledFunction*, llvm::Function*> doCompile(BoxedCode* code, SourceI
assert((entry_descriptor != NULL) + (spec != NULL) == 1);
if (VERBOSITY("irgen") >= 2)
source->cfg->print();
source->cfg->print(code->constant_vregs);
assert(g.cur_module == NULL);
......@@ -1108,9 +1101,10 @@ std::pair<CompiledFunction*, llvm::Function*> doCompile(BoxedCode* code, SourceI
speculation_level = TypeAnalysis::SOME;
TypeAnalysis* types;
if (entry_descriptor)
types = doTypeAnalysis(entry_descriptor, effort, speculation_level);
types = doTypeAnalysis(entry_descriptor, effort, speculation_level, code->constant_vregs);
else
types = doTypeAnalysis(source->cfg, *param_names, spec->arg_types, effort, speculation_level);
types = doTypeAnalysis(source->cfg, *param_names, spec->arg_types, effort, speculation_level,
code->constant_vregs);
_t2.split();
......
......@@ -29,7 +29,6 @@
namespace pyston {
class BST_expr;
class BST_stmt;
class CFGBlock;
class GCBuilder;
......@@ -37,6 +36,7 @@ class IREmitter;
struct UnwindInfo {
public:
BoxedCode* code;
BST_stmt* current_stmt;
llvm::BasicBlock* exc_dest;
......@@ -46,15 +46,15 @@ public:
bool hasHandler() const { return exc_dest != NULL; }
UnwindInfo(BST_stmt* current_stmt, llvm::BasicBlock* exc_dest, bool is_after_deopt = false)
: current_stmt(current_stmt), exc_dest(exc_dest), is_after_deopt(is_after_deopt) {}
UnwindInfo(BoxedCode* code, BST_stmt* current_stmt, llvm::BasicBlock* exc_dest, bool is_after_deopt = false)
: code(code), current_stmt(current_stmt), exc_dest(exc_dest), is_after_deopt(is_after_deopt) {}
ExceptionStyle preferredExceptionStyle() const;
// Risky! This means that we can't unwind from this location, and should be used in the
// rare case that there are language-specific reasons that the statement should not unwind
// (ex: loading function arguments into the appropriate scopes).
static UnwindInfo cantUnwind() { return UnwindInfo(NULL, NULL); }
static UnwindInfo cantUnwind() { return UnwindInfo(NULL, NULL, NULL); }
};
// TODO get rid of this
......@@ -119,7 +119,7 @@ public:
// virtual void checkAndPropagateCapiException(const UnwindInfo& unw_info, llvm::Value* returned_val,
// llvm::Value* exc_val, bool double_check = false) = 0;
virtual llvm::Value* createDeopt(BST_stmt* current_stmt, BST_expr* node, llvm::Value* node_value) = 0;
virtual llvm::Value* createDeopt(BST_stmt* current_stmt, llvm::Value* node_value) = 0;
virtual BORROWED(Box*) getIntConstant(int64_t n) = 0;
virtual BORROWED(Box*) getFloatConstant(double d) = 0;
......
This diff is collapsed.
......@@ -203,8 +203,6 @@ class BST_Call;
IREmitter* createIREmitter(IRGenState* irstate, llvm::BasicBlock*& curblock, IRGenerator* irgenerator = NULL);
IRGenerator* createIRGenerator(IRGenState* irstate, std::unordered_map<CFGBlock*, llvm::BasicBlock*>& entry_blocks,
CFGBlock* myblock, TypeAnalysis* types);
std::vector<BoxedString*>* getKeywordNameStorage(BST_Call* node);
}
#endif
......@@ -151,10 +151,6 @@ void initGlobalFuncs(GlobalState& g) {
assert(g.llvm_bststmt_type_ptr);
g.llvm_bststmt_type_ptr = g.llvm_bststmt_type_ptr->getPointerTo();
g.llvm_bstexpr_type_ptr = g.stdlib_module->getTypeByName("class.pyston::BST_expr");
assert(g.llvm_bstexpr_type_ptr);
g.llvm_bstexpr_type_ptr = g.llvm_bstexpr_type_ptr->getPointerTo();
// The LLVM vector type for the arguments that we pass to runtimeCall and related functions.
// It will be a pointer to a type named something like class.std::vector or
// class.std::vector.##. We can figure out exactly what it is by looking at the last
......
......@@ -42,7 +42,7 @@ Box* recordType(TypeRecorder* self, Box* obj) {
return obj;
}
BoxedClass* predictClassFor(BST* node) {
BoxedClass* predictClassFor(BST_stmt* node) {
ICInfo* ic = ICInfo::getICInfoForNode(node);
if (!ic || !ic->getTypeRecorder())
return NULL;
......
......@@ -19,7 +19,7 @@
namespace pyston {
class BST;
class BST_stmt;
class Box;
class BoxedClass;
......@@ -44,7 +44,7 @@ public:
friend Box* recordType(TypeRecorder*, Box*);
};
BoxedClass* predictClassFor(BST* node);
BoxedClass* predictClassFor(BST_stmt* node);
}
#endif
......@@ -636,7 +636,7 @@ public:
if (!getIsReraiseFlag()) {
// TODO: shouldn't fetch this multiple times?
frame_iter.getCurrentStatement()->cxx_exception_count++;
++frame_iter.getFrameInfo()->code->cxx_exception_count[frame_iter.getCurrentStatement()];
exceptionAtLine(&exc_info.traceback);
} else
getIsReraiseFlag() = false;
......
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
......@@ -88,8 +88,7 @@ public:
void unconnectFrom(CFGBlock* successor);
void push_back(BST_stmt* node) { body.push_back(node); }
void print(llvm::raw_ostream& stream = llvm::outs());
void _print() { print(); }
void print(const ConstantVRegInfo& constant_vregs, llvm::raw_ostream& stream = llvm::outs());
};
// the vregs are split into three parts.
......@@ -110,8 +109,8 @@ class VRegInfo {
private:
#ifndef NDEBUG
// these maps use too much memory; we only use them in the debug build for asserts
llvm::DenseMap<InternedString, DefaultedInt<-1>> sym_vreg_map_user_visible;
llvm::DenseMap<InternedString, DefaultedInt<-1>> sym_vreg_map;
llvm::DenseMap<InternedString, DefaultedInt<VREG_UNDEFINED>> sym_vreg_map_user_visible;
llvm::DenseMap<InternedString, DefaultedInt<VREG_UNDEFINED>> sym_vreg_map;
#endif
// Reverse map, from vreg->symbol name.
......@@ -126,8 +125,8 @@ public:
#ifndef NDEBUG
// map of all assigned names. if the name is block local the vreg number is not unique because these vregs get reused
// between blocks.
const llvm::DenseMap<InternedString, DefaultedInt<-1>>& getSymVRegMap() const { return sym_vreg_map; }
const llvm::DenseMap<InternedString, DefaultedInt<-1>>& getUserVisibleSymVRegMap() const {
const llvm::DenseMap<InternedString, DefaultedInt<VREG_UNDEFINED>>& getSymVRegMap() const { return sym_vreg_map; }
const llvm::DenseMap<InternedString, DefaultedInt<VREG_UNDEFINED>>& getUserVisibleSymVRegMap() const {
return sym_vreg_map_user_visible;
}
......@@ -169,7 +168,7 @@ public:
int getNumOfCrossBlockVRegs() const { return num_vregs_cross_block; }
bool hasVRegsAssigned() const { return num_vregs != -1; }
void assignVRegs(CFG* cfg, const ParamNames& param_names);
void assignVRegs(CFG* cfg, const ParamNames& param_names, llvm::DenseMap<int*, InternedString>& id_vreg);
};
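The debug-only maps now spell the sentinel as VREG_UNDEFINED rather than a bare -1. DefaultedInt is a small Pyston helper; a wrapper of roughly this shape would behave the same way (hypothetical sketch, not the actual definition, with the sentinel taken to be -1 as in the old literal):

```cpp
// Hypothetical sketch of a DefaultedInt-style wrapper: an int whose
// default-constructed value is a chosen sentinel instead of 0.
// This illustrates the idea only; it is not Pyston's real DefaultedInt.
#include <cassert>
#include <map>
#include <string>

constexpr int VREG_UNDEFINED = -1; // assumed sentinel value for this sketch

template <int D> struct DefaultedInt {
    int value = D;
    DefaultedInt() = default;
    DefaultedInt(int v) : value(v) {}
    operator int() const { return value; }
};

int main() {
    std::map<std::string, DefaultedInt<VREG_UNDEFINED>> sym_vreg_map;
    // A lookup of a missing symbol yields the sentinel instead of 0.
    assert(sym_vreg_map["never_assigned"] == VREG_UNDEFINED);
    sym_vreg_map["x"] = 3;
    assert(sym_vreg_map["x"] == 3);
}
```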
// Control Flow Graph
......@@ -211,7 +210,7 @@ public:
blocks.push_back(block);
}
void print(llvm::raw_ostream& stream = llvm::outs());
void print(const ConstantVRegInfo& constant_vregs, llvm::raw_ostream& stream = llvm::outs());
};
class VRegSet {
......@@ -332,7 +331,7 @@ public:
BoxedCode* computeAllCFGs(AST* ast, bool globals_from_module, FutureFlags future_flags, BoxedString* fn,
BoxedModule* bm);
void printCFG(CFG* cfg);
void printCFG(CFG* cfg, const ConstantVRegInfo& constant_vregs);
}
#endif
This diff is collapsed.
......@@ -113,9 +113,10 @@ void BoxedCode::dealloc(Box* b) noexcept {
}
BoxedCode::BoxedCode(int num_args, bool takes_varargs, bool takes_kwargs, int firstlineno,
std::unique_ptr<SourceInfo> source, ParamNames param_names, BoxedString* filename,
BoxedString* name, Box* doc)
std::unique_ptr<SourceInfo> source, ConstantVRegInfo constant_vregs, ParamNames param_names,
BoxedString* filename, BoxedString* name, Box* doc)
: source(std::move(source)),
constant_vregs(std::move(constant_vregs)),
filename(incref(filename)),
name(incref(name)),
firstlineno(firstlineno),
......
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.