Commit b3f28d48 authored by Max Bachmann, committed by GitHub

Fix type conversions in vectorcallfunc (GH-4054)

parent 03278313
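The hunks below replace Py_ssize_t with size_t (and add explicit casts) wherever an argument count feeds the vectorcall protocol, whose nargsf parameter is an unsigned size_t, while CPython container sizes are a signed Py_ssize_t. As a minimal sketch of the convention these casts target (not part of this patch; the helper name is illustrative, and it uses only the public CPython 3.9+ API):

/* Illustrative only: the vectorcall argument count travels as an unsigned
 * size_t, so signed counts must be converted explicitly to stay
 * warning-clean under -Wsign-conversion. */
#include <Python.h>

static PyObject *call_with_one_arg(PyObject *callable, PyObject *arg)
{
    PyObject *stack[1];
    stack[0] = arg;
    /* Public API since CPython 3.9: nargsf is a size_t. */
    return PyObject_Vectorcall(callable, stack, (size_t)1, NULL);
}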
@@ -680,7 +680,7 @@ static PyObject *__Pyx_CyFunction_CallAsMethod(PyObject *func, PyObject *args, P
     __pyx_vectorcallfunc vc = __Pyx_CyFunction_func_vectorcall(cyfunc);
     if (vc) {
 #if CYTHON_ASSUME_SAFE_MACROS
-        return __Pyx_PyVectorcall_FastCallDict(func, vc, &PyTuple_GET_ITEM(args, 0), PyTuple_GET_SIZE(args), kw);
+        return __Pyx_PyVectorcall_FastCallDict(func, vc, &PyTuple_GET_ITEM(args, 0), (size_t)PyTuple_GET_SIZE(args), kw);
 #else
         // avoid unused function warning
         (void) &__Pyx_PyVectorcall_FastCallDict;
...
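For context (a sketch, not part of the patch): PyTuple_GET_SIZE() yields a signed Py_ssize_t, while __Pyx_PyVectorcall_FastCallDict now takes an unsigned size_t count, so the added cast makes the otherwise implicit conversion explicit. The helper name below is hypothetical; the tuple-forwarding pattern mirrors the hunk above:

#include <Python.h>

/* Illustrative only: forward a tuple's items to a vectorcall-style callee,
 * converting the signed tuple size to the unsigned argument count. */
static PyObject *forward_tuple(vectorcallfunc vc, PyObject *func, PyObject *args_tuple)
{
    Py_ssize_t n = PyTuple_GET_SIZE(args_tuple);                /* signed size */
    PyObject *const *items = &PyTuple_GET_ITEM(args_tuple, 0);  /* internal item array */
    return vc(func, items, (size_t)n, NULL);                    /* unsigned nargsf */
}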
@@ -553,15 +553,15 @@ class __Pyx_FakeReference {
 #if CYTHON_VECTORCALL
 #define __pyx_vectorcallfunc vectorcallfunc
 #define __Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET PY_VECTORCALL_ARGUMENTS_OFFSET
-#define __Pyx_PyVectorcall_NARGS(n) PyVectorcall_NARGS(n)
+#define __Pyx_PyVectorcall_NARGS(n) PyVectorcall_NARGS((size_t)(n))
 #elif CYTHON_BACKPORT_VECTORCALL
 typedef PyObject *(*__pyx_vectorcallfunc)(PyObject *callable, PyObject *const *args,
                                           size_t nargsf, PyObject *kwnames);
 #define __Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET ((size_t)1 << (8 * sizeof(size_t) - 1))
-#define __Pyx_PyVectorcall_NARGS(n) ((n) & ~__Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET)
+#define __Pyx_PyVectorcall_NARGS(n) ((Py_ssize_t)(((size_t)(n)) & ~__Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET))
 #else
 #define __Pyx_PY_VECTORCALL_ARGUMENTS_OFFSET 0
-#define __Pyx_PyVectorcall_NARGS(n) (n)
+#define __Pyx_PyVectorcall_NARGS(n) ((Py_ssize_t)(n))
 #endif
 #if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Malloc)
...
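For reference (a stand-alone illustration, not part of the patch): in the backport branch the offset flag rides in the top bit of the unsigned count, and the NARGS macro masks it off before handing back a signed value. The macro names OFFSET and NARGS below are illustrative stand-ins for the __Pyx_ macros above, with ptrdiff_t standing in for Py_ssize_t so the example needs no Python headers:

#include <assert.h>
#include <stddef.h>

#define OFFSET   ((size_t)1 << (8 * sizeof(size_t) - 1))
#define NARGS(n) ((ptrdiff_t)(((size_t)(n)) & ~OFFSET))

int main(void)
{
    size_t nargsf = 2 | OFFSET;      /* two arguments, offset flag set */
    assert(NARGS(nargsf) == 2);      /* flag bit is masked off */
    assert(NARGS((size_t)3) == 3);   /* plain counts pass through */
    return 0;
}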
@@ -2103,7 +2103,7 @@ bad:
 /////////////// PyObjectFastCall.proto ///////////////
 #define __Pyx_PyObject_FastCall(func, args, nargs) __Pyx_PyObject_FastCallDict(func, args, nargs, NULL)
-static CYTHON_INLINE PyObject* __Pyx_PyObject_FastCallDict(PyObject *func, PyObject **args, Py_ssize_t nargs, PyObject *kwargs); /*proto*/
+static CYTHON_INLINE PyObject* __Pyx_PyObject_FastCallDict(PyObject *func, PyObject **args, size_t nargs, PyObject *kwargs); /*proto*/
 /////////////// PyObjectFastCall ///////////////
 //@requires: PyObjectCall
@@ -2111,23 +2111,23 @@ static CYTHON_INLINE PyObject* __Pyx_PyObject_FastCallDict(PyObject *func, PyObj
 //@requires: PyObjectCallMethO
 //@substitute: naming
-static PyObject* __Pyx_PyObject_FastCall_fallback(PyObject *func, PyObject **args, Py_ssize_t nargs, PyObject *kwargs) {
+static PyObject* __Pyx_PyObject_FastCall_fallback(PyObject *func, PyObject **args, size_t nargs, PyObject *kwargs) {
     PyObject *argstuple;
     PyObject *result;
-    Py_ssize_t i;
+    size_t i;
     argstuple = PyTuple_New(nargs);
     if (unlikely(!argstuple)) return NULL;
     for (i = 0; i < nargs; i++) {
         Py_INCREF(args[i]);
-        PyTuple_SET_ITEM(argstuple, i, args[i]);
+        PyTuple_SET_ITEM(argstuple, (Py_ssize_t)i, args[i]);
     }
     result = __Pyx_PyObject_Call(func, argstuple, kwargs);
     Py_DECREF(argstuple);
     return result;
 }
-static CYTHON_INLINE PyObject* __Pyx_PyObject_FastCallDict(PyObject *func, PyObject **args, Py_ssize_t _nargs, PyObject *kwargs) {
+static CYTHON_INLINE PyObject* __Pyx_PyObject_FastCallDict(PyObject *func, PyObject **args, size_t _nargs, PyObject *kwargs) {
     // Special fast paths for 0 and 1 arguments
     // NOTE: in many cases, this is called with a constant value for nargs
     // which is known at compile-time. So the branches below will typically
@@ -2181,20 +2181,20 @@ static CYTHON_INLINE PyObject* __Pyx_PyObject_FastCallDict(PyObject *func, PyObj
 #if CYTHON_VECTORCALL
     vectorcallfunc f = _PyVectorcall_Function(func);
     if (f) {
-        return f(func, args, nargs, kwargs);
+        return f(func, args, (size_t)nargs, kwargs);
     }
 #elif defined(__Pyx_CyFunction_USED) && CYTHON_BACKPORT_VECTORCALL
     // exclude fused functions for now
     if (__Pyx_CyFunction_CheckExact(func)) {
         __pyx_vectorcallfunc f = __Pyx_CyFunction_func_vectorcall(func);
-        if (f) return f(func, args, nargs, kwargs);
+        if (f) return f(func, args, (size_t)nargs, kwargs);
     }
 #endif
     if (nargs == 0) {
         return __Pyx_PyObject_Call(func, $empty_tuple, kwargs);
     }
-    return __Pyx_PyObject_FastCall_fallback(func, args, (size_t)nargs, kwargs);
 }
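The dispatch above (use the callable's vectorcall slot if it has one, otherwise build an argument tuple and go through the regular call path) roughly mirrors what the public CPython API already provides. A hedged equivalent using only documented 3.9+ calls, with an illustrative helper name, looks like this; it is not the patch's implementation, which avoids the public wrappers for portability:

#include <Python.h>

/* Illustrative only: "vectorcall if possible, dict-taking entry point otherwise". */
static PyObject *fastcall_dict_sketch(PyObject *func, PyObject *const *args,
                                      size_t nargs, PyObject *kwargs)
{
    if (kwargs == NULL || PyDict_GET_SIZE(kwargs) == 0) {
        /* PyObject_Vectorcall falls back internally for non-vectorcall callables. */
        return PyObject_Vectorcall(func, args, nargs, NULL);
    }
    /* With a keyword dict, use the dict-taking entry point. */
    return PyObject_VectorcallDict(func, args, nargs, kwargs);
}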
@@ -2559,14 +2559,14 @@ static CYTHON_INLINE PyObject* __Pyx_PyObject_CallNoArg(PyObject *func) {
 /////////////// PyVectorcallFastCallDict.proto ///////////////
 #if CYTHON_METH_FASTCALL
-static CYTHON_INLINE PyObject *__Pyx_PyVectorcall_FastCallDict(PyObject *func, __pyx_vectorcallfunc vc, PyObject *const *args, Py_ssize_t nargs, PyObject *kw);
+static CYTHON_INLINE PyObject *__Pyx_PyVectorcall_FastCallDict(PyObject *func, __pyx_vectorcallfunc vc, PyObject *const *args, size_t nargs, PyObject *kw);
 #endif
 /////////////// PyVectorcallFastCallDict ///////////////
 #if CYTHON_METH_FASTCALL
 // Slow path when kw is non-empty
-static PyObject *__Pyx_PyVectorcall_FastCallDict_kw(PyObject *func, __pyx_vectorcallfunc vc, PyObject *const *args, Py_ssize_t nargs, PyObject *kw)
+static PyObject *__Pyx_PyVectorcall_FastCallDict_kw(PyObject *func, __pyx_vectorcallfunc vc, PyObject *const *args, size_t nargs, PyObject *kw)
 {
     // Code based on _PyObject_FastCallDict() and _PyStack_UnpackDict() from CPython
     PyObject *res = NULL;
@@ -2574,17 +2574,18 @@ static PyObject *__Pyx_PyVectorcall_FastCallDict_kw(PyObject *func, __pyx_vector
     PyObject **newargs;
     PyObject **kwvalues;
     Py_ssize_t i, pos;
+    size_t j;
     PyObject *key, *value;
     unsigned long keys_are_strings;
     Py_ssize_t nkw = PyDict_GET_SIZE(kw);
     // Copy positional arguments
-    newargs = (PyObject **)PyMem_Malloc((nargs + nkw) * sizeof(args[0]));
+    newargs = (PyObject **)PyMem_Malloc((nargs + (size_t)nkw) * sizeof(args[0]));
     if (unlikely(newargs == NULL)) {
         PyErr_NoMemory();
         return NULL;
     }
-    for (i = 0; i < nargs; i++) newargs[i] = args[i];
+    for (j = 0; j < nargs; j++) newargs[j] = args[j];
     // Copy keyword arguments
     kwnames = PyTuple_New(nkw);
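A note on the new size_t j counter introduced above (the snippet below is an illustration, not part of the patch): iterating with a signed index against the now-unsigned nargs bound would trigger signed/unsigned comparison warnings, so a matching unsigned counter keeps the copy loop warning-clean:

#include <stddef.h>

/* Compile with -Wsign-compare -Wsign-conversion to see the difference
 * a matching unsigned loop counter makes. */
static void copy_ptrs(void **dst, void *const *src, size_t nargs)
{
    size_t j;                       /* unsigned counter matches the unsigned bound */
    for (j = 0; j < nargs; j++) {   /* no signed/unsigned comparison */
        dst[j] = src[j];
    }
}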
@@ -2619,7 +2620,7 @@ cleanup:
     return res;
 }
-static CYTHON_INLINE PyObject *__Pyx_PyVectorcall_FastCallDict(PyObject *func, __pyx_vectorcallfunc vc, PyObject *const *args, Py_ssize_t nargs, PyObject *kw)
+static CYTHON_INLINE PyObject *__Pyx_PyVectorcall_FastCallDict(PyObject *func, __pyx_vectorcallfunc vc, PyObject *const *args, size_t nargs, PyObject *kw)
 {
     if (likely(kw == NULL) || PyDict_GET_SIZE(kw) == 0) {
         return vc(func, args, nargs, NULL);
...