Support Python 3.13 (not free-threaded) (#711)
* Build Python 3.13 wheels (not free-threaded)

* Include cp313 wheels for testing

* Build Python 3.13 wheels only for testing

* Remove trove classifier

* Update for Python 3.13.0rc1

* Clean up

* Bump cibuildwheel to 2.21.1

* Upgrade deprecated artifact actions

* Update for Python 3.13.0rc3

* Bump cibuildwheel to 2.21.3 to use Python 3.13.0 final

* Squash _eval_type warning

* Still use `_PyUnicode_EQ` for older Python versions

Preserves the semantic meaning for when this function is added back in Python 3.14.

* Use `PyObject_GetIter` for all versions

* Use `PyLong_AsNativeBytes` on Python 3.13

* Suppress hashing error when parsing annotated types

---------

Co-authored-by: Jim Crist-Harif <[email protected]>
edgarrmondragon and jcrist authored Oct 13, 2024
1 parent 51a261c commit 7ade469
Showing 4 changed files with 61 additions and 19 deletions.
11 changes: 7 additions & 4 deletions .github/workflows/ci.yml
@@ -80,7 +80,7 @@ jobs:
env:
CIBW_TEST_REQUIRES: "pytest msgpack pyyaml tomli tomli_w"
CIBW_TEST_COMMAND: "pytest {project}/tests"
CIBW_BUILD: "cp38-* cp39-* cp310-* cp311-* cp312-*"
CIBW_BUILD: "cp38-* cp39-* cp310-* cp311-* cp312-* cp313-*"
CIBW_SKIP: "*-win32 *_i686 *_s390x *_ppc64le"
CIBW_ARCHS_MACOS: "x86_64 arm64"
CIBW_ARCHS_LINUX: "x86_64 aarch64"
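
The CIBW_BUILD and CIBW_SKIP values above are shell-style globs matched against cibuildwheel build identifiers such as cp313-manylinux_x86_64. A rough Python sketch of the selection logic (the identifiers are illustrative, not the full list cibuildwheel generates):

import fnmatch

build = "cp38-* cp39-* cp310-* cp311-* cp312-* cp313-*".split()
skip = "*-win32 *_i686 *_s390x *_ppc64le".split()

candidates = ["cp313-manylinux_x86_64", "cp313-win32", "cp37-manylinux_x86_64"]
for ident in candidates:
    # Build an identifier only if it matches a build glob and no skip glob.
    selected = any(fnmatch.fnmatch(ident, p) for p in build) and not any(
        fnmatch.fnmatch(ident, p) for p in skip
    )
    print(ident, "->", "build" if selected else "skip")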
@@ -99,15 +99,16 @@ jobs:
- name: Set up Environment
if: github.event_name != 'release'
run: |
echo "CIBW_SKIP=${CIBW_SKIP} *-musllinux_* cp38-*_aarch64 cp39-*_aarch64 cp311-*_aarch64 cp312-*_aarch64" >> $GITHUB_ENV
echo "CIBW_SKIP=${CIBW_SKIP} *-musllinux_* cp38-*_aarch64 cp39-*_aarch64 cp311-*_aarch64 cp312-*_aarch64 cp313-*_aarch64" >> $GITHUB_ENV
- name: Build & Test Wheels
uses: pypa/cibuildwheel@v2.16.5
uses: pypa/cibuildwheel@v2.21.3

- name: Upload artifact
uses: actions/upload-artifact@v4
if: github.event_name == 'release' && github.event.action == 'published'
with:
name: artifact-wheels
path: ./wheelhouse/*.whl

build_sdist:
@@ -129,6 +130,7 @@ jobs:
- name: Upload artifact
uses: actions/upload-artifact@v4
with:
name: artifact-sdist
path: dist/*.tar.gz

upload_pypi:
@@ -138,8 +140,9 @@ jobs:
steps:
- uses: actions/download-artifact@v4
with:
name: artifact
merge-multiple: true
path: dist
pattern: artifact-*

- uses: pypa/gh-action-pypi-publish@master
with:
56 changes: 42 additions & 14 deletions msgspec/_core.c
@@ -20,6 +20,7 @@
#define PY310_PLUS (PY_VERSION_HEX >= 0x030a0000)
#define PY311_PLUS (PY_VERSION_HEX >= 0x030b0000)
#define PY312_PLUS (PY_VERSION_HEX >= 0x030c0000)
#define PY313_PLUS (PY_VERSION_HEX >= 0x030d0000)

/* Hint to the compiler not to store `x` in a register since it is likely to
* change. Results in much higher performance on GCC, with smaller benefits on
@@ -56,6 +57,12 @@ ms_popcount(uint64_t i) { \
#define SET_SIZE(obj, size) (((PyVarObject *)obj)->ob_size = size)
#endif

#if PY313_PLUS
#define MS_UNICODE_EQ(a, b) (PyUnicode_Compare(a, b) == 0)
#else
#define MS_UNICODE_EQ(a, b) _PyUnicode_EQ(a, b)
#endif

#define DIV_ROUND_CLOSEST(n, d) ((((n) < 0) == ((d) < 0)) ? (((n) + (d)/2)/(d)) : (((n) - (d)/2)/(d)))

/* These macros are used to manually unroll some loops */
@@ -497,7 +504,7 @@ find_keyword(PyObject *kwnames, PyObject *const *kwstack, PyObject *key)
for (i = 0; i < nkwargs; i++) {
PyObject *kwname = PyTuple_GET_ITEM(kwnames, i);
assert(PyUnicode_Check(kwname));
if (_PyUnicode_EQ(kwname, key)) {
if (MS_UNICODE_EQ(kwname, key)) {
return kwstack[i];
}
}
@@ -4440,18 +4447,17 @@ typenode_collect_convert_structs(TypeNodeCollectState *state) {
*
* If any of these checks fails, an appropriate error is returned.
*/
PyObject *tag_mapping = NULL, *tag_field = NULL, *set_item = NULL;
PyObject *tag_mapping = NULL, *tag_field = NULL, *set_iter = NULL, *set_item = NULL;
PyObject *struct_info = NULL;
Py_ssize_t set_pos = 0;
Py_hash_t set_hash;
bool array_like = false;
bool tags_are_strings = true;
int status = -1;

tag_mapping = PyDict_New();
if (tag_mapping == NULL) goto cleanup;

while (_PySet_NextEntry(state->structs_set, &set_pos, &set_item, &set_hash)) {
set_iter = PyObject_GetIter(state->structs_set);
while ((set_item = PyIter_Next(set_iter))) {
struct_info = StructInfo_Convert(set_item);
if (struct_info == NULL) goto cleanup;

@@ -4559,6 +4565,7 @@ typenode_collect_convert_structs(TypeNodeCollectState *state) {
status = 0;

cleanup:
Py_XDECREF(set_iter);
Py_XDECREF(tag_mapping);
Py_XDECREF(struct_info);
return status;
@@ -4614,11 +4621,15 @@ typenode_origin_args_metadata(
* abstract -> concrete mapping. If present, this is an unparametrized
* collection of some form. This helps avoid compatibility issues in
* Python 3.8, where unparametrized collections still have __args__. */
origin = PyDict_GetItem(state->mod->concrete_types, t);
origin = PyDict_GetItemWithError(state->mod->concrete_types, t);
if (origin != NULL) {
Py_INCREF(origin);
break;
}
else {
/* Ignore all errors in this initial check */
PyErr_Clear();
}

/* If `t` is a type instance, no need to inspect further */
if (PyType_CheckExact(t)) {
@@ -7324,7 +7335,7 @@ Struct_vectorcall(PyTypeObject *cls, PyObject *const *args, size_t nargsf, PyObj
* check for parameters passed both as arg and kwarg */
for (field_index = 0; field_index < nfields; field_index++) {
PyObject *field = PyTuple_GET_ITEM(fields, field_index);
if (_PyUnicode_EQ(kwname, field)) {
if (MS_UNICODE_EQ(kwname, field)) {
if (MS_UNLIKELY(field_index < nargs)) {
PyErr_Format(
PyExc_TypeError,
@@ -7731,7 +7742,7 @@ struct_replace(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject
}
for (field_index = 0; field_index < nfields; field_index++) {
PyObject *field = PyTuple_GET_ITEM(fields, field_index);
if (_PyUnicode_EQ(kwname, field)) goto kw_found;
if (MS_UNICODE_EQ(kwname, field)) goto kw_found;
}

/* Unknown keyword */
@@ -11257,7 +11268,16 @@ ms_uuid_to_16_bytes(MsgspecState *mod, PyObject *obj, unsigned char *buf) {
PyErr_SetString(PyExc_TypeError, "uuid.int must be an int");
return -1;
}
#if PY313_PLUS
int out = (int)PyLong_AsNativeBytes(
int128,
buf,
16,
Py_ASNATIVEBYTES_BIG_ENDIAN | Py_ASNATIVEBYTES_UNSIGNED_BUFFER
);
#else
int out = _PyLong_AsByteArray((PyLongObject *)int128, buf, 16, 0, 0);
#endif
Py_DECREF(int128);
return out;
}
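
For reference, a rough pure-Python equivalent of what `ms_uuid_to_16_bytes` produces (a sketch, not part of this commit): the UUID's 128-bit integer written as 16 unsigned big-endian bytes, which matches the UUID's `bytes` form.

import uuid

u = uuid.uuid4()
# Writing uuid.int into a 16-byte unsigned big-endian buffer, as the
# PyLong_AsNativeBytes / _PyLong_AsByteArray calls above do:
buf = u.int.to_bytes(16, "big")
assert buf == u.bytes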
@@ -12406,8 +12426,7 @@ mpack_encode_list(EncoderState *self, PyObject *obj)
static int
mpack_encode_set(EncoderState *self, PyObject *obj)
{
Py_ssize_t len, ppos = 0;
Py_hash_t hash;
Py_ssize_t len = 0;
PyObject *item;
int status = -1;

@@ -12426,13 +12445,18 @@ mpack_encode_set(EncoderState *self, PyObject *obj)

if (mpack_encode_array_header(self, len, "set") < 0) return -1;
if (Py_EnterRecursiveCall(" while serializing an object")) return -1;
while (_PySet_NextEntry(obj, &ppos, &item, &hash)) {

PyObject *iter = PyObject_GetIter(obj);
if (iter == NULL) goto cleanup;

while ((item = PyIter_Next(iter))) {
if (mpack_encode_inline(self, item) < 0) goto cleanup;
}
status = 0;

cleanup:
Py_LeaveRecursiveCall();
Py_XDECREF(iter);
return status;
}
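
Switching from `_PySet_NextEntry` to the generic iterator protocol does not change observable behavior: sets still encode as arrays in both formats. A quick usage sketch against the public msgspec API (element order is arbitrary for sets):

import msgspec

# Sets still serialize as arrays; only the internal iteration API changed.
print(msgspec.json.encode({1, 2, 3}))     # e.g. b"[1,2,3]"
print(msgspec.msgpack.encode({1, 2, 3}))  # a MessagePack array of the same values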

@@ -13709,8 +13733,7 @@ json_encode_tuple(EncoderState *self, PyObject *obj)
static int
json_encode_set(EncoderState *self, PyObject *obj)
{
Py_ssize_t len, ppos = 0;
Py_hash_t hash;
Py_ssize_t len = 0;
PyObject *item;
int status = -1;

@@ -13729,7 +13752,11 @@ json_encode_set(EncoderState *self, PyObject *obj)

if (ms_write(self, "[", 1) < 0) return -1;
if (Py_EnterRecursiveCall(" while serializing an object")) return -1;
while (_PySet_NextEntry(obj, &ppos, &item, &hash)) {

PyObject *iter = PyObject_GetIter(obj);
if (iter == NULL) goto cleanup;

while ((item = PyIter_Next(iter))) {
if (json_encode_inline(self, item) < 0) goto cleanup;
if (ms_write(self, ",", 1) < 0) goto cleanup;
}
@@ -13738,6 +13765,7 @@
status = 0;
cleanup:
Py_LeaveRecursiveCall();
Py_XDECREF(iter);
return status;
}

11 changes: 10 additions & 1 deletion msgspec/_utils.py
@@ -51,6 +51,15 @@ def _forward_ref(value):
return typing.ForwardRef(value, is_argument=False, is_class=True)


# Python 3.13 adds a new mandatory type_params kwarg to _eval_type
if sys.version_info >= (3, 13):

def _eval_type(t, globalns, localns):
return typing._eval_type(t, globalns, localns, ())
else:
_eval_type = typing._eval_type


def _apply_params(obj, mapping):
if params := getattr(obj, "__parameters__", None):
args = tuple(mapping.get(p, p) for p in params)
@@ -127,7 +136,7 @@ def get_class_annotations(obj):
value = type(None)
elif isinstance(value, str):
value = _forward_ref(value)
value = typing._eval_type(value, cls_locals, cls_globals)
value = _eval_type(value, cls_locals, cls_globals)
if mapping is not None:
value = _apply_params(value, mapping)
hints[name] = value
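
As context for the shim above (a standalone sketch, not part of the commit): on Python 3.13, calling `typing._eval_type` without the new `type_params` argument emits a DeprecationWarning, which the `filterwarnings = error` setting added below would turn into a test failure, so the wrapper always passes an empty tuple.

import sys
import typing

# Same shape as the wrapper in msgspec/_utils.py: on 3.13+ pass an empty
# tuple for type_params so no DeprecationWarning is emitted.
if sys.version_info >= (3, 13):
    def _eval_type(t, globalns, localns):
        return typing._eval_type(t, globalns, localns, ())
else:
    _eval_type = typing._eval_type

ref = typing.ForwardRef("List[int]")
print(_eval_type(ref, vars(typing), {}))  # -> typing.List[int]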
2 changes: 2 additions & 0 deletions setup.cfg
@@ -12,6 +12,8 @@ omit =
markers =
mypy
pyright
filterwarnings =
error

[versioneer]
VCS = git
