Update dataclasses.py and test_dataclasses.py from CPython 3.11.2 #4759


Open · wants to merge 1 commit into main
Lib/dataclasses.py: 140 changes (89 additions, 51 deletions)
@@ -6,6 +6,7 @@
import keyword
import builtins
import functools
import itertools
import abc
import _thread
from types import FunctionType, GenericAlias
@@ -222,14 +223,34 @@ def __repr__(self):
# https://bugs.python.org/issue33453 for details.
_MODULE_IDENTIFIER_RE = re.compile(r'^(?:\s*(\w+)\s*\.)?\s*(\w+)')

# This function's logic is copied from "recursive_repr" function in
# reprlib module to avoid dependency.
def _recursive_repr(user_function):
# Decorator to make a repr function return "..." for a recursive
# call.
repr_running = set()

@functools.wraps(user_function)
def wrapper(self):
key = id(self), _thread.get_ident()
if key in repr_running:
return '...'
repr_running.add(key)
try:
result = user_function(self)
finally:
repr_running.discard(key)
return result
return wrapper

class InitVar:
__slots__ = ('type', )

def __init__(self, type):
self.type = type

def __repr__(self):
if isinstance(self.type, type) and not isinstance(self.type, GenericAlias):
if isinstance(self.type, type):
type_name = self.type.__name__
else:
# typing objects, e.g. List[int]
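
Note: the relocated _recursive_repr above is what lets Field.__repr__ cope with a default whose own repr refers back to the Field. A minimal sketch of that effect with this patch applied (SelfRef is a made-up helper, not part of the diff):

from dataclasses import dataclass, fields

class SelfRef:
    # illustrative object whose repr will end up mentioning the Field holding it
    def __init__(self):
        self.owner = None
    def __repr__(self):
        return f'SelfRef(owner={self.owner!r})'

default = SelfRef()

@dataclass
class C:
    x: SelfRef = default

default.owner = fields(C)[0]   # the default's repr now loops back to the Field
print(fields(C)[0])            # the cycle renders as '...' instead of overflowing the stack
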
@@ -279,6 +300,7 @@ def __init__(self, default, default_factory, init, repr, hash, compare,
self.kw_only = kw_only
self._field_type = None

@_recursive_repr
def __repr__(self):
return ('Field('
f'name={self.name!r},'
@@ -297,7 +319,7 @@ def __repr__(self):
# This is used to support the PEP 487 __set_name__ protocol in the
# case where we're using a field that contains a descriptor as a
# default value. For details on __set_name__, see
# https://www.python.org/dev/peps/pep-0487/#implementation-details.
# https://peps.python.org/pep-0487/#implementation-details.
#
# Note that in _process_class, this Field object is overwritten
# with the default value, so the end result is a descriptor that
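
Note: the comment above is about Field.__set_name__ forwarding to a descriptor that was supplied via field(default=...). A small illustration with a hypothetical descriptor (LoggedName is not from the diff):

from dataclasses import dataclass, field

class LoggedName:
    def __set_name__(self, owner, name):
        # reached through Field.__set_name__, so the descriptor still learns its name
        print(f'__set_name__: {owner.__name__}.{name}')
    def __get__(self, obj, objtype=None):
        return 42
    def __set__(self, obj, value):
        pass

@dataclass
class C:
    x: int = field(default=LoggedName())   # prints '__set_name__: C.x' when the class body is built

print(C().x)   # 42, served by the descriptor's __get__
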
@@ -388,27 +410,6 @@ def _tuple_str(obj_name, fields):
return f'({",".join([f"{obj_name}.{f.name}" for f in fields])},)'


# This function's logic is copied from "recursive_repr" function in
# reprlib module to avoid dependency.
def _recursive_repr(user_function):
# Decorator to make a repr function return "..." for a recursive
# call.
repr_running = set()

@functools.wraps(user_function)
def wrapper(self):
key = id(self), _thread.get_ident()
if key in repr_running:
return '...'
repr_running.add(key)
try:
result = user_function(self)
finally:
repr_running.discard(key)
return result
return wrapper


def _create_fn(name, args, body, *, globals=None, locals=None,
return_type=MISSING):
# Note that we may mutate locals. Callers beware!
@@ -807,8 +808,10 @@ def _get_field(cls, a_name, a_type, default_kw_only):
raise TypeError(f'field {f.name} is a ClassVar but specifies '
'kw_only')

# For real fields, disallow mutable defaults for known types.
if f._field_type is _FIELD and isinstance(f.default, (list, dict, set)):
# For real fields, disallow mutable defaults. Use unhashable as a proxy
# indicator for mutability. Read the __hash__ attribute from the class,
# not the instance.
if f._field_type is _FIELD and f.default.__class__.__hash__ is None:
raise ValueError(f'mutable default {type(f.default)} for field '
f'{f.name} is not allowed: use default_factory')
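
Note: with the isinstance check replaced by the __hash__ probe, any unhashable default is rejected, not only list, dict and set. A quick sketch assuming the patched check (MutableBox is illustrative only); before this change the same definition was accepted silently:

from dataclasses import dataclass

class MutableBox:
    __hash__ = None      # unhashable, so the new check treats it as mutable

try:
    @dataclass
    class Bad:
        item: MutableBox = MutableBox()
except ValueError as exc:
    print(exc)   # mutable default ... for field item is not allowed: use default_factory
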

@@ -879,7 +882,7 @@ def _hash_exception(cls, fields, globals):


def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen,
match_args, kw_only, slots):
match_args, kw_only, slots, weakref_slot):
# Now that dicts retain insertion order, there's no reason to use
# an ordered dict. I am leveraging that ordering here, because
# derived class fields overwrite base class fields, but the order
@@ -1097,16 +1100,19 @@ def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen,
_set_new_attribute(cls, '__match_args__',
tuple(f.name for f in std_init_fields))

# It's an error to specify weakref_slot if slots is False.
if weakref_slot and not slots:
raise TypeError('weakref_slot is True but slots is False')
if slots:
cls = _add_slots(cls, frozen)
cls = _add_slots(cls, frozen, weakref_slot)

abc.update_abstractmethods(cls)

return cls


# _dataclass_getstate and _dataclass_setstate are needed for pickling frozen
# classes with slots. These could be slighly more performant if we generated
# classes with slots. These could be slightly more performant if we generated
# the code instead of iterating over fields. But that can be a project for
# another day, if performance becomes an issue.
def _dataclass_getstate(self):
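
Note: the getstate/setstate pair above is what lets frozen, slotted dataclasses round-trip through pickle. A quick check assuming the patched module:

import pickle
from dataclasses import dataclass

@dataclass(frozen=True, slots=True)
class P:
    x: int
    y: int

p = P(1, 2)
assert pickle.loads(pickle.dumps(p)) == p
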
@@ -1119,7 +1125,21 @@ def _dataclass_setstate(self, state):
object.__setattr__(self, field.name, value)


def _add_slots(cls, is_frozen):
def _get_slots(cls):
match cls.__dict__.get('__slots__'):
[Review comment, Member] blocked by match-statement support

case None:
return
case str(slot):
yield slot
# Slots may be any iterable, but we cannot handle an iterator
# because it will already be (partially) consumed.
case iterable if not hasattr(iterable, '__next__'):
yield from iterable
case _:
raise TypeError(f"Slots of '{cls.__name__}' cannot be determined")


def _add_slots(cls, is_frozen, weakref_slot):
# Need to create a new class, since we can't set __slots__
# after a class has been created.

@@ -1130,7 +1150,23 @@ def _add_slots(cls, is_frozen):
# Create a new dict for our new class.
cls_dict = dict(cls.__dict__)
field_names = tuple(f.name for f in fields(cls))
cls_dict['__slots__'] = field_names
# Make sure slots don't overlap with those in base classes.
inherited_slots = set(
itertools.chain.from_iterable(map(_get_slots, cls.__mro__[1:-1]))
)
# The slots for our class. Remove slots from our base classes. Add
# '__weakref__' if weakref_slot was given, unless it is already present.
cls_dict["__slots__"] = tuple(
itertools.filterfalse(
inherited_slots.__contains__,
itertools.chain(
# gh-93521: '__weakref__' also needs to be filtered out if
# already present in inherited_slots
field_names, ('__weakref__',) if weakref_slot else ()
)
),
)

for field_name in field_names:
# Remove our attributes, if present. They'll still be
# available in _MARKER.
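
Note: the filtering above keeps the generated __slots__ free of names a base class already declares. One observable effect, assuming this patch (Base and Derived are illustrative names):

from dataclasses import dataclass

class Base:
    __slots__ = ('x',)

@dataclass(slots=True)
class Derived(Base):
    x: int
    y: int

# 'x' already lives in Base.__slots__, so only 'y' is added here
assert Derived.__slots__ == ('y',)
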
@@ -1155,25 +1191,25 @@

def dataclass(cls=None, /, *, init=True, repr=True, eq=True, order=False,
unsafe_hash=False, frozen=False, match_args=True,
kw_only=False, slots=False):
"""Returns the same class as was passed in, with dunder methods
added based on the fields defined in the class.
kw_only=False, slots=False, weakref_slot=False):
"""Add dunder methods based on the fields defined in the class.

Examines PEP 526 __annotations__ to determine fields.

If init is true, an __init__() method is added to the class. If
repr is true, a __repr__() method is added. If order is true, rich
If init is true, an __init__() method is added to the class. If repr
is true, a __repr__() method is added. If order is true, rich
comparison dunder methods are added. If unsafe_hash is true, a
__hash__() method function is added. If frozen is true, fields may
not be assigned to after instance creation. If match_args is true,
the __match_args__ tuple is added. If kw_only is true, then by
default all fields are keyword-only. If slots is true, an
__slots__ attribute is added.
__hash__() method is added. If frozen is true, fields may not be
assigned to after instance creation. If match_args is true, the
__match_args__ tuple is added. If kw_only is true, then by default
all fields are keyword-only. If slots is true, a new class with a
__slots__ attribute is returned.
"""

def wrap(cls):
return _process_class(cls, init, repr, eq, order, unsafe_hash,
frozen, match_args, kw_only, slots)
frozen, match_args, kw_only, slots,
weakref_slot)

# See if we're being called as @dataclass or @dataclass().
if cls is None:
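
Note: taken together, the new keyword makes slotted instances weak-referenceable on request. A usage sketch assuming the patched decorator:

import weakref
from dataclasses import dataclass

@dataclass(slots=True, weakref_slot=True)
class Point:
    x: int
    y: int

p = Point(1, 2)
assert weakref.ref(p)() is p      # '__weakref__' was added to __slots__

# weakref_slot=True without slots=True raises TypeError, per the check in _process_class.
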
@@ -1210,15 +1246,15 @@ def _is_dataclass_instance(obj):
def is_dataclass(obj):
"""Returns True if obj is a dataclass or an instance of a
dataclass."""
cls = obj if isinstance(obj, type) and not isinstance(obj, GenericAlias) else type(obj)
cls = obj if isinstance(obj, type) else type(obj)
return hasattr(cls, _FIELDS)


def asdict(obj, *, dict_factory=dict):
"""Return the fields of a dataclass instance as a new dictionary mapping
field names to field values.

Example usage:
Example usage::

@dataclass
class C:
@@ -1289,8 +1325,8 @@ class C:
x: int
y: int

c = C(1, 2)
assert astuple(c) == (1, 2)
c = C(1, 2)
assert astuple(c) == (1, 2)

If given, 'tuple_factory' will be used instead of built-in tuple.
The function applies recursively to field values that are
@@ -1332,17 +1368,18 @@ def _astuple_inner(obj, tuple_factory):

def make_dataclass(cls_name, fields, *, bases=(), namespace=None, init=True,
repr=True, eq=True, order=False, unsafe_hash=False,
frozen=False, match_args=True, kw_only=False, slots=False):
frozen=False, match_args=True, kw_only=False, slots=False,
weakref_slot=False):
"""Return a new dynamically created dataclass.

The dataclass name will be 'cls_name'. 'fields' is an iterable
of either (name), (name, type) or (name, type, Field) objects. If type is
omitted, use the string 'typing.Any'. Field objects are created by
the equivalent of calling 'field(name, type [, Field-info])'.
the equivalent of calling 'field(name, type [, Field-info])'.::

C = make_dataclass('C', ['x', ('y', int), ('z', int, field(init=False))], bases=(Base,))

is equivalent to:
is equivalent to::

@dataclass
class C(Base):
@@ -1399,13 +1436,14 @@ def exec_body_callback(ns):
# Apply the normal decorator.
return dataclass(cls, init=init, repr=repr, eq=eq, order=order,
unsafe_hash=unsafe_hash, frozen=frozen,
match_args=match_args, kw_only=kw_only, slots=slots)
match_args=match_args, kw_only=kw_only, slots=slots,
weakref_slot=weakref_slot)
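
Note: make_dataclass forwards the same keywords, so the dynamic form can request slots and a weakref slot as well. A sketch assuming the patched function ('Point' is an arbitrary example name):

from dataclasses import make_dataclass, field

Point = make_dataclass(
    'Point',
    [('x', int), ('y', int, field(default=0))],
    slots=True,
    weakref_slot=True,
)
print(Point(1))   # Point(x=1, y=0)
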


def replace(obj, /, **changes):
"""Return a new object replacing specified fields with new values.

This is especially useful for frozen classes. Example usage:
This is especially useful for frozen classes. Example usage::

@dataclass(frozen=True)
class C:
@@ -1415,7 +1453,7 @@ class C:
c = C(1, 2)
c1 = replace(c, x=3)
assert c1.x == 3 and c1.y == 2
"""
"""

# We're going to mutate 'changes', but that's okay because it's a
# new dict, even if called with 'replace(obj, **my_changes)'.