Keep cloudpickle up-to-date with the upstream (#7406)

Author: Siyuan (Ryans) Zhuang, 2020-03-03 13:52:54 -08:00 (committed by GitHub)
parent b0bf5450c2
commit f6883bf725
3 changed files with 161 additions and 336 deletions

python/ray/cloudpickle/__init__.py

@@ -16,4 +16,4 @@ else:
from ray.cloudpickle.cloudpickle import *
FAST_CLOUDPICKLE_USED = False
__version__ = '1.2.2.dev0'
__version__ = '1.4.0.dev0'
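
The version bump tracks the upstream cloudpickle release that the rest of this diff vendors. A quick way to confirm which backend loaded at runtime (a usage sketch, assuming the vendored ray.cloudpickle package and the FAST_CLOUDPICKLE_USED flag defined in this __init__.py):

import ray.cloudpickle as cloudpickle

# __version__ comes from this __init__.py; FAST_CLOUDPICKLE_USED tells whether
# the pickle5-backed cloudpickle_fast module was importable.
print(cloudpickle.__version__)            # e.g. '1.4.0.dev0'
print(cloudpickle.FAST_CLOUDPICKLE_USED)  # True when the fast path is active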

python/ray/cloudpickle/cloudpickle.py

@@ -42,6 +42,8 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
from __future__ import print_function
import abc
import builtins
import dis
from functools import partial
import io
@@ -58,12 +60,13 @@ import types
import weakref
import uuid
import threading
from enum import Enum
from pickle import _Pickler as Pickler
from pickle import _getattribute
from io import BytesIO
from importlib._bootstrap import _find_spec
try:
from enum import Enum
except ImportError:
Enum = None
# cloudpickle is meant for inter process communication: we expect all
# communicating processes to run the same Python version hence we favor
@@ -84,24 +87,6 @@ if PYPY:
# builtin-code objects only exist in pypy
builtin_code_type = type(float.__new__.__code__)
if sys.version_info[0] < 3: # pragma: no branch
from pickle import Pickler
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
string_types = (basestring,) # noqa
PY3 = False
PY2 = True
else:
types.ClassType = type
from pickle import _Pickler as Pickler
from io import BytesIO as StringIO
string_types = (str,)
PY3 = True
PY2 = False
from importlib._bootstrap import _find_spec
_extract_code_globals_cache = weakref.WeakKeyDictionary()
@@ -123,21 +108,6 @@ def _lookup_class_or_track(class_tracker_id, class_def):
_DYNAMIC_CLASS_TRACKER_BY_CLASS[class_def] = class_tracker_id
return class_def
if sys.version_info[:2] >= (3, 5):
from pickle import _getattribute
elif sys.version_info[:2] >= (3, 4):
from pickle import _getattribute as _py34_getattribute
# pickle._getattribute does not return the parent under Python 3.4
def _getattribute(obj, name):
return _py34_getattribute(obj, name), None
else:
# pickle._getattribute is a python3 addition and enchancement of getattr,
# that can handle dotted attribute names. In cloudpickle for python2,
# handling dotted names is not needed, so we simply define _getattribute as
# a wrapper around getattr.
def _getattribute(obj, name):
return getattr(obj, name, None), None
def _whichmodule(obj, name):
"""Find the module an object belongs to.
@@ -151,10 +121,17 @@ def _whichmodule(obj, name):
module_name = getattr(obj, '__module__', None)
if module_name is not None:
return module_name
# Protect the iteration by using a list copy of sys.modules against dynamic
# modules that trigger imports of other modules upon calls to getattr.
for module_name, module in list(sys.modules.items()):
if module_name == '__main__' or module is None:
# Protect the iteration by using a copy of sys.modules against dynamic
# modules that trigger imports of other modules upon calls to getattr or
# other threads importing at the same time.
for module_name, module in sys.modules.copy().items():
# Some modules such as coverage can inject non-module objects inside
# sys.modules
if (
module_name == '__main__' or
module is None or
not isinstance(module, types.ModuleType)
):
continue
try:
if _getattribute(module, name)[0] is obj:
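
A minimal sketch of the hazard the snapshot above guards against: sys.modules can gain entries mid-iteration when other threads import, and tools such as coverage can leave non-module placeholders in it. Nothing below is part of the patch; it only illustrates the check.

import sys
import types

snapshot = sys.modules.copy()  # iterate a stable copy, as _whichmodule now does
non_modules = [name for name, mod in snapshot.items()
               if mod is not None and not isinstance(mod, types.ModuleType)]
print(non_modules)  # usually empty, but _whichmodule no longer assumes so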
@@ -346,41 +323,23 @@ def _make_cell_set_template_code():
co = _cell_set_factory.__code__
if PY2: # pragma: no branch
_cell_set_template_code = types.CodeType(
co.co_argcount,
co.co_nlocals,
co.co_stacksize,
co.co_flags,
co.co_code,
co.co_consts,
co.co_names,
co.co_varnames,
co.co_filename,
co.co_name,
co.co_firstlineno,
co.co_lnotab,
co.co_cellvars, # co_freevars is initialized with co_cellvars
(), # co_cellvars is made empty
)
else:
_cell_set_template_code = types.CodeType(
co.co_argcount,
co.co_kwonlyargcount, # Python 3 only argument
co.co_nlocals,
co.co_stacksize,
co.co_flags,
co.co_code,
co.co_consts,
co.co_names,
co.co_varnames,
co.co_filename,
co.co_name,
co.co_firstlineno,
co.co_lnotab,
co.co_cellvars, # co_freevars is initialized with co_cellvars
(), # co_cellvars is made empty
)
_cell_set_template_code = types.CodeType(
co.co_argcount,
co.co_kwonlyargcount, # Python 3 only argument
co.co_nlocals,
co.co_stacksize,
co.co_flags,
co.co_code,
co.co_consts,
co.co_names,
co.co_varnames,
co.co_filename,
co.co_name,
co.co_firstlineno,
co.co_lnotab,
co.co_cellvars, # co_freevars is initialized with co_cellvars
(), # co_cellvars is made empty
)
return _cell_set_template_code
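
For context, a tiny sketch of the cell objects this template code manipulates: closures capture free variables in cells, and cloudpickle has to rebuild and fill such cells on the unpickling side. Illustration only, not the vendored code.

def make_cell(value):
    # A closure stores `value` in a cell; cloudpickle recreates cells like
    # this one and sets their contents when restoring closures.
    return (lambda: value).__closure__[0]

cell = make_cell(42)
print(cell.cell_contents)  # 42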
@@ -406,41 +365,15 @@ def _builtin_type(name):
return getattr(types, name)
if sys.version_info < (3, 4): # pragma: no branch
def _walk_global_ops(code):
"""
Yield (opcode, argument number) tuples for all
global-referencing instructions in *code*.
"""
code = getattr(code, 'co_code', b'')
if PY2: # pragma: no branch
code = map(ord, code)
n = len(code)
i = 0
extended_arg = 0
while i < n:
op = code[i]
i += 1
if op >= HAVE_ARGUMENT:
oparg = code[i] + code[i + 1] * 256 + extended_arg
extended_arg = 0
i += 2
if op == EXTENDED_ARG:
extended_arg = oparg * 65536
if op in GLOBAL_OPS:
yield op, oparg
else:
def _walk_global_ops(code):
"""
Yield (opcode, argument number) tuples for all
global-referencing instructions in *code*.
"""
for instr in dis.get_instructions(code):
op = instr.opcode
if op in GLOBAL_OPS:
yield op, instr.arg
def _walk_global_ops(code):
"""
Yield (opcode, argument number) tuples for all
global-referencing instructions in *code*.
"""
for instr in dis.get_instructions(code):
op = instr.opcode
if op in GLOBAL_OPS:
yield op, instr.arg
def _extract_class_dict(cls):
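
A hedged sketch of what the unified _walk_global_ops relies on: dis.get_instructions yields structured instructions on Python 3.4+, so global references can be read off opcode names instead of hand-decoding bytecode. The sample function below is illustrative, not from the patch.

import dis

def sample():
    return len([1, 2, 3])   # len is resolved as a global

# Same idea as _walk_global_ops: scan instructions for global-referencing ops.
for instr in dis.get_instructions(sample.__code__):
    if instr.opname in ("LOAD_GLOBAL", "STORE_GLOBAL", "DELETE_GLOBAL"):
        print(instr.opname, instr.argval)  # LOAD_GLOBAL len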
@@ -492,17 +425,12 @@ class CloudPickler(Pickler):
dispatch[memoryview] = save_memoryview
if PY2: # pragma: no branch
def save_buffer(self, obj):
self.save(str(obj))
dispatch[buffer] = save_buffer # noqa: F821 'buffer' was removed in Python 3
def save_module(self, obj):
"""
Save a module as an import
"""
if _is_dynamic(obj):
obj.__dict__.pop('__builtins__', None)
self.save_reduce(dynamic_subimport, (obj.__name__, vars(obj)),
obj=obj)
else:
@@ -514,29 +442,22 @@ class CloudPickler(Pickler):
"""
Save a code object
"""
if PY3: # pragma: no branch
if hasattr(obj, "co_posonlyargcount"): # pragma: no branch
args = (
obj.co_argcount, obj.co_posonlyargcount,
obj.co_kwonlyargcount, obj.co_nlocals, obj.co_stacksize,
obj.co_flags, obj.co_code, obj.co_consts, obj.co_names,
obj.co_varnames, obj.co_filename, obj.co_name,
obj.co_firstlineno, obj.co_lnotab, obj.co_freevars,
obj.co_cellvars
)
else:
args = (
obj.co_argcount, obj.co_kwonlyargcount, obj.co_nlocals,
obj.co_stacksize, obj.co_flags, obj.co_code, obj.co_consts,
obj.co_names, obj.co_varnames, obj.co_filename,
obj.co_name, obj.co_firstlineno, obj.co_lnotab,
obj.co_freevars, obj.co_cellvars
)
if hasattr(obj, "co_posonlyargcount"): # pragma: no branch
args = (
obj.co_argcount, obj.co_posonlyargcount,
obj.co_kwonlyargcount, obj.co_nlocals, obj.co_stacksize,
obj.co_flags, obj.co_code, obj.co_consts, obj.co_names,
obj.co_varnames, obj.co_filename, obj.co_name,
obj.co_firstlineno, obj.co_lnotab, obj.co_freevars,
obj.co_cellvars
)
else:
args = (
obj.co_argcount, obj.co_nlocals, obj.co_stacksize, obj.co_flags, obj.co_code,
obj.co_consts, obj.co_names, obj.co_varnames, obj.co_filename, obj.co_name,
obj.co_firstlineno, obj.co_lnotab, obj.co_freevars, obj.co_cellvars
obj.co_argcount, obj.co_kwonlyargcount, obj.co_nlocals,
obj.co_stacksize, obj.co_flags, obj.co_code, obj.co_consts,
obj.co_names, obj.co_varnames, obj.co_filename,
obj.co_name, obj.co_firstlineno, obj.co_lnotab,
obj.co_freevars, obj.co_cellvars
)
self.save_reduce(types.CodeType, args, obj=obj)
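
The remaining branch keys off co_posonlyargcount, which code objects only grew in Python 3.8 (PEP 570). A small check, as a sketch:

import sys

def f(a, b):
    return a + b

# On 3.8+ the attribute exists and save_codeobject includes it in the
# reconstruction args; on 3.7 the shorter tuple is used instead.
print(sys.version_info[:2], hasattr(f.__code__, "co_posonlyargcount"))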
@@ -590,13 +511,12 @@ class CloudPickler(Pickler):
"""
members = dict((e.name, e.value) for e in obj)
# Python 2.7 with enum34 can have no qualname:
qualname = getattr(obj, "__qualname__", None)
self.save_reduce(_make_skeleton_enum,
(obj.__bases__, obj.__name__, qualname, members,
obj.__module__, _ensure_tracking(obj), None),
obj=obj)
self.save_reduce(
_make_skeleton_enum,
(obj.__bases__, obj.__name__, obj.__qualname__,
members, obj.__module__, _ensure_tracking(obj), None),
obj=obj
)
# Cleanup the clsdict that will be passed to _rehydrate_skeleton_class:
# Those attributes are already handled by the metaclass.
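
With Python 2 gone, every Enum subclass has __qualname__, so the getattr fallback is unnecessary. A usage sketch of the path this feeds (_make_skeleton_enum), assuming the vendored ray.cloudpickle import path:

from enum import Enum
import ray.cloudpickle as cloudpickle  # assumed import path for the vendored module

# An enum created at runtime has no importable module, so it is pickled by
# value through save_dynamic_enum / _make_skeleton_enum.
Color = Enum("Color", "RED GREEN")
restored = cloudpickle.loads(cloudpickle.dumps(Color))
print(restored.RED.name, restored.RED.value)  # RED 1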
@@ -617,26 +537,38 @@ class CloudPickler(Pickler):
clsdict = _extract_class_dict(obj)
clsdict.pop('__weakref__', None)
# For ABCMeta in python3.7+, remove _abc_impl as it is not picklable.
# This is a fix which breaks the cache but this only makes the first
# calls to issubclass slower.
if "_abc_impl" in clsdict:
import abc
(registry, _, _, _) = abc._get_dump(obj)
clsdict["_abc_impl"] = [subclass_weakref()
for subclass_weakref in registry]
if issubclass(type(obj), abc.ABCMeta):
# If obj is an instance of an ABCMeta subclass, dont pickle the
# cache/negative caches populated during isinstance/issubclass
# checks, but pickle the list of registered subclasses of obj.
clsdict.pop('_abc_cache', None)
clsdict.pop('_abc_negative_cache', None)
clsdict.pop('_abc_negative_cache_version', None)
registry = clsdict.pop('_abc_registry', None)
if registry is None:
# in Python3.7+, the abc caches and registered subclasses of a
# class are bundled into the single _abc_impl attribute
clsdict.pop('_abc_impl', None)
(registry, _, _, _) = abc._get_dump(obj)
clsdict["_abc_impl"] = [subclass_weakref()
for subclass_weakref in registry]
else:
# In the above if clause, registry is a set of weakrefs -- in
# this case, registry is a WeakSet
clsdict["_abc_impl"] = [type_ for type_ in registry]
# On PyPy, __doc__ is a readonly attribute, so we need to include it in
# the initial skeleton class. This is safe because we know that the
# doc can't participate in a cycle with the original class.
type_kwargs = {'__doc__': clsdict.pop('__doc__', None)}
if hasattr(obj, "__slots__"):
if "__slots__" in clsdict:
type_kwargs['__slots__'] = obj.__slots__
# pickle string length optimization: member descriptors of obj are
# created automatically from obj's __slots__ attribute, no need to
# save them in obj's state
if isinstance(obj.__slots__, string_types):
if isinstance(obj.__slots__, str):
clsdict.pop(obj.__slots__)
else:
for k in obj.__slots__:
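
The registry pulled out of abc._get_dump is what actually gets pickled in place of the opaque _abc_impl slot. A short sketch of that private helper's shape on CPython 3.7+; the class names are made up for illustration.

import abc

class Base(abc.ABC):
    pass

class Plugin:
    pass

Base.register(Plugin)
# _get_dump returns (registry, cache, negative_cache, negative_cache_version);
# the registry holds weakrefs to registered virtual subclasses, which is the
# only part worth carrying across the pickle boundary.
registry, _, _, _ = abc._get_dump(Base)
print([ref() for ref in registry])  # [<class '__main__.Plugin'>]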
@@ -644,10 +576,16 @@ class CloudPickler(Pickler):
# If type overrides __dict__ as a property, include it in the type
# kwargs. In Python 2, we can't set this attribute after construction.
# XXX: can this ever happen in Python 3? If so add a test.
__dict__ = clsdict.pop('__dict__', None)
if isinstance(__dict__, property):
type_kwargs['__dict__'] = __dict__
if sys.version_info < (3, 7):
# Although annotations were added in Python 3.4, It is not possible
# to properly pickle them until Python 3.7. (See #193)
clsdict.pop('__annotations__', None)
save = self.save
write = self.write
@@ -747,7 +685,9 @@ class CloudPickler(Pickler):
'doc': func.__doc__,
'_cloudpickle_submodules': submodules
}
if hasattr(func, '__annotations__') and sys.version_info >= (3, 4):
if hasattr(func, '__annotations__') and sys.version_info >= (3, 7):
# Although annotations were added in Python3.4, It is not possible
# to properly pickle them until Python3.7. (See #193)
state['annotations'] = func.__annotations__
if hasattr(func, '__qualname__'):
state['qualname'] = func.__qualname__
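
A usage sketch of what the tightened 3.7+ gate covers: a dynamically defined function keeps its __annotations__ through a round trip. It assumes the vendored ray.cloudpickle import path.

import ray.cloudpickle as cloudpickle  # assumed import path for the vendored module

def scale(x: float, factor: float = 2.0) -> float:
    return x * factor

restored = cloudpickle.loads(cloudpickle.dumps(scale))
print(restored(3.0))             # 6.0
print(restored.__annotations__)  # {'x': <class 'float'>, 'factor': <class 'float'>, 'return': <class 'float'>}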
@@ -809,45 +749,6 @@ class CloudPickler(Pickler):
return (code, f_globals, defaults, closure, dct, base_globals)
if not PY3: # pragma: no branch
# Python3 comes with native reducers that allow builtin functions and
# methods pickling as module/class attributes. The following method
# extends this for python2.
# Please note that currently, neither pickle nor cloudpickle support
# dynamically created builtin functions/method pickling.
def save_builtin_function_or_method(self, obj):
is_bound = getattr(obj, '__self__', None) is not None
if is_bound:
# obj is a bound builtin method.
rv = (getattr, (obj.__self__, obj.__name__))
return self.save_reduce(obj=obj, *rv)
is_unbound = hasattr(obj, '__objclass__')
if is_unbound:
# obj is an unbound builtin method (accessed from its class)
rv = (getattr, (obj.__objclass__, obj.__name__))
return self.save_reduce(obj=obj, *rv)
# Otherwise, obj is not a method, but a function. Fallback to
# default pickling by attribute.
return Pickler.save_global(self, obj)
dispatch[types.BuiltinFunctionType] = save_builtin_function_or_method
# A comprehensive summary of the various kinds of builtin methods can
# be found in PEP 579: https://www.python.org/dev/peps/pep-0579/
classmethod_descriptor_type = type(float.__dict__['fromhex'])
wrapper_descriptor_type = type(float.__repr__)
method_wrapper_type = type(1.5.__repr__)
dispatch[classmethod_descriptor_type] = save_builtin_function_or_method
dispatch[wrapper_descriptor_type] = save_builtin_function_or_method
dispatch[method_wrapper_type] = save_builtin_function_or_method
if sys.version_info[:2] < (3, 4):
method_descriptor = type(str.upper)
dispatch[method_descriptor] = save_builtin_function_or_method
def save_getset_descriptor(self, obj):
return self.save_reduce(getattr, (obj.__objclass__, obj.__name__))
@@ -877,73 +778,20 @@ class CloudPickler(Pickler):
Pickler.save_global(self, obj, name=name)
dispatch[type] = save_global
dispatch[types.ClassType] = save_global
def save_instancemethod(self, obj):
# Memoization rarely is ever useful due to python bounding
if obj.__self__ is None:
self.save_reduce(getattr, (obj.im_class, obj.__name__))
else:
if PY3: # pragma: no branch
self.save_reduce(types.MethodType, (obj.__func__, obj.__self__), obj=obj)
else:
self.save_reduce(
types.MethodType,
(obj.__func__, obj.__self__, type(obj.__self__)), obj=obj)
self.save_reduce(types.MethodType, (obj.__func__, obj.__self__), obj=obj)
dispatch[types.MethodType] = save_instancemethod
def save_inst(self, obj):
"""Inner logic to save instance. Based off pickle.save_inst"""
cls = obj.__class__
# Try the dispatch table (pickle module doesn't do it)
f = self.dispatch.get(cls)
if f:
f(self, obj) # Call unbound method with explicit self
return
memo = self.memo
write = self.write
save = self.save
if hasattr(obj, '__getinitargs__'):
args = obj.__getinitargs__()
len(args) # XXX Assert it's a sequence
pickle._keep_alive(args, memo)
else:
args = ()
write(pickle.MARK)
if self.bin:
save(cls)
for arg in args:
save(arg)
write(pickle.OBJ)
else:
for arg in args:
save(arg)
write(pickle.INST + cls.__module__ + '\n' + cls.__name__ + '\n')
self.memoize(obj)
try:
getstate = obj.__getstate__
except AttributeError:
stuff = obj.__dict__
else:
stuff = getstate()
pickle._keep_alive(stuff, memo)
save(stuff)
write(pickle.BUILD)
if PY2: # pragma: no branch
dispatch[types.InstanceType] = save_inst
def save_property(self, obj):
# properties not correctly saved in python
self.save_reduce(property, (obj.fget, obj.fset, obj.fdel, obj.__doc__), obj=obj)
self.save_reduce(property, (obj.fget, obj.fset, obj.fdel, obj.__doc__),
obj=obj)
dispatch[property] = save_property
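
With the Python 2 branches removed above, bound methods always reduce to types.MethodType(func, self) and properties to property(fget, fset, fdel, doc). A hedged round-trip sketch for the method case, assuming the vendored ray.cloudpickle import path:

import types
import ray.cloudpickle as cloudpickle  # assumed import path for the vendored module

class Counter:
    def __init__(self):
        self.n = 0

    def bump(self):
        self.n += 1
        return self.n

# Pickling a bound method carries both the function and its instance.
bump = cloudpickle.loads(cloudpickle.dumps(Counter().bump))
print(isinstance(bump, types.MethodType), bump())  # True 1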
@@ -991,10 +839,6 @@ class CloudPickler(Pickler):
def save_file(self, obj):
"""Save a file"""
try:
import StringIO as pystringIO # we can't use cStringIO as it lacks the name attribute
except ImportError:
import io as pystringIO
if not hasattr(obj, 'name') or not hasattr(obj, 'mode'):
raise pickle.PicklingError("Cannot pickle files that do not map to an actual file")
@@ -1013,7 +857,8 @@ class CloudPickler(Pickler):
name = obj.name
retval = pystringIO.StringIO()
# TODO: also support binary mode files with io.BytesIO
retval = io.StringIO()
try:
# Read the whole file
@@ -1036,11 +881,7 @@ class CloudPickler(Pickler):
def save_not_implemented(self, obj):
self.save_reduce(_gen_not_implemented, ())
try: # Python 2
dispatch[file] = save_file
except NameError: # Python 3 # pragma: no branch
dispatch[io.TextIOWrapper] = save_file
dispatch[io.TextIOWrapper] = save_file
dispatch[type(Ellipsis)] = save_ellipsis
dispatch[type(NotImplemented)] = save_not_implemented
@@ -1117,7 +958,7 @@ def dumps(obj, protocol=None):
Set protocol=pickle.DEFAULT_PROTOCOL instead if you need to ensure
compatibility with older versions of Python.
"""
file = StringIO()
file = BytesIO()
try:
cp = CloudPickler(file, protocol=protocol)
cp.dump(obj)
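
Since the pickler now writes into io.BytesIO, dumps always returns bytes. A quick usage sketch, assuming the vendored ray.cloudpickle import path:

import ray.cloudpickle as cloudpickle  # assumed import path for the vendored module

payload = cloudpickle.dumps({"answer": 42})
print(type(payload))               # <class 'bytes'>
print(cloudpickle.loads(payload))  # {'answer': 42}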
@@ -1140,6 +981,7 @@ def subimport(name):
def dynamic_subimport(name, vars):
mod = types.ModuleType(name)
mod.__dict__.update(vars)
mod.__dict__['__builtins__'] = builtins.__dict__
return mod
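
save_module strips __builtins__ before pickling a dynamic module's dict, and dynamic_subimport now puts a fresh reference back so the restored module behaves like a normal one. A sketch, assuming the vendored ray.cloudpickle import path:

import types
import ray.cloudpickle as cloudpickle  # assumed import path for the vendored module

mod = types.ModuleType("scratch_mod")  # no __file__ and no usable spec, so "dynamic"
mod.answer = 42

restored = cloudpickle.loads(cloudpickle.dumps(mod))
print(restored.answer)                      # 42
print("__builtins__" in restored.__dict__)  # True, re-injected by dynamic_subimport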
@@ -1338,10 +1180,7 @@ def _make_skeleton_enum(bases, name, qualname, members, module,
classdict[member_name] = member_value
enum_class = metacls.__new__(metacls, name, bases, classdict)
enum_class.__module__ = module
# Python 2.7 compat
if qualname is not None:
enum_class.__qualname__ = qualname
enum_class.__qualname__ = qualname
return _lookup_class_or_track(class_tracker_id, enum_class)
@@ -1355,41 +1194,25 @@ def _is_dynamic(module):
if hasattr(module, '__file__'):
return False
if hasattr(module, '__spec__'):
if module.__spec__ is not None:
return False
# In PyPy, Some built-in modules such as _codecs can have their
# __spec__ attribute set to None despite being imported. For such
# modules, the ``_find_spec`` utility of the standard library is used.
parent_name = module.__name__.rpartition('.')[0]
if parent_name: # pragma: no cover
# This code handles the case where an imported package (and not
# module) remains with __spec__ set to None. It is however untested
# as no package in the PyPy stdlib has __spec__ set to None after
# it is imported.
try:
parent = sys.modules[parent_name]
except KeyError:
msg = "parent {!r} not in sys.modules"
raise ImportError(msg.format(parent_name))
else:
pkgpath = parent.__path__
else:
pkgpath = None
return _find_spec(module.__name__, pkgpath, module) is None
else:
# Backward compat for Python 2
import imp
try:
path = None
for part in module.__name__.split('.'):
if path is not None:
path = [path]
f, path, description = imp.find_module(part, path)
if f is not None:
f.close()
except ImportError:
return True
if module.__spec__ is not None:
return False
# In PyPy, Some built-in modules such as _codecs can have their
# __spec__ attribute set to None despite being imported. For such
# modules, the ``_find_spec`` utility of the standard library is used.
parent_name = module.__name__.rpartition('.')[0]
if parent_name: # pragma: no cover
# This code handles the case where an imported package (and not
# module) remains with __spec__ set to None. It is however untested
# as no package in the PyPy stdlib has __spec__ set to None after
# it is imported.
try:
parent = sys.modules[parent_name]
except KeyError:
msg = "parent {!r} not in sys.modules"
raise ImportError(msg.format(parent_name))
else:
pkgpath = parent.__path__
else:
pkgpath = None
return _find_spec(module.__name__, pkgpath, module) is None
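
A sketch of the distinction _is_dynamic draws now that the imp-based Python 2 fallback is gone: importable modules report False (pickled by reference), in-memory modules report True (pickled by value). The dotted import of the private helper is an assumption about Ray's vendored layout.

import os
import types
from ray.cloudpickle.cloudpickle import _is_dynamic  # assumed vendored path

scratch = types.ModuleType("scratch_mod")
print(_is_dynamic(os))       # False: has __file__ / a real spec
print(_is_dynamic(scratch))  # True: nothing for _find_spec to locate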

python/ray/cloudpickle/cloudpickle_fast.py

@@ -26,7 +26,7 @@ from .cloudpickle import (
_is_dynamic, _extract_code_globals, _BUILTIN_TYPE_NAMES, DEFAULT_PROTOCOL,
_find_imported_submodules, _get_cell_contents, _is_global, _builtin_type,
Enum, _ensure_tracking, _make_skeleton_class, _make_skeleton_enum,
_extract_class_dict, string_types, dynamic_subimport, subimport, cell_set,
_extract_class_dict, dynamic_subimport, subimport, cell_set,
_make_empty_cell
)
@@ -52,8 +52,7 @@ def dump(obj, file, protocol=None, buffer_callback=None):
Set protocol=pickle.DEFAULT_PROTOCOL instead if you need to ensure
compatibility with older versions of Python.
"""
CloudPickler(file, protocol=protocol,
buffer_callback=buffer_callback).dump(obj)
CloudPickler(file, protocol=protocol, buffer_callback=buffer_callback).dump(obj)
def dumps(obj, protocol=None, buffer_callback=None):
@@ -67,8 +66,7 @@ def dumps(obj, protocol=None, buffer_callback=None):
compatibility with older versions of Python.
"""
with io.BytesIO() as file:
cp = CloudPickler(file, protocol=protocol,
buffer_callback=buffer_callback)
cp = CloudPickler(file, protocol=protocol, buffer_callback=buffer_callback)
cp.dump(obj)
return file.getvalue()
@@ -78,12 +76,12 @@ def dumps(obj, protocol=None, buffer_callback=None):
def _class_getnewargs(obj):
type_kwargs = {}
if hasattr(obj, "__slots__"):
if "__slots__" in obj.__dict__:
type_kwargs["__slots__"] = obj.__slots__
__dict__ = obj.__dict__.get("__dict__", None)
__dict__ = obj.__dict__.get('__dict__', None)
if isinstance(__dict__, property):
type_kwargs["__dict__"] = __dict__
type_kwargs['__dict__'] = __dict__
return (type(obj), obj.__name__, obj.__bases__, type_kwargs,
_ensure_tracking(obj), None)
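
The switch from hasattr(obj, "__slots__") to "__slots__" in obj.__dict__ matters for subclasses: hasattr also sees an inherited __slots__, while the __dict__ test only fires for slots the class defines itself. A minimal illustration with made-up class names:

class Base:
    __slots__ = ("x",)

class Child(Base):   # defines no slots of its own
    pass

# hasattr follows the MRO, the __dict__ check does not.
print(hasattr(Child, "__slots__"))    # True  (inherited)
print("__slots__" in Child.__dict__)  # False (not defined on Child)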
@@ -143,26 +141,32 @@ def _function_getstate(func):
def _class_getstate(obj):
clsdict = _extract_class_dict(obj)
clsdict.pop("__weakref__", None)
clsdict.pop('__weakref__', None)
# For ABCMeta in python3.7+, remove _abc_impl as it is not picklable.
# This is a fix which breaks the cache but this only makes the first
# calls to issubclass slower.
if "_abc_impl" in clsdict:
(registry, _, _, _) = abc._get_dump(obj)
clsdict["_abc_impl"] = [subclass_weakref()
for subclass_weakref in registry]
if hasattr(obj, "__slots__"):
# TODO: not sure if we are going to implement it for python < 3.7.
# But we are sure that we didn't have the '_abc_impl' field in python < 3.7,
# and there are currently no reported issues about it.
if sys.version_info >= (3, 7):
if issubclass(type(obj), abc.ABCMeta):
# If obj is an instance of an ABCMeta subclass, dont pickle the
# cache/negative caches populated during isinstance/issubclass
# checks, but pickle the list of registered subclasses of obj.
clsdict.pop('_abc_impl', None)
(registry, _, _, _) = abc._get_dump(obj)
clsdict["_abc_impl"] = [subclass_weakref()
for subclass_weakref in registry]
if "__slots__" in clsdict:
# pickle string length optimization: member descriptors of obj are
# created automatically from obj's __slots__ attribute, no need to
# save them in obj's state
if isinstance(obj.__slots__, string_types):
if isinstance(obj.__slots__, str):
clsdict.pop(obj.__slots__)
else:
for k in obj.__slots__:
clsdict.pop(k, None)
clsdict.pop("__dict__", None) # unpicklable property object
clsdict.pop('__dict__', None) # unpicklable property object
return (clsdict, {})
@@ -304,6 +308,7 @@ def _memoryview_reduce(obj):
def _module_reduce(obj):
if _is_dynamic(obj):
obj.__dict__.pop('__builtins__', None)
return dynamic_subimport, (obj.__name__, vars(obj))
else:
return subimport, (obj.__name__,)
@@ -321,6 +326,10 @@ def _root_logger_reduce(obj):
return logging.getLogger, ()
def _property_reduce(obj):
return property, (obj.fget, obj.fset, obj.fdel, obj.__doc__)
def _weakset_reduce(obj):
return weakref.WeakSet, (list(obj),)
@@ -406,11 +415,6 @@ def _class_setstate(obj, state):
return obj
def _property_reduce(obj):
# Python < 3.8 only
return property, (obj.fget, obj.fset, obj.fdel, obj.__doc__)
def _numpy_frombuffer(buffer, dtype, shape, order):
# Get the _frombuffer() function for reconstruction
from numpy.core.numeric import _frombuffer
@@ -477,25 +481,23 @@ class CloudPickler(Pickler):
dispatch[logging.Logger] = _logger_reduce
dispatch[logging.RootLogger] = _root_logger_reduce
dispatch[memoryview] = _memoryview_reduce
dispatch[property] = _property_reduce
dispatch[staticmethod] = _classmethod_reduce
if sys.version_info[:2] >= (3, 8):
dispatch[types.CellType] = _cell_reduce
else:
dispatch[type(_make_empty_cell())] = _cell_reduce
dispatch[types.CodeType] = _code_reduce
dispatch[types.GetSetDescriptorType] = _getset_descriptor_reduce
dispatch[types.ModuleType] = _module_reduce
dispatch[types.MethodType] = _method_reduce
dispatch[types.MappingProxyType] = _mappingproxy_reduce
dispatch[weakref.WeakSet] = _weakset_reduce
if sys.version_info[:2] >= (3, 8):
dispatch[types.CellType] = _cell_reduce
else:
dispatch[type(_make_empty_cell())] = _cell_reduce
if sys.version_info[:2] < (3, 8):
dispatch[property] = _property_reduce
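
For reference, the reduction that _property_reduce (registered above for Python < 3.8) performs can be sketched without the pickler at all: a property is rebuilt from its fget/fset/fdel functions and docstring. Names below are illustrative only.

class Box:
    def _get_value(self):
        return self._value

    value = property(_get_value, None, None, "The stored value.")

p = Box.__dict__["value"]
rebuilt = property(p.fget, p.fset, p.fdel, p.__doc__)
print(rebuilt.__doc__)  # The stored value.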
def __init__(self, file, protocol=None, buffer_callback=None):
if protocol is None:
protocol = DEFAULT_PROTOCOL
Pickler.__init__(self, file, protocol=protocol,
buffer_callback=buffer_callback)
Pickler.__init__(self, file, protocol=protocol, buffer_callback=buffer_callback)
# map functions __globals__ attribute ids, to ensure that functions
# sharing the same global namespace at pickling time also share their
# global namespace at unpickling time.