mirror of https://github.com/vale981/jobmanager, synced 2025-03-06 02:11:39 -05:00
extending the PersistentData_Server, and some testing
This commit is contained in:
parent 149fc38b26
commit 51e8483b57
2 changed files with 77 additions and 3 deletions
@@ -2,6 +2,60 @@
 # -*- coding: utf-8 -*-
 
 from .jobmanager import JobManager_Server
+import pickle
+
+def recursive_scan_for_instance(obj, type, explicit_exclude=None):
+    """
+    Try to do a recursive check to see whether 'obj' is of type
+    'type' or contains items of type 'type'.
+
+    If obj is a mapping (like dict) this will only check
+    the items iterated over via
+
+        for item in obj
+
+    which corresponds to the keys in the dict case.
+
+    The explicit_exclude argument may be a tuple of types for
+    some explicit checking in the sense that if obj is an
+    instance of one of the types given by explicit_exclude
+    we know it is NOT an instance of type.
+    """
+
+    # check this object for type
+    if isinstance(obj, type):
+        return True
+
+    # check for some explicit types in order to conclude
+    # that obj is not of type
+    # see the dict/str example in recursive_scan_for_dict_instance
+    if explicit_exclude is not None:
+        if isinstance(obj, explicit_exclude):
+            return False
+
+
+    # if not of the desired type, try to iterate and check each item for type
+    try:
+        for i in obj:
+            # return True if the item is (or contains) an instance of type
+            if recursive_scan_for_instance(i, type, explicit_exclude):
+                return True
+    except TypeError:
+        pass  # obj is not iterable
+
+    # either obj is not iterable and not of type, or no item is of type
+    return False
+
+def recursive_scan_for_dict_instance(obj):
+    # explicitly exclude the 'str' class: it is iterable,
+    # but can only contain characters, never a dict
+    return recursive_scan_for_instance(obj, type=dict, explicit_exclude=(str,))
+
+def as_binary_data(a):
+    if isinstance(a, dict):
+        raise RuntimeError()
+
+    return pickle.dumps(a)
 
 class PersistentData_Server(JobManager_Server):
     def __init__(self,
@@ -12,11 +66,19 @@ class PersistentData_Server(JobManager_Server):
                  verbose=1,
                  msg_interval=1,
                  fname_dump=None,
-                 speed_calc_cycles=50):
+                 speed_calc_cycles=50,
+                 overwrite=False):
 
         JobManager_Server.__init__(self, authkey, const_arg=const_arg, port=port, verbose=verbose, msg_interval=msg_interval, fname_dump=fname_dump, speed_calc_cycles=speed_calc_cycles)
         self.pds = persistent_data_structure
+        self.overwrite = overwrite
 
     def process_new_result(self, arg, result):
-        self.pds[arg] = result
+        self.pds[as_binary_data(arg)] = result
         self.pds.commit()
+
+    def put_arg(self, a):
+        a_bin = as_binary_data(a)
+        if self.overwrite or (a_bin not in self.pds):
+            JobManager_Server.put_arg(self, a)
+
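
To sketch what the overwrite flag and the pickled-argument keys do, the snippet below uses a plain dict as a stand-in for the persistent data structure; the names cache, as_key, submit and store are made up for illustration, with submit mimicking put_arg and store mimicking process_new_result (the commit() call is omitted):

    import pickle

    cache = {}   # stand-in for the persistent data structure

    def as_key(arg):
        # results are stored under the pickled byte representation of the argument
        return pickle.dumps(arg)

    def submit(arg, overwrite=False):
        # hand the argument out only if no result is stored yet,
        # or if overwriting is explicitly requested
        if overwrite or as_key(arg) not in cache:
            print("submitting", arg)
        else:
            print("skipping", arg, "- result already stored")

    def store(arg, result):
        # persist the result under the pickled argument
        cache[as_key(arg)] = result

    submit((1, 2))                  # submitting (1, 2)
    store((1, 2), 3)
    submit((1, 2))                  # skipping (1, 2) - result already stored
    submit((1, 2), overwrite=True)  # submitting (1, 2)
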
@@ -165,6 +165,17 @@ def test_mp_read_from_sqlite():
     p1.join()
     p2.join()
 
+from collections import namedtuple
+a_tup_type = namedtuple('a_tup_type', ['a', 'b'])
+
+def test_dict_dump():
+    a = {'a': 1, 'b': 2}
+    import pickle
+    print(pickle.dumps(a))
+
+    a_tup = a_tup_type(**a)
+    print(pickle.dumps(a_tup))
+    print(hash(a_tup))
 
 
 
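
The namedtuple in the test above is presumably the work-around for the dict restriction in as_binary_data: a dict is not hashable, and two equal dicts can pickle to different byte strings because their items are serialized in insertion order, so pickled dicts make poor keys; a namedtuple with a fixed field order has neither problem. A short self-contained sketch mirroring the test:

    import pickle
    from collections import namedtuple

    # two equal dicts built with different insertion order
    d1 = {'a': 1, 'b': 2}
    d2 = {'b': 2, 'a': 1}
    print(d1 == d2)                              # True
    print(pickle.dumps(d1) == pickle.dumps(d2))  # False on CPython 3.7+:
                                                 # items are pickled in insertion order
    # hash(d1)                                   # would raise TypeError: unhashable type: 'dict'

    # converting to a namedtuple fixes the field order and restores hashability
    a_tup_type = namedtuple('a_tup_type', ['a', 'b'])
    t1 = a_tup_type(**d1)
    t2 = a_tup_type(**d2)
    print(pickle.dumps(t1) == pickle.dumps(t2))  # True: identical pickled bytes
    print(hash(t1) == hash(t2))                  # True: usable as a stable key
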
@@ -174,5 +185,6 @@ if __name__ == "__main__":
     # test_pd()
     # test_pd_bytes()
     # test_directory_removal()
-    test_mp_read_from_sqlite()
+    # test_mp_read_from_sqlite()
+    test_dict_dump()
 