mirror of
https://github.com/vale981/jobmanager
synced 2025-03-05 09:51:38 -05:00
try to get tests running on new version
This commit is contained in:
parent
662a7c11b5
commit
da8281aa70
6 changed files with 6 additions and 1316 deletions
|
@ -36,11 +36,6 @@ from . import clients
|
|||
from . import decorators
|
||||
from . import servers
|
||||
from . import ode_wrapper
|
||||
from . import binfootprint
|
||||
|
||||
# persistentData requires sqlitedict
|
||||
try:
|
||||
from . import persistentData
|
||||
except ImportError as e:
|
||||
warnings.warn("Submodule 'persistentData' is not available. Reason: {}.".format(e))
|
||||
|
||||
|
||||
|
|
|
@ -46,7 +46,8 @@ import sys
|
|||
import time
|
||||
import traceback
|
||||
import warnings
|
||||
from . import binfootprint as bf
|
||||
import binfootprint as bf
|
||||
import progress
|
||||
|
||||
import logging
|
||||
|
||||
|
@ -113,9 +114,6 @@ else:
|
|||
class JMHostNotReachableError(JMConnectionError):
|
||||
pass
|
||||
|
||||
sys.path.append(os.path.dirname(__file__))
|
||||
from . import progress
|
||||
|
||||
myQueue = mp.Queue
|
||||
AuthenticationError = mp.AuthenticationError
|
||||
|
||||
|
|
|
@ -1,177 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
from __future__ import division, print_function
|
||||
|
||||
import sys
|
||||
|
||||
if sys.version_info.major > 2:
|
||||
import pathlib
|
||||
path = pathlib.PosixPath(__file__).absolute()
|
||||
jobmanager = path.parent.parent# / 'jobmanager'
|
||||
sys.path.insert(0, str(jobmanager))
|
||||
else:
|
||||
from os.path import abspath, dirname, split
|
||||
# Add parent directory to beginning of path variable
|
||||
sys.path = [split(dirname(abspath(__file__)))[0]] + sys.path
|
||||
|
||||
from jobmanager import binfootprint as bfp
|
||||
import numpy as np
|
||||
from collections import namedtuple
|
||||
|
||||
import warnings
|
||||
warnings.filterwarnings('error')
|
||||
|
||||
def test_version_tag():
|
||||
ob = 5
|
||||
binob = bfp.dump(ob)
|
||||
assert bfp.byte_to_ord(binob[0]) == bfp._VERS
|
||||
|
||||
def test_atom():
|
||||
atoms = [12345678, 3.141, 'hallo Welt', 'öäüß', True, False, None, 2**65, -3**65, b'\xff\fe\03']
|
||||
|
||||
for atom in atoms:
|
||||
bin_atom = bfp.dump(atom)
|
||||
atom_prime = bfp.load(bin_atom)
|
||||
bin_ob_prime = bfp.dump(atom_prime)
|
||||
assert bin_atom == bin_ob_prime
|
||||
|
||||
hash(bin_atom)
|
||||
|
||||
def test_tuple():
|
||||
t = (12345678, 3.141, 'hallo Welt', 'öäüß', True, False, None, (3, tuple(), (4,5,None), 'test'))
|
||||
bin_tuple = bfp.dump(t)
|
||||
assert type(bin_tuple) is bfp.BIN_TYPE
|
||||
t_prime = bfp.load(bin_tuple)
|
||||
assert t == t_prime
|
||||
bin_ob_prime = bfp.dump(t_prime)
|
||||
assert bin_tuple == bin_ob_prime
|
||||
|
||||
def test_nparray():
|
||||
ob = np.random.randn(3,53,2)
|
||||
bin_ob = bfp.dump(ob)
|
||||
assert type(bin_ob) is bfp.BIN_TYPE
|
||||
ob_prime = bfp.load(bin_ob)
|
||||
assert np.all(ob == ob_prime)
|
||||
bin_ob_prime = bfp.dump(ob_prime)
|
||||
assert bin_ob == bin_ob_prime
|
||||
|
||||
ob = np.random.randn(3,53,2)
|
||||
ob = (ob, ob, 4, None)
|
||||
bin_ob = bfp.dump(ob)
|
||||
ob_prime = bfp.load(bin_ob)
|
||||
assert np.all(ob[0] == ob_prime[0])
|
||||
assert np.all(ob[1] == ob_prime[1])
|
||||
bin_ob_prime = bfp.dump(ob_prime)
|
||||
assert bin_ob == bin_ob_prime
|
||||
|
||||
def test_list():
|
||||
ob = [1,2,3]
|
||||
bin_ob = bfp.dump(ob)
|
||||
assert type(bin_ob) is bfp.BIN_TYPE
|
||||
ob_prime = bfp.load(bin_ob)
|
||||
assert np.all(ob == ob_prime)
|
||||
bin_ob_prime = bfp.dump(ob_prime)
|
||||
assert bin_ob == bin_ob_prime
|
||||
|
||||
ob = [1, (2,3), np.array([2j,3j])]
|
||||
bin_ob = bfp.dump(ob)
|
||||
ob_prime = bfp.load(bin_ob)
|
||||
bin_ob_prime = bfp.dump(ob_prime)
|
||||
assert bin_ob == bin_ob_prime
|
||||
|
||||
assert np.all(ob[0] == ob_prime[0])
|
||||
assert np.all(ob[1] == ob_prime[1])
|
||||
assert np.all(ob[2] == ob_prime[2])
|
||||
|
||||
def test_getstate():
|
||||
class T(object):
|
||||
def __init__(self, a):
|
||||
self.a = a
|
||||
def __getstate__(self):
|
||||
return [self.a]
|
||||
def __setstate__(self, state):
|
||||
self.a = state[0]
|
||||
|
||||
ob = T(4)
|
||||
bin_ob = bfp.dump(ob)
|
||||
assert type(bin_ob) is bfp.BIN_TYPE
|
||||
|
||||
classes = {}
|
||||
classes['T'] = T
|
||||
|
||||
ob_prime = bfp.load(bin_ob, classes)
|
||||
|
||||
assert np.all(ob.a == ob_prime.a)
|
||||
bin_ob_prime = bfp.dump(ob_prime)
|
||||
assert bin_ob == bin_ob_prime
|
||||
|
||||
try:
|
||||
ob_prime = bfp.load(bin_ob)
|
||||
except bfp.BFUnkownClassError:
|
||||
pass
|
||||
else:
|
||||
assert False, "binfootprint.BFUnkownClassError should have been raised"
|
||||
|
||||
def test_named_tuple():
|
||||
obj_type = namedtuple('obj_type', ['a','b','c'])
|
||||
|
||||
obj = obj_type(12345678, 3.141, 'hallo Welt')
|
||||
|
||||
bin_obj = bfp.dump(obj)
|
||||
assert type(bin_obj) is bfp.BIN_TYPE
|
||||
obj_prime = bfp.load(bin_obj)
|
||||
assert obj_prime.__class__.__name__ == obj.__class__.__name__
|
||||
assert obj_prime._fields == obj._fields
|
||||
assert obj_prime == obj
|
||||
bin_ob_prime = bfp.dump(obj_prime)
|
||||
assert bin_obj == bin_ob_prime
|
||||
|
||||
def test_complex():
|
||||
z = 3+4j
|
||||
bf = bfp.dump(z)
|
||||
assert type(bf) is bfp.BIN_TYPE
|
||||
zr = bfp.load(bf)
|
||||
assert zr == z
|
||||
|
||||
def test_dict():
|
||||
a = {'a':1, 5:5, 3+4j:'l', False: b'ab4+#'}
|
||||
bf = bfp.dump(a)
|
||||
assert type(bf) is bfp.BIN_TYPE
|
||||
a_restored = bfp.load(bf)
|
||||
for k in a:
|
||||
assert a[k] == a_restored[k]
|
||||
|
||||
def test_versions():
|
||||
nt = namedtuple('nt', ['x', 'y'])
|
||||
n = nt(4,5)
|
||||
n2 = nt(n, n)
|
||||
ob = [3, n, n2]
|
||||
|
||||
binob = bfp.dump(ob, vers = 0)
|
||||
try:
|
||||
bfp.load(binob)
|
||||
except bfp.BFUnkownClassError:
|
||||
pass
|
||||
else:
|
||||
assert False, "binfootprint.BFUnkownClassError should have been raised"
|
||||
|
||||
rest_ob = bfp.load(binob, {'nt': nt})
|
||||
assert rest_ob == ob
|
||||
|
||||
binob = bfp.dump(ob, vers = 0x80)
|
||||
rest_ob = bfp.load(binob)
|
||||
assert rest_ob == ob
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
# test_version_tag()
|
||||
# test_atom()
|
||||
# test_tuple()
|
||||
# test_nparray()
|
||||
# test_list()
|
||||
# test_getstate()
|
||||
# test_named_tuple()
|
||||
# test_complex()
|
||||
# test_dict()
|
||||
test_versions()
|
||||
|
||||
|
|
@ -17,7 +17,9 @@ from os.path import abspath, dirname, split
|
|||
# Add parent directory to beginning of path variable
|
||||
sys.path = [split(dirname(abspath(__file__)))[0]] + sys.path
|
||||
|
||||
from jobmanager import jobmanager, progress, binfootprint
|
||||
import jobmanager
|
||||
import binfootprint
|
||||
import progress
|
||||
|
||||
import warnings
|
||||
warnings.filterwarnings('error')
|
||||
|
|
|
@ -1,532 +0,0 @@
|
|||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
from __future__ import division, print_function
|
||||
|
||||
import sys
|
||||
import pickle
|
||||
import os
|
||||
from os.path import abspath, dirname, split, exists
|
||||
from shutil import rmtree
|
||||
|
||||
import warnings
|
||||
warnings.filterwarnings('error')
|
||||
|
||||
import numpy as np
|
||||
|
||||
# Add parent directory to beginning of path variable
|
||||
sys.path = [split(dirname(abspath(__file__)))[0]] + sys.path
|
||||
|
||||
import jobmanager.persistentData as pd
|
||||
from jobmanager.persistentData import PersistentDataStructure as PDS
|
||||
|
||||
VERBOSE = 1
|
||||
|
||||
if sys.version_info[0] == 2:
|
||||
# fixes keyword problems with python 2.x
|
||||
old_open = open
|
||||
def new_open(file, mode):
|
||||
return old_open(name = file, mode = mode)
|
||||
open = new_open
|
||||
|
||||
rmtree('__test_data', ignore_errors=True)
|
||||
rmtree('__data', ignore_errors=True)
|
||||
rmtree('__base', ignore_errors=True)
|
||||
|
||||
|
||||
|
||||
def test_pd():
|
||||
try:
|
||||
with PDS(name='test_data', verbose=VERBOSE) as data:
|
||||
key = 'a'
|
||||
value = 1
|
||||
data.setData(key=key, value=value)
|
||||
assert data.getData(key) == value
|
||||
assert len(data) == 1
|
||||
|
||||
|
||||
key_sub = 'zz'
|
||||
with data.getData(key_sub, create_sub_data=True) as sub_data:
|
||||
sub_data.setData(key=key, value=3)
|
||||
assert sub_data.getData(key) == 3
|
||||
assert data.getData(key) == 1
|
||||
|
||||
|
||||
with sub_data.getData(key_sub, create_sub_data=True) as sub_sub_data:
|
||||
sub_sub_data.setData(key=key, value=4)
|
||||
assert sub_sub_data.getData(key) == 4
|
||||
assert sub_data.getData(key) == 3
|
||||
assert data.getData(key) == 1
|
||||
|
||||
with sub_data.getData(key_sub, create_sub_data=True) as sub_sub_data:
|
||||
assert sub_sub_data.getData(key) == 4
|
||||
assert sub_data.getData(key) == 3
|
||||
assert data.getData(key) == 1
|
||||
|
||||
finally:
|
||||
print()
|
||||
data.erase()
|
||||
|
||||
def test_pd_bytes():
|
||||
t1 = (3.4, 4.5, 5.6, 6.7, 7.8, 8.9)
|
||||
t2 = (3.4, 4.5, 5.6, 6.7, 7.8, 8.9, 9,1)
|
||||
|
||||
b1 = pickle.dumps(t1)
|
||||
b2 = pickle.dumps(t2)
|
||||
|
||||
try:
|
||||
with PDS(name='base', verbose=VERBOSE) as base_data:
|
||||
with base_data.getData(key=b1, create_sub_data=True) as sub_data:
|
||||
for i in range(2, 10):
|
||||
sub_data[i] = t2
|
||||
|
||||
base_data[b2] = t1
|
||||
|
||||
if VERBOSE > 1:
|
||||
print("\nCHECK\n")
|
||||
|
||||
with PDS(name='base', verbose=VERBOSE) as base_data:
|
||||
with base_data.getData(key=b1) as sub_data:
|
||||
for i in range(2, 10):
|
||||
assert sub_data[i] == t2
|
||||
|
||||
assert base_data[b2] == t1
|
||||
|
||||
finally:
|
||||
print()
|
||||
base_data.erase()
|
||||
|
||||
|
||||
def test_directory_removal():
|
||||
try:
|
||||
with PDS(name='data', verbose=VERBOSE) as data:
|
||||
with data.newSubData('s1') as s1:
|
||||
s1['bla'] = 9
|
||||
|
||||
f = open(file=data._dirname + '/other_file', mode='w')
|
||||
f.close()
|
||||
|
||||
print("now there should be a warning, because there is an unknown file in the directory!")
|
||||
finally:
|
||||
try:
|
||||
data.erase()
|
||||
except UserWarning:
|
||||
pass
|
||||
|
||||
assert exists(data._dirname)
|
||||
os.remove(data._dirname + '/other_file')
|
||||
os.rmdir(data._dirname)
|
||||
|
||||
def test_mp_read_from_sqlite():
|
||||
import sqlitedict as sqd
|
||||
import multiprocessing as mp
|
||||
import time
|
||||
|
||||
d = sqd.SqliteDict('test.db', autocommit = True)
|
||||
|
||||
def write(arg):
|
||||
with sqd.SqliteDict('test.db', autocommit = True) as d:
|
||||
for i in range(100):
|
||||
d[i] = (i, arg)
|
||||
|
||||
def read():
|
||||
with sqd.SqliteDict('test.db', autocommit = True) as d:
|
||||
for i in range(len(d)):
|
||||
print(i, d[i])
|
||||
|
||||
p1 = mp.Process(target = write, args=('p1', ))
|
||||
time.sleep(0.1)
|
||||
p2 = mp.Process(target = read)
|
||||
|
||||
p1.start()
|
||||
p2.start()
|
||||
|
||||
p1.join(10)
|
||||
p2.join(10)
|
||||
|
||||
try:
|
||||
if p1.is_alive():
|
||||
raise RuntimeError("write process did not finish on time")
|
||||
if p2.is_alive():
|
||||
raise RuntimeError("read process did not finish on time")
|
||||
finally:
|
||||
p1.terminate()
|
||||
p2.terminate()
|
||||
d.terminate()
|
||||
|
||||
|
||||
def test_from_existing_sub_data():
|
||||
print()
|
||||
print('test_from_existing_sub_data')
|
||||
t1 = (3.4, 4.5, 5.6, 6.7, 7.8, 8.9)
|
||||
t2 = (3.4, 4.5, 5.6, 6.7, 7.8, 8.9, 9,1)
|
||||
|
||||
try:
|
||||
with PDS(name='base', verbose=VERBOSE) as base_data:
|
||||
with base_data.getData(key='sub1', create_sub_data = True) as sub_data:
|
||||
sub_data[100] = t1
|
||||
sub_data[200] = t2
|
||||
with sub_data.getData(key = 'subsub1', create_sub_data = True) as sub_sub_data:
|
||||
sub_sub_data['t'] = 'hallo Welt'
|
||||
|
||||
base_data.setDataFromSubData(key='sub2', subData = sub_data)
|
||||
|
||||
|
||||
with PDS(name='base', verbose=VERBOSE) as base_data:
|
||||
with base_data.getData(key='sub2', create_sub_data = False) as sub_data:
|
||||
assert sub_data[100] == t1
|
||||
assert sub_data[200] == t2
|
||||
with sub_data.getData(key = 'subsub1', create_sub_data = False) as sub_sub_data:
|
||||
assert sub_sub_data['t'] == 'hallo Welt'
|
||||
|
||||
|
||||
with PDS(name='base', verbose=VERBOSE) as base_data:
|
||||
with base_data.getData(key='sub1', create_sub_data = True) as sub_data:
|
||||
base_data['sub2'] = sub_data
|
||||
|
||||
with PDS(name='base', verbose=VERBOSE) as base_data:
|
||||
with base_data.getData(key='sub2', create_sub_data = False) as sub_data:
|
||||
assert sub_data[100] == t1
|
||||
assert sub_data[200] == t2
|
||||
with sub_data.getData(key = 'subsub1', create_sub_data = False) as sub_sub_data:
|
||||
assert sub_sub_data['t'] == 'hallo Welt'
|
||||
sub_sub_data['t'] = 'sub2:hallo Welt'
|
||||
|
||||
sub_data[100] = "sub2:t1"
|
||||
sub_data[200] = "sub2:t2"
|
||||
|
||||
with PDS(name='base', verbose=VERBOSE) as base_data:
|
||||
with base_data.getData(key='sub1', create_sub_data = True) as sub_data:
|
||||
assert sub_data[100] == t1
|
||||
assert sub_data[200] == t2
|
||||
with sub_data.getData(key = 'subsub1', create_sub_data = True) as sub_sub_data:
|
||||
assert sub_sub_data['t'] == 'hallo Welt'
|
||||
|
||||
with PDS(name='base', verbose=VERBOSE) as base_data:
|
||||
with base_data.getData(key='sub2', create_sub_data = False) as sub_data:
|
||||
with sub_data.getData(key = 'subsub1', create_sub_data = False) as sub_sub_data:
|
||||
assert sub_sub_data['t'] == 'sub2:hallo Welt'
|
||||
|
||||
assert sub_data[100] == "sub2:t1"
|
||||
assert sub_data[200] == "sub2:t2"
|
||||
|
||||
finally:
|
||||
print()
|
||||
base_data.erase()
|
||||
|
||||
def test_remove_sub_data_and_check_len():
|
||||
try:
|
||||
with PDS(name='base', verbose=VERBOSE) as base_data:
|
||||
with base_data.getData(key='sub1', create_sub_data = True) as sub_data:
|
||||
sub_data[100] = 't1'
|
||||
sub_data[200] = 't2'
|
||||
with sub_data.getData(key = 'subsub1', create_sub_data = True) as sub_sub_data:
|
||||
sub_sub_data['t'] = 'hallo Welt'
|
||||
|
||||
|
||||
assert len(sub_data) == 3, "len = {}".format(len(sub_data))
|
||||
|
||||
|
||||
|
||||
assert len(base_data) == 1
|
||||
base_data['copy_of_sub1'] = sub_data
|
||||
assert len(base_data) == 2
|
||||
del base_data['sub1']
|
||||
assert len(base_data) == 1
|
||||
|
||||
with base_data.getData(key='copy_of_sub1', create_sub_data = True) as sub_data:
|
||||
assert len(sub_data) == 3
|
||||
assert sub_data[100] == 't1'
|
||||
assert sub_data[200] == 't2'
|
||||
with sub_data.getData(key = 'subsub1', create_sub_data = True) as sub_sub_data:
|
||||
assert sub_sub_data['t'] == 'hallo Welt'
|
||||
|
||||
assert ('sub1' not in base_data)
|
||||
finally:
|
||||
base_data.erase()
|
||||
|
||||
def test_show_stat():
|
||||
try:
|
||||
with PDS(name='test_data', verbose=VERBOSE) as data:
|
||||
key = 'a'
|
||||
value = 1
|
||||
data.setData(key=key, value=value)
|
||||
assert data.getData(key) == value
|
||||
|
||||
|
||||
key_sub = 'zz'
|
||||
with data.getData(key_sub, create_sub_data=True) as sub_data:
|
||||
sub_data.setData(key=key, value=3)
|
||||
assert sub_data.getData(key) == 3
|
||||
assert data.getData(key) == 1
|
||||
|
||||
|
||||
key_sub_bin = pickle.dumps(key_sub, protocol=2)
|
||||
with sub_data.getData(key_sub_bin, create_sub_data=True) as sub_sub_data:
|
||||
sub_sub_data.setData(key=key, value=4)
|
||||
assert sub_sub_data.getData(key) == 4
|
||||
assert sub_data.getData(key) == 3
|
||||
assert data.getData(key) == 1
|
||||
|
||||
with sub_data.getData(key_sub_bin, create_sub_data=True) as sub_sub_data:
|
||||
assert sub_sub_data.getData(key) == 4
|
||||
assert sub_data.getData(key) == 3
|
||||
assert data.getData(key) == 1
|
||||
|
||||
data.show_stat(recursive=True)
|
||||
finally:
|
||||
data.erase()
|
||||
|
||||
def slow_len(pd):
|
||||
n = 0
|
||||
for k in pd:
|
||||
n += 1
|
||||
return n
|
||||
|
||||
def test_len():
|
||||
try:
|
||||
with PDS(name='data', verbose=VERBOSE) as data:
|
||||
assert len(data) == 0
|
||||
assert slow_len(data) == 0
|
||||
|
||||
data['a'] = 1
|
||||
assert len(data) == 1
|
||||
assert slow_len(data) == 1
|
||||
|
||||
for i in range(1, 8):
|
||||
data[i*10] = i
|
||||
assert len(data) == 8
|
||||
assert slow_len(data) == 8
|
||||
|
||||
with PDS(name='data', verbose=VERBOSE) as data:
|
||||
assert len(data) == 8
|
||||
assert slow_len(data) == 8
|
||||
|
||||
data.clear()
|
||||
assert len(data) == 0
|
||||
assert slow_len(data) == 0
|
||||
|
||||
with PDS(name='data', verbose=VERBOSE) as data:
|
||||
assert len(data) == 0
|
||||
assert slow_len(data) == 0
|
||||
finally:
|
||||
data.erase()
|
||||
|
||||
|
||||
|
||||
def test_clear():
|
||||
try:
|
||||
with PDS(name='data', verbose=VERBOSE) as data:
|
||||
data['a'] = 1
|
||||
data['b'] = 2
|
||||
with data.newSubData('s1') as s1:
|
||||
s1['bla'] = 9
|
||||
with data.newSubData('s2') as s2:
|
||||
s2['bla2'] = 18
|
||||
|
||||
with data['s1'] as s1:
|
||||
s1['t'] = 'tmp'
|
||||
s1.clear()
|
||||
|
||||
with data['s1'] as s1:
|
||||
assert len(s1) == 0
|
||||
assert slow_len(s1) == 0
|
||||
|
||||
data.clear()
|
||||
|
||||
dir_content = os.listdir(data._dirname)
|
||||
assert len(dir_content) == 1
|
||||
assert dir_content[0] == 'data.db'
|
||||
finally:
|
||||
data.erase()
|
||||
|
||||
def test_not_in():
|
||||
try:
|
||||
with PDS(name='data', verbose=VERBOSE) as data:
|
||||
data['a'] = 1
|
||||
data['b'] = 2
|
||||
with data.newSubData('s1') as s1:
|
||||
s1['bla'] = 9
|
||||
|
||||
assert ('a' in data)
|
||||
assert ('b' in data)
|
||||
assert ('s1' in data)
|
||||
|
||||
assert ('c' not in data)
|
||||
|
||||
finally:
|
||||
data.erase()
|
||||
|
||||
def test_npa():
|
||||
a = np.linspace(0, 1, 100).reshape(10,10)
|
||||
try:
|
||||
with PDS(name='data_npa', verbose=VERBOSE) as data:
|
||||
data['a'] = a
|
||||
|
||||
with PDS(name='data_npa', verbose=VERBOSE) as data:
|
||||
b = data['a']
|
||||
assert np.all(b == a)
|
||||
|
||||
with PDS(name='data_npa', verbose=VERBOSE) as data:
|
||||
del data['a']
|
||||
data['a'] = a
|
||||
finally:
|
||||
data.erase()
|
||||
|
||||
def test_merge():
|
||||
|
||||
a = np.random.rand(5)
|
||||
|
||||
with PDS(name='d1', verbose=VERBOSE) as d1:
|
||||
d1.clear()
|
||||
d1['k1'] = 1
|
||||
d1['k2'] = 2
|
||||
d1['k3'] = 3
|
||||
d1['aa'] = a
|
||||
with d1.newSubData('sub1') as sub1:
|
||||
sub1['s1'] = 11
|
||||
sub1['s2'] = 12
|
||||
sub1['s3'] = 13
|
||||
sub1['a'] = a
|
||||
|
||||
with PDS(name='d2', verbose=VERBOSE) as d2:
|
||||
d2.clear()
|
||||
d2['2k1'] = 1
|
||||
|
||||
d2.mergeOtherPDS(other_db_name = "d1", status_interval=0)
|
||||
|
||||
|
||||
with PDS(name='d2', verbose=VERBOSE) as d2:
|
||||
assert 'k1' in d2
|
||||
assert d2['k1'] == 1
|
||||
assert 'k2' in d2
|
||||
assert d2['k2'] == 2
|
||||
assert 'k3' in d2
|
||||
assert d2['k3'] == 3
|
||||
assert 'aa' in d2
|
||||
assert np.all(d2['aa'] == a)
|
||||
|
||||
assert "sub1" in d2
|
||||
assert isinstance(d2["sub1"], PDS)
|
||||
with d2["sub1"] as sub:
|
||||
assert 's1' in sub
|
||||
assert sub['s1'] == 11
|
||||
assert 's2' in sub
|
||||
assert sub['s2'] == 12
|
||||
assert 's3' in sub
|
||||
assert sub['s3'] == 13
|
||||
assert 'a' in sub
|
||||
assert np.all(sub['a'] == a)
|
||||
|
||||
try:
|
||||
with PDS(name='d2', verbose=VERBOSE) as d2:
|
||||
d2.mergeOtherPDS(other_db_name = "d1", update='error', status_interval=0)
|
||||
except KeyError as e:
|
||||
print(e)
|
||||
print("this is ok!")
|
||||
pass
|
||||
|
||||
with PDS(name='d2', verbose=VERBOSE) as d2:
|
||||
d2['k1'] = 'k1'
|
||||
d2.mergeOtherPDS(other_db_name = "d1", update='ignore', status_interval=0)
|
||||
assert d2['k1'] == 'k1'
|
||||
|
||||
with PDS(name='d2', verbose=VERBOSE) as d2:
|
||||
d2['k1'] = 'k1'
|
||||
d2.mergeOtherPDS(other_db_name = "d1", update='update', status_interval=0)
|
||||
assert d2['k1'] == 1
|
||||
|
||||
def test_merge_fname_conflict():
|
||||
|
||||
class PDS_det_fname(PDS):
|
||||
def newNPA(self, key, nparray):
|
||||
d = {'fname': 'det_fname.npy',
|
||||
'magic': pd.MAGIC_SIGN_NPARRAY}
|
||||
self.db[key] = d
|
||||
self.db.commit()
|
||||
|
||||
full_name = os.path.join(self._dirname, d['fname'])
|
||||
np.save(full_name, nparray)
|
||||
return True
|
||||
|
||||
def newSubData(self, key):
|
||||
self.need_open()
|
||||
dirname = 'subDB'
|
||||
i = 2
|
||||
|
||||
while os.path.exists(os.path.join(self._dirname, '__'+dirname)):
|
||||
dirname = 'subDB{}'.format(i)
|
||||
i += 1
|
||||
print(self._dirname, dirname)
|
||||
|
||||
full_name = os.path.join(self._dirname, '__'+dirname)
|
||||
os.mkdir(full_name)
|
||||
if not key in self.db:
|
||||
d = {'name': dirname,
|
||||
'magic': pd.MAGIC_SIGN}
|
||||
self.db[key] = d
|
||||
self.db.commit()
|
||||
return self.__class__(name = d['name'], path = os.path.join(self._dirname) , verbose = self.verbose)
|
||||
else:
|
||||
raise RuntimeError("can NOT create new SubData, key already found!")
|
||||
|
||||
a = np.random.rand(5)
|
||||
b = np.random.rand(5)
|
||||
|
||||
with PDS_det_fname(name='d1', verbose=VERBOSE) as d1:
|
||||
d1.clear()
|
||||
d1.newNPA('aa', a)
|
||||
with d1.newSubData('sub1') as sub1:
|
||||
sub1['s1'] = 11
|
||||
sub1.newNPA('a', a)
|
||||
|
||||
with PDS_det_fname(name='d2', verbose=VERBOSE) as d2:
|
||||
d2.clear()
|
||||
d2['2k1'] = 1
|
||||
d2.newNPA('2aa', b)
|
||||
with d2.newSubData('sub2') as sub2:
|
||||
sub2['s2'] = 22
|
||||
sub2.newNPA('a2', b)
|
||||
|
||||
assert np.all(d2['2aa'] == b)
|
||||
d2.mergeOtherPDS(other_db_name = "d1", update='error', status_interval=0)
|
||||
assert np.all(d2['2aa'] == b)
|
||||
|
||||
|
||||
|
||||
assert os.path.exists( os.path.join(d1._path, '__d1', '__subDB'))
|
||||
assert os.path.exists( os.path.join(d1._path, '__d1', 'det_fname.npy'))
|
||||
assert os.path.exists( os.path.join(d1._path, '__d2', '__subDB'))
|
||||
assert os.path.exists( os.path.join(d1._path, '__d2', 'det_fname.npy'))
|
||||
|
||||
with PDS_det_fname(name='d2', verbose=VERBOSE) as d2:
|
||||
assert d2['2k1'] == 1
|
||||
assert np.all(d2['2aa'] == b)
|
||||
|
||||
assert np.all(d2['aa'] == a)
|
||||
|
||||
assert d2.has_key('sub1')
|
||||
with d2['sub1'] as sub1:
|
||||
assert sub1['s1'] == 11
|
||||
assert np.all(sub1['a'] == a)
|
||||
|
||||
assert d2.has_key('sub2')
|
||||
with d2['sub2'] as sub2:
|
||||
assert sub2['s2'] == 22
|
||||
assert np.all(sub2['a2'] == b)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
test_pd()
|
||||
test_pd_bytes()
|
||||
test_directory_removal()
|
||||
test_mp_read_from_sqlite()
|
||||
test_from_existing_sub_data()
|
||||
test_remove_sub_data_and_check_len()
|
||||
test_show_stat()
|
||||
test_len()
|
||||
test_clear()
|
||||
test_not_in()
|
||||
test_npa()
|
||||
test_merge()
|
||||
test_merge_fname_conflict()
|
||||
pass
|
|
@ -1,596 +0,0 @@
|
|||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
from __future__ import division, print_function
|
||||
|
||||
import sys
|
||||
import pickle
|
||||
import os
|
||||
from os.path import abspath, dirname, split, exists
|
||||
from shutil import rmtree
|
||||
|
||||
import warnings
|
||||
warnings.filterwarnings('error')
|
||||
warnings.filterwarnings(action = "once",
|
||||
category= DeprecationWarning)
|
||||
|
||||
import numpy as np
|
||||
|
||||
# Add parent directory to beginning of path variable
|
||||
sys.path = [split(dirname(abspath(__file__)))[0]] + sys.path
|
||||
|
||||
from jobmanager.persistentData import PersistentDataStructure_HDF5 as PDS
|
||||
from jobmanager.persistentData import PersistentDataStructure as PDS_SQL
|
||||
from jobmanager.persistentData import mergePDS
|
||||
|
||||
VERBOSE = 1
|
||||
|
||||
def test_md5_clash():
|
||||
with PDS(name='test_data', verbose=VERBOSE) as data:
|
||||
data.clear()
|
||||
data._md5 = lambda key: str(np.random.randint(0,2))
|
||||
for i in range(100):
|
||||
data['a{}'.format(i)] = i
|
||||
assert len(data) == 100
|
||||
|
||||
for i in range(100):
|
||||
data.newSubData('s{}'.format(i))
|
||||
assert len(data) == 200
|
||||
|
||||
n = 0
|
||||
for k in data:
|
||||
n += 1
|
||||
assert n == 200
|
||||
|
||||
def test_pd():
|
||||
|
||||
with PDS(name='test_data', verbose=VERBOSE) as data:
|
||||
data.clear()
|
||||
key = 'a'
|
||||
value = 1
|
||||
try:
|
||||
data.getData(key)
|
||||
except KeyError as e:
|
||||
pass
|
||||
|
||||
data.setData(key=key, value=value)
|
||||
assert data.getData(key) == value
|
||||
assert len(data) == 1
|
||||
|
||||
key_sub = 'zz'
|
||||
with data.getData(key_sub, create_sub_data=True) as sub_data:
|
||||
sub_data.setData(key=key, value=3)
|
||||
assert sub_data.getData(key) == 3
|
||||
assert data.getData(key) == 1
|
||||
|
||||
with sub_data.getData(key_sub, create_sub_data=True) as sub_sub_data:
|
||||
sub_sub_data.setData(key=key, value=4)
|
||||
assert sub_sub_data.getData(key) == 4
|
||||
assert sub_data.getData(key) == 3
|
||||
assert data.getData(key) == 1
|
||||
|
||||
with sub_data.getData(key_sub, create_sub_data=True) as sub_sub_data:
|
||||
assert sub_sub_data.getData(key) == 4
|
||||
assert sub_data.getData(key) == 3
|
||||
assert data.getData(key) == 1
|
||||
|
||||
with PDS(name='test_data', verbose=VERBOSE) as data:
|
||||
data['d1'] = ('ö', 4, [0])
|
||||
with PDS(name='test_data', verbose=VERBOSE) as data:
|
||||
d1 = data['d1']
|
||||
assert d1[0] == 'ö'
|
||||
assert d1[1] == 4
|
||||
assert d1[2] == [0]
|
||||
|
||||
with PDS(name='test_data', verbose=VERBOSE) as data:
|
||||
data.clear()
|
||||
data.newSubData(key='sub_1', overwrite = False)
|
||||
|
||||
with PDS(name='test_data', verbose=VERBOSE) as data:
|
||||
try:
|
||||
data.newSubData(key='sub_1', overwrite = False)
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
data.newSubData(key='sub_1', overwrite = True)
|
||||
|
||||
def test_pd_bytes():
|
||||
t1 = (3.4, 4.5, 5.6, 6.7, 7.8, 8.9)
|
||||
t2 = (3.4, 4.5, 5.6, 6.7, 7.8, 8.9, 9,1)
|
||||
|
||||
b1 = pickle.dumps(t1)
|
||||
b2 = pickle.dumps(t2)
|
||||
|
||||
try:
|
||||
with PDS(name='base', verbose=VERBOSE) as base_data:
|
||||
with base_data.getData(key=b1, create_sub_data=True) as sub_data:
|
||||
for i in range(2, 10):
|
||||
sub_data[i] = t2
|
||||
|
||||
base_data[b2] = t1
|
||||
|
||||
if VERBOSE > 1:
|
||||
print("\nCHECK\n")
|
||||
|
||||
with PDS(name='base', verbose=VERBOSE) as base_data:
|
||||
with base_data.getData(key=b1) as sub_data:
|
||||
for i in range(2, 10):
|
||||
assert np.all(sub_data[i] == t2)
|
||||
|
||||
assert np.all(base_data[b2] == t1)
|
||||
|
||||
finally:
|
||||
print()
|
||||
base_data.erase()
|
||||
|
||||
|
||||
def test_mp_read_from_sqlite():
|
||||
import sqlitedict as sqd
|
||||
import multiprocessing as mp
|
||||
import time
|
||||
|
||||
d = sqd.SqliteDict('test.db', autocommit = True)
|
||||
|
||||
def write(arg):
|
||||
with sqd.SqliteDict('test.db', autocommit = True) as d:
|
||||
for i in range(100):
|
||||
d[i] = (i, arg)
|
||||
|
||||
def read():
|
||||
with sqd.SqliteDict('test.db', autocommit = True) as d:
|
||||
for i in range(len(d)):
|
||||
print(i, d[i])
|
||||
|
||||
p1 = mp.Process(target = write, args=('p1', ))
|
||||
time.sleep(0.1)
|
||||
p2 = mp.Process(target = read)
|
||||
|
||||
p1.start()
|
||||
p2.start()
|
||||
|
||||
p1.join(10)
|
||||
p2.join(10)
|
||||
|
||||
try:
|
||||
if p1.is_alive():
|
||||
raise RuntimeError("write process did not finish on time")
|
||||
if p2.is_alive():
|
||||
raise RuntimeError("read process did not finish on time")
|
||||
finally:
|
||||
p1.terminate()
|
||||
p2.terminate()
|
||||
d.terminate()
|
||||
|
||||
|
||||
def test_from_existing_sub_data():
|
||||
print()
|
||||
print('test_from_existing_sub_data')
|
||||
t1 = (3.4, 4.5, 5.6, 6.7, 7.8, 8.9)
|
||||
t2 = (3.4, 4.5, 5.6, 6.7, 7.8, 8.9, 9,1)
|
||||
|
||||
|
||||
with PDS(name='base', verbose=VERBOSE) as base_data:
|
||||
base_data.clear()
|
||||
with base_data.getData(key='s1', create_sub_data = True) as s1:
|
||||
s1['d1'] = 1
|
||||
s1['d2'] = 'b'
|
||||
|
||||
with PDS(name='base', verbose=VERBOSE) as base_data:
|
||||
assert base_data['s1']['d1'] == 1
|
||||
assert base_data['s1']['d2'] == 'b'
|
||||
base_data.setDataFromSubData('s2', base_data['s1'])
|
||||
assert base_data['s2']['d1'] == 1
|
||||
assert base_data['s2']['d2'] == 'b'
|
||||
|
||||
del base_data['s1']
|
||||
assert base_data['s2']['d1'] == 1
|
||||
assert base_data['s2']['d2'] == 'b'
|
||||
|
||||
with PDS(name='base', verbose=VERBOSE) as base_data:
|
||||
with base_data.getData(key='sub1', create_sub_data = True) as sub_data:
|
||||
sub_data[100] = t1
|
||||
sub_data[200] = t2
|
||||
with sub_data.getData(key = 'subsub1', create_sub_data = True) as sub_sub_data:
|
||||
sub_sub_data['t'] = 'hallo Welt'
|
||||
|
||||
base_data.setDataFromSubData(key='sub2', subData = sub_data)
|
||||
|
||||
|
||||
with PDS(name='base', verbose=VERBOSE) as base_data:
|
||||
with base_data.getData(key='sub2', create_sub_data = False) as sub_data:
|
||||
assert np.all(sub_data[100] == t1)
|
||||
assert np.all(sub_data[200] == t2)
|
||||
with sub_data.getData(key = 'subsub1', create_sub_data = False) as sub_sub_data:
|
||||
assert sub_sub_data['t'] == 'hallo Welt'
|
||||
|
||||
|
||||
with PDS(name='base', verbose=VERBOSE) as base_data:
|
||||
with base_data.getData(key='sub1', create_sub_data = True) as sub_data:
|
||||
base_data['sub2'] = sub_data
|
||||
|
||||
with PDS(name='base', verbose=VERBOSE) as base_data:
|
||||
with base_data.getData(key='sub2', create_sub_data = False) as sub_data:
|
||||
assert np.all(sub_data[100] == t1)
|
||||
assert np.all(sub_data[200] == t2)
|
||||
with sub_data.getData(key = 'subsub1', create_sub_data = False) as sub_sub_data:
|
||||
assert sub_sub_data['t'] == 'hallo Welt'
|
||||
sub_sub_data['t'] = 'sub2:hallo Welt'
|
||||
|
||||
sub_data[100] = "sub2:t1"
|
||||
sub_data[200] = "sub2:t2"
|
||||
|
||||
|
||||
with PDS(name='base', verbose=VERBOSE) as base_data:
|
||||
base_data.clear()
|
||||
with base_data.getData(key = 'sub1', create_sub_data = True) as sub1:
|
||||
sub1['npa'] = np.linspace(0,1,10)
|
||||
sub1['val'] = 'hallo ich bin sub1'
|
||||
|
||||
base_data['sub2'] = sub1
|
||||
|
||||
with PDS(name='base', verbose=VERBOSE) as base_data:
|
||||
npa1 = base_data['sub1']['npa']
|
||||
npa2 = base_data['sub1']['npa']
|
||||
|
||||
assert type(npa1) == np.ndarray
|
||||
assert type(npa2) == np.ndarray
|
||||
|
||||
|
||||
def test_remove_sub_data_and_check_len():
|
||||
|
||||
with PDS(name='base', verbose=VERBOSE) as base_data:
|
||||
base_data.clear()
|
||||
with base_data.getData(key='sub1', create_sub_data = True) as sub_data:
|
||||
sub_data[100] = 't1'
|
||||
sub_data[200] = 't2'
|
||||
with sub_data.getData(key = 'subsub1', create_sub_data = True) as sub_sub_data:
|
||||
sub_sub_data['t'] = 'hallo Welt'
|
||||
|
||||
|
||||
assert len(sub_data) == 3, "len = {}".format(len(sub_data))
|
||||
|
||||
|
||||
|
||||
assert len(base_data) == 1
|
||||
base_data['copy_of_sub1'] = sub_data
|
||||
assert len(base_data) == 2
|
||||
del base_data['sub1']
|
||||
assert len(base_data) == 1
|
||||
|
||||
with base_data.getData(key='copy_of_sub1', create_sub_data = True) as sub_data:
|
||||
assert len(sub_data) == 3
|
||||
assert sub_data[100] == 't1'
|
||||
assert sub_data[200] == 't2'
|
||||
with sub_data.getData(key = 'subsub1', create_sub_data = True) as sub_sub_data:
|
||||
assert sub_sub_data['t'] == 'hallo Welt'
|
||||
|
||||
assert ('sub1' not in base_data)
|
||||
|
||||
|
||||
def slow_len(pd):
|
||||
n = 0
|
||||
for k in pd:
|
||||
n += 1
|
||||
return n
|
||||
|
||||
def test_len():
|
||||
|
||||
with PDS(name='data', verbose=VERBOSE) as data:
|
||||
data.clear()
|
||||
assert len(data) == 0
|
||||
assert slow_len(data) == 0
|
||||
|
||||
data['a'] = 1
|
||||
assert len(data) == 1
|
||||
assert slow_len(data) == 1
|
||||
|
||||
for i in range(1, 8):
|
||||
data[i*10] = i
|
||||
assert len(data) == 8
|
||||
assert slow_len(data) == 8
|
||||
|
||||
with PDS(name='data', verbose=VERBOSE) as data:
|
||||
assert len(data) == 8
|
||||
assert slow_len(data) == 8
|
||||
|
||||
data.clear()
|
||||
assert len(data) == 0
|
||||
assert slow_len(data) == 0
|
||||
|
||||
with PDS(name='data', verbose=VERBOSE) as data:
|
||||
assert len(data) == 0
|
||||
assert slow_len(data) == 0
|
||||
|
||||
|
||||
|
||||
def test_clear():
    # clear() on a sub-data group must empty that group only; clear() on the
    # base removes everything.
    with PDS(name='data', verbose=VERBOSE) as data:
        data.clear()
        data['a'] = 1
        data['b'] = 2
        with data.newSubData('s1') as s1:
            s1['bla'] = 9
        with data.newSubData('s2') as s2:
            s2['bla2'] = 18

        with data['s1'] as s1:
            s1['t'] = 'tmp'
            s1.clear()

        with data['s1'] as s1:
            assert len(s1) == 0
            assert slow_len(s1) == 0

        data.clear()
|
||||
|
||||
|
||||
def test_not_in():
|
||||
try:
|
||||
with PDS(name='data', verbose=VERBOSE) as data:
|
||||
data['a'] = 1
|
||||
data['b'] = 2
|
||||
with data.newSubData('s1') as s1:
|
||||
s1['bla'] = 9
|
||||
|
||||
assert ('a' in data)
|
||||
assert ('b' in data)
|
||||
assert ('s1' in data)
|
||||
|
||||
assert ('c' not in data)
|
||||
|
||||
finally:
|
||||
data.erase()
|
||||
|
||||
def test_npa():
    # numpy arrays must round-trip through the store unchanged, and be
    # replaceable via del + reassign.
    a = np.linspace(0, 1, 100).reshape(10,10)
    data = None
    try:
        with PDS(name='data_npa', verbose=VERBOSE) as data:
            data['a'] = a

        with PDS(name='data_npa', verbose=VERBOSE) as data:
            b = data['a']
            assert np.all(b == a)

        with PDS(name='data_npa', verbose=VERBOSE) as data:
            del data['a']
            data['a'] = a
    finally:
        # guard: if PDS(...) itself raised, 'data' would otherwise be unbound
        # and the finally clause would mask the real error with a NameError
        if data is not None:
            data.erase()
|
||||
|
||||
def test_merge():
|
||||
|
||||
a = np.random.rand(5)
|
||||
|
||||
with PDS(name='d1', verbose=VERBOSE) as d1:
|
||||
d1.clear()
|
||||
d1['k1'] = 1
|
||||
d1['k2'] = 2
|
||||
d1['k3'] = 3
|
||||
d1['aa'] = a
|
||||
with d1.newSubData('sub1') as sub1:
|
||||
sub1['s1'] = 11
|
||||
sub1['s2'] = 12
|
||||
sub1['s3'] = 13
|
||||
sub1['a'] = a
|
||||
|
||||
with PDS(name='d2', verbose=VERBOSE) as d2:
|
||||
d2.clear()
|
||||
d2['2k1'] = 1
|
||||
with PDS(name='d1', verbose=VERBOSE) as d1:
|
||||
d2.mergeOtherPDS(other_db = d1, status_interval=0)
|
||||
|
||||
with PDS(name='d2', verbose=VERBOSE) as d2:
|
||||
assert 'k1' in d2
|
||||
assert d2['k1'] == 1
|
||||
assert 'k2' in d2
|
||||
assert d2['k2'] == 2
|
||||
assert 'k3' in d2
|
||||
assert d2['k3'] == 3
|
||||
assert 'aa' in d2
|
||||
assert np.all(d2['aa'] == a)
|
||||
|
||||
assert "sub1" in d2
|
||||
assert isinstance(d2["sub1"], PDS)
|
||||
with d2["sub1"] as sub:
|
||||
assert 's1' in sub
|
||||
assert sub['s1'] == 11
|
||||
assert 's2' in sub
|
||||
assert sub['s2'] == 12
|
||||
assert 's3' in sub
|
||||
assert sub['s3'] == 13
|
||||
assert 'a' in sub
|
||||
assert np.all(sub['a'] == a)
|
||||
|
||||
try:
|
||||
with PDS(name='d2', verbose=VERBOSE) as d2:
|
||||
with PDS(name='d1', verbose=VERBOSE) as d1:
|
||||
d2.mergeOtherPDS(other_db = d1, update='error', status_interval=0)
|
||||
except KeyError as e:
|
||||
print(e)
|
||||
print("this is ok!")
|
||||
pass
|
||||
|
||||
with PDS(name='d2', verbose=VERBOSE) as d2:
|
||||
d2['k1'] = 'k1'
|
||||
with PDS(name='d1', verbose=VERBOSE) as d1:
|
||||
d2.mergeOtherPDS(other_db = d1, update='ignore', status_interval=0)
|
||||
assert d2['k1'] == 'k1'
|
||||
|
||||
with PDS(name='d2', verbose=VERBOSE) as d2:
|
||||
d2['k1'] = 'k1'
|
||||
with PDS(name='d1', verbose=VERBOSE) as d1:
|
||||
d2.mergeOtherPDS(other_db = d1, update='update', status_interval=0)
|
||||
assert d2['k1'] == 1
|
||||
|
||||
def test_link_vs_copy():
    # setDataFromSubData with copy=False links the stored group (mutations are
    # shared), while copy=True creates an independent deep copy.
    data = np.arange(0,5)

    with PDS(name='d', verbose=VERBOSE) as d:
        d.clear()
        sub = d.getData('sub', create_sub_data = True)
        sub['3'] = 3
        sub['4'] = 4

        h5obj = d.getH5Object('sub')
        assert h5obj.attrs['size'] == 2

        # NOTE(review): the copy flags were swapped here relative to the key
        # names ('gr1_copy' was created with copy=False and 'gr1_link' with
        # copy=True); fixed for consistency with the second section below.
        d.setDataFromSubData('gr1_copy', sub, copy=True)
        h5obj = d.getH5Object('gr1_copy')
        assert h5obj.attrs['size'] == 2

        d.setDataFromSubData('gr1_link', sub, copy=False)
        h5obj = d.getH5Object('gr1_link')
        assert h5obj.attrs['size'] == 2

        # 'sub', 'gr1_copy' and 'gr1_link'
        assert len(d) == 3

    with PDS(name='d', verbose=VERBOSE) as d:
        d.clear()
        gr1 = d.getData('gr1', create_sub_data=True)
        gr1['data'] = data
        gr1['str'] = 'gr1'
        keys = [k for k in d['gr1']]
        assert keys[0] == 'str'
        assert keys[1] == 'data'

        d.setDataFromSubData('gr1_link', gr1, copy=False)
        assert len(d) == d.calc_len()

        # linking must not disturb the iteration order of either group
        keys = [k for k in d['gr1']]
        assert keys[0] == 'str'
        assert keys[1] == 'data'
        keys = [k for k in d['gr1_link']]
        assert keys[0] == 'str'
        assert keys[1] == 'data'

        d.setDataFromSubData('gr1_copy', gr1, copy=True)
        assert len(d) == d.calc_len()
        keys = [k for k in d['gr1']]
        assert keys[0] == 'str'
        assert keys[1] == 'data'
        keys = [k for k in d['gr1_copy']]
        assert keys[0] == 'str'
        assert keys[1] == 'data'

    with PDS(name='d', verbose=VERBOSE) as d:
        assert np.all(d['gr1']['data'] == d['gr1_link']['data'])
        assert np.all(d['gr1']['data'] == d['gr1_copy']['data'])

        # mutate through the original group: the link sees the change,
        # the copy does not
        d['gr1'].getH5Object('data')[0] = -3
        assert d['gr1_link']['data'][0] == -3
        assert d['gr1_copy']['data'][0] == 0

        keys = [k for k in d['gr1']]
        assert keys[0] == 'str'
        assert keys[1] == 'data'
|
||||
|
||||
|
||||
|
||||
|
||||
def test_merge_fname_conflict():
    # Merging two databases whose sub-data groups were created independently
    # (and may collide in their internal file naming) must keep all entries
    # from both sides intact; update='error' must not fire since the key sets
    # are disjoint.
    a = np.random.rand(5)
    b = np.random.rand(5)

    with PDS(name='d1', verbose=VERBOSE) as d1:
        d1.clear()
        d1['aa'] = a
        with d1.newSubData('sub1') as sub1:
            sub1['s1'] = 11
            sub1['a'] = a

    with PDS(name='d2', verbose=VERBOSE) as d2:
        d2.clear()
        d2['2k1'] = 1
        d2['2aa'] = b
        with d2.newSubData('sub2') as sub2:
            sub2['s2'] = 22
            sub2['a2'] = b

        assert np.all(d2['2aa'] == b)
        with PDS(name='d1', verbose=VERBOSE) as d1:
            d2.mergeOtherPDS(other_db = d1, update='error', status_interval=0)
        # d2's own entries survive the merge
        assert np.all(d2['2aa'] == b)

    with PDS(name='d2', verbose=VERBOSE) as d2:
        assert d2['2k1'] == 1
        assert np.all(d2['2aa'] == b)

        # entries merged in from d1
        assert np.all(d2['aa'] == a)

        assert d2.has_key('sub1')
        with d2['sub1'] as sub1:
            assert sub1['s1'] == 11
            assert np.all(sub1['a'] == a)

        assert d2.has_key('sub2')
        with d2['sub2'] as sub2:
            assert sub2['s2'] == 22
            assert np.all(sub2['a2'] == b)
|
||||
|
||||
|
||||
def test_convert_SQL_TO_H5():
    # A SQLite-backed database (PDS_SQL) merged into an HDF5-backed one (PDS)
    # must preserve all entry kinds: ints, tuples, unicode numpy arrays,
    # bytes keys and nested sub-data.
    data = np.empty((10,), dtype='<U2')
    data[0] = 'd\U00008000'   # non-BMP character to exercise unicode handling
    data[1] = 'ha'

    with PDS_SQL(name='pds_sql') as db:
        db.clear()
        db['a'] = 5
        db[4] = (3, 's', [0])
        db['uni'] = data
        db[b'\xff\xee'] = np.arange(4)
        with db.newSubData('sub') as sub:
            sub['d1'] = 1
            sub['d2'] = data

    with PDS(name='pds_h5') as db_h5:
        db_h5.clear()
        db_h5['datautest'] = data
        with PDS_SQL(name='pds_sql') as db_sql:
            mergePDS(db_sql, db_h5, status_interval=0)

    with PDS(name='pds_h5') as db_h5:
        assert db_h5['a'] == 5
        # NOTE(review): the integer key 4 is looked up as the string '4'
        # after conversion — presumably keys are stringified; verify in mergePDS
        assert db_h5['4'] == (3, 's', [0])
        assert np.all(db_h5[b'\xff\xee'] == np.arange(4))
        assert np.all(db_h5['uni'] == data)
        sub = db_h5['sub']
        assert sub['d1'] == 1
        assert np.all(sub['d2'] == data)
|
||||
|
||||
def test_iterator():
    # Iterating an open database must yield every top-level key.
    with PDS(name='pds') as db:
        db.clear()
        db['a'] = 5
        db[4] = (3, 's', [0])
        db['uni'] = np.arange(10)
        db[b'\xff\xee'] = np.arange(4)
        with db.newSubData('sub') as sub:
            sub['d1'] = 1
            sub['d2'] = np.arange(5)

    # reopen via the context manager instead of the previous manual
    # open/close pair, so the handle is released even if iteration fails
    with PDS(name='pds') as db:
        for k in db:
            print(k)
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # run the full suite manually (without a test runner); the first group of
    # functions is defined earlier in this file
    test_clear()
    test_pd()
    test_md5_clash()
    test_pd_bytes()

    test_mp_read_from_sqlite()
    test_from_existing_sub_data()
    test_remove_sub_data_and_check_len()
    test_len()

    test_not_in()
    test_npa()
    test_merge()
    test_merge_fname_conflict()
    test_link_vs_copy()
    test_convert_SQL_TO_H5()
    test_iterator()
    pass
|
Loading…
Add table
Reference in a new issue