some updates on 7

Valentin Boettcher 2022-08-15 21:44:17 +02:00
parent 61be4d4fba
commit 097586299b
No known key found for this signature in database
GPG key ID: E034E12B7AF56ACE
15 changed files with 179929 additions and 50 deletions

[Several file diffs suppressed because they are too large to display. Six image previews (after): 349 KiB, 127 KiB, 1.2 MiB, 162 KiB, 114 KiB, 239 KiB.]

View file

@@ -1,4 +1,4 @@
#! /usr/bin/env bash
sudo mount.nfs 141.30.17.16:/data_local_huge/valentin/master/07/07_one_bath_systematics/.data $(dirname "$0")/.data
sudo mount.nfs 141.30.17.16:/data_local_huge/valentin/master/07/07_one_bath_systematics/results $(dirname "$0")/results
sudo mount.nfs workhorse:/data_local_huge/valentin/master/07/07_one_bath_systematics/.data $(dirname "$0")/.data
sudo mount.nfs workhorse:/data_local_huge/valentin/master/07/07_one_bath_systematics/results $(dirname "$0")/results
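If the NFS shares are not actually mounted, the notebook only fails later with confusing I/O errors. A minimal, hypothetical guard (not part of this commit) that a notebook cell could run before touching =.data= or =results=:

#+begin_src jupyter-python
import os
import sys

# Hypothetical guard, not in the repository: make sure the NFS shares
# mounted by the script above are present before reading any data.
for mount in [".data", "results"]:
    if not os.path.ismount(mount):
        sys.exit(f"'{mount}' is not mounted; run the mount script first.")
#+end_src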

View file

@@ -16,7 +16,7 @@ import matplotlib
import ray
ray.shutdown()
ray.init(address="auto")
ray.init()
from hops.util.logging_setup import logging_setup
import logging

View file

@@ -23,11 +23,11 @@ Init ray and silence stocproc.
#+begin_src jupyter-python :tangle scripts/integrate_slip.py
import ray
ray.shutdown()
ray.init(address="auto")
ray.init()
#+end_src
#+RESULTS:
: RayContext(dashboard_url='', python_version='3.9.13', ray_version='1.13.0', ray_commit='e4ce38d001dbbe09cd21c497fedd03d692b2be3e', address_info={'node_ip_address': '141.30.17.225', 'raylet_ip_address': '141.30.17.225', 'redis_address': None, 'object_store_address': '/tmp/ray/session_2022-08-03_14-53-16_736667_825118/sockets/plasma_store.7', 'raylet_socket_name': '/tmp/ray/session_2022-08-03_14-53-16_736667_825118/sockets/raylet.3', 'webui_url': '', 'session_dir': '/tmp/ray/session_2022-08-03_14-53-16_736667_825118', 'metrics_export_port': 59318, 'gcs_address': '141.30.17.16:6379', 'address': '141.30.17.16:6379', 'node_id': '4261dafb07982d866fc5f9805325751682bf9a59ad6e8cfeafab3a7a'})
: RayContext(dashboard_url='', python_version='3.9.13', ray_version='1.13.0', ray_commit='e4ce38d001dbbe09cd21c497fedd03d692b2be3e', address_info={'node_ip_address': '192.168.100.170', 'raylet_ip_address': '192.168.100.170', 'redis_address': None, 'object_store_address': '/tmp/ray/session_2022-08-14_17-20-46_960858_72812/sockets/plasma_store', 'raylet_socket_name': '/tmp/ray/session_2022-08-14_17-20-46_960858_72812/sockets/raylet', 'webui_url': '', 'session_dir': '/tmp/ray/session_2022-08-14_17-20-46_960858_72812', 'metrics_export_port': 43591, 'gcs_address': '192.168.100.170:64809', 'address': '192.168.100.170:64809', 'node_id': '1dd774b25412ed5294df4862101460494986daa6a2a8808227e2252d'})
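The change from =ray.init(address="auto")= to =ray.init()= switches from attaching to the standing cluster (GCS at 141.30.17.16:6379 in the old session info above) to starting a fresh local ray instance. A small hypothetical helper, not part of this commit, that would allow both modes without editing the tangled script; it assumes ray raises =ConnectionError= when no cluster is reachable, as ray 1.13 does:

#+begin_src jupyter-python
import ray

def init_ray(prefer_cluster: bool = True):
    """Attach to a running ray cluster if one is reachable, otherwise
    start a local instance.  Hypothetical helper, not in the repository."""
    ray.shutdown()
    if prefer_cluster:
        try:
            # only succeeds if a cluster is already running
            return ray.init(address="auto")
        except ConnectionError:
            pass  # no cluster found, fall back to a local instance
    return ray.init()
#+end_src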
#+begin_src jupyter-python :results none :tangle scripts/integrate_slip.py
from hops.util.logging_setup import logging_setup
@@ -48,8 +48,8 @@ We use a logspaced time to resolve the initial slip.
#+RESULTS:
:RESULTS:
| <matplotlib.lines.Line2D | at | 0x7f23bbaacaf0> |
[[file:./.ob-jupyter/05d0b45df064c0f44a1fcacfde5f455874881298.svg]]
| <matplotlib.lines.Line2D | at | 0x7f0a61043e80> |
[[file:./.ob-jupyter/fbfe58386f7fb10dfa36a6c1b9f4db68ce76b8e3.svg]]
:END:
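The hunk header above mentions the log-spaced time grid used to resolve the initial slip. For orientation, such a grid can be built along these lines (a sketch only; the actual =t_max= and number of points used in the notebook are not visible in this diff):

#+begin_src jupyter-python
import numpy as np

# Sketch of a log-spaced time grid: dense near t=0 to resolve the initial
# slip, sparse at late times.  The parameters below are placeholders.
t_max, n_points = 10, 200
t = np.concatenate(([0.0], np.geomspace(1e-3, t_max, n_points - 1)))
#+end_src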
#+begin_src jupyter-python :results none :tangle scripts/integrate_slip.py
@@ -1672,7 +1672,7 @@ f, ax = plot_interaction_consistency(
#+begin_src jupyter-python
from hops.util.dynamic_matrix import ConstantMatrix, SmoothStep
t_max = 10
t_max = 40
L_op = 1 / 2 * (ConstantMatrix(qt.sigmax()))
width_models_heights = []
ref = QubitModel(
@@ -1690,7 +1690,7 @@ f, ax = plot_interaction_consistency(
driving_process_tolerance=StocProcTolerances(1e-4, 1e-4),
)
for δ in [.001, 1]:
for δ in [.001, .1]:
for ω_c in [4,5,6]:
ref.ω_c = ω_c
ref.ω_s = 4 - ω_c
@@ -1708,32 +1708,177 @@ f, ax = plot_interaction_consistency(
#+begin_src jupyter-python
aux.integrate_multi(width_models_heights[2:], 1000)
aux.integrate_multi(width_models_heights[3:], 1000)
#+end_src
#+RESULTS:
:RESULTS:
#+begin_example
[INFO hops.core.integration 10013] Choosing the nonlinear integrator.
[INFO hops.core.integration 10013] Using 21 integrators.
[INFO hops.core.integration 10013] Some 500 trajectories have to be integrated.
[INFO hops.core.integration 10013] Using 330 hierarchy states.
100% 500/500 [01:00<00:00, 8.23it/s]
[INFO hops.core.integration 10013] Choosing the nonlinear integrator.
[INFO hops.core.integration 10013] Using 21 integrators.
[INFO hops.core.integration 10013] Some 500 trajectories have to be integrated.
[INFO hops.core.integration 10013] Using 330 hierarchy states.
100% 500/500 [00:40<00:00, 12.23it/s]
[INFO hops.core.integration 10013] Choosing the nonlinear integrator.
[INFO hops.core.integration 10013] Using 21 integrators.
[INFO hops.core.integration 10013] Some 500 trajectories have to be integrated.
[INFO hops.core.integration 10013] Using 330 hierarchy states.
100% 500/500 [00:49<00:00, 10.01it/s]
[INFO hops.core.integration 10013] Choosing the nonlinear integrator.
[INFO hops.core.integration 10013] Using 21 integrators.
[INFO hops.core.integration 10013] Some 500 trajectories have to be integrated.
[INFO hops.core.integration 10013] Using 330 hierarchy states.
100% 500/500 [00:55<00:00, 9.06it/s]
[INFO hops.core.integration 72812] Choosing the nonlinear integrator.
[INFO hops.core.integration 72812] Using 8 integrators.
[INFO hops.core.integration 72812] Some 0 trajectories have to be integrated.
[INFO hops.core.integration 72812] Using 330 hierarchy states.
0it [00:00, ?it/s]
[INFO hops.core.integration 72812] Choosing the nonlinear integrator.
[INFO hops.core.integration 72812] Using 8 integrators.
[INFO hops.core.integration 72812] Some 0 trajectories have to be integrated.
[INFO hops.core.integration 72812] Using 330 hierarchy states.
0it [00:00, ?it/s]
[INFO hops.core.integration 72812] Choosing the nonlinear integrator.
#+end_example
# [goto error]
#+begin_example
---------------------------------------------------------------------------
KeyboardInterrupt Traceback (most recent call last)
Input In [10], in <cell line: 1>()
----> 1 aux.integrate_multi(width_models_heights[3:], 1000)
File ~/src/two_qubit_model/hiro_models/model_auxiliary.py:84, in integrate_multi(models, *args, **kwargs)
 77 """Integrate the hops equations for the ``models``.
 78 Like :any:`integrate` just for many models.
 79
 80 A call to :any:`ray.init` may be required.
 81 """
 83 for model in models:
---> 84 integrate(model, *args, **kwargs)
File ~/src/two_qubit_model/hiro_models/model_auxiliary.py:108, in integrate(model, n, data_path, clear_pd)
 98 # with model_db(data_path) as db:
 99 # if hash in db and "data" db[hash]
 101 supervisor = HOPSSupervisor(
 102 model.hops_config,
 103 n,
 104 data_path=data_path,
 105 data_name=hash,
 106 )
--> 108 supervisor.integrate(clear_pd)
 110 with supervisor.get_data(True) as data:
 111 with model_db(data_path) as db:
File ~/src/hops/hops/core/integration.py:1235, in HOPSSupervisor.integrate(self, clear_pd)
 1216 """Integrate the HOPS equations on a ray cluster (see
 1217 :any:`ray.init`).
 1218
 1219 :param clear_pd: If set to :any:`True`, the result database
 1220  will be cleared prior to the integration.
 1221 """
 1223 # Despite their name, we don't instantiate the actor as a
 1224 # `ray actor`. Rather we put one instance into the ray object
 1225 # store and then launch as many tasks as we have trajectories.
 (...)
 1232 #
 1233 # -- Valentin Boettcher
-> 1235 with self.get_data_and_maybe_clear(clear_pd) as data:
 1236 t = data.get_time()
 1238 num_integrators = int(ray.available_resources().get("CPU", 0))
File ~/src/hops/hops/core/integration.py:1318, in HOPSSupervisor.get_data_and_maybe_clear(self, clear)
 1311 def get_data_and_maybe_clear(self, clear: bool = False) -> hid.HIData:
 1312 """
 1313  Like :any:`get_data` but conditionally clears the data and sets the time.
 1314
 1315  :param clear: Whether to clear the data.
 1316  """
-> 1318 with self.get_data() as data:
 1319 if clear:
 1320 log.info("Clear HIData contained in {}".format(data.h5File))
File ~/src/hops/hops/core/integration.py:1307, in HOPSSupervisor.get_data(self, read_only)
 1299 def get_data(self, read_only: bool = False) -> hid.HIData:
 1300 """
 1301  :returns: The database containing results that correspond to the current configuration
 1302  (:any:`params`).
 1303
 1304  :param read_only: Whether to open the database in read only mode.
 1305  """
-> 1307 return self.metadata.get_HIData(
 1308  key=self.params, read_only=read_only, robust=True
 1309  )
File ~/src/hops/hops/core/hierarchy_data.py:1360, in HIMetaData.get_HIData(self, key, read_only, overwrite_key, robust)
 1355 hdf5_name, bin_key, hashed_key = self.get_HIData_fname(
 1356 key, ret_bin_and_hash=True
 1357 )
 1359 assert isinstance(bin_key, bytes)
-> 1360 return HIData(
 1361  str(self.path / hdf5_name),
 1362  read_only=read_only,
 1363  hi_key=key,
 1364  hi_key_bin=bin_key,
 1365  hi_key_bin_hash=hashed_key,
 1366  overwrite_key=overwrite_key,
 1367  robust=robust,
 1368 )
File ~/src/hops/hops/core/hierarchy_data.py:397, in HIData.__init__(self, hdf5_name, read_only, hi_key, hi_key_bin, hi_key_bin_hash, check_consistency, overwrite_key, robust)
 385 """The time points on which the trajectories and everything else are
 386 given.
 387
 (...)
 393 :any:`time_set`.
 394 """
 396 self._init_bcf_terms_and_aux_states()
--> 397 self._open_file(read_only, hi_key_bin, hi_key_bin_hash, overwrite_key, robust)
 399 if check_consistency:
 400 self._check_consistency(hi_key_bin, hi_key_bin_hash)
File ~/src/hops/hops/core/hierarchy_data.py:485, in HIData._open_file(self, read_only, hi_key_bin, hi_key_bin_hash, overwrite_key, backup_if_error)
 483 if not read_only:
 484 try:
--> 485 p = test_file_version(self.hdf5_name)
 486 if p:
 487 warnings.warn(
 488 "can not check version! process list {} has access to hdf5 file {}".format(
 489 p, self.hdf5_name
 490 )
 491 )
File ~/src/hops/hops/core/hierarchy_data.py:1384, in test_file_version(hdf5_name)
 1379 if len(p) > 0:
 1380 # another process accesses the file, assume that the file has allready the new format,
 1381 # since that other process has already changed it
 1382 return p
-> 1384 with h5py.File(hdf5_name, "r+", libver="latest") as h5File:
 1385 # print("test file, open", hdf5_name, "'r+")
 1386 try:
 1387 # print("test file, try to set swmr_mode True")
 1388 h5File.swmr_mode = True
File /nix/store/786z5qd97vl0sdh5h4x9kg8ql6ihs4n0-python3-3.9.13-env/lib/python3.9/site-packages/h5py/_hl/files.py:533, in File.__init__(self, name, mode, driver, libver, userblock_size, swmr, rdcc_nslots, rdcc_nbytes, rdcc_w0, track_order, fs_strategy, fs_persist, fs_threshold, fs_page_size, page_buf_size, min_meta_keep, min_raw_keep, locking, alignment_threshold, alignment_interval, **kwds)
 525 fapl = make_fapl(driver, libver, rdcc_nslots, rdcc_nbytes, rdcc_w0,
 526 locking, page_buf_size, min_meta_keep, min_raw_keep,
 527 alignment_threshold=alignment_threshold,
 528 alignment_interval=alignment_interval,
 529 **kwds)
 530 fcpl = make_fcpl(track_order=track_order, fs_strategy=fs_strategy,
 531 fs_persist=fs_persist, fs_threshold=fs_threshold,
 532 fs_page_size=fs_page_size)
--> 533 fid = make_fid(name, mode, userblock_size, fapl, fcpl, swmr=swmr)
 535 if isinstance(libver, tuple):
 536 self._libver = libver
File /nix/store/786z5qd97vl0sdh5h4x9kg8ql6ihs4n0-python3-3.9.13-env/lib/python3.9/site-packages/h5py/_hl/files.py:228, in make_fid(name, mode, userblock_size, fapl, fcpl, swmr)
 226 fid = h5f.open(name, flags, fapl=fapl)
 227 elif mode == 'r+':
--> 228 fid = h5f.open(name, h5f.ACC_RDWR, fapl=fapl)
 229 elif mode in ['w-', 'x']:
 230 fid = h5f.create(name, h5f.ACC_EXCL, fapl=fapl, fcpl=fcpl)
File h5py/_objects.pyx:54, in h5py._objects.with_phil.wrapper()
File h5py/_objects.pyx:55, in h5py._objects.with_phil.wrapper()
File h5py/h5f.pyx:106, in h5py.h5f.open()
File h5py/h5g.pyx:271, in h5py.h5g.GroupID.__init__()
File h5py/h5g.pyx:272, in h5py.h5g.GroupID.__init__()
File <frozen importlib._bootstrap>:398, in parent(self)
KeyboardInterrupt:
#+end_example
:END:
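The interrupted call above is stuck while =test_file_version= reopens the result database with =h5py.File(..., "r+")=. Since =results= is now reached over NFS (see the mount script), a slow or blocked open is plausible, either from plain NFS latency on the large files or from HDF5 file locking on the network share. A heavily hedged, hypothetical workaround, not part of this commit and only safe when no other process is writing the same file, is to disable HDF5 file locking before the file is opened:

#+begin_src jupyter-python
import os

# Hypothetical workaround, NOT from this repository: disable HDF5 file
# locking (honoured by HDF5 >= 1.10).  Set it before h5py opens the file,
# and only when no other writer is active on the same file.
os.environ["HDF5_USE_FILE_LOCKING"] = "FALSE"
#+end_src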
#+begin_src jupyter-python
@@ -1742,7 +1887,7 @@ f, ax = plot_interaction_consistency(
flow = -1 * model.bath_energy_flow(data, gc_sleep=0).for_bath(0)
_, _, (lines, _) = plot_with_σ(
model.t,
flow * (1/flow.value[-1]),
flow ,
label=fr"$\omega_c={model.ω_c}$",
ax=ax,
)
@@ -1769,30 +1914,15 @@ f, ax = plot_interaction_consistency(
# )
# plot_with_σ(model.t, total, ax=ax, color=lines[0].get_color())
#ax.set_yscale("log")
plt.xlim(8,10)
plt.ylim(.9, 2)
# plt.xlim(8,10)
# plt.ylim(.9, 2)
plt.legend()
#+end_src
#+RESULTS:
:RESULTS:
#+begin_example
Loading: 2% 2/84 [00:00<00:11, 7.13it/s](integration_task pid=32030, ip=141.30.17.8) E0812 20:55:23.911544195 32243 chttp2_transport.cc:1103] Received a GOAWAY with error code ENHANCE_YOUR_CALM and debug data equal to "too_many_pings"
(integration_task pid=31798, ip=141.30.17.8) E0812 20:55:24.003499709 32022 chttp2_transport.cc:1103] Received a GOAWAY with error code ENHANCE_YOUR_CALM and debug data equal to "too_many_pings"
Loading: 5% 4/84 [00:00<00:10, 7.52it/s](integration_task pid=32001, ip=141.30.17.8) E0812 20:55:24.167139336 32253 chttp2_transport.cc:1103] Received a GOAWAY with error code ENHANCE_YOUR_CALM and debug data equal to "too_many_pings"
Loading: 8% 7/84 [00:00<00:08, 8.81it/s](integration_task pid=31982, ip=141.30.17.8) E0812 20:55:24.470939153 32222 chttp2_transport.cc:1103] Received a GOAWAY with error code ENHANCE_YOUR_CALM and debug data equal to "too_many_pings"
Loading: 10% 8/84 [00:00<00:08, 8.54it/s](integration_task pid=31797, ip=141.30.17.8) E0812 20:55:24.602187660 32006 chttp2_transport.cc:1103] Received a GOAWAY with error code ENHANCE_YOUR_CALM and debug data equal to "too_many_pings"
Loading: 100% 84/84 [00:17<00:00, 4.70it/s]
[INFO root 10013] Writing cache to: results/flow_331f9efc6c9a96e44040b52e5b1bdc5dc1ef0b66239e4bd74eb7ffc28fc1ac4b_flow_worker_1000_None_72e02dfe4c10430b09a230ac69f7ce17d2b26b203346f53fd151a749d0db0ea8.npy
Loading: 100% 84/84 [00:16<00:00, 5.12it/s]
[INFO root 10013] Writing cache to: results/flow_1f692962f6be2c0b07f9ec1346e5895d2a51480138232d4e6a04fc8cb4a4bcd5_flow_worker_1000_None_72e02dfe4c10430b09a230ac69f7ce17d2b26b203346f53fd151a749d0db0ea8.npy
Loading: 100% 84/84 [00:16<00:00, 5.08it/s]
[INFO root 10013] Writing cache to: results/flow_e524dd5144b67fa9354b65c0a6749782a40b86e4b3f0fd905a115e5f28b500bf_flow_worker_1000_None_72e02dfe4c10430b09a230ac69f7ce17d2b26b203346f53fd151a749d0db0ea8.npy
Loading: 100% 84/84 [00:16<00:00, 5.03it/s]
[INFO root 10013] Writing cache to: results/flow_fe8dafbadf6b56a312c509217693e49da823908a772ae6e8c979ac7a28379c5a_flow_worker_1000_None_72e02dfe4c10430b09a230ac69f7ce17d2b26b203346f53fd151a749d0db0ea8.npy
#+end_example
: <matplotlib.legend.Legend at 0x7f222c89f760>
[[file:./.ob-jupyter/718773bf6ae3d946ad06a83feed92fa7077d9271.svg]]
: <matplotlib.legend.Legend at 0x7f0a60c013a0>
[[file:./.ob-jupyter/ea0e2d8ab0f00f7953f03bdc8a4179fa1e761f10.svg]]
:END: