implement snapshotting for online analysis

commit 098c83ab44 (parent 8a587ab02b)
mirror of https://github.com/vale981/two_qubit_model, synced 2025-03-04 17:21:43 -05:00

3 changed files with 41 additions and 13 deletions
@@ -23,6 +23,8 @@ import numpy as np
 from multiprocessing import Process
 import hops.core.signal_delay as signal_delay
 import signal
+from typing import Union
+import hopsflow.util


 @contextmanager
@@ -55,7 +57,7 @@ def model_db(data_path: str = "./.data"):

         f.truncate(0)
         f.seek(0)
-        f.write(JSONEncoder.dumps(db))
+        f.write(JSONEncoder.dumps(db, indent=4))


 def model_hook(dct: dict[str, Any]):
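The indent=4 change pretty-prints the on-disk model database, keeping it human-readable and diff-friendly. A minimal sketch of the same truncate/seek/write pattern, with the stdlib json module standing in for the project's custom JSONEncoder:

import json

def flush_db(f, db: dict) -> None:
    # Overwrite the already-open database file in place.
    f.truncate(0)                      # drop the old contents
    f.seek(0)                          # rewind to the start
    f.write(json.dumps(db, indent=4))  # indent=4 -> readable, diffable JSON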
@@ -261,6 +263,8 @@ def is_smaller(first: Path, second: Path) -> bool:
 def import_results(
     data_path: str = "./.data",
     other_data_path: str = "./.data_other",
+    results_path: Union[Path, str] = "./results",
+    other_results_path: Union[Path, str] = "./results_other",
     interactive: bool = False,
     models_to_import: Optional[Iterable[Model]] = None,
 ):
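For orientation, a hypothetical call site using the two new keyword arguments; the values shown are just the defaults, and both accept str or Path per the Union annotation:

# assuming import_results is in scope (it is defined in this module)
import_results(
    data_path="./.data",
    other_data_path="./.data_other",
    results_path="./results",              # new: where local analysis files live
    other_results_path="./results_other",  # new: analysis files to import from
    interactive=False,
)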
@@ -279,6 +283,9 @@ def import_results(
         [model.hexhash for model in models_to_import] if models_to_import else []
     )

+    results_path = Path(results_path)
+    other_results_path = Path(other_results_path)
+
     with model_db(other_data_path) as other_db:
         for current_hash, data in other_db.items():
             with model_db(data_path) as db:
@@ -291,21 +298,23 @@ def import_results(
                     logging.info(f"Skipping {current_hash}.")
                     continue

+                this_path = Path(data_path) / data["data_path"]
+                this_path_tmp = this_path.with_suffix(".part")
+                other_path = Path(other_data_path) / data["data_path"]
+
                 if current_hash not in db:
                     do_import = True
                 elif "data_path" not in db[current_hash]:
                     do_import = True
                 elif is_smaller(
-                    Path(data_path) / db[current_hash]["data_path"],
-                    Path(other_data_path) / data["data_path"],
+                    this_path,
+                    other_path,
                 ):
                     do_import = True

-                if do_import:
-                    this_path = Path(data_path) / data["data_path"]
-                    this_path_tmp = this_path.with_suffix(".part")
-                    other_path = Path(other_data_path) / data["data_path"]
+                logging.info(f"Not importing {current_hash}.")

+                if do_import:
                     config = data["model_config"]
                     logging.warning(f"Importing {other_path} to {this_path}.")
                     logging.warning(f"The model description is '{config.description}'.")
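The hoisted this_path/other_path assignments now feed is_smaller directly. The helper's body is not shown in this diff; below is a minimal stand-in consistent with its signature, assuming a plain size-on-disk comparison (the project's actual check may inspect the stored data instead):

from pathlib import Path

def is_smaller(first: Path, second: Path) -> bool:
    # Assumption: treat a missing local file as "smaller" so it gets imported.
    if not first.exists():
        return True
    return first.stat().st_size < second.stat().st_size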
@@ -317,11 +326,30 @@ def import_results(
                         continue

                     this_path.parents[0].mkdir(exist_ok=True, parents=True)

                     if is_smaller(this_path, other_path):
                         shutil.copy2(other_path, this_path_tmp)
+                        os.system("sync")
                         shutil.move(this_path_tmp, this_path)

+                    if "analysis_files" in data:
+                        for fname in data["analysis_files"].values():
+                            other_path = other_results_path / fname
+
+                            for (
+                                other_sub_path
+                            ) in hopsflow.util.get_all_snaphot_paths(other_path):
+                                this_path = results_path / other_sub_path.name
+                                this_path_tmp = this_path.with_suffix(".tmp")
+
+                                logging.warning(
+                                    f"Importing {other_path} to {this_path}."
+                                )
+
+                                if other_sub_path.exists():
+                                    shutil.copy2(other_sub_path, this_path_tmp)
+                                    os.system("sync")
+                                    shutil.move(this_path_tmp, this_path)

                     db[current_hash] = data

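Both the raw data and the new snapshot files (enumerated by hopsflow's get_all_snaphot_paths, used above as-is) are imported with the same copy/sync/rename dance. Extracted as a standalone sketch: moving a fully-written temporary onto the final name is atomic on a single filesystem, so a crash mid-copy never leaves a truncated file under the real name.

import os
import shutil
from pathlib import Path

def atomic_import(src: Path, dest: Path) -> None:
    tmp = dest.with_suffix(".part")  # stage next to the destination
    shutil.copy2(src, tmp)           # copy contents and metadata
    os.system("sync")                # flush caches before the rename
    shutil.move(tmp, dest)           # atomic replace on the same filesystem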
@@ -296,7 +296,7 @@ class Model(ABC):
         if not os.path.exists(file_path):
             raise RuntimeError(f"No data found under '{file_path}'.")

-        return hopsflow.util.WelfordAggregator.from_dump(file_path).ensemble_value
+        return hopsflow.util.get_online_values_from_cache(file_path)

     def system_energy(
         self, data: Optional[HIData] = None, results_path: str = "results", **kwargs
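Context for the change above: online analysis accumulates ensemble averages incrementally (the old code read a single WelfordAggregator dump), and, as the snapshot handling in the previous file suggests, intermediate states now live in a cache that get_online_values_from_cache reads back. A toy version of the underlying running-mean update, not hopsflow's actual class:

from typing import Optional

import numpy as np

class RunningMean:
    def __init__(self) -> None:
        self.n = 0
        self.mean: Optional[np.ndarray] = None

    def update(self, sample: np.ndarray) -> None:
        # Standard incremental (Welford-style) mean update.
        self.n += 1
        if self.mean is None:
            self.mean = sample.astype(float)
        else:
            self.mean += (sample - self.mean) / self.n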
@@ -589,7 +589,7 @@ class Model(ABC):

         return self.interaction_power(data, **kwargs).integrate(self.t)

-    def bath_energy(self, data: Optional[HIData], **kwargs) -> EnsembleValue:
+    def bath_energy(self, data: Optional[HIData] = None, **kwargs) -> EnsembleValue:
         """Calculates bath energy by integrating the bath energy flow
         calculated from the ``data`` or, if not supplied, tries to load
         the online results from ``results_path``.
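With the added default, bath_energy matches its docstring: callers may omit data and fall back to the online results. A hypothetical call, where model stands in for any concrete Model instance and hidata for a HIData object:

energy = model.bath_energy()               # no data given: loads the online results
# energy = model.bath_energy(data=hidata)  # or integrate from explicit data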
poetry.lock (generated): 6 changed lines
@@ -87,7 +87,7 @@ test-tox-coverage = ["coverage (>=5.5)"]
 type = "git"
 url = "https://github.com/beartype/beartype"
 reference = "main"
-resolved_reference = "f536570a1b8dc1d8f5cb3c07e93ce7915eabb899"
+resolved_reference = "b48a56fb497b36aa9640c107caa6bc85e02f5782"

 [[package]]
 name = "binfootprint"
@@ -362,7 +362,7 @@ plotting = ["matplotlib (>=3.5.0,<4.0.0)"]
 type = "git"
 url = "git@gitlab.hrz.tu-chemnitz.de:s8896854--tu-dresden.de/hops.git"
 reference = "main"
-resolved_reference = "ef9c3a500f9b2aa954a7f5b228c81d5363630b0d"
+resolved_reference = "573274ec04be0a65f0e035885ce387247887b1e8"

 [[package]]
 name = "hopsflow"
|
||||||
type = "git"
|
type = "git"
|
||||||
url = "https://github.com/vale981/hopsflow"
|
url = "https://github.com/vale981/hopsflow"
|
||||||
reference = "main"
|
reference = "main"
|
||||||
resolved_reference = "9c3fc669f6a103e70af7b04e7f5f057448be66d2"
|
resolved_reference = "e27e38b656b18e0bf3066d72e76ed1d467f26a5c"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "humanfriendly"
|
name = "humanfriendly"
|
||||||
|
|