two_qubit_model/hiro_models/utility.py

from datetime import datetime
import dateutil.parser
from functools import singledispatchmethod
import dataclasses
from dataclasses import dataclass
from beartype import beartype
import json
import numpy as np
import qutip as qt
from typing import Any, Union, SupportsFloat
import hashlib
import hops.util.dynamic_matrix as dynamic_matrix
from hops.util.dynamic_matrix import DynamicMatrix, SmoothStep
import scipy.special
from numpy.typing import NDArray
@beartype
@dataclass
class StocProcTolerances:
"""
An object to hold tolerances for :any:`stocproc.StocProc`
instances.
"""
integration: float = 1e-4
"""Integration tolerance."""
interpolation: float = 1e-4
"""Interpolation tolerance."""
class JSONEncoder(json.JSONEncoder):
"""
    A custom encoder to serialize objects occurring in
:any:`TwoQubitModel`.
"""
def encode(self, obj: Any):
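        # ``json`` serializes tuples as plain lists, so ``default`` never sees
        # them; tag them here, before encoding, so that ``object_hook`` can
        # restore them on decode.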
def hint_tuples(item: Any):
if isinstance(item, tuple):
return {
"type": "tuple",
"value": [
hint_tuples(i) if isinstance(i, tuple) else i for i in item
],
}
if isinstance(item, list):
return [hint_tuples(e) for e in item]
if isinstance(item, dict):
return {key: hint_tuples(value) for key, value in item.items()}
else:
return item
return super().encode(hint_tuples(obj))
@singledispatchmethod
def default(self, obj: Any):
if hasattr(obj, "to_dict"):
return obj.to_dict()
return super().default(obj)
    @default.register(tuple)
    def _(self, obj: tuple):
        return {
            "type": "tuple",
            "value": list(obj),
        }
@default.register
def _(self, arr: np.ndarray):
return {"type": "array", "value": arr.tolist()}
@default.register
def _(self, obj: qt.Qobj):
return {
"type": "Qobj",
"value": obj.full(),
"dims": obj.dims,
"obj_type": obj.type,
}
@default.register
def _(self, obj: complex):
return {"type": "complex", "re": obj.real, "im": obj.imag}
@default.register
def _(self, obj: StocProcTolerances):
return {"type": "StocProcTolerances", "value": dataclasses.asdict(obj)}
@default.register
def _(self, obj: DynamicMatrix):
return {
"type": "DynamicMatrix",
"subtype": obj.__class__.__name__,
"value": obj.__getstate__(),
}
@default.register
def _(self, obj: datetime):
return {
"type": "datetime",
"value": obj.isoformat(),
}
@classmethod
def dumps(cls, data, **kwargs) -> str:
"""Like :any:`json.dumps`, just for this encoder.
The ``kwargs`` are passed on to :any:`json.dumps`.
"""
return json.dumps(
data,
**kwargs,
cls=cls,
ensure_ascii=False,
)
@classmethod
def loads(cls, string: str, **kwargs) -> dict[str, Any]:
"""Like :any:`json.loads`, just for this encoder.
The ``kwargs`` are passed on to :any:`json.loads`.
"""
if "object_hook" not in kwargs:
kwargs["object_hook"] = object_hook
return json.loads(string, **kwargs)
@classmethod
def hash(cls, data, **kwargs):
"""
        Like :any:`dumps`, except that the result is fed into
        :any:`hashlib.sha256`.  The ``sha256`` hash object is returned.
"""
return hashlib.sha256(cls.dumps(data, sort_keys=True, **kwargs).encode("utf-8"))
@classmethod
def hexhash(cls, data, **kwargs) -> str:
"""
        Like :any:`hash`, except that the hex digest is returned.
"""
return cls.hash(data, **kwargs).hexdigest()
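
# Round-trip sketch for the encoder above (illustrative only; assumes the qutip
# and numpy imports of this module):
#
#     payload = {"op": qt.sigmax(), "times": np.linspace(0, 1, 3), "z": 1 + 2j}
#     blob = JSONEncoder.dumps(payload)
#     restored = JSONEncoder.loads(blob)
#     assert isinstance(restored["op"], qt.Qobj)
#     assert np.allclose(restored["times"], payload["times"])
#     assert restored["z"] == 1 + 2j
#     key = JSONEncoder.hexhash(payload)  # deterministic sha256 hex digest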
def object_hook(dct: dict[str, Any]):
"""A custom decoder for the types introduced in :any:`JSONEncoder`."""
if "type" in dct:
type = dct["type"]
if type == "array":
return np.array(dct["value"])
if type == "Qobj":
return qt.Qobj(dct["value"], dims=dct["dims"], type=dct["obj_type"])
if type == "complex":
return dct["re"] + 1j * dct["im"]
if type == "StocProcTolerances":
return StocProcTolerances(**dct["value"])
if type == "DynamicMatrix":
return getattr(dynamic_matrix, dct["subtype"])(**dct["value"])
if type == "tuple":
return tuple(dct["value"])
if type == "datetime":
return dateutil.parser.parse(dct["value"])
return dct
def operator_norm(obj: qt.Qobj) -> float:
"""Returns the operator norm of ``obj``."""
return np.sqrt(max(np.abs((obj.dag() * obj).eigenenergies())))
def assert_serializable(model):
"""
    Serialize ``model`` to JSON and restore it, asserting that the
    object stays the same.
"""
assert model == model.__class__.from_json(
model.to_json()
), "Serialization should not change the model."
def bcf_scale(
δ: SupportsFloat,
L: DynamicMatrix,
t_max: SupportsFloat,
s: SupportsFloat,
ω_c: SupportsFloat,
) -> float:
r"""
    Calculate the normalized BCF scale so that
    :math:`\langle H_I \rangle \approx δ`.

    :param δ: The coupling strength.
    :param L: The coupling operator.
    :param t_max: The maximal simulation time.
    :param s: The :math:`s` parameter of the (sub/super) ohmic BCF.
    :param ω_c: The cutoff frequency of the BCF.
"""
L_expect = (L @ L.dag + L.dag @ L).max_operator_norm(t_max)
bcf_norm = (
np.pi * float(s) / (scipy.special.gamma(float(s) + 1) * float(ω_c) ** float(s))
)
return float(δ) / L_expect * bcf_norm
def linspace_with_strobe(
begin: float, end: float, N: int, strobe_frequency: float
) -> NDArray[np.float64]:
"""
    Like ``linspace``, but with the time points defined by the stroboscope
    angular frequency ``strobe_frequency`` included as well.  The returned
    array may therefore contain more than ``N`` points.
"""
return np.unique(
np.sort(
np.concatenate(
[
np.linspace(begin, end, N),
np.arange(begin, end, 2 * np.pi / strobe_frequency),
]
)
)
)
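
# Example sketch (illustrative only): with strobe_frequency = 2 * np.pi the
# strobe period is 1, so linspace_with_strobe(0, 3, 10, 2 * np.pi) contains the
# strobe times 0.0, 1.0 and 2.0 in addition to the usual linspace grid.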