#+PROPERTY: header-args :session finite_temp :kernel python :pandoc t :async yes
* Setup
** Jupyter
#+begin_src jupyter-python
%load_ext autoreload
%autoreload 2
%load_ext jupyter_spaces
#+end_src
#+RESULTS:
: The autoreload extension is already loaded. To reload it, use:
: %reload_ext autoreload
** Matplotlib
#+begin_src jupyter-python
import matplotlib
import matplotlib.pyplot as plt
#matplotlib.use("TkCairo", force=True)
%gui tk
%matplotlib inline
plt.style.use('ggplot')
#+end_src
#+RESULTS:
** Richard (old) HOPS
#+begin_src jupyter-python
import hierarchyLib
import hierarchyData
import numpy as np
from stocproc.stocproc import StocProc_FFT
import bcf
from dataclasses import dataclass, field
import scipy
import scipy.misc
import scipy.signal
import scipy.integrate  # needed for the integrated energy further down
import pickle
from scipy.special import gamma as gamma_func
from scipy.optimize import curve_fit
#+end_src
#+RESULTS:
** Auxiliary Definitions
#+begin_src jupyter-python
# Pauli matrices
σ1 = np.matrix([[0, 1], [1, 0]])
σ2 = np.matrix([[0, -1j], [1j, 0]])
σ3 = np.matrix([[1, 0], [0, -1]])
#+end_src
#+RESULTS:
* Model Setup
Basic parameters.
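The system is a qubit with $H_S = \sigma_z + \mathbb{1}$, coupled via $L = \sigma_y$ to an ohmic bath with $s = 1$ and cutoff $\omega_c = 1$ at temperature $T = 2$; we propagate $N = 1000$ trajectories up to $t_{\mathrm{max}} = 10$ with hierarchy depth $k_{\mathrm{max}} = 10$.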
#+begin_src jupyter-python
class params:
    T = 2
    t_max = 10
    t_steps = int(t_max * 1 / 0.01)
    k_max = 10
    N = 1000
    seed = 100
    dim = 2

    H_s = σ3 + np.eye(dim)
    L = σ2  # 1 / 2 * (σ1 - 1j * σ2)
    ψ_0 = np.array([0, 1])

    s = 1
    num_exp_t = 4
    wc = 1

    with open("good_fit_data_abs_brute_force", "rb") as f:
        good_fit_data_abs = pickle.load(f)

    alpha = 0.8
    # _, g_tilde, w_tilde = good_fit_data_abs[(numExpFit, s)]
    # g_tilde = np.array(g_tilde)
    # w_tilde = np.array(w_tilde)
    # g = 1 / np.pi * gamma_func(s + 1) * wc ** (s + 1) * np.asarray(g_tilde)
    # w = wc * np.asarray(w_tilde)

    bcf_scale = np.pi / 2 * alpha * wc ** (1 - s)
#+end_src
#+RESULTS:
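The prefactor =bcf_scale= matches the usual ohmic convention (an assumption about the normalization used in the =bcf= module): with spectral density
\[
J(\omega) = \frac{\pi}{2}\, \alpha\, \omega_c^{1-s}\, \omega^s e^{-\omega/\omega_c},
\]
the factor $\frac{\pi}{2}\alpha\omega_c^{1-s}$ multiplies the unit-normalized BCF and its fit coefficients.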
** BCF and Thermal BCF
#+begin_src jupyter-python
@dataclass
class CauchyBCF:
    δ: float
    wc: float

    def I(self, ω):
        return np.sqrt(self.δ) / (self.δ + (ω - self.wc) ** 2 / self.δ)

    def __call__(self, τ):
        return np.sqrt(self.δ) * np.exp(-1j * self.wc * τ - np.abs(τ) * self.δ)

    def __bfkey__(self):
        return self.δ, self.wc


α = bcf.OBCF(s=params.s, eta=1, gamma=params.wc)
I = bcf.OSD(s=params.s, eta=1, gamma=params.wc)
#+end_src
#+RESULTS:
*** Fit
We now fit a sum of exponentials against the BCF.
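That is, we seek complex $g_k$, $w_k$ such that
\[
\alpha(\tau) \approx \sum_{k=1}^{K} g_k\, e^{-w_k \tau}, \qquad K = 4,
\]
which is the form the HOPS hierarchy requires.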
#+begin_src jupyter-python
from lmfit import minimize, Parameters


def α_apprx(τ, g, w):
    return np.sum(
        g[np.newaxis, :] * np.exp(-w[np.newaxis, :] * (τ[:, np.newaxis])), axis=1
    )


def _fit():
    def residual(fit_params, x, data):
        w = np.array(
            [fit_params[f"w{i}"] for i in range(params.num_exp_t)]
        ) + 1j * np.array([fit_params[f"wi{i}"] for i in range(params.num_exp_t)])
        g = np.array(
            [fit_params[f"g{i}"] for i in range(params.num_exp_t)]
        ) + 1j * np.array([fit_params[f"gi{i}"] for i in range(params.num_exp_t)])

        resid = data - α_apprx(x, g, w)
        return resid.view(float)

    fit_params = Parameters()
    for i in range(params.num_exp_t):
        fit_params.add(f"g{i}", value=0.1)
        fit_params.add(f"gi{i}", value=0.1)
        fit_params.add(f"w{i}", value=0.1)
        fit_params.add(f"wi{i}", value=0.1)

    ts = np.linspace(0, params.t_max, 1000)
    out = minimize(residual, fit_params, args=(ts, α(ts)))

    w = np.array(
        [out.params[f"w{i}"] for i in range(params.num_exp_t)]
    ) + 1j * np.array([out.params[f"wi{i}"] for i in range(params.num_exp_t)])
    g = np.array(
        [out.params[f"g{i}"] for i in range(params.num_exp_t)]
    ) + 1j * np.array([out.params[f"gi{i}"] for i in range(params.num_exp_t)])

    return w, g


w, g = _fit()
#+end_src
#+RESULTS:
*** Plot
Let's look at the result.
#+begin_src jupyter-python
class bcfplt:
    t = np.linspace(0, params.t_max, 1000)
    ω = np.linspace(params.wc - 10, params.wc + 10, 1000)

    fig, axs = plt.subplots(2)
    axs[0].plot(t, np.real(α(t)))
    axs[0].plot(t, np.imag(α(t)))
    axs[0].plot(t, np.real(α_apprx(t, g, w)))
    axs[0].plot(t, np.imag(α_apprx(t, g, w)))
    axs[1].plot(ω, I(ω).real)
    axs[1].plot(ω, I(ω).imag)
#+end_src
#+RESULTS:
[[file:./.ob-jupyter/b2ed70a60ed972b1a9df01e1cca7fdf0e33eff97.png]]
Seems ok for now.
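A quick quantitative check of the fit (a sketch, not part of the original pipeline):
#+begin_src jupyter-python
# maximum absolute deviation of the exponential fit from the exact BCF
ts = np.linspace(0, params.t_max, 1000)
max_dev = np.max(np.abs(α(ts) - α_apprx(ts, g, w)))
print(f"max |α - α_apprx| on [0, t_max]: {max_dev:.2e}")
#+end_src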
** HOPS Setup
#+begin_src jupyter-python
HierachyParam = hierarchyData.HiP(
    k_max=params.k_max,
    # g_scale=None,
    # sample_method='random',
    seed=params.seed,
    nonlinear=True,
    normalized=False,
    # terminator=False,
    result_type=hierarchyData.RESULT_TYPE_ALL,
    # accum_only=None,
    # rand_skip=None
)
#+end_src
#+RESULTS:
The integration parameters.
#+begin_src jupyter-python
IntegrationParam = hierarchyData.IntP(
    t_max=params.t_max,
    t_steps=params.t_steps,
    # integrator_name='zvode',
    # atol=1e-8,
    # rtol=1e-8,
    # order=5,
    # nsteps=5000,
    # method='bdf',
    # t_steps_skip=1
)
#+end_src
#+RESULTS:
And now the system.
#+begin_src jupyter-python
SystemParam = hierarchyData.SysP(
    H_sys=params.H_s,
    L=params.L,
    psi0=params.ψ_0,
    g=np.array(g),
    w=np.array(w),
    H_dynamic=[],
    bcf_scale=params.bcf_scale,  # coupling strength (scaling of the fit parameters 'g_i')
    gw_hash=None,  # this is used to load g, w from some database
    len_gw=len(g),
)
#+end_src
#+RESULTS:
The quantum noise.
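This samples Gaussian processes with correlation $\langle \eta(t)\, \eta^*(s)\rangle = \alpha(t - s)$ and $\langle \eta(t)\, \eta(s)\rangle = 0$ (up to the =bcf_scale= factor), generated from the spectral density via the FFT method of =stocproc=.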
#+begin_src jupyter-python
Eta = StocProc_FFT(
    I,
    params.t_max,
    α,
    negative_frequencies=False,
    seed=params.seed,
    intgr_tol=1e-3,
    intpl_tol=1e-3,
    scale=params.bcf_scale,
)
#+end_src
#+RESULTS:
#+begin_example
stocproc.stocproc - INFO - non neg freq only
stocproc.method_ft - INFO - get_dt_for_accurate_interpolation, please wait ...
stocproc.method_ft - INFO - acc interp N 33 dt 6.25e-01 -> diff 1.51e-01
stocproc.method_ft - INFO - acc interp N 65 dt 3.12e-01 -> diff 3.41e-02
stocproc.method_ft - INFO - acc interp N 129 dt 1.56e-01 -> diff 7.19e-03
stocproc.method_ft - INFO - acc interp N 257 dt 7.81e-02 -> diff 1.70e-03
stocproc.method_ft - INFO - acc interp N 513 dt 3.91e-02 -> diff 4.21e-04
stocproc.method_ft - INFO - requires dt < 3.906e-02
stocproc.method_ft - INFO - get_N_a_b_for_accurate_fourier_integral, please wait ...
stocproc.method_ft - INFO - J_w_min:1.00e-02 N 32 yields: interval [0.00e+00,6.47e+00] diff 9.83e-03
stocproc.method_ft - INFO - J_w_min:1.00e-03 N 32 yields: interval [0.00e+00,9.12e+00] diff 6.55e-03
stocproc.method_ft - INFO - J_w_min:1.00e-02 N 64 yields: interval [0.00e+00,6.47e+00] diff 1.11e-02
stocproc.method_ft - INFO - increasing N while shrinking the interval does lower the error -> try next level
stocproc.method_ft - INFO - J_w_min:1.00e-04 N 32 yields: interval [0.00e+00,1.17e+01] diff 1.32e-02
stocproc.method_ft - INFO - J_w_min:1.00e-03 N 64 yields: interval [0.00e+00,9.12e+00] diff 9.90e-04
stocproc.method_ft - INFO - return, cause tol of 0.001 was reached
stocproc.method_ft - INFO - requires dx < 1.425e-01
stocproc.stocproc - INFO - Fourier Integral Boundaries: [0.000e+00, 2.166e+02]
stocproc.stocproc - INFO - Number of Nodes : 2048
stocproc.stocproc - INFO - yields dx : 1.058e-01
stocproc.stocproc - INFO - yields dt : 2.900e-02
stocproc.stocproc - INFO - yields t_max : 5.937e+01
#+end_example
The sample trajectories are smooth.
#+begin_src jupyter-python
%%space plot
ts = np.linspace(0, params.t_max, 1000)
Eta.new_process()
plt.plot(ts, Eta(ts).real)
#+end_src
#+RESULTS:
:RESULTS:
| <matplotlib.lines.Line2D | at | 0x7f70a4adb790> |
[[file:./.ob-jupyter/2203ae711b676c5126b800cfe209dd822f8a19ac.png]]
:END:
And now the thermal noise.
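Presumably (an assumption about =OFTCorr= / =OFTDens=), this process carries the finite-temperature part of the BCF,
\[
\langle \eta_{\mathrm{th}}(t)\, \eta_{\mathrm{th}}^*(s)\rangle = \frac{1}{\pi} \int_0^\infty \mathrm{d}\omega\, J(\omega)\, \bar{n}(\beta\omega)\, e^{-i\omega(t-s)},
\]
with $\bar{n}$ the Bose distribution and $\beta = 1/T$.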
#+begin_src jupyter-python
EtaTherm = StocProc_FFT(
    spectral_density=bcf.OFTDens(s=params.s, eta=1, gamma=params.wc, beta=1 / params.T),
    t_max=params.t_max,
    alpha=bcf.OFTCorr(s=params.s, eta=1, gamma=params.wc, beta=1 / params.T),
    intgr_tol=1e-3,
    intpl_tol=1e-3,
    seed=params.seed,
    negative_frequencies=False,
    scale=params.bcf_scale,
)
#+end_src
#+RESULTS:
#+begin_example
stocproc.stocproc - INFO - non neg freq only
stocproc.method_ft - INFO - get_dt_for_accurate_interpolation, please wait ...
stocproc.method_ft - INFO - acc interp N 33 dt 6.25e-01 -> diff 2.82e-02
stocproc.method_ft - INFO - acc interp N 65 dt 3.12e-01 -> diff 5.93e-03
stocproc.method_ft - INFO - acc interp N 129 dt 1.56e-01 -> diff 1.38e-03
stocproc.method_ft - INFO - acc interp N 257 dt 7.81e-02 -> diff 3.38e-04
stocproc.method_ft - INFO - requires dt < 7.812e-02
stocproc.method_ft - INFO - get_N_a_b_for_accurate_fourier_integral, please wait ...
stocproc.method_ft - INFO - J_w_min:1.00e-02 N 32 yields: interval [0.00e+00,4.10e+00] diff 1.21e-02
stocproc.method_ft - INFO - J_w_min:1.00e-03 N 32 yields: interval [0.00e+00,5.82e+00] diff 2.95e-02
stocproc.method_ft - INFO - J_w_min:1.00e-02 N 64 yields: interval [0.00e+00,4.10e+00] diff 7.88e-03
stocproc.method_ft - INFO - J_w_min:1.00e-04 N 32 yields: interval [0.00e+00,7.50e+00] diff 4.90e-02
stocproc.method_ft - INFO - J_w_min:1.00e-03 N 64 yields: interval [0.00e+00,5.82e+00] diff 6.80e-03
stocproc.method_ft - INFO - J_w_min:1.00e-02 N 128 yields: interval [0.00e+00,4.10e+00] diff 7.56e-03
stocproc.method_ft - INFO - increasing N while shrinking the interval does lower the error -> try next level
stocproc.method_ft - INFO - J_w_min:1.00e-05 N 32 yields: interval [0.00e+00,9.16e+00] diff 8.38e-02
stocproc.method_ft - INFO - J_w_min:1.00e-04 N 64 yields: interval [0.00e+00,7.50e+00] diff 1.10e-02
stocproc.method_ft - INFO - J_w_min:1.00e-03 N 128 yields: interval [0.00e+00,5.82e+00] diff 1.60e-03
stocproc.method_ft - INFO - J_w_min:1.00e-02 N 256 yields: interval [0.00e+00,4.10e+00] diff 7.48e-03
stocproc.method_ft - INFO - increasing N while shrinking the interval does lower the error -> try next level
stocproc.method_ft - INFO - J_w_min:1.00e-06 N 32 yields: interval [0.00e+00,1.08e+01] diff 1.22e-01
stocproc.method_ft - INFO - J_w_min:1.00e-05 N 64 yields: interval [0.00e+00,9.16e+00] diff 1.75e-02
stocproc.method_ft - INFO - J_w_min:1.00e-04 N 128 yields: interval [0.00e+00,7.50e+00] diff 2.67e-03
stocproc.method_ft - INFO - J_w_min:1.00e-03 N 256 yields: interval [0.00e+00,5.82e+00] diff 7.86e-04
stocproc.method_ft - INFO - return, cause tol of 0.001 was reached
stocproc.method_ft - INFO - requires dx < 2.272e-02
stocproc.stocproc - INFO - Fourier Integral Boundaries: [0.000e+00, 8.651e+01]
stocproc.stocproc - INFO - Number of Nodes : 4096
stocproc.stocproc - INFO - yields dx : 2.112e-02
stocproc.stocproc - INFO - yields dt : 7.263e-02
stocproc.stocproc - INFO - yields t_max : 2.974e+02
#+end_example
The sample trajectories are smooth too.
#+begin_src jupyter-python
%%space plot
ts = np.linspace(0, params.t_max, 1000)
EtaTherm.new_process()
plt.plot(ts, EtaTherm(ts).real)
#+end_src
#+RESULTS:
:RESULTS:
| <matplotlib.lines.Line2D | at | 0x7f70a4a507c0> |
[[file:./.ob-jupyter/99b25e82eddc6ca1ac5bfa2026136468066d1be9.png]]
:END:
* Actual HOPS
Generate the key for binary caching.
#+begin_src jupyter-python
hi_key = hierarchyData.HIMetaKey_type(
    HiP=HierachyParam,
    IntP=IntegrationParam,
    SysP=SystemParam,
    Eta=Eta,
    EtaTherm=EtaTherm,
)
#+end_src
#+RESULTS:
Initialize Hierarchy.
#+begin_src jupyter-python
myHierarchy = hierarchyLib.HI(hi_key, number_of_samples=params.N, desc="calculate the heat flow")
#+end_src
#+RESULTS:
: init Hi class, use 2002 equation
: /home/hiro/Documents/Projects/UNI/master/masterarb/python/richard_hops/hierarchyLib.py:1058: UserWarning: sum_k_max is not implemented! DO SO BEFORE NEXT USAGE (use simplex).HierarchyParametersType does not yet know about sum_k_max
: warnings.warn(
Run the integration.
#+begin_src jupyter-python
myHierarchy.integrate_simple(data_path="data", data_name="energy_flow_therm_new_again_less.data")
#+end_src
#+RESULTS:
#+begin_example
(10, 1000, 1001)
samples :0.0%
integration :0.0%
[... progress meter output truncated ...]
samples : 100%
integration :0.0%
#+end_example
Get the samples.
#+begin_src jupyter-python
# to access the data, the 'hi_key' is used to find it in the hdf5 file
class int_result:
    with hierarchyData.HIMetaData(
        hid_name="energy_flow_therm_new_again_less.data", hid_path="data"
    ) as metaData:
        with metaData.get_HIData(hi_key, read_only=True) as data:
            smp = data.get_samples()
            print("{} samples found in database".format(smp))

            τ = data.get_time()
            rho_τ = data.get_rho_t()
            # s_proc = np.array(data.stoc_proc)
            # states = np.array(data.aux_states).copy()
            ψ_1 = np.array(data.aux_states[:, :, 0 : params.num_exp_t * params.dim])
            ψ_0 = np.array(data.stoc_traj)
            y = np.array(data.y)
            # η = np.array(data.stoc_proc)
            temp_y = np.array(data.temp_y)
#+end_src
#+RESULTS:
: 1000 samples found in database
Calculate the system energy $\langle H_S\rangle(\tau) = \operatorname{Tr}[\rho(\tau)\, H_S]$.
#+begin_src jupyter-python
%matplotlib inline
energy = np.einsum("ijk,kj", int_result.rho_τ, params.H_s).real
plt.plot(int_result.τ, energy)
#+end_src
#+RESULTS:
:RESULTS:
| <matplotlib.lines.Line2D | at | 0x7f70a4918400> |
[[file:./.ob-jupyter/7cb82ec4a27feaf82773ed589e3fef5b13a8b0af.png]]
:END:
* Energy Flow
:PROPERTIES:
:ID: 9ce93da8-d323-40ec-96a2-42ba184dc963
:END:
#+begin_src jupyter-python
int_result.ψ_1.shape
#+end_src
#+RESULTS:
| 1280 | 1000 | 8 |
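The last axis has size =params.num_exp_t * params.dim= $= 4 \cdot 2 = 8$, i.e. the first-level auxiliary states; the second axis is the time grid. (The first axis exceeds =params.N=, presumably preallocated storage; only the first =params.N= trajectories are used below.)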
Let's look at the norm.
#+begin_src jupyter-python
plt.plot(int_result.τ, (int_result.ψ_0[0].conj() * int_result.ψ_0[0]).sum(axis=1).real)
#+end_src
#+RESULTS:
:RESULTS:
| <matplotlib.lines.Line2D | at | 0x7f70a48fabe0> |
[[file:./.ob-jupyter/fffb6bce0ad5b5e3fd7ae1b07c8cc3c93607fe3c.png]]
:END:
And try to calculate the energy flow.
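Reading off the code below (signs and prefactors as implemented there, with =bcf_scale= absorbed into the $g_k$), the two contributions per trajectory are
\[
j_0 = \frac{2 \operatorname{Im} \sum_k w_k g_k \langle L\psi^{(0)}, \psi^{(1)}_k\rangle}{\langle\psi^{(0)}, \psi^{(0)}\rangle},
\qquad
j_{\mathrm{therm}} = -\frac{2 \operatorname{Im}\big[\dot\eta_{\mathrm{th}}\, \langle L\psi^{(0)}, \psi^{(0)}\rangle\big]}{\langle\psi^{(0)}, \psi^{(0)}\rangle},
\]
where the $\psi^{(1)}_k$ are the first-level auxiliary states.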
#+begin_src jupyter-python
def flow_for_traj(ψ_0, ψ_1, temp_y):
    a = np.array((params.L @ ψ_0.T).T)

    EtaTherm.new_process(temp_y)
    η_dot = scipy.misc.derivative(EtaTherm, int_result.τ, dx=1e-3, order=5)

    ψ_1 = (-w * g * params.bcf_scale)[None, :, None] * ψ_1.reshape(
        params.t_steps, params.num_exp_t, params.dim
    )

    # return np.array(np.sum(ψ_0.conj() * ψ_0, axis=1)).flatten().real
    j_0 = np.array(
        2
        * (
            1j
            * (np.sum(a.conj()[:, None, :] * ψ_1, axis=(1, 2)))
            / np.sum(ψ_0.conj() * ψ_0, axis=1)
        ).real
    ).flatten()

    j_therm = np.array(
        2
        * (
            1j
            * (np.sum(a.conj() * ψ_0, axis=1))
            * η_dot
            / np.sum(ψ_0.conj() * ψ_0, axis=1)
        ).real
    ).flatten()

    return j_0, j_therm
#+end_src
#+RESULTS:
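A note on the derivative: =scipy.misc.derivative= is deprecated and removed in recent SciPy releases. A drop-in sketch that avoids it (the helper =η_dot_of= is hypothetical, not part of the original run):
#+begin_src jupyter-python
def η_dot_of(proc, ts):
    # np.gradient uses second-order central differences in the interior
    # and one-sided differences at the boundaries, so the process is
    # never evaluated outside [0, t_max]
    return np.gradient(proc(ts), ts)
#+end_src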
Now we calculate the average over all trajectories.
#+begin_src jupyter-python
class Flow:
    j_0 = np.zeros_like(int_result.τ)
    j_therm = np.zeros_like(int_result.τ)

    for i in range(0, params.N):
        dj, dj_therm = flow_for_traj(
            int_result.ψ_0[i], int_result.ψ_1[i], int_result.temp_y[i]
        )
        j_0 += dj
        j_therm += dj_therm

    j_0 /= params.N
    j_therm /= params.N
    j = j_0 + j_therm
#+end_src
#+RESULTS:
And plot it :).
#+begin_src jupyter-python
%matplotlib inline
plt.plot(int_result.τ, Flow.j_0, label=r"$j_0$")
plt.plot(int_result.τ, Flow.j_therm, label=r"$j_\mathrm{therm}$")
plt.plot(int_result.τ, Flow.j, label=r"$j$")
plt.legend()
#+end_src
#+RESULTS:
:RESULTS:
: <matplotlib.legend.Legend at 0x7f70a49ccac0>
[[file:./.ob-jupyter/e6724f82c0e439f5b7d9c0501ee930facb205943.png]]
:END:
Let's calculate the integrated bath energy $\langle H_{\mathrm{B}}\rangle(\tau) = \int_0^\tau j(\tau')\, \mathrm{d}\tau'$.
#+begin_src jupyter-python
E_t = np.array(
    [0]
    + [
        scipy.integrate.simpson(Flow.j[0:n], x=int_result.τ[0:n])
        for n in range(1, len(int_result.τ))
    ]
)
E_t[-1]
#+end_src
#+RESULTS:
: 4.387773704387552
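Recomputing a Simpson integral for every prefix is $O(N^2)$; a faster sketch (assuming trapezoidal accuracy suffices here, so the values differ slightly from Simpson):
#+begin_src jupyter-python
# cumulative integral of the flow in a single O(N) pass
E_t_fast = scipy.integrate.cumulative_trapezoid(Flow.j, int_result.τ, initial=0)
#+end_src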
With this we can retrieve the energy of the interaction Hamiltonian via energy conservation: the total energy starts at zero (since $H_S \psi_0 = 0$), so $\langle H_{\mathrm{I}}\rangle = -\langle H_{\mathrm{S}}\rangle - \langle H_{\mathrm{B}}\rangle$.
#+begin_src jupyter-python
E_I = - energy - E_t
#+end_src
#+RESULTS:
#+begin_src jupyter-python
%%space plot
#plt.plot(τ, j, label="$J$", linestyle='--')
plt.plot(int_result.τ, E_t, label=r"$\langle H_{\mathrm{B}}\rangle$")
plt.plot(int_result.τ, E_I, label=r"$\langle H_{\mathrm{I}}\rangle$")
plt.plot(int_result.τ, energy, label=r"$\langle H_{\mathrm{S}}\rangle$")
plt.xlabel("τ")
plt.legend()
plt.show()
#+end_src
#+RESULTS:
:RESULTS:
| <matplotlib.lines.Line2D | at | 0x7f70a47fbaf0> |
| <matplotlib.lines.Line2D | at | 0x7f70a47fbee0> |
| <matplotlib.lines.Line2D | at | 0x7f70a47862e0> |
: Text(0.5, 0, 'τ')
: <matplotlib.legend.Legend at 0x7f70a47fbdf0>
[[file:./.ob-jupyter/34fdaa07b2b35134bac0341834aced72d448d1c1.png]]
:END:
* System + Interaction Energy
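Reading off the code below (with =bcf_scale= again absorbed into the $g_k$), the per-trajectory estimator is
\[
\langle H_S + H_I \rangle \approx \frac{\langle\psi^{(0)}, H_S\, \psi^{(0)}\rangle + 2 \operatorname{Im}\big[\sum_k g_k \langle L\psi^{(0)}, \psi^{(1)}_k\rangle + \eta_{\mathrm{th}}\, \langle L\psi^{(0)}, \psi^{(0)}\rangle\big]}{\langle\psi^{(0)}, \psi^{(0)}\rangle}.
\]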
#+begin_src jupyter-python
def h_si_for_traj(ψ_0, ψ_1, temp_y):
    a = np.array((params.L @ ψ_0.T).T)
    b = np.array((params.H_s @ ψ_0.T).T)

    ψ_1 = (g * params.bcf_scale)[None, :, None] * ψ_1.reshape(
        params.t_steps, params.num_exp_t, params.dim
    )

    EtaTherm.new_process(temp_y)

    E_i = np.array(
        2
        * (
            -1j
            * np.sum(
                a.conj()[:, None, :] * ψ_1,
                axis=(1, 2),
            )
        ).real
    ).flatten()

    E_i += np.array(
        2
        * (
            -1j
            * EtaTherm(int_result.τ)
            * np.sum(
                a.conj() * ψ_0,
                axis=1,
            )
        ).real
    ).flatten()

    E_s = np.array(np.sum(b.conj() * ψ_0, axis=1)).flatten().real

    return (E_i + E_s) / np.sum(ψ_0.conj() * ψ_0, axis=1).real
#+end_src
#+RESULTS:
#+begin_src jupyter-python
e_si = np.zeros_like(int_result.τ)

for i in range(0, params.N):
    e_si += h_si_for_traj(int_result.ψ_0[i], int_result.ψ_1[i], int_result.temp_y[i])

e_si /= params.N
#+end_src
#+RESULTS:
This doesn't work out: the direct estimate disagrees with $\langle H_{\mathrm{I}}\rangle + \langle H_{\mathrm{S}}\rangle$ obtained from the flow.
#+begin_src jupyter-python
plt.plot(int_result.τ, e_si, label=r"direct")
plt.plot(int_result.τ, E_I + energy, label=r"$\langle H_{\mathrm{I}}\rangle + \langle H_{\mathrm{S}}\rangle$")
plt.legend()
#+end_src
#+RESULTS:
:RESULTS:
: <matplotlib.legend.Legend at 0x7f70a45fddc0>
[[file:./.ob-jupyter/cbf733338ec8702f4c097598ed286cfc60811a0c.png]]
:END: