#+PROPERTY: header-args :exports both :output-dir results :session xs :kernel python3
#+HTML_HEAD: <link rel="stylesheet" href="tufte.css" />
#+OPTIONS: html-style:nil
#+HTML_CONTAINER: section
#+TITLE: Investigation of Monte-Carlo Methods
#+AUTHOR: Valentin Boettcher
* Init
** Required Modules
#+NAME: e988e3f2-ad1f-49a3-ad60-bedba3863283
#+begin_src jupyter-python :exports both :tangle tangled/xs.py
import numpy as np
import matplotlib.pyplot as plt
import monte_carlo
#+end_src

#+RESULTS: e988e3f2-ad1f-49a3-ad60-bedba3863283

** Utilities
#+NAME: 53548778-a4c1-461a-9b1f-0f401df12b08
#+BEGIN_SRC jupyter-python :exports both
%run ../utility.py
%load_ext autoreload
%aimport monte_carlo
%autoreload 1
#+END_SRC

#+RESULTS: 53548778-a4c1-461a-9b1f-0f401df12b08
: The autoreload extension is already loaded. To reload it, use:
: %reload_ext autoreload
* Implementation

#+NAME: 777a013b-6c20-44bd-b58b-6a7690c21c0e
#+BEGIN_SRC jupyter-python :exports both :results raw drawer :exports code :tangle tangled/xs.py
"""
Implementation of the analytical cross section for q q_bar ->
gamma gamma

Author: Valentin Boettcher <hiro@protagon.space>
"""

import numpy as np


# NOTE: a more elegant solution would be a decorator
def energy_factor(charge, esp):
    """
    Calculates the factor common to all other values in this module.

    Arguments:
    esp -- center of momentum energy in GeV
    charge -- charge of the particle in units of the elementary charge
    """

    return charge**4/(137.036*esp)**2/6


def diff_xs(θ, charge, esp):
    """
    Calculates the differential cross section as a function of the
    polar angle θ in units of 1/GeV².

    Here dΩ=sinθdθdφ.

    Arguments:
    θ -- polar angle
    esp -- center of momentum energy in GeV
    charge -- charge of the particle in units of the elementary charge
    """

    f = energy_factor(charge, esp)
    return f*((np.cos(θ)**2+1)/np.sin(θ)**2)


def diff_xs_cosθ(cosθ, charge, esp):
    """
    Calculates the differential cross section as a function of the
    cosine of the polar angle θ in units of 1/GeV².

    Here dΩ=d(cosθ)dφ.

    Arguments:
    cosθ -- cosine of the polar angle
    esp -- center of momentum energy in GeV
    charge -- charge of the particle in units of the elementary charge
    """

    f = energy_factor(charge, esp)
    return f*((cosθ**2+1)/(1-cosθ**2))


def diff_xs_eta(η, charge, esp):
    """
    Calculates the differential cross section as a function of the
    pseudo rapidity of the photons in units of 1/GeV².

    This is actually the cross section dσ/(dφdη).

    Arguments:
    η -- pseudo rapidity
    esp -- center of momentum energy in GeV
    charge -- charge of the particle in units of the elementary charge
    """

    f = energy_factor(charge, esp)
    return f*(np.tanh(η)**2 + 1)


def diff_xs_p_t(p_t, charge, esp):
    """
    Calculates the differential cross section as a function of the
    transverse momentum (p_t) of the photons in units of 1/GeV².

    This is actually the cross section dσ/(dφdp_t).

    Arguments:
    p_t -- transverse momentum in GeV
    esp -- center of momentum energy in GeV
    charge -- charge of the particle in units of the elementary charge
    """

    f = energy_factor(charge, esp)
    sqrt_fact = np.sqrt(1-(2*p_t/esp)**2)
    return f/p_t*(1/sqrt_fact + sqrt_fact)


def total_xs_eta(η, charge, esp):
    """
    Calculates the total cross section as a function of the pseudo
    rapidity of the photons in units of 1/GeV². If the rapidity is
    specified as a tuple, it is interpreted as an interval. Otherwise
    the interval [-η, η] will be used.

    Arguments:
    η -- pseudo rapidity (tuple or number)
    esp -- center of momentum energy in GeV
    charge -- charge of the particle in units of the elementary charge
    """

    f = energy_factor(charge, esp)
    if not isinstance(η, tuple):
        η = (-η, η)

    if len(η) != 2:
        raise ValueError('Invalid η cut.')

    def F(x):
        return np.tanh(x) - 2*x

    return 2*np.pi*f*(F(η[0]) - F(η[1]))
#+END_SRC

#+RESULTS: 777a013b-6c20-44bd-b58b-6a7690c21c0e
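As a quick cross-check (not part of the module above), the closed form in ~total_xs_eta~
can be compared against a direct numerical quadrature of ~diff_xs_eta~. The sketch below
assumes =scipy= is available and is purely illustrative, so it is not evaluated as part
of this session.
#+begin_src python :eval no
# Sketch only: cross-check total_xs_eta against numerical quadrature.
# Both values are in 1/GeV² and should agree to quadrature precision.
from scipy.integrate import quad


def total_xs_eta_quad(η_max, charge, esp):
    # the φ integration is trivial and just yields a factor of 2π
    integral, _ = quad(lambda η: diff_xs_eta(η, charge, esp), -η_max, η_max)
    return 2*np.pi*integral


total_xs_eta_quad(2.5, 1/3, 200), total_xs_eta(2.5, 1/3, 200)
#+end_src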
* Calculations
First, set up the input parameters.
#+BEGIN_SRC jupyter-python :exports both :results raw drawer
η = 2.5
charge = 1/3
esp = 200  # GeV
#+END_SRC

#+RESULTS:

Set up the integration and plot intervals.
#+begin_src jupyter-python :exports both :results raw drawer
interval_η = [-η, η]
interval = η_to_θ([-η, η])
interval_cosθ = np.cos(interval)
interval_pt = np.sort(η_to_pt([0, η], esp/2))
plot_interval = [0.1, np.pi-.1]
#+end_src

#+RESULTS:
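The helpers ~η_to_θ~ and ~η_to_pt~ come from ~../utility.py~, which is not shown in this
file. They are assumed to implement the standard massless-particle relations
θ = 2·arctan(e^(-η)) and p_t = p/cosh(η); a sketch of that assumption:
#+begin_src python :eval no
# Sketch of what ../utility.py is assumed to provide (not the actual file).
import numpy as np


def η_to_θ(η):
    # η = -ln(tan(θ/2))  ⇒  θ = 2·arctan(exp(-η))
    return 2*np.arctan(np.exp(-np.asarray(η)))


def η_to_pt(η, p):
    # massless particle with momentum p: p_t = p·sin(θ) = p/cosh(η)
    return p/np.cosh(np.asarray(η))
#+end_src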
#+begin_note
Note that we could utilize the symmetry of the integrand throughout,
but that doesn't reduce the variance and would complicate things for
now.
#+end_note
** Analytical Integration
And now calculate the cross section in picobarn.
#+BEGIN_SRC jupyter-python :exports both :results raw file :file xs.tex
xs_gev = total_xs_eta(η, charge, esp)
xs_pb = gev_to_pb(xs_gev)
tex_value(xs_pb, unit=r'\pico\barn', prefix=r'\sigma = ',
          prec=6, save=('results', 'xs.tex'))
#+END_SRC

#+RESULTS:
: \(\sigma = \SI{0.053793}{\pico\barn}\)
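Here ~gev_to_pb~ (also from ~../utility.py~, not shown) converts from natural units to
picobarn. It presumably just multiplies by the usual (ħc)² factor, roughly like this
sketch:
#+begin_src python :eval no
# Sketch of the assumed conversion:
# (ħc)² ≈ 0.3894 GeV²·mbarn and 1 mbarn = 10⁹ pb, so 1 GeV⁻² ≈ 3.894e8 pb.
def gev_to_pb(xs_gev):
    return xs_gev*3.894e8
#+end_src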
Let's plot the total cross section as a function of η.
#+begin_src jupyter-python :exports both :results raw drawer
fig, ax = set_up_plot()
η_s = np.linspace(0, 3, 1000)
ax.plot(η_s, gev_to_pb(total_xs_eta(η_s, charge, esp)))
ax.set_xlabel(r'$\eta$')
ax.set_ylabel(r'$\sigma$ [pb]')
ax.set_xlim([0, max(η_s)])
ax.set_ylim(0)
save_fig(fig, 'total_xs', 'xs', size=[2.5, 2])
#+end_src

#+RESULTS:
[[file:./.ob-jupyter/ac3b02ec166e0df1271348dbacacc2316d67028e.png]]

Compared to Sherpa, it's pretty close.
#+NAME: 81b5ed93-0312-45dc-beec-e2ba92e22626
#+BEGIN_SRC jupyter-python :exports both :results raw drawer
sherpa = 0.05380
xs_pb - sherpa
#+END_SRC

#+RESULTS: 81b5ed93-0312-45dc-beec-e2ba92e22626
: -6.7112594623469635e-06

I had to set the runcard option ~EW_SCHEME: alpha0~ to use the pure
QED coupling constant.
** Numerical Integration
Plot our nice distribution:
#+begin_src jupyter-python :exports both :results raw drawer
plot_points = np.linspace(*plot_interval, 1000)

fig, ax = set_up_plot()
ax.plot(plot_points, gev_to_pb(diff_xs(plot_points, charge=charge, esp=esp)))
ax.set_xlabel(r'$\theta$')
ax.set_ylabel(r'$d\sigma/d\Omega$ [pb]')
ax.axvline(interval[0], color='gray', linestyle='--')
ax.axvline(interval[1], color='gray', linestyle='--', label=rf'$|\eta|={η}$')
ax.legend()
save_fig(fig, 'diff_xs', 'xs', size=[2.5, 2])
#+end_src

#+RESULTS:
[[file:./.ob-jupyter/c9b71617a1208604b29d73e4f0197610e7cfe99d.png]]

Define the integrand.
#+begin_src jupyter-python :exports both :results raw drawer
def xs_pb_int(θ):
    return 2*np.pi*gev_to_pb(np.sin(θ)*diff_xs(θ, charge=charge, esp=esp))


def xs_pb_int_η(η):
    return 2*np.pi*gev_to_pb(diff_xs_eta(η, charge, esp))
#+end_src

#+RESULTS:

Plot the integrand. # TODO: remove duplication
#+begin_src jupyter-python :exports both :results raw drawer
fig, ax = set_up_plot()
ax.plot(plot_points, xs_pb_int(plot_points))
ax.set_xlabel(r'$\theta$')
ax.set_ylabel(r'$\sin(\theta)\cdot\frac{d\sigma}{d\Omega}$ [pb]')
ax.set_xlim([plot_points.min(), plot_points.max()])
ax.axvline(interval[0], color='gray', linestyle='--')
ax.axvline(interval[1], color='gray', linestyle='--', label=rf'$|\eta|={η}$')
ax.legend()
save_fig(fig, 'xs_integrand', 'xs', size=[4, 4])
#+end_src

#+RESULTS:
[[file:./.ob-jupyter/e0ff5a888d8b509a7651c44d8a1216595b725ba9.png]]
*** Integral over θ
Integrate σ with the plain MC method.
#+begin_src jupyter-python :exports both :results raw drawer
xs_pb_mc, xs_pb_mc_err = monte_carlo.integrate(xs_pb_int, interval, 1000)
xs_pb_mc, xs_pb_mc_err
#+end_src

#+RESULTS:
| 0.053382020808528406 | 0.0008336167380952699 |
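~monte_carlo.integrate~ is our own module, so only its assumed behaviour is sketched
here: a plain mean-value Monte Carlo estimate on the interval, returning the value and
the standard error of the mean. A minimal, hypothetical stand-in:
#+begin_src python :eval no
# Hypothetical stand-in for the mean-value Monte Carlo integration.
import numpy as np


def mc_integrate(f, interval, num_points=1000):
    a, b = interval
    x = np.random.uniform(a, b, num_points)
    y = f(x)
    volume = b - a
    integral = volume*y.mean()
    error = volume*y.std(ddof=1)/np.sqrt(num_points)  # standard error of the mean
    return integral, error
#+end_src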
We're going to export that as TeX.
#+begin_src jupyter-python :exports both :results raw drawer
tex_value(xs_pb_mc, unit=r'\pico\barn', prefix=r'\sigma = ', err=xs_pb_mc_err, save=('results', 'xs_mc.tex'))
#+end_src

#+RESULTS:
: \(\sigma = \SI{0.0534\pm 0.0008}{\pico\barn}\)
*** Integration over η
Plot the integrand over the pseudo rapidity.
#+begin_src jupyter-python :exports both :results raw drawer
fig, ax = set_up_plot()
points = np.linspace(*interval_η, 1000)
ax.plot(points, xs_pb_int_η(points))
ax.set_xlabel(r'$\eta$')
ax.set_ylabel(r'$\frac{d\sigma}{d\eta}$ [pb]')
save_fig(fig, 'xs_integrand_η', 'xs', size=[4, 4])
#+end_src

#+RESULTS:
[[file:./.ob-jupyter/92b46e2139d3c345ee37be28aa256105a6c926c5.png]]
#+begin_src jupyter-python :exports both :results raw drawer
xs_pb_η = monte_carlo.integrate(xs_pb_int_η,
                                interval_η, 1000)
xs_pb_η
#+end_src

#+RESULTS:
| 0.053779012613814334 | 0.0001579614357163564 |

As we see, the result is a little better if we use the pseudo
rapidity, because the differential cross section no longer diverges.
But because our η interval covers the very range where most of the
variance occurs, the improvement is rather marginal.
And yet again we export that as TeX.
#+begin_src jupyter-python :exports both :results raw drawer
tex_value(*xs_pb_η, unit=r'\pico\barn', prefix=r'\sigma = ',
          save=('results', 'xs_mc_eta.tex'))
#+end_src

#+RESULTS:
: \(\sigma = \SI{0.05378\pm 0.00016}{\pico\barn}\)
*** Using =VEGAS=
Now we use =VEGAS= on the θ parametrisation and see what happens.
#+begin_src jupyter-python :exports both :results raw drawer
xs_pb_vegas, xs_pb_vegas_σ, xs_θ_intervals = \
    monte_carlo.integrate_vegas(xs_pb_int, interval,
                                num_increments=20, alpha=4,
                                point_density=1000, acumulate=True)
xs_pb_vegas, xs_pb_vegas_σ
#+end_src

#+RESULTS:
| 0.053731811180199644 | 6.830920893509916e-05 |
This is pretty good, although the variance reduction may be achieved
partially by accumulating the results from all runs. The uncertainty
is then being overestimated!

And export that as TeX.
#+begin_src jupyter-python :exports both :results raw drawer
tex_value(xs_pb_vegas, xs_pb_vegas_σ, unit=r'\pico\barn',
          prefix=r'\sigma = ', save=('results', 'xs_mc_θ_vegas.tex'))
#+end_src

#+RESULTS:
: \(\sigma = \SI{0.05373\pm 0.00007}{\pico\barn}\)

Surprisingly, without accumulation the result isn't much different.
This depends, of course, on the iteration count.
#+begin_src jupyter-python :exports both :results raw drawer
monte_carlo.integrate_vegas(xs_pb_int, interval, num_increments=20,
                            alpha=4, point_density=1000,
                            acumulate=False)[0:2]
#+end_src

#+RESULTS:
| 0.053772195195401516 | 7.447829565707063e-05 |
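The =VEGAS= routine itself lives in our ~monte_carlo~ module and is not reproduced here.
The essential one-dimensional idea is sketched below (a simplified illustration, not the
module's code): estimate each increment's contribution to ∫|f| and move the borders so
that all increments contribute equally, which makes the sampling density mimic |f|.
#+begin_src python :eval no
# Simplified illustration of one VEGAS adaptation step in one dimension.
import numpy as np


def adapt_increments_once(f, borders, points_per_increment=1000):
    # estimate each increment's share of ∫|f| with a few uniform points
    contributions = []
    for a, b in zip(borders[:-1], borders[1:]):
        x = np.random.uniform(a, b, points_per_increment)
        contributions.append(np.abs(f(x)).mean()*(b - a))

    cumulative = np.concatenate([[0], np.cumsum(contributions)])
    # place the new borders at equal fractions of the total contribution
    targets = np.linspace(0, cumulative[-1], len(borders))
    return np.interp(targets, cumulative, borders)
#+end_src
Iterating such a step (with some damping, controlled by ~alpha~ in our module) produces
increment borders like the ones used below for sampling.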
*** Testing the Statistics
Let's battle-test the statistics.
#+begin_src jupyter-python :exports both :results raw drawer
num_runs = 1000
num_within = 0

for _ in range(num_runs):
    val, err = monte_carlo.integrate(xs_pb_int_η, interval_η, 1000)
    if abs(xs_pb - val) <= err:
        num_within += 1

num_within/num_runs
#+end_src

#+RESULTS:
: 0.665

So we see: the standard deviation is sound. Roughly 68% of the runs
land within one standard deviation of the analytical value, just as
expected for a normally distributed estimator.
Doing the same thing with =VEGAS= works as well.
#+begin_src jupyter-python :exports both :results raw drawer
num_runs = 1000
num_within = 0
for _ in range(num_runs):
    val, err, _ = \
        monte_carlo.integrate_vegas(xs_pb_int, interval,
                                    num_increments=8, alpha=1,
                                    point_density=1000, acumulate=False)

    if abs(xs_pb - val) <= err:
        num_within += 1
num_within/num_runs
#+end_src

#+RESULTS:
: 0.685
** Sampling and Analysis
Define the sample number.
#+begin_src jupyter-python :exports both :results raw drawer
sample_num = 1000
#+end_src

#+RESULTS:

Let's define shortcuts for our distributions. The factors of 2π are
just there for formal correctness; constant factors do not influence
the sampling outcome.
#+begin_src jupyter-python :exports both :results raw drawer
def dist_cosθ(x):
    return gev_to_pb(diff_xs_cosθ(x, charge, esp))*2*np.pi


def dist_η(x):
    return gev_to_pb(diff_xs_eta(x, charge, esp))*2*np.pi
#+end_src

#+RESULTS:
*** Sampling the cosθ cross section

Now we Monte Carlo sample our distribution, and we observe that the
efficiency is very bad!
#+begin_src jupyter-python :exports both :results raw drawer
cosθ_sample, cosθ_efficiency = \
    monte_carlo.sample_unweighted_array(sample_num, dist_cosθ,
                                        interval_cosθ, report_efficiency=True)
cosθ_efficiency
#+end_src

#+RESULTS:
: 0.027366713967159943
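Presumably ~sample_unweighted_array~ does plain hit-or-miss sampling here: propose x
uniformly on the interval and accept it with probability f(x)/f_max. The efficiency is
then the acceptance rate, roughly ⟨f⟩/f_max, which is tiny for such a sharply peaked
distribution. A hypothetical sketch of that logic (not the module's actual code):
#+begin_src python :eval no
# Hypothetical sketch of hit-or-miss (rejection) sampling with a flat envelope.
import numpy as np


def sample_hit_or_miss(num, f, interval, f_max):
    a, b = interval
    samples, trials = [], 0
    while len(samples) < num:
        x = np.random.uniform(a, b)
        trials += 1
        if np.random.uniform(0, f_max) <= f(x):
            samples.append(x)
    return np.array(samples), num/trials  # efficiency = accepted/proposed
#+end_src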
Our distribution has a lot of variance, as can be seen by plotting it.
#+begin_src jupyter-python :exports both :results raw drawer
pts = np.linspace(*interval_cosθ, 100)
fig, ax = set_up_plot()
ax.plot(pts, dist_cosθ(pts))
ax.set_xlabel(r'$\cos\theta$')
ax.set_ylabel(r'$\frac{d\sigma}{d\Omega}$')
#+end_src

#+RESULTS:
:RESULTS:
: Text(0, 0.5, '$\\frac{d\\sigma}{d\\Omega}$')
[[file:./.ob-jupyter/c94d996aaf4307b7b2d810f95c45a2fea297e99a.png]]
:END:
We define a friendly and easy to integrate upper limit function.
#+begin_src jupyter-python :exports both :results raw drawer
upper_limit = dist_cosθ(interval_cosθ[0]) \
    /interval_cosθ[0]**2
upper_base = dist_cosθ(0)


def upper(x):
    return upper_base + upper_limit*x**2


def upper_int(x):
    return upper_base*x + upper_limit*x**3/3


ax.plot(pts, upper(pts), label='Upper bound')
ax.legend()
ax.set_xlabel(r'$\cos\theta$')
ax.set_ylabel(r'$\frac{d\sigma}{d\Omega}$')
save_fig(fig, 'upper_bound', 'xs_sampling', size=(4, 4))
fig
#+end_src

#+RESULTS:
[[file:./.ob-jupyter/25410dc62d8f53bb26a96887adc2f78b7426615d.png]]
To increase our efficiency, we have to specify an upper bound. That
is at least a little bit better. The numeric inversion is horribly
inefficient.
#+begin_src jupyter-python :exports both :results raw drawer
cosθ_sample, cosθ_efficiency = \
    monte_carlo.sample_unweighted_array(sample_num, dist_cosθ,
                                        interval_cosθ, report_efficiency=True,
                                        upper_bound=[upper, upper_int])
cosθ_efficiency
#+end_src

#+RESULTS:
: 0.08113723900263531
<<cosθ-bare-eff>>
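With ~upper_bound=[upper, upper_int]~ the sampler can presumably use ~upper~ as a
non-uniform envelope: draw x from the envelope by inverting its antiderivative
~upper_int~, then accept with probability dist(x)/upper(x). The repeated numerical root
finding is what makes this slow in practice, even though the acceptance rate improves.
A sketch of the idea, assuming =scipy= for the inversion:
#+begin_src python :eval no
# Sketch: rejection sampling below the quadratic envelope `upper`,
# drawing from the envelope by numerically inverting its antiderivative.
import numpy as np
from scipy.optimize import brentq


def sample_with_envelope(num, f, upper, upper_int, interval):
    a, b = interval
    norm = upper_int(b) - upper_int(a)
    samples, trials = [], 0
    while len(samples) < num:
        u = np.random.uniform(0, norm)
        # invert the envelope CDF: find x with upper_int(x) - upper_int(a) = u
        x = brentq(lambda t: upper_int(t) - upper_int(a) - u, a, b)
        trials += 1
        if np.random.uniform(0, upper(x)) <= f(x):
            samples.append(x)
    return np.array(samples), num/trials
#+end_src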
Nice! And now draw some histograms.

We define an auxiliary method for convenience.
#+begin_src jupyter-python :exports both :results raw drawer :tangle tangled/plot_utils.py
"""
Some shorthands for common plotting tasks related to the investigation
of monte-carlo methods in one dimension.

Author: Valentin Boettcher <hiro at protagon.space>
"""

import numpy as np
import matplotlib.pyplot as plt


def draw_histo(points, xlabel, bins=20):
    heights, edges = np.histogram(points, bins)
    centers = (edges[1:] + edges[:-1])/2
    deviations = np.sqrt(heights)

    fig, ax = set_up_plot()
    ax.errorbar(centers, heights, deviations, linestyle='none', color='orange')
    ax.step(edges, [heights[0], *heights], color='#1f77b4')

    ax.set_xlabel(xlabel)
    ax.set_xlim([points.min(), points.max()])
    return fig, ax
#+end_src

#+RESULTS:
The histogram for cosθ.
#+begin_src jupyter-python :exports both :results raw drawer
fig, _ = draw_histo(cosθ_sample, r'$\cos\theta$')
save_fig(fig, 'histo_cos_theta', 'xs', size=(4,3))
#+end_src

#+RESULTS:
[[file:./.ob-jupyter/3e8bc82501001f331aac198031a022a99e173885.png]]
*** Observables
Now we define some utilities to draw real 4-momentum samples.
#+begin_src jupyter-python :exports both :tangle tangled/xs.py
def sample_momenta(sample_num, interval, charge, esp, seed=None):
    """Samples `sample_num` unweighted photon 4-momenta from the
    cross-section.

    :param sample_num: number of samples to take
    :param interval: cosθ interval to sample from
    :param charge: the charge of the quark
    :param esp: center of mass energy
    :param seed: the seed for the rng, optional, default is system
        time

    :returns: an array of photon 4-momenta

    :rtype: np.ndarray
    """

    cosθ_sample = \
        monte_carlo.sample_unweighted_array(sample_num,
                                            lambda x:
                                            diff_xs_cosθ(x, charge, esp),
                                            interval_cosθ)
    φ_sample = np.random.uniform(0, 1, sample_num)

    def make_momentum(esp, cosθ, φ):
        sinθ = np.sqrt(1-cosθ**2)
        return np.array([1, sinθ*np.cos(φ), sinθ*np.sin(φ), cosθ])*esp/2

    momenta = np.array([make_momentum(esp, cosθ, φ)
                        for cosθ, φ in np.array([cosθ_sample, φ_sample]).T])
    return momenta
#+end_src

#+RESULTS:
To generate histograms of other observables, we have to define them
as functions on 4-momenta. Using those to transform samples is
analogous to transforming the distribution itself.
#+begin_src jupyter-python :session obs :exports both :results raw drawer :tangle tangled/observables.py
"""This module defines some observables on arrays of 4-momenta."""
import numpy as np


def p_t(p):
    """Transverse momentum

    :param p: array of 4-momenta
    """

    return np.linalg.norm(p[:,1:3], axis=1)


def η(p):
    """Pseudo rapidity.

    :param p: array of 4-momenta
    """

    return np.arccosh(np.linalg.norm(p[:,1:], axis=1)/p_t(p))*np.sign(p[:, 3])
#+end_src

#+RESULTS:
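The ~arccosh~ form of the pseudo rapidity is equivalent to the perhaps more familiar
η = -ln tan(θ/2). A small, self-contained check of that equivalence on a few massless
test momenta (illustration only):
#+begin_src python :eval no
# Illustration: the arccosh form of η agrees with η = -ln(tan(θ/2)).
import numpy as np

cosθ = np.random.uniform(-1, 1, 5)
φ = np.random.uniform(0, 2*np.pi, 5)
sinθ = np.sqrt(1 - cosθ**2)
p = 100*np.column_stack([np.ones(5), sinθ*np.cos(φ), sinθ*np.sin(φ), cosθ])

η_arccosh = np.arccosh(np.linalg.norm(p[:, 1:], axis=1)
                       /np.linalg.norm(p[:, 1:3], axis=1))*np.sign(p[:, 3])
η_log = -np.log(np.tan(np.arccos(cosθ)/2))

np.allclose(η_arccosh, η_log)  # → True
#+end_src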
And import them.
#+begin_src jupyter-python :exports both :results raw drawer
%aimport tangled.observables
obs = tangled.observables
#+end_src

#+RESULTS:
Let's try it out.
#+begin_src jupyter-python :exports both :results raw drawer
momentum_sample = sample_momenta(2000, interval_cosθ, charge, esp)
momentum_sample
#+end_src

#+RESULTS:
: array([[100.        ,  34.68438203,  28.21611696, -89.44743924],
:        [100.        ,  47.30213877,  35.31219787,  80.71899621],
:        [100.        ,  92.82148885,  36.57576574,   6.81062174],
:        ...,
:        [100.        ,  57.30653234,  73.69684619,  35.8432171 ],
:        [100.        ,  18.00393271,   5.53427576,  98.21013287],
:        [100.        ,  71.66331811,  15.42119249, -68.01878902]])
Now let's make a histogram of the η distribution.
#+begin_src jupyter-python :exports both :results raw drawer
η_sample = obs.η(momentum_sample)
draw_histo(η_sample, r'$\eta$')
#+end_src

#+RESULTS:
:RESULTS:
| <Figure | size | 432x288 | with | 1 | Axes> | <matplotlib.axes._subplots.AxesSubplot | at | 0x7f9f4de34160> |
[[file:./.ob-jupyter/a96cd332697229119cf01ee425f1d20d5cee0fb2.png]]
:END:
And the same for the p_t (transverse momentum) distribution.
#+begin_src jupyter-python :exports both :results raw drawer
p_t_sample = obs.p_t(momentum_sample)
draw_histo(p_t_sample, r'$p_T$ [GeV]')
#+end_src

#+RESULTS:
:RESULTS:
| <Figure | size | 432x288 | with | 1 | Axes> | <matplotlib.axes._subplots.AxesSubplot | at | 0x7f9f4d5b49a0> |
[[file:./.ob-jupyter/f179f03ce3c0797a0323f636828c23f1b68091bc.png]]
:END:
That looks somewhat fishy, but it isn't.
#+begin_src jupyter-python :exports both :results raw drawer
fig, ax = set_up_plot()
points = np.linspace(interval_pt[0], interval_pt[1] - .01, 1000)
ax.plot(points, gev_to_pb(diff_xs_p_t(points, charge, esp)))
ax.set_xlabel(r'$p_T$')
ax.set_xlim(interval_pt[0], interval_pt[1] + 1)
ax.set_ylim([0, gev_to_pb(diff_xs_p_t(interval_pt[1] -.01, charge, esp))])
ax.set_ylabel(r'$\frac{d\sigma}{dp_t}$ [pb]')
save_fig(fig, 'diff_xs_p_t', 'xs_sampling', size=[4, 3])
#+end_src

#+RESULTS:
[[file:./.ob-jupyter/ec6b410ca8dd86985504bac4e801bf8336414887.png]]
The distribution is strongly peaked at p_t = 100 GeV, where the
Jacobian d(cosθ)/dp_t diverges.
*** Sampling the η cross section
And again we see that the efficiency is way, way better...
#+begin_src jupyter-python :exports both :results raw drawer
η_sample, η_efficiency = \
    monte_carlo.sample_unweighted_array(sample_num, dist_η,
                                        interval_η, report_efficiency=True)
η_efficiency
#+end_src

#+RESULTS:
: 0.4066
<<η-eff>>
Let's draw a histogram to compare with the previous results.
#+begin_src jupyter-python :exports both :results raw drawer
draw_histo(η_sample, r'$\eta$')
#+end_src

#+RESULTS:
:RESULTS:
| <Figure | size | 432x288 | with | 1 | Axes> | <matplotlib.axes._subplots.AxesSubplot | at | 0x7f9f4cd48b50> |
[[file:./.ob-jupyter/5a168895d1bab706b21f7aa0476cee30dc96bc82.png]]
:END:
Looks good to me :).
*** Sampling with =VEGAS=
Let's define some little helpers.
#+begin_src jupyter-python :exports both :tangle tangled/plot_utils.py
def plot_increments(ax, increment_borders, label=None, *args, **kwargs):
    """Plot the increment borders from a list. The first and last one
    are not drawn.

    :param ax: the axis on which to draw
    :param list increment_borders: the borders of the increments
    :param str label: the label to apply to one of the vertical lines
    """

    ax.axvline(x=increment_borders[1], label=label, *args, **kwargs)

    for increment in increment_borders[2:-1]:
        ax.axvline(x=increment, *args, **kwargs)


def plot_vegas_weighted_distribution(ax, points, dist,
                                     increment_borders, *args, **kwargs):
    """Plot the distribution with VEGAS weights applied.

    :param ax: axis
    :param points: points
    :param dist: distribution
    :param increment_borders: increment borders
    """

    # there is one more border than there are increments
    num_increments = increment_borders.size - 1
    weighted_dist = dist.copy()

    for left_border, right_border in zip(increment_borders[:-1],
                                         increment_borders[1:]):
        length = right_border - left_border
        mask = (left_border <= points) & (points <= right_border)
        weighted_dist[mask] = dist[mask]*num_increments*length

    ax.plot(points, weighted_dist, *args, **kwargs)
#+end_src

#+RESULTS:
To get the increments, we have to let =VEGAS= loose on our
distribution. We throw away the integral, but keep the increments.

#+begin_src jupyter-python :exports both :results raw drawer
_, _, increments = monte_carlo.integrate_vegas(dist_cosθ,
                                               interval_cosθ,
                                               num_increments=10, alpha=1,
                                               epsilon=.01)
increments
#+end_src

#+RESULTS:
: array([-0.9866143 , -0.9568834 , -0.89880566, -0.7731981 , -0.50508996,
:         0.00428964,  0.51249399,  0.7775964 ,  0.90011062,  0.95706595,
:         0.9866143 ])
Visualizing the increment borders gives us the information we want.
#+begin_src jupyter-python :exports both :results raw drawer
pts = np.linspace(*interval_cosθ, 100)
fig, ax = set_up_plot()
ax.plot(pts, dist_cosθ(pts))
ax.set_xlabel(r'$\cos\theta$')
ax.set_ylabel(r'$\frac{d\sigma}{d\Omega}$')
ax.set_xlim(*interval_cosθ)
plot_increments(ax, increments,
                label='Increment Borders', color='gray', linestyle='--')
ax.legend()
#+end_src

#+RESULTS:
:RESULTS:
: <matplotlib.legend.Legend at 0x7f9f4cbe39d0>
[[file:./.ob-jupyter/2f93c795fd70016e122cf92c324d03a5cf06a327.png]]
:END:
We can now plot the reweighted distribution to observe the variance
reduction visually.

#+begin_src jupyter-python :exports both :results raw drawer
pts = np.linspace(*interval_cosθ, 1000)
fig, ax = set_up_plot()
plot_vegas_weighted_distribution(ax, pts, dist_cosθ(pts), increments)
ax.set_xlabel(r'$\cos\theta$')
ax.set_ylabel(r'$\frac{d\sigma}{d\Omega}$')
ax.set_xlim(*interval_cosθ)
plot_increments(ax, increments,
                label='Increment Borders', color='gray', linestyle='--')
ax.legend()
#+end_src

#+RESULTS:
:RESULTS:
: <matplotlib.legend.Legend at 0x7f9f4cb14850>
[[file:./.ob-jupyter/c982484453c2c20fd1b6c2aa2bf4fde70ae77725.png]]
:END:
I am batman!

Now, draw a sample and look at the efficiency.
#+begin_src jupyter-python :exports both :results raw drawer
cosθ_sample_strat, cosθ_efficiency_strat = \
    monte_carlo.sample_unweighted_array(sample_num, dist_cosθ,
                                        increment_borders=increments,
                                        report_efficiency=True)
cosθ_efficiency_strat
#+end_src

#+RESULTS:
: 0.3732
If we compare that to [[cosθ-bare-eff]], we can see the improvement :P.
It is even better than [[η-eff]]. The histogram looks just the same.
#+begin_src jupyter-python :exports both :results raw drawer
fig, _ = draw_histo(cosθ_sample_strat, r'$\cos\theta$')
save_fig(fig, 'histo_cos_theta_strat', 'xs', size=(4,3))
#+end_src

#+RESULTS:
[[file:./.ob-jupyter/1225a52ec975a00ef7e758afcf7726ba9cf41b5e.png]]
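For reference, here is a sketch of how sampling with ~increment_borders~ presumably
reaches this efficiency: increments are chosen with equal probability (that is exactly
how =VEGAS= placed them), and hit-or-miss is then performed against the maximum of the
reweighted, nearly flat distribution f(x)·N·Δᵢ shown in the plot above. This is an
illustration of the idea, not the actual module code.
#+begin_src python :eval no
# Illustrative sketch: unweighted sampling using VEGAS increment borders.
import numpy as np


def sample_unweighted_vegas(num, f, increment_borders):
    lefts, rights = increment_borders[:-1], increment_borders[1:]
    lengths = rights - lefts
    n_inc = len(lengths)

    # the reweighted integrand f(x)·N·Δᵢ is nearly flat, so hit-or-miss
    # against its global maximum wastes far fewer proposals
    def weighted(x, i):
        return f(x)*n_inc*lengths[i]

    w_max = max(weighted(np.linspace(a, b, 200), i).max()
                for i, (a, b) in enumerate(zip(lefts, rights)))

    samples, trials = [], 0
    while len(samples) < num:
        i = np.random.randint(n_inc)                # increments are equiprobable
        x = np.random.uniform(lefts[i], rights[i])  # uniform inside the increment
        trials += 1
        if np.random.uniform(0, w_max) <= weighted(x, i):
            samples.append(x)
    return np.array(samples), num/trials
#+end_src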