Better compatibility through greater accuracy
@@ -489,14 +489,14 @@ draw_histogram(ax, part_hist)
 Now, it would be interesting to know the total cross section.
 So let's define the increments for VEGAS.
 #+begin_src jupyter-python :exports both :results raw drawer
-increments = np.array([3, 100, 100])
+increments = np.array([4, 100, 100])
 tex_value(
     np.prod(increments), prefix=r"K=", prec=0, save=("results/pdf/", "num_increments.tex")
 )
 #+end_src

 #+RESULTS:
-: \(K=30000\)
+: \(K=40000\)

 And calculate the XS.
 #+begin_src jupyter-python :exports both :results raw drawer
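
For orientation: =increments= sets how many grid subdivisions each of the three integration axes (η, x₁, x₂) gets, and the exported \(K\) is simply their product. A minimal sketch of that bookkeeping, with illustrative interval bounds (the real ones, =interval_η= and =pdf.xMin=/=pdf.xMax=, are defined elsewhere in the document); this is a generic VEGAS-style illustration, not the =monte_carlo= module's actual internals:

#+begin_src jupyter-python
import numpy as np

increments = np.array([4, 100, 100])  # subdivisions per axis: (η, x1, x2)
K = int(np.prod(increments))          # total number of hypercubes: 40000

# Illustrative bounds; the document uses interval_η and [pdf.xMin, pdf.xMax].
intervals = [(-2.5, 2.5), (1e-9, 1.0), (1e-9, 1.0)]

# Uniform starting grid: one array of bin edges per axis.  VEGAS then
# deforms these edges so that each bin carries roughly equal weight.
grid = [np.linspace(lo, hi, n + 1) for (lo, hi), n in zip(intervals, increments)]
#+end_src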
@@ -511,14 +511,14 @@ And calculate the XS.

 xs_int_res = monte_carlo.integrate_vegas_nd(
     dist_η_vec,
-    [interval_η, [pdf.xMin, 1], [pdf.xMin, 1]],
-    epsilon=1e-11,
+    [interval_η, [pdf.xMin, pdf.xMax], [pdf.xMin, pdf.xMax]],
+    epsilon=1e-11/2,
     proc=1,
     increment_epsilon=.02,
     alpha=1.8,
     num_increments=increments,
     num_points_per_cube=10,
-    cache="cache/pdf/total_xs_2_5_20_take20",
+    cache="cache/pdf/total_xs_2_5_20_take22",
 )

 total_xs = gev_to_pb(np.array(xs_int_res.combined_result)) * 2 * np.pi
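
A note on the units here: in natural units the integral comes out in GeV⁻², and \(1\,\mathrm{GeV}^{-2} = (\hbar c)^2 \approx 3.894\times 10^{8}\,\mathrm{pb}\), which is all =gev_to_pb= has to apply; the factor \(2\pi\) presumably restores the trivially integrated azimuthal angle. A hedged sketch of such a helper (the project's own =gev_to_pb= may differ in shape; the constant is standard):

#+begin_src jupyter-python
import numpy as np

# 1 GeV^-2 = (hbar c)^2 = 0.3893793721 mbarn = 3.893793721e8 pb
GEV_TO_PB = 3.893793721e8

def gev_to_pb(xs):
    """Convert cross sections (and their errors) from GeV^-2 to picobarn."""
    return np.asarray(xs) * GEV_TO_PB
#+end_src

As a sanity check, the tightened target precision =epsilon=1e-11/2= (in GeV⁻²) corresponds to \(0.5\times 10^{-11}\cdot 3.894\times 10^{8}\cdot 2\pi \approx 0.012\,\mathrm{pb}\), consistent with the \(\pm 0.009\,\mathrm{pb}\) quoted below.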
@@ -528,7 +528,7 @@ And calculate the XS.
 #+RESULTS:
 :RESULTS:
 : Loading Cache: integrate_vegas_nd
-: array([3.86911687e+01, 1.96651183e-02])
+: array([3.86891167e+01, 9.39243388e-03])
 :END:

 #+begin_src jupyter-python :exports both :results raw drawer
@@ -547,7 +547,7 @@ there are two identical protons.
 #+end_src

 #+RESULTS:
-: -0.0012964079498659457
+: 0.006924228292849407

 The efficiency will be around:
 #+begin_src jupyter-python :exports both :results raw drawer
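
The scalar printed in the hunk above looks like the relative deviation of the calculated cross section from a reference value; back-solving with \(\sigma \approx 38.689\,\mathrm{pb}\), a deviation of 0.0069 corresponds to a reference near 38.42 pb. A hypothetical reconstruction of that comparison (the reference number is inferred, not taken from the document):

#+begin_src jupyter-python
# Hypothetical: relative deviation of our result from a reference value.
sigma_calc = 38.6891167  # pb, from the VEGAS integration above
sigma_ref = 38.423       # pb, inferred for illustration only
rel_dev = (sigma_calc - sigma_ref) / sigma_ref  # ≈ 0.0069
#+end_src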
@@ -555,7 +555,7 @@ The efficiency will be around:
 #+end_src

 #+RESULTS:
-: 32.89204347314658
+: 36.68699853098365

 Let's export those results for TeX:
 #+begin_src jupyter-python :exports both :results raw drawer
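
For context, the percentage is the predicted unweighting efficiency: in hit-or-miss sampling a point \(x\) is kept with probability \(f(x)/f_\mathrm{max}\), so the acceptance rate converges to \(\langle f\rangle/f_\mathrm{max}\). A generic illustration of that estimate (not the =monte_carlo= module's actual routine):

#+begin_src jupyter-python
import numpy as np

def predicted_efficiency(f, samples, f_max):
    """Estimate the hit-or-miss acceptance rate <f>/f_max from samples."""
    return np.mean(f(samples)) / f_max

# Toy density on [0, 1]:
rng = np.random.default_rng(0)
f = lambda x: 1 + np.sin(10 * x) ** 2
eff = predicted_efficiency(f, rng.uniform(0, 1, 100_000), f_max=2.0)  # ≈ 0.74
#+end_src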
@@ -691,7 +691,7 @@ Let's plot how the pdf looks.

 Overestimating the upper bounds helps with bias.
 #+begin_src jupyter-python :exports both :results raw drawer
-overestimate = 1.1
+overestimate = 1.0
 tex_value(
     (overestimate - 1) * 100,
     unit=r"\percent",
@@ -702,7 +702,7 @@ Overestimating the upper bounds helps with bias.

 #+RESULTS:
 :RESULTS:
-: \(\SI{10}{\percent}\)
-[[file:./.ob-jupyter/542b03d025920448ba653b470ec6492cbdd1e4a7.png]]
+: \(\SI{0}{\percent}\)
+[[file:./.ob-jupyter/d47db0dde9ae59979f271a7cba8dfc46be3f1dd3.png]]
 [[file:./.ob-jupyter/7fe9d3bd60427cf20af835649efbcbaafefbb3e0.png]]
 :END:
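
Why the overestimate mattered: in hit-or-miss unweighting, any point where \(f(x)\) exceeds the assumed maximum is accepted with probability one instead of \(f(x)/f_\mathrm{max}\), so an underestimated bound silently flattens the peaks of the distribution. Inflating the bound by a safety factor trades efficiency for correctness; setting it to 1.0 presumably reflects that the finer grid now estimates the maxima well enough, in line with the title of this change. A generic sketch (not the project's sampler):

#+begin_src jupyter-python
import numpy as np

def accept(f_x, f_max, rng, overestimate_factor=1.0):
    """Hit-or-miss acceptance against an inflated upper bound.

    If f_max underestimates max(f), points with f_x > f_max are accepted
    with probability 1 and the sample is biased; a factor > 1 guards
    against that at the price of a lower acceptance rate.
    """
    return rng.uniform(0, f_max * overestimate_factor) <= f_x
#+end_src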
@@ -718,7 +718,7 @@ figure out the cpu mapping.
     cubes=xs_int_res.cubes,
     proc="auto",
     report_efficiency=True,
-    cache="cache/pdf/total_xs_10000_000_2_5_take8",
+    cache="cache/pdf/total_xs_10000_000_2_5_take11",
     status_path="/tmp/status1",
     overestimate_factor=overestimate,
 )
@@ -728,7 +728,7 @@ figure out the cpu mapping.
 #+RESULTS:
 :RESULTS:
 : Loading Cache: sample_unweighted_array
-: 0.29610040880251154
+: 0.3615754237122427
 :END:

 That does look pretty good, eh? So let's save it along with the sample size.
@@ -744,7 +744,7 @@ That does look pretty good, eh? So let's save it along with the sample size.
 #+end_src

 #+RESULTS:
-: \(\mathfrak{e}=\SI{30}{\percent}\)
+: \(\mathfrak{e}=\SI{36}{\percent}\)

 ** Observables
 Let's look at a histogram of eta samples.
@@ -757,7 +757,7 @@ Let's look at a histogram of eta samples.
 #+RESULTS:
 :RESULTS:
 : 10000000
-[[file:./.ob-jupyter/0b1b4f39201dac86ebfbfb8953561cfe81a6c70f.png]]
+[[file:./.ob-jupyter/764bd95aedbef68e7709a84780df593399e347a4.png]]
 :END:

 Let's use a uniform histogram image size.
@@ -786,7 +786,7 @@ And now we compare all the observables with Sherpa.
 #+end_src

 #+RESULTS:
-[[file:./.ob-jupyter/a32ee488f4357426e3acecb1a5baaeddc367ee9b.png]]
+[[file:./.ob-jupyter/800bffd38987eae852b74eb2483698169c08c4de.png]]

 Hah! There we have it!

@@ -817,11 +817,10 @@ both equal.
 #+end_src

 #+RESULTS:
-[[file:./.ob-jupyter/deeb122e09b948ea416db671bfe1838aedeae84a.png]]
+[[file:./.ob-jupyter/45cc8fbfa523c8faf704505d716ebc299a1a44fd.png]]

 The invariant mass is not constant anymore.
 #+begin_src jupyter-python :exports both :results raw drawer
-bins = np.logspace(*np.log10([2 * min_pT, 2 * e_proton]), 51)
 yoda_hist_inv_m = yoda_to_numpy(yoda_file["/MC_DIPHOTON_PROTON/inv_m"])

 fig, (ax, ax_ratio) = draw_ratio_plot(
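
The deleted =bins= line had built 50 log-uniform bins spanning the kinematic range of the diphoton invariant mass, from \(2\,p_T^\mathrm{min}\) up to \(2\,E_\mathrm{proton}\); presumably the binning is now taken from the YODA histogram itself, so that both curves share edges. For reference, what the removed line computed (the numerical values here are illustrative stand-ins):

#+begin_src jupyter-python
import numpy as np

min_pT, e_proton = 20.0, 6500.0  # GeV; illustrative stand-ins
# 51 edges -> 50 logarithmically spaced bins over the kinematic range.
bins = np.logspace(*np.log10([2 * min_pT, 2 * e_proton]), 51)
#+end_src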
@@ -842,7 +841,7 @@ The invariant mass is not constant anymore.
 #+end_src

 #+RESULTS:
-[[file:./.ob-jupyter/a7708226adeb9782b68ffb10003c280d8dd19ef2.png]]
+[[file:./.ob-jupyter/9ad48ff715bb6445bf5a0c515e4a4f41593146fe.png]]

 The cosθ distribution looks more like the partonic one.
 #+begin_src jupyter-python :exports both :results raw drawer
@@ -861,7 +860,7 @@ The cosθ distribution looks more like the partonic one.
 #+end_src

 #+RESULTS:
-[[file:./.ob-jupyter/5bf3849e8196878ba4585a09e496e852b9969866.png]]
+[[file:./.ob-jupyter/d834978baba81dba0e2f052f1965b62ae736c151.png]]


 #+begin_src jupyter-python :exports both :results raw drawer
@@ -883,7 +882,7 @@ The cosθ distribution looks more like the partonic one.
 #+end_src

 #+RESULTS:
-[[file:./.ob-jupyter/cd50dc4eb341d959871bf4288ff3620556a53970.png]]
+[[file:./.ob-jupyter/22100d74fc1e07aa9ac75e22dd38ce49eaad89d4.png]]

 In this case the opening angles are the same because the CS frame is
 the same as the ordinary rest frame. The z-axis is the beam axis
@@ -907,4 +906,4 @@ because pT=0!
 #+end_src

 #+RESULTS:
-[[file:./.ob-jupyter/9ae6d76460a0d99d6e64cd85c8d0b712984b93d6.png]]
+[[file:./.ob-jupyter/28abf9fcf1840bc31a5819d380a8eff0c2ef3e43.png]]
@@ -1 +1 @@
-\(\sigma = \SI{38.691\pm 0.020}{\pico\barn}\)
+\(\sigma = \SI{38.689\pm 0.009}{\pico\barn}\)

@@ -1 +1 @@
-\(K=30000\)
+\(K=40000\)

@@ -1 +1 @@
-\(\SI{10}{\percent}\)
+\(\SI{0}{\percent}\)

@@ -1 +1 @@
-\(\mathfrak{e}=\SI{30}{\percent}\)
+\(\mathfrak{e}=\SI{36}{\percent}\)
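
The four one-line hunks above are the TeX snippets exported with =tex_value=. A hedged reconstruction of such a helper, consistent with the calls and outputs seen in this document (the real implementation lives in the project's utilities, and the \(\pm\)-formatting used for \(\sigma\) is omitted here):

#+begin_src jupyter-python
import os

def tex_value(value, prefix="", unit=None, prec=2, save=None):
    """Format a number as a TeX/siunitx snippet and optionally save it."""
    num = f"{value:.{prec}f}"
    body = rf"\SI{{{num}}}{{{unit}}}" if unit else num
    snippet = rf"\({prefix}{body}\)"
    if save is not None:
        directory, name = save
        os.makedirs(directory, exist_ok=True)
        with open(os.path.join(directory, name), "w") as fd:
            fd.write(snippet)
    return snippet

# tex_value(40000, prefix=r"K=", prec=0) -> "\(K=40000\)"
#+end_src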