Mirror of https://github.com/vale981/bachelor_thesis, synced 2025-03-04 09:01:38 -05:00

Commit 610c8de8a6 (parent 6bdc125136): various updates
20 changed files with 1186 additions and 1204 deletions
@@ -183,10 +183,10 @@ Best regards, Frank
** TODO shorten lab xs
** TODO adjust shower scale
** TODO effect of shower and kperp
** TODO y-axis a.u.!
** TODO draw in mean, var
** TODO Variance of vegas weighted f!
** TODO look at xs plot -> they seem different
** DONE y-axis a.u.!
** DONE draw in mean, var
** DONE Variance of vegas weighted f!
** DONE look at xs plot -> they seem different
** TODO take new sample: still bias?

* Observations
Binary file not shown. (Before: 43 KiB)
Binary file not shown. (Before: 9.2 KiB)
Binary file not shown. (Before: 12 KiB)
Binary file not shown. (Before: 41 KiB)
@@ -756,6 +756,18 @@ We define an auxiliary method for convenience.
import matplotlib.gridspec as gridspec


def test_compatibility(hist_1, hist_2, confidence=1):
    mask = hist_1 > hist_2
    comp_greater = hist_1[mask] - confidence * np.sqrt(hist_1[mask]) <= hist_2[
        mask
    ] + confidence * np.sqrt(hist_2[mask])
    comp_lower = hist_1[~mask] + confidence * np.sqrt(hist_1[~mask]) >= hist_2[
        ~mask
    ] - confidence * np.sqrt(hist_2[~mask])

    return (np.count_nonzero(comp_greater) + np.count_nonzero(comp_lower)) / len(hist_1)


def draw_ratio_plot(histograms, normalize_to=1, **kwargs):
    fig, (ax_hist, ax_ratio) = set_up_plot(
        2, 1, sharex=True, gridspec_kw=dict(height_ratios=[3, 1], hspace=0), **kwargs

@@ -774,6 +786,8 @@ We define an auxiliary method for convenience.
            if "hist" in histogram
            else np.histogram(histogram["samples"], bins=edges)
        )
        print(test_compatibility(heights, histograms[0]["hist"][0]))

        integral = hist_integral([heights, edges])
        errors = np.sqrt(heights) / integral
        heights = heights / integral

@@ -832,7 +846,7 @@ We define an auxiliary method for convenience.
    errorbars=True,
    hist_kwargs=dict(color="#1f77b4"),
    errorbar_kwargs=dict(),
    autoau=True,
    autoau=True,
    normalize_to=None,
):
    """Draws a histogram with optional errorbars using the step style.
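For orientation, here is a minimal sketch of how ~test_compatibility~ could be
exercised on two raw (unnormalized) histograms; the sample data, seed and bin
count are invented for illustration and are not part of this commit.

#+begin_src jupyter-python :exports both :results raw drawer
import numpy as np

# Two histograms of the same observable, filled from independent samples.
rng = np.random.default_rng(0)
counts_a, edges = np.histogram(rng.exponential(1.0, 10_000), bins=20)
counts_b, _ = np.histogram(rng.exponential(1.0, 10_000), bins=edges)

# Fraction of bins whose Poissonian (sqrt(N)) bands overlap at the given
# confidence; values close to 1 indicate statistically compatible histograms.
test_compatibility(counts_a, counts_b, confidence=1)
#+end_src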
Binary file not shown.
Binary file not shown.
File diff suppressed because it is too large.
Binary file not shown.
Binary file not shown.
@@ -1061,15 +1061,14 @@ def sample_stratified_vector(
    total_accepted = 0

    cubes = [cube for cube in cubes if cube[2] > 0]  # filter out cubes with zero weight
    # print([integrate(f, cube).result for cube, _, _ in cubes])
    weights = np.array([weight for _, _, weight in cubes])
    integral = weights.sum()
    weights = np.cumsum(weights / integral)

    maxima = np.array(
        [
            find_upper_bound_vector(
                f, cube[0], cube[1][1:], tolerance=overestimate_factor - 1
            )
            find_upper_bound_vector(f, cube[0], cube[1][1:], tolerance=0.01)
            for cube in cubes
        ]
    )
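The cumulative array stored back into ~weights~ above is what makes drawing
cubes in proportion to their weight cheap later on. As a sketch (the function
name and the use of ~np.searchsorted~ are illustrative assumptions, not code
from this commit), a cube index can then be selected in logarithmic time:

#+begin_src jupyter-python :exports both :results raw drawer
import numpy as np

# Illustrative only: `weights` is the cumulative sum of the normalized cube
# weights (ending at 1), so inverting a uniform random number through it
# picks each cube with probability proportional to its weight.
def choose_cube(weights, rng=np.random.default_rng()):
    return int(np.searchsorted(weights, rng.uniform()))
#+end_src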
@@ -27,6 +27,8 @@ from tangled import observables
#+END_SRC

#+RESULTS:
: The autoreload extension is already loaded. To reload it, use:
: %reload_ext autoreload

** Global Config
#+begin_src jupyter-python :exports both :results raw drawer
@@ -691,7 +693,7 @@ Let's plot how the pdf looks.

Overestimating the upper bounds helps with bias.
#+begin_src jupyter-python :exports both :results raw drawer
overestimate = 1.0
overestimate = 1.1
tex_value(
    (overestimate - 1) * 100,
    unit=r"\percent",

@@ -701,12 +703,7 @@ Overestimating the upper bounds helps with bias.
#+end_src

#+RESULTS:
:RESULTS:
: \(\SI{0}{\percent}\)
[[file:./.ob-jupyter/542b03d025920448ba653b470ec6492cbdd1e4a7.png]]
[[file:./.ob-jupyter/d47db0dde9ae59979f271a7cba8dfc46be3f1dd3.png]]
[[file:./.ob-jupyter/7fe9d3bd60427cf20af835649efbcbaafefbb3e0.png]]
:END:
: \(\SI{10}{\percent}\)
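To make the bias mechanism concrete: in hit-or-miss sampling, an upper bound
that is locally too small clips the peaks of the target density, so inflating
it by a safety factor trades some efficiency for correctness. A toy sketch,
with an invented density and function names that are not the thesis code:

#+begin_src jupyter-python :exports both :results raw drawer
import numpy as np

# Toy rejection sampler on [0, 1]: if `maximum` underestimates max(f), the
# regions where f(x) > maximum are sampled too rarely and the result is
# biased; a factor `overestimate` > 1 protects against this.
def hit_or_miss(f, maximum, overestimate=1.1, size=1000, seed=0):
    rng = np.random.default_rng(seed)
    bound = maximum * overestimate
    samples = []
    while len(samples) < size:
        x = rng.uniform()
        if rng.uniform(0, bound) <= f(x):
            samples.append(x)
    return np.array(samples)
#+end_src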
Now we sample some events. Doing this in parallel helps. We let the OS
figure out the CPU mapping.
@@ -718,7 +715,7 @@ figure out the CPU mapping.
    cubes=xs_int_res.cubes,
    proc="auto",
    report_efficiency=True,
    cache="cache/pdf/total_xs_10000_000_2_5_take11",
    cache="cache/pdf/total_xs_10000_000_2_5_take12",
    status_path="/tmp/status1",
    overestimate_factor=overestimate,
)
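Presumably ~proc="auto"~ spawns one worker per available core; the sampling
function itself is not visible in this hunk. A rough standard-library sketch
of the pattern (the chunking and worker signature are illustrative assumptions):

#+begin_src jupyter-python :exports both :results raw drawer
import os
from multiprocessing import Pool

import numpy as np

# Illustrative: split the requested event count into one chunk per process
# and let the OS schedule the workers onto cores.
def sample_parallel(sample_chunk, total, processes=None):
    processes = processes or os.cpu_count()
    base, rest = divmod(total, processes)
    chunk_sizes = [base + (1 if i < rest else 0) for i in range(processes)]
    with Pool(processes) as pool:
        return np.concatenate(pool.map(sample_chunk, chunk_sizes))
#+end_src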
@@ -786,7 +783,11 @@ And now we compare all the observables with Sherpa.
#+end_src

#+RESULTS:
[[file:./.ob-jupyter/800bffd38987eae852b74eb2483698169c08c4de.png]]
:RESULTS:
: 1.0
: 0.78
[[file:./.ob-jupyter/1b1562da892b4caff29588dde88a04cc3256ae35.png]]
:END:

Hah! There we have it!
@@ -817,7 +818,11 @@ both equal.
#+end_src

#+RESULTS:
:RESULTS:
: 1.0
: 0.86
[[file:./.ob-jupyter/45cc8fbfa523c8faf704505d716ebc299a1a44fd.png]]
:END:

The invariant mass is not constant anymore.
#+begin_src jupyter-python :exports both :results raw drawer
@@ -860,7 +865,11 @@ The cosθ distribution looks more like the partonic one.
#+end_src

#+RESULTS:
:RESULTS:
: 1.0
: 0.82
[[file:./.ob-jupyter/d834978baba81dba0e2f052f1965b62ae736c151.png]]
:END:


#+begin_src jupyter-python :exports both :results raw drawer
@@ -882,7 +891,11 @@ The cosθ distribution looks more like the partonic one.
#+end_src

#+RESULTS:
:RESULTS:
: 1.0
: 0.9
[[file:./.ob-jupyter/22100d74fc1e07aa9ac75e22dd38ce49eaad89d4.png]]
:END:

In this case the opening angles are the same because the CS frame is
the same as the ordinary rest frame. The z-axis is the beam axis
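For reference, one common definition of the Collins-Soper angle (quoted from
standard usage as an assumption; the diff itself does not spell it out), with
light-cone components \(p_i^{\pm} = (E_i \pm p_{z,i})/\sqrt{2}\) of the two photons:

\[
  \cos\theta^{*}_{\mathrm{CS}}
  = \frac{2\,\bigl(p_1^{+} p_2^{-} - p_1^{-} p_2^{+}\bigr)}
         {m_{\gamma\gamma}\sqrt{m_{\gamma\gamma}^{2} + p_{\mathrm{T},\gamma\gamma}^{2}}}
\]

For \(p_{\mathrm{T},\gamma\gamma} = 0\) this reduces to the polar angle in the
\(\gamma\gamma\) rest frame with the \(z\)-axis along the beam, which is the
situation described above.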
@@ -1 +1 @@
\(\SI{0}{\percent}\)
\(\SI{10}{\percent}\)
@@ -90,6 +90,18 @@ def plot_stratified_rho(ax, points, increment_borders, *args, **kwargs):
import matplotlib.gridspec as gridspec


def test_compatibility(hist_1, hist_2, confidence=1):
    mask = hist_1 > hist_2
    comp_greater = hist_1[mask] - confidence * np.sqrt(hist_1[mask]) <= hist_2[
        mask
    ] + confidence * np.sqrt(hist_2[mask])
    comp_lower = hist_1[~mask] + confidence * np.sqrt(hist_1[~mask]) >= hist_2[
        ~mask
    ] - confidence * np.sqrt(hist_2[~mask])

    return (np.count_nonzero(comp_greater) + np.count_nonzero(comp_lower)) / len(hist_1)


def draw_ratio_plot(histograms, normalize_to=1, **kwargs):
    fig, (ax_hist, ax_ratio) = set_up_plot(
        2, 1, sharex=True, gridspec_kw=dict(height_ratios=[3, 1], hspace=0), **kwargs

@@ -108,6 +120,8 @@ def draw_ratio_plot(histograms, normalize_to=1, **kwargs):
            if "hist" in histogram
            else np.histogram(histogram["samples"], bins=edges)
        )
        print(test_compatibility(heights, histograms[0]["hist"][0]))

        integral = hist_integral([heights, edges])
        errors = np.sqrt(heights) / integral
        heights = heights / integral

@@ -166,7 +180,7 @@ def draw_histogram(
    errorbars=True,
    hist_kwargs=dict(color="#1f77b4"),
    errorbar_kwargs=dict(),
    autoau=True,
    autoau=True,
    normalize_to=None,
):
    """Draws a histogram with optional errorbars using the step style.
@@ -23,7 +23,7 @@

(selector){
  PseudoRapidity 22 -3 3;
  PT 22 2 200000000;
  PT 22 5 200000000;
}(selector)

(analysis){
@@ -18,7 +18,7 @@ OUT_DIR = out
RES_DIR = results

# Event Count
EVENT_COUNT = 10000000
EVENT_COUNT = 1000

# List of Runcards, see RUNCARD_FOLDER
RUNCARDS =\
@@ -50,7 +50,7 @@ Title=Azimuthal angle between the photons
XLabel=$\pi-\Delta\phi_{\gamma\gamma}$ [rad]
YLabel=$\mathrm{d}\sigma/\mathrm{d}\Delta\phi_{\gamma\gamma}$ [pb rad$^{-1}$]
LogY=0
LogX=1
LogX=0
# END PLOT

# BEGIN PLOT /MC_DIPHOTON_PROTON/o_angle
@@ -68,7 +68,7 @@ LogY=0
# END PLOT

# BEGIN PLOT /MC_DIPHOTON_PROTON/total_pT
Title=total $\pT$
Title=$\pT$ of the $\gamma\gamma$ system
XLabel=$\pT$
YLabel=$\mathrm{d}\sigma/\mathrm{d}\pT_{\gamma\gamma}$ [pb GeV$^{-1}$]
LogY=1
@@ -29,7 +29,7 @@ BEAM_REMNANTS 1;

(selector){
  PseudoRapidity 22 -3 3;
  PT 22 2 200000000;
  PT 22 5 200000000;
}(selector)

(analysis){
sherpa
@@ -1 +1 @@
Subproject commit e4cf1cd24ef048ed99a4447b51c3c76e1116cdf2
Subproject commit 75b7ac47eb8c0f592660419dad5e87678d5db789