import pyhf
import pyhf.contrib.utils
import json
import numpy as np
# Fetch the published ATLAS 1Lbb likelihoods from HEPData into a local folder.
record_url = "https://doi.org/10.17182/hepdata.90607.v3/r3"
pyhf.contrib.utils.download(record_url, "1Lbb-likelihoods")
# simplified.json was created with https://github.com/eschanet/simplify:
#   simplify convert < BkgOnly.json > simplified.json
# Load the simplified background-only spec and the signal-point patchset.
with open("1Lbb-likelihoods/simplified.json") as f:
    spec = json.load(f)
with open("1Lbb-likelihoods/patchset.json") as f:
    patchset = pyhf.PatchSet(json.load(f))

# Channel names in the spec (REPL output kept as a comment for reference):
#   [c["name"] for c in spec["channels"]]
#   ['WREM_cuts', 'STCREM_cuts', 'TRHMEM_cuts', 'TRMMEM_cuts', 'TRLMEM_cuts',
#    'SRHMEM_mct2', 'SRMMEM_mct2', 'SRLMEM_mct2']

# Signal-region channels whose yields are extracted below.
extract_channels = ["SRLMEM_mct2", "SRMMEM_mct2", "SRHMEM_mct2"]
def nosys(patch):
    """Return a copy of *patch* whose signal sample carries no systematics.

    Every JSON-patch operation in *patch* is rewritten so that its path
    ends in ``/1`` (the signal-sample slot of the channel) and its value
    keeps only the sample ``name``/``data`` plus a single bare ``mu_Sig``
    normfactor modifier.
    """
    simplified_ops = []
    for op in patch:
        # Drop the last path component and re-point the op at sample index 1.
        parent_path = "/".join(op["path"].split("/")[:-1])
        simplified_ops.append(
            {
                "op": op["op"],
                "path": parent_path + "/1",
                "value": {
                    "data": op["value"]["data"],
                    "name": op["value"]["name"],
                    "modifiers": [
                        {'data': None, 'name': 'mu_Sig', 'type': 'normfactor'}
                    ],
                },
            }
        )
    return pyhf.patchset.Patch(dict(metadata=patch.metadata, patch=simplified_ops))
# Apply the first signal patch (with systematics stripped) so the spec has
# its full channel structure, then collect the background yields.
patched = nosys(patchset.patches[0]).apply(spec)
channel_dict = {c["name"] : c for c in patched["channels"]}

# Background expectation: concatenate the bin yields of sample 0 (the 'Bkg'
# sample — see the sample-name listing below) across the three signal regions.
b = []
for channel in extract_channels:
    b += channel_dict[channel]["samples"][0]["data"]

# REPL output kept as comments for reference:
#   b
#   [13.00659260848957, 9.84251410934949, 6.668866664075592, 3.84335063984653,
#    5.928258939016435, 1.837121080628339, 6.4399309680624, 4.616131198108342,
#    1.6039841463924573]
#   [s["name"] for s in patched["channels"][0]["samples"]]
#   ['Bkg', 'C1N2_Wh_hbb_1000_0']
# Extract the signal yields for every signal point in the patchset.
signals = []
for patch in patchset:
    patched = nosys(patch).apply(spec)
    channel_dict = {c["name"] : c for c in patched["channels"]}
    s = []
    for channel in extract_channels:
        samples = channel_dict[channel]["samples"]
        if len(samples) > 1:
            # Signal sample present in this region: take its bin yields.
            s += samples[1]["data"]
        else:
            # Patch added no signal here: pad with zeros, one per bin of the
            # background sample (previously hard-coded as 3 bins per channel).
            s += [0] * len(samples[0]["data"])
    # patch.values holds the patchset parameter values (the mass point)
    # identifying this signal — presumably (x, y) masses; see patchset metadata.
    signals.append({"x" : patch.values[0], "y" : patch.values[1], "data" : s})
# Persist the extracted yields for downstream use.
with open("example_signals.json", "w") as signal_file:
    json.dump(signals, signal_file, indent=4)
with open("example_background.json", "w") as background_file:
    json.dump(b, background_file, indent=4)