run 2 updates #22

Merged: 6 commits, Jun 11, 2024
99 changes: 99 additions & 0 deletions hbt/calibration/fake_triggers.py
@@ -0,0 +1,99 @@
# coding: utf-8

"""
Placeholder calibrator to produce missing single trigger columns.
Does not include default calibrations.
"""

from columnflow.calibration import Calibrator, calibrator
from columnflow.util import maybe_import
from columnflow.columnar_util import set_ak_column

np = maybe_import("numpy")
ak = maybe_import("awkward")


@calibrator(
    uses={
        "Electron.pt", "Electron.eta", "Muon.pt", "Muon.eta", "Tau.pt",
    },
    produces={
        "HLT_Ele25_eta2p1_WPTight_Gsf", "HLT_IsoMu22", "HLT_IsoMu22_eta2p1", "HLT_IsoTkMu22", "HLT_IsoTkMu22_eta2p1",
        "HLT_Ele24_eta2p1_WPLoose_Gsf_LooseIsoPFTau30", "HLT_Ele24_eta2p1_WPLoose_Gsf_LooseIsoPFTau20",
        "HLT_Ele24_eta2p1_WPLoose_Gsf_LooseIsoPFTau20_SingleL1",
    },
)
def fake_triggers(self: Calibrator, events: ak.Array, **kwargs) -> ak.Array:

    HLT_Ele25_eta2p1_WPTight_Gsf = ak.any(
        (events.Electron.pt >= 25.5) &
        (events.Electron.eta <= 2.1),
        axis=1,
    )

    HLT_IsoMu22 = ak.any(
        (events.Muon.pt >= 22.5),
        axis=1,
    )

    HLT_IsoMu22_eta2p1 = ak.any(
        (events.Muon.pt >= 22.5) &
        (events.Muon.eta <= 2.1),
        axis=1,
    )

    HLT_Ele24_eta2p1_WPLoose_Gsf_LooseIsoPFTau30 = (
        ak.any(
            (events.Electron.pt >= 24.5) &
            (events.Electron.eta <= 2.1),
            axis=1,
        ) & ak.any(
            (events.Tau.pt >= 30.5),
            axis=1,
        )
    )

    HLT_Ele24_eta2p1_WPLoose_Gsf_LooseIsoPFTau20 = (
        ak.any(
            (events.Electron.pt >= 24.5) &
            (events.Electron.eta <= 2.1),
            axis=1,
        ) & ak.any(
            (events.Tau.pt >= 20.5),
            axis=1,
        )
    )

    HLT_Ele24_eta2p1_WPLoose_Gsf_LooseIsoPFTau20_SingleL1 = (
        ak.any(
            (events.Electron.pt >= 24.5) &
            (events.Electron.eta <= 2.1),
            axis=1,
        ) & ak.any(
            (events.Tau.pt >= 20.5),
            axis=1,
        )
    )

    events = set_ak_column(events, "HLT_Ele25_eta2p1_WPTight_Gsf", HLT_Ele25_eta2p1_WPTight_Gsf)
    events = set_ak_column(events, "HLT_IsoMu22", HLT_IsoMu22)
    events = set_ak_column(events, "HLT_IsoMu22_eta2p1", HLT_IsoMu22_eta2p1)
    events = set_ak_column(events, "HLT_IsoTkMu22", HLT_IsoMu22)
    events = set_ak_column(events, "HLT_IsoTkMu22_eta2p1", HLT_IsoMu22_eta2p1)
    events = set_ak_column(
        events,
        "HLT_Ele24_eta2p1_WPLoose_Gsf_LooseIsoPFTau30",
        HLT_Ele24_eta2p1_WPLoose_Gsf_LooseIsoPFTau30,
    )
    events = set_ak_column(
        events,
        "HLT_Ele24_eta2p1_WPLoose_Gsf_LooseIsoPFTau20",
        HLT_Ele24_eta2p1_WPLoose_Gsf_LooseIsoPFTau20,
    )
    events = set_ak_column(
        events,
        "HLT_Ele24_eta2p1_WPLoose_Gsf_LooseIsoPFTau20_SingleL1",
        HLT_Ele24_eta2p1_WPLoose_Gsf_LooseIsoPFTau20_SingleL1,
    )

    return events
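
For context on what these placeholder columns contain, here is a minimal sketch (not part of the diff; the two-event input is hypothetical) that evaluates the HLT_Ele25_eta2p1_WPTight_Gsf expression on a toy awkward array, assuming only that awkward is installed:

# standalone sketch, illustrative only
import awkward as ak

toy_events = ak.Array({
    "Electron": [
        [{"pt": 30.0, "eta": 1.2}],  # event 1: passes both the pt and eta requirement
        [{"pt": 20.0, "eta": 2.4}],  # event 2: fails both requirements
    ],
})

# same expression as in fake_triggers above
flag = ak.any(
    (toy_events.Electron.pt >= 25.5) &
    (toy_events.Electron.eta <= 2.1),
    axis=1,
)
print(flag.tolist())  # [True, False]: one boolean per event, mimicking a trigger decision
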
122 changes: 83 additions & 39 deletions hbt/config/analysis_hbt.py
@@ -46,45 +46,89 @@
# load configs
#

# # 2017
# from hbt.config.configs_run2ul import add_config as add_config_run2ul
# from cmsdb.campaigns.run2_2017_nano_v9 import campaign_run2_2017_nano_v9
# from cmsdb.campaigns.run2_2017_nano_uhh_v11 import campaign_run2_2017_nano_uhh_v11


# # default v9 config
# add_config_run2ul(
#     analysis_hbt,
#     campaign_run2_2017_nano_v9.copy(),
#     config_name=campaign_run2_2017_nano_v9.name,
#     config_id=2,
# )

# # v9 config with limited number of files for faster prototyping
# add_config_run2ul(
#     analysis_hbt,
#     campaign_run2_2017_nano_v9.copy(),
#     config_name=f"{campaign_run2_2017_nano_v9.name}_limited",
#     config_id=12,
#     limit_dataset_files=2,
# )

# # default v11 uhh config
# add_config_run2ul(
#     analysis_hbt,
#     campaign_run2_2017_nano_uhh_v11.copy(),
#     config_name=campaign_run2_2017_nano_uhh_v11.name,
#     config_id=31,
# )

# # v11 uhh config with limited number of files for faster prototyping
# add_config_run2ul(
#     analysis_hbt,
#     campaign_run2_2017_nano_uhh_v11.copy(),
#     config_name=f"{campaign_run2_2017_nano_uhh_v11.name}_limited",
#     config_id=32,
#     limit_dataset_files=2,
# )
#
# Run 2 configs
#

from hbt.config.configs_run2ul import add_config as add_config_run2ul

# 2016 HIPM
from cmsdb.campaigns.run2_2016_HIPM_nano_uhh_v12 import campaign_run2_2016_HIPM_nano_uhh_v12

# default v12 config
add_config_run2ul(
    analysis_hbt,
    campaign_run2_2016_HIPM_nano_uhh_v12.copy(),
    config_name=campaign_run2_2016_HIPM_nano_uhh_v12.name,
    config_id=6,  # arbitrary id, but it must not be reused by any other config
)

# default v12 config with limited number of files for faster prototyping
add_config_run2ul(
    analysis_hbt,
    campaign_run2_2016_HIPM_nano_uhh_v12.copy(),
    config_name=f"{campaign_run2_2016_HIPM_nano_uhh_v12.name}_limited",
    config_id=16,  # arbitrary id, but it must not be reused by any other config
    limit_dataset_files=2,
)

# 2016 post
from cmsdb.campaigns.run2_2016_nano_uhh_v12 import campaign_run2_2016_nano_uhh_v12

# v12 uhh config with full datasets
add_config_run2ul(
    analysis_hbt,
    campaign_run2_2016_nano_uhh_v12.copy(),
    config_name=campaign_run2_2016_nano_uhh_v12.name,
    config_id=3,
)

# v12 uhh config with limited number of files for faster prototyping
add_config_run2ul(
    analysis_hbt,
    campaign_run2_2016_nano_uhh_v12.copy(),
    config_name=f"{campaign_run2_2016_nano_uhh_v12.name}_limited",
    config_id=13,
    limit_dataset_files=2,
)

# 2017
from cmsdb.campaigns.run2_2017_nano_v9 import campaign_run2_2017_nano_v9
from cmsdb.campaigns.run2_2017_nano_uhh_v11 import campaign_run2_2017_nano_uhh_v11

# default v9 config
add_config_run2ul(
    analysis_hbt,
    campaign_run2_2017_nano_v9.copy(),
    config_name=campaign_run2_2017_nano_v9.name,
    config_id=2,
)

# v9 config with limited number of files for faster prototyping
add_config_run2ul(
    analysis_hbt,
    campaign_run2_2017_nano_v9.copy(),
    config_name=f"{campaign_run2_2017_nano_v9.name}_limited",
    config_id=12,
    limit_dataset_files=2,
)

# default v11 uhh config
add_config_run2ul(
    analysis_hbt,
    campaign_run2_2017_nano_uhh_v11.copy(),
    config_name=campaign_run2_2017_nano_uhh_v11.name,
    config_id=31,
)

# v11 uhh config with limited number of files for faster prototyping
add_config_run2ul(
    analysis_hbt,
    campaign_run2_2017_nano_uhh_v11.copy(),
    config_name=f"{campaign_run2_2017_nano_uhh_v11.name}_limited",
    config_id=32,
    limit_dataset_files=2,
)
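
The open questions next to config_id=6 and config_id=16 reduce to one rule: every id must be unique across all configs registered for the analysis. One possible guard, sketched with a hypothetical helper that is not part of the PR:

# hypothetical helper, illustrative only: catches an accidentally re-used id at import time
_used_config_ids: set[int] = set()

def unique_config_id(config_id: int) -> int:
    if config_id in _used_config_ids:
        raise ValueError(f"config_id {config_id} is already in use")
    _used_config_ids.add(config_id)
    return config_id

# usage sketch: wrap the literal id when registering a config
# add_config_run2ul(..., config_id=unique_config_id(6))
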


#
36 changes: 27 additions & 9 deletions hbt/config/configs_run2ul.py
@@ -65,6 +65,8 @@ def add_config(
        "h",
        "hh_ggf_bbtautau",
        "graviton_hh_ggf_bbtautau_m400",
        "graviton_hh_ggf_bbtautau_m450",
        "graviton_hh_ggf_bbtautau_m500",
        "graviton_hh_ggf_bbtautau_m1250",
    ]
    for process_name in process_names:
@@ -145,6 +147,8 @@ def add_config(
        # signals
        "hh_ggf_bbtautau_madgraph",
        "graviton_hh_ggf_bbtautau_m400_madgraph",
        "graviton_hh_ggf_bbtautau_m450_madgraph",
        "graviton_hh_ggf_bbtautau_m500_madgraph",
        "graviton_hh_ggf_bbtautau_m1250_madgraph",
    ]
    for dataset_name in dataset_names:
@@ -211,19 +215,26 @@

    # lumi values in inverse pb
    # https://twiki.cern.ch/twiki/bin/view/CMS/LumiRecommendationsRun2?rev=2#Combination_and_correlations
    # difference pre-post VFP: https://cds.cern.ch/record/2854610/files/DP2023_006.pdf
    if year == 2016:
        cfg.x.luminosity = Number(36310, {
            "lumi_13TeV_2016": 0.01j,
            "lumi_13TeV_correlated": 0.006j,
        })
        if campaign.x.vfp == "pre":
            cfg.x.luminosity = Number(19_500, {
                "lumi_13TeV_2016": 0.01j,
                "lumi_13TeV_correlated": 0.006j,
            })
        if campaign.x.vfp == "post":
            cfg.x.luminosity = Number(16_800, {
                "lumi_13TeV_2016": 0.01j,
                "lumi_13TeV_correlated": 0.006j,
            })
    elif year == 2017:
        cfg.x.luminosity = Number(41480, {
        cfg.x.luminosity = Number(41_480, {
            "lumi_13TeV_2017": 0.02j,
            "lumi_13TeV_1718": 0.006j,
            "lumi_13TeV_correlated": 0.009j,
        })
    else:  # 2018
        cfg.x.luminosity = Number(59830, {
        cfg.x.luminosity = Number(59_830, {
            "lumi_13TeV_2017": 0.015j,
            "lumi_13TeV_1718": 0.002j,
            "lumi_13TeV_correlated": 0.02j,
Expand Down Expand Up @@ -692,7 +703,7 @@ def add_config(
"Electron.pt", "Electron.eta", "Electron.phi", "Electron.mass", "Electron.deltaEtaSC",
"Electron.pfRelIso03_all",
"Muon.pt", "Muon.eta", "Muon.phi", "Muon.mass", "Muon.pfRelIso04_all",
"Tau.pt", "Tau.eta", "Tau.phi", "Tau.mass", "Tau.idDeepTau2017v2p1VSe",
"Tau.pt", "Tau.eta", "Tau.phi", "Tau.mass", "Tau.idDeepTau2017v2p1VSe", "Tau.charge",
"Tau.idDeepTau2017v2p1VSmu", "Tau.idDeepTau2017v2p1VSjet", "Tau.genPartFlav",
"Tau.decayMode",
"MET.pt", "MET.phi", "MET.significance", "MET.covXX", "MET.covXY", "MET.covYY",
Expand Down Expand Up @@ -762,9 +773,16 @@ def add_config(
add_met_filters(cfg)

# add triggers
if year == 2017:
if year == 2016:
# cfg.x.triggers = None
from hbt.config.triggers import add_triggers_2016
add_triggers_2016(cfg, campaign.x.vfp)
elif year == 2017:
from hbt.config.triggers import add_triggers_2017
add_triggers_2017(cfg)
elif year == 2018:
from hbt.config.triggers import add_triggers_2018
add_triggers_2018(cfg)
else:
raise NotImplementedError(f"triggers not implemented for {year}")

@@ -778,11 +796,11 @@ def get_dataset_lfns(
        # destructure dataset_key into parts and create the lfn base directory
        dataset_id, full_campaign, tier = dataset_key.split("/")[1:]
        main_campaign, sub_campaign = full_campaign.split("-", 1)
        # dataset_inst.data_source is either "mc" or "data"
        lfn_base = law.wlcg.WLCGDirectoryTarget(
            f"/store/{dataset_inst.data_source}/{main_campaign}/{dataset_id}/{tier}/{sub_campaign}/0",
            fs=f"wlcg_fs_{cfg.campaign.x.custom['name']}",
        )

        # loop through files and interpret paths as lfns
        return [
            lfn_base.child(basename, type="f").path
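
To make the destructuring above concrete, a small sketch with a hypothetical dataset key (the names are made up, not from the PR):

# hypothetical key, for illustration only
dataset_key = "/SomeDataset/RunIISummer20UL16NanoAODv12-106X_mcRun2_v17-v2/NANOAODSIM"
dataset_id, full_campaign, tier = dataset_key.split("/")[1:]
main_campaign, sub_campaign = full_campaign.split("-", 1)
# dataset_id    -> "SomeDataset"
# main_campaign -> "RunIISummer20UL16NanoAODv12"
# sub_campaign  -> "106X_mcRun2_v17-v2"
# tier          -> "NANOAODSIM"
# resulting base (for mc): /store/mc/RunIISummer20UL16NanoAODv12/SomeDataset/NANOAODSIM/106X_mcRun2_v17-v2/0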