diff --git a/CMakeLists.txt b/CMakeLists.txt
index b1b55d5a602f3813e1eddcff4ada32aaf806b92c..a764b9b3369f8c292eb16efef67817e9a9dd57ba 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -22,8 +22,10 @@ if (USE_DD4HEP)
endif()
# Declare project name and version
+
gaudi_project(Moore v51r0
- USE Phys v31r0
+ USE Phys v31r0
+ Allen v0r9
DATA AppConfig VERSION v3r*
FieldMap VERSION v5r*
PRConfig VERSION v1r*
diff --git a/Hlt/Hlt1Conf/options/allen_hlt1_pp_default.py b/Hlt/Hlt1Conf/options/allen_hlt1_pp_default.py
new file mode 100644
index 0000000000000000000000000000000000000000..808c5ff98918c03161c3d13e4f7814558332c9a6
--- /dev/null
+++ b/Hlt/Hlt1Conf/options/allen_hlt1_pp_default.py
@@ -0,0 +1,16 @@
+###############################################################################
+# (c) Copyright 2000-2018 CERN for the benefit of the LHCb Collaboration #
+# #
+# This software is distributed under the terms of the GNU General Public #
+# Licence version 3 (GPL Version 3), copied verbatim in the file "COPYING". #
+# #
+# In applying this licence, CERN does not waive the privileges and immunities #
+# granted to it by virtue of its status as an Intergovernmental Organization #
+# or submit itself to any jurisdiction. #
+###############################################################################
+from Moore import options, run_allen
+from Hlt1Conf.settings import allen_lines
+
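+# A typical invocation (illustrative; this mirrors the Hlt1Conf QM tests)
+# chains an input/conditions options file with this one:
+#   gaudirun.py $MOOREROOT/tests/options/default_input_and_conds_hlt1.py \
+#       $HLT1CONFROOT/options/allen_hlt1_pp_default.py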
+run_allen(options, allen_lines)
diff --git a/Hlt/Hlt1Conf/python/Hlt1Conf/lines/allen.py b/Hlt/Hlt1Conf/python/Hlt1Conf/lines/allen.py
new file mode 100644
index 0000000000000000000000000000000000000000..0289752f1f16aa5c515ac69c970a52b38041c6b2
--- /dev/null
+++ b/Hlt/Hlt1Conf/python/Hlt1Conf/lines/allen.py
@@ -0,0 +1,20 @@
+###############################################################################
+# (c) Copyright 2019 CERN for the benefit of the LHCb Collaboration #
+# #
+# This software is distributed under the terms of the GNU General Public #
+# Licence version 3 (GPL Version 3), copied verbatim in the file "COPYING". #
+# #
+# In applying this licence, CERN does not waive the privileges and immunities #
+# granted to it by virtue of its status as an Intergovernmental Organization #
+# or submit itself to any jurisdiction. #
+###############################################################################
+from Moore.config import HltLine
+
+from RecoConf.hlt1_allen import make_allen_dec_reports
+
+
+def allen_line(name='Hlt1Allen', prescale=1):
+
+ allen = make_allen_dec_reports()
+
+ return HltLine(name=name, algs=[allen], prescale=prescale)
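+
+
+# Example (hypothetical line name): a prescaled copy of this line can be
+# created with allen_line(name='Hlt1AllenPrescaled', prescale=0.5).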
diff --git a/Hlt/Hlt1Conf/python/Hlt1Conf/settings.py b/Hlt/Hlt1Conf/python/Hlt1Conf/settings.py
index 5367922e3f7110afd2267676fd2f9802fc510dcb..f620aefcd777e92cf5b2c7b03f25e20c4d671d3f 100644
--- a/Hlt/Hlt1Conf/python/Hlt1Conf/settings.py
+++ b/Hlt/Hlt1Conf/python/Hlt1Conf/settings.py
@@ -23,6 +23,8 @@ from Hlt1Conf.lines.minimum_bias import (
)
from Hlt1Conf.lines.luminosity import luminosity_line
+from Hlt1Conf.lines.allen import allen_line
+
def track_mva_lines():
return [
@@ -83,3 +85,7 @@ def comparison_lines():
one_track_muon_highpt_line(),
#gec_passthrough_line()
]
+
+
+def allen_lines():
+ return [allen_line()]
diff --git a/Hlt/Hlt1Conf/tests/options/allen_hlt1_mdf_output.py b/Hlt/Hlt1Conf/tests/options/allen_hlt1_mdf_output.py
new file mode 100644
index 0000000000000000000000000000000000000000..b6d0fd56e7d6a47f6965a733979de438c7a281d3
--- /dev/null
+++ b/Hlt/Hlt1Conf/tests/options/allen_hlt1_mdf_output.py
@@ -0,0 +1,15 @@
+###############################################################################
+# (c) Copyright 2019 CERN for the benefit of the LHCb Collaboration #
+# #
+# This software is distributed under the terms of the GNU General Public #
+# Licence version 3 (GPL Version 3), copied verbatim in the file "COPYING". #
+# #
+# In applying this licence, CERN does not waive the privileges and immunities #
+# granted to it by virtue of its status as an Intergovernmental Organization #
+# or submit itself to any jurisdiction. #
+###############################################################################
+"""Write an HLT1-filtered MDF file."""
+from Moore import options
+
+options.output_file = 'test_allen_hlt1_persistence_mdf_write.mdf'
+options.output_type = 'MDF'
diff --git a/Hlt/Hlt1Conf/tests/options/test_allen_decreports.py b/Hlt/Hlt1Conf/tests/options/test_allen_decreports.py
new file mode 100644
index 0000000000000000000000000000000000000000..a91ceb0bb6925ac2166d1a6ac8f9c53664e9ac9d
--- /dev/null
+++ b/Hlt/Hlt1Conf/tests/options/test_allen_decreports.py
@@ -0,0 +1,126 @@
+###############################################################################
+# (c) Copyright 2000-2018 CERN for the benefit of the LHCb Collaboration #
+# #
+# This software is distributed under the terms of the GNU General Public #
+# Licence version 3 (GPL Version 3), copied verbatim in the file "COPYING". #
+# #
+# In applying this licence, CERN does not waive the privileges and immunities #
+# granted to it by virtue of its status as an Intergovernmental Organization #
+# or submit itself to any jurisdiction. #
+###############################################################################
+"""Compare decisions from a log file to those stored in a MDF.
+
+Takes three outputs from a previously-ran job: the options dump, the log
+file, and the MDF. The decisions printed in the form of counters of the RunAllen algorithm are
+compared to those taken from the DecReports found in the MDF. Any difference
+between the two is considered a failure. The options dump is used to
+configure the HltANNSvc for the job ran by this script.
+"""
+from __future__ import print_function
+import argparse
+
+from Configurables import (
+ ApplicationMgr,
+ HistogramPersistencySvc,
+ HltANNSvc,
+ IODataManager,
+ LHCbApp,
+)
+from DAQSys.Decoders import DecoderDB
+from GaudiConf import IOHelper
+import GaudiPython
+
+from PyConf.utilities import read_options
+
+# Top-level control flow node
+ALLEN_KEY = "allen"
+
+
+def get_counts_from_log(path):
+    """Return the decisions of each line as extracted from a log file."""
+    counts = {}
+    with open(path) as f:
+ for line in filter(lambda l: "Selected by" in l, f):
+ columns = line.split()
+ hlt_line = columns[2].replace('"', '')
+ count = int(columns[6])
+ counts[hlt_line] = count
+ f.seek(0)
+ for line in filter(lambda l: "NONLAZY_OR: allen" in l, f):
+ columns = line.split()
+ hlt_line = columns[1]
+ count = int(columns[3].replace("Sum=", ""))
+ counts[hlt_line] = count
+ return counts
+
+
+parser = argparse.ArgumentParser()
+parser.add_argument("--input-mdf", help="Input MDF file")
+parser.add_argument("--input-log", help="Input log file")
+parser.add_argument(
+ "--input-options", help="Input options file (Python format)")
+args = parser.parse_args()
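+
+# Example invocation (file names as produced by the allen_mdf_write QM test;
+# adjust paths as needed):
+#   python test_allen_decreports.py \
+#       --input-mdf=test_allen_hlt1_persistence_mdf_write.mdf \
+#       --input-log=test_hlt1_persistence_allen_mdf_write.stdout \
+#       --input-options=allen_mdf_write.opts.py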
+
+# Configure basic application with inputs
+LHCbApp(DataType="Upgrade")
+IOHelper("MDF").inputFiles([args.input_mdf])
+# Disable warning about not being able to navigate ancestors
+IODataManager(DisablePFNWarning=True)
+# Disable warning about histogram saving not being required
+HistogramPersistencySvc(OutputLevel=5)
+# Decode Hlt DecReports
+ApplicationMgr(
+ TopAlg=[DecoderDB["HltDecReportsDecoder/Hlt1DecReportsDecoder"].setup()])
+
+# Get expected lines and HltANNSvc configuration from the previous job
+options = read_options(args.input_options)
+Hlt1SelectionIDs = options["HltANNSvc"]["Hlt1SelectionID"]
+HltANNSvc().Hlt1SelectionID = Hlt1SelectionIDs
+print(Hlt1SelectionIDs)
+
+# Set up counters for recording decisions from MDF
+counts_from_mdf = {key: 0 for key in Hlt1SelectionIDs}
+counts_from_mdf[ALLEN_KEY] = 0
+# Extract counters from log file of the previous job
+counts_from_log = get_counts_from_log(args.input_log)
+
+gaudi = GaudiPython.AppMgr()
+TES = gaudi.evtSvc()
+gaudi.run(1)
+
+error = False
+while TES["/Event"]:
+ decs = TES["/Event/Hlt1/DecReports"]
+ if not decs:
+ print("DecReports TES location not found")
+ error = True
+ break
+
+ triggered = False
+ for key in counts_from_mdf.keys():
+ if key == ALLEN_KEY:
+ continue
+ counts_from_mdf[key] += int(decs.decReport(key).decision())
+ if decs.decReport(key).decision():
+ triggered = True
+ if triggered:
+ counts_from_mdf[ALLEN_KEY] += 1
+
+ gaudi.run(1)
+
+for key in counts_from_mdf.keys():
+ line_name = key
+ if line_name not in counts_from_log.keys():
+ error = True
+ print("Test ERROR: Line {} missing".format(line_name))
+ else:
+ if counts_from_mdf[key] != counts_from_log[line_name]:
+ error = True
+ print("Test ERROR: Counts of {} wrong, log = {}, mdf = {}".format(
+ key, counts_from_log[line_name], counts_from_mdf[key]))
+ else:
+ print("Counts of {}, log = {}, mdf = {}".format(
+ key, counts_from_log[line_name], counts_from_mdf[key]))
+
+if error:
+ exit("Test failed") # exit with a non-zero code
diff --git a/Hlt/Hlt1Conf/tests/qmtest/persistency.qms/allen_mdf_write.qmt b/Hlt/Hlt1Conf/tests/qmtest/persistency.qms/allen_mdf_write.qmt
new file mode 100644
index 0000000000000000000000000000000000000000..734f66e2c581bb23b86c0e544a4a9b4026f5912b
--- /dev/null
+++ b/Hlt/Hlt1Conf/tests/qmtest/persistency.qms/allen_mdf_write.qmt
@@ -0,0 +1,31 @@
+<?xml version="1.0" ?><!DOCTYPE extension  PUBLIC '-//QM/2.3/Extension//EN'  'http://www.codesourcery.com/qm/dtds/2.3/-//qm/2.3/extension//en.dtd'>
+<extension class="GaudiTest.GaudiExeTest" kind="test">
+  <argument name="program"><text>gaudirun.py</text></argument>
+  <argument name="args"><set>
+    <text>$MOOREROOT/tests/options/default_input_and_conds_hlt1.py</text>
+    <text>$HLT1CONFROOT/tests/options/allen_hlt1_mdf_output.py</text>
+    <text>$HLT1CONFROOT/options/allen_hlt1_pp_default.py</text>
+    <text>--output=allen_mdf_write.opts.py</text>
+    <text>--all-opts</text>
+  </set></argument>
+  <argument name="use_temp_dir"><enumeral>true</enumeral></argument>
+  <argument name="validator"><text>
+with open('test_hlt1_persistence_allen_mdf_write.stdout', 'w') as f:
+    f.write(stdout)
+  </text></argument>
+</extension>
diff --git a/Hlt/Hlt1Conf/tests/qmtest/persistency.qms/mdf_read_decs_allen.qmt b/Hlt/Hlt1Conf/tests/qmtest/persistency.qms/mdf_read_decs_allen.qmt
new file mode 100644
index 0000000000000000000000000000000000000000..d1905fbab4ceb78907619a3808e15a03063ce7e4
--- /dev/null
+++ b/Hlt/Hlt1Conf/tests/qmtest/persistency.qms/mdf_read_decs_allen.qmt
@@ -0,0 +1,34 @@
+<?xml version="1.0" ?><!DOCTYPE extension  PUBLIC '-//QM/2.3/Extension//EN'  'http://www.codesourcery.com/qm/dtds/2.3/-//qm/2.3/extension//en.dtd'>
+<extension class="GaudiTest.GaudiExeTest" kind="test">
+  <argument name="prerequisites"><set>
+    <tuple><text>persistency.allen_mdf_write</text><enumeral>PASS</enumeral></tuple>
+  </set></argument>
+  <argument name="program"><text>python</text></argument>
+  <argument name="args"><set>
+    <text>$HLT1CONFROOT/tests/options/test_allen_decreports.py</text>
+    <text>--input-log=test_hlt1_persistence_allen_mdf_write.stdout</text>
+    <text>--input-mdf=test_allen_hlt1_persistence_mdf_write.mdf</text>
+    <text>--input-options=allen_mdf_write.opts.py</text>
+  </set></argument>
+  <argument name="use_temp_dir"><enumeral>true</enumeral></argument>
+  <argument name="validator"><text>
+from Moore.qmtest.exclusions import preprocessor
+countErrorLines({"FATAL": 0, "ERROR": 0, "WARNING": 0},
+                stdout=preprocessor(stdout))
+  </text></argument>
+</extension>
diff --git a/Hlt/Moore/python/Moore/__init__.py b/Hlt/Moore/python/Moore/__init__.py
index 7d6c6220b3e43fda7393aacbed8b2a7113c2dbd8..cf5f1bff822ca95e47456d7779249ef4362e1aa3 100644
--- a/Hlt/Moore/python/Moore/__init__.py
+++ b/Hlt/Moore/python/Moore/__init__.py
@@ -12,4 +12,5 @@
"""
from .config import (options, run_moore, run_reconstruction,
- moore_control_flow) # noqa
+ moore_control_flow, run_allen,
+ run_allen_reconstruction) # noqa
diff --git a/Hlt/Moore/python/Moore/config.py b/Hlt/Moore/python/Moore/config.py
index 49021e1ca556adbbd1b8d7e68c098dae1dac730f..2cf0377a7d51e4c967aa5ea066de343d1a9ded2c 100644
--- a/Hlt/Moore/python/Moore/config.py
+++ b/Hlt/Moore/python/Moore/config.py
@@ -8,9 +8,12 @@
# granted to it by virtue of its status as an Intergovernmental Organization #
# or submit itself to any jurisdiction. #
###############################################################################
-import re
+import re, os, json, logging, inspect
from collections import namedtuple
-from Configurables import LumiCounterMerger
+from Configurables import (LumiCounterMerger, ApplicationMgr, DumpUTGeometry,
+ DumpFTGeometry, DumpMuonTable, DumpMuonGeometry,
+ DumpVPGeometry, DumpMagneticField, DumpBeamline,
+ DumpUTLookupTables, AllenUpdater)
from PyConf import configurable
from PyConf.Algorithms import (bankKiller, DeterministicPrescaler,
ExecutionReportsWriter, HltDecReportsWriter,
@@ -29,6 +32,9 @@ from PyConf.application import configure_input, configure
from .selreports import make_selreports
from .persistency import clone_candidates
+from RecoConf.hlt1_allen import make_allen_dec_reports
+
+log = logging.getLogger(__name__)
# TODO move the options global to a separate module such that functions
# defined here cannot access it
@@ -333,6 +339,149 @@ def run_moore(options, make_lines=None, public_tools=[]):
return config
+def setup_allen_non_event_data_service(dump_binaries=False):
+ """Setup Allen non-event data
+
+ An ExtSvc is added to the ApplicationMgr to provide the Allen non-event data (geometries etc.)
+ """
+ producers = [
+ p(DumpToFile=dump_binaries)
+ for p in (DumpVPGeometry, DumpUTGeometry, DumpFTGeometry,
+ DumpMuonGeometry, DumpMuonTable, DumpMagneticField,
+ DumpBeamline, DumpUTLookupTables)
+ ]
+ ApplicationMgr().ExtSvc += [
+ AllenUpdater(OutputLevel=2),
+ ] + producers
+
+
+def get_allen_hlt1_lines():
+ """Read Allen HLT1 lines from json configuration file
+
+ """
+ Hlt1SelectionIDs = {}
+ config_file_path = "$ALLEN_INSTALL_DIR/constants/Sequence.json"
+ try:
+ with open(os.path.expandvars(config_file_path)) as config_file:
+            config = json.load(config_file)
+    except IOError:
+        log.fatal(
+            "Could not find Allen sequence file %s; make sure you have built Allen",
+            config_file_path)
+        raise
+    for index, line in enumerate(config["configured_lines"], start=1):
+        line_name = "Hlt1" + str(line) + "Decision"
+        Hlt1SelectionIDs[line_name] = index
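+    # For illustration (these line names are hypothetical): a Sequence.json
+    # containing {"configured_lines": ["TrackMVA", "TwoTrackMVA"]} yields
+    # {"Hlt1TrackMVADecision": 1, "Hlt1TwoTrackMVADecision": 2}.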
+
+ return Hlt1SelectionIDs
+
+
+def setup_allen_Hlt1ANN():
+ """Configure HltANN service for Allen
+
+ """
+ Hlt1SelectionIDs = get_allen_hlt1_lines()
+ hlt_ann_svc = setup_component(
+ "HltANNSvc", Hlt1SelectionID=Hlt1SelectionIDs)
+
+ return Hlt1SelectionIDs
+
+
+def run_allen(options, make_lines):
+ """Configure Allen within Mooore.
+
+ Convenience function that sets up an Allen node and sets up services
+
+ Args:
+ options (ApplicationOptions): holder of application options
+ make_lines: function returning a list of `HltLine` objects
+
+ """
+ config = configure_input(options)
+ options.finalize()
+
+ setup_allen_non_event_data_service()
+
+ # Write DecReports raw banks
+
+ allen_dec_reports = make_allen_dec_reports()
+
+ # We will write the reports to raw banks at these locations
+ algs = []
+ locations_to_persist = []
+ report_banks_to_write = ['HltDecReports']
+ reports_raw_event = default_raw_event(report_banks_to_write)
+
+ # TODO we probably shouldn't write in Trigger/RawEvent when
+ # running on reconstructed data. Instead, we should create a
+ # RawEvent at DAQ/RawEvent, which would make HltDecReportsWriter
+ # happy, or make raw events/banks const and adapt everything.
+ kill_existing = True
+ if kill_existing:
+ all_banks_to_write = (report_banks_to_write)
+ algs.append(
+ bankKiller(
+ RawEventLocations=[reports_raw_event],
+ BankTypes=all_banks_to_write))
+
+ drw = HltDecReportsWriter(
+ InputHltDecReportsLocation=allen_dec_reports,
+ outputs=dict(
+ OutputRawEventLocation=force_location(reports_raw_event.location)),
+ )
+
+ algs.extend([drw])
+ locations_to_persist.append(drw.OutputRawEventLocation)
+
+ Hlt1SelectionIDs = setup_allen_Hlt1ANN()
+
+ locations_to_persist.append(reports_raw_event)
+
+ report_writers_node = CompositeNode(
+ 'report_writers',
+ combineLogic=NodeLogic.NONLAZY_OR,
+ children=algs,
+ forceOrder=True)
+
+ # Transform to a list of unique str
+ locations_to_persist = list(
+ set(map(_format_location, locations_to_persist)))
+
+ writers = [report_writers_node]
+ writers.extend(output_writers(options, locations_to_persist))
+
+ top_cf_node = CompositeNode(
+ 'allen',
+ combineLogic=NodeLogic.NONLAZY_OR,
+ children=[make_allen_dec_reports()] + writers,
+ forceOrder=True)
+
+ config.update(configure(options, top_cf_node))
+ # TODO pass config to gaudi explicitly when that is supported
+ return config
+
+
+def run_allen_reconstruction(options,
+ make_reconstruction,
+ dump_binaries=False,
+ public_tools=[]):
+ """Configure the Allen reconstruction data flow
+
+ Convenience function that configures all services and creates a data flow.
+
+ Args:
+ options (ApplicationOptions): holder of application options
+ make_reconstruction: function returning a single CompositeNode object
+ public_tools (list): list of public `Tool` instances to configure
+
+ """
+
+ setup_allen_non_event_data_service(dump_binaries=dump_binaries)
+ return run_reconstruction(
+ options, make_reconstruction, public_tools=public_tools)
+
+
def run_reconstruction(options, make_reconstruction, public_tools=[]):
"""Configure the reconstruction data flow with a simple control flow.
@@ -371,7 +520,6 @@ def _get_arg_default(function, name):
Raises TypeError if ``function`` has no default keyword argument
called ``name``.
"""
- import inspect
spec = inspect.getargspec(function)
try:
i = spec.args.index(name) # ValueError if not found
diff --git a/Hlt/RecoConf/options/dump_binary_input_for_standalone_Allen.py b/Hlt/RecoConf/options/dump_binary_input_for_standalone_Allen.py
new file mode 100644
index 0000000000000000000000000000000000000000..5b35101f15b5a93a29b71f6f7bab059d05559590
--- /dev/null
+++ b/Hlt/RecoConf/options/dump_binary_input_for_standalone_Allen.py
@@ -0,0 +1,42 @@
+###############################################################################
+# (c) Copyright 2019 CERN for the benefit of the LHCb Collaboration #
+# #
+# This software is distributed under the terms of the GNU General Public #
+# Licence version 3 (GPL Version 3), copied verbatim in the file "COPYING". #
+# #
+# In applying this licence, CERN does not waive the privileges and immunities #
+# granted to it by virtue of its status as an Intergovernmental Organization #
+# or submit itself to any jurisdiction. #
+###############################################################################
+from Moore import options, run_allen_reconstruction
+from Moore.config import Reconstruction
+from RecoConf.hlt1_allen import make_dumped_raw_banks
+from RecoConf.mc_checking import tracker_dumper, pv_dumper
+
+outputDir = "dump/"
+
+
+def dump_allen_binary_input():
+ """Dump input required for Allen standalone running into binary files
+
+ The following input is dumped:
+ - Raw bank content of the Velo, UT, SciFi, Muon, ODIN banks
+ - MC info from the TrackerDumper required for the Allen standalone checker
+ - Non-event data such as geometry information
+ """
+
+ data = []
+ data.append(
+ make_dumped_raw_banks(
+ dump_to_file=True, output_dir=outputDir + "banks"))
+ data.append(
+ tracker_dumper(
+ dump_to_root=False,
+ dump_to_binary=True,
+ bin_output_dir=outputDir + "MC_info/tracks"))
+ data.append(pv_dumper(output_dir=outputDir + "MC_info/PVs"))
+
+ return Reconstruction('allen_dump', data)
+
+
+run_allen_reconstruction(options, dump_allen_binary_input, True)
diff --git a/Hlt/RecoConf/options/hlt1_reco_allen_IPresolution.py b/Hlt/RecoConf/options/hlt1_reco_allen_IPresolution.py
new file mode 100644
index 0000000000000000000000000000000000000000..3eccd94a1e603b3f3c004d3e6dbb663c3282bedd
--- /dev/null
+++ b/Hlt/RecoConf/options/hlt1_reco_allen_IPresolution.py
@@ -0,0 +1,29 @@
+###############################################################################
+# (c) Copyright 2019 CERN for the benefit of the LHCb Collaboration #
+# #
+# This software is distributed under the terms of the GNU General Public #
+# Licence version 3 (GPL Version 3), copied verbatim in the file "COPYING". #
+# #
+# In applying this licence, CERN does not waive the privileges and immunities #
+# granted to it by virtue of its status as an Intergovernmental Organization #
+# or submit itself to any jurisdiction. #
+###############################################################################
+from Moore import options, run_allen_reconstruction
+from Moore.config import Reconstruction
+from RecoConf.hlt1_tracking import require_gec
+from RecoConf.hlt1_allen import make_allen_tracks, make_allen_pvs
+from RecoConf.mc_checking import monitor_IPresolution
+from Configurables import NTupleSvc
+
+
+def hlt1_reco_allen_IPresolution():
+ allen_tracks = make_allen_tracks()
+ allen_pvs = make_allen_pvs()
+ pr_checker = monitor_IPresolution(allen_tracks["Forward"]["v1"], allen_pvs,
+ allen_tracks["Velo"]["v1"])
+
+ return Reconstruction('IPresolution', [pr_checker], [require_gec()])
+
+
+options.histo_file = "Hlt1ForwardTracking_IPresolution_Allen.root"
+run_allen_reconstruction(options, hlt1_reco_allen_IPresolution)
diff --git a/Hlt/RecoConf/options/hlt1_reco_allen_muonid_efficiency.py b/Hlt/RecoConf/options/hlt1_reco_allen_muonid_efficiency.py
new file mode 100644
index 0000000000000000000000000000000000000000..58e191415c2b08d09761c3420cf71ce18eb1cb47
--- /dev/null
+++ b/Hlt/RecoConf/options/hlt1_reco_allen_muonid_efficiency.py
@@ -0,0 +1,61 @@
+###############################################################################
+# (c) Copyright 2019 CERN for the benefit of the LHCb Collaboration #
+# #
+# This software is distributed under the terms of the GNU General Public #
+# Licence version 3 (GPL Version 3), copied verbatim in the file "COPYING". #
+# #
+# In applying this licence, CERN does not waive the privileges and immunities #
+# granted to it by virtue of its status as an Intergovernmental Organization #
+# or submit itself to any jurisdiction. #
+###############################################################################
+from Moore import options, run_allen_reconstruction
+from Moore.config import Reconstruction
+from RecoConf.hlt1_tracking import require_gec
+from RecoConf.mc_checking import monitor_tracking_efficiency, make_links_tracks_mcparticles, make_links_lhcbids_mcparticles_tracking_system
+from RecoConf.mc_checking_categories import get_mc_categories, get_hit_type_mask
+from RecoConf.hlt1_allen import (make_allen_forward_tracks,
+ make_allen_forward_muon_tracks)
+
+
+def hlt1_reco_allen_muonid_efficiency():
+ # get the fitted tracks with muon ID
+ forward_muon_tracks = make_allen_forward_muon_tracks()
+
+ # make links to lhcb id for mc matching
+ links_to_lhcbids = make_links_lhcbids_mcparticles_tracking_system()
+ # make links between tracks and mcparticles for mc matching
+ links_to_tracks_muon_id = make_links_tracks_mcparticles(
+ InputTracks=forward_muon_tracks, LinksToLHCbIDs=links_to_lhcbids)
+
+    # build the PrChecker algorithm for the muon ID tracks
+ pr_checker_for_muon_id = monitor_tracking_efficiency(
+ TrackType="MuonMatch",
+ InputTracks=forward_muon_tracks,
+ #InputTracks={"v1": v1_tracks.OutputTracksName},
+ LinksToTracks=links_to_tracks_muon_id,
+ LinksToLHCbIDs=links_to_lhcbids,
+ MCCategories=get_mc_categories("MuonMatch"),
+ HitTypesToCheck=get_hit_type_mask("BestLong"),
+ )
+
+    # build the PrChecker algorithm for the forward tracks
+ forward_tracks = make_allen_forward_tracks()
+ links_to_forward_tracks = make_links_tracks_mcparticles(
+ InputTracks=forward_tracks, LinksToLHCbIDs=links_to_lhcbids)
+
+ pr_checker_for_forward_track = monitor_tracking_efficiency(
+ TrackType="Forward",
+ InputTracks=forward_tracks,
+ LinksToTracks=links_to_forward_tracks,
+ LinksToLHCbIDs=links_to_lhcbids,
+ MCCategories=get_mc_categories("MuonMatch"),
+ HitTypesToCheck=get_hit_type_mask("BestLong"),
+ )
+
+ return Reconstruction(
+ 'muonideff', [pr_checker_for_forward_track, pr_checker_for_muon_id],
+ [require_gec()])
+
+
+options.histo_file = "PrChecker_MuonID.root"
+run_allen_reconstruction(options, hlt1_reco_allen_muonid_efficiency)
diff --git a/Hlt/RecoConf/options/hlt1_reco_allen_pvchecker.py b/Hlt/RecoConf/options/hlt1_reco_allen_pvchecker.py
new file mode 100644
index 0000000000000000000000000000000000000000..a5fd51f195556a403fb860067aab0e35de603687
--- /dev/null
+++ b/Hlt/RecoConf/options/hlt1_reco_allen_pvchecker.py
@@ -0,0 +1,37 @@
+###############################################################################
+# (c) Copyright 2019 CERN for the benefit of the LHCb Collaboration #
+# #
+# This software is distributed under the terms of the GNU General Public #
+# Licence version 3 (GPL Version 3), copied verbatim in the file "COPYING". #
+# #
+# In applying this licence, CERN does not waive the privileges and immunities #
+# granted to it by virtue of its status as an Intergovernmental Organization #
+# or submit itself to any jurisdiction. #
+###############################################################################
+from Moore import options, run_allen_reconstruction
+from Moore.config import Reconstruction
+from RecoConf.hlt1_tracking import require_gec
+from RecoConf.mc_checking import get_pv_checkers, get_track_checkers
+from Configurables import ApplicationMgr
+from Configurables import NTupleSvc
+from RecoConf.hlt1_allen import make_allen_tracks, make_allen_pvs
+
+
+def hlt1_reco_pvchecker():
+
+ allen_tracks = make_allen_tracks()
+ pvs = make_allen_pvs()
+
+ data = [pvs]
+ data += get_pv_checkers(pvs, allen_tracks["Velo"], produce_ntuple=True)
+
+ return Reconstruction('PVperformance', data, [require_gec()])
+
+
+run_allen_reconstruction(options, hlt1_reco_pvchecker)
+
+NTupleSvc().Output += [
+ "FILE1 DATAFILE='Hlt1_PVperformance_Allen.root' TYPE='ROOT' OPT='NEW'"
+]
+ApplicationMgr().ExtSvc += [NTupleSvc()]
+ApplicationMgr().HistogramPersistency = "ROOT"
diff --git a/Hlt/RecoConf/options/hlt1_reco_allen_track_reconstruction.py b/Hlt/RecoConf/options/hlt1_reco_allen_track_reconstruction.py
new file mode 100644
index 0000000000000000000000000000000000000000..4e2a9a44111bb39559bc466a2c94a1d3a4f01557
--- /dev/null
+++ b/Hlt/RecoConf/options/hlt1_reco_allen_track_reconstruction.py
@@ -0,0 +1,47 @@
+###############################################################################
+# (c) Copyright 2019 CERN for the benefit of the LHCb Collaboration #
+# #
+# This software is distributed under the terms of the GNU General Public #
+# Licence version 3 (GPL Version 3), copied verbatim in the file "COPYING". #
+# #
+# In applying this licence, CERN does not waive the privileges and immunities #
+# granted to it by virtue of its status as an Intergovernmental Organization #
+# or submit itself to any jurisdiction. #
+###############################################################################
+from Moore import options, run_allen_reconstruction
+from Moore.config import Reconstruction
+from RecoConf.hlt1_tracking import require_gec
+from RecoConf.hlt1_allen import make_allen_tracks, make_dumped_raw_banks
+from RecoConf.mc_checking import tracker_dumper, pv_dumper, get_track_checkers
+
+dumpBinaries = False
+outputDir = "dump/"
+
+
+def hlt1_reco_allen_tracks():
+
+ allen_tracks = make_allen_tracks()
+
+ types_and_locations_for_checkers = {
+ "Velo": allen_tracks["Velo"],
+ "Upstream": allen_tracks["Upstream"],
+ "Forward": allen_tracks["Forward"],
+ }
+ data = get_track_checkers(types_and_locations_for_checkers)
+
+ if dumpBinaries:
+ data.append(
+ make_dumped_raw_banks(
+ dump_to_file=True, output_dir=outputDir + "banks"))
+ data.append(
+ tracker_dumper(
+ dump_to_root=False,
+ dump_to_binary=True,
+ bin_output_dir=outputDir + "MC_info/tracks"))
+ data.append(pv_dumper(output_dir=outputDir + "MC_info/PVs"))
+
+ return Reconstruction('allen_reco', data, [require_gec()])
+
+
+run_allen_reconstruction(options, hlt1_reco_allen_tracks, dumpBinaries)
diff --git a/Hlt/RecoConf/options/hlt1_reco_allen_trackresolution.py b/Hlt/RecoConf/options/hlt1_reco_allen_trackresolution.py
new file mode 100644
index 0000000000000000000000000000000000000000..46b667812945adc4cbcfd0fd1a6de84d9f5622da
--- /dev/null
+++ b/Hlt/RecoConf/options/hlt1_reco_allen_trackresolution.py
@@ -0,0 +1,31 @@
+###############################################################################
+# (c) Copyright 2019 CERN for the benefit of the LHCb Collaboration #
+# #
+# This software is distributed under the terms of the GNU General Public #
+# Licence version 3 (GPL Version 3), copied verbatim in the file "COPYING". #
+# #
+# In applying this licence, CERN does not waive the privileges and immunities #
+# granted to it by virtue of its status as an Intergovernmental Organization #
+# or submit itself to any jurisdiction. #
+###############################################################################
+from Moore import options, run_allen_reconstruction
+from Moore.config import Reconstruction
+from RecoConf.hlt1_tracking import require_gec
+from RecoConf.hlt1_allen import make_allen_tracks
+from RecoConf.mc_checking import monitor_track_resolution
+from PyConf.application import make_data_with_FetchDataFromFile
+
+
+def hlt1_reco_allen_trackresolution():
+ track_type = "Forward"
+ tracks = make_allen_tracks()[track_type]
+ pr_checker = monitor_track_resolution(tracks)
+
+ return Reconstruction(
+ 'track_resolution',
+ [make_data_with_FetchDataFromFile("/Event/MC/TrackInfo"), pr_checker],
+ [require_gec()])
+
+
+options.histo_file = "Hlt1ForwardTrackingResolutionAllen.root"
+run_allen_reconstruction(options, hlt1_reco_allen_trackresolution)
diff --git a/Hlt/RecoConf/options/hlt1_reco_baseline_with_mcchecking.py b/Hlt/RecoConf/options/hlt1_reco_baseline_with_mcchecking.py
index f7995c44efc4b7e82d4c585e8b7d3fd70531c7de..2842fd1b9deead21f9186069f623d912412db235 100644
--- a/Hlt/RecoConf/options/hlt1_reco_baseline_with_mcchecking.py
+++ b/Hlt/RecoConf/options/hlt1_reco_baseline_with_mcchecking.py
@@ -15,3 +15,5 @@ options.histo_file = "MCMatching_baseline_MiniBias.root"
with standalone_hlt1_reco.bind(do_mc_checking=True):
run_reconstruction(options, standalone_hlt1_reco)
+
+options.histo_file = "MCMatching_baseline_MiniBias.root"
diff --git a/Hlt/RecoConf/options/hlt1_reco_pvchecker.py b/Hlt/RecoConf/options/hlt1_reco_pvchecker.py
new file mode 100644
index 0000000000000000000000000000000000000000..9117561889712f0cebf819d6a5005faebf1d4baf
--- /dev/null
+++ b/Hlt/RecoConf/options/hlt1_reco_pvchecker.py
@@ -0,0 +1,36 @@
+###############################################################################
+# (c) Copyright 2019 CERN for the benefit of the LHCb Collaboration #
+# #
+# This software is distributed under the terms of the GNU General Public #
+# Licence version 3 (GPL Version 3), copied verbatim in the file "COPYING". #
+# #
+# In applying this licence, CERN does not waive the privileges and immunities #
+# granted to it by virtue of its status as an Intergovernmental Organization #
+# or submit itself to any jurisdiction. #
+###############################################################################
+from Moore import options, run_reconstruction
+from Moore.config import Reconstruction
+from RecoConf.hlt1_tracking import require_gec, make_hlt1_tracks, make_VeloKalman_fitted_tracks, make_pvs
+from RecoConf.mc_checking import get_pv_checkers, get_track_checkers
+from Configurables import ApplicationMgr
+from Configurables import NTupleSvc
+
+
+def hlt1_reco_pvchecker():
+
+ hlt1_tracks = make_hlt1_tracks()
+ pvs = make_pvs()
+
+ data = [pvs]
+ data += get_pv_checkers(pvs, hlt1_tracks["Velo"], produce_ntuple=True)
+
+ return Reconstruction('PVperformance', data, [require_gec()])
+
+
+run_reconstruction(options, hlt1_reco_pvchecker)
+
+NTupleSvc().Output += [
+ "FILE1 DATAFILE='Hlt1_PVperformance.root' TYPE='ROOT' OPT='NEW'"
+]
+ApplicationMgr().ExtSvc += [NTupleSvc()]
+ApplicationMgr().HistogramPersistency = "ROOT"
diff --git a/Hlt/RecoConf/python/RecoConf/hlt1_allen.py b/Hlt/RecoConf/python/RecoConf/hlt1_allen.py
new file mode 100644
index 0000000000000000000000000000000000000000..497131f5401123422bc2416698faa634dd14a465
--- /dev/null
+++ b/Hlt/RecoConf/python/RecoConf/hlt1_allen.py
@@ -0,0 +1,122 @@
+###############################################################################
+# (c) Copyright 2019 CERN for the benefit of the LHCb Collaboration #
+# #
+# This software is distributed under the terms of the GNU General Public #
+# Licence version 3 (GPL Version 3), copied verbatim in the file "COPYING". #
+# #
+# In applying this licence, CERN does not waive the privileges and immunities #
+# granted to it by virtue of its status as an Intergovernmental Organization #
+# or submit itself to any jurisdiction. #
+###############################################################################
+from PyConf import configurable
+
+from PyConf.application import default_raw_event, make_odin
+
+from PyConf.Algorithms import (
+ LHCb__Converters__Track__v1__fromV2TrackV1Track as FromV2TrackV1Track,
+ AllenVeloToV2Tracks, AllenUTToV2Tracks, AllenForwardToV2Tracks,
+ AllenPVsToRecVertexV2, AllenDecReportsToTES, DumpRawBanks, RunAllen)
+from PyConf.Algorithms import HltDecReportsDecoder
+
+
+@configurable
+def make_dumped_raw_banks(make_raw=default_raw_event,
+ odin_location=make_odin,
+ dump_to_file=False,
+ output_dir="dump/banks"):
+ return DumpRawBanks(
+ RawEventLocation=make_raw(),
+ ODINLocation=odin_location(),
+ DumpToFile=dump_to_file,
+ OutputDirectory=output_dir).AllenRawInput
+
+
+def make_allen_output(odin_location=make_odin,
+ dumped_raw_banks=make_dumped_raw_banks,
+ filter_hlt1=False,
+ dump_to_file=False):
+ return RunAllen(
+ AllenRawInput=dumped_raw_banks(dump_to_file=dump_to_file),
+ ODINLocation=odin_location(),
+ ParamDir="${ALLEN_PROJECT_ROOT}/input/detector_configuration/down/",
+ FilterHLT1=filter_hlt1).AllenOutput
+
+
+def make_allen_dec_reports(odin_location=make_odin,
+ dumped_raw_banks=make_dumped_raw_banks,
+ filter_hlt1=False,
+ dump_to_file=False):
+ return RunAllen(
+ AllenRawInput=dumped_raw_banks(dump_to_file=dump_to_file),
+ ODINLocation=odin_location(),
+ ParamDir="${ALLEN_PROJECT_ROOT}/input/detector_configuration/down/",
+ FilterHLT1=filter_hlt1).DecReportsLocation
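+
+
+# Note: make_allen_output and make_allen_dec_reports configure RunAllen with
+# identical properties; assuming PyConf's de-duplication of identically
+# configured algorithms, both refer to the same RunAllen instance, so Allen
+# runs only once per event.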
+
+
+def make_allen_velo_tracks():
+ allen_output = make_allen_output()
+ velo_tracks_v2 = AllenVeloToV2Tracks(AllenOutput=allen_output).OutputTracks
+ velo_tracks_v1 = FromV2TrackV1Track(
+ InputTracksName=velo_tracks_v2).OutputTracksName
+
+ return {"v2": velo_tracks_v2, "v1": velo_tracks_v1}
+
+
+def make_allen_veloUT_tracks():
+ allen_output = make_allen_output()
+ veloUT_tracks_v2 = AllenUTToV2Tracks(AllenOutput=allen_output).OutputTracks
+ veloUT_tracks_v1 = FromV2TrackV1Track(
+ InputTracksName=veloUT_tracks_v2).OutputTracksName
+
+ return {"v2": veloUT_tracks_v2, "v1": veloUT_tracks_v1}
+
+
+def make_allen_forward_tracks():
+ allen_output = make_allen_output()
+ forward_tracks_v2 = AllenForwardToV2Tracks(
+ AllenOutput=allen_output).OutputTracks
+ forward_tracks_v1 = FromV2TrackV1Track(
+ InputTracksName=forward_tracks_v2).OutputTracksName
+
+ return {"v2": forward_tracks_v2, "v1": forward_tracks_v1}
+
+
+def make_allen_forward_muon_tracks():
+ allen_output = make_allen_output()
+ forward_muon_tracks_v2 = AllenForwardToV2Tracks(
+ AllenOutput=allen_output).OutputMuonTracks
+ forward_muon_tracks_v1 = FromV2TrackV1Track(
+ InputTracksName=forward_muon_tracks_v2).OutputTracksName
+
+ return {"v2": forward_muon_tracks_v2, "v1": forward_muon_tracks_v1}
+
+
+def make_allen_tracks():
+ velo_tracks = make_allen_velo_tracks()
+ velo_ut_tracks = make_allen_veloUT_tracks()
+ forward_tracks = make_allen_forward_tracks()
+
+ return {
+ "Velo": velo_tracks,
+ "Upstream": velo_ut_tracks,
+ "Forward": forward_tracks,
+ }
+
+
+def make_allen_pvs():
+ allen_output = make_allen_output()
+ return AllenPVsToRecVertexV2(AllenOutput=allen_output).OutputPVs
+
+
+def make_allen_raw_dec_reports(filterHLT1=False):
+ allen_output = make_allen_output(filter_hlt1=filterHLT1)
+ allen_dec_reports_raw = AllenDecReportsToTES(
+ AllenOutput=allen_output).OutputDecReports
+ return allen_dec_reports_raw
+
+
+def decode_allen_dec_reports():
+ allen_dec_reports_raw = make_allen_raw_dec_reports()
+ return HltDecReportsDecoder(
+ RawEventLocations=allen_dec_reports_raw,
+ SourceID=1).OutputHltDecReportsLocation
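+
+
+# Minimal usage sketch (mirrors the reconstruction options files such as
+# hlt1_reco_allen_IPresolution.py):
+#   tracks = make_allen_tracks()          # {"Velo": ..., "Upstream": ..., "Forward": ...}
+#   forward_v1 = tracks["Forward"]["v1"]  # v1 track container for the checkers
+#   pvs = make_allen_pvs()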
diff --git a/Hlt/RecoConf/python/RecoConf/mc_checking.py b/Hlt/RecoConf/python/RecoConf/mc_checking.py
index b5f67f7248134186070c93e0dc4a34acb1837abf..3633424d8be0446faae3fea7fb256224e2d9bbe1 100644
--- a/Hlt/RecoConf/python/RecoConf/mc_checking.py
+++ b/Hlt/RecoConf/python/RecoConf/mc_checking.py
@@ -16,24 +16,28 @@ from PyConf.tonic import (configurable)
from PyConf.dataflow import DataHandle
from PyConf.components import Tool
-from PyConf.application import default_raw_event, make_data_with_FetchDataFromFile
+from PyConf.application import (
+ default_raw_event,
+ make_data_with_FetchDataFromFile,
+ make_odin,
+)
from PyConf.Algorithms import (
VPFullCluster2MCParticleLinker, PrLHCbID2MCParticle,
PrLHCbID2MCParticleVPUTFTMU, PrTrackAssociator, PrTrackChecker,
PrUTHitChecker, TrackListRefiner, TrackResChecker,
TrackIPResolutionCheckerNT, DataPacking__Unpack_LHCb__MCVPHitPacker_,
- MCParticle2MCHitAlg,
+ MCParticle2MCHitAlg, PrTrackerDumper, PVDumper,
LHCb__Converters__RecVertex__v2__fromVectorLHCbRecVertices as
- FromVectorLHCbRecVertex)
+ FromVectorLHCbRecVertex, PrimaryVertexChecker)
from PyConf.Tools import (IdealStateCreator, LoKi__Hybrid__MCTool,
VisPrimVertTool, LoKi__Hybrid__TrackSelector as
LoKiTrackSelector)
-from RecoConf.hlt1_tracking import (make_PrStoreUTHit_hits,
- make_FTRawBankDecoder_clusters,
- make_velo_full_clusters)
+from RecoConf.hlt1_tracking import (
+ make_PrStoreUTHit_hits, make_FTRawBankDecoder_clusters,
+ make_velo_full_clusters, make_VPClus_hits, make_PrStoreFTHit_hits)
from Hlt2Conf.data_from_file import mc_unpackers
@@ -263,6 +267,34 @@ def get_best_tracks_checkers(
efficiency_checkers.append(checker)
return efficiency_checkers
+@configurable
+def get_pv_checkers(
+ pvs,
+ tracks,
+ produce_ntuple=False,
+ make_links_lhcbids_mcparticles=make_links_lhcbids_mcparticles_tracking_system
+):
+
+ assert isinstance(
+ pvs, DataHandle), "Please provide reconstructed primary verticies"
+
+ links_to_lhcbids = make_links_lhcbids_mcparticles()
+ links_to_tracks = make_links_tracks_mcparticles(
+ InputTracks=tracks, LinksToLHCbIDs=links_to_lhcbids)
+ pv_checkers = []
+
+ pvchecker = PrimaryVertexChecker(
+ produceNtuple=produce_ntuple,
+ inputVerticesName=pvs,
+ inputTracksName=tracks["v1"],
+ MCVertexInput=mc_unpackers()["MCVertices"],
+ MCParticleInput=mc_unpackers()["MCParticles"],
+ MCHeaderLocation=make_data_with_FetchDataFromFile("/Event/MC/Header"),
+ MCPropertyInput=make_data_with_FetchDataFromFile(
+ "/Event/MC/TrackInfo"))
+
+ pv_checkers.append(pvchecker)
+ return pv_checkers
def make_track_filter(InputTracks, code):
selector = LoKiTrackSelector(Code=code, StatPrint=True)
@@ -325,3 +357,32 @@ def monitor_IPresolution(InputTracks, InputPVs, VeloTracks):
PVContainer=vertexConverter,
NTupleLUN="FILE1")
return IPres_checker
+
+
+def tracker_dumper(odin_location=make_odin,
+ root_output_dir="dump/TrackerDumper",
+ bin_output_dir="dump/MC_info/tracks",
+ dump_to_root=True,
+ dump_to_binary=False):
+ links_to_lhcbids = make_links_lhcbids_mcparticles_tracking_system()
+
+ return PrTrackerDumper(
+ MCParticlesLocation=mc_unpackers()["MCParticles"],
+ VPLightClusterLocation=make_VPClus_hits(),
+ FTHitsLocation=make_PrStoreFTHit_hits(),
+ UTHitsLocation=make_PrStoreUTHit_hits(),
+ ODINLocation=odin_location(),
+ LinkerLocation=links_to_lhcbids,
+ DumpToROOT=dump_to_root,
+ DumpToBinary=dump_to_binary,
+ OutputDirectory=root_output_dir,
+ MCOutputDirectory=bin_output_dir)
+
+
+def pv_dumper(odin_location=make_odin, output_dir="dump/MC_info/PVs"):
+ return PVDumper(
+ MCVerticesLocation=mc_unpackers()["MCVertices"],
+ MCPropertyLocation=make_data_with_FetchDataFromFile(
+ "/Event/MC/TrackInfo"),
+ ODINLocation=odin_location(),
+ OutputDirectory=output_dir)
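+
+
+# Usage sketch (as in dump_binary_input_for_standalone_Allen.py): the dumpers
+# are combined in a Reconstruction node, e.g.
+#   tracker_dumper(dump_to_root=False, dump_to_binary=True,
+#                  bin_output_dir="dump/MC_info/tracks")
+#   pv_dumper(output_dir="dump/MC_info/PVs")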
diff --git a/Hlt/RecoConf/python/RecoConf/mc_checking_categories.py b/Hlt/RecoConf/python/RecoConf/mc_checking_categories.py
index 4aa653884f56e31082eb7ddd854093d7541d771f..2b21facfad3135804a34cd56f3217aec5b2130f0 100644
--- a/Hlt/RecoConf/python/RecoConf/mc_checking_categories.py
+++ b/Hlt/RecoConf/python/RecoConf/mc_checking_categories.py
@@ -14,19 +14,40 @@ from collections import OrderedDict
categories = {
"Velo": {
- "01_velo": "isVelo",
- "02_long": "isLong",
- "03_long_P>5GeV": "isLong & over5",
- "04_long_strange": "isLong & strange",
- "05_long_strange_P>5GeV": "isLong & strange & over5",
- "06_long_fromB": "isLong & fromB",
- "07_long_fromB_P>5GeV": "isLong & fromB & over5",
- "08_long_electrons": "isLong & isElectron",
- "09_long_fromB_electrons": "isLong & isElectron & fromB",
+ "01_velo":
+ "isVelo",
+ "02_long":
+ "isLong",
+ "03_long_P>5GeV":
+ "isLong & over5",
+ "04_long_strange":
+ "isLong & strange",
+ "05_long_strange_P>5GeV":
+ "isLong & strange & over5",
+ "06_long_fromB":
+ "isLong & fromB",
+ "06_long_fromD":
+ "isLong & fromD",
+ "07_long_fromB_P>5GeV":
+ "isLong & fromB & over5",
+ "07_long_fromD_P>5GeV":
+ "isLong & fromD & over5",
+ "08_long_electrons":
+ "isLong & isElectron",
+ "09_long_fromB_electrons":
+ "isLong & isElectron & fromB",
"10_long_fromB_electrons_P>5GeV":
"isLong & isElectron & over5 & fromB",
- "11_long_fromB_P>3GeV_Pt>0.5GeV": "isLong & fromB & trigger",
- "12_UT_long_fromB_P>3GeV_Pt>0.5GeV": "isLong & fromB & trigger & isUT",
+ "11_long_fromB_P>3GeV_Pt>0.5GeV":
+ "isLong & fromB & trigger",
+ "11_long_fromB_electrons_P>3GeV_Pt>0.5GeV":
+ "isLong & fromB & trigger & isElectron",
+ "11_long_strange_P>3GeV_Pt>0.5GeV":
+ "isLong & strange& trigger",
+ "11_long_fromD_P>3GeV_Pt>0.5GeV":
+ "isLong & fromD & trigger",
+ "12_UT_long_fromB_P>3GeV_Pt>0.5GeV":
+ "isLong & fromB & trigger & isUT",
},
"VeloFull": {
"01_notElectron_Velo":
@@ -111,36 +132,84 @@ categories = {
"isElectron & isLong & strange & (MCETA>2.0) & (MCETA<5.0) & (MCP>3000) & (MCPT>400)"
},
"Forward": {
- "01_long": "isLong",
- "02_long_P>5GeV": "isLong & over5",
- "03_long_strange": "isLong & strange",
- "04_long_strange_P>5GeV": "isLong & strange & over5",
- "05_long_fromB": "isLong & fromB",
- "06_long_fromB_P>5GeV": "isLong & fromB & over5",
- "07_long_electrons": "isLong & isElectron",
- "08_long_fromB_electrons": "isLong & isElectron & fromB",
+ "01_long":
+ "isLong",
+ "02_long_P>5GeV":
+ "isLong & over5",
+ "03_long_strange":
+ "isLong & strange",
+ "04_long_strange_P>5GeV":
+ "isLong & strange & over5",
+ "05_long_fromB":
+ "isLong & fromB",
+ "05_long_fromD":
+ "isLong & fromD",
+ "06_long_fromB_P>5GeV":
+ "isLong & fromB & over5",
+ "06_long_fromD_P>5GeV":
+ "isLong & fromD & over5",
+ "07_long_electrons":
+ "isLong & isElectron",
+ "08_long_fromB_electrons":
+ "isLong & isElectron & fromB",
"09_long_fromB_electrons_P>5GeV":
"isLong & isElectron & over5 & fromB",
- "10_long_fromB_P>3GeV_Pt>0.5GeV": "isLong & fromB & trigger",
- "11_UT_long_fromB_P>3GeV_Pt>0.5GeV": "isLong & fromB & trigger & isUT",
+ "10_long_fromB_P>3GeV_Pt>0.5GeV":
+ "isLong & fromB & trigger",
+ "10_long_fromB_electrons_P>3GeV_Pt>0.5GeV":
+ "isLong & fromB & trigger & isElectron",
+ "10_long_strange_P>3GeV_Pt>0.5GeV":
+ "isLong & strange & trigger",
+ "10_long_fromD_P>3GeV_Pt>0.5GeV":
+ "isLong & fromD & trigger",
+ "11_UT_long_fromB_P>3GeV_Pt>0.5GeV":
+ "isLong & fromB & trigger & isUT",
},
"Upstream": {
- "01_velo": "isVelo",
- "02_velo+UT": "isVelo & isUT",
- "03_velo+UT_P>5GeV": "isVelo & isUT & over5",
- "04_velo+notLong": "isNotLong & isVelo ",
- "05_velo+UT+notLong": "isNotLong & isVelo & isUT",
- "06_velo+UT+notLong_P>5GeV": "isNotLong & isVelo & isUT & over5",
- "07_long": "isLong",
- "08_long_P>5GeV": "isLong & over5 ",
- "09_long_fromB": "isLong & fromB",
- "10_long_fromB_P>5GeV": "isLong & fromB & over5",
- "11_long_electrons": "isLong & isElectron",
- "12_long_fromB_electrons": "isLong & isElectron & fromB",
+ "01_velo":
+ "isVelo",
+ "02_velo+UT":
+ "isVelo & isUT",
+ "03_velo+UT_P>5GeV":
+ "isVelo & isUT & over5",
+ "04_velo+notLong":
+ "isNotLong & isVelo ",
+ "05_velo+UT+notLong":
+ "isNotLong & isVelo & isUT",
+ "06_velo+UT+notLong_P>5GeV":
+ "isNotLong & isVelo & isUT & over5",
+ "07_long":
+ "isLong",
+ "07_long_strange":
+ "isLong & strange",
+ "08_long_P>5GeV":
+ "isLong & over5 ",
+ "08_long_strange_P>5GeV":
+ "isLong & over5 & strange",
+ "09_long_fromB":
+ "isLong & fromB",
+ "09_long_fromD":
+ "isLong & fromD",
+ "10_long_fromB_P>5GeV":
+ "isLong & fromB & over5",
+ "10_long_fromD_P>5GeV":
+ "isLong & fromD & over5",
+ "11_long_electrons":
+ "isLong & isElectron",
+ "12_long_fromB_electrons":
+ "isLong & isElectron & fromB",
"13_long_fromB_electrons_P>5GeV":
"isLong & isElectron & over5 & fromB",
- "14_long_fromB_P>3GeV_Pt>0.5GeV": "isLong & fromB & trigger",
- "15_UT_long_fromB_P>3GeV_Pt>0.5GeV": "isLong & fromB & trigger & isUT",
+ "14_long_fromB_P>3GeV_Pt>0.5GeV":
+ "isLong & fromB & trigger",
+ "14_long_fromB_electrons_P>3GeV_Pt>0.5GeV":
+ "isLong & fromB & isElectron & trigger",
+ "14_long_strange_P>3GeV_Pt>0.5GeV":
+ "isLong & strange& trigger",
+ "14_long_fromD_P>3GeV_Pt>0.5GeV":
+ "isLong & fromD & trigger",
+ "15_UT_long_fromB_P>3GeV_Pt>0.5GeV":
+ "isLong & fromB & trigger & isUT",
},
"UpstreamForMuons": {
"01_long_muon": "isLong & isMuon",
@@ -148,10 +217,20 @@ categories = {
"03_long_pion": "isLong & isPion",
},
"MuonMatch": {
- "01_long": "isLong",
- "02_long_muon": "isLong & isMuon",
- "03_long_muon_from_strange": "isLong & strange & isMuon",
- "04_long_pion": "isLong & isPion",
+ "01_long":
+ "isLong",
+ "02_long_muon":
+ "isLong & isMuon",
+ "02_long_muon_P>3GeV_Pt>0.5GeV":
+ "isLong & isMuon & (MCPT>500) & (MCP>3000)",
+ "03_long_muon_from_strange":
+ "isLong & strange & isMuon",
+ "03_long_muon_strange_P>3GeV_Pt>0.5GeV":
+ "isLong & isMuon & (MCPT>500) & (MCP>3000) & strange",
+ "04_long_pion":
+ "isLong & isPion",
+ "04_long_pion_P>3GeV_Pt>0.5GeV":
+ "isLong & isPion & (MCPT>500) & (MCP>3000)",
},
"MuonID": {
"01_long": "isLong",
diff --git a/Hlt/RecoConf/scripts/PrCheckerEfficiency_HLT1.py b/Hlt/RecoConf/scripts/PrCheckerEfficiency_HLT1.py
index 0bca93d296350a02da32c9a68a8cd4f961468730..3a387f42b007623a320624f4ec8834c75c3872fc 100644
--- a/Hlt/RecoConf/scripts/PrCheckerEfficiency_HLT1.py
+++ b/Hlt/RecoConf/scripts/PrCheckerEfficiency_HLT1.py
@@ -35,7 +35,7 @@ gROOT.SetBatch(True)
def getEfficiencyHistoNames():
- return ["eta", "p", "pt", "phi", "nPV"]
+ return ["eta", "p", "pt", "phi", "nPV"] #, "docaz"]
def getTrackers():
@@ -51,6 +51,14 @@ def getOriginFolders():
return basedict
+def getTrackNames():
+ basedict = {"Velo": {}, "Upstream": {}, "Forward": {}}
+
+ basedict["Velo"] = "Velo"
+ basedict["Upstream"] = "VeloUT"
+ basedict["Forward"] = "Forward"
+
+ return basedict
def get_colors():
return [kBlack, kAzure, kGreen + 3, kMagenta + 2, kOrange, kCyan + 2]
@@ -65,7 +73,11 @@ def get_fillstyles():
def getGhostHistoNames():
- return ["eta", "nPV", "pt", "p"]
+ basedict = {"Velo": {}, "Upstream": {}, "Forward": {}}
+ basedict["Velo"] = ["eta", "nPV"]
+ basedict["Upstream"] = ["eta", "nPV", "pt", "p"]
+ basedict["Forward"] = ["eta", "nPV", "pt", "p"]
+ return basedict
def argument_parser():
@@ -126,7 +138,7 @@ def get_eff(eff, hist, tf, histoName, label, var):
teff.SetStatisticOption(7)
eff[lab] = teff.CreateGraph()
eff[lab].SetName(lab)
- eff[lab].SetTitle(lab + "not elec.")
+    eff[lab].SetTitle(lab + " not electron")
if (histoName.find('strange') != -1):
eff[lab].SetTitle(lab + " from stranges")
if (histoName.find('electron') != -1):
@@ -134,11 +146,11 @@ def get_eff(eff, hist, tf, histoName, label, var):
hist[lab] = denominator.Clone()
hist[lab].SetName("h_numerator_notElectrons")
- hist[lab].SetTitle(var + " histo. not elec.")
+ hist[lab].SetTitle(var + " distribution, not electron")
if (histoName.find('strange') != -1):
- hist[lab].SetTitle(var + " histo. stranges")
+ hist[lab].SetTitle(var + " distribution, stranges")
if (histoName.find('electron') != -1):
- hist[lab].SetTitle(var + " histo. electron")
+ hist[lab].SetTitle(var + " distribution, electron")
return eff, hist
@@ -172,6 +184,7 @@ def PrCheckerEfficiency(filename, outfile, label, plotstyle, dist, plotelec,
cuts = getCuts()
trackers = getTrackers()
folders = getOriginFolders()
+ names = getTrackNames()
for tracker in trackers:
outputfile.cd()
@@ -232,7 +245,9 @@ def PrCheckerEfficiency(filename, outfile, label, plotstyle, dist, plotelec,
for lab in label:
eff[lab].Draw("P SAME")
cutName = categories[tracker][cut]["title"]
- latex.DrawLatex(0.7, 0.85, "LHCb simultaion")
+ latex.DrawLatex(0.7, 0.85, "LHCb simulation")
+ track_name = names[tracker] + " tracks"
+ latex.DrawLatex(0.7, 0.75, track_name)
latex.DrawLatex(0.35, 0.3, cutName)
canvasName = tracker + "_" + cut + "_" + histo + ".pdf"
if savepdf:
@@ -272,6 +287,8 @@ def PrCheckerEfficiency(filename, outfile, label, plotstyle, dist, plotelec,
scale = gPad.GetUymax() / rightmax
hist_elec[lab].Scale(scale)
hist_elec[lab].Draw("hist SAME")
+ set_style(hist_elec[label[0]], kGray + 1,
+ markers[i], styles[i])
else:
rightmax = 1.05 * hist_elec[label[0]].GetMaximum()
scale = gPad.GetUymax() / rightmax
@@ -283,7 +300,9 @@ def PrCheckerEfficiency(filename, outfile, label, plotstyle, dist, plotelec,
canvas_elec.PlaceLegend()
for lab in label:
eff_elec[lab].Draw("P SAME")
- latex.DrawLatex(0.7, 0.85, "LHCb simultaion")
+ latex.DrawLatex(0.7, 0.85, "LHCb simulation")
+ track_name = names[tracker] + " tracks"
+ latex.DrawLatex(0.7, 0.75, track_name)
latex.DrawLatex(0.35, 0.3, cutName)
canvasName_elec = tracker + "_" + cut + "_" + histo + "_elec.pdf"
if savepdf:
@@ -331,46 +350,51 @@ def PrCheckerEfficiency(filename, outfile, label, plotstyle, dist, plotelec,
for lab in label:
eff[lab].Draw("P SAME")
eff_elec[lab].Draw("P SAME")
- latex.DrawLatex(0.7, 0.85, "LHCb simultaion")
+ latex.DrawLatex(0.7, 0.85, "LHCb simulation")
+ track_name = names[tracker] + " tracks"
+ latex.DrawLatex(0.7, 0.75, track_name)
latex.DrawLatex(0.35, 0.3, cutName)
canvas_com.SetRightMargin(0.05)
canvas_com.Write()
# calculate ghost rate
- if tracker == "Forward":
- histoBaseName = "Track/" + folder + tracker + "/"
- for histo in ghostHistos:
- trackerDir.cd()
- title = "ghost rate vs " + histo
- canvas = TCanvas(title, title)
- gPad.SetTicks()
- numeratorName = histoBaseName + ghostHistoDict[histo][
- "variable"] + "_Ghosts"
- denominatorName = histoBaseName + ghostHistoDict[histo][
- "variable"] + "_Total"
- print("ghost histo: " + histoBaseName + " " + numeratorName +
- " " + denominatorName)
- ghost = {}
- mg = TMultiGraph()
- for i, lab in enumerate(label):
- numerator = tf[lab].Get(numeratorName)
- denominator = tf[lab].Get(denominatorName)
-
- teff = TEfficiency(numerator, denominator)
- teff.SetStatisticOption(7)
- ghost[lab] = teff.CreateGraph()
- ghost[lab].SetName(lab)
- set_style(ghost[lab], colors[i], markers[i], styles[i])
- mg.Add(ghost[lab])
-
- xtitle = ghostHistoDict[histo]["xTitle"]
- mg.GetXaxis().SetTitle(xtitle)
- mg.GetYaxis().SetTitle("ghost rate")
- mg.Draw("ap")
- canvas.PlaceLegend()
- if savepdf:
- canvas.SaveAs("ghost_rate_" + histo + ".pdf")
- canvas.Write()
+ histoBaseName = "Track/" + folder + tracker + "/"
+ for histo in ghostHistos[tracker]:
+ trackerDir.cd()
+ title = "ghost rate vs " + histo
+ canvas = TCanvas(title, title)
+ gPad.SetTicks()
+ numeratorName = histoBaseName + ghostHistoDict[histo][
+ "variable"] + "_Ghosts"
+ denominatorName = histoBaseName + ghostHistoDict[histo][
+ "variable"] + "_Total"
+ print("ghost histo: " + histoBaseName + " " + numeratorName +
+ " " + denominatorName)
+ ghost = {}
+ mg = TMultiGraph()
+ for i, lab in enumerate(label):
+ numerator = tf[lab].Get(numeratorName)
+ denominator = tf[lab].Get(denominatorName)
+ print("Numerator = " + numeratorName)
+ print("Denominator = " + denominatorName)
+ teff = TEfficiency(numerator, denominator)
+ teff.SetStatisticOption(7)
+ ghost[lab] = teff.CreateGraph()
+ ghost[lab].SetName(lab)
+ set_style(ghost[lab], colors[i], markers[i], styles[i])
+ mg.Add(ghost[lab])
+
+ xtitle = ghostHistoDict[histo]["xTitle"]
+ mg.GetXaxis().SetTitle(xtitle)
+ mg.GetYaxis().SetTitle("ghost rate")
+ mg.Draw("ap")
+ latex.DrawLatex(0.7, 0.85, "LHCb simulation")
+ track_name = names[tracker] + " tracks"
+ latex.DrawLatex(0.7, 0.75, track_name)
+ #canvas.PlaceLegend()
+ if savepdf:
+ canvas.SaveAs("ghost_rate_" + histo + ".pdf")
+ canvas.Write()
outputfile.Write()
outputfile.Close()
diff --git a/Hlt/RecoConf/scripts/PrCheckerMuonIDEff.py b/Hlt/RecoConf/scripts/PrCheckerMuonIDEff.py
index 08b19748de1685660e1faa9c091cee8701d9e0f8..f1251673498f9d7757dc6298d4e7353494180f90 100644
--- a/Hlt/RecoConf/scripts/PrCheckerMuonIDEff.py
+++ b/Hlt/RecoConf/scripts/PrCheckerMuonIDEff.py
@@ -112,14 +112,14 @@ def get_eff(eff, hist, tf, histoName, histoName_Den, label, var, printval):
teff.SetStatisticOption(7)
eff[lab] = teff.CreateGraph()
eff[lab].SetName(lab)
- eff[lab].SetTitle(lab + "not elec.")
+ eff[lab].SetTitle(lab)
if (histoName.find('strange') != -1):
eff[lab].SetTitle(lab + " from stranges")
if (histoName.find('electron') != -1):
eff[lab].SetTitle(lab + " electron")
hist[lab] = denominator.Clone()
hist[lab].SetName("h_numerator_notElectrons")
- hist[lab].SetTitle(var + " histo. not elec.")
+ hist[lab].SetTitle(var + " distribution")
if (histoName.find('strange') != -1):
hist[lab].SetTitle(var + " histo. stranges")
if (histoName.find('electron') != -1):
diff --git a/Hlt/RecoConf/scripts/PrCheckerTrackResolution.py b/Hlt/RecoConf/scripts/PrCheckerTrackResolution.py
index d2801945169eb4ed08864dd272631fb38acf19e2..e4e7c26582a2c1e9c2c77db33d69fc9325bb550d 100644
--- a/Hlt/RecoConf/scripts/PrCheckerTrackResolution.py
+++ b/Hlt/RecoConf/scripts/PrCheckerTrackResolution.py
@@ -80,6 +80,10 @@ def PrCheckerTrackResolution(filename, label, outfile, savepdf, plotstyle):
from Legend import place_legend
setLHCbStyle()
+ latex = TLatex()
+ latex.SetNDC()
+ latex.SetTextSize(0.05)
+
markers = get_markers()
colors = get_colors()
styles = get_fillstyles()
@@ -101,7 +105,7 @@ def PrCheckerTrackResolution(filename, label, outfile, savepdf, plotstyle):
"Track/TrackResChecker/ALL/vertex/dpoverp_vs_p")
hdp_p[lab].SetName("dpoverp_p_" + lab)
hmom[lab] = hdp_p[lab].ProjectionX()
- hmom[lab].SetTitle("p histo. " + lab)
+ hmom[lab].SetTitle("p distribution " + lab)
hdp_p[lab].FitSlicesY()
hres_p[lab] = gDirectory.Get("dpoverp_p_" + lab + "_2")
@@ -132,10 +136,12 @@ def PrCheckerTrackResolution(filename, label, outfile, savepdf, plotstyle):
"+-" + format(hres_p[lab].GetBinError(i), '.2f') + ")%")
print("-----------------------------------------------------")
+
canvas1.PlaceLegend()
for lab in label:
hres_p[lab].Draw("E1 p1 same")
canvas1.SetRightMargin(0.05)
+ latex.DrawLatex(0.7, 0.85, "LHCb simulation")
canvas1.Write()
if savepdf:
canvas1.SaveAs("trackres_p.pdf")
@@ -149,7 +155,7 @@ def PrCheckerTrackResolution(filename, label, outfile, savepdf, plotstyle):
hdp_eta[lab].SetName("dpoverp_eta_" + lab)
hdp_eta[lab].FitSlicesY()
heta[lab] = hdp_eta[lab].ProjectionX()
- heta[lab].SetTitle("#eta histo. " + lab)
+ heta[lab].SetTitle("#eta distribution " + lab)
hres_eta[lab] = gDirectory.Get("dpoverp_eta_" + lab + "_2")
hres_eta[lab].GetYaxis().SetTitle("dp/p [%]")
@@ -181,11 +187,12 @@ def PrCheckerTrackResolution(filename, label, outfile, savepdf, plotstyle):
format(hres_eta[lab].GetBinContent(i), '.2f') + "+-" +
format(hres_eta[lab].GetBinError(i), '.2f') + ")%")
print("-----------------------------------------------------")
-
+
canvas2.PlaceLegend()
for lab in label:
hres_eta[lab].Draw("E1 p1 same")
canvas2.SetRightMargin(0.05)
+ latex.DrawLatex(0.7, 0.85, "LHCb simulation")
canvas2.Write()
if savepdf:
canvas2.SaveAs("trackres_eta.pdf")
diff --git a/Hlt/RecoConf/scripts/PrimaryVertexCheckerEfficiency.py b/Hlt/RecoConf/scripts/PrimaryVertexCheckerEfficiency.py
new file mode 100644
index 0000000000000000000000000000000000000000..bfc3151734f55ee9f358c50cd02724f040294079
--- /dev/null
+++ b/Hlt/RecoConf/scripts/PrimaryVertexCheckerEfficiency.py
@@ -0,0 +1,154 @@
+###############################################################################
+# (c) Copyright 2019 CERN for the benefit of the LHCb Collaboration #
+# #
+# This software is distributed under the terms of the GNU General Public #
+# Licence version 3 (GPL Version 3), copied verbatim in the file "COPYING". #
+# #
+# In applying this licence, CERN does not waive the privileges and immunities #
+# granted to it by virtue of its status as an Intergovernmental Organization #
+# or submit itself to any jurisdiction. #
+###############################################################################
+#!/usr/bin/python
+
+# Script for plotting the PV efficiency as a function
+# of various variables: nTracks, z, r.
+# The NTuple created by hlt1_reco_pvchecker.py
+# is needed as input.
+#
+# The efficiency is calculated using TGraphAsymmErrors
+# with Bayesian error bars.
+#
+# author: Agnieszka Dziurda (agnieszka.dziurda@cern.ch)
+# date: 02/2020
+#
+# Example of usage:
+# ../../../run python PrimaryVertexCheckerEfficiency.py
+# --file file1.root file2.root --label name1 name2 --dist
+#
+
+import os, sys
+import argparse
+import ROOT
+
+from ROOT import TFile, TTree
+from ROOT import kRed, kBlue, kOrange, kMagenta, kGreen, kCyan, kGray
+from ROOT import gROOT, TLegend
+
+parser = argparse.ArgumentParser()
+parser.add_argument(
+ '--file', dest='fileName', default="", nargs='+', help='filename to plot')
+parser.add_argument(
+ '--label', dest='label', default="", nargs='+', help='labels for files')
+parser.add_argument(
+ '--tree',
+ dest='treeName',
+ default="",
+ nargs='+',
+ help='tree name to plot',
+)
+parser.add_argument(
+ '--smog',
+ dest='smog',
+ default=False,
+ action='store_true',
+ help='set true for SMOG')
+parser.add_argument(
+ '--multi',
+ dest='multi',
+ default=False,
+ action='store_true',
+ help='add multiplicity plots')
+parser.add_argument(
+ '--dist',
+ dest='dist',
+ default=False,
+ action='store_true',
+ help='plot distributions in the canvas')
+parser.add_argument(
+ '--prefix',
+ dest='prefix',
+ default="pv_eff",
+ help='prefix for the plot name',
+)
+parser.add_argument(
+ '--dir',
+ dest='directory',
+ default=os.getcwd(),
+ help='directory containing the utils/ helper modules',
+)
+
+parser.add_argument(
+ '--offset',
+ dest='offset',
+ default=0,
+ help='offset for plot colors',
+)
+
+
+def get_labels(number_of_files):
+ label = []
+ for i in range(0, number_of_files):
+ label.append("PV Checker {number}".format(number=str(i + 1)))
+ return label
+
+
+if __name__ == '__main__':
+ args = parser.parse_args()
+ path = args.directory + "/utils/"
+ sys.path.append(os.path.abspath(path))
+ offset = int(args.offset)
+
+ gROOT.SetBatch()
+
+ from pvutils import get_default_tree_name
+ from pvutils import get_files, get_trees, get_eff, plot
+
+ from pvconfig import get_variable_ranges
+ from pvconfig import get_style, get_categories
+ from pvconfig import set_legend
+
+ ranges = get_variable_ranges(args.smog)
+ style = get_style()
+
+ label = args.label
+ if args.label == "":
+ label = get_labels(len(args.fileName))
+
+ tr = {}
+ tf = {}
+ tf = get_files(tf, label, args.fileName)
+ tr = get_trees(tf, tr, label, args.treeName, True)
+
+ eff = {}
+ eff["tracks"] = {}
+ eff["z"] = {}
+ eff["r"] = {}
+
+ hist = {}
+ hist["tracks"] = {}
+ hist["z"] = {}
+ hist["r"] = {}
+
+ cat = get_categories(args.multi, args.smog)
+
+ eff["tracks"], hist["tracks"] = get_eff(eff["tracks"], hist["tracks"], tr,
+ "nrectrmc", style, ranges, cat,
+ label, offset)
+ eff["z"], hist["z"] = get_eff(eff["z"], hist["z"], tr, "zMC", style,
+ ranges, cat, label, offset)
+ eff["r"], hist["r"] = get_eff(eff["r"], hist["r"], tr, "rMC", style,
+ ranges, cat, label, offset)
+
+ if args.dist:
+ legend = TLegend(0.15, 0.82, 0.88, 0.98)
+ else:
+ legend = TLegend(0.15, 0.86, 0.88, 0.98)
+ legend = set_legend(legend, label, eff["tracks"], "eff", hist["z"],
+ args.dist)
+
+ plot(eff["tracks"], "eff", args.prefix, "ntracks", cat, label, legend,
+ hist["tracks"], args.dist)
+ plot(eff["z"], "eff", args.prefix, "z", cat, label, legend, hist["z"],
+ args.dist)
+ plot(eff["r"], "eff", args.prefix, "r", cat, label, legend, hist["r"],
+ args.dist)
diff --git a/Hlt/RecoConf/scripts/PrimaryVertexCheckerPull.py b/Hlt/RecoConf/scripts/PrimaryVertexCheckerPull.py
new file mode 100644
index 0000000000000000000000000000000000000000..cf30de6cca3bded82e0ef099d8736b5c4cec8854
--- /dev/null
+++ b/Hlt/RecoConf/scripts/PrimaryVertexCheckerPull.py
@@ -0,0 +1,294 @@
+###############################################################################
+# (c) Copyright 2019 CERN for the benefit of the LHCb Collaboration #
+# #
+# This software is distributed under the terms of the GNU General Public #
+# Licence version 3 (GPL Version 3), copied verbatim in the file "COPYING". #
+# #
+# In applying this licence, CERN does not waive the privileges and immunities #
+# granted to it by virtue of its status as an Intergovernmental Organization #
+# or submit itself to any jurisdiction. #
+###############################################################################
+#!/usr/bin/python
+
+# Script for plotting the PV pull distributions (x, y, z)
+# and their dependence on nTracks and z.
+# It takes as input the NTuple created by hlt1_reco_pvchecker.py.
+#
+# The robust mean and width of the pulls in each bin
+# are plotted with TGraphAsymmErrors.
+#
+# author: Agnieszka Dziurda (agnieszka.dziurda@cern.ch)
+# date: 02/2020
+#
+# Example of usage:
+# ../../../run python PrimaryVertexCheckerPull.py
+# --file file1.root file2.root --label name1 name2
+#
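+# The pulls are built as dx/errx, dy/erry and dz/errz (see transfer_variable
+# in utils/pvconfig.py); for well-estimated vertex uncertainties they should
+# be centred at 0 with a width of 1.
+#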
+
+import os, sys
+import argparse
+import ROOT
+
+from ROOT import TFile, TTree
+from ROOT import kRed, kBlue, kOrange, kMagenta, kGreen, kCyan, kGray, kYellow
+from ROOT import gROOT, TLegend
+
+parser = argparse.ArgumentParser()
+parser.add_argument(
+ '--file', dest='fileName', default="", nargs='+', help='filename to plot')
+parser.add_argument(
+ '--label', dest='label', default="", nargs='+', help='labels for files')
+parser.add_argument(
+ '--tree',
+ dest='treeName',
+ default="",
+ nargs='+',
+ help='tree name to plot',
+)
+parser.add_argument(
+ '--smog',
+ dest='smog',
+ default=False,
+ action='store_true',
+ help='set true for SMOG')
+parser.add_argument(
+ '--multi',
+ dest='multi',
+ default=False,
+ action='store_true',
+ help='add multiplicity plots')
+parser.add_argument(
+ '--dist',
+ dest='dist',
+ default=False,
+ action='store_true',
+ help='plot distributions in the canvas')
+parser.add_argument(
+ '--prefix',
+ dest='prefix',
+ default="pv_pull",
+ help='prefix for the plot name',
+)
+parser.add_argument(
+ '--dir',
+ dest='directory',
+ default=os.getcwd(),
+ help='directory containing the utils/ helper modules',
+)
+
+parser.add_argument(
+ '--offset',
+ dest='offset',
+ default=0,
+ help='offset for plot colors',
+)
+
+
+def get_labels(number_of_files):
+ label = []
+ for i in range(0, number_of_files):
+ label.append("PV Checker {number}".format(number=str(i + 1)))
+ return label
+
+
+if __name__ == '__main__':
+ args = parser.parse_args()
+ path = args.directory + "/utils/"
+ sys.path.append(os.path.abspath(path))
+ offset = int(args.offset)
+ gROOT.SetBatch()
+
+ from pvutils import get_default_tree_name
+ from pvutils import get_files, get_trees
+ from pvutils import set_legend, get_global, plot_comparison
+ from pvutils import get_dependence
+ from pvutils import plot
+
+ from pvconfig import get_variable_ranges
+ from pvconfig import get_style, get_categories
+ from pvconfig import get_y_axis
+
+ ranges = get_variable_ranges(args.smog)
+ style = get_style()
+
+ cat = get_categories(args.multi, args.smog)
+ label = args.label
+ if args.label == "":
+ label = get_labels(len(args.fileName))
+
+ tr = {}
+ tf = {}
+ tf = get_files(tf, label, args.fileName)
+ tr = get_trees(tf, tr, label, args.treeName, True)
+
+ hist_x = {}
+ hist_y = {}
+ hist_z = {}
+ norm = True #to-do
+ hist_x = get_global(hist_x, tr, "pullx", "#Delta x / #sigma_{x}",
+ "Candidates Normalized", style, ranges, cat, label,
+ offset)
+ hist_y = get_global(hist_y, tr, "pully", "#Delta y / #sigma_{y}",
+ "Candidates Normalized", style, ranges, cat, label,
+ offset)
+ hist_z = get_global(hist_z, tr, "pullz", "#Delta z / #sigma_{z}",
+ "Candidates Normalized", style, ranges, cat, label,
+ offset)
+
+ plot_comparison(hist_x, args.prefix, "pullx", cat, label, style, norm,
+ offset)
+ plot_comparison(hist_y, args.prefix, "pully", cat, label, style, norm,
+ offset)
+ plot_comparison(hist_z, args.prefix, "pullz", cat, label, style, norm,
+ offset)
+
+ from ROOT import gEnv
+ gEnv.SetValue("Hist.Binning.1D.x", "100")
+
+ graph = {}
+ graph["tracks"] = {}
+ graph["tracks"]["pullx"] = {}
+ graph["tracks"]["pully"] = {}
+ graph["tracks"]["pullz"] = {}
+ graph["tracks"]["pullx"] = get_dependence(graph["tracks"]["pullx"], tr,
+ "pullx", "nrectrmc", ranges,
+ style, cat, label, offset)
+ graph["tracks"]["pully"] = get_dependence(graph["tracks"]["pully"], tr,
+ "pully", "nrectrmc", ranges,
+ style, cat, label, offset)
+ graph["tracks"]["pullz"] = get_dependence(graph["tracks"]["pullz"], tr,
+ "pullz", "nrectrmc", ranges,
+ style, cat, label, offset)
+
+ legend = TLegend(0.15, 0.86, 0.88, 0.98)
+ legend = set_legend(legend, label, graph["tracks"]["pullz"], "sigma")
+
+ labelpos = (0.75, 0.75, 0.9, 0.87)
+ plot(
+ graph["tracks"]["pullx"],
+ "mean",
+ args.prefix + "_ntracks_mean",
+ "pullx",
+ cat,
+ label,
+ legend,
+ labelpos=labelpos)
+ plot(
+ graph["tracks"]["pullx"],
+ "sigma",
+ args.prefix + "_ntracks_sigma",
+ "pullx",
+ cat,
+ label,
+ legend,
+ labelpos=labelpos)
+
+ plot(
+ graph["tracks"]["pully"],
+ "mean",
+ args.prefix + "_ntracks_mean",
+ "pully",
+ cat,
+ label,
+ legend,
+ labelpos=labelpos)
+ plot(
+ graph["tracks"]["pully"],
+ "sigma",
+ args.prefix + "_ntracks_sigma",
+ "pully",
+ cat,
+ label,
+ legend,
+ labelpos=labelpos)
+
+ plot(
+ graph["tracks"]["pullz"],
+ "mean",
+ args.prefix + "_ntracks_mean",
+ "pullz",
+ cat,
+ label,
+ legend,
+ labelpos=labelpos)
+ plot(
+ graph["tracks"]["pullz"],
+ "sigma",
+ args.prefix + "_ntracks_sigma",
+ "pullz",
+ cat,
+ label,
+ legend,
+ labelpos=labelpos)
+
+ graph["z"] = {}
+ graph["z"]["pullx"] = {}
+ graph["z"]["pully"] = {}
+ graph["z"]["pullz"] = {}
+ graph["z"]["pullx"] = get_dependence(graph["z"]["pullx"], tr, "pullx",
+ "zMC", ranges, style, cat, label,
+ offset)
+ graph["z"]["pully"] = get_dependence(graph["z"]["pully"], tr, "pully",
+ "zMC", ranges, style, cat, label,
+ offset)
+ graph["z"]["pullz"] = get_dependence(graph["z"]["pullz"], tr, "pullz",
+ "zMC", ranges, style, cat, label,
+ offset)
+
+ plot(
+ graph["z"]["pullx"],
+ "mean",
+ args.prefix + "_z_mean",
+ "pullx",
+ cat,
+ label,
+ legend,
+ labelpos=labelpos)
+ plot(
+ graph["z"]["pullx"],
+ "sigma",
+ args.prefix + "_z_sigma",
+ "pullx",
+ cat,
+ label,
+ legend,
+ labelpos=labelpos)
+
+ plot(
+ graph["z"]["pully"],
+ "mean",
+ args.prefix + "_z_mean",
+ "pully",
+ cat,
+ label,
+ legend,
+ labelpos=labelpos)
+ plot(
+ graph["z"]["pully"],
+ "sigma",
+ args.prefix + "_z_sigma",
+ "pully",
+ cat,
+ label,
+ legend,
+ labelpos=labelpos)
+
+ plot(
+ graph["z"]["pullz"],
+ "mean",
+ args.prefix + "_z_mean",
+ "pullz",
+ cat,
+ label,
+ legend,
+ labelpos=labelpos)
+ plot(
+ graph["z"]["pullz"],
+ "sigma",
+ args.prefix + "_z_sigma",
+ "pullz",
+ cat,
+ label,
+ legend,
+ labelpos=labelpos)
diff --git a/Hlt/RecoConf/scripts/PrimaryVertexCheckerResolution.py b/Hlt/RecoConf/scripts/PrimaryVertexCheckerResolution.py
new file mode 100644
index 0000000000000000000000000000000000000000..e1bdce35d1cb08dcb6bfdf807bf36c8033677283
--- /dev/null
+++ b/Hlt/RecoConf/scripts/PrimaryVertexCheckerResolution.py
@@ -0,0 +1,289 @@
+###############################################################################
+# (c) Copyright 2019 CERN for the benefit of the LHCb Collaboration #
+# #
+# This software is distributed under the terms of the GNU General Public #
+# Licence version 3 (GPL Version 3), copied verbatim in the file "COPYING". #
+# #
+# In applying this licence, CERN does not waive the privileges and immunities #
+# granted to it by virtue of its status as an Intergovernmental Organization #
+# or submit itself to any jurisdiction. #
+###############################################################################
+#!/usr/bin/python
+
+# Script for plotting the PV resolution (Delta x, y, z)
+# and its dependence on nTracks and z.
+# It takes as input the NTuple created by hlt1_reco_pvchecker.py.
+#
+# The robust mean and width of the residuals in each bin
+# are plotted with TGraphAsymmErrors.
+#
+# author: Agnieszka Dziurda (agnieszka.dziurda@cern.ch)
+# date: 02/2020
+#
+# Example of usage:
+# ../../../run python PrimaryVertexCheckerResolution.py
+# --file file1.root file2.root --label name1 name2
+#
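+# The resolutions are the robust widths of dx, dy and dz in each bin of
+# nTracks and z, quoted in micrometres (see get_robust_sigma in
+# utils/pvutils.py).
+#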
+
+import os, sys
+import argparse
+import ROOT
+
+from ROOT import TFile, TTree
+from ROOT import kRed, kBlue, kOrange, kMagenta, kGreen, kCyan, kGray, kYellow
+from ROOT import gROOT, TLegend
+
+parser = argparse.ArgumentParser()
+parser.add_argument(
+ '--file', dest='fileName', default="", nargs='+', help='filename to plot')
+parser.add_argument(
+ '--label', dest='label', default="", nargs='+', help='labels for files')
+parser.add_argument(
+ '--tree',
+ dest='treeName',
+ default="",
+ nargs='+',
+ help='tree name to plot',
+)
+parser.add_argument(
+ '--smog',
+ dest='smog',
+ default=False,
+ action='store_true',
+ help='set true for SMOG')
+parser.add_argument(
+ '--multi',
+ dest='multi',
+ default=False,
+ action='store_true',
+ help='add multiplicity plots')
+parser.add_argument(
+ '--dist',
+ dest='dist',
+ default=False,
+ action='store_true',
+ help='plot distributions in the canvas')
+parser.add_argument(
+ '--prefix',
+ dest='prefix',
+ default="pv_resol",
+ help='prefix for the plot name',
+)
+parser.add_argument(
+ '--dir',
+ dest='directory',
+ default=os.getcwd(),
+ help='directory containing the utils/ helper modules',
+)
+
+parser.add_argument(
+ '--offset',
+ dest='offset',
+ default=0,
+ help='offset for plot colors',
+)
+
+
+def get_labels(number_of_files):
+ label = []
+ for i in range(0, number_of_files):
+ label.append("PVChecker{number}".format(number=str(i + 1)))
+ return label
+
+
+if __name__ == '__main__':
+ args = parser.parse_args()
+ path = args.directory + "/utils/"
+ sys.path.append(os.path.abspath(path))
+ offset = int(args.offset)
+
+ gROOT.SetBatch()
+
+ from pvutils import get_default_tree_name
+ from pvutils import get_files, get_trees
+ from pvutils import set_legend, get_global, plot_comparison
+ from pvutils import get_dependence
+ from pvutils import plot
+
+ from pvconfig import get_variable_ranges
+ from pvconfig import get_style, get_categories
+
+ ranges = get_variable_ranges(args.smog)
+ style = get_style()
+
+ cat = get_categories(args.multi, args.smog)
+ label = args.label
+ if args.label == "":
+ label = get_labels(len(args.fileName))
+
+ tr = {}
+ tf = {}
+ tf = get_files(tf, label, args.fileName)
+ tr = get_trees(tf, tr, label, args.treeName, True)
+
+ hist_x = {}
+ hist_y = {}
+ hist_z = {}
+ norm = True
+
+ hist_x = get_global(hist_x, tr, "dx", "#Delta x [mm]",
+ "Candidates Normalized", style, ranges, cat, label,
+ offset)
+ hist_y = get_global(hist_y, tr, "dy", "#Delta y [mm]",
+ "Candidates Normalized", style, ranges, cat, label,
+ offset)
+ hist_z = get_global(hist_z, tr, "dz", "#Delta z [mm]",
+ "Candidates Normalized", style, ranges, cat, label,
+ offset)
+
+ plot_comparison(hist_x, args.prefix, "dx", cat, label, style, norm, offset)
+ plot_comparison(hist_y, args.prefix, "dy", cat, label, style, norm, offset)
+ plot_comparison(hist_z, args.prefix, "dz", cat, label, style, norm, offset)
+
+ from ROOT import gEnv
+ gEnv.SetValue("Hist.Binning.1D.x", "100")
+
+ graph = {}
+ graph["tracks"] = {}
+ graph["tracks"]["dx"] = {}
+ graph["tracks"]["dy"] = {}
+ graph["tracks"]["dz"] = {}
+ graph["tracks"]["dx"] = get_dependence(graph["tracks"]["dx"], tr, "dx",
+ "nrectrmc", ranges, style, cat,
+ label, offset)
+ graph["tracks"]["dy"] = get_dependence(graph["tracks"]["dy"], tr, "dy",
+ "nrectrmc", ranges, style, cat,
+ label, offset)
+ graph["tracks"]["dz"] = get_dependence(graph["tracks"]["dz"], tr, "dz",
+ "nrectrmc", ranges, style, cat,
+ label, offset)
+
+ legend = TLegend(0.15, 0.86, 0.88, 0.98)
+ legend = set_legend(legend, label, graph["tracks"]["dz"], "sigma")
+
+ labelpos = (0.73, 0.75, 0.88, 0.87)
+ plot(
+ graph["tracks"]["dx"],
+ "mean",
+ args.prefix + "_ntracks_mean",
+ "dx",
+ cat,
+ label,
+ legend,
+ labelpos=labelpos)
+ plot(
+ graph["tracks"]["dx"],
+ "sigma",
+ args.prefix + "_ntracks_sigma",
+ "dx",
+ cat,
+ label,
+ legend,
+ labelpos=labelpos)
+
+ plot(
+ graph["tracks"]["dy"],
+ "mean",
+ args.prefix + "_ntracks_mean",
+ "dy",
+ cat,
+ label,
+ legend,
+ labelpos=labelpos)
+ plot(
+ graph["tracks"]["dy"],
+ "sigma",
+ args.prefix + "_ntracks_sigma",
+ "dy",
+ cat,
+ label,
+ legend,
+ labelpos=labelpos)
+
+ plot(
+ graph["tracks"]["dz"],
+ "mean",
+ args.prefix + "_ntracks_mean",
+ "dz",
+ cat,
+ label,
+ legend,
+ labelpos=labelpos)
+ plot(
+ graph["tracks"]["dz"],
+ "sigma",
+ args.prefix + "_ntracks_sigma",
+ "dz",
+ cat,
+ label,
+ legend,
+ labelpos=labelpos)
+
+ graph["z"] = {}
+ graph["z"]["dx"] = {}
+ graph["z"]["dy"] = {}
+ graph["z"]["dz"] = {}
+ graph["z"]["dx"] = get_dependence(graph["z"]["dx"], tr, "dx", "zMC",
+ ranges, style, cat, label, offset)
+ graph["z"]["dy"] = get_dependence(graph["z"]["dy"], tr, "dy", "zMC",
+ ranges, style, cat, label, offset)
+ graph["z"]["dz"] = get_dependence(graph["z"]["dz"], tr, "dz", "zMC",
+ ranges, style, cat, label, offset)
+
+ plot(
+ graph["z"]["dx"],
+ "mean",
+ args.prefix + "_z_mean",
+ "dx",
+ cat,
+ label,
+ legend,
+ labelpos=labelpos)
+ plot(
+ graph["z"]["dx"],
+ "sigma",
+ args.prefix + "_z_sigma",
+ "dx",
+ cat,
+ label,
+ legend,
+ labelpos=labelpos)
+
+ plot(
+ graph["z"]["dy"],
+ "mean",
+ args.prefix + "_z_mean",
+ "dy",
+ cat,
+ label,
+ legend,
+ labelpos=labelpos)
+ plot(
+ graph["z"]["dy"],
+ "sigma",
+ args.prefix + "_z_sigma",
+ "dy",
+ cat,
+ label,
+ legend,
+ labelpos=labelpos)
+
+ plot(
+ graph["z"]["dz"],
+ "mean",
+ args.prefix + "_z_mean",
+ "dz",
+ cat,
+ label,
+ legend,
+ labelpos=labelpos)
+ plot(
+ graph["z"]["dz"],
+ "sigma",
+ args.prefix + "_z_sigma",
+ "dz",
+ cat,
+ label,
+ legend,
+ labelpos=labelpos)
diff --git a/Hlt/RecoConf/scripts/utils/ConfigHistos.py b/Hlt/RecoConf/scripts/utils/ConfigHistos.py
index e3f113544cad80b058cc01537ff6515c95131e27..33cb61e2ccb5ab3fd65601e857dd33478a61b214 100644
--- a/Hlt/RecoConf/scripts/utils/ConfigHistos.py
+++ b/Hlt/RecoConf/scripts/utils/ConfigHistos.py
@@ -19,7 +19,8 @@ def efficiencyHistoDict():
"pt": {},
"phi": {},
"nPV": {},
- "docaz": {}
+ "docaz": {},
+ "z": {}
}
basedict["eta"]["xTitle"] = "#eta"
@@ -37,9 +38,12 @@ def efficiencyHistoDict():
basedict["nPV"]["xTitle"] = "# of PVs"
basedict["nPV"]["variable"] = "nPV"
- basedict["docaz"]["xTitle"] = "docaz (mm)"
+ basedict["docaz"]["xTitle"] = "docaz [mm]"
basedict["docaz"]["variable"] = "docaz"
+ basedict["z"]["xTitle"] = "PV z coordinate [mm]"
+ basedict["z"]["variable"] = "z"
+
return basedict
@@ -79,7 +83,7 @@ def getCuts():
"04_long_strange_P>5GeV", "05_long_fromB", "06_long_fromB_P>5GeV",
"10_long_fromB_P>3GeV_Pt>0.5GeV", "11_UT_long_fromB_P>3GeV_Pt>0.5GeV"
]
- basedict["MuonMatch"] = ["01_long", "02_long_muon"]
+ basedict["MuonMatch"] = ["01_long", "02_long_muon", "04_long_pion"]
return basedict
@@ -90,7 +94,9 @@ def categoriesDict():
basedict["MuonMatch"]["01_long"][
"title"] = "Long, forward track, 2 <#eta< 5"
basedict["MuonMatch"]["02_long_muon"][
- "title"] = "Long, #mu, forward track, 2 <#eta< 5"
+ "title"] = "Long, #mu, forward track, 2 <#eta< 5"
+ basedict["MuonMatch"]["04_long_pion"][
+ "title"] = "Long, #pi, forward track, 2 <#eta< 5"
basedict["Velo"]["01_velo"]["title"] = "Velo, 2 <#eta< 5"
basedict["Velo"]["02_long"]["title"] = "Long, 2 <#eta< 5"
diff --git a/Hlt/RecoConf/scripts/utils/pvconfig.py b/Hlt/RecoConf/scripts/utils/pvconfig.py
new file mode 100644
index 0000000000000000000000000000000000000000..09bccd2963f34e855b67f979edea621395286d44
--- /dev/null
+++ b/Hlt/RecoConf/scripts/utils/pvconfig.py
@@ -0,0 +1,188 @@
+###############################################################################
+# (c) Copyright 2019 CERN for the benefit of the LHCb Collaboration #
+# #
+# This software is distributed under the terms of the GNU General Public #
+# Licence version 3 (GPL Version 3), copied verbatim in the file "COPYING". #
+# #
+# In applying this licence, CERN does not waive the privileges and immunities #
+# granted to it by virtue of its status as an Intergovernmental Organization #
+# or submit itself to any jurisdiction. #
+###############################################################################
+#!/usr/bin/python
+
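+# Shared configuration (cuts, binning, styles, axis labels) for the
+# PrimaryVertexChecker plotting scripts.
+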
+from ROOT import TH1F, TLegend
+from ROOT import kRed, kBlue, kOrange, kMagenta, kGreen, kCyan, kGray, kYellow
+
+
+def get_categories(multi, smog):
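+ # selection categories: "all" plus, with --multi, a split by the multimc
+ # index of the MC vertex (1st to 5th)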
+ cut = {}
+ cut["all"] = {"cut": ""}
+ # cut["isolated"] = {"cut": "&&isol==1"}
+ # if not smog:
+ # cut["close"] = {"cut": "&&isol==0"}
+ if multi:
+ cut["1st"] = {"cut": "&&multimc==1"}
+ cut["2nd"] = {"cut": "&&multimc==2"}
+ cut["3rd"] = {"cut": "&&multimc==3"}
+ cut["4th"] = {"cut": "&&multimc==4"}
+ cut["5th"] = {"cut": "&&multimc==5"}
+
+ return cut
+
+
+def get_style():
+ return {
+ "color": [kRed, kBlue, kOrange, kMagenta, kGreen, kCyan],
+ "marker": [21, 20, 22, 23, 24, 25]
+ }
+
+
+def get_default_tree_name(is_checker):
+ if is_checker:
+ return "PrimaryVertexChecker/101"
+ else:
+ return "VertexCompare/102"
+
+
+def transfer_variable(dep):
+ dictionary = {
+ "pullx": "dx/errx",
+ "pully": "dy/erry",
+ "pullz": "dz/errz",
+ "dx": "dx",
+ "dy": "dy",
+ "dz": "dz",
+ }
+ return dictionary[dep]
+
+
+def get_variable_ranges(smog):
+ dictionary = {
+ "zMC": {
+ "bins": 50,
+ "min": -200,
+ "max": 200
+ },
+ "rMC": {
+ "bins": 50,
+ "min": 0.0,
+ "max": 0.2
+ },
+ "dx": {
+ "bins": 50,
+ "min": -0.10,
+ "max": 0.10
+ },
+ "dy": {
+ "bins": 50,
+ "min": -0.10,
+ "max": 0.10
+ },
+ "dz": {
+ "bins": 50,
+ "min": -0.40,
+ "max": 0.40
+ },
+ "pullx": {
+ "bins": 50,
+ "min": -3.50,
+ "max": 3.50
+ },
+ "pully": {
+ "bins": 50,
+ "min": -3.50,
+ "max": 3.50
+ },
+ "pullz": {
+ "bins": 50,
+ "min": -3.50,
+ "max": 3.50
+ },
+ "nrectrmc": {
+ "bins": 66,
+ "min": 4.0,
+ "max": 70.0
+ }
+ }
+ if smog:
+ dictionary["zMC"] = {"bins": 100, "min": -500, "max": 200}
+
+ return dictionary
+
+
+def get_y_axis(dependence):
+ dictionary = {
+ "dx": "Resolution #Delta x [#mu m]",
+ "dy": "Resolution #Delta y [#mu m]",
+ "dz": "Resolution #Delta z [#mu m]",
+ "pullx": "Pull #Delta x/#sigma_{x}",
+ "pully": "Pull #Delta y/#sigma_{y}",
+ "pullz": "Pull #Delta z/#sigma_{z}"
+ }
+ return dictionary[dependence]
+
+
+def get_x_axis(dependence):
+ dictionary = {
+ "zMC": "z [mm]",
+ "rMC": "radial distance [mm]",
+ "nrectrmc": "number of tracks in Primary Vertex"
+ }
+ return dictionary[dependence]
+
+
+def set_style(graph, color, marker, xaxis, yaxis, title):
+ graph.SetTitle("")
+ graph.SetLineColor(color)
+ graph.SetMarkerColor(color)
+ graph.SetMarkerSize(1.3)
+ graph.SetMarkerStyle(marker)
+ graph.GetYaxis().SetTitleOffset(0.85)
+ if type(graph) == TH1F:
+ graph.SetFillColor(color)
+ graph.SetLineWidth(1)
+ graph.SetStats(False)
+ graph.GetYaxis().SetTitleOffset(1.1)
+ graph.GetYaxis().SetTitleSize(0.06)
+ graph.GetYaxis().SetLabelSize(0.06)
+ graph.GetXaxis().SetTitleSize(0.06)
+ graph.GetXaxis().SetLabelSize(0.06)
+ graph.GetXaxis().SetTitleFont(132)
+ graph.GetXaxis().SetLabelFont(132)
+ graph.GetYaxis().SetTitleFont(132)
+ graph.GetYaxis().SetLabelFont(132)
+
+ if title != "":
+ graph.SetTitle(title)
+ if xaxis != "":
+ graph.GetXaxis().SetTitle(xaxis)
+ if yaxis != "":
+ graph.GetYaxis().SetTitle(yaxis)
+
+
+def set_legend(legend, label, gr, gr_type, hist=None, dist=False):
+ legend.SetTextSize(0.04)
+ legend.SetTextFont(12)
+ legend.SetFillColor(4000)
+ legend.SetShadowColor(0)
+ legend.SetBorderSize(0)
+ legend.SetTextFont(132)
+ legend.SetNColumns(2)
+ for lab in label:
+ legend.AddEntry(gr["all"][lab][gr_type], "{lab}".format(lab=lab),
+ "lep")
+ if dist:
+ legend.AddEntry(hist["all"][label[0]]["den"], "Distribution MC", "f")
+ for lab in label:
+ legend.AddEntry(hist["all"][lab]["nom"],
+ "Distribution {lab}".format(lab=lab), "lep")
+
+ return legend
+
+
+def get_text_cor():
+ return {"x": [0.17, 0.65, 0.17, 0.65], "y": [0.92, 0.92, 0.75, 0.75]}
+
+
+def basic_cut():
+ return "nrectrmc>=4 && dz < 2.0"
diff --git a/Hlt/RecoConf/scripts/utils/pvutils.py b/Hlt/RecoConf/scripts/utils/pvutils.py
new file mode 100644
index 0000000000000000000000000000000000000000..16d61773c5a08b29e8f43ba804f7a99efe7250ff
--- /dev/null
+++ b/Hlt/RecoConf/scripts/utils/pvutils.py
@@ -0,0 +1,442 @@
+###############################################################################
+# (c) Copyright 2019 CERN for the benefit of the LHCb Collaboration #
+# #
+# This software is distributed under the terms of the GNU General Public #
+# Licence version 3 (GPL Version 3), copied verbatim in the file "COPYING". #
+# #
+# In applying this licence, CERN does not waive the privileges and immunities #
+# granted to it by virtue of its status as an Intergovernmental Organization #
+# or submit itself to any jurisdiction. #
+###############################################################################
+
+# Utilities for processing the output of the PrimaryVertexChecker.
+#
+# author: Agnieszka Dziurda (agnieszka.dziurda@cern.ch)
+# date: 02/2020
+#
+
+from ROOT import TFile, TTree, TH1F, gDirectory, TGraphAsymmErrors, TPaveText, gStyle
+from ROOT import TCanvas
+from ROOT import TGraphErrors, TLegend
+from ROOT import gPad, kGray, TLatex
+
+from pvconfig import get_default_tree_name
+from pvconfig import transfer_variable
+from pvconfig import get_x_axis, get_y_axis
+from pvconfig import set_style
+from pvconfig import set_legend
+from pvconfig import get_text_cor
+from pvconfig import basic_cut
+
+from array import array
+
+
+def get_files(tf, label, files):
+ i = 0
+ for f in files:
+ tf[label[i]] = TFile(f)
+ i += 1
+ return tf
+
+
+def get_trees(tf, tr, label, trees, is_checker):
+ i = 0
+ for lab in label:
+ if len(tf) == len(trees):
+ tr[lab] = tf[lab].Get(trees[i])
+ else:
+ tr[lab] = tf[lab].Get(get_default_tree_name(is_checker))
+ i += 1
+ return tr
+
+
+def get_eff(eff, hist, trees, dependence, style, ranges, categories, label,
+ offset):
+
+ for cat in categories:
+ eff[cat] = {}
+ hist[cat] = {}
+ i = offset
+ for lab in label:
+ var_den = '{dependence}>>hist{code}_{dep}_{cat}_denom{i}({bins},{mins},{maxs})'.format(
+ dependence=dependence,
+ code="den",
+ dep=dependence,
+ cat=cat,
+ lab=lab,
+ bins=ranges[dependence]["bins"],
+ mins=ranges[dependence]["min"],
+ maxs=ranges[dependence]["max"],
+ i=i)
+ var_nom = '{dependence}>>hist{code}_{dep}_{cat}_num{i}({bins},{mins},{maxs})'.format(
+ dependence=dependence,
+ code="nom",
+ dep=dependence,
+ cat=cat,
+ lab=lab,
+ bins=ranges[dependence]["bins"],
+ mins=ranges[dependence]["min"],
+ maxs=ranges[dependence]["max"],
+ i=i)
+
+ cut_den = 'nrectrmc>=4 && dz < 2.0 {cuts}'.format(
+ cuts=categories[cat]["cut"])
+ cut_nom = cut_den + ' && reco == 1'
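+ # denominator: all MC PVs passing the baseline selection; numerator:
+ # the subset flagged as reconstructed (reco == 1)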
+
+ trees[lab].Draw(var_nom, cut_nom)
+ trees[lab].Draw(var_den, cut_den)
+
+ h_nom = gDirectory.Get('hist{code}_{dep}_{cat}_num{i}'.format(
+ code="nom", dep=dependence, cat=cat, lab=lab, i=i))
+ h_den = gDirectory.Get('hist{code}_{dep}_{cat}_denom{i}'.format(
+ code="den", dep=dependence, cat=cat, lab=lab, i=i))
+
+ g_eff = TGraphAsymmErrors()
+ g_eff.Divide(h_nom, h_den, "cl=0.683 b(1,1) mode")
+
+ set_style(h_nom, style["color"][i] - 7, style["marker"][i] + 4,
+ get_x_axis(dependence), "Efficiency", "")
+ set_style(h_den, kGray, style["marker"][i], get_x_axis(dependence),
+ "Efficiency", "")
+ hist[cat][lab] = {}
+ hist[cat][lab]["nom"] = h_nom
+ hist[cat][lab]["den"] = h_den
+
+ set_style(g_eff, style["color"][i], style["marker"][i],
+ get_x_axis(dependence), "Efficiency", "")
+ eff[cat][lab] = {}
+ eff[cat][lab]["eff"] = g_eff
+ i += 1
+
+ return eff, hist
+
+
+def find_max(gr, gr_type, label):
+ m = -999999.0
+ for lab in label:
+ if (gr[lab][gr_type].GetYaxis().GetXmax() > m):
+ m = gr[lab][gr_type].GetYaxis().GetXmax()
+
+ return m
+
+
+def find_min(gr, gr_type, label):
+ m = 999999.0
+ for lab in label:
+ if (gr[lab][gr_type].GetYaxis().GetXmin() < m):
+ m = gr[lab][gr_type].GetYaxis().GetXmin()
+ return m
+
+
+def plot(gr,
+ gr_type,
+ prefix,
+ dependence,
+ categories,
+ label,
+ legend=None,
+ hist=None,
+ dist=False,
+ labelpos=(0.75, 0.78, 0.9, 0.9)):
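+ # One canvas per category: overlay all graphs of the requested type,
+ # optionally the rescaled underlying distributions, a legend and an
+ # "LHCb simulation" label, and save it as <prefix>_<dependence>_<cat>.pdf.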
+
+ for cat in categories:
+ can = TCanvas(
+ 'canvas_{depen}_{prefix}_{gr_type}_{cat}'.format(
+ depen=dependence, prefix=prefix, gr_type=gr_type, cat=cat),
+ "cR", 1200, 800)
+ can.SetBottomMargin(0.15)
+ can.SetLeftMargin(0.12)
+ can.SetTopMargin(0.15)
+ if dist:
+ can.SetTopMargin(0.20)
+ can.SetRightMargin(0.05)
+ can.cd()
+
+ maximum = find_max(gr[cat], gr_type, label)
+ minimum = find_min(gr[cat], gr_type, label)
+ if (abs(maximum) > abs(minimum) and minimum < 0):
+ minimum = -maximum
+
+ gr[cat][label[0]][gr_type].GetYaxis().SetRangeUser(
+ minimum * 1.1, maximum * 1.1)
+ if (gr_type == "eff"):
+ gr[cat][label[0]][gr_type].GetYaxis().SetRangeUser(0.0, 1.1)
+ if (gr_type == "sigma"):
+ gr[cat][label[0]][gr_type].GetYaxis().SetRangeUser(
+ 0.0, maximum * 1.1)
+ gr[cat][label[0]][gr_type].Draw("AP")
+
+ for lab in label:
+ gr[cat][lab][gr_type].Draw("SAME P")
+
+ if dist:
+ histmax_den = 1.1 * hist[cat][label[0]]["den"].GetMaximum()
+ scale = gPad.GetUymax() / histmax_den
+ hist[cat][label[0]]["den"].Scale(scale * 0.75)
+ hist[cat][label[0]]["den"].Draw("hist SAME")
+
+ for lab in label:
+ # reuse the denominator scale so both histograms stay comparable
+ hist[cat][lab]["nom"].Scale(scale * 0.75)
+ hist[cat][lab]["nom"].Draw("ep SAME")
+
+ gr[cat][lab][gr_type].Draw("SAME P")
+
+ if legend:
+ legend.Draw("SAME")
+ pavetext = TPaveText(labelpos[0], labelpos[1], labelpos[2],
+ labelpos[3], "NBNDC")
+ pavetext.AddText("LHCb simulation")
+ pavetext.SetFillColor(0)
+ pavetext.SetFillStyle(3000)
+ pavetext.SetTextSize(0.06)
+ pavetext.SetTextFont(132)
+ pavetext.Draw()
+
+ saveName = '{prefix}_{dependence}_{cat}.pdf'.format(
+ prefix=prefix, dependence=dependence, cat=cat)
+ can.SaveAs(saveName)
+
+
+def get_global(hist, trees, dependence, x_axis, y_axis, style, ranges,
+ categories, label, offset):
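+ # Fill one histogram of the requested variable per category and per input
+ # file, for reconstructed PVs passing the baseline selection.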
+
+ dep = transfer_variable(dependence)
+
+ for cat in categories:
+ hist[cat] = {}
+ i = offset
+ for lab in label:
+ var = '{dependence}>>hist_{dep}_{cat}_{i}({bins},{mins},{maxs})'.format(
+ dependence=dep,
+ dep=dependence,
+ cat=cat,
+ lab=lab,
+ bins=ranges[dependence]["bins"],
+ mins=ranges[dependence]["min"],
+ maxs=ranges[dependence]["max"],
+ i=i)
+
+ cut = 'nrectrmc>=4 && dz < 2.0 && reco == 1 {cuts}'.format(
+ cuts=categories[cat]["cut"])
+
+ trees[lab].Draw(var, cut)
+ h = gDirectory.Get('hist_{dep}_{cat}_{i}'.format(
+ dep=dependence, cat=cat, lab=lab, i=i))
+ if i == 0:
+ col = style["color"][i] - 10
+ else:
+ col = style["color"][i]
+ set_style(h, col, style["marker"][i], x_axis, y_axis, "")
+
+ hist[cat][lab] = h
+ i += 1
+
+ return hist
+
+
+def set_text(text, color, x, y, lab, mean, mean_err, rms, rms_err, scale,
+ units):
+ s = 1.0
+ if scale:
+ s = 1000.0
+
+ ur = ""
+ um = "#times 10^{-3}"
+ if units:
+ ur = "[#mu m]"
+ um = "[#mu m]"
+
+ text.SetNDC()
+ text.SetTextFont(132)
+ text.SetTextColor(color)
+ text.DrawLatex(x, y * 1.0, lab)
+
+ text.DrawLatex(
+ x, y * 0.95, "#mu = ({0:0.2f} #pm {1:0.2f}) {unit}".format(
+ mean * 1000.0, mean_err * 1000.0, unit=um))
+ text.DrawLatex(
+ x, y * 0.90, "#sigma = ({0:0.2f} #pm {1:0.2f}) {unit}".format(
+ rms * s, rms_err * s, unit=ur))
+ return text
+
+
+def plot_comparison(hist, prefix, dependence, categories, label, style, norm,
+ offset):
+
+ for cat in categories:
+ can = TCanvas('canvas_{depen}_{cat}'.format(depen=dependence, cat=cat),
+ "cR", 1200, 800)
+ can.SetBottomMargin(0.15)
+ can.SetLeftMargin(0.15)
+ can.SetTopMargin(0.20)
+ can.SetRightMargin(0.05)
+
+ can.cd()
+ cor = get_text_cor()
+
+ hist[cat][label[0]].GetYaxis().SetRangeUser(
+ 0.0, hist[cat][label[0]].GetMaximum() * 1.1)
+ scale = True
+ units = True
+ if "pull" in dependence:
+ scale = False
+ units = False
+ if norm:
+ hist[cat][label[0]].DrawNormalized("hist")
+ hist_max = hist[cat][label[0]].GetMaximum()
+ i = offset
+ for lab in label:
+ hist[cat][lab].DrawNormalized("SAME PE")
+ text = TLatex()
+ text = set_text(text, style["color"][i], cor["x"][i],
+ cor["y"][i], lab, hist[cat][lab].GetMean(),
+ hist[cat][lab].GetMeanError(),
+ hist[cat][lab].GetRMS(),
+ hist[cat][lab].GetRMSError(), scale, units)
+ i += 1
+
+ else:
+ hist[cat][label[0]].Draw("hist")
+ for lab in label:
+ hist[cat][lab].Draw("SAME PE")
+ pavetext = TPaveText(0.72, 0.77 - gStyle.GetPadTopMargin(), 0.87,
+ 0.89 - gStyle.GetPadTopMargin(), "NBNDC")
+ pavetext.AddText("LHCb simulation")
+ pavetext.SetFillColor(0)
+ pavetext.SetTextSize(0.06)
+ pavetext.SetTextFont(132)
+ pavetext.Draw()
+
+ can.Update()
+ saveName = '{prefix}_{dependence}_{cat}.pdf'.format(
+ prefix=prefix, dependence=dependence, cat=cat)
+ can.SaveAs(saveName)
+
+
+def get_robust_sigma(hist, resol):
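+ # Robust width: estimate sigma from the inter-quartile range (IQR / 1.349),
+ # zero all bins further than 4 sigma from the median, then take the mean and
+ # RMS of the truncated histogram (converted to micrometres when resol=True).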
+
+ y = array('d', [0.] * 3)
+ x = array('d', [0.] * 3)
+ x[0] = 0.25
+ x[1] = 0.50
+ x[2] = 0.75
+
+ hist.GetQuantiles(3, y, x)
+
+ _median = y[1]
+ _approxstdev = (y[2] - y[0]) / 1.34898
+ #factor gives correspondence between IQR and stdev for a Gaussian
+ mult = 4.0
+
+ histclone = hist.Clone()
+ nb = histclone.GetNbinsX()
+
+ for i in range(1, nb + 1):
+ if ((histclone.GetBinCenter(i) < (_median - _approxstdev * mult))
+ or (histclone.GetBinCenter(i) >
+ (_median + _approxstdev * mult))):
+ histclone.SetBinContent(i, 0)
+
+ if resol:
+ robustsigma = {
+ "sigma": {
+ "var": histclone.GetRMS() * 1000.0,
+ "err": histclone.GetRMSError() * 1000.0
+ },
+ "mean": {
+ "var": histclone.GetMean() * 1000.0,
+ "err": histclone.GetMeanError() * 1000.0
+ }
+ }
+
+ else:
+ robustsigma = {
+ "sigma": {
+ "var": histclone.GetRMS() * 1.0,
+ "err": histclone.GetRMSError() * 1.0
+ },
+ "mean": {
+ "var": histclone.GetMean(),
+ "err": histclone.GetMeanError()
+ }
+ }
+
+ return robustsigma
+
+
+def get_dependence(graph, trees, variable, dependence, ranges, style,
+ categories, label, offset):
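+ # For each bin of the dependence variable (e.g. nrectrmc or zMC), draw the
+ # residual or pull distribution and store its robust mean and width as one
+ # point of a TGraphAsymmErrors.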
+
+ var = transfer_variable(variable)
+ bin_width = (ranges[dependence]["max"] -
+ ranges[dependence]["min"]) / ranges[dependence]["bins"]
+
+ resol = True
+ mean = "Mean [#mu m]"
+ if "pull" in variable:
+ resol = False
+ mean = "Mean"
+
+ for cat in categories:
+ graph[cat] = {}
+ i = offset
+ for lab in label:
+ graph[cat][lab] = {}
+ graph_rms = TGraphAsymmErrors(ranges[dependence]["bins"])
+ graph_mean = TGraphAsymmErrors(ranges[dependence]["bins"])
+
+ bin_min = ranges[dependence]["min"]
+ for b in range(0, ranges[dependence]["bins"]):
+
+ cut_bin = '{variable1}>={mins} && {variable2}<{maxs}'.format(
+ variable1=dependence,
+ mins=bin_min,
+ variable2=dependence,
+ maxs=bin_min + bin_width)
+
+ hist_string = '{dependence}>>hist_{dep}_{cat}_{i}_{number}'.format(
+ dependence=var,
+ dep=variable,
+ cat=cat,
+ i=i,
+ number=int(b),
+ )
+
+ cut = '{basic} && reco == 1 && {bin_cut} {cuts}'.format(
+ basic=basic_cut(),
+ bin_cut=cut_bin,
+ cuts=categories[cat]["cut"])
+
+ trees[lab].Draw(hist_string, cut)
+
+ h = gDirectory.Get('hist_{dep}_{cat}_{i}_{number}'.format(
+ dep=variable, cat=cat, i=i, number=int(b)))
+
+ robust_sigma = get_robust_sigma(h, resol)
+
+ # place the point at the centre of the current bin
+ bin_center = bin_min + bin_width / 2.0
+ bin_min += bin_width
+
+ graph_mean.SetPoint(b, bin_center, robust_sigma["mean"]["var"])
+ graph_mean.SetPointError(b, bin_width / 2.0, bin_width / 2.0,
+ robust_sigma["mean"]["err"],
+ robust_sigma["mean"]["err"])
+
+ graph_rms.SetPoint(b, bin_center, robust_sigma["sigma"]["var"])
+ graph_rms.SetPointError(b, bin_width / 2.0, bin_width / 2.0,
+ robust_sigma["sigma"]["err"],
+ robust_sigma["sigma"]["err"])
+
+ set_style(graph_rms, style["color"][i], style["marker"][i],
+ get_x_axis(dependence), get_y_axis(variable), "")
+ set_style(graph_mean, style["color"][i], style["marker"][i],
+ get_x_axis(dependence), mean, "")
+
+ graph[cat][lab]["sigma"] = graph_rms
+ graph[cat][lab]["mean"] = graph_mean
+ i += 1
+
+ return graph