From bfc68a49c2ffd374fd48a03a1ca9f897695af157 Mon Sep 17 00:00:00 2001 From: sesen Date: Fri, 14 Oct 2022 17:37:22 +0200 Subject: [PATCH 01/28] unify spruce and hlt config a bit more --- Hlt/Moore/python/Moore/config.py | 266 ++++++++++++------------------- 1 file changed, 102 insertions(+), 164 deletions(-) diff --git a/Hlt/Moore/python/Moore/config.py b/Hlt/Moore/python/Moore/config.py index 141f76c83a7..e406c0efd53 100644 --- a/Hlt/Moore/python/Moore/config.py +++ b/Hlt/Moore/python/Moore/config.py @@ -159,27 +159,42 @@ def report_writers_nodes(streams, "spruce": "SpruceSelectionID", "pass": "SpruceSelectionID" }[process] + tes_root = { + "hlt1": "/Event", + "hlt2": "/Event/HLT2", + "spruce": "/Event/Spruce", + "pass": "/Event/Spruce" + }[process] + process_type = { + "hlt1": "Hlt", + "hlt2": "Hlt", + "spruce": "Spruce", + "pass": "Spruce" + }[process] + dec_key = int( register_encoding_dictionary( major_name, generate_encoding_dictionary(major_name, [l.decision_name for l in lines])), 16) # TODO unsigned? Stick to hex string? - - if process == "hlt1" or process == "hlt2": - erw = ExecutionReportsWriter( - Persist=[line.name for line in lines], - ANNSvcKey=major_name, - TCK=dec_key, + + erw = ExecutionReportsWriter( + Persist=[line.name for line in lines], + ANNSvcKey=major_name, + TCK=dec_key # TODO unsigned? Stick to hex string? ) - drw = HltDecReportsWriter( - SourceID=source_id, - InputHltDecReportsLocation=erw.DecReportsLocation, - EncodingKey=dec_key, + + drw = HltDecReportsWriter( + SourceID=source_id, + InputHltDecReportsLocation=erw.DecReportsLocation, + EncodingKey=dec_key, ) - algs.extend([erw, drw]) - new_hlt_banks['HltDecReports'] = drw.OutputRawEvent + algs.extend([erw, drw]) + new_hlt_banks[process_type+'DecReports'] = drw.OutputView + + # Make SelReports only for Hlt1 if process == "hlt1": srm = make_selreports(process, physics_lines, erw) srw = HltSelReportsWriter( @@ -190,7 +205,14 @@ def report_writers_nodes(streams, EncodingKey=srm.properties['EncodingKey']) algs.append(srw) new_hlt_banks['HltSelReports'] = srw.RawEvent - elif process == "hlt2": + # pass thourhough needs to copy input manifest + elif process == "pass": + if not options.input_manifest_file: + raise RuntimeError( + ' pass-through configuration -- must specify an input manifest' + ) + copy(options.input_manifest_file, options.output_manifest_file) + else: (line_output_cf, line_output_locations, packed_data) = persist_line_outputs( physics_lines, @@ -199,51 +221,13 @@ def report_writers_nodes(streams, associate_mc, process.capitalize(), output_manifest_file, + stream=tes_root, clone_mc=options.simulation and options.input_type == ROOT_KEY) + new_hlt_banks['DstData'] = packed_data.OutputView algs.append(line_output_cf) - new_hlt_banks['DstData'] = packed_data.OutputRawEvent extra_locations_to_persist.extend(line_output_locations) - else: - ##spruce and passthrough jobs will write a Spruce report - erw = ExecutionReportsWriter( - Persist=[line.name for line in lines], - ANNSvcKey=major_name, - TCK=dec_key # TODO unsigned? Stick to hex string? 
- ) - drw = HltDecReportsWriter( - SourceID=source_id, - InputHltDecReportsLocation=erw.DecReportsLocation, - EncodingKey=dec_key, - ) - - algs.extend([erw, drw]) - new_hlt_banks['SpruceDecReports'] = drw.OutputView - - if process == "spruce": - #Only create new DstData locations if exclusive sprucing and not if passthrough - line_output_cf, line_output_locations, packed_data = persist_line_outputs( - physics_lines, - data_type, - erw.DecReportsLocation, - associate_mc, - process.capitalize(), - output_manifest_file, - stream="/Event/Spruce", - reco_stream="/Event/HLT2", - clone_mc=options.simulation and options.input_type == ROOT_KEY) - - new_hlt_banks['DstData'] = packed_data.OutputView - algs.append(line_output_cf) - extra_locations_to_persist.extend(line_output_locations) - - if process == "pass": - if not options.input_manifest_file: - raise RuntimeError( - ' pass-through configuration -- must specify an input manifest' - ) - copy(options.input_manifest_file, options.output_manifest_file) - + node = CompositeNode( 'report_writers', combine_logic=NodeLogic.NONLAZY_OR, @@ -320,92 +304,48 @@ def stream_writer(stream, ]) elif output_type == ROOT_KEY: - if process == "spruce" or process == "pass": - locations = [dh.location for dh in new_locations] - if process == "spruce": - # Do not want DstData raw bank from HLT2 in Sprucing output - persist_locations = [ - x for x in persist_locations if x != 'DstData' - ] - extra_locations_to_persist = [] - if propagate_mc: - ##For simulation need to propagate MC objects - if process == "spruce": - extra_locations_to_persist += extra_locations - else: - mc_part = make_data_with_FetchDataFromFile( - '/Event/HLT2/pSim/MCParticles') - mc_vert = make_data_with_FetchDataFromFile( - '/Event/HLT2/pSim/MCVertices') - extra_locations_to_persist += [mc_part, mc_vert] - - #persist locations have been unpacked by LHCb__UnpackRawEvent - locations += [ - '/Event/DAQ/RawBanks/%s' % (rb) for rb in persist_locations - ] - - consolidate_views = CombineRawBankViewsToRawEvent( - name="EvtSize_" + stream, - # FIXME this only works because of a bug https://gitlab.cern.ch/lhcb/LHCb/-/issues/160 - # There is no trivial fix. 
- RawBankViews=locations, - OutputLevel=4, - #RawEvent dependent on the stream name - - #else multiple algs (one for each stream) output to same location - outputs={'RawEvent': force_location('/Event/' + stream)}) - # Kill any links the output file might have had to the input file - writers.append(AddressKillerAlg()) - writers.append(consolidate_views) - writers.append( - root_writer( - full_fname, - extra_locations_to_persist + [consolidate_views.RawEvent])) + locations = [dh.location for dh in new_locations] + if process == "spruce": + # Do not want DstData raw bank from HLT2 in Sprucing output + persist_locations = [ + x for x in persist_locations if x != 'DstData' + ] + extra_locations_to_persist = [] + if propagate_mc: + ##For simulation need to propagate MC objects + if process == "spruce": + extra_locations_to_persist += extra_locations + else: + mc_part = make_data_with_FetchDataFromFile( + '/Event/HLT2/pSim/MCParticles') + mc_vert = make_data_with_FetchDataFromFile( + '/Event/HLT2/pSim/MCVertices') + extra_locations_to_persist += [mc_part, mc_vert] - else: - raw_events = [default_raw_event([bt]) for bt in persist_locations] - # Some banks might come from the same RawEvent so we need to prune - # the duplicates: - raw_events = new_locations + _unique(raw_events) - - raw_event_locations = [dh.location for dh in raw_events] - # Kill any links the output file might have had to the input file - writers.append(AddressKillerAlg()) - # FIXME The following is a workaround for the inability to read - # the input raw event into a custom location. - - overwrite = ROOT_RAW_EVENT_LOCATION in raw_event_locations - - if overwrite: - # Do not copy raw_out_location onto itself - raw_event_locations.remove(ROOT_RAW_EVENT_LOCATION) - raw_event_combiner = RawEventSimpleCombiner( - InputRawEventLocations=raw_event_locations, - # False -> put, True -> use getIfExists for the output - EnableIncrementalMode=overwrite, - outputs={ - 'OutputRawEventLocation': - force_location(ROOT_RAW_EVENT_LOCATION) - }) - - # Always copy the locations from the input when the output is ROOT - input_leaves = Gaudi__Hive__FetchLeavesFromFile() - # Collecting the locations to copy must be the first thing in the - # control flow. - writer_setup.append(input_leaves) - writers.append(raw_event_combiner) - if write_detector_raw_banks: - writers.append( - root_copy_input_writer( - full_fname, - input_leaves, - [ROOT_RAW_EVENT_LOCATION] + extra_locations, - # don't copy the raw event - tes_veto_locations=raw_event_locations, - )) - else: - raise NotImplementedError() + #persist locations have been unpacked by LHCb__UnpackRawEvent + locations += [ + '/Event/DAQ/RawBanks/%s' % (rb) for rb in persist_locations + ] + consolidate_views = CombineRawBankViewsToRawEvent( + name="EvtSize_" + stream, + # FIXME this only works because of a bug https://gitlab.cern.ch/lhcb/LHCb/-/issues/160 + # There is no trivial fix. 
+ RawBankViews=locations, + OutputLevel=4, + #RawEvent dependent on the stream name - + #else multiple algs (one for each stream) output to same location + outputs={'RawEvent': force_location('/Event/' + stream)}) + # Kill any links the output file might have had to the input file + writers.append(AddressKillerAlg()) + writers.append(consolidate_views) + + writers.append( + root_writer( + full_fname, + extra_locations_to_persist + [consolidate_views.RawEvent])) + return writer_setup, writers @@ -512,32 +452,27 @@ def moore_control_flow(options, streams, process, allen_hlt1, analytics=False): stream_rbw[stream] = rbw if options.output_file or options.output_type == ONLINE or analytics: - input_raw_bank_types = [] - if process == 'hlt2': - input_raw_bank_types += HLT1_REPORT_RAW_BANK_TYPES - elif process == 'spruce' or process == "pass": - input_raw_bank_types += (HLT1_REPORT_RAW_BANK_TYPES - | HLT2_REPORT_RAW_BANK_TYPES) - #For spruce and passthrough need to unpack raw event to RawBank::Views - if process == "spruce" or process == "pass": - bank_types = list(HLT1_REPORT_RAW_BANK_TYPES - | HLT2_REPORT_RAW_BANK_TYPES - | DETECTOR_RAW_BANK_TYPES) + input_raw_bank_types = (HLT1_REPORT_RAW_BANK_TYPES | DETECTOR_RAW_BANK_TYPES) + if process == 'spruce' or process == "pass": + input_raw_bank_types += ( HLT2_REPORT_RAW_BANK_TYPES) - if process == "spruce": - # sprucing already unpacked the DstData in reco_objects_for_spruce - bank_types_to_unpack = [ - b for b in bank_types if b != 'DstData' + bank_types = list( input_raw_bank_types) + + + if process == "spruce": + # sprucing already unpacked the DstData in reco_objects_for_spruce + bank_types_to_unpack = [ + b for b in bank_types if b != 'DstData' ] - else: - bank_types_to_unpack = bank_types - unpackrawevent = unpack_rawevent( - bank_types=bank_types_to_unpack, configurables=False) - unpack.append(unpackrawevent) - - ## Hack to make `extra_locations_to_persist` objects writable in pass through - if options.simulation and options.input_type == 'ROOT' and process == "pass": - unpack += mc_unpackers(configurables=False) + else: + bank_types_to_unpack = bank_types + unpackrawevent = unpack_rawevent( + bank_types=bank_types_to_unpack, configurables=False) + unpack.append(unpackrawevent) + + ## Hack to make `extra_locations_to_persist` objects writable in pass through + if options.simulation and options.input_type == 'ROOT' and process == "pass": + unpack += mc_unpackers(configurables=False) # TODO when running HLT2 we want to only keep some HLT1 banks and # discard banks produced by a previous run of HLT2 itself. @@ -552,12 +487,13 @@ def moore_control_flow(options, streams, process, allen_hlt1, analytics=False): # KillSourceID=..., KillSourceIDMask=...)) for stream, stream_lines in streams.items(): - persist_types = _unique(input_raw_bank_types + + persist_types = _unique( list(input_raw_bank_types) + stream_bank_types(stream)) new_locations = list(new_raw_banks.values()) + #ADD THIS BACK if (process == "hlt1" or process == "hlt2") and not analytics: - new_locations.append(stream_rbw[stream].RawEventLocation) + new_locations.append(stream_rbw[stream].OutputView) if analytics: post_algs = RawEventSize_analysis(process, stream, @@ -666,10 +602,10 @@ def run_moore(options, config = configure_input(options) # Then create the data (and control) flow for all streams. 
streams = (make_streams or options.lines_maker)() + # Create default streams definition if make_streams returned a list if not isinstance(streams, dict): streams = dict(default=streams) - # Exclude the lines with known issues (with non-thread safe algos etc.) if exclude_incompatible: filtered_streams = {} @@ -774,6 +710,8 @@ def allen_control_flow(options, write_detector_raw_banks=True): # Give stream name 'default', needed for 'RawEventSimpleCombiner'. # Will not change output file name stream = "default" + if process == "Hlt2": + stream = "DAQ" pre_algs, post_algs = stream_writer( stream, options.output_file, -- GitLab From 0520accc528d154268a236fdd26b157a556bcc97 Mon Sep 17 00:00:00 2001 From: sesen Date: Fri, 14 Oct 2022 18:02:22 +0200 Subject: [PATCH 02/28] fix raw banks --- Hlt/Moore/python/Moore/config.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/Hlt/Moore/python/Moore/config.py b/Hlt/Moore/python/Moore/config.py index e406c0efd53..73f94560f3b 100644 --- a/Hlt/Moore/python/Moore/config.py +++ b/Hlt/Moore/python/Moore/config.py @@ -345,8 +345,8 @@ def stream_writer(stream, root_writer( full_fname, extra_locations_to_persist + [consolidate_views.RawEvent])) - - return writer_setup, writers + + return writer_setup, writers def moore_control_flow(options, streams, process, allen_hlt1, analytics=False): @@ -491,7 +491,7 @@ def moore_control_flow(options, streams, process, allen_hlt1, analytics=False): stream_bank_types(stream)) new_locations = list(new_raw_banks.values()) - #ADD THIS BACK + if (process == "hlt1" or process == "hlt2") and not analytics: new_locations.append(stream_rbw[stream].OutputView) @@ -711,7 +711,7 @@ def allen_control_flow(options, write_detector_raw_banks=True): # Will not change output file name stream = "default" if process == "Hlt2": - stream = "DAQ" + stream = "DAQ/RawEvent" pre_algs, post_algs = stream_writer( stream, options.output_file, -- GitLab From 88ed803bd9f6252e44f23c4b57340e153d79870b Mon Sep 17 00:00:00 2001 From: sesen Date: Tue, 18 Oct 2022 10:03:24 +0200 Subject: [PATCH 03/28] calo decoding with rawbank views --- .../options/hlt2_2or3bodytopo_realtime.py | 1 + Hlt/Hlt2Conf/options/hlt2_all_lines.py | 4 + .../test_hlt2_2or3bodytopo_realtime.qmt | 2 +- Hlt/Moore/python/Moore/config.py | 147 ++++++++---------- .../RecoConf/calorimeter_reconstruction.py | 69 ++++---- 5 files changed, 110 insertions(+), 113 deletions(-) diff --git a/Hlt/Hlt2Conf/options/hlt2_2or3bodytopo_realtime.py b/Hlt/Hlt2Conf/options/hlt2_2or3bodytopo_realtime.py index c7fe18ebca8..2dd74ff1e63 100644 --- a/Hlt/Hlt2Conf/options/hlt2_2or3bodytopo_realtime.py +++ b/Hlt/Hlt2Conf/options/hlt2_2or3bodytopo_realtime.py @@ -54,3 +54,4 @@ public_tools = [stateProvider_with_simplified_geom()] with reconstruction.bind(from_file=False): config = run_moore( options, make_lines, public_tools, exclude_incompatible=False) + diff --git a/Hlt/Hlt2Conf/options/hlt2_all_lines.py b/Hlt/Hlt2Conf/options/hlt2_all_lines.py index f6d96804758..c5b978e4d9e 100644 --- a/Hlt/Hlt2Conf/options/hlt2_all_lines.py +++ b/Hlt/Hlt2Conf/options/hlt2_all_lines.py @@ -36,3 +36,7 @@ def make_lines(): public_tools = [stateProvider_with_simplified_geom()] config = run_moore(options, make_lines, public_tools) + +from Configurables import HiveDataBrokerSvc, HLTControlFlowMgr +HiveDataBrokerSvc().OutputLevel = 2 +HLTControlFlowMgr().OutputLevel = 2 diff --git a/Hlt/Hlt2Conf/tests/qmtest/test_hlt2_2or3bodytopo_realtime.qmt b/Hlt/Hlt2Conf/tests/qmtest/test_hlt2_2or3bodytopo_realtime.qmt index 
6a20b94fd5c..00903c5ab40 100644 --- a/Hlt/Hlt2Conf/tests/qmtest/test_hlt2_2or3bodytopo_realtime.qmt +++ b/Hlt/Hlt2Conf/tests/qmtest/test_hlt2_2or3bodytopo_realtime.qmt @@ -20,7 +20,7 @@ Check topo 2 or 3 body lines with persistreco using real time reco. from Configurables import HiveDataBrokerSvc -HiveDataBrokerSvc().OutputLevel = 5 +HiveDataBrokerSvc().OutputLevel = 2 true diff --git a/Hlt/Moore/python/Moore/config.py b/Hlt/Moore/python/Moore/config.py index 73f94560f3b..7f383ee93de 100644 --- a/Hlt/Moore/python/Moore/config.py +++ b/Hlt/Moore/python/Moore/config.py @@ -280,73 +280,59 @@ def stream_writer(stream, if stream != "default" and stream_part not in full_fname and output_type != ONLINE: raise ConfigurationError("{stream} must be part of output_file") - if output_type in [MDF_KEY, ONLINE]: - raw_events = [default_raw_event([bt]) for bt in persist_locations] - # Some banks might come from the same RawEvent so we need to prune - # the duplicates: - raw_events = new_locations + _unique(raw_events) - raw_event_combiner = RawEventCombiner( - RawEventLocations=raw_events, - outputs={ - 'RawEvent': force_location(stream + ROOT_RAW_EVENT_LOCATION) - # TODO check the above does not collide with the input raw event location (when stream = '') - }) - - if output_type == MDF_KEY: - writers.extend([ - raw_event_combiner, # is this needed if it's an input of the mdf writer? - mdf_writer(full_fname, raw_event_combiner), - ]) - elif output_type == ONLINE: - writers.extend([ - raw_event_combiner, # is this needed if it's an input of the mdf writer? - online_writer(raw_event_combiner), - ]) - - elif output_type == ROOT_KEY: - - locations = [dh.location for dh in new_locations] - if process == "spruce": - # Do not want DstData raw bank from HLT2 in Sprucing output - persist_locations = [ - x for x in persist_locations if x != 'DstData' - ] - extra_locations_to_persist = [] - if propagate_mc: - ##For simulation need to propagate MC objects - if process == "spruce": - extra_locations_to_persist += extra_locations - else: - mc_part = make_data_with_FetchDataFromFile( - '/Event/HLT2/pSim/MCParticles') - mc_vert = make_data_with_FetchDataFromFile( - '/Event/HLT2/pSim/MCVertices') - extra_locations_to_persist += [mc_part, mc_vert] - - #persist locations have been unpacked by LHCb__UnpackRawEvent - locations += [ - '/Event/DAQ/RawBanks/%s' % (rb) for rb in persist_locations + + locations = [dh.location for dh in new_locations] + if process == "spruce": + # Do not want DstData raw bank from HLT2 in Sprucing output + persist_locations = [ + x for x in persist_locations if x != 'DstData' ] - - consolidate_views = CombineRawBankViewsToRawEvent( - name="EvtSize_" + stream, - # FIXME this only works because of a bug https://gitlab.cern.ch/lhcb/LHCb/-/issues/160 - # There is no trivial fix. 
- RawBankViews=locations, - OutputLevel=4, - #RawEvent dependent on the stream name - - #else multiple algs (one for each stream) output to same location - outputs={'RawEvent': force_location('/Event/' + stream)}) + extra_locations_to_persist = [] + if propagate_mc: + ##For simulation need to propagate MC objects + if process == "spruce": + extra_locations_to_persist += extra_locations + else: + mc_part = make_data_with_FetchDataFromFile( + '/Event/HLT2/pSim/MCParticles') + mc_vert = make_data_with_FetchDataFromFile( + '/Event/HLT2/pSim/MCVertices') + extra_locations_to_persist += [mc_part, mc_vert] + + #persist locations have been unpacked by LHCb__UnpackRawEvent + locations += [ + '/Event/DAQ/RawBanks/%s' % (rb) for rb in persist_locations + ] + + ## NOT TODO This should not be hardcoded here + if process == "hlt2": + stream = "DAQ/RawEvent" + consolidate_views = CombineRawBankViewsToRawEvent( + name="EvtSize_" + stream, + # FIXME this only works because of a bug https://gitlab.cern.ch/lhcb/LHCb/-/issues/160 + # There is no trivial fix. + OutputLevel=2, + RawBankViews=locations, + #RawEvent dependent on the stream name - + #else multiple algs (one for each stream) output to same location + outputs={'RawEvent': force_location('/Event/' + stream)}) + + writers.append(consolidate_views) + if output_type == MDF_KEY: + writers.extend( + [mdf_writer(full_fname, consolidate_views.RawEvent.location)]) + elif output_type == ONLINE: + writers.extend( + [online_writer(full_fname, consolidate_views.RawEvent.location)]) + elif output_type == ROOT_KEY: # Kill any links the output file might have had to the input file writers.append(AddressKillerAlg()) - writers.append(consolidate_views) - writers.append( root_writer( full_fname, extra_locations_to_persist + [consolidate_views.RawEvent])) - - return writer_setup, writers + + return writer_setup, writers def moore_control_flow(options, streams, process, allen_hlt1, analytics=False): @@ -445,34 +431,27 @@ def moore_control_flow(options, streams, process, allen_hlt1, analytics=False): }), DecReports=dec_reports, ODIN=make_odin()) - if options.output_type == "ROOT": - # TODO remove once we use a functional raw event combiner - # for ROOT and not RawEventSimpleCombiner. 
- stream_writers.append(rbw) + + stream_writers.append(rbw) stream_rbw[stream] = rbw if options.output_file or options.output_type == ONLINE or analytics: - input_raw_bank_types = (HLT1_REPORT_RAW_BANK_TYPES | DETECTOR_RAW_BANK_TYPES) - if process == 'spruce' or process == "pass": - input_raw_bank_types += ( HLT2_REPORT_RAW_BANK_TYPES) + input_raw_bank_types = (DETECTOR_RAW_BANK_TYPES) - bank_types = list( input_raw_bank_types) - - - if process == "spruce": - # sprucing already unpacked the DstData in reco_objects_for_spruce - bank_types_to_unpack = [ - b for b in bank_types if b != 'DstData' - ] - else: - bank_types_to_unpack = bank_types - unpackrawevent = unpack_rawevent( - bank_types=bank_types_to_unpack, configurables=False) - unpack.append(unpackrawevent) + if process != 'hlt1': + input_raw_bank_types = (input_raw_bank_types | HLT1_REPORT_RAW_BANK_TYPES) + + if process == 'spruce' or process == "pass": + bank_types_to_unpack = list( input_raw_bank_types) + input_raw_bank_types = (input_raw_bank_types | HLT2_REPORT_RAW_BANK_TYPES) + + unpackrawevent = unpack_rawevent( + bank_types=bank_types_to_unpack, configurables=False) + unpack.append(unpackrawevent) - ## Hack to make `extra_locations_to_persist` objects writable in pass through - if options.simulation and options.input_type == 'ROOT' and process == "pass": - unpack += mc_unpackers(configurables=False) + ## Hack to make `extra_locations_to_persist` objects writable in pass through + if options.simulation and options.input_type == 'ROOT' and process == "pass": + unpack += mc_unpackers(configurables=False) # TODO when running HLT2 we want to only keep some HLT1 banks and # discard banks produced by a previous run of HLT2 itself. @@ -716,7 +695,7 @@ def allen_control_flow(options, write_detector_raw_banks=True): stream, options.output_file, options.output_type, - "hlt1", + process, options.simulation and options.input_type == 'ROOT', list(new_hlt_banks.values()), input_raw_bank_types, diff --git a/Hlt/RecoConf/python/RecoConf/calorimeter_reconstruction.py b/Hlt/RecoConf/python/RecoConf/calorimeter_reconstruction.py index 7e355093ad9..53a5986e945 100644 --- a/Hlt/RecoConf/python/RecoConf/calorimeter_reconstruction.py +++ b/Hlt/RecoConf/python/RecoConf/calorimeter_reconstruction.py @@ -25,35 +25,37 @@ from PyConf.Algorithms import ( from PyConf.Tools import (CaloFutureECorrection, CaloFutureSCorrection, CaloFutureLCorrection) -from PyConf.application import default_raw_event +from PyConf.application import default_raw_banks from DDDB.CheckDD4Hep import UseDD4Hep -def make_ecal_digits(raw): +def make_ecal_digits(raw, rawerror): ecalpath = '/world/DownstreamRegion/Ecal:DetElement-Info-IOV' if UseDD4Hep else '/dd/Structure/LHCb/DownstreamRegion/Ecal' adc_alg = CaloFutureRawToDigits( name='FutureEcalZSup', - RawEventLocation=raw, + RawBanks=raw, + ErrorRawBanks=rawerror, #need a different property wrt make_hcal_digits and output cannot be set #so DetectorLocation has to be set here DetectorLocation=ecalpath) return adc_alg.OutputDigitData -def make_hcal_digits(raw): +def make_hcal_digits(raw, rawerror): hcalpath = '/world/DownstreamRegion/Hcal:DetElement-Info-IOV' if UseDD4Hep else '/dd/Structure/LHCb/DownstreamRegion/Hcal' adc_alg_Hcal = CaloFutureRawToDigits( name='FutureHcalZSup', - RawEventLocation=raw, + RawBanks=raw, + ErrorRawBanks=rawerror, #need a different property wrt make_ecal_digits and output cannot be set #so DetectorLocation has to be set here DetectorLocation=hcalpath) return adc_alg_Hcal.OutputDigitData -def 
make_digits(raw): - digitsEcal = make_ecal_digits(raw) - digitsHcal = make_hcal_digits(raw) +def make_digits(raw, rawerror): + digitsEcal = make_ecal_digits(raw, rawerror) + digitsHcal = make_hcal_digits(raw, rawerror) return {"digitsEcal": digitsEcal, "digitsHcal": digitsHcal} @@ -442,7 +444,7 @@ def make_merged_pi0_various(ecalClusters, pvs): @configurable def make_calo(tracks, pvs, - make_raw=default_raw_event, + make_raw=default_raw_banks, calo_raw_event=False, chargedpid_types={ "calo": ["Long", "Downstream"], @@ -451,13 +453,18 @@ def make_calo(tracks, trackrels=None): # digits if calo_raw_event: # needed for real data - rawEventEcal = make_raw(["Calo"]) - rawEventHcal = make_raw(["Calo"]) + rawEventEcal = make_raw("Calo") + rawEventHcal = make_raw("Calo") + rawEventEcalError = make_raw("CaloError") + rawEventHcalError = make_raw("CaloError") else: - rawEventEcal = make_raw(["EcalPacked"]) - rawEventHcal = make_raw(["HcalPacked"]) - rawToDigitsOutputEcal = make_digits(rawEventEcal) - rawToDigitsOutputHcal = make_digits(rawEventHcal) + rawEventEcal = make_raw("EcalPacked") + rawEventHcal = make_raw("HcalPacked") + rawEventEcalError = make_raw("EcalPackedError") + rawEventHcalError = make_raw("HcalPackedError") + + rawToDigitsOutputEcal = make_digits(rawEventEcal, rawEventEcalError) + rawToDigitsOutputHcal = make_digits(rawEventHcal, rawEventHcalError) digitsEcal = rawToDigitsOutputEcal["digitsEcal"] digitsHcal = rawToDigitsOutputHcal["digitsHcal"] @@ -561,11 +568,13 @@ def make_calo(tracks, } -def make_calo_reduced(make_raw=default_raw_event): - rawEventEcal = make_raw(["EcalPacked"]) - rawEventHcal = make_raw(["HcalPacked"]) - rawToDigitsOutputEcal = make_digits(rawEventEcal) - rawToDigitsOutputHcal = make_digits(rawEventHcal) +def make_calo_reduced(make_raw=default_raw_banks): + rawEventEcal = make_raw("EcalPacked") + rawEventHcal = make_raw("HcalPacked") + rawEventEcalError = make_raw("EcalPackedError") + rawEventHcalError = make_raw("HcalPackedError") + rawToDigitsOutputEcal = make_digits(rawEventEcal,rawEventEcalError) + rawToDigitsOutputHcal = make_digits(rawEventHcal,rawEventHcalError) digitsEcal = rawToDigitsOutputEcal["digitsEcal"] digitsHcal = rawToDigitsOutputHcal["digitsHcal"] clusters = make_clusters(digitsEcal) @@ -577,9 +586,10 @@ def make_calo_reduced(make_raw=default_raw_event): } -def make_calo_resolution_gamma(tracks, pvs, make_raw=default_raw_event): - rawEventEcal = make_raw(["EcalPacked"]) - rawToDigitsOutputEcal = make_digits(rawEventEcal) +def make_calo_resolution_gamma(tracks, pvs, make_raw=default_raw_banks): + rawEventEcal = make_raw("EcalPacked") + rawEventEcalError = make_raw("EcalPackedError") + rawToDigitsOutputEcal = make_digits(rawEventEcal, rawEventEcalError) digitsEcal = rawToDigitsOutputEcal["digitsEcal"] clusters = make_clusters_various(digitsEcal) @@ -612,9 +622,11 @@ def make_calo_resolution_gamma(tracks, pvs, make_raw=default_raw_event): } -def make_calo_cluster_shapes(tracks, pvs, make_raw=default_raw_event): - rawEventEcal = make_raw(["EcalPacked"]) - digitsEcal = make_digits(rawEventEcal)["digitsEcal"] +def make_calo_cluster_shapes(tracks, pvs, make_raw=default_raw_banks): + rawEventEcal = make_raw("EcalPacked") + rawEventEcalError = make_raw("EcalPackedError") + + digitsEcal = make_digits(rawEventEcal, rawEventEcalError)["digitsEcal"] clusters = make_clusters_shapes(digitsEcal) tracks_incalo = make_acceptance(tracks) @@ -633,9 +645,10 @@ def make_calo_cluster_shapes(tracks, pvs, make_raw=default_raw_event): } -def 
make_calo_resolution_pi0(pvs, make_raw=default_raw_event): +def make_calo_resolution_pi0(pvs, make_raw=default_raw_banks): rawEventEcal = make_raw(["EcalPacked"]) - rawToDigitsOutputEcal = make_digits(rawEventEcal) + rawEventEcalError = make_raw("EcalPackedError") + rawToDigitsOutputEcal = make_digits(rawEventEcal, rawEventEcalError) digitsEcal = rawToDigitsOutputEcal["digitsEcal"] clusters = make_clusters_various(digitsEcal) -- GitLab From 9e89afb18332d4dc21fdf94527d001e90acdca53 Mon Sep 17 00:00:00 2001 From: sesen Date: Tue, 18 Oct 2022 15:37:04 +0200 Subject: [PATCH 04/28] update Velo clustering, muon, rich, calo decoding to take RawBank::View --- Hlt/Moore/python/Moore/config.py | 301 +++++++++++------- Hlt/RecoConf/python/RecoConf/hlt1_muonid.py | 4 +- Hlt/RecoConf/python/RecoConf/hlt2_muonid.py | 1 - .../python/RecoConf/rich_reconstruction.py | 22 +- Hlt/RecoConf/python/RecoConf/standalone.py | 14 +- 5 files changed, 213 insertions(+), 129 deletions(-) diff --git a/Hlt/Moore/python/Moore/config.py b/Hlt/Moore/python/Moore/config.py index 7f383ee93de..d297c313588 100644 --- a/Hlt/Moore/python/Moore/config.py +++ b/Hlt/Moore/python/Moore/config.py @@ -159,42 +159,27 @@ def report_writers_nodes(streams, "spruce": "SpruceSelectionID", "pass": "SpruceSelectionID" }[process] - tes_root = { - "hlt1": "/Event", - "hlt2": "/Event/HLT2", - "spruce": "/Event/Spruce", - "pass": "/Event/Spruce" - }[process] - process_type = { - "hlt1": "Hlt", - "hlt2": "Hlt", - "spruce": "Spruce", - "pass": "Spruce" - }[process] - dec_key = int( register_encoding_dictionary( major_name, generate_encoding_dictionary(major_name, [l.decision_name for l in lines])), 16) # TODO unsigned? Stick to hex string? - - erw = ExecutionReportsWriter( - Persist=[line.name for line in lines], - ANNSvcKey=major_name, - TCK=dec_key # TODO unsigned? Stick to hex string? 
- ) - drw = HltDecReportsWriter( - SourceID=source_id, - InputHltDecReportsLocation=erw.DecReportsLocation, - EncodingKey=dec_key, + if process == "hlt1" or process == "hlt2": + erw = ExecutionReportsWriter( + Persist=[line.name for line in lines], + ANNSvcKey=major_name, + TCK=dec_key, ) + drw = HltDecReportsWriter( + SourceID=source_id, + InputHltDecReportsLocation=erw.DecReportsLocation, + EncodingKey=dec_key, + ) + algs.extend([erw, drw]) + new_hlt_banks['HltDecReports'] = drw.OutputRawEvent - algs.extend([erw, drw]) - new_hlt_banks[process_type+'DecReports'] = drw.OutputView - - # Make SelReports only for Hlt1 if process == "hlt1": srm = make_selreports(process, physics_lines, erw) srw = HltSelReportsWriter( @@ -205,14 +190,7 @@ def report_writers_nodes(streams, EncodingKey=srm.properties['EncodingKey']) algs.append(srw) new_hlt_banks['HltSelReports'] = srw.RawEvent - # pass thourhough needs to copy input manifest - elif process == "pass": - if not options.input_manifest_file: - raise RuntimeError( - ' pass-through configuration -- must specify an input manifest' - ) - copy(options.input_manifest_file, options.output_manifest_file) - else: + elif process == "hlt2": (line_output_cf, line_output_locations, packed_data) = persist_line_outputs( physics_lines, @@ -221,13 +199,51 @@ def report_writers_nodes(streams, associate_mc, process.capitalize(), output_manifest_file, - stream=tes_root, clone_mc=options.simulation and options.input_type == ROOT_KEY) - new_hlt_banks['DstData'] = packed_data.OutputView algs.append(line_output_cf) + new_hlt_banks['DstData'] = packed_data.OutputRawEvent extra_locations_to_persist.extend(line_output_locations) - + else: + ##spruce and passthrough jobs will write a Spruce report + erw = ExecutionReportsWriter( + Persist=[line.name for line in lines], + ANNSvcKey=major_name, + TCK=dec_key # TODO unsigned? Stick to hex string? 
+ ) + drw = HltDecReportsWriter( + SourceID=source_id, + InputHltDecReportsLocation=erw.DecReportsLocation, + EncodingKey=dec_key, + ) + + algs.extend([erw, drw]) + new_hlt_banks['SpruceDecReports'] = drw.OutputView + + if process == "spruce": + #Only create new DstData locations if exclusive sprucing and not if passthrough + line_output_cf, line_output_locations, packed_data = persist_line_outputs( + physics_lines, + data_type, + erw.DecReportsLocation, + associate_mc, + process.capitalize(), + output_manifest_file, + stream="/Event/Spruce", + reco_stream="/Event/HLT2", + clone_mc=options.simulation and options.input_type == ROOT_KEY) + + new_hlt_banks['DstData'] = packed_data.OutputView + algs.append(line_output_cf) + extra_locations_to_persist.extend(line_output_locations) + + if process == "pass": + if not options.input_manifest_file: + raise RuntimeError( + ' pass-through configuration -- must specify an input manifest' + ) + copy(options.input_manifest_file, options.output_manifest_file) + node = CompositeNode( 'report_writers', combine_logic=NodeLogic.NONLAZY_OR, @@ -280,58 +296,116 @@ def stream_writer(stream, if stream != "default" and stream_part not in full_fname and output_type != ONLINE: raise ConfigurationError("{stream} must be part of output_file") - - locations = [dh.location for dh in new_locations] - if process == "spruce": - # Do not want DstData raw bank from HLT2 in Sprucing output - persist_locations = [ - x for x in persist_locations if x != 'DstData' + if output_type in [MDF_KEY, ONLINE]: + raw_events = [default_raw_event([bt]) for bt in persist_locations] + # Some banks might come from the same RawEvent so we need to prune + # the duplicates: + raw_events = new_locations + _unique(raw_events) + raw_event_combiner = RawEventCombiner( + RawEventLocations=raw_events, + outputs={ + 'RawEvent': force_location(stream + ROOT_RAW_EVENT_LOCATION) + # TODO check the above does not collide with the input raw event location (when stream = '') + }) + + if output_type == MDF_KEY: + writers.extend([ + raw_event_combiner, # is this needed if it's an input of the mdf writer? + mdf_writer(full_fname, raw_event_combiner), + ]) + elif output_type == ONLINE: + writers.extend([ + raw_event_combiner, # is this needed if it's an input of the mdf writer? + online_writer(raw_event_combiner), + ]) + + elif output_type == ROOT_KEY: + if process == "spruce" or process == "pass": + locations = [dh.location for dh in new_locations] + if process == "spruce": + # Do not want DstData raw bank from HLT2 in Sprucing output + persist_locations = [ + x for x in persist_locations if x != 'DstData' + ] + extra_locations_to_persist = [] + if propagate_mc: + ##For simulation need to propagate MC objects + if process == "spruce": + extra_locations_to_persist += extra_locations + else: + mc_part = make_data_with_FetchDataFromFile( + '/Event/HLT2/pSim/MCParticles') + mc_vert = make_data_with_FetchDataFromFile( + '/Event/HLT2/pSim/MCVertices') + extra_locations_to_persist += [mc_part, mc_vert] + + #persist locations have been unpacked by LHCb__UnpackRawEvent + locations += [ + '/Event/DAQ/RawBanks/%s' % (rb) for rb in persist_locations ] - extra_locations_to_persist = [] - if propagate_mc: - ##For simulation need to propagate MC objects - if process == "spruce": - extra_locations_to_persist += extra_locations + + consolidate_views = CombineRawBankViewsToRawEvent( + name="EvtSize_" + stream, + # FIXME this only works because of a bug https://gitlab.cern.ch/lhcb/LHCb/-/issues/160 + # There is no trivial fix. 
+ RawBankViews=locations, + OutputLevel=4, + #RawEvent dependent on the stream name - + #else multiple algs (one for each stream) output to same location + outputs={'RawEvent': force_location('/Event/' + stream)}) + # Kill any links the output file might have had to the input file + writers.append(AddressKillerAlg()) + writers.append(consolidate_views) + + writers.append( + root_writer( + full_fname, + extra_locations_to_persist + [consolidate_views.RawEvent])) + else: - mc_part = make_data_with_FetchDataFromFile( - '/Event/HLT2/pSim/MCParticles') - mc_vert = make_data_with_FetchDataFromFile( - '/Event/HLT2/pSim/MCVertices') - extra_locations_to_persist += [mc_part, mc_vert] - - #persist locations have been unpacked by LHCb__UnpackRawEvent - locations += [ - '/Event/DAQ/RawBanks/%s' % (rb) for rb in persist_locations - ] - - ## NOT TODO This should not be hardcoded here - if process == "hlt2": - stream = "DAQ/RawEvent" - consolidate_views = CombineRawBankViewsToRawEvent( - name="EvtSize_" + stream, - # FIXME this only works because of a bug https://gitlab.cern.ch/lhcb/LHCb/-/issues/160 - # There is no trivial fix. - OutputLevel=2, - RawBankViews=locations, - #RawEvent dependent on the stream name - - #else multiple algs (one for each stream) output to same location - outputs={'RawEvent': force_location('/Event/' + stream)}) - - writers.append(consolidate_views) - if output_type == MDF_KEY: - writers.extend( - [mdf_writer(full_fname, consolidate_views.RawEvent.location)]) - elif output_type == ONLINE: - writers.extend( - [online_writer(full_fname, consolidate_views.RawEvent.location)]) - elif output_type == ROOT_KEY: - # Kill any links the output file might have had to the input file - writers.append(AddressKillerAlg()) - writers.append( - root_writer( - full_fname, - extra_locations_to_persist + [consolidate_views.RawEvent])) - + raw_events = [default_raw_event([bt]) for bt in persist_locations] + # Some banks might come from the same RawEvent so we need to prune + # the duplicates: + raw_events = new_locations + _unique(raw_events) + + raw_event_locations = [dh.location for dh in raw_events] + # Kill any links the output file might have had to the input file + writers.append(AddressKillerAlg()) + # FIXME The following is a workaround for the inability to read + # the input raw event into a custom location. + + overwrite = ROOT_RAW_EVENT_LOCATION in raw_event_locations + + if overwrite: + # Do not copy raw_out_location onto itself + raw_event_locations.remove(ROOT_RAW_EVENT_LOCATION) + raw_event_combiner = RawEventSimpleCombiner( + InputRawEventLocations=raw_event_locations, + # False -> put, True -> use getIfExists for the output + EnableIncrementalMode=overwrite, + outputs={ + 'OutputRawEventLocation': + force_location(ROOT_RAW_EVENT_LOCATION) + }) + + # Always copy the locations from the input when the output is ROOT + input_leaves = Gaudi__Hive__FetchLeavesFromFile() + # Collecting the locations to copy must be the first thing in the + # control flow. 
+ writer_setup.append(input_leaves) + writers.append(raw_event_combiner) + if write_detector_raw_banks: + writers.append( + root_copy_input_writer( + full_fname, + input_leaves, + [ROOT_RAW_EVENT_LOCATION] + extra_locations, + # don't copy the raw event + tes_veto_locations=raw_event_locations, + )) + else: + raise NotImplementedError() + return writer_setup, writers @@ -403,7 +477,6 @@ def moore_control_flow(options, streams, process, allen_hlt1, analytics=False): postcaler_decreports_monitor ], force_order=True) - # We want to run the monitoring of line decisions on every event to have the proper normalization. # Therefore, add the DecReportsMonitor after the control flow node containing all selection lines # and use NONLAZY_OR as type of the node containing both. To not run the monitoring on every event @@ -431,24 +504,39 @@ def moore_control_flow(options, streams, process, allen_hlt1, analytics=False): }), DecReports=dec_reports, ODIN=make_odin()) - - stream_writers.append(rbw) + if options.output_type == "ROOT": + # TODO remove once we use a functional raw event combiner + # for ROOT and not RawEventSimpleCombiner. + stream_writers.append(rbw) stream_rbw[stream] = rbw if options.output_file or options.output_type == ONLINE or analytics: - input_raw_bank_types = (DETECTOR_RAW_BANK_TYPES) - - if process != 'hlt1': - input_raw_bank_types = (input_raw_bank_types | HLT1_REPORT_RAW_BANK_TYPES) - - if process == 'spruce' or process == "pass": - bank_types_to_unpack = list( input_raw_bank_types) - input_raw_bank_types = (input_raw_bank_types | HLT2_REPORT_RAW_BANK_TYPES) - + input_raw_bank_types = [] + if process == 'hlt2': + input_raw_bank_types += HLT1_REPORT_RAW_BANK_TYPES + elif process == 'spruce' or process == "pass": + input_raw_bank_types += (HLT1_REPORT_RAW_BANK_TYPES + | HLT2_REPORT_RAW_BANK_TYPES) + #For spruce and passthrough need to unpack raw event to RawBank::Views + if process == "spruce" or process == "pass": + bank_types = list(HLT1_REPORT_RAW_BANK_TYPES + | HLT2_REPORT_RAW_BANK_TYPES + | DETECTOR_RAW_BANK_TYPES) + + if process == "spruce": + # sprucing already unpacked the DstData/ODIN in reco_objects_for_spruce + # TODO check if they are unpacked using conf + bank_types_to_unpack = [ + b for b in bank_types if b != 'DstData' and b != 'ODIN' + ] + else: + bank_types_to_unpack = [ + b for b in bank_types if b != 'ODIN' + ] unpackrawevent = unpack_rawevent( bank_types=bank_types_to_unpack, configurables=False) unpack.append(unpackrawevent) - + ## Hack to make `extra_locations_to_persist` objects writable in pass through if options.simulation and options.input_type == 'ROOT' and process == "pass": unpack += mc_unpackers(configurables=False) @@ -466,13 +554,12 @@ def moore_control_flow(options, streams, process, allen_hlt1, analytics=False): # KillSourceID=..., KillSourceIDMask=...)) for stream, stream_lines in streams.items(): - persist_types = _unique( list(input_raw_bank_types) + + persist_types = _unique(input_raw_bank_types + stream_bank_types(stream)) new_locations = list(new_raw_banks.values()) - if (process == "hlt1" or process == "hlt2") and not analytics: - new_locations.append(stream_rbw[stream].OutputView) + new_locations.append(stream_rbw[stream].RawEventLocation) if analytics: post_algs = RawEventSize_analysis(process, stream, @@ -581,10 +668,10 @@ def run_moore(options, config = configure_input(options) # Then create the data (and control) flow for all streams. 
streams = (make_streams or options.lines_maker)() - # Create default streams definition if make_streams returned a list if not isinstance(streams, dict): streams = dict(default=streams) + # Exclude the lines with known issues (with non-thread safe algos etc.) if exclude_incompatible: filtered_streams = {} @@ -689,13 +776,11 @@ def allen_control_flow(options, write_detector_raw_banks=True): # Give stream name 'default', needed for 'RawEventSimpleCombiner'. # Will not change output file name stream = "default" - if process == "Hlt2": - stream = "DAQ/RawEvent" pre_algs, post_algs = stream_writer( stream, options.output_file, options.output_type, - process, + "hlt1", options.simulation and options.input_type == 'ROOT', list(new_hlt_banks.values()), input_raw_bank_types, diff --git a/Hlt/RecoConf/python/RecoConf/hlt1_muonid.py b/Hlt/RecoConf/python/RecoConf/hlt1_muonid.py index 41638ea834f..b916fd886a6 100644 --- a/Hlt/RecoConf/python/RecoConf/hlt1_muonid.py +++ b/Hlt/RecoConf/python/RecoConf/hlt1_muonid.py @@ -9,7 +9,7 @@ # or submit itself to any jurisdiction. # ############################################################################### from PyConf import configurable -from PyConf.application import default_raw_event +from PyConf.application import default_raw_banks, default_raw_event from PyConf.Algorithms import ( MuonRawToHits, MuonRawInUpgradeToHits, @@ -27,7 +27,7 @@ def make_muon_hits(geometry_version=2, make_raw=default_raw_event): else: raise ValueError("Unsupported muon decoding version") - return raw_to_hits(RawEventLocation=make_raw(["Muon"])).HitContainer + return raw_to_hits(RawBanks=default_raw_banks("Muon", make_raw)).HitContainer @configurable diff --git a/Hlt/RecoConf/python/RecoConf/hlt2_muonid.py b/Hlt/RecoConf/python/RecoConf/hlt2_muonid.py index d5df0f372e2..1b2d24d0aa3 100644 --- a/Hlt/RecoConf/python/RecoConf/hlt2_muonid.py +++ b/Hlt/RecoConf/python/RecoConf/hlt2_muonid.py @@ -12,7 +12,6 @@ from PyConf import configurable from PyConf.Algorithms import MuonIDHlt2Alg from RecoConf.hlt1_muonid import make_muon_hits - @configurable def make_muon_ids(track_type, tracks, make_muon_hits=make_muon_hits): return MuonIDHlt2Alg( diff --git a/Hlt/RecoConf/python/RecoConf/rich_reconstruction.py b/Hlt/RecoConf/python/RecoConf/rich_reconstruction.py index cc659560e43..d11eaedb85a 100644 --- a/Hlt/RecoConf/python/RecoConf/rich_reconstruction.py +++ b/Hlt/RecoConf/python/RecoConf/rich_reconstruction.py @@ -10,7 +10,7 @@ ############################################################################### from PyConf import configurable from PyConf.packing import persisted_location -from PyConf.application import default_raw_event +from PyConf.application import default_raw_banks from Configurables import Rich__Future__ParticleProperties as PartProps from PyConf.Algorithms import ( @@ -230,13 +230,13 @@ def shortTrackType(track_name): @configurable -def make_rich_pixels(options, make_raw=default_raw_event): +def make_rich_pixels(options, make_raw=default_raw_banks): """ Return pixel specific RICH data. Args: options (dict): The processing options to use - make_raw : The LHCb RawEvent data object + make_raw : The LHCb RawBanks data object Returns: dict of useful data locations. 
@@ -248,19 +248,19 @@ def make_rich_pixels(options, make_raw=default_raw_event): # The conf dict to return results = {} - # Get the raw event - rawEvent = make_raw(["Rich"]) + # Get the raw banks + rawBanks = make_raw("Rich") # Get the detector options det_opts = get_detector_bool_opts(options) # document raw event and Odin used - results["RawEvent"] = rawEvent + results["RawBanks"] = rawBanks # Decode the Raw event to RichSmartIDs richDecode = RichDecoder( name="RichRawDecoder", - RawEventLocation=rawEvent, + RawBanks=rawBanks, Detectors=det_opts, Panels=options["ActivatePanel"]) results["RichDecodedData"] = richDecode.DecodedDataLocation @@ -467,7 +467,7 @@ def make_rich_tracks(track_name, input_tracks, options): def make_rich_photons(track_name, input_tracks, options, - make_raw=default_raw_event): + make_raw=default_raw_banks): """ Return reconstructed photon specific RICH data. @@ -475,7 +475,7 @@ def make_rich_photons(track_name, track_name (str): The name to assign to this configuration input_tracks (dict): The input tracks to process options (dict): The processing options to use - make_raw : The entity that provides the RawEvent to use (??) + make_raw : The entity that provides the RawBanks to use (??) Returns: dict of useful data locations. @@ -552,7 +552,7 @@ def make_rich_photons(track_name, def make_rich_pids(track_name, input_tracks, options, - make_raw=default_raw_event): + make_raw=default_raw_banks): """ Return RICH PID data. @@ -560,7 +560,7 @@ def make_rich_pids(track_name, track_name (str): The name to assign to this configuration input_tracks (dict): The input tracks to process options (dict): The processing options to use - make_raw : The entity that provides the RawEvent to use (??) + make_raw : The entity that provides the RawBanks to use (??) Returns: dict of useful data locations. 
diff --git a/Hlt/RecoConf/python/RecoConf/standalone.py b/Hlt/RecoConf/python/RecoConf/standalone.py index f9cd9194236..97b7687f5d6 100644 --- a/Hlt/RecoConf/python/RecoConf/standalone.py +++ b/Hlt/RecoConf/python/RecoConf/standalone.py @@ -40,7 +40,7 @@ from .calorimeter_mc_checking import ( check_calo_efficiency_fromB, check_calo_charged_pids) from .rich_reconstruction import make_rich_pixels, default_rich_reco_options -from PyConf.application import default_raw_event, make_odin, make_data_with_FetchDataFromFile +from PyConf.application import default_raw_event, default_raw_banks, make_odin, make_data_with_FetchDataFromFile from PyConf.packing import persisted_location from PyConf.Algorithms import ( VeloRetinaClusterTrackingSIMD, @@ -133,9 +133,9 @@ def phoenix_data_dump(): VP_hits = make_VeloClusterTrackingSIMD_hits() FT_hits = make_PrStoreSciFiHits_hits() odin = make_odin() - raw = default_raw_event(["VP"]) - calo_Edig = make_ecal_digits(raw) - calo_Hdig = make_hcal_digits(raw) + + calo_Edig = make_ecal_digits(default_raw_banks("ECal")) + calo_Hdig = make_hcal_digits(default_raw_banks("HCal")) ### Algorithms in Rec/RecAlgs Project to dump the data into Phoenix .json format @@ -190,9 +190,9 @@ def phoenix_data_dump_hlt2(): FT_hits = make_PrStoreSciFiHits_hits() Muon_hits = make_muon_hits() odin = make_odin() - raw = default_raw_event(["VP"]) - calo_Edig = make_ecal_digits(raw) - calo_Hdig = make_hcal_digits(raw) + + calo_Edig = make_ecal_digits(default_raw_banks("Ecal")) + calo_Hdig = make_hcal_digits(default_raw_banks("HCal")) rich_data = make_rich_pixels(default_rich_reco_options()) # Add proto particles -- GitLab From 096ac93397e620ad62b364ac99f7e8f14267132f Mon Sep 17 00:00:00 2001 From: sesen Date: Wed, 19 Oct 2022 16:28:46 +0200 Subject: [PATCH 05/28] fix retina raw banks --- Hlt/RecoConf/options/add_retina_clusters_to_digi.py | 4 ++-- .../options/hlt1_reco_decode_retina_with_mcchecking.py | 2 +- .../options/hlt2_reco_decode_retina_with_mcchecking.py | 2 +- Hlt/RecoConf/python/RecoConf/calorimeter_reconstruction.py | 2 +- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/Hlt/RecoConf/options/add_retina_clusters_to_digi.py b/Hlt/RecoConf/options/add_retina_clusters_to_digi.py index cd4d89d3a93..d37851e8cf9 100755 --- a/Hlt/RecoConf/options/add_retina_clusters_to_digi.py +++ b/Hlt/RecoConf/options/add_retina_clusters_to_digi.py @@ -13,7 +13,7 @@ from Moore import options, run_reconstruction from Moore.config import Reconstruction from PyConf.Algorithms import bankKiller, Gaudi__Hive__FetchLeavesFromFile, RawEventSimpleCombiner, VPRetinaClusterCreator, VPRetinaSPmixer from RecoConf.hlt1_tracking import make_SPmixed_raw_event -from PyConf.application import default_raw_event, root_copy_input_writer +from PyConf.application import default_raw_event, default_raw_banks, root_copy_input_writer from PyConf.components import force_location @@ -30,7 +30,7 @@ def combiner_digi(): # create VELO retina clusters vpclus = VPRetinaClusterCreator( - RawEventLocation=make_SPmixed_raw_event(["VP"]), + RawBanks=default_raw_banks("VP", make_SPmixed_raw_event), outputs={ 'RetinaClusterLocation': force_location('VeloCluster/RawEvent') }) diff --git a/Hlt/RecoConf/options/hlt1_reco_decode_retina_with_mcchecking.py b/Hlt/RecoConf/options/hlt1_reco_decode_retina_with_mcchecking.py index 87eb073adb6..d977840c2bc 100644 --- a/Hlt/RecoConf/options/hlt1_reco_decode_retina_with_mcchecking.py +++ b/Hlt/RecoConf/options/hlt1_reco_decode_retina_with_mcchecking.py @@ -20,7 +20,7 @@ options.histo_file = 
"MCMatching_decode_retina_MiniBias{}.root".format(suffix) with standalone_hlt1_reco.bind(do_mc_checking=True),\ make_VeloClusterTrackingSIMD.bind(algorithm=VeloRetinaClusterTrackingSIMD),\ - make_velo_full_clusters.bind(make_full_cluster=VPRetinaFullClusterDecoder),\ + make_velo_full_clusters.bind(make_full_cluster=VPRetinaFullClusterDecoder, bank_type="VPRetinaCluster"),\ default_ft_decoding_version.bind(value=6): run_reconstruction(options, standalone_hlt1_reco) diff --git a/Hlt/RecoConf/options/hlt2_reco_decode_retina_with_mcchecking.py b/Hlt/RecoConf/options/hlt2_reco_decode_retina_with_mcchecking.py index b44f04fee67..b013d32a415 100644 --- a/Hlt/RecoConf/options/hlt2_reco_decode_retina_with_mcchecking.py +++ b/Hlt/RecoConf/options/hlt2_reco_decode_retina_with_mcchecking.py @@ -29,6 +29,6 @@ with standalone_hlt2_full_track_reco.bind(do_mc_checking=True), \ make_VeloClusterTrackingSIMD.bind(algorithm=VeloRetinaClusterTrackingSIMD),\ get_UpgradeGhostId_tool.bind(velo_hits=make_RetinaDecoder_raw_event),\ get_global_measurement_provider.bind(velo_hits=make_RetinaDecoder_raw_event),\ - make_velo_full_clusters.bind(make_full_cluster=VPRetinaFullClusterDecoder),\ + make_velo_full_clusters.bind(make_full_cluster=VPRetinaFullClusterDecoder, bank_type="VPRetinaCluster"),\ check_track_resolution.bind(per_hit_resolutions=False, split_per_type=True): run_reconstruction(options, standalone_hlt2_full_track_reco) diff --git a/Hlt/RecoConf/python/RecoConf/calorimeter_reconstruction.py b/Hlt/RecoConf/python/RecoConf/calorimeter_reconstruction.py index 53a5986e945..a25ce2395c9 100644 --- a/Hlt/RecoConf/python/RecoConf/calorimeter_reconstruction.py +++ b/Hlt/RecoConf/python/RecoConf/calorimeter_reconstruction.py @@ -646,7 +646,7 @@ def make_calo_cluster_shapes(tracks, pvs, make_raw=default_raw_banks): def make_calo_resolution_pi0(pvs, make_raw=default_raw_banks): - rawEventEcal = make_raw(["EcalPacked"]) + rawEventEcal = make_raw("EcalPacked") rawEventEcalError = make_raw("EcalPackedError") rawToDigitsOutputEcal = make_digits(rawEventEcal, rawEventEcalError) digitsEcal = rawToDigitsOutputEcal["digitsEcal"] -- GitLab From 6e5b4c9fbbd881b41097203c724d1ee47f5bfe41 Mon Sep 17 00:00:00 2001 From: sesen Date: Thu, 20 Oct 2022 09:47:12 +0200 Subject: [PATCH 06/28] revert vpclus using rawbank view --- Hlt/Hlt2Conf/options/hlt2_2or3bodytopo_realtime.py | 1 - Hlt/Moore/python/Moore/config.py | 6 ++---- Hlt/RecoConf/options/hlt1_reco_retina_with_mcchecking.py | 2 ++ Hlt/RecoConf/python/RecoConf/calorimeter_reconstruction.py | 6 +++--- Hlt/RecoConf/python/RecoConf/standalone.py | 3 ++- 5 files changed, 9 insertions(+), 9 deletions(-) diff --git a/Hlt/Hlt2Conf/options/hlt2_2or3bodytopo_realtime.py b/Hlt/Hlt2Conf/options/hlt2_2or3bodytopo_realtime.py index 2dd74ff1e63..c7fe18ebca8 100644 --- a/Hlt/Hlt2Conf/options/hlt2_2or3bodytopo_realtime.py +++ b/Hlt/Hlt2Conf/options/hlt2_2or3bodytopo_realtime.py @@ -54,4 +54,3 @@ public_tools = [stateProvider_with_simplified_geom()] with reconstruction.bind(from_file=False): config = run_moore( options, make_lines, public_tools, exclude_incompatible=False) - diff --git a/Hlt/Moore/python/Moore/config.py b/Hlt/Moore/python/Moore/config.py index d297c313588..9ae5f46cdfc 100644 --- a/Hlt/Moore/python/Moore/config.py +++ b/Hlt/Moore/python/Moore/config.py @@ -525,14 +525,12 @@ def moore_control_flow(options, streams, process, allen_hlt1, analytics=False): if process == "spruce": # sprucing already unpacked the DstData/ODIN in reco_objects_for_spruce - # TODO check if they are unpacked 
using conf + # TODO check if they are unpacked using conf bank_types_to_unpack = [ b for b in bank_types if b != 'DstData' and b != 'ODIN' ] else: - bank_types_to_unpack = [ - b for b in bank_types if b != 'ODIN' - ] + bank_types_to_unpack = [b for b in bank_types if b != 'ODIN'] unpackrawevent = unpack_rawevent( bank_types=bank_types_to_unpack, configurables=False) unpack.append(unpackrawevent) diff --git a/Hlt/RecoConf/options/hlt1_reco_retina_with_mcchecking.py b/Hlt/RecoConf/options/hlt1_reco_retina_with_mcchecking.py index f879f17c7fb..8720cfb8b29 100644 --- a/Hlt/RecoConf/options/hlt1_reco_retina_with_mcchecking.py +++ b/Hlt/RecoConf/options/hlt1_reco_retina_with_mcchecking.py @@ -20,3 +20,5 @@ with standalone_hlt1_reco_retinacluster.bind(do_mc_checking=True): run_reconstruction(options, standalone_hlt1_reco_retinacluster) options.histo_file = "MCMatching_retina_MiniBias.root" + +options.output_level = 2 diff --git a/Hlt/RecoConf/python/RecoConf/calorimeter_reconstruction.py b/Hlt/RecoConf/python/RecoConf/calorimeter_reconstruction.py index a25ce2395c9..e1fd1b37a22 100644 --- a/Hlt/RecoConf/python/RecoConf/calorimeter_reconstruction.py +++ b/Hlt/RecoConf/python/RecoConf/calorimeter_reconstruction.py @@ -573,8 +573,8 @@ def make_calo_reduced(make_raw=default_raw_banks): rawEventHcal = make_raw("HcalPacked") rawEventEcalError = make_raw("EcalPackedError") rawEventHcalError = make_raw("HcalPackedError") - rawToDigitsOutputEcal = make_digits(rawEventEcal,rawEventEcalError) - rawToDigitsOutputHcal = make_digits(rawEventHcal,rawEventHcalError) + rawToDigitsOutputEcal = make_digits(rawEventEcal, rawEventEcalError) + rawToDigitsOutputHcal = make_digits(rawEventHcal, rawEventHcalError) digitsEcal = rawToDigitsOutputEcal["digitsEcal"] digitsHcal = rawToDigitsOutputHcal["digitsHcal"] clusters = make_clusters(digitsEcal) @@ -625,7 +625,7 @@ def make_calo_resolution_gamma(tracks, pvs, make_raw=default_raw_banks): def make_calo_cluster_shapes(tracks, pvs, make_raw=default_raw_banks): rawEventEcal = make_raw("EcalPacked") rawEventEcalError = make_raw("EcalPackedError") - + digitsEcal = make_digits(rawEventEcal, rawEventEcalError)["digitsEcal"] clusters = make_clusters_shapes(digitsEcal) tracks_incalo = make_acceptance(tracks) diff --git a/Hlt/RecoConf/python/RecoConf/standalone.py b/Hlt/RecoConf/python/RecoConf/standalone.py index 97b7687f5d6..67191a8e9fe 100644 --- a/Hlt/RecoConf/python/RecoConf/standalone.py +++ b/Hlt/RecoConf/python/RecoConf/standalone.py @@ -190,7 +190,7 @@ def phoenix_data_dump_hlt2(): FT_hits = make_PrStoreSciFiHits_hits() Muon_hits = make_muon_hits() odin = make_odin() - + calo_Edig = make_ecal_digits(default_raw_banks("Ecal")) calo_Hdig = make_hcal_digits(default_raw_banks("HCal")) rich_data = make_rich_pixels(default_rich_reco_options()) @@ -286,6 +286,7 @@ def standalone_hlt1_reco_retinacluster(do_mc_checking=False): with make_RetinaCluster_raw_event.bind(make_raw=make_SPmixed_raw_event),\ make_VeloClusterTrackingSIMD.bind( raw_event=make_RetinaCluster_raw_event, + bank_type="VP", algorithm=VeloRetinaClusterTrackingSIMD),\ make_velo_full_clusters.bind(make_raw=make_SPmixed_raw_event, make_full_cluster=VPRetinaFullClustering): -- GitLab From e4f113f95b9652119e03d5ec7275082e591d3e3a Mon Sep 17 00:00:00 2001 From: sesen Date: Thu, 20 Oct 2022 10:33:56 +0200 Subject: [PATCH 07/28] clean up --- Hlt/Hlt2Conf/options/hlt2_all_lines.py | 4 ---- Hlt/Hlt2Conf/tests/qmtest/test_hlt2_2or3bodytopo_realtime.qmt | 2 +- Hlt/RecoConf/options/add_retina_clusters_to_digi.py | 4 ++-- 
.../options/hlt1_reco_decode_retina_with_mcchecking.py | 2 +- Hlt/RecoConf/options/hlt1_reco_retina_with_mcchecking.py | 2 -- .../options/hlt2_reco_decode_retina_with_mcchecking.py | 2 +- Hlt/RecoConf/python/RecoConf/hlt2_muonid.py | 1 + 7 files changed, 6 insertions(+), 11 deletions(-) diff --git a/Hlt/Hlt2Conf/options/hlt2_all_lines.py b/Hlt/Hlt2Conf/options/hlt2_all_lines.py index c5b978e4d9e..f6d96804758 100644 --- a/Hlt/Hlt2Conf/options/hlt2_all_lines.py +++ b/Hlt/Hlt2Conf/options/hlt2_all_lines.py @@ -36,7 +36,3 @@ def make_lines(): public_tools = [stateProvider_with_simplified_geom()] config = run_moore(options, make_lines, public_tools) - -from Configurables import HiveDataBrokerSvc, HLTControlFlowMgr -HiveDataBrokerSvc().OutputLevel = 2 -HLTControlFlowMgr().OutputLevel = 2 diff --git a/Hlt/Hlt2Conf/tests/qmtest/test_hlt2_2or3bodytopo_realtime.qmt b/Hlt/Hlt2Conf/tests/qmtest/test_hlt2_2or3bodytopo_realtime.qmt index 00903c5ab40..6a20b94fd5c 100644 --- a/Hlt/Hlt2Conf/tests/qmtest/test_hlt2_2or3bodytopo_realtime.qmt +++ b/Hlt/Hlt2Conf/tests/qmtest/test_hlt2_2or3bodytopo_realtime.qmt @@ -20,7 +20,7 @@ Check topo 2 or 3 body lines with persistreco using real time reco. from Configurables import HiveDataBrokerSvc -HiveDataBrokerSvc().OutputLevel = 2 +HiveDataBrokerSvc().OutputLevel = 5 true diff --git a/Hlt/RecoConf/options/add_retina_clusters_to_digi.py b/Hlt/RecoConf/options/add_retina_clusters_to_digi.py index d37851e8cf9..cd4d89d3a93 100755 --- a/Hlt/RecoConf/options/add_retina_clusters_to_digi.py +++ b/Hlt/RecoConf/options/add_retina_clusters_to_digi.py @@ -13,7 +13,7 @@ from Moore import options, run_reconstruction from Moore.config import Reconstruction from PyConf.Algorithms import bankKiller, Gaudi__Hive__FetchLeavesFromFile, RawEventSimpleCombiner, VPRetinaClusterCreator, VPRetinaSPmixer from RecoConf.hlt1_tracking import make_SPmixed_raw_event -from PyConf.application import default_raw_event, default_raw_banks, root_copy_input_writer +from PyConf.application import default_raw_event, root_copy_input_writer from PyConf.components import force_location @@ -30,7 +30,7 @@ def combiner_digi(): # create VELO retina clusters vpclus = VPRetinaClusterCreator( - RawBanks=default_raw_banks("VP", make_SPmixed_raw_event), + RawEventLocation=make_SPmixed_raw_event(["VP"]), outputs={ 'RetinaClusterLocation': force_location('VeloCluster/RawEvent') }) diff --git a/Hlt/RecoConf/options/hlt1_reco_decode_retina_with_mcchecking.py b/Hlt/RecoConf/options/hlt1_reco_decode_retina_with_mcchecking.py index d977840c2bc..87eb073adb6 100644 --- a/Hlt/RecoConf/options/hlt1_reco_decode_retina_with_mcchecking.py +++ b/Hlt/RecoConf/options/hlt1_reco_decode_retina_with_mcchecking.py @@ -20,7 +20,7 @@ options.histo_file = "MCMatching_decode_retina_MiniBias{}.root".format(suffix) with standalone_hlt1_reco.bind(do_mc_checking=True),\ make_VeloClusterTrackingSIMD.bind(algorithm=VeloRetinaClusterTrackingSIMD),\ - make_velo_full_clusters.bind(make_full_cluster=VPRetinaFullClusterDecoder, bank_type="VPRetinaCluster"),\ + make_velo_full_clusters.bind(make_full_cluster=VPRetinaFullClusterDecoder),\ default_ft_decoding_version.bind(value=6): run_reconstruction(options, standalone_hlt1_reco) diff --git a/Hlt/RecoConf/options/hlt1_reco_retina_with_mcchecking.py b/Hlt/RecoConf/options/hlt1_reco_retina_with_mcchecking.py index 8720cfb8b29..f879f17c7fb 100644 --- a/Hlt/RecoConf/options/hlt1_reco_retina_with_mcchecking.py +++ b/Hlt/RecoConf/options/hlt1_reco_retina_with_mcchecking.py @@ -20,5 +20,3 @@ with 
standalone_hlt1_reco_retinacluster.bind(do_mc_checking=True): run_reconstruction(options, standalone_hlt1_reco_retinacluster) options.histo_file = "MCMatching_retina_MiniBias.root" - -options.output_level = 2 diff --git a/Hlt/RecoConf/options/hlt2_reco_decode_retina_with_mcchecking.py b/Hlt/RecoConf/options/hlt2_reco_decode_retina_with_mcchecking.py index b013d32a415..b44f04fee67 100644 --- a/Hlt/RecoConf/options/hlt2_reco_decode_retina_with_mcchecking.py +++ b/Hlt/RecoConf/options/hlt2_reco_decode_retina_with_mcchecking.py @@ -29,6 +29,6 @@ with standalone_hlt2_full_track_reco.bind(do_mc_checking=True), \ make_VeloClusterTrackingSIMD.bind(algorithm=VeloRetinaClusterTrackingSIMD),\ get_UpgradeGhostId_tool.bind(velo_hits=make_RetinaDecoder_raw_event),\ get_global_measurement_provider.bind(velo_hits=make_RetinaDecoder_raw_event),\ - make_velo_full_clusters.bind(make_full_cluster=VPRetinaFullClusterDecoder, bank_type="VPRetinaCluster"),\ + make_velo_full_clusters.bind(make_full_cluster=VPRetinaFullClusterDecoder),\ check_track_resolution.bind(per_hit_resolutions=False, split_per_type=True): run_reconstruction(options, standalone_hlt2_full_track_reco) diff --git a/Hlt/RecoConf/python/RecoConf/hlt2_muonid.py b/Hlt/RecoConf/python/RecoConf/hlt2_muonid.py index 1b2d24d0aa3..d5df0f372e2 100644 --- a/Hlt/RecoConf/python/RecoConf/hlt2_muonid.py +++ b/Hlt/RecoConf/python/RecoConf/hlt2_muonid.py @@ -12,6 +12,7 @@ from PyConf import configurable from PyConf.Algorithms import MuonIDHlt2Alg from RecoConf.hlt1_muonid import make_muon_hits + @configurable def make_muon_ids(track_type, tracks, make_muon_hits=make_muon_hits): return MuonIDHlt2Alg( -- GitLab From dcfe47c3e1f5e1d500700a896236dfec4b7de730 Mon Sep 17 00:00:00 2001 From: sesen Date: Thu, 20 Oct 2022 12:10:05 +0200 Subject: [PATCH 08/28] fix calo raw bank type in standalone --- Hlt/RecoConf/python/RecoConf/hlt1_muonid.py | 3 ++- Hlt/RecoConf/python/RecoConf/standalone.py | 8 ++++---- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/Hlt/RecoConf/python/RecoConf/hlt1_muonid.py b/Hlt/RecoConf/python/RecoConf/hlt1_muonid.py index b916fd886a6..1b06cddc471 100644 --- a/Hlt/RecoConf/python/RecoConf/hlt1_muonid.py +++ b/Hlt/RecoConf/python/RecoConf/hlt1_muonid.py @@ -27,7 +27,8 @@ def make_muon_hits(geometry_version=2, make_raw=default_raw_event): else: raise ValueError("Unsupported muon decoding version") - return raw_to_hits(RawBanks=default_raw_banks("Muon", make_raw)).HitContainer + return raw_to_hits( + RawBanks=default_raw_banks("Muon", make_raw)).HitContainer @configurable diff --git a/Hlt/RecoConf/python/RecoConf/standalone.py b/Hlt/RecoConf/python/RecoConf/standalone.py index 67191a8e9fe..379a6d685aa 100644 --- a/Hlt/RecoConf/python/RecoConf/standalone.py +++ b/Hlt/RecoConf/python/RecoConf/standalone.py @@ -134,8 +134,8 @@ def phoenix_data_dump(): FT_hits = make_PrStoreSciFiHits_hits() odin = make_odin() - calo_Edig = make_ecal_digits(default_raw_banks("ECal")) - calo_Hdig = make_hcal_digits(default_raw_banks("HCal")) + calo_Edig = make_ecal_digits(default_raw_banks("ECalE")) + calo_Hdig = make_hcal_digits(default_raw_banks("HCalE")) ### Algorithms in Rec/RecAlgs Project to dump the data into Phoenix .json format @@ -191,8 +191,8 @@ def phoenix_data_dump_hlt2(): Muon_hits = make_muon_hits() odin = make_odin() - calo_Edig = make_ecal_digits(default_raw_banks("Ecal")) - calo_Hdig =
make_hcal_digits(default_raw_banks("HCalE")) rich_data = make_rich_pixels(default_rich_reco_options()) # Add proto particles -- GitLab From 7de3d3e8f871a5627f7c2819894821d155ec1b20 Mon Sep 17 00:00:00 2001 From: sesen Date: Mon, 24 Oct 2022 12:21:14 +0200 Subject: [PATCH 09/28] fix calo raw event location for phoenix dump --- Hlt/RecoConf/python/RecoConf/standalone.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/Hlt/RecoConf/python/RecoConf/standalone.py b/Hlt/RecoConf/python/RecoConf/standalone.py index 379a6d685aa..0d709fb70e1 100644 --- a/Hlt/RecoConf/python/RecoConf/standalone.py +++ b/Hlt/RecoConf/python/RecoConf/standalone.py @@ -134,8 +134,8 @@ def phoenix_data_dump(): FT_hits = make_PrStoreSciFiHits_hits() odin = make_odin() - calo_Edig = make_ecal_digits(default_raw_banks("ECalE")) - calo_Hdig = make_hcal_digits(default_raw_banks("HCalE")) + calo_Edig = make_ecal_digits(default_raw_banks("ECalPacked"), default_raw_banks("ECalPackedError")) + calo_Hdig = make_hcal_digits(default_raw_banks("HCalPacked"), default_raw_banks("HCalPackedError")) ### Algorithms in Rec/RecAlgs Project to dump the data into Phoenix .json format @@ -191,8 +191,8 @@ def phoenix_data_dump_hlt2(): Muon_hits = make_muon_hits() odin = make_odin() - calo_Edig = make_ecal_digits(default_raw_banks("EcalE")) - calo_Hdig = make_hcal_digits(default_raw_banks("HCalE")) + calo_Edig = make_ecal_digits(default_raw_banks("ECalPacked"), default_raw_banks("ECalPackedError")) + calo_Hdig = make_hcal_digits(default_raw_banks("HCalPacked"), default_raw_banks("HCalPackedError")) rich_data = make_rich_pixels(default_rich_reco_options()) # Add proto particles -- GitLab From d208ac3b7a4deb7eb5b222e289d995b6b9c6ed1f Mon Sep 17 00:00:00 2001 From: sesen Date: Tue, 25 Oct 2022 15:47:27 +0200 Subject: [PATCH 10/28] fix tests --- .../RecoConf/calorimeter_reconstruction.py | 31 +++++++++---------- Hlt/RecoConf/python/RecoConf/standalone.py | 9 +++--- 2 files changed, 18 insertions(+), 22 deletions(-) diff --git a/Hlt/RecoConf/python/RecoConf/calorimeter_reconstruction.py b/Hlt/RecoConf/python/RecoConf/calorimeter_reconstruction.py index e1fd1b37a22..8d68827f32d 100644 --- a/Hlt/RecoConf/python/RecoConf/calorimeter_reconstruction.py +++ b/Hlt/RecoConf/python/RecoConf/calorimeter_reconstruction.py @@ -453,20 +453,20 @@ def make_calo(tracks, trackrels=None): # digits if calo_raw_event: # needed for real data - rawEventEcal = make_raw("Calo") - rawEventHcal = make_raw("Calo") - rawEventEcalError = make_raw("CaloError") - rawEventHcalError = make_raw("CaloError") + rawEventCalo = make_raw("Calo") + rawEventCaloError = make_raw("CaloError") + rawToDigitsOutput = make_digits(rawEventCalo, rawEventCaloError) + digitsEcal = rawToDigitsOutput["digitsEcal"] + digitsHcal = rawToDigitsOutput["digitsHcal"] else: rawEventEcal = make_raw("EcalPacked") rawEventHcal = make_raw("HcalPacked") rawEventEcalError = make_raw("EcalPackedError") rawEventHcalError = make_raw("HcalPackedError") - rawToDigitsOutputEcal = make_digits(rawEventEcal, rawEventEcalError) - rawToDigitsOutputHcal = make_digits(rawEventHcal, rawEventHcalError) - digitsEcal = rawToDigitsOutputEcal["digitsEcal"] - digitsHcal = rawToDigitsOutputHcal["digitsHcal"] + digitsEcal = make_ecal_digits(rawEventEcal, rawEventEcalError) + digitsHcal = make_hcal_digits(rawEventHcal, rawEventHcalError) + # clusters ecalClusters = make_clusters(digitsEcal) @@ -573,10 +573,8 @@ def make_calo_reduced(make_raw=default_raw_banks): rawEventHcal = make_raw("HcalPacked") 
rawEventEcalError = make_raw("EcalPackedError") rawEventHcalError = make_raw("HcalPackedError") - rawToDigitsOutputEcal = make_digits(rawEventEcal, rawEventEcalError) - rawToDigitsOutputHcal = make_digits(rawEventHcal, rawEventHcalError) - digitsEcal = rawToDigitsOutputEcal["digitsEcal"] - digitsHcal = rawToDigitsOutputHcal["digitsHcal"] + digitsEcal = make_ecal_digits(rawEventEcal, rawEventEcalError) + digitsHcal = make_hcal_digits(rawEventHcal, rawEventHcalError) clusters = make_clusters(digitsEcal) return { @@ -589,8 +587,8 @@ def make_calo_reduced(make_raw=default_raw_banks): def make_calo_resolution_gamma(tracks, pvs, make_raw=default_raw_banks): rawEventEcal = make_raw("EcalPacked") rawEventEcalError = make_raw("EcalPackedError") - rawToDigitsOutputEcal = make_digits(rawEventEcal, rawEventEcalError) - digitsEcal = rawToDigitsOutputEcal["digitsEcal"] + digitsEcal = make_ecal_digits(rawEventEcal, rawEventEcalError) + clusters = make_clusters_various(digitsEcal) # track-cluster matching input @@ -626,7 +624,7 @@ def make_calo_cluster_shapes(tracks, pvs, make_raw=default_raw_banks): rawEventEcal = make_raw("EcalPacked") rawEventEcalError = make_raw("EcalPackedError") - digitsEcal = make_digits(rawEventEcal, rawEventEcalError)["digitsEcal"] + digitsEcal = make_ecal_digits(rawEventEcal, rawEventEcalError) clusters = make_clusters_shapes(digitsEcal) tracks_incalo = make_acceptance(tracks) @@ -648,8 +646,7 @@ def make_calo_cluster_shapes(tracks, pvs, make_raw=default_raw_banks): def make_calo_resolution_pi0(pvs, make_raw=default_raw_banks): rawEventEcal = make_raw("EcalPacked") rawEventEcalError = make_raw("EcalPackedError") - rawToDigitsOutputEcal = make_digits(rawEventEcal, rawEventEcalError) - digitsEcal = rawToDigitsOutputEcal["digitsEcal"] + digitsEcal = make_ecal_digits(rawEventEcal, rawEventEcalError) clusters = make_clusters_various(digitsEcal) # default Cellular Automaton diff --git a/Hlt/RecoConf/python/RecoConf/standalone.py b/Hlt/RecoConf/python/RecoConf/standalone.py index 0d709fb70e1..6ddd614de65 100644 --- a/Hlt/RecoConf/python/RecoConf/standalone.py +++ b/Hlt/RecoConf/python/RecoConf/standalone.py @@ -134,8 +134,8 @@ def phoenix_data_dump(): FT_hits = make_PrStoreSciFiHits_hits() odin = make_odin() - calo_Edig = make_ecal_digits(default_raw_banks("ECalPacked"), default_raw_banks("ECalPackedError")) - calo_Hdig = make_hcal_digits(default_raw_banks("HCalPacked"), default_raw_banks("HCalPackedError")) + calo_Edig = make_ecal_digits(default_raw_banks("EcalPacked"), default_raw_banks("EcalPackedError")) + calo_Hdig = make_hcal_digits(default_raw_banks("HcalPacked"), default_raw_banks("HcalPackedError")) ### Algorithms in Rec/RecAlgs Project to dump the data into Phoenix .json format @@ -191,8 +191,8 @@ def phoenix_data_dump_hlt2(): Muon_hits = make_muon_hits() odin = make_odin() - calo_Edig = make_ecal_digits(default_raw_banks("ECalPacked"), default_raw_banks("ECalPackedError")) - calo_Hdig = make_hcal_digits(default_raw_banks("HCalPacked"), default_raw_banks("HCalPackedError")) + calo_Edig = make_ecal_digits(default_raw_banks("EcalPacked"), default_raw_banks("EcalPackedError")) + calo_Hdig = make_hcal_digits(default_raw_banks("HcalPacked"), default_raw_banks("HcalPackedError")) rich_data = make_rich_pixels(default_rich_reco_options()) # Add proto particles @@ -286,7 +286,6 @@ def standalone_hlt1_reco_retinacluster(do_mc_checking=False): with make_RetinaCluster_raw_event.bind(make_raw=make_SPmixed_raw_event),\ make_VeloClusterTrackingSIMD.bind( 
raw_event=make_RetinaCluster_raw_event, - bank_type="VP", algorithm=VeloRetinaClusterTrackingSIMD),\ make_velo_full_clusters.bind(make_raw=make_SPmixed_raw_event, make_full_cluster=VPRetinaFullClustering): -- GitLab From 237732fc3f6cc235a339106945a6f7372725fb0a Mon Sep 17 00:00:00 2001 From: Gitlab CI Date: Tue, 25 Oct 2022 13:48:01 +0000 Subject: [PATCH 11/28] Fixed formatting patch generated by https://gitlab.cern.ch/lhcb/Moore/-/jobs/25395561 --- .../python/RecoConf/calorimeter_reconstruction.py | 1 - Hlt/RecoConf/python/RecoConf/standalone.py | 12 ++++++++---- 2 files changed, 8 insertions(+), 5 deletions(-) diff --git a/Hlt/RecoConf/python/RecoConf/calorimeter_reconstruction.py b/Hlt/RecoConf/python/RecoConf/calorimeter_reconstruction.py index 8d68827f32d..70a297fdf66 100644 --- a/Hlt/RecoConf/python/RecoConf/calorimeter_reconstruction.py +++ b/Hlt/RecoConf/python/RecoConf/calorimeter_reconstruction.py @@ -467,7 +467,6 @@ def make_calo(tracks, digitsEcal = make_ecal_digits(rawEventEcal, rawEventEcalError) digitsHcal = make_hcal_digits(rawEventHcal, rawEventHcalError) - # clusters ecalClusters = make_clusters(digitsEcal) diff --git a/Hlt/RecoConf/python/RecoConf/standalone.py b/Hlt/RecoConf/python/RecoConf/standalone.py index 6ddd614de65..6d5ca2aca84 100644 --- a/Hlt/RecoConf/python/RecoConf/standalone.py +++ b/Hlt/RecoConf/python/RecoConf/standalone.py @@ -134,8 +134,10 @@ def phoenix_data_dump(): FT_hits = make_PrStoreSciFiHits_hits() odin = make_odin() - calo_Edig = make_ecal_digits(default_raw_banks("EcalPacked"), default_raw_banks("EcalPackedError")) - calo_Hdig = make_hcal_digits(default_raw_banks("HcalPacked"), default_raw_banks("HcalPackedError")) + calo_Edig = make_ecal_digits( + default_raw_banks("EcalPacked"), default_raw_banks("EcalPackedError")) + calo_Hdig = make_hcal_digits( + default_raw_banks("HcalPacked"), default_raw_banks("HcalPackedError")) ### Algorithms in Rec/RecAlgs Project to dump the data into Phoenix .json format @@ -191,8 +193,10 @@ def phoenix_data_dump_hlt2(): Muon_hits = make_muon_hits() odin = make_odin() - calo_Edig = make_ecal_digits(default_raw_banks("EcalPacked"), default_raw_banks("EcalPackedError")) - calo_Hdig = make_hcal_digits(default_raw_banks("HcalPacked"), default_raw_banks("HcalPackedError")) + calo_Edig = make_ecal_digits( + default_raw_banks("EcalPacked"), default_raw_banks("EcalPackedError")) + calo_Hdig = make_hcal_digits( + default_raw_banks("HcalPacked"), default_raw_banks("HcalPackedError")) rich_data = make_rich_pixels(default_rich_reco_options()) # Add proto particles -- GitLab From b70e42d13d6020b67faac6004db65bb0942d7223 Mon Sep 17 00:00:00 2001 From: sesen Date: Thu, 27 Oct 2022 16:11:22 +0200 Subject: [PATCH 12/28] fix hlt1_tracking after rebase --- Hlt/RecoConf/python/RecoConf/hlt1_tracking.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/Hlt/RecoConf/python/RecoConf/hlt1_tracking.py b/Hlt/RecoConf/python/RecoConf/hlt1_tracking.py index 9316f1fb476..55935796b0d 100644 --- a/Hlt/RecoConf/python/RecoConf/hlt1_tracking.py +++ b/Hlt/RecoConf/python/RecoConf/hlt1_tracking.py @@ -76,7 +76,6 @@ def require_gec(make_raw=default_raw_banks, cut=-1, **kwargs): @configurable def make_VeloClusterTrackingSIMD(algorithm=VeloClusterTrackingSIMD, raw_event=default_raw_event, - detector=["VP"], masked_sensors=[]): """Simple helper to make sure both, make_VeloClusterTrackingSIMD_tracks and make_VeloClusterTrackingSIMD_hits, access the identically configured version of VeloClusterTrackingSIMD @@ -90,10 +89,16 @@ def 
make_VeloClusterTrackingSIMD(algorithm=VeloClusterTrackingSIMD, The Velo tracking algorithm. """ + # For VeloRetinaClusterTrackingSIMD use VPRetinaClusters rawbank in the file + if "Retina" in str(algorithm) and raw_event == default_raw_event: + bank_type = "VPRetinaCluster" + # In case raw_event is made from VP or algorithm is VeloClusterTrackingSIMD, use VP rawbank in the event + else: + bank_type = "VP" my_SensorMasks = [j in masked_sensors for j in range(208) ] # 208 = LHCb::Pr::Velo::VPInfos::NSensors return algorithm( - RawEventLocation=raw_event(detector), + RawBanks=default_raw_banks(bank_type, raw_event), SensorMasks=tuple(my_SensorMasks)) -- GitLab From fe45831b4a03fcba02c34031ea71eaf14a5b126d Mon Sep 17 00:00:00 2001 From: sesen Date: Sun, 30 Oct 2022 18:41:28 +0100 Subject: [PATCH 13/28] clean up dec reports --- .../options/test_allen_comp_dec_sel_rep.py | 21 ++++++++--- .../tests/options/test_allen_decreports.py | 18 ++++++--- .../tests/options/test_allen_sel_rep_info.py | 24 +++++++++--- Hlt/Hlt1Conf/tests/options/test_decreports.py | 16 ++++++-- Hlt/Hlt2Conf/options/hlt2_check_output.py | 7 +++- Hlt/Moore/python/Moore/lines.py | 2 +- Hlt/RecoConf/python/RecoConf/hlt1_allen.py | 37 +++++++++++++++++-- 7 files changed, 99 insertions(+), 26 deletions(-) diff --git a/Hlt/Hlt1Conf/tests/options/test_allen_comp_dec_sel_rep.py b/Hlt/Hlt1Conf/tests/options/test_allen_comp_dec_sel_rep.py index 37178a3956e..82ec059afad 100644 --- a/Hlt/Hlt1Conf/tests/options/test_allen_comp_dec_sel_rep.py +++ b/Hlt/Hlt1Conf/tests/options/test_allen_comp_dec_sel_rep.py @@ -24,7 +24,6 @@ import argparse from collections import defaultdict from Configurables import (ApplicationMgr, HistogramPersistencySvc, IODataManager, LHCbApp) -from DAQSys.Decoders import DecoderDB from GaudiConf import IOHelper import GaudiPython from PyConf.application import configured_ann_svc @@ -41,12 +40,22 @@ IODataManager(DisablePFNWarning=True) # Disable warning about histogram saving not being required HistogramPersistencySvc(OutputLevel=5) # Decode Hlt DecReports -app = ApplicationMgr(TopAlg=[ - DecoderDB["HltDecReportsDecoder/Hlt1DecReportsDecoder"].setup(), - DecoderDB["HltSelReportsDecoder/Hlt1SelReportsDecoder"].setup() -]) +from Configurables import LHCb__UnpackRawEvent, HltDecReportsDecoder, HltSelReportsDecoder +unpacker = LHCb__UnpackRawEvent("UnpackRawEvent", + RawBankLocations = ["DAQ/RawBanks/HltDecReports", "DAQ/RawBanks/HltSelReports"], + BankTypes = ["HltDecReports", "HltSelReports"]) + +decDec = HltDecReportsDecoder("HltDecReportsDecoder/Hlt1DecReportsDecoder", + SourceID = "Hlt1", + RawBanks = unpacker.RawBankLocations[0]) + +selDec = HltSelReportsDecoder("HltSelReportsDecoder/Hlt1SelReportsDecoder", + SourceID = "Hlt1", + RawBanks = unpacker.RawBankLocations[1]) + + +app = ApplicationMgr(TopAlg=[unpacker, decDec, selDec]) # decoderDB wants TCKANNSvc as name... 
-app.ExtSvc += [configured_ann_svc(name='TCKANNSvc')] # Set up counters for recording decisions and selreport existence from MDF counts_from_mdf = defaultdict(lambda: defaultdict(int)) diff --git a/Hlt/Hlt1Conf/tests/options/test_allen_decreports.py b/Hlt/Hlt1Conf/tests/options/test_allen_decreports.py index 2a2638b340b..7a4509fa06b 100644 --- a/Hlt/Hlt1Conf/tests/options/test_allen_decreports.py +++ b/Hlt/Hlt1Conf/tests/options/test_allen_decreports.py @@ -22,7 +22,6 @@ import argparse from collections import defaultdict from Configurables import (ApplicationMgr, HistogramPersistencySvc, IODataManager, LHCbApp) -from DAQSys.Decoders import DecoderDB from GaudiConf import IOHelper import GaudiPython from PyConf.application import configured_ann_svc @@ -56,9 +55,18 @@ IODataManager(DisablePFNWarning=True) # Disable warning about histogram saving not being required HistogramPersistencySvc(OutputLevel=5) # Decode Hlt DecReports -# Configure TCKANNSvc (as that is what DecoderDB wants...) -app = ApplicationMgr( - TopAlg=[DecoderDB["HltDecReportsDecoder/Hlt1DecReportsDecoder"].setup()]) +from Configurables import LHCb__UnpackRawEvent, HltDecReportsDecoder +unpacker = LHCb__UnpackRawEvent("UnpackRawEvent", + RawBankLocations = ["DAQ/RawBanks/HltDecReports"], + BankTypes = ["HltDecReports"]) + +decDec = HltDecReportsDecoder("HltDecReportsDecoder/Hlt1DecReportsDecoder", + SourceID = "Hlt1", + DecoderMapping= "TCKANNSvc", + RawBanks = unpacker.RawBankLocations[0]) + +app = ApplicationMgr(TopAlg=[unpacker, decDec]) +# Configure TCKANNSvc app.ExtSvc += [configured_ann_svc(name='TCKANNSvc')] # Set up counters for recording decisions from MDF @@ -72,7 +80,7 @@ gaudi.run(1) error = False while TES["/Event"]: - decs = TES["/Event/Hlt1/DecReports"] + decs = TES[str(decDec.OuputHltDecReportsLocation)] if not decs: print("DecReports TES location not found") error = True diff --git a/Hlt/Hlt1Conf/tests/options/test_allen_sel_rep_info.py b/Hlt/Hlt1Conf/tests/options/test_allen_sel_rep_info.py index 09b53c2952c..c199fb3ba5a 100644 --- a/Hlt/Hlt1Conf/tests/options/test_allen_sel_rep_info.py +++ b/Hlt/Hlt1Conf/tests/options/test_allen_sel_rep_info.py @@ -26,7 +26,6 @@ import re from collections import defaultdict from Configurables import (ApplicationMgr, HistogramPersistencySvc, IODataManager, LHCbApp) -from DAQSys.Decoders import DecoderDB from GaudiConf import IOHelper import GaudiPython from PyConf.application import configured_ann_svc @@ -224,11 +223,24 @@ IODataManager(DisablePFNWarning=True) # Disable warning about histogram saving not being required HistogramPersistencySvc(OutputLevel=5) # Decode Hlt DecReports -app = ApplicationMgr(TopAlg=[ - DecoderDB["HltDecReportsDecoder/Hlt1DecReportsDecoder"].setup(), - DecoderDB["HltSelReportsDecoder/Hlt1SelReportsDecoder"].setup() -]) -# Configure TCKANNSvc (as that is what DecoderDB wants...) 
+from Configurables import LHCb__UnpackRawEvent, HltDecReportsDecoder, HltSelReportsDecoder +unpacker = LHCb__UnpackRawEvent("UnpackRawEvent", + RawBankLocations = ["DAQ/RawBanks/HltDecReports", "DAQ/RawBanks/HltSelReports"], + BankTypes = ["HltDecReports", "HltSelReports"]) + +decDec = HltDecReportsDecoder("HltDecReportsDecoder/Hlt1DecReportsDecoder", + SourceID = "Hlt1", + DecoderMapping= "TCKANNSvc", + RawBanks = unpacker.RawBankLocations[0]) + +selDec = HltSelReportsDecoder("HltDecReportsDecoder/Hlt1SelReportsDecoder", + SourceID = "Hlt1", + DecoderMapping= "TCKANNSvc", + RawBanks = unpacker.RawBankLocations[1]) + +app = ApplicationMgr(TopAlg=[unpacker, decDec, selDec]) + +# Configure TCKANNSvc app.ExtSvc += [configured_ann_svc(name='TCKANNSvc')] gaudi = GaudiPython.AppMgr() diff --git a/Hlt/Hlt1Conf/tests/options/test_decreports.py b/Hlt/Hlt1Conf/tests/options/test_decreports.py index 3a7238c9be6..19f562cec91 100644 --- a/Hlt/Hlt1Conf/tests/options/test_decreports.py +++ b/Hlt/Hlt1Conf/tests/options/test_decreports.py @@ -21,7 +21,6 @@ import argparse from Configurables import (ApplicationMgr, HistogramPersistencySvc, IODataManager, LHCbApp) -from DAQSys.Decoders import DecoderDB from GaudiConf import IOHelper import GaudiPython from PyConf.application import configured_ann_svc @@ -59,9 +58,18 @@ IODataManager(DisablePFNWarning=True) HistogramPersistencySvc(OutputLevel=5) # Decode Hlt DecReports -appMgr = ApplicationMgr( - TopAlg=[DecoderDB["HltDecReportsDecoder/Hlt1DecReportsDecoder"].setup()]) -# Configure TCKANNSvc (as that is what DecoderDB wants...) +from Configurables import LHCb__UnpackRawEvent, HltDecReportsDecoder +unpacker = LHCb__UnpackRawEvent("UnpackRawEvent", + RawBankLocations = ["DAQ/RawBanks/HltDecReports"], + BankTypes = ["HltDecReports"]) + +decDec = HltDecReportsDecoder("HltDecReportsDecoder/Hlt1DecReportsDecoder", + SourceID = "Hlt1", + DecoderMapping= "TCKANNSvc", + RawBanks = unpacker.RawBankLocations[0]) + +appMgr = ApplicationMgr(TopAlg=[unpacker, decDec]) +# Configure TCKANNSvc appMgr.ExtSvc += [configured_ann_svc(name='TCKANNSvc')] # Get expected lines from the previous job diff --git a/Hlt/Hlt2Conf/options/hlt2_check_output.py b/Hlt/Hlt2Conf/options/hlt2_check_output.py index 703bd4b1421..dedae806c18 100644 --- a/Hlt/Hlt2Conf/options/hlt2_check_output.py +++ b/Hlt/Hlt2Conf/options/hlt2_check_output.py @@ -48,6 +48,11 @@ IODataManager(DisablePFNWarning=True) manifest = load_manifest(sys.argv[2]) algs = do_unpacking(manifest, process='Hlt2') +decdecoder = None +for alg in algs: + if "HltDecReportsDecoder" in alg.getFullName(): + decdecoder = alg + appmgr = ApplicationMgr(TopAlg=algs) appmgr.ExtSvc += [configured_ann_svc()] @@ -73,7 +78,7 @@ while TES['/Event']: print('Checking next event.') #TES.dump() - decRep = TES[str(algs[1].OutputHltDecReportsLocation)].decReports() + decRep = TES[str(decdecoder.OutputHltDecReportsLocation)].decReports() for name, report in decRep.items(): if report.decision(): diff --git a/Hlt/Moore/python/Moore/lines.py b/Hlt/Moore/python/Moore/lines.py index 1e3ab916ca1..7acea18dd80 100644 --- a/Hlt/Moore/python/Moore/lines.py +++ b/Hlt/Moore/python/Moore/lines.py @@ -320,7 +320,7 @@ class Hlt2Line(DecisionLine): filter_source_id, filter_source_id)) line_regex = "|".join(line for line in hlt_filter_code) hlt_dec_reports = HltDecReportsDecoder( - RawEventLocations=default_raw_event(bank_types=["HltDecReports"]), + RawBanks=default_raw_banks("HltDecReports"), SourceID=filter_source_id) return VoidFilter( name=name, diff --git 
a/Hlt/RecoConf/python/RecoConf/hlt1_allen.py b/Hlt/RecoConf/python/RecoConf/hlt1_allen.py index b633a18c80d..a4e027565a4 100644 --- a/Hlt/RecoConf/python/RecoConf/hlt1_allen.py +++ b/Hlt/RecoConf/python/RecoConf/hlt1_allen.py @@ -10,7 +10,7 @@ ############################################################################### import importlib from PyConf import configurable -from PyConf.components import Algorithm +from PyConf.components import Algorithm, force_location from PyConf.control_flow import CompositeNode from PyConf.Algorithms import ( LHCb__Converters__Track__v1__fromVectorLHCbTrack as @@ -19,6 +19,7 @@ from PyConf.Algorithms import ( FromV2TrackV1Track) from AllenConf.hlt1_reconstruction import hlt1_reconstruction +from typing import OrderedDict @configurable @@ -122,17 +123,47 @@ def call_allen_decision_logger(): Hlt1LineNames=get_allen_line_names()) +def unpack_raw_event(bank_type, raw_dec_reports=make_allen_raw_reports): + #Dynamically-sized list of outputs + location_map = OrderedDict([ + (bank_type, "/Event/DAQ/RawBanks/{}".format(bank_type)), + ]) + + def output_rawbank_transform(**outputs): + return {"RawBankLocations": [location_map[k] for k in outputs]} + + from PyConf.Algorithms import LHCb__UnpackRawEvent + unpackrawevent = LHCb__UnpackRawEvent( + BankTypes=[bank_type], + outputs={ + prop: force_location(loc) + for prop, loc in location_map.items() + }, + output_transform=output_rawbank_transform, + RawEventLocation=raw_dec_reports()) + + return unpackrawevent + def decode_allen_dec_reports(raw_dec_reports=make_allen_raw_reports): from PyConf.Algorithms import HltDecReportsDecoder + + unpackrawevent = unpack_raw_event("HltDecReports", raw_dec_reports) + + print(unpackrawevent.outputs) decoder = HltDecReportsDecoder( - RawEventLocations=raw_dec_reports(), SourceID='Hlt1') + RawBanks=unpackrawevent.HltDecReports, + SourceID='Hlt1') return decoder def decode_allen_sel_reports(raw_sel_reports=make_allen_raw_reports): from PyConf.Algorithms import HltSelReportsDecoder + + unpackrawevent = unpack_raw_event("HltSelReports", raw_sel_reports) + decoder = HltSelReportsDecoder( - RawEventLocations=raw_sel_reports(), SourceID='Hlt1') + RawBanks=unpackrawevent.HltSelReports, + SourceID='Hlt1') return decoder -- GitLab From 8626c1eaa36413f4d717d7d078fd9af5579c573e Mon Sep 17 00:00:00 2001 From: sesen Date: Mon, 31 Oct 2022 11:37:06 +0100 Subject: [PATCH 14/28] fix get_hlt_reports --- Hlt/Hlt1Conf/options/hlt1_selreports_filtering.py | 4 ++-- Hlt/Moore/python/Moore/config.py | 4 ++-- Hlt/Moore/python/Moore/lines.py | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/Hlt/Hlt1Conf/options/hlt1_selreports_filtering.py b/Hlt/Hlt1Conf/options/hlt1_selreports_filtering.py index 37ae4d3fce9..aaa11b4bd42 100644 --- a/Hlt/Hlt1Conf/options/hlt1_selreports_filtering.py +++ b/Hlt/Hlt1Conf/options/hlt1_selreports_filtering.py @@ -21,7 +21,7 @@ from PyConf.Algorithms import ( TOSFilter__v1__Track, TOSFilter__v1__RecVertex, ) -from PyConf.application import default_raw_event +from PyConf.application import default_raw_banks from Moore import options, run_moore from Moore.config import HltLine @@ -44,7 +44,7 @@ from Functors import ALL def make_selreports(): return HltSelReportsDecoder( - RawEventLocations=default_raw_event(["HltSelReports"]), + RawBanks=default_raw_banks("HltSelReports"), SourceID='Hlt1', ) diff --git a/Hlt/Moore/python/Moore/config.py b/Hlt/Moore/python/Moore/config.py index 9ae5f46cdfc..dd026f87dfb 100644 --- a/Hlt/Moore/python/Moore/config.py +++ 
b/Hlt/Moore/python/Moore/config.py @@ -527,10 +527,10 @@ def moore_control_flow(options, streams, process, allen_hlt1, analytics=False): # sprucing already unpacked the DstData/ODIN in reco_objects_for_spruce # TODO check if they are unpacked using conf bank_types_to_unpack = [ - b for b in bank_types if b != 'DstData' and b != 'ODIN' + b for b in bank_types if b != 'DstData' and b != 'ODIN' and b!='HltDecReports' ] else: - bank_types_to_unpack = [b for b in bank_types if b != 'ODIN'] + bank_types_to_unpack = [b for b in bank_types if b != 'ODIN' and b!='HltDecReports'] unpackrawevent = unpack_rawevent( bank_types=bank_types_to_unpack, configurables=False) unpack.append(unpackrawevent) diff --git a/Hlt/Moore/python/Moore/lines.py b/Hlt/Moore/python/Moore/lines.py index 7acea18dd80..f1c341c41de 100644 --- a/Hlt/Moore/python/Moore/lines.py +++ b/Hlt/Moore/python/Moore/lines.py @@ -25,7 +25,7 @@ from Moore.selreports import ( convert_output as convert_output_for_selreports, ) -from PyConf.application import default_raw_event +from PyConf.application import default_raw_banks from .persistence.particle_moving import ( CopyParticles, CopyFlavourTags, dvalgorithm_locations, -- GitLab From fab9259309e2aa13567ecd11de8ce086e8149adb Mon Sep 17 00:00:00 2001 From: sesen Date: Thu, 3 Nov 2022 13:21:21 +0100 Subject: [PATCH 15/28] update dec utils --- .../options/test_allen_comp_dec_sel_rep.py | 10 ++++-- .../tests/options/sprucing/spruce_check.py | 4 +-- Hlt/Moore/python/Moore/config.py | 29 ++++------------- Hlt/RecoConf/python/RecoConf/hlt1_allen.py | 32 +++---------------- Hlt/RecoConf/python/RecoConf/hlt1_tracking.py | 2 +- 5 files changed, 21 insertions(+), 56 deletions(-) diff --git a/Hlt/Hlt1Conf/tests/options/test_allen_comp_dec_sel_rep.py b/Hlt/Hlt1Conf/tests/options/test_allen_comp_dec_sel_rep.py index 82ec059afad..658d2ec2d00 100644 --- a/Hlt/Hlt1Conf/tests/options/test_allen_comp_dec_sel_rep.py +++ b/Hlt/Hlt1Conf/tests/options/test_allen_comp_dec_sel_rep.py @@ -42,6 +42,7 @@ HistogramPersistencySvc(OutputLevel=5) # Decode Hlt DecReports from Configurables import LHCb__UnpackRawEvent, HltDecReportsDecoder, HltSelReportsDecoder unpacker = LHCb__UnpackRawEvent("UnpackRawEvent", + OutputLevel=2, RawBankLocations = ["DAQ/RawBanks/HltDecReports", "DAQ/RawBanks/HltSelReports"], BankTypes = ["HltDecReports", "HltSelReports"]) @@ -51,10 +52,13 @@ decDec = HltDecReportsDecoder("HltDecReportsDecoder/Hlt1DecReportsDecoder", selDec = HltSelReportsDecoder("HltSelReportsDecoder/Hlt1SelReportsDecoder", SourceID = "Hlt1", + DecReports= unpacker.RawBankLocations[0], RawBanks = unpacker.RawBankLocations[1]) -app = ApplicationMgr(TopAlg=[unpacker, decDec, selDec]) +app = ApplicationMgr(TopAlg=[unpacker, decDec, selDec], + ExtSvc=[configured_ann_svc(name='HltANNSvc')]) + # decoderDB wants TCKANNSvc as name... 
# Set up counters for recording decisions and selreport existence from MDF counts_from_mdf = defaultdict(lambda: defaultdict(int)) @@ -65,13 +69,13 @@ gaudi.run(1) error = False while TES["/Event"]: - decs = TES["/Event/Hlt1/DecReports"] + decs = TES[str(decDec.OutputHltDecReportsLocation)] if not decs: print("DecReports TES location not found") error = True break - sels = TES["/Event/Hlt1/SelReports"] + sels = TES[str(selDec.OutputHltSelReportsLocation)] if not sels: print("SelReports TES location not found") error = True diff --git a/Hlt/Hlt2Conf/tests/options/sprucing/spruce_check.py b/Hlt/Hlt2Conf/tests/options/sprucing/spruce_check.py index 0e3eeae9932..c42439d1d97 100644 --- a/Hlt/Hlt2Conf/tests/options/sprucing/spruce_check.py +++ b/Hlt/Hlt2Conf/tests/options/sprucing/spruce_check.py @@ -28,7 +28,7 @@ from PyConf.packing import default_persisted_locations from GaudiConf.reading import do_unpacking from GaudiConf.reading import load_manifest -from PyConf.application import configured_ann_svc +from PyConf.application import configured_ann_svc, default_raw_event from Hlt2Conf.check_output import check_persistreco @@ -69,7 +69,7 @@ elif process == "Turbo": RECO_ROOT = "/Event/HLT2" loc = "HLT2" dec_to_check = "PassThrough" - +default_raw_event.global_bind(raw_event_format=0.5) algs = do_unpacking( load_manifest(args.t), process=process, diff --git a/Hlt/Moore/python/Moore/config.py b/Hlt/Moore/python/Moore/config.py index dd026f87dfb..c264003bc26 100644 --- a/Hlt/Moore/python/Moore/config.py +++ b/Hlt/Moore/python/Moore/config.py @@ -29,6 +29,7 @@ from PyConf.application import ( root_copy_input_writer, make_odin, default_raw_event, + default_raw_banks, root_writer, make_data_with_FetchDataFromFile, ) @@ -321,7 +322,6 @@ def stream_writer(stream, elif output_type == ROOT_KEY: if process == "spruce" or process == "pass": - locations = [dh.location for dh in new_locations] if process == "spruce": # Do not want DstData raw bank from HLT2 in Sprucing output persist_locations = [ @@ -340,8 +340,8 @@ def stream_writer(stream, extra_locations_to_persist += [mc_part, mc_vert] #persist locations have been unpacked by LHCb__UnpackRawEvent - locations += [ - '/Event/DAQ/RawBanks/%s' % (rb) for rb in persist_locations + locations = new_locations + [ + default_raw_banks(rb) for rb in persist_locations ] consolidate_views = CombineRawBankViewsToRawEvent( @@ -519,22 +519,6 @@ def moore_control_flow(options, streams, process, allen_hlt1, analytics=False): | HLT2_REPORT_RAW_BANK_TYPES) #For spruce and passthrough need to unpack raw event to RawBank::Views if process == "spruce" or process == "pass": - bank_types = list(HLT1_REPORT_RAW_BANK_TYPES - | HLT2_REPORT_RAW_BANK_TYPES - | DETECTOR_RAW_BANK_TYPES) - - if process == "spruce": - # sprucing already unpacked the DstData/ODIN in reco_objects_for_spruce - # TODO check if they are unpacked using conf - bank_types_to_unpack = [ - b for b in bank_types if b != 'DstData' and b != 'ODIN' and b!='HltDecReports' - ] - else: - bank_types_to_unpack = [b for b in bank_types if b != 'ODIN' and b!='HltDecReports'] - unpackrawevent = unpack_rawevent( - bank_types=bank_types_to_unpack, configurables=False) - unpack.append(unpackrawevent) - ## Hack to make `extra_locations_to_persist` objects writable in pass through if options.simulation and options.input_type == 'ROOT' and process == "pass": unpack += mc_unpackers(configurables=False) @@ -894,13 +878,14 @@ def RawEventSize_analysis(process, stream, persist_types, new_locations): algs += [raw_event_combiner] else: - 
locations = [dh.location for dh in new_locations] # Do not want DstData raw bank from HLT2 in Sprucing output persist_types = [x for x in persist_types if x != 'DstData'] + #persist locations have been unpacked by LHCb__UnpackRawEvent - sizes_of = locations + [ - '/Event/DAQ/RawBanks/%s' % (rb) for rb in persist_types + sizes_of = new_locations + [ + default_raw_banks(rb) for rb in persist_types ] + algs += [ CombineRawBankViewsToRawEvent( name="EvtSize_" + stream, diff --git a/Hlt/RecoConf/python/RecoConf/hlt1_allen.py b/Hlt/RecoConf/python/RecoConf/hlt1_allen.py index a4e027565a4..1cf41531ab7 100644 --- a/Hlt/RecoConf/python/RecoConf/hlt1_allen.py +++ b/Hlt/RecoConf/python/RecoConf/hlt1_allen.py @@ -12,6 +12,7 @@ import importlib from PyConf import configurable from PyConf.components import Algorithm, force_location from PyConf.control_flow import CompositeNode +from PyConf.application import default_raw_banks, default_raw_event from PyConf.Algorithms import ( LHCb__Converters__Track__v1__fromVectorLHCbTrack as TrkConvVecV1ToKeyContV1, @@ -123,35 +124,11 @@ def call_allen_decision_logger(): Hlt1LineNames=get_allen_line_names()) -def unpack_raw_event(bank_type, raw_dec_reports=make_allen_raw_reports): - #Dynamically-sized list of outputs - location_map = OrderedDict([ - (bank_type, "/Event/DAQ/RawBanks/{}".format(bank_type)), - ]) - - def output_rawbank_transform(**outputs): - return {"RawBankLocations": [location_map[k] for k in outputs]} - - from PyConf.Algorithms import LHCb__UnpackRawEvent - unpackrawevent = LHCb__UnpackRawEvent( - BankTypes=[bank_type], - outputs={ - prop: force_location(loc) - for prop, loc in location_map.items() - }, - output_transform=output_rawbank_transform, - RawEventLocation=raw_dec_reports()) - - return unpackrawevent - def decode_allen_dec_reports(raw_dec_reports=make_allen_raw_reports): from PyConf.Algorithms import HltDecReportsDecoder - unpackrawevent = unpack_raw_event("HltDecReports", raw_dec_reports) - - print(unpackrawevent.outputs) decoder = HltDecReportsDecoder( - RawBanks=unpackrawevent.HltDecReports, + RawBanks=default_raw_banks("HltDecReports", raw_dec_reports), SourceID='Hlt1') return decoder @@ -159,10 +136,9 @@ def decode_allen_dec_reports(raw_dec_reports=make_allen_raw_reports): def decode_allen_sel_reports(raw_sel_reports=make_allen_raw_reports): from PyConf.Algorithms import HltSelReportsDecoder - unpackrawevent = unpack_raw_event("HltSelReports", raw_sel_reports) - decoder = HltSelReportsDecoder( - RawBanks=unpackrawevent.HltSelReports, + RawBanks=default_raw_banks("HltSelReports", raw_sel_reports), + DecReports=default_raw_banks("HltDecReports", raw_sel_reports), SourceID='Hlt1') return decoder diff --git a/Hlt/RecoConf/python/RecoConf/hlt1_tracking.py b/Hlt/RecoConf/python/RecoConf/hlt1_tracking.py index 55935796b0d..9b3f7eee883 100644 --- a/Hlt/RecoConf/python/RecoConf/hlt1_tracking.py +++ b/Hlt/RecoConf/python/RecoConf/hlt1_tracking.py @@ -90,7 +90,7 @@ def make_VeloClusterTrackingSIMD(algorithm=VeloClusterTrackingSIMD, """ # For VeloRetinaClusterTrackingSIMD use VPRetinaClusters rawbank in the file - if "Retina" in str(algorithm) and raw_event == default_raw_event: + if "Retina" in str(algorithm): bank_type = "VPRetinaCluster" # In case raw_event is made from VP or algorithm is VeloClusterTrackingSIMD, use VP rawbank in the event else: -- GitLab From 4d38b608b8b984bc1b97824fff8f2ee1f0989c26 Mon Sep 17 00:00:00 2001 From: sesen Date: Thu, 3 Nov 2022 22:57:57 +0100 Subject: [PATCH 16/28] add dec reports to sel report decoder --- 
Hlt/Hlt1Conf/options/hlt1_selreports_filtering.py | 1 + Hlt/Hlt1Conf/tests/options/test_allen_decreports.py | 2 +- Hlt/Moore/python/Moore/config.py | 2 +- 3 files changed, 3 insertions(+), 2 deletions(-) diff --git a/Hlt/Hlt1Conf/options/hlt1_selreports_filtering.py b/Hlt/Hlt1Conf/options/hlt1_selreports_filtering.py index aaa11b4bd42..aa4c0c06b97 100644 --- a/Hlt/Hlt1Conf/options/hlt1_selreports_filtering.py +++ b/Hlt/Hlt1Conf/options/hlt1_selreports_filtering.py @@ -44,6 +44,7 @@ from Functors import ALL def make_selreports(): return HltSelReportsDecoder( + DecReports=default_raw_banks("HltDecReports"), RawBanks=default_raw_banks("HltSelReports"), SourceID='Hlt1', ) diff --git a/Hlt/Hlt1Conf/tests/options/test_allen_decreports.py b/Hlt/Hlt1Conf/tests/options/test_allen_decreports.py index 7a4509fa06b..499465e741f 100644 --- a/Hlt/Hlt1Conf/tests/options/test_allen_decreports.py +++ b/Hlt/Hlt1Conf/tests/options/test_allen_decreports.py @@ -80,7 +80,7 @@ gaudi.run(1) error = False while TES["/Event"]: - decs = TES[str(decDec.OuputHltDecReportsLocation)] + decs = TES[str(decDec.OutputHltDecReportsLocation)] if not decs: print("DecReports TES location not found") error = True diff --git a/Hlt/Moore/python/Moore/config.py b/Hlt/Moore/python/Moore/config.py index c264003bc26..b704b29d54d 100644 --- a/Hlt/Moore/python/Moore/config.py +++ b/Hlt/Moore/python/Moore/config.py @@ -343,7 +343,7 @@ def stream_writer(stream, locations = new_locations + [ default_raw_banks(rb) for rb in persist_locations ] - + consolidate_views = CombineRawBankViewsToRawEvent( name="EvtSize_" + stream, # FIXME this only works because of a bug https://gitlab.cern.ch/lhcb/LHCb/-/issues/160 -- GitLab From a1d3f17b88be759e63def1a778f9b5633592731b Mon Sep 17 00:00:00 2001 From: Gitlab CI Date: Thu, 3 Nov 2022 21:59:29 +0000 Subject: [PATCH 17/28] Fixed formatting patch generated by https://gitlab.cern.ch/lhcb/Moore/-/jobs/25593862 --- .../options/test_allen_comp_dec_sel_rep.py | 35 +++++++++++-------- .../tests/options/test_allen_decreports.py | 18 +++++----- .../tests/options/test_allen_sel_rep_info.py | 31 +++++++++------- Hlt/Hlt1Conf/tests/options/test_decreports.py | 18 +++++----- Hlt/Moore/python/Moore/config.py | 6 ++-- Hlt/RecoConf/python/RecoConf/hlt1_tracking.py | 2 +- 6 files changed, 62 insertions(+), 48 deletions(-) diff --git a/Hlt/Hlt1Conf/tests/options/test_allen_comp_dec_sel_rep.py b/Hlt/Hlt1Conf/tests/options/test_allen_comp_dec_sel_rep.py index 658d2ec2d00..3afdca0884e 100644 --- a/Hlt/Hlt1Conf/tests/options/test_allen_comp_dec_sel_rep.py +++ b/Hlt/Hlt1Conf/tests/options/test_allen_comp_dec_sel_rep.py @@ -40,24 +40,29 @@ IODataManager(DisablePFNWarning=True) # Disable warning about histogram saving not being required HistogramPersistencySvc(OutputLevel=5) # Decode Hlt DecReports -from Configurables import LHCb__UnpackRawEvent, HltDecReportsDecoder, HltSelReportsDecoder -unpacker = LHCb__UnpackRawEvent("UnpackRawEvent", - OutputLevel=2, - RawBankLocations = ["DAQ/RawBanks/HltDecReports", "DAQ/RawBanks/HltSelReports"], - BankTypes = ["HltDecReports", "HltSelReports"]) +from Configurables import LHCb__UnpackRawEvent, HltDecReportsDecoder, HltSelReportsDecoder +unpacker = LHCb__UnpackRawEvent( + "UnpackRawEvent", + OutputLevel=2, + RawBankLocations=[ + "DAQ/RawBanks/HltDecReports", "DAQ/RawBanks/HltSelReports" + ], + BankTypes=["HltDecReports", "HltSelReports"]) -decDec = HltDecReportsDecoder("HltDecReportsDecoder/Hlt1DecReportsDecoder", - SourceID = "Hlt1", - RawBanks = unpacker.RawBankLocations[0]) +decDec = 
HltDecReportsDecoder( + "HltDecReportsDecoder/Hlt1DecReportsDecoder", + SourceID="Hlt1", + RawBanks=unpacker.RawBankLocations[0]) -selDec = HltSelReportsDecoder("HltSelReportsDecoder/Hlt1SelReportsDecoder", - SourceID = "Hlt1", - DecReports= unpacker.RawBankLocations[0], - RawBanks = unpacker.RawBankLocations[1]) +selDec = HltSelReportsDecoder( + "HltSelReportsDecoder/Hlt1SelReportsDecoder", + SourceID="Hlt1", + DecReports=unpacker.RawBankLocations[0], + RawBanks=unpacker.RawBankLocations[1]) - -app = ApplicationMgr(TopAlg=[unpacker, decDec, selDec], - ExtSvc=[configured_ann_svc(name='HltANNSvc')]) +app = ApplicationMgr( + TopAlg=[unpacker, decDec, selDec], + ExtSvc=[configured_ann_svc(name='HltANNSvc')]) # decoderDB wants TCKANNSvc as name... # Set up counters for recording decisions and selreport existence from MDF diff --git a/Hlt/Hlt1Conf/tests/options/test_allen_decreports.py b/Hlt/Hlt1Conf/tests/options/test_allen_decreports.py index 499465e741f..f534b458b1b 100644 --- a/Hlt/Hlt1Conf/tests/options/test_allen_decreports.py +++ b/Hlt/Hlt1Conf/tests/options/test_allen_decreports.py @@ -56,17 +56,19 @@ IODataManager(DisablePFNWarning=True) HistogramPersistencySvc(OutputLevel=5) # Decode Hlt DecReports from Configurables import LHCb__UnpackRawEvent, HltDecReportsDecoder -unpacker = LHCb__UnpackRawEvent("UnpackRawEvent", - RawBankLocations = ["DAQ/RawBanks/HltDecReports"], - BankTypes = ["HltDecReports"]) +unpacker = LHCb__UnpackRawEvent( + "UnpackRawEvent", + RawBankLocations=["DAQ/RawBanks/HltDecReports"], + BankTypes=["HltDecReports"]) -decDec = HltDecReportsDecoder("HltDecReportsDecoder/Hlt1DecReportsDecoder", - SourceID = "Hlt1", - DecoderMapping= "TCKANNSvc", - RawBanks = unpacker.RawBankLocations[0]) +decDec = HltDecReportsDecoder( + "HltDecReportsDecoder/Hlt1DecReportsDecoder", + SourceID="Hlt1", + DecoderMapping="TCKANNSvc", + RawBanks=unpacker.RawBankLocations[0]) app = ApplicationMgr(TopAlg=[unpacker, decDec]) -# Configure TCKANNSvc +# Configure TCKANNSvc app.ExtSvc += [configured_ann_svc(name='TCKANNSvc')] # Set up counters for recording decisions from MDF diff --git a/Hlt/Hlt1Conf/tests/options/test_allen_sel_rep_info.py b/Hlt/Hlt1Conf/tests/options/test_allen_sel_rep_info.py index c199fb3ba5a..f42feef7192 100644 --- a/Hlt/Hlt1Conf/tests/options/test_allen_sel_rep_info.py +++ b/Hlt/Hlt1Conf/tests/options/test_allen_sel_rep_info.py @@ -224,19 +224,24 @@ IODataManager(DisablePFNWarning=True) HistogramPersistencySvc(OutputLevel=5) # Decode Hlt DecReports from Configurables import LHCb__UnpackRawEvent, HltDecReportsDecoder, HltSelReportsDecoder -unpacker = LHCb__UnpackRawEvent("UnpackRawEvent", - RawBankLocations = ["DAQ/RawBanks/HltDecReports", "DAQ/RawBanks/HltSelReports"], - BankTypes = ["HltDecReports", "HltSelReports"]) - -decDec = HltDecReportsDecoder("HltDecReportsDecoder/Hlt1DecReportsDecoder", - SourceID = "Hlt1", - DecoderMapping= "TCKANNSvc", - RawBanks = unpacker.RawBankLocations[0]) - -selDec = HltSelReportsDecoder("HltDecReportsDecoder/Hlt1SelReportsDecoder", - SourceID = "Hlt1", - DecoderMapping= "TCKANNSvc", - RawBanks = unpacker.RawBankLocations[1]) +unpacker = LHCb__UnpackRawEvent( + "UnpackRawEvent", + RawBankLocations=[ + "DAQ/RawBanks/HltDecReports", "DAQ/RawBanks/HltSelReports" + ], + BankTypes=["HltDecReports", "HltSelReports"]) + +decDec = HltDecReportsDecoder( + "HltDecReportsDecoder/Hlt1DecReportsDecoder", + SourceID="Hlt1", + DecoderMapping="TCKANNSvc", + RawBanks=unpacker.RawBankLocations[0]) + +selDec = HltSelReportsDecoder( + 
"HltDecReportsDecoder/Hlt1SelReportsDecoder", + SourceID="Hlt1", + DecoderMapping="TCKANNSvc", + RawBanks=unpacker.RawBankLocations[1]) app = ApplicationMgr(TopAlg=[unpacker, decDec, selDec]) diff --git a/Hlt/Hlt1Conf/tests/options/test_decreports.py b/Hlt/Hlt1Conf/tests/options/test_decreports.py index 19f562cec91..c818c1c3a76 100644 --- a/Hlt/Hlt1Conf/tests/options/test_decreports.py +++ b/Hlt/Hlt1Conf/tests/options/test_decreports.py @@ -59,17 +59,19 @@ HistogramPersistencySvc(OutputLevel=5) # Decode Hlt DecReports from Configurables import LHCb__UnpackRawEvent, HltDecReportsDecoder -unpacker = LHCb__UnpackRawEvent("UnpackRawEvent", - RawBankLocations = ["DAQ/RawBanks/HltDecReports"], - BankTypes = ["HltDecReports"]) +unpacker = LHCb__UnpackRawEvent( + "UnpackRawEvent", + RawBankLocations=["DAQ/RawBanks/HltDecReports"], + BankTypes=["HltDecReports"]) -decDec = HltDecReportsDecoder("HltDecReportsDecoder/Hlt1DecReportsDecoder", - SourceID = "Hlt1", - DecoderMapping= "TCKANNSvc", - RawBanks = unpacker.RawBankLocations[0]) +decDec = HltDecReportsDecoder( + "HltDecReportsDecoder/Hlt1DecReportsDecoder", + SourceID="Hlt1", + DecoderMapping="TCKANNSvc", + RawBanks=unpacker.RawBankLocations[0]) appMgr = ApplicationMgr(TopAlg=[unpacker, decDec]) -# Configure TCKANNSvc +# Configure TCKANNSvc appMgr.ExtSvc += [configured_ann_svc(name='TCKANNSvc')] # Get expected lines from the previous job diff --git a/Hlt/Moore/python/Moore/config.py b/Hlt/Moore/python/Moore/config.py index b704b29d54d..e8075eef0f8 100644 --- a/Hlt/Moore/python/Moore/config.py +++ b/Hlt/Moore/python/Moore/config.py @@ -343,7 +343,7 @@ def stream_writer(stream, locations = new_locations + [ default_raw_banks(rb) for rb in persist_locations ] - + consolidate_views = CombineRawBankViewsToRawEvent( name="EvtSize_" + stream, # FIXME this only works because of a bug https://gitlab.cern.ch/lhcb/LHCb/-/issues/160 @@ -882,8 +882,8 @@ def RawEventSize_analysis(process, stream, persist_types, new_locations): persist_types = [x for x in persist_types if x != 'DstData'] #persist locations have been unpacked by LHCb__UnpackRawEvent - sizes_of = new_locations + [ - default_raw_banks(rb) for rb in persist_types + sizes_of = new_locations + [ + default_raw_banks(rb) for rb in persist_types ] algs += [ diff --git a/Hlt/RecoConf/python/RecoConf/hlt1_tracking.py b/Hlt/RecoConf/python/RecoConf/hlt1_tracking.py index 9b3f7eee883..2caa3ad7b87 100644 --- a/Hlt/RecoConf/python/RecoConf/hlt1_tracking.py +++ b/Hlt/RecoConf/python/RecoConf/hlt1_tracking.py @@ -98,7 +98,7 @@ def make_VeloClusterTrackingSIMD(algorithm=VeloClusterTrackingSIMD, my_SensorMasks = [j in masked_sensors for j in range(208) ] # 208 = LHCb::Pr::Velo::VPInfos::NSensors return algorithm( - RawBanks=default_raw_banks(bank_type, raw_event), + RawBanks=default_raw_banks(bank_type, raw_event), SensorMasks=tuple(my_SensorMasks)) -- GitLab From 217df8961a549151c950d03786bc8cd4b62ef75d Mon Sep 17 00:00:00 2001 From: sesen Date: Thu, 3 Nov 2022 23:05:47 +0100 Subject: [PATCH 18/28] remove unused imports --- Hlt/Moore/python/Moore/config.py | 2 +- Hlt/RecoConf/python/RecoConf/hlt1_allen.py | 5 ++--- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/Hlt/Moore/python/Moore/config.py b/Hlt/Moore/python/Moore/config.py index e8075eef0f8..039811946e8 100644 --- a/Hlt/Moore/python/Moore/config.py +++ b/Hlt/Moore/python/Moore/config.py @@ -35,7 +35,7 @@ from PyConf.application import ( ) from PyConf.filecontent_metadata import generate_encoding_dictionary, register_encoding_dictionary -from 
GaudiConf.reading import unpack_rawevent, mc_unpackers +from GaudiConf.reading import mc_unpackers from PyConf.utilities import ConfigurationError from PyConf.application import all_nodes_and_algs diff --git a/Hlt/RecoConf/python/RecoConf/hlt1_allen.py b/Hlt/RecoConf/python/RecoConf/hlt1_allen.py index 1cf41531ab7..f3650c57ed4 100644 --- a/Hlt/RecoConf/python/RecoConf/hlt1_allen.py +++ b/Hlt/RecoConf/python/RecoConf/hlt1_allen.py @@ -10,9 +10,9 @@ ############################################################################### import importlib from PyConf import configurable -from PyConf.components import Algorithm, force_location +from PyConf.components import Algorithm from PyConf.control_flow import CompositeNode -from PyConf.application import default_raw_banks, default_raw_event +from PyConf.application import default_raw_banks from PyConf.Algorithms import ( LHCb__Converters__Track__v1__fromVectorLHCbTrack as TrkConvVecV1ToKeyContV1, @@ -20,7 +20,6 @@ from PyConf.Algorithms import ( FromV2TrackV1Track) from AllenConf.hlt1_reconstruction import hlt1_reconstruction -from typing import OrderedDict @configurable -- GitLab From 53f0e1c292f020fc251ddf9a89fa691353499317 Mon Sep 17 00:00:00 2001 From: Gitlab CI Date: Thu, 3 Nov 2022 22:06:36 +0000 Subject: [PATCH 19/28] Fixed formatting patch generated by https://gitlab.cern.ch/lhcb/Moore/-/jobs/25594006 --- Hlt/Moore/python/Moore/config.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Hlt/Moore/python/Moore/config.py b/Hlt/Moore/python/Moore/config.py index 039811946e8..1cef091b613 100644 --- a/Hlt/Moore/python/Moore/config.py +++ b/Hlt/Moore/python/Moore/config.py @@ -35,7 +35,7 @@ from PyConf.application import ( ) from PyConf.filecontent_metadata import generate_encoding_dictionary, register_encoding_dictionary -from GaudiConf.reading import mc_unpackers +from GaudiConf.reading import mc_unpackers from PyConf.utilities import ConfigurationError from PyConf.application import all_nodes_and_algs -- GitLab From 3be06a27aed8da83ee83d0beb5c89c48b07961fd Mon Sep 17 00:00:00 2001 From: sesen Date: Fri, 4 Nov 2022 15:19:36 +0100 Subject: [PATCH 20/28] unify hlt2 and sprucing config a bit --- Hlt/Moore/python/Moore/config.py | 105 ++++++++---------- .../RecoConf/reco_objects_for_spruce.py | 4 +- 2 files changed, 47 insertions(+), 62 deletions(-) diff --git a/Hlt/Moore/python/Moore/config.py b/Hlt/Moore/python/Moore/config.py index 1cef091b613..040bd7fd4eb 100644 --- a/Hlt/Moore/python/Moore/config.py +++ b/Hlt/Moore/python/Moore/config.py @@ -160,6 +160,7 @@ def report_writers_nodes(streams, "spruce": "SpruceSelectionID", "pass": "SpruceSelectionID" }[process] + dec_key = int( register_encoding_dictionary( major_name, @@ -167,21 +168,35 @@ def report_writers_nodes(streams, [l.decision_name for l in lines])), 16) # TODO unsigned? Stick to hex string? - if process == "hlt1" or process == "hlt2": - erw = ExecutionReportsWriter( - Persist=[line.name for line in lines], - ANNSvcKey=major_name, - TCK=dec_key, + + ##spruce and passthrough jobs will write a Spruce report + erw = ExecutionReportsWriter( + Persist=[line.name for line in lines], + ANNSvcKey=major_name, + TCK=dec_key # TODO unsigned? Stick to hex string? 
) - drw = HltDecReportsWriter( - SourceID=source_id, - InputHltDecReportsLocation=erw.DecReportsLocation, - EncodingKey=dec_key, + drw = HltDecReportsWriter( + SourceID=source_id, + InputHltDecReportsLocation=erw.DecReportsLocation, + EncodingKey=dec_key, ) - algs.extend([erw, drw]) + + algs.extend([erw, drw]) + if process == "hlt1" or process == "hlt2": new_hlt_banks['HltDecReports'] = drw.OutputRawEvent + else: + new_hlt_banks['SpruceDecReports'] = drw.OutputView - if process == "hlt1": + if process == "pass": + # For pass, just copy the manifest file, nothing else is needed to be done + if not options.input_manifest_file: + raise RuntimeError( + ' pass-through configuration -- must specify an input manifest' + ) + copy(options.input_manifest_file, options.output_manifest_file) + + elif process == "hlt1": + # For hlt1, make sel reports srm = make_selreports(process, physics_lines, erw) srw = HltSelReportsWriter( SourceID=source_id, @@ -191,60 +206,30 @@ def report_writers_nodes(streams, EncodingKey=srm.properties['EncodingKey']) algs.append(srw) new_hlt_banks['HltSelReports'] = srw.RawEvent - elif process == "hlt2": + + else: + # For hlt2 and spruce, persist_line_outputs + event_stream = "/Event/Spruce" if process=="spruce" else "/Event/HLT2" + reco_stream = "/Event/HLT2" (line_output_cf, line_output_locations, packed_data) = persist_line_outputs( - physics_lines, - data_type, - erw.DecReportsLocation, - associate_mc, - process.capitalize(), - output_manifest_file, - clone_mc=options.simulation and options.input_type == ROOT_KEY) - + physics_lines, + data_type, + erw.DecReportsLocation, + associate_mc, + process.capitalize(), + output_manifest_file, + stream=event_stream, + reco_stream=reco_stream, + clone_mc=options.simulation and options.input_type == ROOT_KEY) + algs.append(line_output_cf) - new_hlt_banks['DstData'] = packed_data.OutputRawEvent extra_locations_to_persist.extend(line_output_locations) - else: - ##spruce and passthrough jobs will write a Spruce report - erw = ExecutionReportsWriter( - Persist=[line.name for line in lines], - ANNSvcKey=major_name, - TCK=dec_key # TODO unsigned? Stick to hex string? 
- ) - drw = HltDecReportsWriter( - SourceID=source_id, - InputHltDecReportsLocation=erw.DecReportsLocation, - EncodingKey=dec_key, - ) - - algs.extend([erw, drw]) - new_hlt_banks['SpruceDecReports'] = drw.OutputView - - if process == "spruce": - #Only create new DstData locations if exclusive sprucing and not if passthrough - line_output_cf, line_output_locations, packed_data = persist_line_outputs( - physics_lines, - data_type, - erw.DecReportsLocation, - associate_mc, - process.capitalize(), - output_manifest_file, - stream="/Event/Spruce", - reco_stream="/Event/HLT2", - clone_mc=options.simulation and options.input_type == ROOT_KEY) - + if process == "hlt2": + new_hlt_banks['DstData'] = packed_data.OutputRawEvent + elif process == "spruce": new_hlt_banks['DstData'] = packed_data.OutputView - algs.append(line_output_cf) - extra_locations_to_persist.extend(line_output_locations) - - if process == "pass": - if not options.input_manifest_file: - raise RuntimeError( - ' pass-through configuration -- must specify an input manifest' - ) - copy(options.input_manifest_file, options.output_manifest_file) - + node = CompositeNode( 'report_writers', combine_logic=NodeLogic.NONLAZY_OR, diff --git a/Hlt/RecoConf/python/RecoConf/reco_objects_for_spruce.py b/Hlt/RecoConf/python/RecoConf/reco_objects_for_spruce.py index 70a2ee0bcc4..37880457dd1 100644 --- a/Hlt/RecoConf/python/RecoConf/reco_objects_for_spruce.py +++ b/Hlt/RecoConf/python/RecoConf/reco_objects_for_spruce.py @@ -44,13 +44,13 @@ def upfront_reconstruction(simulation=True): unpackers = reading.unpackers( reading.make_locations(m, stream), m, - upfront_decoder(process="Hlt2"), + upfront_decoder(process="Hlt2", stream=None), configurables=False) ### TODO:FIXME take advantage of the fact that the above have datahandles... # i.e. should _not_ have to return decoder here, and should just return the _output handles_ and not the algorithms # i.e. `upfront_reconstruction` should be a drop-in replacement for `reconstruction()`, with the same return type - return [upfront_decoder(process="Hlt2").producer] + mc_algs + unpackers + return [upfront_decoder(process="Hlt2", stream=None).producer] + mc_algs + unpackers def reconstruction(simulation=True): -- GitLab From 8b544d7ceb10882e83f115e860245a55eeb806be Mon Sep 17 00:00:00 2001 From: Gitlab CI Date: Fri, 4 Nov 2022 14:20:39 +0000 Subject: [PATCH 21/28] Fixed formatting patch generated by https://gitlab.cern.ch/lhcb/Moore/-/jobs/25610483 --- Hlt/Moore/python/Moore/config.py | 39 +++++++++---------- .../RecoConf/reco_objects_for_spruce.py | 3 +- 2 files changed, 21 insertions(+), 21 deletions(-) diff --git a/Hlt/Moore/python/Moore/config.py b/Hlt/Moore/python/Moore/config.py index 040bd7fd4eb..4de7bcfe231 100644 --- a/Hlt/Moore/python/Moore/config.py +++ b/Hlt/Moore/python/Moore/config.py @@ -168,18 +168,17 @@ def report_writers_nodes(streams, [l.decision_name for l in lines])), 16) # TODO unsigned? Stick to hex string? - ##spruce and passthrough jobs will write a Spruce report erw = ExecutionReportsWriter( Persist=[line.name for line in lines], ANNSvcKey=major_name, TCK=dec_key # TODO unsigned? Stick to hex string? 
- ) + ) drw = HltDecReportsWriter( SourceID=source_id, InputHltDecReportsLocation=erw.DecReportsLocation, EncodingKey=dec_key, - ) + ) algs.extend([erw, drw]) if process == "hlt1" or process == "hlt2": @@ -188,15 +187,15 @@ def report_writers_nodes(streams, new_hlt_banks['SpruceDecReports'] = drw.OutputView if process == "pass": - # For pass, just copy the manifest file, nothing else is needed to be done + # For pass, just copy the manifest file, nothing else is needed to be done if not options.input_manifest_file: raise RuntimeError( ' pass-through configuration -- must specify an input manifest' - ) + ) copy(options.input_manifest_file, options.output_manifest_file) - + elif process == "hlt1": - # For hlt1, make sel reports + # For hlt1, make sel reports srm = make_selreports(process, physics_lines, erw) srw = HltSelReportsWriter( SourceID=source_id, @@ -206,30 +205,30 @@ def report_writers_nodes(streams, EncodingKey=srm.properties['EncodingKey']) algs.append(srw) new_hlt_banks['HltSelReports'] = srw.RawEvent - + else: # For hlt2 and spruce, persist_line_outputs - event_stream = "/Event/Spruce" if process=="spruce" else "/Event/HLT2" + event_stream = "/Event/Spruce" if process == "spruce" else "/Event/HLT2" reco_stream = "/Event/HLT2" (line_output_cf, line_output_locations, packed_data) = persist_line_outputs( - physics_lines, - data_type, - erw.DecReportsLocation, - associate_mc, - process.capitalize(), - output_manifest_file, - stream=event_stream, - reco_stream=reco_stream, - clone_mc=options.simulation and options.input_type == ROOT_KEY) - + physics_lines, + data_type, + erw.DecReportsLocation, + associate_mc, + process.capitalize(), + output_manifest_file, + stream=event_stream, + reco_stream=reco_stream, + clone_mc=options.simulation and options.input_type == ROOT_KEY) + algs.append(line_output_cf) extra_locations_to_persist.extend(line_output_locations) if process == "hlt2": new_hlt_banks['DstData'] = packed_data.OutputRawEvent elif process == "spruce": new_hlt_banks['DstData'] = packed_data.OutputView - + node = CompositeNode( 'report_writers', combine_logic=NodeLogic.NONLAZY_OR, diff --git a/Hlt/RecoConf/python/RecoConf/reco_objects_for_spruce.py b/Hlt/RecoConf/python/RecoConf/reco_objects_for_spruce.py index 37880457dd1..f69a19fcbbb 100644 --- a/Hlt/RecoConf/python/RecoConf/reco_objects_for_spruce.py +++ b/Hlt/RecoConf/python/RecoConf/reco_objects_for_spruce.py @@ -50,7 +50,8 @@ def upfront_reconstruction(simulation=True): ### TODO:FIXME take advantage of the fact that the above have datahandles... # i.e. should _not_ have to return decoder here, and should just return the _output handles_ and not the algorithms # i.e. 
`upfront_reconstruction` should be a drop-in replacement for `reconstruction()`, with the same return type - return [upfront_decoder(process="Hlt2", stream=None).producer] + mc_algs + unpackers + return [upfront_decoder(process="Hlt2", stream=None).producer + ] + mc_algs + unpackers def reconstruction(simulation=True): -- GitLab From 1ba94311373c8bc26849492cc7c240ad0bd9d17e Mon Sep 17 00:00:00 2001 From: sesen Date: Fri, 4 Nov 2022 21:02:28 +0100 Subject: [PATCH 22/28] fix allen test --- Hlt/RecoConf/python/RecoConf/hlt1_tracking.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/Hlt/RecoConf/python/RecoConf/hlt1_tracking.py b/Hlt/RecoConf/python/RecoConf/hlt1_tracking.py index 2caa3ad7b87..852a220f0b5 100644 --- a/Hlt/RecoConf/python/RecoConf/hlt1_tracking.py +++ b/Hlt/RecoConf/python/RecoConf/hlt1_tracking.py @@ -151,8 +151,9 @@ def make_SPmixed_raw_event(detector=["VP"], make_raw=default_raw_event): Returns: """ + # Can detector ever be something else return VPRetinaSPmixer( - RawEventLocation=make_raw(detector)).RawEventLocationMixed + RawEventLocation=make_raw(["VP"])).RawEventLocationMixed @configurable -- GitLab From 328859844f61e9d9f7d499d1f4da98c6efbc8099 Mon Sep 17 00:00:00 2001 From: sesen Date: Tue, 8 Nov 2022 17:55:10 +0100 Subject: [PATCH 23/28] fix dec report location for allen tests --- Hlt/Hlt1Conf/tests/options/test_allen_sel_rep_info.py | 2 +- Hlt/Hlt1Conf/tests/options/test_decreports.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Hlt/Hlt1Conf/tests/options/test_allen_sel_rep_info.py b/Hlt/Hlt1Conf/tests/options/test_allen_sel_rep_info.py index f42feef7192..88f00ecc8bd 100644 --- a/Hlt/Hlt1Conf/tests/options/test_allen_sel_rep_info.py +++ b/Hlt/Hlt1Conf/tests/options/test_allen_sel_rep_info.py @@ -264,7 +264,7 @@ print('Applying objectless line tests to lines matching ', objectless_pattern = re.compile(args.objectless_lines_regex) while TES["/Event"]: - sels = TES["/Event/Hlt1/SelReports"] + sels = TES[str(selDec.OutputHltSelReportsLocation)] if not sels: issues["SelReports TES location not found"] += 1 continue diff --git a/Hlt/Hlt1Conf/tests/options/test_decreports.py b/Hlt/Hlt1Conf/tests/options/test_decreports.py index c818c1c3a76..1af358eaa7e 100644 --- a/Hlt/Hlt1Conf/tests/options/test_decreports.py +++ b/Hlt/Hlt1Conf/tests/options/test_decreports.py @@ -89,7 +89,7 @@ gaudi.run(1) error = False while TES["/Event"]: - decs = TES["/Event/Hlt1/DecReports"] + decs = TES[str(decDec.OutputHltDecReportsLocation)] if not decs: error = True break -- GitLab From cc4f0c67f25fa0805b9ac5834dc99535ced52218 Mon Sep 17 00:00:00 2001 From: sesen Date: Wed, 9 Nov 2022 23:01:59 +0100 Subject: [PATCH 24/28] add sorting to truth matching to avoid changing algorithm counters --- .../Moore/persistence/truth_matching.py | 23 +++++++++++-------- 1 file changed, 13 insertions(+), 10 deletions(-) diff --git a/Hlt/Moore/python/Moore/persistence/truth_matching.py b/Hlt/Moore/python/Moore/persistence/truth_matching.py index 4b2336edf0d..436ec35a3f7 100644 --- a/Hlt/Moore/python/Moore/persistence/truth_matching.py +++ b/Hlt/Moore/python/Moore/persistence/truth_matching.py @@ -106,18 +106,20 @@ def _find_protoparticles(candidates): category. 
""" protos = { - dh + dh.location: dh for output in candidates for dh in _collect_dependencies( output.producer, PROTOPARTICLE_CONTAINER_T) } - charged = set() - neutral = set() - charged_brunel = set() - neutral_brunel = set() + charged = [] + neutral = [] + charged_brunel = [] + neutral_brunel = [] + protos = dict(sorted(protos.items())) + # Heuristically determine whether a given container of ProtoParticles was # created from the raw event or was taken from the input file. We must # truth-match differently between the two cases. - for dh in protos: + for dh in protos.values(): alg = dh.producer prop = alg.inputs.get("InputName") # If there is a packed container location in the data dependency tree, @@ -128,16 +130,17 @@ def _find_protoparticles(candidates): # We happen to know that Brunel outputs charged and neutral protos # to so-named locations if "Charged" in prop.location: - charged_brunel.add(dh) + charged_brunel.append(dh) else: - neutral_brunel.add(dh) + neutral_brunel.apend(dh) else: # Assume charged ProtoParticle container if the maker has CALO # clusters in its dependencies if _collect_dependencies(dh.producer, CALOCLUSTER_CONTAINER_T): - neutral.add(dh) + neutral.apend(dh) else: - charged.add(dh) + charged.append(dh) + return (charged, neutral), (charged_brunel, neutral_brunel) -- GitLab From 4a6f4b380d086bcbcb5557d709a11e9fc4d9ac2a Mon Sep 17 00:00:00 2001 From: Gitlab CI Date: Wed, 9 Nov 2022 22:03:03 +0000 Subject: [PATCH 25/28] Fixed formatting patch generated by https://gitlab.cern.ch/lhcb/Moore/-/jobs/25723525 --- Hlt/Moore/python/Moore/persistence/truth_matching.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Hlt/Moore/python/Moore/persistence/truth_matching.py b/Hlt/Moore/python/Moore/persistence/truth_matching.py index 436ec35a3f7..dd48a3d40fd 100644 --- a/Hlt/Moore/python/Moore/persistence/truth_matching.py +++ b/Hlt/Moore/python/Moore/persistence/truth_matching.py @@ -106,7 +106,7 @@ def _find_protoparticles(candidates): category. """ protos = { - dh.location: dh + dh.location: dh for output in candidates for dh in _collect_dependencies( output.producer, PROTOPARTICLE_CONTAINER_T) } -- GitLab From d9b40cad4e1290d3e172accd87e19aa2aee346d1 Mon Sep 17 00:00:00 2001 From: sesen Date: Thu, 10 Nov 2022 11:59:28 +0100 Subject: [PATCH 26/28] add sorting to truth matching to avoid changing algorithm counters --- .../Moore/persistence/truth_matching.py | 25 +++++++++++-------- 1 file changed, 14 insertions(+), 11 deletions(-) diff --git a/Hlt/Moore/python/Moore/persistence/truth_matching.py b/Hlt/Moore/python/Moore/persistence/truth_matching.py index dd48a3d40fd..ad51c0e1194 100644 --- a/Hlt/Moore/python/Moore/persistence/truth_matching.py +++ b/Hlt/Moore/python/Moore/persistence/truth_matching.py @@ -106,20 +106,19 @@ def _find_protoparticles(candidates): category. """ protos = { - dh.location: dh + dh for output in candidates for dh in _collect_dependencies( output.producer, PROTOPARTICLE_CONTAINER_T) } - charged = [] - neutral = [] - charged_brunel = [] - neutral_brunel = [] - protos = dict(sorted(protos.items())) + charged = set() + neutral = set() + charged_brunel = set() + neutral_brunel = set() # Heuristically determine whether a given container of ProtoParticles was # created from the raw event or was taken from the input file. We must # truth-match differently between the two cases. 
- for dh in protos.values(): + for dh in protos: alg = dh.producer prop = alg.inputs.get("InputName") # If there is a packed container location in the data dependency tree, @@ -130,17 +129,21 @@ def _find_protoparticles(candidates): # We happen to know that Brunel outputs charged and neutral protos # to so-named locations if "Charged" in prop.location: - charged_brunel.append(dh) + charged_brunel.add(dh) else: - neutral_brunel.apend(dh) + neutral_brunel.add(dh) else: # Assume charged ProtoParticle container if the maker has CALO # clusters in its dependencies if _collect_dependencies(dh.producer, CALOCLUSTER_CONTAINER_T): - neutral.apend(dh) + neutral.add(dh) else: - charged.append(dh) + charged.add(dh) + for cont in [charged, neutral, charged_brunel, neutral_brunel]: + cont_loc = {dh.location: dh for dh in cont} + cont_loc = dict(sorted(cont_loc.items())) + cont = [dh for dh in cont_loc.values()] return (charged, neutral), (charged_brunel, neutral_brunel) -- GitLab From 382d045899ae222c503464f24eae083cba4aaabb Mon Sep 17 00:00:00 2001 From: Sevda Esen Date: Thu, 10 Nov 2022 22:42:19 +0100 Subject: [PATCH 27/28] Apply 1 suggestion(s) to 1 file(s) --- Hlt/Moore/python/Moore/persistence/truth_matching.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/Hlt/Moore/python/Moore/persistence/truth_matching.py b/Hlt/Moore/python/Moore/persistence/truth_matching.py index ad51c0e1194..8f04dd45baa 100644 --- a/Hlt/Moore/python/Moore/persistence/truth_matching.py +++ b/Hlt/Moore/python/Moore/persistence/truth_matching.py @@ -140,11 +140,10 @@ def _find_protoparticles(candidates): else: charged.add(dh) - for cont in [charged, neutral, charged_brunel, neutral_brunel]: - cont_loc = {dh.location: dh for dh in cont} - cont_loc = dict(sorted(cont_loc.items())) - cont = [dh for dh in cont_loc.values()] - return (charged, neutral), (charged_brunel, neutral_brunel) + def dh_sorted(handles): + return sorted( handles, key = lambda dh : dh.location ) + + return (dh_sorted(charged), dh_sorted(neutral)), (dh_sorted(charged_brunel), dh_sorted(neutral_brunel)) def _match_charged(charged, charged_brunel, mc_particles): -- GitLab From 19280f403b358135fb1397041f5778b80fe93e98 Mon Sep 17 00:00:00 2001 From: Gitlab CI Date: Thu, 10 Nov 2022 21:43:01 +0000 Subject: [PATCH 28/28] Fixed formatting patch generated by https://gitlab.cern.ch/lhcb/Moore/-/jobs/25750274 --- Hlt/Moore/python/Moore/persistence/truth_matching.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/Hlt/Moore/python/Moore/persistence/truth_matching.py b/Hlt/Moore/python/Moore/persistence/truth_matching.py index 8f04dd45baa..37e7c0040cd 100644 --- a/Hlt/Moore/python/Moore/persistence/truth_matching.py +++ b/Hlt/Moore/python/Moore/persistence/truth_matching.py @@ -141,9 +141,11 @@ def _find_protoparticles(candidates): charged.add(dh) def dh_sorted(handles): - return sorted( handles, key = lambda dh : dh.location ) + return sorted(handles, key=lambda dh: dh.location) - return (dh_sorted(charged), dh_sorted(neutral)), (dh_sorted(charged_brunel), dh_sorted(neutral_brunel)) + return (dh_sorted(charged), + dh_sorted(neutral)), (dh_sorted(charged_brunel), + dh_sorted(neutral_brunel)) def _match_charged(charged, charged_brunel, mc_particles): -- GitLab
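
For readers following the last few patches ("add sorting to truth matching to avoid changing algorithm counters" and the applied review suggestion): the point of ordering the ProtoParticle handles is that iterating a Python set is not reproducible from one process to the next, so any configuration derived from that order (algorithm names, counters) can drift between runs; sorting by the TES location gives a stable order. Below is a minimal standalone sketch of that idea only, not Moore code: the namedtuple is a hypothetical stand-in for the framework DataHandle and the locations are made-up examples.

    from collections import namedtuple

    # Hypothetical stand-in for a framework DataHandle; only the attribute
    # actually used for ordering (location) is modelled here.
    DataHandle = namedtuple("DataHandle", ["location"])

    # A set of handles, as produced by the dependency collection above.
    handles = {
        DataHandle("/Event/C"),
        DataHandle("/Event/A"),
        DataHandle("/Event/B"),
    }

    def dh_sorted(handles):
        # Same pattern as the final patch: order by location instead of
        # relying on set iteration order, which is not stable across runs.
        return sorted(handles, key=lambda dh: dh.location)

    # Prints ['/Event/A', '/Event/B', '/Event/C'] on every run, regardless
    # of how the set happens to iterate.
    print([dh.location for dh in dh_sorted(handles)])

In the patched truth_matching.py the same helper is applied to all four categories before they are returned, so the downstream truth-matching configuration is built in a deterministic order.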