HPS-MC
 
Loading...
Searching...
No Matches
tools.py
Go to the documentation of this file.
1"""! Tools that can be used in HPSMC jobs."""
2
3import json
4import os
5import gzip
6import shutil
7import subprocess
8import tarfile
9
10from subprocess import PIPE
11
12from hpsmc.component import Component
13import hpsmc.func as func
14
15
17 """!
18 Run the SLIC Geant4 simulation.
19
20 Optional parameters are: **nevents**, **macros**, **run_number**, **disable_particle_table** \n
21 Required parameters are: **detector** \n
22 Required configurations are: **slic_dir**, **detector_dir**
23 """
24
25 def __init__(self, **kwargs):
26
27 self.macros = []
28
29 self.run_number = None
30
31 self.detector_dir = None
32
36
37 Component.__init__(
38 self, name="slic", command="slic", output_ext=".slcio", **kwargs
39 )
40
41 def cmd_args(self):
42 """!
43 Setup command arguments.
44 @return list of arguments
45 """
46 if not len(self.input_files()):
47 raise Exception("No inputs given for SLIC.")
48
49 args = [
50 "-g",
51 self.__detector_file(),
52 # "-i", self.input_files()[0],
53 "-o",
54 self.output_files()[0],
55 "-d%s" % str(self.seedseed),
56 ]
57
58 if self.neventsnevents is not None:
59 args.extend(["-r", str(self.neventsnevents)])
60
61 if self.run_number is not None:
62 args.extend(["-m", "run_number.mac"])
63
64 if not self.disable_particle_table:
65 tbl = self.__particle_tbl()
66 if os.path.exists(tbl):
67 args.extend(["-P", tbl])
68 else:
69 raise Exception("SLIC particle.tbl does not exist: %s" % tbl)
70
71 if len(self.macros):
72 # args = []
73 for macro in self.macros:
74 if macro == "run_number.mac":
75 raise Exception("Macro name '%s' is not allowed." % macro)
76 if not os.path.isabs(macro):
77 raise Exception("Macro '%s' is not an absolute path." % macro)
78 args.extend(["-m", macro])
79 else:
80 args.extend(["-i", self.input_files()[0]])
81
82 return args
83
84 def __detector_file(self):
85 """! Return path to detector file."""
86 return os.path.join(self.detector_dir, self.detector, self.detector + ".lcdd")
87
88 def __particle_tbl(self):
89 """! Return path to particle table."""
90 return os.path.join(self.slic_dir, "share", "particle.tbl")
91
92 def config(self, parser):
93 """! Configure SLIC component."""
94 super().config(parser)
95
96 if self.detector_dir is None:
97 self.detector_dir = "{}/share/detectors".format(self.hpsmc_dir)
98 if not os.path.isdir(self.detector_dir):
99 raise Exception("Failed to find valid detector_dir")
100 self.logger.debug(
101 "Using detector_dir from install: {}".format(self.detector_dir)
102 )
103
104 def setup(self):
105 """! Setup SLIC component."""
106 if not os.path.exists(self.slic_dir):
107 raise Exception("slic_dir does not exist: %s" % self.slic_dir)
108
109 self.env_script = self.slic_dir + os.sep + "bin" + os.sep + "slic-env.sh"
110 if not os.path.exists(self.env_script):
111 raise Exception("SLIC setup script does not exist: %s" % self.namename)
112
113 if self.run_number is not None:
114 run_number_cmd = "/lcio/runNumber %d" % self.run_number
115 run_number_mac = open("run_number.mac", "w")
116 run_number_mac.write(run_number_cmd)
117 run_number_mac.close()
118
120 """!
121 Return list of optional parameters.
122
123 Optional parameters are: **nevents**, **macros**, **run_number**
124 @return list of optional parameters
125 """
126 return ["nevents", "macros", "run_number", "disable_particle_table"]
127
129 """!
130 Return list of required parameters.
131
132 Required parameters are: **detector**
133 @return list of required parameters
134 """
135 return ["detector"]
136
138 """!
139 Return list of required configurations.
140
141 Required configurations are: **slic_dir**, **detector_dir**
142 @return list of required configurations
143 """
144 return ["slic_dir", "detector_dir"]
145
146 def execute(self, log_out, log_err):
147 """!
148 Execute SLIC component.
149
150 Component is executed by creating command line input
151 from command and command arguments.
152 @return return code of process
153 """
154 # SLIC needs to be run inside bash as the Geant4 setup script is a piece of #@$@#$.
155 cl = 'bash -c ". %s && %s %s"' % (
156 self.env_script,
158 " ".join(self.cmd_argscmd_args()),
159 )
160
161 # self.logger.info("Executing '%s' with command: %s" % (self.name, cl))
162 proc = subprocess.Popen(cl, shell=True, stdout=log_out, stderr=log_err)
163 proc.communicate()
164 proc.wait()
165
166 return proc.returncode
167
168
170 """!
171 Copy the SQLite database file to the desired location.
172 """
173
174 def __init__(self, **kwargs):
175 """!
176 Initialize SQLiteProc to copy the SQLite file.
177 """
178 self.source_file = kwargs.get("source_file")
179 self.destination_file = kwargs.get("destination_file")
180
181 # Set the Local SQLite Snapshot Location
182 if self.source_file is not None:
183 self.logger.debug(
184 "Setting SQLite local copy source file from config: %s"
185 + self.source_file
186 )
187 args.append(self.source_file)
188 if self.destination_file is not None:
189 self.logger.debug(
190 "Setting Job Destination file from config: %s" % self.destination_file
191 )
192 args.append("-Dorg.hps.conditions.url=%s" % self.destination_file)
193
194 # Ensure to call the parent constructor properly
195 Component.__init__(self, name="sqlite_file_copy", **kwargs)
196
197 def cmd_args(self):
198 """!
199 Return dummy command arguments to satisfy the parent class.
200 """
201 cmd_args = ["(no-command-needed)"]
202
203 if not all(isinstance(arg, str) for arg in cmd_args):
204 raise ValueError("All arguments must be strings.")
205 # return ["(no-command-needed)"]
206 return ["--source", self.source_file, "--destination", self.destination_file]
207
208 def execute(self, log_out, log_err):
209 """!
210 Execute the file copy operation.
211 """
212
213 try:
214 # Copy the file
215
216 self.logger.info(
217 f"Copying file from {self.source_file} to {self.destination_file}"
218 )
219 shutil.copy(self.source_file, self.destination_file)
220
221 # Log success
222 self.logger.info(f"Successfully copied file to {self.destination_file}")
223
224 return 0 # Success code
225
226 except Exception as e:
227 self.logger.error(f"Error during file copy: {e}")
228 return 1 # Error code
229
230
232 """!
233 Run the hps-java JobManager class.
234
235 Input files have slcio format.
236
237 Required parameters are: **steering_files** \n
238 Optional parameters are: **detector**, **run_number**, **defs**
239 """
240
241 def __init__(self, steering=None, **kwargs):
242
244 self.run_number = None
245
246 self.neventsnevents = None
247
248 self.detector = None
249
251
252 self.defs = None
253
254 self.java_args = None
255
257
258 self.lcsim_cache_dir = None
259
260 self.conditions_user = None
261
263
264 self.conditions_url = None
265
266 self.steering = steering
267
269
270 if "overlay_file" in kwargs:
271 self.overlay_file = kwargs["overlay_file"]
272 else:
273 self.overlay_file = None
274
275 Component.__init__(
276 self,
277 name="job_manager",
278 command="java",
279 description="HPS Java Job Manager",
280 output_ext=".slcio",
281 **kwargs,
282 )
283
284 # Automatically append steering file key to output file name
285 if self.append_tokappend_tok is None:
287 self.logger.debug(
288 "Append token for '%s' automatically set to '%s' from steering key."
290 )
291
292 def config(self, parser):
293 """! Configure JobManager component."""
294 super().config(parser)
295 # if installed these are set in the environment script...
296 if self.hps_java_bin_jar is None:
297 if os.getenv("HPS_JAVA_BIN_JAR", None) is not None:
298 self.hps_java_bin_jar = os.getenv("HPS_JAVA_BIN_JAR", None)
299 self.logger.debug(
300 "Set HPS_JAVA_BIN_JAR from environment: {}".format(
302 )
303 )
304 else:
305 raise Exception(
306 "hps_java_bin_jar not set in environment or config file!"
307 )
308 if self.conditions_url is None:
309 if os.getenv("CONDITIONS_URL", None) is not None:
310 self.conditions_url = os.getenv("CONDITIONS_URL", None)
311 self.logger.debug(
312 "Set CONDITIONS_URL from environment: {}".format(
314 )
315 )
316
318 """!
319 Return list of required configurations.
320
321 Required configurations are: **hps_java_bin_jar**
322 @retun list of required configurations.
323 """
324 return ["hps_java_bin_jar"]
325
326 def setup(self):
327 """! Setup JobManager component."""
328 if not len(self.input_files()):
329 raise Exception("No inputs provided to hps-java.")
330
331 if self.steering not in self.steering_files:
332 raise Exception(
333 "Steering '%s' not found in: %s" % (self.steering, self.steering_files)
334 )
336
337 def cmd_args(self):
338 """!
339 Setup command arguments.
340 @return list of arguments
341 """
342 args = []
343
344 if self.java_args is not None:
345 self.logger.debug("Setting java_args from config: %s" % self.java_args)
346 args.append(self.java_args)
347
348 if self.logging_config_file is not None:
349 self.logger.debug(
350 "Setting logging_config_file from config: %s" % self.logging_config_file
351 )
352 args.append("-Djava.util.logging.config.file=%s" % self.logging_config_file)
353
354 if self.lcsim_cache_dir is not None:
355 self.logger.debug(
356 "Setting lcsim_cache_dir from config: %s" % self.lcsim_cache_dir
357 )
358 args.append("-Dorg.lcsim.cacheDir=%s" % self.lcsim_cache_dir)
359
360 if self.conditions_user is not None:
361 self.logger.debug(
362 "Setting conditions_user from config: %s" % self.conditions_user
363 )
364 args.append("-Dorg.hps.conditions.user=%s" % self.conditions_user)
365 if self.conditions_password is not None:
366 self.logger.debug("Setting conditions_password from config (not shown)")
367 args.append("-Dorg.hps.conditions.password=%s" % self.conditions_password)
368 if self.conditions_url is not None:
369 self.logger.debug(
370 "Setting conditions_url from config: %s" % self.conditions_url
371 )
372 args.append("-Dorg.hps.conditions.url=%s" % self.conditions_url)
373
374 args.append("-jar")
375 args.append(self.hps_java_bin_jar)
376
377
378 if self.event_print_interval is not None:
379 args.append("-e")
380 args.append(str(self.event_print_interval))
381
382 if self.run_number is not None:
383 args.append("-R")
384 args.append(str(self.run_number))
385
386 if self.detector is not None:
387 args.append("-d")
388 args.append(self.detector)
389
390 if len(self.output_files()):
391 args.append("-D")
392 args.append("outputFile=" + os.path.splitext(self.output_files()[0])[0])
393
394 if self.defs:
395 for k, v in self.defs.items():
396 args.append("-D")
397 args.append(k + "=" + str(v))
398
399 if not os.path.isfile(self.steering_file):
400 args.append("-r")
401 self.logger.debug(
402 "Steering does not exist at '%s' so assuming it is a resource."
403 % self.steering_file
404 )
405 else:
406 if not os.path.isabs(self.steering_file):
407 raise Exception(
408 "Steering looks like a file but is not an abs path: %s"
409 % self.steering_file
410 )
411 args.append(self.steering_file)
412
413 if self.neventsnevents is not None:
414 args.append("-n")
415 args.append(str(self.neventsnevents))
416
417 for input_file in self.input_files():
418 args.append("-i")
419 args.append(input_file)
420
421 if self.overlay_file is not None:
422 args.append("-D")
423 args.append("overlayFile=" + os.path.splitext(self.overlay_file)[0])
424
425 return args
426
428 """!
429 Return list of required parameters.
430
431 Required parameters are: **steering_files**
432 @return list of required parameters
433 """
434 return ["steering_files"]
435
437 """!
438 Return list of optional parameters.
439
440 Optional parameters are: **detector**, **run_number**, **defs**
441 @return list of optional parameters
442 """
443 return ["detector", "run_number", "defs", "nevents"]
444
445
447 """!
448 Run the make_mini_dst command on the input file.
449
450 Required parameters are: **input_file**
451 Required configs are: **minidst_install_dir**
452 """
453
454 def __init__(self, **kwargs):
455 """!
456 Initialize ProcessMiniDst with default input file and the command to run.
457 """
458 self.input_file = None
459 self.minidst_args = None
460 # Ensure to call the parent constructor properly
461 Component.__init__(self, name='make_mini_dst',
462 command='make_mini_dst',
463 description='Create the MiniDST ROOT file',
464 output_ext='.root',
465 **kwargs)
466
467 def setup(self):
468 """! Setup the MiniDST component."""
469 # Check if input files exist
470 if not len(self.input_files()):
471 raise Exception("No input files provided to make_mini_dst.")
472
473 if not os.path.exists(self.minidst_install_dir):
474 raise Exception("minidst_install_dir does not exist: %s" % self.minidst_install_dir)
475
477 """!
478 Return list of required parameters.
479
480 Required parameters are only the standard "input_files".
481 @return list of required parameters
482 """
483 return []
484
486 """!
487 Return list of optional parameters.
488
489 There are currently no optional parameters.
490 @return list of optional parameters
491 """
492 return []
493
495 """!
496 Return list of required configs.
497
498 Required configs are: **minidst_install_dir**
499 @return list of required configs
500 """
501 return ["minidst_install_dir"]
502
503 def output_files(self):
504 """! Adjust names of output files."""
505 if self.outputsoutputs is None:
506 f, ext = os.path.splitext(self.input_files()[0])
507 self.outputsoutputs = f"{f}_minidst.root"
508 print(f"Set outputs to: {self.outputs}")
509
510 return self.outputsoutputs
511
512 def cmd_args(self):
513 """!
514 Setup command arguments for make_mini_dst.
515 @return list of arguments
516 """
517 args = []
518
519 print("===== Make MiniDST with input files: ", end=" ")
520 for i in range(len(self.input_files())):
521 print(f"{self.input_files()[i]}", end=", ")
522 print(f" ==> {self.output_files()}")
523
524 if self.minidst_args is not None:
525 args.extend(self.minidst_args)
526
527 args.extend(['-o', self.output_filesoutput_files()])
528 args.extend(self.input_files())
529 return args
530
531
533 """!
534 Run the hpstr analysis tool.
535
536 Required parameters are: **config_files** \n
537 Optional parameters are: **year**, **is_data**, **nevents** \n
538 Required configs are: **hpstr_install_dir**, **hpstr_base**
539 """
540
541 def __init__(self, cfg=None, is_data=0, year=None, tracking=None, **kwargs):
542
543 self.cfg = cfg
544
545 self.is_data = is_data
546
547 self.year = year
548
549 self.tracking = tracking
550
552 self.hpstr_base = None
553
554 Component.__init__(self, name="hpstr", command="hpstr", **kwargs)
555
556 def setup(self):
557 """! Setup HPSTR component."""
558 if not os.path.exists(self.hpstr_install_dir):
559 raise Exception(
560 "hpstr_install_dir does not exist: %s" % self.hpstr_install_dir
561 )
562 self.env_script = (
563 self.hpstr_install_dir + os.sep + "bin" + os.sep + "hpstr-env.sh"
564 )
565
566 # The config file to use is read from a dict in the JSON parameters.
567 if self.cfg not in self.config_files:
568 raise Exception(
569 "Config '%s' was not found in: %s" % (self.cfg, self.config_files)
570 )
571 config_file = self.config_files[self.cfg]
572 if len(os.path.dirname(config_file)):
573 # If there is a directory name then we expect an absolute path not in the hpstr dir.
574 if os.path.isabs(config_file):
575 self.cfg_path = config_file
576 else:
577 # The config must be an abs path.
578 raise Exception(
579 "The config has a directory but is not an abs path: %s" % self.cfg
580 )
581 else:
582 # Assume the cfg file is within the hpstr base dir.
583 self.cfg_path = os.path.join(
584 self.hpstr_base, "processors", "config", config_file
585 )
586 self.logger.debug("Set config path: %s" % self.cfg_path)
587
588 # For ROOT output, automatically append the cfg key from the job params.
589 if os.path.splitext(self.input_files()[0])[1] == ".root":
591 self.logger.debug(
592 "Automatically appending token to output file: %s" % self.append_tokappend_tok
593 )
594
596 """!
597 Return list of required parameters.
598
599 Required parameters are: **config_files**
600 @return list of required parameters
601 """
602 return ["config_files"]
603
605 """!
606 Return list of optional parameters.
607
608 Optional parameters are: **year**, **is_data**, **nevents**
609 @return list of optional parameters
610 """
611 return ["year", "is_data", "nevents", "tracking"]
612
614 """!
615 Return list of required configs.
616
617 Required configs are: **hpstr_install_dir**, **hpstr_base**
618 @return list of required configs
619 """
620 return ["hpstr_install_dir", "hpstr_base"]
621
622 def cmd_args(self):
623 """!
624 Setup command arguments.
625 @return list of arguments
626 """
627 args = [
628 self.cfg_path,
629 "-t",
630 str(self.is_data),
631 "-i",
632 self.input_files()[0],
633 "-o",
634 self.output_filesoutput_files()[0],
635 ]
636 if self.neventsnevents is not None:
637 args.extend(["-n", str(self.neventsnevents)])
638 if self.year is not None:
639 args.extend(["-y", str(self.year)])
640 if self.tracking is not None:
641 args.extend(["-w", str(self.tracking)])
642 return args
643
644 def output_files(self):
645 """! Adjust names of output files."""
646 f, ext = os.path.splitext(self.input_files()[0])
647 if ".slcio" in ext:
648 return ["%s.root" % f]
649 else:
650 if not self.append_tokappend_tok:
651 self.append_tokappend_tok = self.cfg
652 return ["%s_%s.root" % (f, self.append_tokappend_tok)]
653
654 def execute(self, log_out, log_err):
655 """! Execute HPSTR component."""
656 args = self.cmd_argscmd_args()
657 cl = 'bash -c ". %s && %s %s"' % (
658 self.env_script,
660 " ".join(self.cmd_argscmd_args()),
661 )
662
663 self.logger.debug("Executing '%s' with command: %s" % (self.namename, cl))
664 proc = subprocess.Popen(cl, shell=True, stdout=log_out, stderr=log_err)
665 proc.communicate()
666 proc.wait()
667
668 return proc.returncode
669
670
671
672
673
675 """!
676 Generic class for StdHep tools.
677 """
678
679
680 seed_names = [
681 "beam_coords",
682 "beam_coords_old",
683 "lhe_tridents",
684 "lhe_tridents_displacetime",
685 "lhe_tridents_displaceuni",
686 "merge_poisson",
687 "mix_signal",
688 "random_sample",
689 ]
690
691 def __init__(self, name=None, **kwargs):
692
693 Component.__init__(self, name=name, command="stdhep_" + name, **kwargs)
694
695 def cmd_args(self):
696 """!
697 Setup command arguments.
698 @return list of arguments
699 """
700 args = []
701
702 if self.name in StdHepTool.seed_names:
703 args.extend(["-s", str(self.seedseed)])
704
705 if len(self.output_files()) == 1:
706 args.insert(0, self.output_files()[0])
707 elif len(self.output_files()) > 1:
708 raise Exception("Too many outputs specified for StdHepTool.")
709 else:
710 raise Exception("No outputs specified for StdHepTool.")
711
712 if len(self.input_files()):
713 for i in self.inputs[::-1]:
714 args.insert(0, i)
715 else:
716 raise Exception("No inputs specified for StdHepTool.")
717
718 return args
719
720
722 """!
723 Transform StdHep events into beam coordinates.
724
725 Optional parameters are: **beam_sigma_x**, **beam_sigma_y**, **beam_rot_x**,
726 **beam_rot_y**, **beam_rot_z**, **target_x**, **target_y**, **target_z**
727 """
728
729 def __init__(self, **kwargs):
730
732 self.beam_sigma_x = None
733
734 self.beam_sigma_y = None
735
736 self.target_x = None
737
738 self.target_y = None
739
740 self.target_z = None
741
742 self.beam_rot_x = None
743
744 self.beam_rot_y = None
745
746 self.beam_rot_z = None
747
748 StdHepTool.__init__(self, name="beam_coords", append_tok="rot", **kwargs)
749
750 def cmd_args(self):
751 """!
752 Setup command arguments.
753 @return list of arguments
754 """
755 args = StdHepTool.cmd_args(self)
756
757 if self.beam_sigma_x is not None:
758 args.extend(["-x", str(self.beam_sigma_x)])
759 if self.beam_sigma_y is not None:
760 args.extend(["-y", str(self.beam_sigma_y)])
761
762 if self.beam_rot_x is not None:
763 args.extend(["-u", str(self.beam_rot_x)])
764 if self.beam_rot_y is not None:
765 args.extend(["-v", str(self.beam_rot_y)])
766 if self.beam_rot_z is not None:
767 args.extend(["-w", str(self.beam_rot_z)])
768
769 if self.target_x is not None:
770 args.extend(["-X", str(self.target_x)])
771 if self.target_y is not None:
772 args.extend(["-Y", str(self.target_y)])
773 if self.target_z is not None:
774 args.extend(["-Z", str(self.target_z)])
775
776 return args
777
779 """!
780 Return list of optional parameters.
781
782 Optional parameters are: **beam_sigma_x**, **beam_sigma_y**, **beam_rot_x**,
783 **beam_rot_y**, **beam_rot_z**, **target_x**, **target_y**, **target_z**
784 @return list of optional parameters
785 """
786 return [
787 "beam_sigma_x",
788 "beam_sigma_y",
789 "beam_rot_x",
790 "beam_rot_y",
791 "beam_rot_z",
792 "target_x",
793 "target_y",
794 "target_z",
795 ]
796
797
799 """!
800 Randomly sample StdHep events into a new file.
801
802 Optional parameters are: **nevents**, **mu**
803 """
804
805 def __init__(self, **kwargs):
806 StdHepTool.__init__(self, name="random_sample", append_tok="sampled", **kwargs)
807
808 self.mu = None
809
810 def cmd_args(self):
811 """!
812 Setup command arguments.
813 @return list of arguments
814 """
815 args = []
816
817 if self.name in StdHepTool.seed_names:
818 args.extend(["-s", str(self.seedseedseed)])
819
820 args.extend(["-N", str(1)])
821
822 if self.neventsnevents is not None:
823 args.extend(["-n", str(self.neventsnevents)])
824
825 if self.mu is not None:
826 args.extend(["-m", str(self.mu)])
827
828 if len(self.output_files()) == 1:
829 # only use file name, not extension because extension is added by tool
830 args.insert(0, os.path.splitext(self.output_files()[0])[0])
831 elif len(self.output_files()) > 1:
832 raise Exception("Too many outputs specified for RandomSample.")
833 else:
834 raise Exception("No outputs specified for RandomSample.")
835
836 if len(self.input_files()):
837 for i in self.inputs[::-1]:
838 args.insert(0, i)
839 else:
840 raise Exception("No inputs were provided.")
841
842 return args
843
845 """!
846 Return list of optional parameters.
847
848 Optional parameters are: **nevents**, **mu**
849 @return list of optional parameters
850 """
851 return ["nevents", "mu"]
852
853 def execute(self, log_out, log_err):
854 """! Execute RandomSample component"""
855 returncode = Component.execute(self, log_out, log_err)
856
857 # Move file to proper output file location.
858 src = "%s_1.stdhep" % os.path.splitext(self.output_files()[0])[0]
859 dest = "%s.stdhep" % os.path.splitext(self.output_files()[0])[0]
860 self.logger.debug("Moving '%s' to '%s'" % (src, dest))
861 shutil.move(src, dest)
862
863 return returncode
864
865
867 """!
868 Convert LHE files to StdHep.
869 """
870
871 def __init__(self, **kwargs):
872
873 self.ctau = None
874 StdHepTool.__init__(self,
875 name='lhe_phi',
876 output_ext='.stdhep',
877 **kwargs)
878
879 def cmd_args(self):
880 """!
881 Setup command arguments.
882 @return list of arguments
883 """
884 args = StdHepTool.cmd_args(self)
885 return args
886
887
889 """!
890 Convert LHE files to StdHep, displacing the time by given ctau.
891
892 Optional parameters are: **ctau**
893 """
894
895 def __init__(self, **kwargs):
896
897 self.ctau = None
898 StdHepTool.__init__(
899 self, name="lhe_tridents_displacetime", output_ext=".stdhep", **kwargs
900 )
901
902 def cmd_args(self):
903 """!
904 Setup command arguments.
905 @return list of arguments
906 """
907 args = StdHepTool.cmd_args(self)
908 if self.ctau is not None:
909 args.extend(["-l", str(self.ctau)])
910 return args
911
913 """!
914 Return list of optional parameters.
915
916 Optional parameters are: **ctau**
917 @return list of optional parameters
918 """
919 return ["ctau"]
920
921
923 """!
924 Convert LHE files to StdHep, displacing the time by given ctau.
925
926 Optional parameters are: **ctau**
927 """
928
929 def __init__(self, **kwargs):
930
931 self.ctau = None
932 StdHepTool.__init__(
933 self, name="lhe_tridents_displaceuni", output_ext=".stdhep", **kwargs
934 )
935
936 def cmd_args(self):
937 """!
938 Setup command arguments.
939 @return list of arguments
940 """
941 args = StdHepTool.cmd_args(self)
942 if self.ctau is not None:
943 args.extend(["-l", str(self.ctau)])
944 return args
945
947 """!
948 Return list of optional parameters.
949
950 Optional parameters are: **ctau**
951 @return list of optional parameters
952 """
953 return ["ctau"]
954
955
957 """!
958 Add mother particles for physics samples.
959 """
960
961 def __init__(self, **kwargs):
962 StdHepTool.__init__(self, name="add_mother", append_tok="mom", **kwargs)
963
964
966 """! Add full truth mother particles for physics samples"""
967
968 def __init__(self, **kwargs):
969 StdHepTool.__init__(
970 self, "add_mother_full_truth", append_tok="mom_full_truth", **kwargs
971 )
972 if len(self.inputsinputs) != 2:
973 raise Exception(
974 "Must have 2 input files: a stdhep file and a lhe file in order"
975 )
977 base, ext = os.path.splitext(self.input_file_1)
978 if ext != ".stdhep":
979 raise Exception("The first input file must be a stdhep file")
981 base, ext = os.path.splitext(self.input_file_2)
982 if ext != ".lhe":
983 raise Exception("The second input file must be a lhe file")
984
985 def cmd_args(self):
986 """!
987 Setup command arguments.
988 @return list of arguments
989 """
990 return super().cmd_args()
991
992
994 """!
995 Merge StdHep files, applying poisson sampling.
996
997 Required parameters are: **target_thickness**, **num_electrons**
998 """
999
1000 def __init__(self, xsec=0, **kwargs):
1001
1002 self.xsec = xsec
1003
1005
1006 self.num_electrons = None
1007
1008 StdHepTool.__init__(self, name="merge_poisson", append_tok="sampled", **kwargs)
1009
1010 def setup(self):
1011 """! Setup MergePoisson component."""
1012 if self.xsec > 0:
1013 self.mu = func.lint(self.target_thickness, self.num_electrons) * self.xsec
1014 else:
1015 raise Exception("Cross section is missing.")
1016 self.logger.info("mu is %f", self.mu)
1017
1019 """!
1020 Return list of required parameters.
1021
1022 Required parameters are: **target_thickness**, **num_electrons**
1023 @return list of required parameters
1024 """
1025 return ["target_thickness", "num_electrons"]
1026
1027 def cmd_args(self):
1028 """!
1029 Setup command arguments.
1030 @return list of arguments
1031 """
1032 args = []
1033 if self.name in StdHepTool.seed_names:
1034 args.extend(["-s", str(self.seedseedseed)])
1035
1036 args.extend(["-m", str(self.mu), "-N", str(1), "-n", str(self.neventsnevents)])
1037
1038 if len(self.output_files()) == 1:
1039 # only use file name, not extension because extension is added by tool
1040 args.insert(0, os.path.splitext(self.output_files()[0])[0])
1041 elif len(self.output_files()) > 1:
1042 raise Exception("Too many outputs specified for MergePoisson.")
1043 else:
1044 raise Exception("No outputs specified for MergePoisson.")
1045
1046 if len(self.input_files()):
1047 for i in self.inputs[::-1]:
1048 args.insert(0, i)
1049 else:
1050 raise Exception("No inputs were provided.")
1051
1052 return args
1053
1054 def execute(self, log_out, log_err):
1055 """! Execute MergePoisson component."""
1056 returncode = Component.execute(self, log_out, log_err)
1057
1058 # Move file from tool to proper output file location.
1059 src = "%s_1.stdhep" % os.path.splitext(self.output_files()[0])[0]
1060 dest = "%s.stdhep" % os.path.splitext(self.output_files()[0])[0]
1061 self.logger.debug("Moving '%s' to '%s'" % (src, dest))
1062 shutil.move(src, dest)
1063
1064 return returncode
1065
1066
1068 """!
1069 Merge StdHep files.
1070
1071 Optional parameters are: none \n
1072 Required parameters are: none
1073 """
1074
1075 def __init__(self, **kwargs):
1076 StdHepTool.__init__(self, name="merge_files", **kwargs)
1077
1079 """!
1080 Return list of optional parameters.
1081
1082 Optional parameters are: none
1083 @return list of optional parameters
1084 """
1085 return []
1086
1088 """!
1089 Return list of required parameters.
1090
1091 Required parameters are: none
1092 @return list of required parameters
1093 """
1094 return []
1095
1096
1098 """!
1099 Count number of events in a StdHep file.
1100 """
1101
1102 def __init__(self, **kwargs):
1103 Component.__init__(
1104 self, name="stdhep_count", command="stdhep_count.sh", **kwargs
1105 )
1106
1107 def cmd_args(self):
1108 """!
1109 Setup command arguments.
1110 @return list of arguments
1111 """
1112
1113 return [self.input_files()[0]]
1114
1115 def execute(self, log_out, log_err):
1116 """! Execute StdHepCount component."""
1117 cl = [self.command]
1118 cl.extend(self.cmd_argscmd_args())
1119 proc = subprocess.Popen(cl, stdout=PIPE)
1120 (output, err) = proc.communicate()
1121
1122 nevents = int(output.split()[1])
1123 print("StdHep file '%s' has %d events." % (self.input_files()[0], nevents))
1124
1125 return proc.returncode
1126
1127
1129 """!
1130 Generic base class for Java based tools.
1131 """
1132
1133 def __init__(self, name, java_class, **kwargs):
1134
1135 self.java_class = java_class
1136
1137 self.java_args = None
1138
1139 self.conditions_url = None
1140 Component.__init__(self, name, "java", **kwargs)
1141
1143 """!
1144 Return list of required config.
1145
1146 Required config are: **hps_java_bin_jar**
1147 @return list of required config
1148 """
1149 return ["hps_java_bin_jar"]
1150
1151 def cmd_args(self):
1152 """!
1153 Setup command arguments.
1154 @return list of arguments
1155 """
1156 args = []
1157 if self.java_args is not None:
1158 self.logger.debug("Setting java_args from config: %s" + self.java_args)
1159 args.append(self.java_args)
1160 if self.conditions_url is not None:
1161 self.logger.debug(
1162 "Setting conditions_url from config: %s" % self.conditions_url
1163 )
1164 args.append("-Dorg.hps.conditions.url=%s" % self.conditions_url)
1165 args.append("-cp")
1166 args.append(self.hps_java_bin_jar)
1167 args.append(self.java_class)
1168 return args
1169
1170 def config(self, parser):
1171 super().config(parser)
1172
1173
1175 """!
1176 Convert EVIO events to LCIO using the hps-java EvioToLcio command line tool.
1177
1178 Input files have evio format (format used by DAQ system).
1179
1180 Required parameters are: **detector**, **steering_files** \n
1181 Optional parameters are: **run_number**, **skip_events**, **nevents**, **event_print_interval**
1182 """
1183
1184 def __init__(self, steering=None, **kwargs):
1185
1186 self.detector = None
1187
1188 self.run_number = None
1189
1190 self.skip_events = None
1191
1193
1194 self.steering = steering
1195
1196 JavaTool.__init__(
1197 self,
1198 name="evio_to_lcio",
1199 java_class="org.hps.evio.EvioToLcio",
1200 output_ext=".slcio",
1201 **kwargs,
1202 )
1203
1205 """!
1206 Return list of required parameters.
1207
1208 Required parameters are: **detector**, **steering_files**
1209 @return list of required parameters
1210 """
1211 return ["detector", "steering_files"]
1212
1214 """!
1215 Return list of optional parameters.
1216
1217 Optional parameters are: **run_number**, **skip_events**, **nevents**, **event_print_interval**
1218 @return list of optional parameters
1219 """
1220 return ["run_number", "skip_events", "nevents", "event_print_interval"]
1221
1222 def setup(self):
1223 """! Setup EvioToLcio component."""
1224 super().setup()
1225 if self.steering not in self.steering_files:
1226 raise Exception(
1227 "Steering '%s' not found in: %s" % (self.steering, self.steering_files)
1228 )
1230
1231 def cmd_args(self):
1232 """!
1233 Setup command arguments.
1234 @return list of arguments
1235 """
1236 args = JavaTool.cmd_args(self)
1237 if not len(self.output_files()):
1238 raise Exception("No output files were provided.")
1239 output_file = self.output_files()[0]
1240 args.append("-DoutputFile=%s" % os.path.splitext(output_file)[0])
1241 args.extend(["-d", self.detector])
1242 if self.run_number is not None:
1243 args.extend(["-R", str(self.run_number)])
1244 if self.skip_events is not None:
1245 args.extend(["-s", str(self.skip_events)])
1246
1247 if not os.path.isfile(self.steering_file):
1248 args.append("-r")
1249 self.logger.debug(
1250 "Steering does not exist at '%s' so assuming it is a resource."
1251 % self.steering_file
1252 )
1253 else:
1254 if not os.path.isabs(self.steering_file):
1255 raise Exception(
1256 "Steering looks like a file but is not an abs path: %s"
1257 % self.steering_file
1258 )
1259 args.extend(["-x", self.steering_file])
1260
1261 if self.neventsnevents is not None:
1262 args.extend(["-n", str(self.neventsnevents)])
1263
1264 args.append("-b")
1265
1266 for inputfile in self.input_files():
1267 args.append(inputfile)
1268
1269 if self.event_print_interval is not None:
1270 args.extend(["-e", str(self.event_print_interval)])
1271
1272 return args
1273
1274
1276 """!
1277 Space MC events and apply energy filters to process before readout.
1278
1279 Optional parameters are: **filter_ecal_hit_ecut**, **filter_event_interval**,
1280 **filter_nevents_read**, **filter_nevents_write**, **filter_no_cuts** \n
1281 Required config are: **hps_java_bin_jar**
1282 """
1283
1284 def __init__(self, **kwargs):
1285 if "filter_no_cuts" in kwargs:
1286 self.filter_no_cuts = kwargs["filter_no_cuts"]
1287 else:
1288
1289 self.filter_no_cuts = False
1290
1291 if "filter_ecal_pairs" in kwargs:
1292 self.filter_ecal_pairs = kwargs["filter_ecal_pairs"]
1293 else:
1294 self.filter_ecal_pairs = False
1295
1296 if "filter_ecal_hit_ecut" in kwargs:
1297 self.filter_ecal_hit_ecut = kwargs["filter_ecal_hit_ecut"]
1298 else:
1299
1300 self.filter_ecal_hit_ecut = -1.0
1301 # self.filter_ecal_hit_ecut = 0.05
1302
1303 if "filter_event_interval" in kwargs:
1304 self.filter_event_interval = kwargs["filter_event_interval"]
1305 else:
1306
1307 self.filter_event_interval = 250
1308
1309 if "filter_nevents_read" in kwargs:
1310 self.filter_nevents_read = kwargs["filter_nevents_read"]
1311 else:
1312
1313 self.filter_nevents_read = -1
1314
1315 if "filter_nevents_write" in kwargs:
1316 self.filter_nevents_write = kwargs["filter_nevents_write"]
1317 else:
1318
1319 self.filter_nevents_write = -1
1320
1322
1323 JavaTool.__init__(
1324 self,
1325 name="filter_bunches",
1326 java_class="org.hps.util.FilterMCBunches",
1327 append_tok="filt",
1328 **kwargs,
1329 )
1330
1331 def config(self, parser):
1332 """! Configure FilterBunches component."""
1333 super().config(parser)
1334 if self.hps_java_bin_jarhps_java_bin_jar is None:
1335 if os.getenv("HPS_JAVA_BIN_JAR", None) is not None:
1336 self.hps_java_bin_jarhps_java_bin_jar = os.getenv("HPS_JAVA_BIN_JAR", None)
1337 self.logger.debug(
1338 "Set HPS_JAVA_BIN_JAR from environment: {}".format(
1340 )
1341 )
1342
1343 def cmd_args(self):
1344 """!
1345 Setup command arguments.
1346 @return list of arguments
1347 """
1348 args = JavaTool.cmd_args(self)
1349 args.append("-e")
1350 args.append(str(self.filter_event_interval))
1351 for i in self.input_files():
1352 args.append(i)
1353 args.append(self.output_files()[0])
1354 if self.filter_ecal_pairs:
1355 args.append("-d")
1356 if self.filter_ecal_hit_ecut > 0:
1357 args.append("-E")
1358 args.append(str(self.filter_ecal_hit_ecut))
1359 if self.filter_nevents_read > 0:
1360 args.append("-n")
1361 args.append(str(self.filter_nevents_read))
1362 if self.filter_nevents_write > 0:
1363 args.append("-w")
1364 args.append(str(self.filter_nevents_write))
1365 if self.filter_no_cuts:
1366 args.append("-a")
1367 return args
1368
1370 """!
1371 Return list of optional parameters.
1372
1373 Optional parameters are: **filter_ecal_hit_ecut**, **filter_event_interval**,
1374 **filter_nevents_read**, **filter_nevents_write**, **filter_no_cuts** \n
1375 @return list of optional parameters
1376 """
1377 return [
1378 "filter_ecal_hit_ecut",
1379 "filter_event_interval",
1380 "filter_nevents_read",
1381 "filter_nevents_write",
1382 "filter_no_cuts",
1383 ]
1384
1386 """!
1387 Return list of required config.
1388
1389 Required config are: **hps_java_bin_jar**
1390 @return list of required config
1391 """
1392 return ["hps_java_bin_jar"]
1393
1394
1396 """!
1397 Apply hodo-hit filter and space MC events to process before readout.
1398
1399 The nevents parameter is not settable from JSON in this class. It should
1400 be supplied as an init argument in the job script if it needs to be
1401 customized (the default nevents and event_interval used to apply spacing
1402 should usually not need to be changed by the user). \n
1403
1404 Optional parameters are: **num_hodo_hits**, **event_interval**
1405 """
1406
1407 def __init__(self, **kwargs):
1408 if "num_hodo_hits" in kwargs:
1409 self.num_hodo_hits = kwargs["num_hodo_hits"]
1410 else:
1411 self.num_hodo_hits = 0
1412
1413 if "event_interval" in kwargs:
1414 self.event_interval = kwargs["event_interval"]
1415 else:
1416 self.event_interval = 250
1417
1418 JavaTool.__init__(
1419 self,
1420 name="filter_events",
1421 java_class="org.hps.util.ExtractEventsWithHitAtHodoEcal",
1422 append_tok="filt",
1423 **kwargs,
1424 )
1425
1426 def cmd_args(self):
1427 """!
1428 Setup command arguments.
1429 @return list of arguments
1430 """
1431 args = JavaTool.cmd_args(self)
1432 args.append("-e")
1433 args.append(str(self.event_interval))
1434 for i in self.input_files():
1435 args.append(i)
1436 args.append(self.output_files()[0])
1437 if self.num_hodo_hits > 0:
1438 args.append("-M")
1439 args.append(str(self.num_hodo_hits))
1440 if self.neventsnevents:
1441 args.append("-w")
1442 args.append(str(self.neventsnevents))
1443 return args
1444
1446 """!
1447 Return list of optional parameters.
1448
1449 Optional parameters are: **num_hodo_hits**, **event_interval**
1450 @return list of optional parameters
1451 """
1452 return ["num_hodo_hits", "event_interval"]
1453
1454
1456 """!
1457 Unzip the input files to outputs.
1458 """
1459
    def __init__(self, **kwargs):
        """! Initialize the Unzip component (nominal command is gunzip; execute()
        actually decompresses with the gzip module)."""
        Component.__init__(self, name="unzip", command="gunzip", **kwargs)
1462
1463 def output_files(self):
1464 """! Return list of output files."""
1465 if self.outputs:
1466 return self.outputs
1467 return [os.path.splitext(i)[0] for i in self.input_files()]
1468
1469 def execute(self, log_out, log_err):
1470 """! Execute Unzip component."""
1471 for i in range(0, len(self.input_files())):
1472 inputfile = self.input_files()[i]
1473 outputfile = self.output_filesoutput_files()[i]
1474 with gzip.open(inputfile, "rb") as in_file, open(
1475 outputfile, "wb"
1476 ) as out_file:
1477 shutil.copyfileobj(in_file, out_file)
1478 self.logger.debug("Unzipped '%s' to '%s'" % (inputfile, outputfile))
1479 return 0
1480
1481
1483 """!
1484 Dump LCIO event information.
1485
1486 Required parameters are: none \n
1487 Required config are: **lcio_dir**
1488 """
1489
1490 def __init__(self, **kwargs):
1491
1492 self.lcio_dir = None
1493 Component.__init__(self, name="lcio_dump_event", command="dumpevent", **kwargs)
1494
1495 if "event_num" in kwargs:
1496 self.event_num = kwargs["event_num"]
1497 else:
1498 self.event_num = 1
1499
    def config(self, parser):
        """! Configure LCIODumpEvent component.

        Falls back to the hpsmc installation dir when lcio_dir was not
        provided by the config parser.
        """
        super().config(parser)
        if self.lcio_dir is None:
            self.lcio_dir = self.hpsmc_dir
1505
1506 def setup(self):
1507 """! Setup LCIODumpEvent component."""
1508 self.commandcommand = self.lcio_dir + "/bin/dumpevent"
1509
1510 def cmd_args(self):
1511 """!
1512 Setup command arguments.
1513 @return list of arguments
1514 """
1515 if not len(self.input_files()):
1516 raise Exception("Missing required inputs for LCIODumpEvent.")
1517 args = []
1518 args.append(self.input_files()[0])
1519 args.append(str(self.event_num))
1520 return args
1521
1523 """!
1524 Return list of required config.
1525
1526 Required config are: **lcio_dir**
1527 @return list of required config
1528 """
1529 return ["lcio_dir"]
1530
1532 """!
1533 Return list of required parameters.
1534
1535 Required parameters are: none
1536 @return list of required parameters
1537 """
1538 return []
1539
1540
1542 """!
1543 Count events in an LHE file.
1544 """
1545
1546 def __init__(self, minevents=0, fail_on_underflow=False, **kwargs):
1547 self.minevents = minevents
1548 Component.__init__(self, name="lhe_count", **kwargs)
1549
    def setup(self):
        """! Setup LHECount component.
        @raise Exception if there are no input files
        """
        if not len(self.input_files()):
            raise Exception("Missing at least one input file.")
1554
    def cmd_exists(self):
        """!
        Check if command exists.

        LHECount runs no external command, so this always succeeds.
        @return True if command exists
        """
        return True
1561
1562 def execute(self, log_out, log_err):
1563 """! Execute LHECount component."""
1564 for i in self.inputs:
1565 with gzip.open(i, "rb") as in_file:
1566 lines = in_file.readlines()
1567
1568 nevents = 0
1569 for line in lines:
1570 if "<event>" in line:
1571 nevents += 1
1572
1573 print("LHE file '%s' has %d events." % (i, nevents))
1574
1575 if nevents < self.minevents:
1576 msg = "LHE file '%s' does not contain the minimum %d events." % (
1577 i,
1578 nevents,
1579 )
1580 if self.fail_on_underflow:
1581 raise Exception(msg)
1582 else:
1583 self.logger.warning(msg)
1584 return 0
1585
1586
1588 """!
1589 Tar files into an archive.
1590 """
1591
    def __init__(self, **kwargs):
        """! Initialize the TarFiles component (pure Python; no external command)."""
        Component.__init__(self, name="tar_files", **kwargs)
1594
    def cmd_exists(self):
        """!
        Check if command exists.

        TarFiles uses the tarfile module rather than an external command,
        so this always succeeds.
        @return True if command exists
        """
        return True
1601
1602 def execute(self, log_out, log_err):
1603 """! Execute TarFiles component."""
1604 self.logger.debug("Opening '%s' for writing ..." % self.outputs[0])
1605 tar = tarfile.open(self.outputs[0], "w")
1606 for i in self.inputs:
1607 self.logger.debug("Adding '%s' to archive" % i)
1608 tar.add(i)
1609 tar.close()
1610 self.logger.info("Wrote archive '%s'" % self.outputs[0])
1611 return 0
1612
1613
1615 """!
1616 Move input files to new locations.
1617 """
1618
    def __init__(self, **kwargs):
        """! Initialize the MoveFiles component (pure Python; no external command)."""
        Component.__init__(self, name="move_files", **kwargs)
1621
    def cmd_exists(self):
        """!
        Check if command exists.

        MoveFiles uses shutil rather than an external command, so this
        always succeeds.
        @return True if command exists
        """
        return True
1628
1629 def execute(self, log_out, log_err):
1630 """! Execute TarFiles component."""
1631 if len(self.inputsinputs) != len(self.outputsoutputs):
1632 raise Exception("Input and output lists are not the same length!")
1633 for io in zip(self.inputsinputs, self.outputsoutputs):
1634 src = io[0]
1635 dest = io[1]
1636 self.logger.info("Moving %s -> %s" % (src, dest))
1637 shutil.move(src, dest)
1638 return 0
1639
1640
1642 """!
1643 Generic component for LCIO tools.
1644
1645 Required parameters are: none \n
1646 Required config are: **lcio_bin_jar**
1647 """
1648
    def __init__(self, name=None, **kwargs):
        """! Initialize the LCIOTool.
        @param name  LCIO sub-command to run (e.g. 'concat', 'count', 'merge')
        """
        # Path to the LCIO bin jar; resolved in config() when not set.
        self.lcio_bin_jar = None
        Component.__init__(self, name, command="java", **kwargs)
1653
    def config(self, parser):
        """! Configure LCIOTool component.

        Falls back to environment variables (via config_from_environ) when
        lcio_bin_jar was not provided by the config parser.
        """
        super().config(parser)
        if self.lcio_bin_jar is None:
            self.config_from_environ()
1659
1660 def cmd_args(self):
1661 """!
1662 Setup command arguments.
1663 @return list of arguments
1664 """
1665 if not self.name:
1666 raise Exception("Name required to write cmd args for LCIOTool.")
1667 return ["-jar", self.lcio_bin_jar, self.name]
1668
1670 """!
1671 Return list of required config.
1672
1673 Required config are: **lcio_bin_jar**
1674 @return list of required config
1675 """
1676 return ["lcio_bin_jar"]
1677
1679 """!
1680 Return list of required parameters.
1681
1682 Required parameters are: none
1683 @return list of required parameters
1684 """
1685 return []
1686
1687
1689 """!
1690 Concatenate LCIO files together.
1691 """
1692
    def __init__(self, **kwargs):
        """! Initialize with the LCIO 'concat' sub-command."""
        LCIOTool.__init__(self, name="concat", **kwargs)
1695
1696 def cmd_args(self):
1697 """!
1698 Setup command arguments.
1699 @return list of arguments
1700 """
1701 args = LCIOTool.cmd_args(self)
1702 if not len(self.input_files()):
1703 raise Exception("Missing at least one input file.")
1704 if not len(self.output_files()):
1705 raise Exception("Missing an output file.")
1706 for i in self.input_files():
1707 args.extend(["-f", i])
1708 args.extend(["-o", self.outputs[0]])
1709 return args
1710
1711
1713 """!
1714 Count events in LCIO files.
1715
1716 Required parameters are: none \n
1717 Optional parameters are: none
1718 """
1719
    def __init__(self, **kwargs):
        """! Initialize with the LCIO 'count' sub-command."""
        LCIOTool.__init__(self, name="count", **kwargs)
1722
1723 def cmd_args(self):
1724 """!
1725 Setup command arguments.
1726 @return list of arguments
1727 """
1728 args = LCIOTool.cmd_args(self)
1729 if not len(self.inputsinputs):
1730 raise Exception("Missing an input file.")
1731 args.extend(["-f", self.inputsinputs[0]])
1732 return args
1733
1735 """!
1736 Return list of required parameters.
1737
1738 Required parameters are: none
1739 @return list of required parameters
1740 """
1741 return []
1742
1744 """!
1745 Return list of optional parameters.
1746
1747 Optional parameters are: none
1748 @return list of optional parameters
1749 """
1750 return []
1751
1752
1754 """!
1755 Merge LCIO files.
1756 """
1757
    def __init__(self, **kwargs):
        """! Initialize with the LCIO 'merge' sub-command."""
        LCIOTool.__init__(self, name="merge", **kwargs)
1760
1761 def cmd_args(self):
1762 """!
1763 Setup command arguments.
1764 @return list of arguments
1765 """
1766 args = LCIOTool.cmd_args(self)
1767 if not len(self.input_files()):
1768 raise Exception("Missing at least one input file.")
1769 if not len(self.output_files()):
1770 raise Exception("Missing an output file.")
1771 for i in self.input_files():
1772 args.extend(["-f", i])
1773 args.extend(["-o", self.outputs[0]])
1774 if self.neventsnevents is not None:
1775 args.extend(["-n", str(self.neventsnevents)])
1776 return args
1777
1778
1779"""
1780MergeROOT tool for hps-mc
1781Merges ROOT files using hadd with validation
1782"""
1783
1784
1786 """
1787 Merge ROOT files using hadd with event count validation.
1788
1789 This component uses ROOT's hadd utility to merge multiple ROOT files
1790 into a single output file, and validates that all events are preserved.
1791 """
1792
1793 def __init__(self, **kwargs):
1794 """
1795 Initialize MergeROOT component.
1796
1797 Parameters
1798 ----------
1799 inputs : list
1800 List of input ROOT files to merge
1801 outputs : list
1802 List containing the output merged ROOT file name
1803 force : bool, optional
1804 Force overwrite of output file (default: True)
1805 compression : int, optional
1806 Compression level for output file (0-9, default: None uses hadd default)
1807 validate : bool, optional
1808 Validate event counts after merge (default: True)
1809 write_stats : bool, optional
1810 Write JSON stats file after merge (default: True when validate=True)
1811 job_id : int, optional
1812 Job ID to include in stats output
1813 """
1814 Component.__init__(self, **kwargs)
1815
1816 # Set default command
1817 if not hasattr(self, "command") or self.commandcommand is None:
1818 self.commandcommand = "hadd"
1819
1820 # Set force overwrite by default
1821 if not hasattr(self, "force"):
1822 self.force = True
1823
1824 # Optional compression level
1825 if not hasattr(self, "compression"):
1826 self.compression = None
1827
1828 # Enable validation by default
1829 if not hasattr(self, "validate"):
1830 self.validate = True
1831
1832 # Write stats JSON (default: True when validate=True)
1833 if not hasattr(self, "write_stats"):
1835
1836 # Optional job ID for stats output
1837 if not hasattr(self, "job_id"):
1838 self.job_id = None
1839
1840 # Store event counts
1843
1844 # Track validation result
1846
1847 def cmd_args(self):
1848 """
1849 Build command line arguments for hadd.
1850
1851 Returns
1852 -------
1853 list
1854 List of command arguments
1855 """
1856 import sys
1857 sys.stderr.write("MergeROOT DEBUG: cmd_args() called\n")
1858 sys.stderr.write(" self.force=%s, self.compression=%s\n" % (self.force, self.compression))
1859 sys.stderr.write(" self.inputs=%s\n" % self.inputsinputs)
1860 sys.stderr.write(" self.outputs=%s\n" % self.outputsoutputs)
1861 sys.stderr.flush()
1862
1863 args = []
1864
1865 # Add force flag if enabled
1866 if self.force:
1867 args.append("-f")
1868
1869 # Add compression level if specified
1870 if self.compression is not None:
1871 args.extend(["-fk", "-f%d" % self.compression])
1872
1873 # Add output file
1874 if self.outputsoutputs and len(self.outputsoutputs) > 0:
1875 args.append(self.outputsoutputs[0])
1876 else:
1877 sys.stderr.write("MergeROOT DEBUG: ERROR - No output file specified!\n")
1878 sys.stderr.flush()
1879 raise RuntimeError("MergeROOT: No output file specified")
1880
1881 # Add input files
1882 if self.inputsinputs and len(self.inputsinputs) > 0:
1883 args.extend(self.inputsinputs)
1884 else:
1885 sys.stderr.write("MergeROOT DEBUG: ERROR - No input files specified!\n")
1886 sys.stderr.flush()
1887 raise RuntimeError("MergeROOT: No input files specified")
1888
1889 sys.stderr.write("MergeROOT DEBUG: cmd_args() returning: %s\n" % args)
1890 sys.stderr.flush()
1891 return args
1892
    def scan_root_file(self, filename, log_out=None):
        """
        Scan a ROOT file and extract TTree event counts.

        Parameters
        ----------
        filename : str
            Path to ROOT file
        log_out : file, optional
            Log file for output (used to report multiple key cycles)

        Returns
        -------
        dict
            Dictionary mapping tree names to entry counts

        Raises
        ------
        RuntimeError
            If PyROOT is not importable or the file cannot be opened.
        """
        # Import lazily so the component can still run (with validation
        # disabled) in environments without PyROOT.
        try:
            import ROOT
        except ImportError:
            raise RuntimeError(
                "MergeROOT: PyROOT is required for validation but not available"
            )

        tree_counts = {}
        tree_cycles = {}  # Track cycle numbers: {tree_name: [(cycle, entries), ...]}

        # Open ROOT file
        root_file = ROOT.TFile.Open(filename, "READ")
        if not root_file or root_file.IsZombie():
            raise RuntimeError("MergeROOT: Cannot open ROOT file: %s" % filename)

        # Iterate through all keys in the file
        for key in root_file.GetListOfKeys():
            obj = key.ReadObj()

            # Check if it's a TTree
            if obj.InheritsFrom("TTree"):
                tree_name = obj.GetName()
                cycle = key.GetCycle()
                num_entries = obj.GetEntries()

                if tree_name not in tree_cycles:
                    tree_cycles[tree_name] = []
                tree_cycles[tree_name].append((cycle, num_entries))

        root_file.Close()

        # Process collected cycles - use highest cycle number for each tree
        # (a file may store several key cycles of the same tree; only the
        # entry count of the highest cycle is kept).
        for tree_name, cycles in tree_cycles.items():
            if len(cycles) > 1:
                # Sort by cycle number (highest first)
                cycles.sort(key=lambda x: x[0], reverse=True)
                highest_cycle, highest_entries = cycles[0]
                if log_out:
                    log_out.write(" WARNING: Multiple key cycles found for tree '%s':\n" % tree_name)
                    for cyc, ent in cycles:
                        marker = " <-- using" if cyc == highest_cycle else ""
                        log_out.write(" Cycle %d: %d entries%s\n" % (cyc, ent, marker))
                tree_counts[tree_name] = highest_entries
            else:
                tree_counts[tree_name] = cycles[0][1]

        return tree_counts
1956
1957 def scan_input_files(self, log_out):
1958 """
1959 Scan all input files and store tree event counts.
1960
1961 Parameters
1962 ----------
1963 log_out : file
1964 Log file for output
1965 """
1966 log_out.write("\n" + "=" * 70 + "\n")
1967 log_out.write("MergeROOT: Scanning input files for TTrees\n")
1968 log_out.write("=" * 70 + "\n")
1969
1970 for input_file in self.inputsinputs:
1971 if not os.path.exists(input_file):
1972 raise RuntimeError("MergeROOT: Input file not found: %s" % input_file)
1973
1974 log_out.write("\nScanning: %s\n" % input_file)
1975 tree_counts = self.scan_root_file(input_file, log_out)
1976
1977 if not tree_counts:
1978 log_out.write(" WARNING: No TTrees found in this file\n")
1979 else:
1980 for tree_name, count in tree_counts.items():
1981 log_out.write(" Tree '%s': %d events\n" % (tree_name, count))
1982
1983 self.input_tree_counts[input_file] = tree_counts
1984
1985 log_out.write("\n" + "=" * 70 + "\n")
1986 log_out.flush()
1987
1988 def scan_output_file(self, log_out):
1989 """
1990 Scan output file and store tree event counts.
1991
1992 Parameters
1993 ----------
1994 log_out : file
1995 Log file for output
1996 """
1997 output_file = self.outputsoutputs[0]
1998
1999 log_out.write("\n" + "=" * 70 + "\n")
2000 log_out.write("MergeROOT: Scanning output file for TTrees\n")
2001 log_out.write("=" * 70 + "\n")
2002 log_out.write("\nScanning: %s\n" % output_file)
2003
2004 self.output_tree_counts = self.scan_root_file(output_file, log_out)
2005
2006 if not self.output_tree_counts:
2007 log_out.write(" WARNING: No TTrees found in output file\n")
2008 else:
2009 for tree_name, count in self.output_tree_counts.items():
2010 log_out.write(" Tree '%s': %d events\n" % (tree_name, count))
2011
2012 log_out.write("\n" + "=" * 70 + "\n")
2013 log_out.flush()
2014
    def validate_merge(self, log_out):
        """
        Validate that event counts match between input and output files.

        Compares, per tree, the sum of entries across all scanned input
        files against the entries found in the merged output file.

        Parameters
        ----------
        log_out : file
            Log file for output

        Returns
        -------
        bool
            True if validation passes, False otherwise
        """
        log_out.write("\n" + "=" * 70 + "\n")
        log_out.write("MergeROOT: Validating merge results\n")
        log_out.write("=" * 70 + "\n\n")

        # Calculate sum of events per tree across all input files
        total_input_counts = {}

        for input_file, tree_counts in self.input_tree_counts.items():
            for tree_name, count in tree_counts.items():
                if tree_name not in total_input_counts:
                    total_input_counts[tree_name] = 0
                total_input_counts[tree_name] += count

        # Check that all input trees are in output
        all_valid = True

        # Nothing to compare against: treat as a (vacuous) pass.
        if not total_input_counts:
            log_out.write("WARNING: No TTrees found in input files\n")
            return True

        log_out.write("Event count validation:\n")
        log_out.write("-" * 70 + "\n")
        log_out.write(
            "%-30s %15s %15s %10s\n"
            % ("Tree Name", "Input Events", "Output Events", "Status")
        )
        log_out.write("-" * 70 + "\n")

        for tree_name, input_count in sorted(total_input_counts.items()):
            output_count = self.output_tree_counts.get(tree_name, 0)

            if output_count == input_count:
                status = "✓ PASS"
            else:
                status = "✗ FAIL"
                all_valid = False

            log_out.write(
                "%-30s %15d %15d %10s\n"
                % (tree_name, input_count, output_count, status)
            )

        # Check for trees in output that weren't in input
        extra_trees = set(self.output_tree_counts.keys()) - set(
            total_input_counts.keys()
        )
        if extra_trees:
            log_out.write("\nWARNING: Output contains trees not found in inputs:\n")
            for tree_name in extra_trees:
                log_out.write(
                    " - %s: %d events\n"
                    % (tree_name, self.output_tree_counts[tree_name])
                )

        log_out.write("-" * 70 + "\n")

        if all_valid:
            log_out.write("\n✓ VALIDATION PASSED: All event counts match!\n")
        else:
            log_out.write("\n✗ VALIDATION FAILED: Event count mismatch detected!\n")

        log_out.write("=" * 70 + "\n\n")
        log_out.flush()

        return all_valid
2094
2095 def print_summary(self, log_out):
2096 """
2097 Print a summary of the merge operation.
2098
2099 Parameters
2100 ----------
2101 log_out : file
2102 Log file for output
2103 """
2104 log_out.write("\n" + "=" * 70 + "\n")
2105 log_out.write("MergeROOT: Summary\n")
2106 log_out.write("=" * 70 + "\n")
2107 log_out.write("Input files: %d\n" % len(self.inputsinputs))
2108
2109 for i, input_file in enumerate(self.inputsinputs, 1):
2110 log_out.write(" %d. %s\n" % (i, input_file))
2111
2112 log_out.write("\nOutput file: %s\n" % self.outputsoutputs[0])
2113 log_out.write(
2114 "Compression level: %s\n"
2115 % (self.compression if self.compression else "default")
2116 )
2117
2118 # Print total events per tree
2119 if self.output_tree_counts:
2120 log_out.write("\nTotal events in merged file:\n")
2121 for tree_name, count in sorted(self.output_tree_counts.items()):
2122 log_out.write(" %-30s: %d events\n" % (tree_name, count))
2123
2124 log_out.write("=" * 70 + "\n")
2125 log_out.flush()
2126
2128 """
2129 Get the stats JSON filename based on the output ROOT filename.
2130
2131 Returns
2132 -------
2133 str
2134 Path to stats JSON file (e.g., 'merged_X_job1.root' -> 'merged_X_job1_stats.json')
2135 """
2136 if not self.outputsoutputs or len(self.outputsoutputs) == 0:
2137 return None
2138 output_file = self.outputsoutputs[0]
2139 base, _ = os.path.splitext(output_file)
2140 return base + "_stats.json"
2141
2142 def write_stats_json(self, log_out, validation_passed):
2143 """
2144 Write merge statistics to a JSON file.
2145
2146 Parameters
2147 ----------
2148 log_out : file
2149 Log file for output
2150 validation_passed : bool
2151 Whether the validation passed
2152 """
2153 stats_file = self.get_stats_filename()
2154 if stats_file is None:
2155 log_out.write("WARNING: Cannot determine stats filename, skipping stats output\n")
2156 return
2157
2158 log_out.write("\n" + "=" * 70 + "\n")
2159 log_out.write("MergeROOT: Writing stats to %s\n" % stats_file)
2160 log_out.write("=" * 70 + "\n")
2161
2162 # Calculate total input events per tree
2163 total_input_events = {}
2164 for input_file, tree_counts in self.input_tree_counts.items():
2165 for tree_name, count in tree_counts.items():
2166 if tree_name not in total_input_events:
2167 total_input_events[tree_name] = 0
2168 total_input_events[tree_name] += count
2169
2170 # Build input files list with event counts
2171 input_files_list = []
2172 for input_file in self.inputsinputs:
2173 tree_counts = self.input_tree_counts.get(input_file, {})
2174 input_files_list.append({
2175 "path": input_file,
2176 "events": tree_counts
2177 })
2178
2179 # Build stats dictionary
2180 stats = {
2181 "job_id": self.job_id,
2182 "output_file": self.outputsoutputs[0] if self.outputsoutputs else None,
2183 "output_events": self.output_tree_counts,
2184 "input_files": input_files_list,
2185 "total_input_events": total_input_events,
2186 "validation_passed": validation_passed,
2187 "num_input_files": len(self.inputsinputs)
2188 }
2189
2190 # Write JSON file
2191 with open(stats_file, 'w') as f:
2192 json.dump(stats, f, indent=2)
2193
2194 log_out.write("Stats written successfully\n")
2195 log_out.write("=" * 70 + "\n")
2196 log_out.flush()
2197
2198 def execute(self, log_out, log_err):
2199 """
2200 Execute MergeROOT component using hadd.
2201
2202 Parameters
2203 ----------
2204 log_out : file
2205 Log file for stdout
2206 log_err : file
2207 Log file for stderr
2208
2209 Returns
2210 -------
2211 int
2212 Return code from hadd command
2213 """
2214 # Debug: Entry point
2215 log_out.write("\n" + "=" * 70 + "\n")
2216 log_out.write("MergeROOT: DEBUG - Entering execute()\n")
2217 log_out.write("=" * 70 + "\n")
2218 log_out.write("DEBUG: self.command = %s\n" % self.commandcommand)
2219 log_out.write("DEBUG: self.inputs = %s\n" % self.inputsinputs)
2220 log_out.write("DEBUG: self.outputs = %s\n" % self.outputsoutputs)
2221 log_out.write("DEBUG: self.force = %s\n" % self.force)
2222 log_out.write("DEBUG: self.compression = %s\n" % self.compression)
2223 log_out.write("DEBUG: self.validate = %s\n" % self.validate)
2224 log_out.flush()
2225
2226 # Check that hadd command exists
2227 log_out.write("\nDEBUG: Checking if hadd command exists...\n")
2228 log_out.flush()
2229 if not self.cmd_exists():
2230 raise RuntimeError("MergeROOT: hadd command not found in PATH")
2231 log_out.write("DEBUG: hadd command found\n")
2232 log_out.flush()
2233
2234 # Check that input files exist
2235 log_out.write("\nDEBUG: Checking input files exist...\n")
2236 log_out.flush()
2237 for input_file in self.inputsinputs:
2238 log_out.write("DEBUG: Checking: %s\n" % input_file)
2239 log_out.flush()
2240 if not os.path.exists(input_file):
2241 raise RuntimeError("MergeROOT: Input file not found: %s" % input_file)
2242 log_out.write("DEBUG: -> exists (size: %d bytes)\n" % os.path.getsize(input_file))
2243 log_out.flush()
2244
2245 # Scan input files before merge if validation is enabled
2246 log_out.write("\nDEBUG: Validation enabled = %s\n" % self.validate)
2247 log_out.flush()
2248 if self.validate:
2249 try:
2250 log_out.write("DEBUG: Starting input file scan...\n")
2251 log_out.flush()
2252 self.scan_input_files(log_out)
2253 log_out.write("DEBUG: Input file scan complete\n")
2254 log_out.flush()
2255 except Exception as e:
2256 log_out.write("\nWARNING: Could not scan input files: %s\n" % str(e))
2257 log_out.write("Proceeding with merge without validation.\n")
2258 self.validate = False
2259
2260 # Build full command
2261 log_out.write("\nDEBUG: Building command arguments...\n")
2262 log_out.flush()
2263 cmd = [self.commandcommand] + self.cmd_argscmd_args()
2264 log_out.write("DEBUG: cmd_args() returned: %s\n" % self.cmd_argscmd_args())
2265 log_out.flush()
2266
2267 # Log the command
2268 log_out.write("\n" + "=" * 70 + "\n")
2269 log_out.write("MergeROOT: Executing hadd\n")
2270 log_out.write("=" * 70 + "\n")
2271 log_out.write("Command: %s\n" % " ".join(cmd))
2272 log_out.write("=" * 70 + "\n\n")
2273 log_out.flush()
2274
2275 # Execute hadd
2276 log_out.write("DEBUG: About to call subprocess.Popen...\n")
2277 log_out.flush()
2278 proc = subprocess.Popen(cmd, stdout=log_out, stderr=log_err)
2279 log_out.write("DEBUG: Popen returned, PID = %s\n" % proc.pid)
2280 log_out.flush()
2281 log_out.write("DEBUG: Waiting for process to complete...\n")
2282 log_out.flush()
2283 proc.wait()
2284 log_out.write("DEBUG: Process completed, returncode = %d\n" % proc.returncode)
2285 log_out.flush()
2286
2287 # Check return code
2288 if proc.returncode != 0:
2289 log_out.write("DEBUG: hadd FAILED with return code %d\n" % proc.returncode)
2290 log_out.flush()
2291 raise RuntimeError(
2292 "MergeROOT: hadd failed with return code %d" % proc.returncode
2293 )
2294
2295 # Verify output file was created
2296 log_out.write("DEBUG: Checking if output file exists: %s\n" % self.outputsoutputs[0])
2297 log_out.flush()
2298 if not os.path.exists(self.outputsoutputs[0]):
2299 raise RuntimeError(
2300 "MergeROOT: Output file was not created: %s" % self.outputsoutputs[0]
2301 )
2302 log_out.write("DEBUG: Output file exists, size = %d bytes\n" % os.path.getsize(self.outputsoutputs[0]))
2303 log_out.flush()
2304
2305 log_out.write("\n✓ hadd completed successfully\n")
2306 log_out.flush()
2307
2308 # Scan output file and validate if enabled
2309 log_out.write("\nDEBUG: Post-merge validation check, self.validate = %s\n" % self.validate)
2310 log_out.flush()
2311 validation_passed = True
2312 if self.validate:
2313 try:
2314 log_out.write("DEBUG: Starting output file scan...\n")
2315 log_out.flush()
2316 self.scan_output_file(log_out)
2317 log_out.write("DEBUG: Output file scan complete\n")
2318 log_out.flush()
2319 log_out.write("DEBUG: Starting merge validation...\n")
2320 log_out.flush()
2321 validation_passed = self.validate_merge(log_out)
2322 self._validation_passed = validation_passed
2323 log_out.write("DEBUG: Merge validation complete, passed = %s\n" % validation_passed)
2324 log_out.flush()
2325
2326 if not validation_passed:
2327 raise RuntimeError("MergeROOT: Event count validation failed!")
2328
2329 except Exception as e:
2330 log_out.write("\nERROR during validation: %s\n" % str(e))
2331 log_out.flush()
2332 raise
2333
2334 # Write stats JSON if enabled
2335 log_out.write("\nDEBUG: write_stats = %s\n" % self.write_stats)
2336 log_out.flush()
2337 if self.write_stats:
2338 try:
2339 self.write_stats_json(log_out, validation_passed)
2340 except Exception as e:
2341 log_out.write("\nWARNING: Could not write stats JSON: %s\n" % str(e))
2342 log_out.flush()
2343
2344 # Print summary
2345 log_out.write("\nDEBUG: Printing summary...\n")
2346 log_out.flush()
2347 self.print_summary(log_out)
2348
2349 log_out.write("\nDEBUG: MergeROOT.execute() returning %d\n" % proc.returncode)
2350 log_out.flush()
2351 return proc.returncode
2352
2353 def output_files(self):
2354 """
2355 Return list of output files.
2356
2357 Returns
2358 -------
2359 list
2360 List containing the merged output ROOT file and optionally the stats JSON
2361 """
2362 files = list(self.outputsoutputs) if self.outputsoutputs else []
2363 if self.write_stats:
2364 stats_file = self.get_stats_filename()
2365 if stats_file and stats_file not in files:
2366 files.append(stats_file)
2367 return files
2368
2370 """
2371 Return list of required configuration parameters.
2372
2373 Returns
2374 -------
2375 list
2376 List of required config parameters (empty for MergeROOT)
2377 """
2378 return []
Base class for components in a job.
Definition component.py:15
output_files(self)
Return a list of output files created by this component.
Definition component.py:233
config_from_environ(self)
Configure component from environment variables which are just upper case versions of the required con...
Definition component.py:258
cmd_exists(self)
Check if the component's assigned command exists.
Definition component.py:96
cmd_args(self)
Return the command arguments of this component.
Definition component.py:108
input_files(self)
Get a list of input files for this component.
Definition component.py:229
Add full truth mother particles for physics samples.
Definition tools.py:965
__init__(self, **kwargs)
Definition tools.py:968
cmd_args(self)
Setup command arguments.
Definition tools.py:985
Add mother particles for physics samples.
Definition tools.py:956
__init__(self, **kwargs)
Definition tools.py:961
Transform StdHep events into beam coordinates.
Definition tools.py:721
beam_rot_x
beam rotation in x?
Definition tools.py:742
__init__(self, **kwargs)
Definition tools.py:729
optional_parameters(self)
Return list of optional parameters.
Definition tools.py:778
beam_sigma_y
beam sigma in y
Definition tools.py:734
target_x
target x position
Definition tools.py:736
target_y
target y position
Definition tools.py:738
beam_rot_z
beam rotation in z?
Definition tools.py:746
beam_rot_y
beam rotation in y?
Definition tools.py:744
cmd_args(self)
Setup command arguments.
Definition tools.py:750
target_z
target z position
Definition tools.py:740
Convert LHE files to StdHep, displacing the time by given ctau.
Definition tools.py:888
__init__(self, **kwargs)
Definition tools.py:895
optional_parameters(self)
Return list of optional parameters.
Definition tools.py:912
cmd_args(self)
Setup command arguments.
Definition tools.py:902
Convert LHE files to StdHep, displacing the time by given ctau.
Definition tools.py:922
__init__(self, **kwargs)
Definition tools.py:929
optional_parameters(self)
Return list of optional parameters.
Definition tools.py:946
cmd_args(self)
Setup command arguments.
Definition tools.py:936
Convert EVIO events to LCIO using the hps-java EvioToLcio command line tool.
Definition tools.py:1174
run_number
run number
Definition tools.py:1188
optional_parameters(self)
Return list of optional parameters.
Definition tools.py:1213
detector
detector name
Definition tools.py:1186
required_parameters(self)
Return list of required parameters.
Definition tools.py:1204
setup(self)
Setup EvioToLcio component.
Definition tools.py:1222
__init__(self, steering=None, **kwargs)
Definition tools.py:1184
steering
steering file
Definition tools.py:1194
skip_events
number of events that are skipped
Definition tools.py:1190
event_print_interval
event print interval
Definition tools.py:1192
cmd_args(self)
Setup command arguments.
Definition tools.py:1231
Apply hodo-hit filter and space MC events to process before readout.
Definition tools.py:1395
optional_parameters(self)
Return list of optional parameters.
Definition tools.py:1445
cmd_args(self)
Setup command arguments.
Definition tools.py:1426
Space MC events and apply energy filters to process before readout.
Definition tools.py:1275
filter_event_interval
Default event filtering interval.
Definition tools.py:1304
__init__(self, **kwargs)
Definition tools.py:1284
filter_ecal_hit_ecut
No default ecal hit cut energy (negative val to be ignored)
Definition tools.py:1297
optional_parameters(self)
Return list of optional parameters.
Definition tools.py:1369
filter_nevents_read
Default is no maximum nevents to read.
Definition tools.py:1310
filter_no_cuts
By default cuts are on.
Definition tools.py:1286
config(self, parser)
Configure FilterBunches component.
Definition tools.py:1331
required_config(self)
Return list of required config.
Definition tools.py:1385
filter_nevents_write
Default is no maximum nevents to write.
Definition tools.py:1316
cmd_args(self)
Setup command arguments.
Definition tools.py:1343
Run the hpstr analysis tool.
Definition tools.py:532
execute(self, log_out, log_err)
Execute HPSTR component.
Definition tools.py:654
output_files(self)
Adjust names of output files.
Definition tools.py:644
optional_parameters(self)
Return list of optional parameters.
Definition tools.py:604
required_parameters(self)
Return list of required parameters.
Definition tools.py:595
setup(self)
Setup HPSTR component.
Definition tools.py:556
__init__(self, cfg=None, is_data=0, year=None, tracking=None, **kwargs)
Definition tools.py:541
cfg
configuration
Definition tools.py:543
required_config(self)
Return list of required configs.
Definition tools.py:613
tracking
tracking option (KF, GBL, BOTH)
Definition tools.py:549
is_data
run mode
Definition tools.py:545
cmd_args(self)
Setup command arguments.
Definition tools.py:622
Generic base class for Java based tools.
Definition tools.py:1128
java_class
java class
Definition tools.py:1135
config(self, parser)
Automatic configuration.
Definition tools.py:1170
required_config(self)
Return list of required config.
Definition tools.py:1142
cmd_args(self)
Setup command arguments.
Definition tools.py:1151
java_args
java arguments
Definition tools.py:1137
__init__(self, name, java_class, **kwargs)
Definition tools.py:1133
Run the hps-java JobManager class.
Definition tools.py:231
optional_parameters(self)
Return list of optional parameters.
Definition tools.py:436
detector
detector name
Definition tools.py:248
required_parameters(self)
Return list of required parameters.
Definition tools.py:427
setup(self)
Setup JobManager component.
Definition tools.py:326
__init__(self, steering=None, **kwargs)
Definition tools.py:241
lcsim_cache_dir
lcsim cache directory
Definition tools.py:258
steering
steering file
Definition tools.py:266
config(self, parser)
Configure JobManager component.
Definition tools.py:292
hps_java_bin_jar
path to the hps-java bin jar — presumably inside the hps-java installation; verify
Definition tools.py:268
logging_config_file
file for config logging
Definition tools.py:256
required_config(self)
Return list of required configurations.
Definition tools.py:317
event_print_interval
event print interval
Definition tools.py:250
cmd_args(self)
Setup command arguments.
Definition tools.py:337
java_args
java arguments
Definition tools.py:254
conditions_password
password for the conditions database — purpose unconfirmed; verify against usage
Definition tools.py:262
Concatenate LCIO files together.
Definition tools.py:1688
__init__(self, **kwargs)
Definition tools.py:1693
cmd_args(self)
Setup command arguments.
Definition tools.py:1696
Count events in LCIO files.
Definition tools.py:1712
__init__(self, **kwargs)
Definition tools.py:1720
optional_parameters(self)
Return list of optional parameters.
Definition tools.py:1743
required_parameters(self)
Return list of required parameters.
Definition tools.py:1734
cmd_args(self)
Setup command arguments.
Definition tools.py:1723
Dump LCIO event information.
Definition tools.py:1482
__init__(self, **kwargs)
Definition tools.py:1490
lcio_dir
lcio directory
Definition tools.py:1492
required_parameters(self)
Return list of required parameters.
Definition tools.py:1531
setup(self)
Setup LCIODumpEvent component.
Definition tools.py:1506
config(self, parser)
Configure LCIODumpEvent component.
Definition tools.py:1500
required_config(self)
Return list of required config.
Definition tools.py:1522
cmd_args(self)
Setup command arguments.
Definition tools.py:1510
Merge LCIO files.
Definition tools.py:1753
__init__(self, **kwargs)
Definition tools.py:1758
cmd_args(self)
Setup command arguments.
Definition tools.py:1761
Generic component for LCIO tools.
Definition tools.py:1641
lcio_bin_jar
path to the LCIO bin jar used to run the LCIO command-line tools
Definition tools.py:1651
required_parameters(self)
Return list of required parameters.
Definition tools.py:1678
config(self, parser)
Configure LCIOTool component.
Definition tools.py:1654
required_config(self)
Return list of required config.
Definition tools.py:1669
cmd_args(self)
Setup command arguments.
Definition tools.py:1660
__init__(self, name=None, **kwargs)
Definition tools.py:1649
Count events in an LHE file.
Definition tools.py:1541
execute(self, log_out, log_err)
Execute LHECount component.
Definition tools.py:1562
__init__(self, minevents=0, fail_on_underflow=False, **kwargs)
Definition tools.py:1546
setup(self)
Setup LHECount component.
Definition tools.py:1550
cmd_exists(self)
Check if command exists.
Definition tools.py:1555
Merge StdHep files.
Definition tools.py:1067
__init__(self, **kwargs)
Definition tools.py:1075
optional_parameters(self)
Return list of optional parameters.
Definition tools.py:1078
required_parameters(self)
Return list of required parameters.
Definition tools.py:1087
Merge StdHep files, applying poisson sampling.
Definition tools.py:993
execute(self, log_out, log_err)
Execute MergePoisson component.
Definition tools.py:1054
target_thickness
target thickness in cm
Definition tools.py:1004
__init__(self, xsec=0, **kwargs)
Definition tools.py:1000
required_parameters(self)
Return list of required parameters.
Definition tools.py:1018
setup(self)
Setup MergePoisson component.
Definition tools.py:1010
xsec
cross section in pb
Definition tools.py:1002
num_electrons
number of electrons per bunch
Definition tools.py:1006
cmd_args(self)
Setup command arguments.
Definition tools.py:1027
execute(self, log_out, log_err)
Definition tools.py:2198
__init__(self, **kwargs)
Definition tools.py:1793
scan_output_file(self, log_out)
Definition tools.py:1988
scan_root_file(self, filename, log_out=None)
Definition tools.py:1893
scan_input_files(self, log_out)
Definition tools.py:1957
print_summary(self, log_out)
Definition tools.py:2095
validate_merge(self, log_out)
Definition tools.py:2015
write_stats_json(self, log_out, validation_passed)
Definition tools.py:2142
Move input files to new locations.
Definition tools.py:1614
execute(self, log_out, log_err)
Execute the file-move component (docstring apparently copy-pasted from TarFiles).
Definition tools.py:1629
__init__(self, **kwargs)
Definition tools.py:1619
cmd_exists(self)
Check if command exists.
Definition tools.py:1622
Convert LHE files to StdHep.
Definition tools.py:866
__init__(self, **kwargs)
Definition tools.py:871
cmd_args(self)
Setup command arguments.
Definition tools.py:879
Run the make_mini_dst command on the input file.
Definition tools.py:446
output_files(self)
Adjust names of output files.
Definition tools.py:503
__init__(self, **kwargs)
Initialize ProcessMiniDst with default input file and the command to run.
Definition tools.py:454
optional_parameters(self)
Return list of optional parameters.
Definition tools.py:485
required_parameters(self)
Return list of required parameters.
Definition tools.py:476
setup(self)
Setup the MiniDST component.
Definition tools.py:467
required_config(self)
Return list of required configs.
Definition tools.py:494
cmd_args(self)
Setup command arguments for make_mini_dst.
Definition tools.py:512
Randomly sample StdHep events into a new file.
Definition tools.py:798
execute(self, log_out, log_err)
Execute RandomSample component.
Definition tools.py:853
__init__(self, **kwargs)
Definition tools.py:805
mu
median of distribution?
Definition tools.py:808
optional_parameters(self)
Return list of optional parameters.
Definition tools.py:844
cmd_args(self)
Setup command arguments.
Definition tools.py:810
Run the SLIC Geant4 simulation.
Definition tools.py:16
execute(self, log_out, log_err)
Execute SLIC component.
Definition tools.py:146
__init__(self, **kwargs)
Definition tools.py:25
run_number
Run number to set on output file (optional)
Definition tools.py:29
optional_parameters(self)
Return list of optional parameters.
Definition tools.py:119
required_parameters(self)
Return list of required parameters.
Definition tools.py:128
__particle_tbl(self)
Return path to particle table.
Definition tools.py:88
setup(self)
Setup SLIC component.
Definition tools.py:104
disable_particle_table
Optionally disable loading of the particle table shipped with slic Note: This should not be used with...
Definition tools.py:35
__detector_file(self)
Return path to detector file.
Definition tools.py:84
macros
List of macros to run (optional)
Definition tools.py:27
config(self, parser)
Configure SLIC component.
Definition tools.py:92
required_config(self)
Return list of required configurations.
Definition tools.py:137
detector_dir
To be set from config or install dir.
Definition tools.py:31
cmd_args(self)
Setup command arguments.
Definition tools.py:41
Copy the SQLite database file to the desired location.
Definition tools.py:169
execute(self, log_out, log_err)
Execute the file copy operation.
Definition tools.py:208
__init__(self, **kwargs)
Initialize SQLiteProc to copy the SQLite file.
Definition tools.py:174
cmd_args(self)
Return dummy command arguments to satisfy the parent class.
Definition tools.py:197
Count number of events in a StdHep file.
Definition tools.py:1097
execute(self, log_out, log_err)
Execute StdHepCount component.
Definition tools.py:1115
__init__(self, **kwargs)
Definition tools.py:1102
cmd_args(self)
Setup command arguments.
Definition tools.py:1107
Generic class for StdHep tools.
Definition tools.py:674
cmd_args(self)
Setup command arguments.
Definition tools.py:695
__init__(self, name=None, **kwargs)
Definition tools.py:691
Tar files into an archive.
Definition tools.py:1587
execute(self, log_out, log_err)
Execute TarFiles component.
Definition tools.py:1602
__init__(self, **kwargs)
Definition tools.py:1592
cmd_exists(self)
Check if command exists.
Definition tools.py:1595
Unzip the input files to outputs.
Definition tools.py:1455
execute(self, log_out, log_err)
Execute Unzip component.
Definition tools.py:1469
output_files(self)
Return list of output files.
Definition tools.py:1463
__init__(self, **kwargs)
Definition tools.py:1460
Miscellaneous math functions.
Definition func.py:1