HPS-MC
tools.py
1"""! Tools that can be used in HPSMC jobs."""
2
3import json
4import os
5import gzip
6import shutil
7import subprocess
8import tarfile
9
10from subprocess import PIPE
11
12from hpsmc.component import Component
13import hpsmc.func as func
14
15
16 class SLIC(Component):
17 """!
18 Run the SLIC Geant4 simulation.
19
20 Optional parameters are: **nevents**, **macros**, **run_number**, **disable_particle_table** \n
21 Required parameters are: **detector** \n
22 Required configurations are: **slic_dir**, **detector_dir**
23 """
24
25 def __init__(self, **kwargs):
26
27 self.macros = []
28
29 self.run_number = None
30
31 self.detector_dir = None
32
33 self.nevents = None
34
35 self.disable_particle_table = False
36
37 Component.__init__(
38 self, name="slic", command="slic", output_ext=".slcio", **kwargs
39 )
40
41 def cmd_args(self):
42 """!
43 Setup command arguments.
44 @return list of arguments
45 """
46 if not len(self.input_files()):
47 raise Exception("No inputs given for SLIC.")
48
49 args = [
50 "-g",
51 self.__detector_file(),
52 # "-i", self.input_files()[0],
53 "-o",
54 self.output_files()[0],
55 "-d%s" % str(self.seed),
56 ]
57
58 if self.nevents is not None:
59 args.extend(["-r", str(self.nevents)])
60
61 if self.run_number is not None:
62 args.extend(["-m", "run_number.mac"])
63
64 if not self.disable_particle_table:
65 tbl = self.__particle_tbl()
66 if os.path.exists(tbl):
67 args.extend(["-P", tbl])
68 else:
69 raise Exception("SLIC particle.tbl does not exist: %s" % tbl)
70
71 if len(self.macros):
72 # args = []
73 for macro in self.macros:
74 if macro == "run_number.mac":
75 raise Exception("Macro name '%s' is not allowed." % macro)
76 if not os.path.isabs(macro):
77 raise Exception("Macro '%s' is not an absolute path." % macro)
78 args.extend(["-m", macro])
79 else:
80 args.extend(["-i", self.input_files()[0]])
81
82 return args
83
84 def __detector_file(self):
85 """! Return path to detector file."""
86 return os.path.join(self.detector_dir, self.detector, self.detector + ".lcdd")
87
88 def __particle_tbl(self):
89 """! Return path to particle table."""
90 return os.path.join(self.slic_dir, "share", "particle.tbl")
91
92 def config(self, parser):
93 """! Configure SLIC component."""
94 super().config(parser)
95
96 if self.detector_dir is None:
97 self.detector_dir = "{}/share/detectors".format(self.hpsmc_dir)
98 if not os.path.isdir(self.detector_dir):
99 raise Exception("Failed to find valid detector_dir")
100 self.logger.debug(
101 "Using detector_dir from install: {}".format(self.detector_dir)
102 )
103
104 def setup(self):
105 """! Setup SLIC component."""
106 if not os.path.exists(self.slic_dir):
107 raise Exception("slic_dir does not exist: %s" % self.slic_dir)
108
109 self.env_script = self.slic_dir + os.sep + "bin" + os.sep + "slic-env.sh"
110 if not os.path.exists(self.env_script):
111 raise Exception("SLIC setup script does not exist: %s" % self.env_script)
112
113 if self.run_number is not None:
114 run_number_cmd = "/lcio/runNumber %d" % self.run_number
115 run_number_mac = open("run_number.mac", "w")
116 run_number_mac.write(run_number_cmd)
117 run_number_mac.close()
118
119 def optional_parameters(self):
120 """!
121 Return list of optional parameters.
122
123 Optional parameters are: **nevents**, **macros**, **run_number**
124 @return list of optional parameters
125 """
126 return ["nevents", "macros", "run_number", "disable_particle_table"]
127
128 def required_parameters(self):
129 """!
130 Return list of required parameters.
131
132 Required parameters are: **detector**
133 @return list of required parameters
134 """
135 return ["detector"]
136
137 def required_config(self):
138 """!
139 Return list of required configurations.
140
141 Required configurations are: **slic_dir**, **detector_dir**
142 @return list of required configurations
143 """
144 return ["slic_dir", "detector_dir"]
145
146 def execute(self, log_out, log_err):
147 """!
148 Execute SLIC component.
149
150 Component is executed by creating command line input
151 from command and command arguments.
152 @return return code of process
153 """
154 # SLIC needs to be run inside bash as the Geant4 setup script is a piece of #@$@#$.
155 cl = 'bash -c ". %s && %s %s"' % (
156 self.env_script,
157 self.command,
158 " ".join(self.cmd_args()),
159 )
160
161 # self.logger.info("Executing '%s' with command: %s" % (self.name, cl))
162 proc = subprocess.Popen(cl, shell=True, stdout=log_out, stderr=log_err)
163 proc.communicate()
164 proc.wait()
165
166 return proc.returncode
167
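# Editorial usage sketch (not part of the original source). Assumes the Component base
# class accepts "inputs"/"outputs" keyword arguments and supplies the seed attribute
# used above; the detector and file names are placeholders.
def _example_slic_step():
    slic = SLIC(inputs=["beam.stdhep"], outputs=["beam.slcio"])
    slic.detector = "HPS-PhysicsRun2016-Pass2"  # normally supplied as a job parameter
    slic.nevents = 1000
    # In a job, config(parser) resolves slic_dir/detector_dir before setup() is called.
    slic.setup()
    with open("slic.log", "w") as out, open("slic.err", "w") as err:
        return slic.execute(out, err)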
168
169 class SQLiteProc(Component):
170 """!
171 Copy the SQLite database file to the desired location.
172 """
173
174 def __init__(self, **kwargs):
175 """!
176 Initialize SQLiteProc to copy the SQLite file.
177 """
178 self.source_file = kwargs.get("source_file")
179 self.destination_file = kwargs.get("destination_file")
180
181 # Set the Local SQLite Snapshot Location
182 if self.source_file is not None:
183 self.logger.debug(
184 "Setting SQLite local copy source file from config: %s"
185 % self.source_file
186 )
187 if self.destination_file is not None:
188 self.logger.debug(
189 "Setting Job Destination file from config: %s"
190 % self.destination_file
191 )
193
194 # Ensure to call the parent constructor properly
195 Component.__init__(self, name="sqlite_file_copy", **kwargs)
196
197 def cmd_args(self):
198 """!
199 Return dummy command arguments to satisfy the parent class.
200 """
201 args = ["--source", self.source_file, "--destination", self.destination_file]
202
203 if not all(isinstance(arg, str) for arg in args):
204 raise ValueError("All arguments must be strings.")
205 return args
207
208 def execute(self, log_out, log_err):
209 """!
210 Execute the file copy operation.
211 """
212
213 try:
214 # Copy the file
215
216 self.logger.info(
217 f"Copying file from {self.source_file} to {self.destination_file}"
218 )
219 shutil.copy(self.source_file, self.destination_file)
220
221 # Log success
222 self.logger.info(f"Successfully copied file to {self.destination_file}")
223
224 return 0 # Success code
225
226 except Exception as e:
227 self.logger.error(f"Error during file copy: {e}")
228 return 1 # Error code
229
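# Editorial usage sketch (not part of the original source); the paths are placeholders.
def _example_sqlite_copy():
    copier = SQLiteProc(source_file="/cache/hps_conditions.db",
                        destination_file="hps_conditions.db")
    with open("copy.log", "w") as out, open("copy.err", "w") as err:
        return copier.execute(out, err)  # 0 on success, 1 on failure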
230
231 class JobManager(Component):
232 """!
233 Run the hps-java JobManager class.
234
235 Input files have slcio format.
236
237 Required parameters are: **steering_files** \n
238 Optional parameters are: **detector**, **run_number**, **defs**
239 """
240
241 def __init__(self, steering=None, **kwargs):
242
244 self.run_number = None
245
246 self.nevents = None
247
248 self.detector = None
249
251
252 self.defs = None
253
254 self.java_args = None
255
257
258 self.lcsim_cache_dir = None
259
260 self.conditions_user = None
261
263
264 self.conditions_url = None
265
266 self.steering = steering
267
269
270 if "overlay_file" in kwargs:
271 self.overlay_file = kwargs["overlay_file"]
272 else:
273 self.overlay_file = None
274
275 Component.__init__(
276 self,
277 name="job_manager",
278 command="java",
279 description="HPS Java Job Manager",
280 output_ext=".slcio",
281 **kwargs,
282 )
283
284 # Automatically append steering file key to output file name
285 if self.append_tok is None:
286 self.append_tok = self.steering
287 self.logger.debug(
288 "Append token for '%s' automatically set to '%s' from steering key."
289 % (self.name, self.append_tok)
290 )
291
292 def config(self, parser):
293 """! Configure JobManager component."""
294 super().config(parser)
295 # if installed these are set in the environment script...
296 if self.hps_java_bin_jar is None:
297 if os.getenv("HPS_JAVA_BIN_JAR", None) is not None:
298 self.hps_java_bin_jar = os.getenv("HPS_JAVA_BIN_JAR", None)
299 self.logger.debug(
300 "Set HPS_JAVA_BIN_JAR from environment: {}".format(
301 self.hps_java_bin_jar
302 )
303 )
304 else:
305 raise Exception(
306 "hps_java_bin_jar not set in environment or config file!"
307 )
308 if self.conditions_url is None:
309 if os.getenv("CONDITIONS_URL", None) is not None:
310 self.conditions_url = os.getenv("CONDITIONS_URL", None)
311 self.logger.debug(
312 "Set CONDITIONS_URL from environment: {}".format(
313 self.conditions_url
314 )
315 )
316
317 def required_config(self):
318 """!
319 Return list of required configurations.
320
321 Required configurations are: **hps_java_bin_jar**
322 @return list of required configurations.
323 """
324 return ["hps_java_bin_jar"]
325
326 def setup(self):
327 """! Setup JobManager component."""
328 if not len(self.input_files()):
329 raise Exception("No inputs provided to hps-java.")
330
331 if self.steering not in self.steering_files:
332 raise Exception(
333 "Steering '%s' not found in: %s" % (self.steering, self.steering_files)
334 )
335 self.steering_file = self.steering_files[self.steering]
336
337 def cmd_args(self):
338 """!
339 Setup command arguments.
340 @return list of arguments
341 """
342 args = []
343
344 if self.java_args is not None:
345 self.logger.debug("Setting java_args from config: %s" % self.java_args)
346 args.append(self.java_args)
347
348 if self.logging_config_file is not None:
349 self.logger.debug(
350 "Setting logging_config_file from config: %s" % self.logging_config_file
351 )
352 args.append("-Djava.util.logging.config.file=%s" % self.logging_config_file)
353
354 if self.lcsim_cache_dir is not None:
355 self.logger.debug(
356 "Setting lcsim_cache_dir from config: %s" % self.lcsim_cache_dir
357 )
358 args.append("-Dorg.lcsim.cacheDir=%s" % self.lcsim_cache_dir)
359
360 if self.conditions_user is not None:
361 self.logger.debug(
362 "Setting conditions_user from config: %s" % self.conditions_user
363 )
364 args.append("-Dorg.hps.conditions.user=%s" % self.conditions_user)
365 if self.conditions_password is not None:
366 self.logger.debug("Setting conditions_password from config (not shown)")
367 args.append("-Dorg.hps.conditions.password=%s" % self.conditions_password)
368 if self.conditions_url is not None:
369 self.logger.debug(
370 "Setting conditions_url from config: %s" % self.conditions_url
371 )
372 args.append("-Dorg.hps.conditions.url=%s" % self.conditions_url)
373
374 args.append("-jar")
375 args.append(self.hps_java_bin_jar)
376
377
378 if self.event_print_interval is not None:
379 args.append("-e")
380 args.append(str(self.event_print_interval))
381
382 if self.run_number is not None:
383 args.append("-R")
384 args.append(str(self.run_number))
385
386 if self.detector is not None:
387 args.append("-d")
388 args.append(self.detector)
389
390 if len(self.output_files()):
391 args.append("-D")
392 args.append("outputFile=" + os.path.splitext(self.output_files()[0])[0])
393
394 if self.defs:
395 for k, v in self.defs.items():
396 args.append("-D")
397 args.append(k + "=" + str(v))
398
399 if not os.path.isfile(self.steering_file):
400 args.append("-r")
401 self.logger.debug(
402 "Steering does not exist at '%s' so assuming it is a resource."
403 % self.steering_file
404 )
405 else:
406 if not os.path.isabs(self.steering_file):
407 raise Exception(
408 "Steering looks like a file but is not an abs path: %s"
409 % self.steering_file
410 )
411 args.append(self.steering_file)
412
413 if self.nevents is not None:
414 args.append("-n")
415 args.append(str(self.nevents))
416
417 for input_file in self.input_files():
418 args.append("-i")
419 args.append(input_file)
420
421 if self.overlay_file is not None:
422 args.append("-D")
423 args.append("overlayFile=" + os.path.splitext(self.overlay_file)[0])
424
425 return args
426
427 def required_parameters(self):
428 """!
429 Return list of required parameters.
430
431 Required parameters are: **steering_files**
432 @return list of required parameters
433 """
434 return ["steering_files"]
435
436 def optional_parameters(self):
437 """!
438 Return list of optional parameters.
439
440 Optional parameters are: **detector**, **run_number**, **defs**
441 @return list of optional parameters
442 """
443 return ["detector", "run_number", "defs", "nevents"]
444
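# Editorial usage sketch (not part of the original source). steering_files maps a
# steering key to a steering resource or file, as setup() above implies; the steering
# resource, detector and run number are placeholders, and config() must still resolve
# hps_java_bin_jar before cmd_args() can be built.
def _example_job_manager_recon():
    mgr = JobManager(steering="recon",
                     inputs=["events.slcio"], outputs=["events_recon.slcio"])
    mgr.steering_files = {"recon": "/org/hps/steering/recon/PhysicsRun2016FullRecon.lcsim"}
    mgr.detector = "HPS-PhysicsRun2016-Pass2"
    mgr.run_number = 7800
    mgr.setup()  # resolves the steering file from the steering key
    return mgr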
445
446 class HPSTR(Component):
447 """!
448 Run the hpstr analysis tool.
449
450 Required parameters are: **config_files** \n
451 Optional parameters are: **year**, **is_data**, **nevents** \n
452 Required configs are: **hpstr_install_dir**, **hpstr_base**
453 """
454
455 def __init__(self, cfg=None, is_data=0, year=None, tracking=None, **kwargs):
456
457 self.cfg = cfg
458
459 self.is_data = is_data
460
461 self.year = year
462
463 self.tracking = tracking
464
465 self.hpstr_install_dir = None
466 self.hpstr_base = None
467
468 Component.__init__(self, name="hpstr", command="hpstr", **kwargs)
469
470 def setup(self):
471 """! Setup HPSTR component."""
472 if not os.path.exists(self.hpstr_install_dir):
473 raise Exception(
474 "hpstr_install_dir does not exist: %s" % self.hpstr_install_dir
475 )
476 self.env_script = (
477 self.hpstr_install_dir + os.sep + "bin" + os.sep + "hpstr-env.sh"
478 )
479
480 # The config file to use is read from a dict in the JSON parameters.
481 if self.cfg not in self.config_files:
482 raise Exception(
483 "Config '%s' was not found in: %s" % (self.cfg, self.config_files)
484 )
485 config_file = self.config_files[self.cfg]
486 if len(os.path.dirname(config_file)):
487 # If there is a directory name then we expect an absolute path not in the hpstr dir.
488 if os.path.isabs(config_file):
489 self.cfg_path = config_file
490 else:
491 # The config must be an abs path.
492 raise Exception(
493 "The config has a directory but is not an abs path: %s" % self.cfg
494 )
495 else:
496 # Assume the cfg file is within the hpstr base dir.
497 self.cfg_path = os.path.join(
498 self.hpstr_base, "processors", "config", config_file
499 )
500 self.logger.debug("Set config path: %s" % self.cfg_path)
501
502 # For ROOT output, automatically append the cfg key from the job params.
503 if os.path.splitext(self.input_files()[0])[1] == ".root":
504 self.append_tok = self.cfg
505 self.logger.debug(
506 "Automatically appending token to output file: %s" % self.append_tok
507 )
508
509 def required_parameters(self):
510 """!
511 Return list of required parameters.
512
513 Required parameters are: **config_files**
514 @return list of required parameters
515 """
516 return ["config_files"]
517
518 def optional_parameters(self):
519 """!
520 Return list of optional parameters.
521
522 Optional parameters are: **year**, **is_data**, **nevents**
523 @return list of optional parameters
524 """
525 return ["year", "is_data", "nevents", "tracking"]
526
527 def required_config(self):
528 """!
529 Return list of required configs.
530
531 Required configs are: **hpstr_install_dir**, **hpstr_base**
532 @return list of required configs
533 """
534 return ["hpstr_install_dir", "hpstr_base"]
535
536 def cmd_args(self):
537 """!
538 Setup command arguments.
539 @return list of arguments
540 """
541 args = [
542 self.cfg_path,
543 "-t",
544 str(self.is_data),
545 "-i",
546 self.input_files()[0],
547 "-o",
548 self.output_files()[0],
549 ]
550 if self.nevents is not None:
551 args.extend(["-n", str(self.nevents)])
552 if self.year is not None:
553 args.extend(["-y", str(self.year)])
554 if self.tracking is not None:
555 args.extend(["-w", str(self.tracking)])
556 return args
557
558 def output_files(self):
559 """! Adjust names of output files."""
560 f, ext = os.path.splitext(self.input_files()[0])
561 if ".slcio" in ext:
562 return ["%s.root" % f]
563 else:
564 if not self.append_tok:
565 self.append_tok = self.cfg
566 return ["%s_%s.root" % (f, self.append_tok)]
567
568 def execute(self, log_out, log_err):
569 """! Execute HPSTR component."""
570 args = self.cmd_args()
571 cl = 'bash -c ". %s && %s %s"' % (
572 self.env_script,
573 self.command,
574 " ".join(args),
575 )
576
577 self.logger.debug("Executing '%s' with command: %s" % (self.name, cl))
578 proc = subprocess.Popen(cl, shell=True, stdout=log_out, stderr=log_err)
579 proc.communicate()
580 proc.wait()
581
582 return proc.returncode
583
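# Editorial usage sketch (not part of the original source). config_files maps the cfg
# key to a file under <hpstr_base>/processors/config, as setup() above resolves it;
# the key, config file name and year are placeholders.
def _example_hpstr_step():
    hpstr = HPSTR(cfg="recon", is_data=0, year=2016,
                  inputs=["events_recon.slcio"])
    hpstr.config_files = {"recon": "recoTuple_cfg.py"}
    # config(parser) must supply hpstr_install_dir and hpstr_base before setup();
    # output_files() derives the ROOT output name from the .slcio input.
    return hpstr.output_files()  # ["events_recon.root"]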
584
585
586
587
588 class StdHepTool(Component):
589 """!
590 Generic class for StdHep tools.
591 """
592
593
594 seed_names = [
595 "beam_coords",
596 "beam_coords_old",
597 "lhe_tridents",
598 "lhe_tridents_displacetime",
599 "lhe_tridents_displaceuni",
600 "merge_poisson",
601 "mix_signal",
602 "random_sample",
603 ]
604
605 def __init__(self, name=None, **kwargs):
606
607 Component.__init__(self, name=name, command="stdhep_" + name, **kwargs)
608
609 def cmd_args(self):
610 """!
611 Setup command arguments.
612 @return list of arguments
613 """
614 args = []
615
616 if self.name in StdHepTool.seed_names:
617 args.extend(["-s", str(self.seed)])
618
619 if len(self.output_files()) == 1:
620 args.insert(0, self.output_files()[0])
621 elif len(self.output_files()) > 1:
622 raise Exception("Too many outputs specified for StdHepTool.")
623 else:
624 raise Exception("No outputs specified for StdHepTool.")
625
626 if len(self.input_files()):
627 for i in self.inputs[::-1]:
628 args.insert(0, i)
629 else:
630 raise Exception("No inputs specified for StdHepTool.")
631
632 return args
633
634
635 class BeamCoords(StdHepTool):
636 """!
637 Transform StdHep events into beam coordinates.
638
639 Optional parameters are: **beam_sigma_x**, **beam_sigma_y**, **beam_rot_x**,
640 **beam_rot_y**, **beam_rot_z**, **target_x**, **target_y**, **target_z**
641 """
642
643 def __init__(self, **kwargs):
644
646 self.beam_sigma_x = None
647
648 self.beam_sigma_y = None
649
650 self.target_x = None
651
652 self.target_y = None
653
654 self.target_z = None
655
656 self.beam_rot_x = None
657
658 self.beam_rot_y = None
659
660 self.beam_rot_z = None
661
662 StdHepTool.__init__(self, name="beam_coords", append_tok="rot", **kwargs)
663
664 def cmd_args(self):
665 """!
666 Setup command arguments.
667 @return list of arguments
668 """
669 args = StdHepTool.cmd_args(self)
670
671 if self.beam_sigma_x is not None:
672 args.extend(["-x", str(self.beam_sigma_x)])
673 if self.beam_sigma_y is not None:
674 args.extend(["-y", str(self.beam_sigma_y)])
675
676 if self.beam_rot_x is not None:
677 args.extend(["-u", str(self.beam_rot_x)])
678 if self.beam_rot_y is not None:
679 args.extend(["-v", str(self.beam_rot_y)])
680 if self.beam_rot_z is not None:
681 args.extend(["-w", str(self.beam_rot_z)])
682
683 if self.target_x is not None:
684 args.extend(["-X", str(self.target_x)])
685 if self.target_y is not None:
686 args.extend(["-Y", str(self.target_y)])
687 if self.target_z is not None:
688 args.extend(["-Z", str(self.target_z)])
689
690 return args
691
692 def optional_parameters(self):
693 """!
694 Return list of optional parameters.
695
696 Optional parameters are: **beam_sigma_x**, **beam_sigma_y**, **beam_rot_x**,
697 **beam_rot_y**, **beam_rot_z**, **target_x**, **target_y**, **target_z**
698 @return list of optional parameters
699 """
700 return [
701 "beam_sigma_x",
702 "beam_sigma_y",
703 "beam_rot_x",
704 "beam_rot_y",
705 "beam_rot_z",
706 "target_x",
707 "target_y",
708 "target_z",
709 ]
710
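# Editorial usage sketch (not part of the original source): beam smearing and target
# position values are plain attributes read by cmd_args() above (normally they come
# from the job parameters). Assumes the Component base supplies the seed attribute
# used by StdHepTool.cmd_args(); numbers and file names are placeholders, not
# recommended settings.
def _example_beam_coords():
    bc = BeamCoords(inputs=["tritrig.stdhep"], outputs=["tritrig_rot.stdhep"])
    bc.beam_sigma_x = 0.300
    bc.beam_sigma_y = 0.030
    bc.target_z = -4.3
    return bc.cmd_args()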
711
712 class RandomSample(StdHepTool):
713 """!
714 Randomly sample StdHep events into a new file.
715
716 Optional parameters are: **nevents**, **mu**
717 """
718
719 def __init__(self, **kwargs):
720 StdHepTool.__init__(self, name="random_sample", append_tok="sampled", **kwargs)
721
722 self.mu = None
723
724 def cmd_args(self):
725 """!
726 Setup command arguments.
727 @return list of arguments
728 """
729 args = []
730
731 if self.name in StdHepTool.seed_names:
732 args.extend(["-s", str(self.seed)])
733
734 args.extend(["-N", str(1)])
735
736 if self.nevents is not None:
737 args.extend(["-n", str(self.nevents)])
738
739 if self.mu is not None:
740 args.extend(["-m", str(self.mu)])
741
742 if len(self.output_files()) == 1:
743 # only use file name, not extension because extension is added by tool
744 args.insert(0, os.path.splitext(self.output_files()[0])[0])
745 elif len(self.output_files()) > 1:
746 raise Exception("Too many outputs specified for RandomSample.")
747 else:
748 raise Exception("No outputs specified for RandomSample.")
749
750 if len(self.input_files()):
751 for i in self.inputs[::-1]:
752 args.insert(0, i)
753 else:
754 raise Exception("No inputs were provided.")
755
756 return args
757
758 def optional_parameters(self):
759 """!
760 Return list of optional parameters.
761
762 Optional parameters are: **nevents**, **mu**
763 @return list of optional parameters
764 """
765 return ["nevents", "mu"]
766
767 def execute(self, log_out, log_err):
768 """! Execute RandomSample component"""
769 returncode = Component.execute(self, log_out, log_err)
770
771 # Move file to proper output file location.
772 src = "%s_1.stdhep" % os.path.splitext(self.output_files()[0])[0]
773 dest = "%s.stdhep" % os.path.splitext(self.output_files()[0])[0]
774 self.logger.debug("Moving '%s' to '%s'" % (src, dest))
775 shutil.move(src, dest)
776
777 return returncode
778
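# Editorial usage sketch (not part of the original source). The underlying tool writes
# "<basename>_1.stdhep" and execute() above moves it onto the requested output name;
# file names are placeholders.
def _example_random_sample():
    sampler = RandomSample(inputs=["tritrig_rot.stdhep"],
                           outputs=["tritrig_sampled.stdhep"])
    sampler.nevents = 2000  # events per output file, normally set from job parameters
    with open("sample.log", "w") as out, open("sample.err", "w") as err:
        return sampler.execute(out, err)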
779
780 class DisplaceTime(StdHepTool):
781 """!
782 Convert LHE files to StdHep, displacing the time by given ctau.
783
784 Optional parameters are: **ctau**
785 """
786
787 def __init__(self, **kwargs):
788
789 self.ctau = None
790 StdHepTool.__init__(
791 self, name="lhe_tridents_displacetime", output_ext=".stdhep", **kwargs
792 )
793
794 def cmd_args(self):
795 """!
796 Setup command arguments.
797 @return list of arguments
798 """
799 args = StdHepTool.cmd_args(self)
800 if self.ctau is not None:
801 args.extend(["-l", str(self.ctau)])
802 return args
803
804 def optional_parameters(self):
805 """!
806 Return list of optional parameters.
807
808 Optional parameters are: **ctau**
809 @return list of optional parameters
810 """
811 return ["ctau"]
812
813
814 class DisplaceUni(StdHepTool):
815 """!
816 Convert LHE files to StdHep, displacing the time by given ctau.
817
818 Optional parameters are: **ctau**
819 """
820
821 def __init__(self, **kwargs):
822
823 self.ctau = None
824 StdHepTool.__init__(
825 self, name="lhe_tridents_displaceuni", output_ext=".stdhep", **kwargs
826 )
827
828 def cmd_args(self):
829 """!
830 Setup command arguments.
831 @return list of arguments
832 """
833 args = StdHepTool.cmd_args(self)
834 if self.ctau is not None:
835 args.extend(["-l", str(self.ctau)])
836 return args
837
838 def optional_parameters(self):
839 """!
840 Return list of optional parameters.
841
842 Optional parameters are: **ctau**
843 @return list of optional parameters
844 """
845 return ["ctau"]
846
847
848 class AddMother(StdHepTool):
849 """!
850 Add mother particles for physics samples.
851 """
852
853 def __init__(self, **kwargs):
854 StdHepTool.__init__(self, name="add_mother", append_tok="mom", **kwargs)
855
856
857 class AddMotherFullTruth(StdHepTool):
858 """! Add full truth mother particles for physics samples"""
859
860 def __init__(self, **kwargs):
861 StdHepTool.__init__(
862 self, "add_mother_full_truth", append_tok="mom_full_truth", **kwargs
863 )
864 if len(self.inputs) != 2:
865 raise Exception(
866 "Must have 2 input files: a stdhep file and an LHE file, in that order"
867 )
868 self.input_file_1 = self.inputs[0]
869 base, ext = os.path.splitext(self.input_file_1)
870 if ext != ".stdhep":
871 raise Exception("The first input file must be a stdhep file")
872 self.input_file_2 = self.inputs[1]
873 base, ext = os.path.splitext(self.input_file_2)
874 if ext != ".lhe":
875 raise Exception("The second input file must be an LHE file")
876
877 def cmd_args(self):
878 """!
879 Setup command arguments.
880 @return list of arguments
881 """
882 return super().cmd_args()
883
884
885 class MergePoisson(StdHepTool):
886 """!
887 Merge StdHep files, applying poisson sampling.
888
889 Required parameters are: **target_thickness**, **num_electrons**
890 """
891
892 def __init__(self, xsec=0, **kwargs):
893
894 self.xsec = xsec
895
896 self.target_thickness = None
897
898 self.num_electrons = None
899
900 StdHepTool.__init__(self, name="merge_poisson", append_tok="sampled", **kwargs)
901
902 def setup(self):
903 """! Setup MergePoisson component."""
904 if self.xsec > 0:
905 self.mu = func.lint(self.target_thickness, self.num_electrons) * self.xsec
906 else:
907 raise Exception("Cross section is missing.")
908 self.logger.info("mu is %f", self.mu)
909
910 def required_parameters(self):
911 """!
912 Return list of required parameters.
913
914 Required parameters are: **target_thickness**, **num_electrons**
915 @return list of required parameters
916 """
917 return ["target_thickness", "num_electrons"]
918
919 def cmd_args(self):
920 """!
921 Setup command arguments.
922 @return list of arguments
923 """
924 args = []
925 if self.name in StdHepTool.seed_names:
926 args.extend(["-s", str(self.seed)])
927
928 args.extend(["-m", str(self.mu), "-N", str(1), "-n", str(self.nevents)])
929
930 if len(self.output_files()) == 1:
931 # only use file name, not extension because extension is added by tool
932 args.insert(0, os.path.splitext(self.output_files()[0])[0])
933 elif len(self.output_files()) > 1:
934 raise Exception("Too many outputs specified for MergePoisson.")
935 else:
936 raise Exception("No outputs specified for MergePoisson.")
937
938 if len(self.input_files()):
939 for i in self.inputs[::-1]:
940 args.insert(0, i)
941 else:
942 raise Exception("No inputs were provided.")
943
944 return args
945
946 def execute(self, log_out, log_err):
947 """! Execute MergePoisson component."""
948 returncode = Component.execute(self, log_out, log_err)
949
950 # Move file from tool to proper output file location.
951 src = "%s_1.stdhep" % os.path.splitext(self.output_files()[0])[0]
952 dest = "%s.stdhep" % os.path.splitext(self.output_files()[0])[0]
953 self.logger.debug("Moving '%s' to '%s'" % (src, dest))
954 shutil.move(src, dest)
955
956 return returncode
957
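# Editorial sketch (not part of the original source) of the Poisson mean computed in
# setup() above: mu = func.lint(target_thickness, num_electrons) * xsec, where
# func.lint() presumably returns the integrated luminosity for the given target
# thickness and number of electrons, so mu is the expected event count per sampled
# bunch. func is the hpsmc.func module imported at the top of this file.
def _example_merge_poisson_mu(target_thickness, num_electrons, xsec):
    return func.lint(target_thickness, num_electrons) * xsec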
958
959 class MergeFiles(StdHepTool):
960 """!
961 Merge StdHep files.
962
963 Optional parameters are: none \n
964 Required parameters are: none
965 """
966
967 def __init__(self, **kwargs):
968 StdHepTool.__init__(self, name="merge_files", **kwargs)
969
970 def optional_parameters(self):
971 """!
972 Return list of optional parameters.
973
974 Optional parameters are: none
975 @return list of optional parameters
976 """
977 return []
978
979 def required_parameters(self):
980 """!
981 Return list of required parameters.
982
983 Required parameters are: none
984 @return list of required parameters
985 """
986 return []
987
988
989 class StdHepCount(Component):
990 """!
991 Count number of events in a StdHep file.
992 """
993
994 def __init__(self, **kwargs):
995 Component.__init__(
996 self, name="stdhep_count", command="stdhep_count.sh", **kwargs
997 )
998
999 def cmd_args(self):
1000 """!
1001 Setup command arguments.
1002 @return list of arguments
1003 """
1004
1005 return [self.input_files()[0]]
1006
1007 def execute(self, log_out, log_err):
1008 """! Execute StdHepCount component."""
1009 cl = [self.command]
1010 cl.extend(self.cmd_args())
1011 proc = subprocess.Popen(cl, stdout=PIPE)
1012 (output, err) = proc.communicate()
1013
1014 nevents = int(output.split()[1])
1015 print("StdHep file '%s' has %d events." % (self.input_files()[0], nevents))
1016
1017 return proc.returncode
1018
1019
1020 class JavaTool(Component):
1021 """!
1022 Generic base class for Java based tools.
1023 """
1024
1025 def __init__(self, name, java_class, **kwargs):
1026
1027 self.java_class = java_class
1028
1029 self.java_args = None
1030
1031 self.conditions_url = None
1032 Component.__init__(self, name, "java", **kwargs)
1033
1034 def required_config(self):
1035 """!
1036 Return list of required config.
1037
1038 Required config are: **hps_java_bin_jar**
1039 @return list of required config
1040 """
1041 return ["hps_java_bin_jar"]
1042
1043 def cmd_args(self):
1044 """!
1045 Setup command arguments.
1046 @return list of arguments
1047 """
1048 args = []
1049 if self.java_args is not None:
1050 self.logger.debug("Setting java_args from config: %s" % self.java_args)
1051 args.append(self.java_args)
1052 if self.conditions_url is not None:
1053 self.logger.debug(
1054 "Setting conditions_url from config: %s" % self.conditions_url
1055 )
1056 args.append("-Dorg.hps.conditions.url=%s" % self.conditions_url)
1057 args.append("-cp")
1058 args.append(self.hps_java_bin_jar)
1059 args.append(self.java_class)
1060 return args
1061
1062 def config(self, parser):
1063 super().config(parser)
1064
1065
1066 class EvioToLcio(JavaTool):
1067 """!
1068 Convert EVIO events to LCIO using the hps-java EvioToLcio command line tool.
1069
1070 Input files have evio format (format used by DAQ system).
1071
1072 Required parameters are: **detector**, **steering_files** \n
1073 Optional parameters are: **run_number**, **skip_events**, **nevents**, **event_print_interval**
1074 """
1075
1076 def __init__(self, steering=None, **kwargs):
1077
1078 self.detector = None
1079
1080 self.run_number = None
1081
1082 self.skip_events = None
1083
1085
1086 self.steering = steering
1087
1088 JavaTool.__init__(
1089 self,
1090 name="evio_to_lcio",
1091 java_class="org.hps.evio.EvioToLcio",
1092 output_ext=".slcio",
1093 **kwargs,
1094 )
1095
1096 def required_parameters(self):
1097 """!
1098 Return list of required parameters.
1099
1100 Required parameters are: **detector**, **steering_files**
1101 @return list of required parameters
1102 """
1103 return ["detector", "steering_files"]
1104
1105 def optional_parameters(self):
1106 """!
1107 Return list of optional parameters.
1108
1109 Optional parameters are: **run_number**, **skip_events**, **nevents**, **event_print_interval**
1110 @return list of optional parameters
1111 """
1112 return ["run_number", "skip_events", "nevents", "event_print_interval"]
1113
1114 def setup(self):
1115 """! Setup EvioToLcio component."""
1116 super().setup()
1117 if self.steering not in self.steering_files:
1118 raise Exception(
1119 "Steering '%s' not found in: %s" % (self.steering, self.steering_files)
1120 )
1121 self.steering_file = self.steering_files[self.steering]
1122
1123 def cmd_args(self):
1124 """!
1125 Setup command arguments.
1126 @return list of arguments
1127 """
1128 args = JavaTool.cmd_args(self)
1129 if not len(self.output_files()):
1130 raise Exception("No output files were provided.")
1131 output_file = self.output_files()[0]
1132 args.append("-DoutputFile=%s" % os.path.splitext(output_file)[0])
1133 args.extend(["-d", self.detector])
1134 if self.run_number is not None:
1135 args.extend(["-R", str(self.run_number)])
1136 if self.skip_events is not None:
1137 args.extend(["-s", str(self.skip_events)])
1138
1139 if not os.path.isfile(self.steering_file):
1140 args.append("-r")
1141 self.logger.debug(
1142 "Steering does not exist at '%s' so assuming it is a resource."
1143 % self.steering_file
1144 )
1145 else:
1146 if not os.path.isabs(self.steering_file):
1147 raise Exception(
1148 "Steering looks like a file but is not an abs path: %s"
1149 % self.steering_file
1150 )
1151 args.extend(["-x", self.steering_file])
1152
1153 if self.nevents is not None:
1154 args.extend(["-n", str(self.nevents)])
1155
1156 args.append("-b")
1157
1158 for inputfile in self.input_files():
1159 args.append(inputfile)
1160
1161 if self.event_print_interval is not None:
1162 args.extend(["-e", str(self.event_print_interval)])
1163
1164 return args
1165
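# Editorial usage sketch (not part of the original source). Steering selection mirrors
# JobManager above; the steering resource, detector, run number and file names are
# placeholders, and config() must still supply hps_java_bin_jar before cmd_args() is
# built.
def _example_evio_to_lcio():
    cnv = EvioToLcio(steering="recon",
                     inputs=["hps_007800.evio.0"], outputs=["hps_007800.slcio"])
    cnv.steering_files = {"recon": "/org/hps/steering/recon/PhysicsRun2016FullRecon.lcsim"}
    cnv.detector = "HPS-PhysicsRun2016-Pass2"
    cnv.run_number = 7800
    cnv.setup()
    return cnv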
1166
1167 class FilterBunches(JavaTool):
1168 """!
1169 Space MC events and apply energy filters to process before readout.
1170
1171 Optional parameters are: **filter_ecal_hit_ecut**, **filter_event_interval**,
1172 **filter_nevents_read**, **filter_nevents_write**, **filter_no_cuts** \n
1173 Required config are: **hps_java_bin_jar**
1174 """
1175
1176 def __init__(self, **kwargs):
1177 if "filter_no_cuts" in kwargs:
1178 self.filter_no_cuts = kwargs["filter_no_cuts"]
1179 else:
1180
1181 self.filter_no_cuts = False
1182
1183 if "filter_ecal_pairs" in kwargs:
1184 self.filter_ecal_pairs = kwargs["filter_ecal_pairs"]
1185 else:
1186 self.filter_ecal_pairs = False
1187
1188 if "filter_ecal_hit_ecut" in kwargs:
1189 self.filter_ecal_hit_ecut = kwargs["filter_ecal_hit_ecut"]
1190 else:
1191
1192 self.filter_ecal_hit_ecut = -1.0
1193 # self.filter_ecal_hit_ecut = 0.05
1194
1195 if "filter_event_interval" in kwargs:
1196 self.filter_event_interval = kwargs["filter_event_interval"]
1197 else:
1198
1199 self.filter_event_interval = 250
1200
1201 if "filter_nevents_read" in kwargs:
1202 self.filter_nevents_read = kwargs["filter_nevents_read"]
1203 else:
1204
1205 self.filter_nevents_read = -1
1206
1207 if "filter_nevents_write" in kwargs:
1208 self.filter_nevents_write = kwargs["filter_nevents_write"]
1209 else:
1210
1211 self.filter_nevents_write = -1
1212
1214
1215 JavaTool.__init__(
1216 self,
1217 name="filter_bunches",
1218 java_class="org.hps.util.FilterMCBunches",
1219 append_tok="filt",
1220 **kwargs,
1221 )
1222
1223 def config(self, parser):
1224 """! Configure FilterBunches component."""
1225 super().config(parser)
1226 if self.hps_java_bin_jar is None:
1227 if os.getenv("HPS_JAVA_BIN_JAR", None) is not None:
1228 self.hps_java_bin_jar = os.getenv("HPS_JAVA_BIN_JAR", None)
1229 self.logger.debug(
1230 "Set HPS_JAVA_BIN_JAR from environment: {}".format(
1231 self.hps_java_bin_jar
1232 )
1233 )
1234
1235 def cmd_args(self):
1236 """!
1237 Setup command arguments.
1238 @return list of arguments
1239 """
1240 args = JavaTool.cmd_args(self)
1241 args.append("-e")
1242 args.append(str(self.filter_event_interval))
1243 for i in self.input_files():
1244 args.append(i)
1245 args.append(self.output_files()[0])
1246 if self.filter_ecal_pairs:
1247 args.append("-d")
1248 if self.filter_ecal_hit_ecut > 0:
1249 args.append("-E")
1250 args.append(str(self.filter_ecal_hit_ecut))
1251 if self.filter_nevents_read > 0:
1252 args.append("-n")
1253 args.append(str(self.filter_nevents_read))
1254 if self.filter_nevents_write > 0:
1255 args.append("-w")
1256 args.append(str(self.filter_nevents_write))
1257 if self.filter_no_cuts:
1258 args.append("-a")
1259 return args
1260
1261 def optional_parameters(self):
1262 """!
1263 Return list of optional parameters.
1264
1265 Optional parameters are: **filter_ecal_hit_ecut**, **filter_event_interval**,
1266 **filter_nevents_read**, **filter_nevents_write**, **filter_no_cuts** \n
1267 @return list of optional parameters
1268 """
1269 return [
1270 "filter_ecal_hit_ecut",
1271 "filter_event_interval",
1272 "filter_nevents_read",
1273 "filter_nevents_write",
1274 "filter_no_cuts",
1275 ]
1276
1277 def required_config(self):
1278 """!
1279 Return list of required config.
1280
1281 Required config are: **hps_java_bin_jar**
1282 @return list of required config
1283 """
1284 return ["hps_java_bin_jar"]
1285
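# Editorial usage sketch (not part of the original source): space filtered MC events
# by 250 empty bunches and apply an ECal hit energy cut of 0.05, with the filter_*
# options passed as the keyword arguments read in __init__ above. File names are
# placeholders and hps_java_bin_jar must still come from config() or the environment.
def _example_filter_bunches():
    filt = FilterBunches(filter_ecal_hit_ecut=0.05, filter_event_interval=250,
                         inputs=["tritrig.slcio"], outputs=["tritrig_filt.slcio"])
    return filt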
1286
1288 """!
1289 Apply hodo-hit filter and space MC events to process before readout.
1290
1291 The nevents parameter is not settable from JSON in this class. It should
1292 be supplied as an init argument in the job script if it needs to be
1293 customized (the default nevents and event_interval used to apply spacing
1294 should usually not need to be changed by the user). \n
1295
1296 Optional parameters are: **num_hodo_hits**, **event_interval**
1297 """
1298
1299 def __init__(self, **kwargs):
1300 if "num_hodo_hits" in kwargs:
1301 self.num_hodo_hits = kwargs["num_hodo_hits"]
1302 else:
1303 self.num_hodo_hits = 0
1304
1305 if "event_interval" in kwargs:
1306 self.event_interval = kwargs["event_interval"]
1307 else:
1308 self.event_interval = 250
1309
1310 JavaTool.__init__(
1311 self,
1312 name="filter_events",
1313 java_class="org.hps.util.ExtractEventsWithHitAtHodoEcal",
1314 append_tok="filt",
1315 **kwargs,
1316 )
1317
1318 def cmd_args(self):
1319 """!
1320 Setup command arguments.
1321 @return list of arguments
1322 """
1323 args = JavaTool.cmd_args(self)
1324 args.append("-e")
1325 args.append(str(self.event_interval))
1326 for i in self.input_files():
1327 args.append(i)
1328 args.append(self.output_files()[0])
1329 if self.num_hodo_hits > 0:
1330 args.append("-M")
1331 args.append(str(self.num_hodo_hits))
1332 if self.nevents:
1333 args.append("-w")
1334 args.append(str(self.nevents))
1335 return args
1336
1337 def optional_parameters(self):
1338 """!
1339 Return list of optional parameters.
1340
1341 Optional parameters are: **num_hodo_hits**, **event_interval**
1342 @return list of optional parameters
1343 """
1344 return ["num_hodo_hits", "event_interval"]
1345
1346
1347 class Unzip(Component):
1348 """!
1349 Unzip the input files to outputs.
1350 """
1351
1352 def __init__(self, **kwargs):
1353 Component.__init__(self, name="unzip", command="gunzip", **kwargs)
1354
1355 def output_files(self):
1356 """! Return list of output files."""
1357 if self.outputs:
1358 return self.outputs
1359 return [os.path.splitext(i)[0] for i in self.input_files()]
1360
1361 def execute(self, log_out, log_err):
1362 """! Execute Unzip component."""
1363 for i in range(0, len(self.input_files())):
1364 inputfile = self.input_files()[i]
1365 outputfile = self.output_files()[i]
1366 with gzip.open(inputfile, "rb") as in_file, open(
1367 outputfile, "wb"
1368 ) as out_file:
1369 shutil.copyfileobj(in_file, out_file)
1370 self.logger.debug("Unzipped '%s' to '%s'" % (inputfile, outputfile))
1371 return 0
1372
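# Editorial usage sketch (not part of the original source): each input is gunzipped
# next to it, with output_files() above dropping the ".gz" extension; the file name is
# a placeholder.
def _example_unzip():
    unzip = Unzip(inputs=["tritrig.stdhep.gz"])
    with open("unzip.log", "w") as out, open("unzip.err", "w") as err:
        unzip.execute(out, err)
    return unzip.output_files()  # ["tritrig.stdhep"]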
1373
1374 class LCIODumpEvent(Component):
1375 """!
1376 Dump LCIO event information.
1377
1378 Required parameters are: none \n
1379 Required config are: **lcio_dir**
1380 """
1381
1382 def __init__(self, **kwargs):
1383
1384 self.lcio_dir = None
1385 Component.__init__(self, name="lcio_dump_event", command="dumpevent", **kwargs)
1386
1387 if "event_num" in kwargs:
1388 self.event_num = kwargs["event_num"]
1389 else:
1390 self.event_num = 1
1391
1392 def config(self, parser):
1393 """! Configure LCIODumpEvent component."""
1394 super().config(parser)
1395 if self.lcio_dir is None:
1396 self.lcio_dir = self.hpsmc_dir
1397
1398 def setup(self):
1399 """! Setup LCIODumpEvent component."""
1400 self.command = self.lcio_dir + "/bin/dumpevent"
1401
1402 def cmd_args(self):
1403 """!
1404 Setup command arguments.
1405 @return list of arguments
1406 """
1407 if not len(self.input_files()):
1408 raise Exception("Missing required inputs for LCIODumpEvent.")
1409 args = []
1410 args.append(self.input_files()[0])
1411 args.append(str(self.event_num))
1412 return args
1413
1414 def required_config(self):
1415 """!
1416 Return list of required config.
1417
1418 Required config are: **lcio_dir**
1419 @return list of required config
1420 """
1421 return ["lcio_dir"]
1422
1423 def required_parameters(self):
1424 """!
1425 Return list of required parameters.
1426
1427 Required parameters are: none
1428 @return list of required parameters
1429 """
1430 return []
1431
1432
1433 class LHECount(Component):
1434 """!
1435 Count events in an LHE file.
1436 """
1437
1438 def __init__(self, minevents=0, fail_on_underflow=False, **kwargs):
1439 self.minevents = minevents
1440 self.fail_on_underflow = fail_on_underflow
1441 Component.__init__(self, name="lhe_count", **kwargs)
1441
1442 def setup(self):
1443 """! Setup LHECount component."""
1444 if not len(self.input_files()):
1445 raise Exception("Missing at least one input file.")
1446
1447 def cmd_exists(self):
1448 """!
1449 Check if command exists.
1450 @return True if command exists
1451 """
1452 return True
1453
1454 def execute(self, log_out, log_err):
1455 """! Execute LHECount component."""
1456 for i in self.inputs:
1457 with gzip.open(i, "rt") as in_file:
1458 lines = in_file.readlines()
1459
1460 nevents = 0
1461 for line in lines:
1462 if "<event>" in line:
1463 nevents += 1
1464
1465 print("LHE file '%s' has %d events." % (i, nevents))
1466
1467 if nevents < self.minevents:
1468 msg = "LHE file '%s' does not contain the minimum %d events." % (
1469 i,
1470 self.minevents,
1471 )
1472 if self.fail_on_underflow:
1473 raise Exception(msg)
1474 else:
1475 self.logger.warning(msg)
1476 return 0
1477
1478
1479 class TarFiles(Component):
1480 """!
1481 Tar files into an archive.
1482 """
1483
1484 def __init__(self, **kwargs):
1485 Component.__init__(self, name="tar_files", **kwargs)
1486
1487 def cmd_exists(self):
1488 """!
1489 Check if command exists.
1490 @return True if command exists
1491 """
1492 return True
1493
1494 def execute(self, log_out, log_err):
1495 """! Execute TarFiles component."""
1496 self.logger.debug("Opening '%s' for writing ..." % self.outputs[0])
1497 tar = tarfile.open(self.outputs[0], "w")
1498 for i in self.inputs:
1499 self.logger.debug("Adding '%s' to archive" % i)
1500 tar.add(i)
1501 tar.close()
1502 self.logger.info("Wrote archive '%s'" % self.outputs[0])
1503 return 0
1504
1505
1506 class MoveFiles(Component):
1507 """!
1508 Move input files to new locations.
1509 """
1510
1511 def __init__(self, **kwargs):
1512 Component.__init__(self, name="move_files", **kwargs)
1513
1514 def cmd_exists(self):
1515 """!
1516 Check if command exists.
1517 @return True if command exists
1518 """
1519 return True
1520
1521 def execute(self, log_out, log_err):
1522 """! Execute MoveFiles component."""
1523 if len(self.inputs) != len(self.outputs):
1524 raise Exception("Input and output lists are not the same length!")
1525 for io in zip(self.inputs, self.outputs):
1526 src = io[0]
1527 dest = io[1]
1528 self.logger.info("Moving %s -> %s" % (src, dest))
1529 shutil.move(src, dest)
1530 return 0
1531
1532
1533 class LCIOTool(Component):
1534 """!
1535 Generic component for LCIO tools.
1536
1537 Required parameters are: none \n
1538 Required config are: **lcio_bin_jar**
1539 """
1540
1541 def __init__(self, name=None, **kwargs):
1542
1543 self.lcio_bin_jar = None
1544 Component.__init__(self, name, command="java", **kwargs)
1545
1546 def config(self, parser):
1547 """! Configure LCIOTool component."""
1548 super().config(parser)
1549 if self.lcio_bin_jar is None:
1550 self.config_from_environ()
1551
1552 def cmd_args(self):
1553 """!
1554 Setup command arguments.
1555 @return list of arguments
1556 """
1557 if not self.name:
1558 raise Exception("Name required to write cmd args for LCIOTool.")
1559 return ["-jar", self.lcio_bin_jar, self.name]
1560
1561 def required_config(self):
1562 """!
1563 Return list of required config.
1564
1565 Required config are: **lcio_bin_jar**
1566 @return list of required config
1567 """
1568 return ["lcio_bin_jar"]
1569
1570 def required_parameters(self):
1571 """!
1572 Return list of required parameters.
1573
1574 Required parameters are: none
1575 @return list of required parameters
1576 """
1577 return []
1578
1579
1580 class LCIOConcat(LCIOTool):
1581 """!
1582 Concatenate LCIO files together.
1583 """
1584
1585 def __init__(self, **kwargs):
1586 LCIOTool.__init__(self, name="concat", **kwargs)
1587
1588 def cmd_args(self):
1589 """!
1590 Setup command arguments.
1591 @return list of arguments
1592 """
1593 args = LCIOTool.cmd_args(self)
1594 if not len(self.input_files()):
1595 raise Exception("Missing at least one input file.")
1596 if not len(self.output_files()):
1597 raise Exception("Missing an output file.")
1598 for i in self.input_files():
1599 args.extend(["-f", i])
1600 args.extend(["-o", self.outputs[0]])
1601 return args
1602
1603
1604 class LCIOCount(LCIOTool):
1605 """!
1606 Count events in LCIO files.
1607
1608 Required parameters are: none \n
1609 Optional parameters are: none
1610 """
1611
1612 def __init__(self, **kwargs):
1613 LCIOTool.__init__(self, name="count", **kwargs)
1614
1615 def cmd_args(self):
1616 """!
1617 Setup command arguments.
1618 @return list of arguments
1619 """
1620 args = LCIOTool.cmd_args(self)
1621 if not len(self.inputs):
1622 raise Exception("Missing an input file.")
1623 args.extend(["-f", self.inputs[0]])
1624 return args
1625
1626 def required_parameters(self):
1627 """!
1628 Return list of required parameters.
1629
1630 Required parameters are: none
1631 @return list of required parameters
1632 """
1633 return []
1634
1635 def optional_parameters(self):
1636 """!
1637 Return list of optional parameters.
1638
1639 Optional parameters are: none
1640 @return list of optional parameters
1641 """
1642 return []
1643
1644
1645 class LCIOMerge(LCIOTool):
1646 """!
1647 Merge LCIO files.
1648 """
1649
1650 def __init__(self, **kwargs):
1651 LCIOTool.__init__(self, name="merge", **kwargs)
1652
1653 def cmd_args(self):
1654 """!
1655 Setup command arguments.
1656 @return list of arguments
1657 """
1658 args = LCIOTool.cmd_args(self)
1659 if not len(self.input_files()):
1660 raise Exception("Missing at least one input file.")
1661 if not len(self.output_files()):
1662 raise Exception("Missing an output file.")
1663 for i in self.input_files():
1664 args.extend(["-f", i])
1665 args.extend(["-o", self.outputs[0]])
1666 if self.nevents is not None:
1667 args.extend(["-n", str(self.nevents)])
1668 return args
1669
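# Editorial usage sketch (not part of the original source): merge several LCIO files
# with an optional cap on the number of events. Requires lcio_bin_jar to be resolved
# by config(); file names are placeholders.
def _example_lcio_merge():
    merge = LCIOMerge(inputs=["part1.slcio", "part2.slcio"], outputs=["merged.slcio"])
    merge.nevents = 10000  # optional limit, added as "-n" by cmd_args() above
    return merge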
1670
1671"""
1672MergeROOT tool for hps-mc
1673Merges ROOT files using hadd with validation
1674"""
1675
1676
1677 class MergeROOT(Component):
1678 """
1679 Merge ROOT files using hadd with event count validation.
1680
1681 This component uses ROOT's hadd utility to merge multiple ROOT files
1682 into a single output file, and validates that all events are preserved.
1683 """
1684
1685 def __init__(self, **kwargs):
1686 """
1687 Initialize MergeROOT component.
1688
1689 Parameters
1690 ----------
1691 inputs : list
1692 List of input ROOT files to merge
1693 outputs : list
1694 List containing the output merged ROOT file name
1695 force : bool, optional
1696 Force overwrite of output file (default: True)
1697 compression : int, optional
1698 Compression level for output file (0-9, default: None uses hadd default)
1699 validate : bool, optional
1700 Validate event counts after merge (default: True)
1701 write_stats : bool, optional
1702 Write JSON stats file after merge (default: True when validate=True)
1703 job_id : int, optional
1704 Job ID to include in stats output
1705 """
1706 Component.__init__(self, **kwargs)
1707
1708 # Set default command
1709 if not hasattr(self, "command") or self.command is None:
1710 self.command = "hadd"
1711
1712 # Set force overwrite by default
1713 if not hasattr(self, "force"):
1714 self.force = True
1715
1716 # Optional compression level
1717 if not hasattr(self, "compression"):
1718 self.compression = None
1719
1720 # Enable validation by default
1721 if not hasattr(self, "validate"):
1722 self.validate = True
1723
1724 # Write stats JSON (default: True when validate=True)
1725 if not hasattr(self, "write_stats"):
1726 self.write_stats = self.validate
1727
1728 # Optional job ID for stats output
1729 if not hasattr(self, "job_id"):
1730 self.job_id = None
1731
1732 # Store event counts
1733 self.input_tree_counts = {}
1734 self.output_tree_counts = {}
1735
1736 # Track validation result
1737 self._validation_passed = None
1738
1739 def cmd_args(self):
1740 """
1741 Build command line arguments for hadd.
1742
1743 Returns
1744 -------
1745 list
1746 List of command arguments
1747 """
1748 import sys
1749 sys.stderr.write("MergeROOT DEBUG: cmd_args() called\n")
1750 sys.stderr.write(" self.force=%s, self.compression=%s\n" % (self.force, self.compression))
1751 sys.stderr.write(" self.inputs=%s\n" % self.inputs)
1752 sys.stderr.write(" self.outputs=%s\n" % self.outputs)
1753 sys.stderr.flush()
1754
1755 args = []
1756
1757 # Add force flag if enabled
1758 if self.force:
1759 args.append("-f")
1760
1761 # Add compression level if specified
1762 if self.compression is not None:
1763 args.extend(["-fk", "-f%d" % self.compression])
1764
1765 # Add output file
1766 if self.outputs and len(self.outputs) > 0:
1767 args.append(self.outputs[0])
1768 else:
1769 sys.stderr.write("MergeROOT DEBUG: ERROR - No output file specified!\n")
1770 sys.stderr.flush()
1771 raise RuntimeError("MergeROOT: No output file specified")
1772
1773 # Add input files
1774 if self.inputs and len(self.inputs) > 0:
1775 args.extend(self.inputs)
1776 else:
1777 sys.stderr.write("MergeROOT DEBUG: ERROR - No input files specified!\n")
1778 sys.stderr.flush()
1779 raise RuntimeError("MergeROOT: No input files specified")
1780
1781 sys.stderr.write("MergeROOT DEBUG: cmd_args() returning: %s\n" % args)
1782 sys.stderr.flush()
1783 return args
1784
1785 def scan_root_file(self, filename, log_out=None):
1786 """
1787 Scan a ROOT file and extract TTree event counts.
1788
1789 Parameters
1790 ----------
1791 filename : str
1792 Path to ROOT file
1793 log_out : file, optional
1794 Log file for output (used to report multiple key cycles)
1795
1796 Returns
1797 -------
1798 dict
1799 Dictionary mapping tree names to entry counts
1800 """
1801 try:
1802 import ROOT
1803 except ImportError:
1804 raise RuntimeError(
1805 "MergeROOT: PyROOT is required for validation but not available"
1806 )
1807
1808 tree_counts = {}
1809 tree_cycles = {} # Track cycle numbers: {tree_name: [(cycle, entries), ...]}
1810
1811 # Open ROOT file
1812 root_file = ROOT.TFile.Open(filename, "READ")
1813 if not root_file or root_file.IsZombie():
1814 raise RuntimeError("MergeROOT: Cannot open ROOT file: %s" % filename)
1815
1816 # Iterate through all keys in the file
1817 for key in root_file.GetListOfKeys():
1818 obj = key.ReadObj()
1819
1820 # Check if it's a TTree
1821 if obj.InheritsFrom("TTree"):
1822 tree_name = obj.GetName()
1823 cycle = key.GetCycle()
1824 num_entries = obj.GetEntries()
1825
1826 if tree_name not in tree_cycles:
1827 tree_cycles[tree_name] = []
1828 tree_cycles[tree_name].append((cycle, num_entries))
1829
1830 root_file.Close()
1831
1832 # Process collected cycles - use highest cycle number for each tree
1833 for tree_name, cycles in tree_cycles.items():
1834 if len(cycles) > 1:
1835 # Sort by cycle number (highest first)
1836 cycles.sort(key=lambda x: x[0], reverse=True)
1837 highest_cycle, highest_entries = cycles[0]
1838 if log_out:
1839 log_out.write(" WARNING: Multiple key cycles found for tree '%s':\n" % tree_name)
1840 for cyc, ent in cycles:
1841 marker = " <-- using" if cyc == highest_cycle else ""
1842 log_out.write(" Cycle %d: %d entries%s\n" % (cyc, ent, marker))
1843 tree_counts[tree_name] = highest_entries
1844 else:
1845 tree_counts[tree_name] = cycles[0][1]
1846
1847 return tree_counts
1848
1849 def scan_input_files(self, log_out):
1850 """
1851 Scan all input files and store tree event counts.
1852
1853 Parameters
1854 ----------
1855 log_out : file
1856 Log file for output
1857 """
1858 log_out.write("\n" + "=" * 70 + "\n")
1859 log_out.write("MergeROOT: Scanning input files for TTrees\n")
1860 log_out.write("=" * 70 + "\n")
1861
1862 for input_file in self.inputs:
1863 if not os.path.exists(input_file):
1864 raise RuntimeError("MergeROOT: Input file not found: %s" % input_file)
1865
1866 log_out.write("\nScanning: %s\n" % input_file)
1867 tree_counts = self.scan_root_file(input_file, log_out)
1868
1869 if not tree_counts:
1870 log_out.write(" WARNING: No TTrees found in this file\n")
1871 else:
1872 for tree_name, count in tree_counts.items():
1873 log_out.write(" Tree '%s': %d events\n" % (tree_name, count))
1874
1875 self.input_tree_counts[input_file] = tree_counts
1876
1877 log_out.write("\n" + "=" * 70 + "\n")
1878 log_out.flush()
1879
1880 def scan_output_file(self, log_out):
1881 """
1882 Scan output file and store tree event counts.
1883
1884 Parameters
1885 ----------
1886 log_out : file
1887 Log file for output
1888 """
1889 output_file = self.outputs[0]
1890
1891 log_out.write("\n" + "=" * 70 + "\n")
1892 log_out.write("MergeROOT: Scanning output file for TTrees\n")
1893 log_out.write("=" * 70 + "\n")
1894 log_out.write("\nScanning: %s\n" % output_file)
1895
1896 self.output_tree_counts = self.scan_root_file(output_file, log_out)
1897
1898 if not self.output_tree_counts:
1899 log_out.write(" WARNING: No TTrees found in output file\n")
1900 else:
1901 for tree_name, count in self.output_tree_counts.items():
1902 log_out.write(" Tree '%s': %d events\n" % (tree_name, count))
1903
1904 log_out.write("\n" + "=" * 70 + "\n")
1905 log_out.flush()
1906
1907 def validate_merge(self, log_out):
1908 """
1909 Validate that event counts match between input and output files.
1910
1911 Parameters
1912 ----------
1913 log_out : file
1914 Log file for output
1915
1916 Returns
1917 -------
1918 bool
1919 True if validation passes, False otherwise
1920 """
1921 log_out.write("\n" + "=" * 70 + "\n")
1922 log_out.write("MergeROOT: Validating merge results\n")
1923 log_out.write("=" * 70 + "\n\n")
1924
1925 # Calculate sum of events per tree across all input files
1926 total_input_counts = {}
1927
1928 for input_file, tree_counts in self.input_tree_counts.items():
1929 for tree_name, count in tree_counts.items():
1930 if tree_name not in total_input_counts:
1931 total_input_counts[tree_name] = 0
1932 total_input_counts[tree_name] += count
1933
1934 # Check that all input trees are in output
1935 all_valid = True
1936
1937 if not total_input_counts:
1938 log_out.write("WARNING: No TTrees found in input files\n")
1939 return True
1940
1941 log_out.write("Event count validation:\n")
1942 log_out.write("-" * 70 + "\n")
1943 log_out.write(
1944 "%-30s %15s %15s %10s\n"
1945 % ("Tree Name", "Input Events", "Output Events", "Status")
1946 )
1947 log_out.write("-" * 70 + "\n")
1948
1949 for tree_name, input_count in sorted(total_input_counts.items()):
1950 output_count = self.output_tree_counts.get(tree_name, 0)
1951
1952 if output_count == input_count:
1953 status = "✓ PASS"
1954 else:
1955 status = "✗ FAIL"
1956 all_valid = False
1957
1958 log_out.write(
1959 "%-30s %15d %15d %10s\n"
1960 % (tree_name, input_count, output_count, status)
1961 )
1962
1963 # Check for trees in output that weren't in input
1964 extra_trees = set(self.output_tree_counts.keys()) - set(
1965 total_input_counts.keys()
1966 )
1967 if extra_trees:
1968 log_out.write("\nWARNING: Output contains trees not found in inputs:\n")
1969 for tree_name in extra_trees:
1970 log_out.write(
1971 " - %s: %d events\n"
1972 % (tree_name, self.output_tree_counts[tree_name])
1973 )
1974
1975 log_out.write("-" * 70 + "\n")
1976
1977 if all_valid:
1978 log_out.write("\n✓ VALIDATION PASSED: All event counts match!\n")
1979 else:
1980 log_out.write("\n✗ VALIDATION FAILED: Event count mismatch detected!\n")
1981
1982 log_out.write("=" * 70 + "\n\n")
1983 log_out.flush()
1984
1985 return all_valid
1986
1987 def print_summary(self, log_out):
1988 """
1989 Print a summary of the merge operation.
1990
1991 Parameters
1992 ----------
1993 log_out : file
1994 Log file for output
1995 """
1996 log_out.write("\n" + "=" * 70 + "\n")
1997 log_out.write("MergeROOT: Summary\n")
1998 log_out.write("=" * 70 + "\n")
1999 log_out.write("Input files: %d\n" % len(self.inputs))
2000
2001 for i, input_file in enumerate(self.inputs, 1):
2002 log_out.write(" %d. %s\n" % (i, input_file))
2003
2004 log_out.write("\nOutput file: %s\n" % self.outputs[0])
2005 log_out.write(
2006 "Compression level: %s\n"
2007 % (self.compression if self.compression else "default")
2008 )
2009
2010 # Print total events per tree
2011 if self.output_tree_counts:
2012 log_out.write("\nTotal events in merged file:\n")
2013 for tree_name, count in sorted(self.output_tree_counts.items()):
2014 log_out.write(" %-30s: %d events\n" % (tree_name, count))
2015
2016 log_out.write("=" * 70 + "\n")
2017 log_out.flush()
2018
2019 def get_stats_filename(self):
2020 """
2021 Get the stats JSON filename based on the output ROOT filename.
2022
2023 Returns
2024 -------
2025 str
2026 Path to stats JSON file (e.g., 'merged_X_job1.root' -> 'merged_X_job1_stats.json')
2027 """
2028 if not self.outputs or len(self.outputs) == 0:
2029 return None
2030 output_file = self.outputs[0]
2031 base, _ = os.path.splitext(output_file)
2032 return base + "_stats.json"
2033
2034 def write_stats_json(self, log_out, validation_passed):
2035 """
2036 Write merge statistics to a JSON file.
2037
2038 Parameters
2039 ----------
2040 log_out : file
2041 Log file for output
2042 validation_passed : bool
2043 Whether the validation passed
2044 """
2045 stats_file = self.get_stats_filename()
2046 if stats_file is None:
2047 log_out.write("WARNING: Cannot determine stats filename, skipping stats output\n")
2048 return
2049
2050 log_out.write("\n" + "=" * 70 + "\n")
2051 log_out.write("MergeROOT: Writing stats to %s\n" % stats_file)
2052 log_out.write("=" * 70 + "\n")
2053
2054 # Calculate total input events per tree
2055 total_input_events = {}
2056 for input_file, tree_counts in self.input_tree_counts.items():
2057 for tree_name, count in tree_counts.items():
2058 if tree_name not in total_input_events:
2059 total_input_events[tree_name] = 0
2060 total_input_events[tree_name] += count
2061
2062 # Build input files list with event counts
2063 input_files_list = []
2064 for input_file in self.inputs:
2065 tree_counts = self.input_tree_counts.get(input_file, {})
2066 input_files_list.append({
2067 "path": input_file,
2068 "events": tree_counts
2069 })
2070
2071 # Build stats dictionary
2072 stats = {
2073 "job_id": self.job_id,
2074 "output_file": self.outputs[0] if self.outputs else None,
2075 "output_events": self.output_tree_counts,
2076 "input_files": input_files_list,
2077 "total_input_events": total_input_events,
2078 "validation_passed": validation_passed,
2079 "num_input_files": len(self.inputs)
2080 }
2081
2082 # Write JSON file
2083 with open(stats_file, 'w') as f:
2084 json.dump(stats, f, indent=2)
2085
2086 log_out.write("Stats written successfully\n")
2087 log_out.write("=" * 70 + "\n")
2088 log_out.flush()
2089
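    # Editorial sketch (not part of the original source): an illustrative class-level
    # constant showing the shape of the stats JSON written above, with placeholder
    # values; "HPS_Event" is a hypothetical tree name.
    EXAMPLE_STATS_SHAPE = {
        "job_id": 1,
        "output_file": "merged_job1.root",
        "output_events": {"HPS_Event": 20000},
        "input_files": [
            {"path": "part1.root", "events": {"HPS_Event": 10000}},
            {"path": "part2.root", "events": {"HPS_Event": 10000}},
        ],
        "total_input_events": {"HPS_Event": 20000},
        "validation_passed": True,
        "num_input_files": 2,
    }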
2090 def execute(self, log_out, log_err):
2091 """
2092 Execute MergeROOT component using hadd.
2093
2094 Parameters
2095 ----------
2096 log_out : file
2097 Log file for stdout
2098 log_err : file
2099 Log file for stderr
2100
2101 Returns
2102 -------
2103 int
2104 Return code from hadd command
2105 """
2106 # Debug: Entry point
2107 log_out.write("\n" + "=" * 70 + "\n")
2108 log_out.write("MergeROOT: DEBUG - Entering execute()\n")
2109 log_out.write("=" * 70 + "\n")
2110 log_out.write("DEBUG: self.command = %s\n" % self.command)
2111 log_out.write("DEBUG: self.inputs = %s\n" % self.inputs)
2112 log_out.write("DEBUG: self.outputs = %s\n" % self.outputs)
2113 log_out.write("DEBUG: self.force = %s\n" % self.force)
2114 log_out.write("DEBUG: self.compression = %s\n" % self.compression)
2115 log_out.write("DEBUG: self.validate = %s\n" % self.validate)
2116 log_out.flush()
2117
2118 # Check that hadd command exists
2119 log_out.write("\nDEBUG: Checking if hadd command exists...\n")
2120 log_out.flush()
2121 if not self.cmd_exists():
2122 raise RuntimeError("MergeROOT: hadd command not found in PATH")
2123 log_out.write("DEBUG: hadd command found\n")
2124 log_out.flush()
2125
2126 # Check that input files exist
2127 log_out.write("\nDEBUG: Checking input files exist...\n")
2128 log_out.flush()
2129 for input_file in self.inputs:
2130 log_out.write("DEBUG: Checking: %s\n" % input_file)
2131 log_out.flush()
2132 if not os.path.exists(input_file):
2133 raise RuntimeError("MergeROOT: Input file not found: %s" % input_file)
2134 log_out.write("DEBUG: -> exists (size: %d bytes)\n" % os.path.getsize(input_file))
2135 log_out.flush()
2136
2137 # Scan input files before merge if validation is enabled
2138 log_out.write("\nDEBUG: Validation enabled = %s\n" % self.validate)
2139 log_out.flush()
2140 if self.validate:
2141 try:
2142 log_out.write("DEBUG: Starting input file scan...\n")
2143 log_out.flush()
2144 self.scan_input_files(log_out)
2145 log_out.write("DEBUG: Input file scan complete\n")
2146 log_out.flush()
2147 except Exception as e:
2148 log_out.write("\nWARNING: Could not scan input files: %s\n" % str(e))
2149 log_out.write("Proceeding with merge without validation.\n")
2150 self.validate = False
2151
2152 # Build full command
2153 log_out.write("\nDEBUG: Building command arguments...\n")
2154 log_out.flush()
2155 cmd = [self.command] + self.cmd_args()
2156 log_out.write("DEBUG: cmd_args() returned: %s\n" % self.cmd_args())
2157 log_out.flush()
2158
2159 # Log the command
2160 log_out.write("\n" + "=" * 70 + "\n")
2161 log_out.write("MergeROOT: Executing hadd\n")
2162 log_out.write("=" * 70 + "\n")
2163 log_out.write("Command: %s\n" % " ".join(cmd))
2164 log_out.write("=" * 70 + "\n\n")
2165 log_out.flush()
2166
2167 # Execute hadd
2168 log_out.write("DEBUG: About to call subprocess.Popen...\n")
2169 log_out.flush()
2170 proc = subprocess.Popen(cmd, stdout=log_out, stderr=log_err)
2171 log_out.write("DEBUG: Popen returned, PID = %s\n" % proc.pid)
2172 log_out.flush()
2173 log_out.write("DEBUG: Waiting for process to complete...\n")
2174 log_out.flush()
2175 proc.wait()
2176 log_out.write("DEBUG: Process completed, returncode = %d\n" % proc.returncode)
2177 log_out.flush()
2178
2179 # Check return code
2180 if proc.returncode != 0:
2181 log_out.write("DEBUG: hadd FAILED with return code %d\n" % proc.returncode)
2182 log_out.flush()
2183 raise RuntimeError(
2184 "MergeROOT: hadd failed with return code %d" % proc.returncode
2185 )
2186
2187 # Verify output file was created
2188 log_out.write("DEBUG: Checking if output file exists: %s\n" % self.outputs[0])
2189 log_out.flush()
2190 if not os.path.exists(self.outputs[0]):
2191 raise RuntimeError(
2192 "MergeROOT: Output file was not created: %s" % self.outputs[0]
2193 )
2194 log_out.write("DEBUG: Output file exists, size = %d bytes\n" % os.path.getsize(self.outputs[0]))
2195 log_out.flush()
2196
2197 log_out.write("\n✓ hadd completed successfully\n")
2198 log_out.flush()
2199
2200 # Scan output file and validate if enabled
2201 log_out.write("\nDEBUG: Post-merge validation check, self.validate = %s\n" % self.validate)
2202 log_out.flush()
2203 validation_passed = True
2204 if self.validate:
2205 try:
2206 log_out.write("DEBUG: Starting output file scan...\n")
2207 log_out.flush()
2208 self.scan_output_file(log_out)
2209 log_out.write("DEBUG: Output file scan complete\n")
2210 log_out.flush()
2211 log_out.write("DEBUG: Starting merge validation...\n")
2212 log_out.flush()
2213 validation_passed = self.validate_merge(log_out)
2214 self._validation_passed = validation_passed
2215 log_out.write("DEBUG: Merge validation complete, passed = %s\n" % validation_passed)
2216 log_out.flush()
2217
2218 if not validation_passed:
2219 raise RuntimeError("MergeROOT: Event count validation failed!")
2220
2221 except Exception as e:
2222 log_out.write("\nERROR during validation: %s\n" % str(e))
2223 log_out.flush()
2224 raise
2225
2226 # Write stats JSON if enabled
2227 log_out.write("\nDEBUG: write_stats = %s\n" % self.write_stats)
2228 log_out.flush()
2229 if self.write_stats:
2230 try:
2231 self.write_stats_json(log_out, validation_passed)
2232 except Exception as e:
2233 log_out.write("\nWARNING: Could not write stats JSON: %s\n" % str(e))
2234 log_out.flush()
2235
2236 # Print summary
2237 log_out.write("\nDEBUG: Printing summary...\n")
2238 log_out.flush()
2239 self.print_summary(log_out)
2240
2241 log_out.write("\nDEBUG: MergeROOT.execute() returning %d\n" % proc.returncode)
2242 log_out.flush()
2243 return proc.returncode
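# --- Illustrative sketch (not part of tools.py) -----------------------------
# execute() shells out to ROOT's hadd via subprocess.  Assuming cmd_args()
# (defined earlier in this class) expands the force flag and the output and
# input file names in the usual way, the logged command would resemble:
#
#   hadd -f merged_X_job1.root in_0.root in_1.root
#
# where -f lets hadd overwrite an existing output file; validation then
# re-reads the per-tree entry counts from the merged file.
# -----------------------------------------------------------------------------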
2244
2245 def output_files(self):
2246 """
2247 Return list of output files.
2248
2249 Returns
2250 -------
2251 list
2252 List containing the merged output ROOT file and optionally the stats JSON
2253 """
2254 files = list(self.outputs) if self.outputs else []
2255 if self.write_stats:
2256 stats_file = self.get_stats_filename()
2257 if stats_file and stats_file not in files:
2258 files.append(stats_file)
2259 return files
2260
2262 """
2263 Return list of required configuration parameters.
2264
2265 Returns
2266 -------
2267 list
2268 List of required config parameters (empty for MergeROOT)
2269 """
2270 return []
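# --- Illustrative usage sketch (not part of tools.py) ------------------------
# Components are normally wired up by an HPSMC job script, but a minimal,
# hypothetical standalone driver could look like this (the file names and the
# direct attribute assignments are assumptions for illustration only):
#
#   merge = MergeROOT()
#   merge.inputs = ['in_0.root', 'in_1.root']
#   merge.outputs = ['merged_X_job1.root']
#   with open('merge.log', 'w') as log_out, open('merge.err', 'w') as log_err:
#       rc = merge.execute(log_out, log_err)
#   # With write_stats enabled, merge.output_files() would also list
#   # 'merged_X_job1_stats.json' alongside the merged ROOT file.
# ------------------------------------------------------------------------------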