from subprocess import Popen, PIPE, STDOUT
def sanitize_for_xml(data):
    '''
    Take a string with invalid ASCII/UTF characters and quote them so that the
    string can be used in an XML text.

    >>> sanitize_for_xml('this is \x1b')
    'this is [NON-XML-CHAR-0x1B]'
    '''
    bad_chars = re.compile(
        u'[\x00-\x08\x0b\x0c\x0e-\x1F\uD800-\uDFFF\uFFFE\uFFFF]')

    def quote(match):
        # replace every character of the match with a printable token
        return ''.join('[NON-XML-CHAR-0x%2X]' % ord(c)
                       for c in match.group())

    return bad_chars.sub(quote, data)
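# A minimal usage sketch (illustrative, not part of the original module): the
# sanitized string can be embedded safely in an XML test report.
#
#     >>> sanitize_for_xml('ok\x1fnow')
#     'ok[NON-XML-CHAR-0x1F]now'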
    '''helper to debug GAUDI-1084, dump the list of processes'''
    from getpass import getuser
    if 'WORKSPACE' in os.environ:
        p = Popen(['ps', '-fH', '-U', getuser()], stdout=PIPE)
        with open(os.path.join(os.environ['WORKSPACE'], name), 'w') as f:
            f.write(p.communicate()[0])
    '''
    Send a signal to a process and all its child processes (starting from the
    leaves).
    '''
    log = logging.getLogger('kill_tree')
    ps_cmd = ['ps', '--no-headers', '-o', 'pid', '--ppid', str(ppid)]
    get_children = Popen(ps_cmd, stdout=PIPE, stderr=PIPE)
    children = map(int, get_children.communicate()[0].split())
    # kill the children first, then the parent
    for child in children:
        kill_tree(child, sig)
    try:
        log.debug('killing process %d', ppid)
        os.kill(ppid, sig)
    except OSError as err:
        if err.errno != 3:  # No such process
            raise
        log.debug('no such process %d', ppid)
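# Usage sketch (an illustration, not from the original source): terminate a
# test process together with its whole subtree, e.g. after a timeout.
#
#     import signal
#     kill_tree(self.proc.pid, signal.SIGTERM)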
        logging.debug('running test %s', self.name)

        if re.search(r'from\s+Gaudi.Configuration\s+import\s+\*|'
                     r'from\s+Configurables\s+import', self.options):
            optionFile = tempfile.NamedTemporaryFile(suffix='.py')
        else:
            optionFile = tempfile.NamedTemporaryFile(suffix='.opts')
        optionFile.file.write(self.options)
        self.environment = dict(
            self.environment.items() + os.environ.items())
        platform_id = (os.environ.get('BINARY_TAG') or
                       os.environ.get('CMTCONFIG') or
                       platform.platform())
        # the test is skipped if one of the unsupported-platform regexps
        # matches the current platform id
        skip_test = bool([None
                          for prex in self.unsupported_platforms
                          if re.search(prex, platform_id)])
        workdir = tempfile.mkdtemp()

        if self.program:
            prog = self.program
        elif "GAUDIEXE" in os.environ:
            prog = os.environ["GAUDIEXE"]

        dummy, prog_ext = os.path.splitext(prog)
        if prog_ext not in [".exe", ".py", ".bat"]:
            prog += ".exe"
            prog_ext = ".exe"

        prog = which(prog) or prog

        args = map(RationalizePath, self.args)

        if prog_ext == ".py":
            params = ['python', RationalizePath(prog)] + args
        else:
            params = [RationalizePath(prog)] + args
        validatorRes = Result({'CAUSE': None, 'EXCEPTION': None,
                               'RESOURCE': None, 'TARGET': None,
                               'TRACEBACK': None, 'START_TIME': None,
                               'END_TIME': None, 'TIMEOUT_DETAIL': None})
        self.result = validatorRes
        def target():
            logging.debug('executing %r in %s', params, workdir)
            self.proc = Popen(params, stdout=PIPE, stderr=PIPE,
                              env=self.environment)
            logging.debug('(pid: %d)', self.proc.pid)
            self.out, self.err = self.proc.communicate()
        thread = threading.Thread(target=target)
        thread.start()
        # wait until the process finishes or the timeout expires
        thread.join(self.timeout)

        if thread.is_alive():
            logging.debug('time out in test %s (pid %d)',
                          self.name, self.proc.pid)
            # try to get the stack trace of the stuck process
            cmd = ['gdb', '--pid', str(self.proc.pid), '--batch',
                   '--eval-command=thread apply all backtrace']
            gdb = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=STDOUT)
            self.stack_trace = gdb.communicate()[0]

            kill_tree(self.proc.pid, signal.SIGTERM)
            thread.join(60)
            if thread.is_alive():
                kill_tree(self.proc.pid, signal.SIGKILL)
            self.causes.append('timeout')
        else:
            logging.debug('completed test %s', self.name)

        logging.debug('returnedCode = %s', self.proc.returncode)

        logging.debug('validating test...')
        shutil.rmtree(workdir, True)
        # flag any unexpected termination as an 'exit code' failure
        if self.signal is not None:
            # the process was terminated by a signal
            self.causes.append('exit code')
        elif self.returnedCode != 0:
            self.causes.append('exit code')
        logging.debug('%s: %s', self.name, self.status)
        field_mapping = {'Exit Code': 'returnedCode',
                         'Environment': 'environment',
                         'Program Name': 'program',
                         'Validator': 'validator',
                         'Output Reference File': 'reference',
                         'Error Reference File': 'error_reference',
                         'Unsupported Platforms': 'unsupported_platforms',
                         'Stack Trace': 'stack_trace'}
        resultDict = [(key, getattr(self, attr))
                      for key, attr in field_mapping.iteritems()
                      if getattr(self, attr)]
        resultDict.append(('Working Directory',
                           RationalizePath(os.path.join(os.getcwd(),
                                                        self.workdir))))
        resultDict.extend(self.result.annotations.iteritems())
        return dict(resultDict)
        elif stderr.strip() != self.stderr.strip():
            self.causes.append('standard error')
        return result, self.causes

    def findReferenceBlock(self, reference=None, stdout=None, result=None,
                           causes=None, signature_offset=0, signature=None,
                           id=None):
        """
        Given a block of text, tries to find it in the output. The block has
        to be identified by a signature line. By default, the first line is
        used as the signature, or the line pointed to by signature_offset. If
        signature_offset points outside the block, a signature line can be
        passed as the signature argument. Note: if 'signature' is None (the
        default), a negative signature_offset is interpreted as an index in a
        list (e.g. -1 means the last line), otherwise it is interpreted as the
        number of lines before the first one of the block at which the
        signature must appear. The parameter 'id' allows to distinguish
        between different calls to this function in the same validation code.
        """
        if reference is None:
            reference = self.reference

        reflines = filter(
            None, map(lambda s: s.rstrip(), reference.splitlines()))
        if not reflines:
            raise RuntimeError("Empty (or null) reference")
        # split the output into non-empty, right-stripped lines
        outlines = filter(
            None, map(lambda s: s.rstrip(), stdout.splitlines()))

        res_field = "GaudiTest.RefBlock"
        if id:
            res_field += "_%s" % id

        if signature is None:
            if signature_offset < 0:
                signature_offset = len(reference) + signature_offset
            signature = reflines[signature_offset]
        # find the reference block in the output
        try:
            pos = outlines.index(signature)
            outlines = outlines[pos - signature_offset:pos +
                                len(reflines) - signature_offset]
            if reflines != outlines:
                msg = "standard output"
                # avoid adding the same cause twice if the function is called
                # more than once
                if msg not in causes:
                    causes.append(msg)
                result[res_field +
                       ".observed"] = result.Quote("\n".join(outlines))
        except ValueError:
            causes.append("missing signature")
        result[res_field + ".signature"] = result.Quote(signature)
        if len(reflines) > 1 or signature != reflines[0]:
            result[res_field +
                   ".expected"] = result.Quote("\n".join(reflines))
    def countErrorLines(self, expected={'ERROR': 0, 'FATAL': 0}, stdout=None,
                        result=None, causes=None):
        """
        Count the number of messages with the required severity (by default
        ERROR and FATAL) and check if their numbers match the expected ones
        (0 by default). The dictionary "expected" can be used to tune the
        number of errors and fatals allowed, or to limit the number of
        expected warnings etc.
        """
        # map each severity we look for to the list of matching lines
        errors = dict((sev, []) for sev in expected)

        outlines = stdout.splitlines()
        from math import log10
        fmt = "%%%dd - %%s" % (int(log10(len(outlines) + 1)))

        linecount = 0
        for l in outlines:
            linecount += 1
            words = l.split()
            if len(words) >= 2 and words[1] in errors:
                errors[words[1]].append(fmt % (linecount, l.rstrip()))

        for e in errors:
            if len(errors[e]) != expected[e]:
                causes.append('%s(%d)' % (e, len(errors[e])))
                result["GaudiTest.lines.%s" %
                       e] = result.Quote('\n'.join(errors[e]))
                result["GaudiTest.lines.%s.expected#" %
                       e] = result.Quote(str(expected[e]))
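    # Usage sketch (illustrative): allow exactly two ERROR messages and no
    # FATAL messages in the captured output of a test.
    #
    #     countErrorLines(expected={'ERROR': 2, 'FATAL': 0},
    #                     stdout=stdout, result=result, causes=causes)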
    def CheckTTreesSummaries(self, stdout=None, result=None, causes=None,
                             trees_dict=None,
                             ignore=r"Basket|.*size|Compression"):
        """
        Compare the TTree summaries in stdout with the ones in trees_dict or
        in the reference file. By default ignore the size, compression and
        basket fields.
        The presence of TTree summaries when none is expected is not a
        failure.
        """
        if trees_dict is None:
            lreference = self._expandReferenceFileName(self.reference)
            if lreference and os.path.isfile(lreference):
                trees_dict = findTTreeSummaries(open(lreference).read())
            else:
                trees_dict = {}

        from pprint import PrettyPrinter
        pp = PrettyPrinter()
        if trees_dict:
            result["GaudiTest.TTrees.expected"] = result.Quote(
                pp.pformat(trees_dict))
            if ignore:
                result["GaudiTest.TTrees.ignore"] = result.Quote(ignore)

        trees = findTTreeSummaries(stdout)
        failed = cmpTreesDicts(trees_dict, trees, ignore)
        if failed:
            causes.append("trees summaries")
            msg = "%s: %s != %s" % getCmpFailingValues(
                trees_dict, trees, failed)
            result["GaudiTest.TTrees.failure_on"] = result.Quote(msg)
            result["GaudiTest.TTrees.found"] = result.Quote(pp.pformat(trees))
    def CheckHistosSummaries(self, stdout=None, result=None, causes=None,
                             dict=None, ignore=None):
        """
        Compare the histogram summaries in stdout with the ones in dict or in
        the reference file.
        The presence of histogram summaries when none is expected is not a
        failure.
        """
        if dict is None:
            lreference = self._expandReferenceFileName(self.reference)
            if lreference and os.path.isfile(lreference):
                dict = findHistosSummaries(open(lreference).read())
            else:
                dict = {}

        from pprint import PrettyPrinter
        pp = PrettyPrinter()
        if dict:
            result["GaudiTest.Histos.expected"] = result.Quote(
                pp.pformat(dict))
            if ignore:
                result["GaudiTest.Histos.ignore"] = result.Quote(ignore)

        histos = findHistosSummaries(stdout)
        failed = cmpTreesDicts(dict, histos, ignore)
        if failed:
            causes.append("histos summaries")
            msg = "%s: %s != %s" % getCmpFailingValues(dict, histos, failed)
            result["GaudiTest.Histos.failure_on"] = result.Quote(msg)
            result["GaudiTest.Histos.found"] = result.Quote(pp.pformat(histos))
    def validateWithReference(self, stdout=None, stderr=None, result=None,
                              causes=None, preproc=None):
        """
        Default validation action: compare standard output and error to the
        reference files.
        """
        if preproc is None:
            preproc = normalizeExamples

        # check standard output against its reference file
        lreference = self._expandReferenceFileName(self.reference)
        if lreference and os.path.isfile(lreference):
            causes += ReferenceFileValidator(
                lreference, "standard output", "ExecTest.expected_stdout",
                preproc=preproc)(stdout, result)
        elif lreference:
            causes += ["missing reference file"]
        if causes and lreference:
            # the test failed: dump the current output as a candidate new
            # reference next to the old one
            newref = open(lreference + ".new", "w")
            for l in stdout.splitlines():
                newref.write(l.rstrip() + '\n')

        # check standard error against its reference file, if any
        lreference = self._expandReferenceFileName(self.error_reference)
        if lreference and os.path.isfile(lreference):
            newcauses = ReferenceFileValidator(
                lreference, "standard error", "ExecTest.expected_stderr",
                preproc=preproc)(stderr, result)
            causes += newcauses
            if newcauses:
                newref = open(lreference + ".new", "w")
                for l in stderr.splitlines():
                    newref.write(l.rstrip() + '\n')
        else:
            causes += BasicOutputValidator(
                self.stderr, "standard error",
                "ExecTest.expected_stderr")(stderr, result)

        return causes
    def _expandReferenceFileName(self, reffile):
        def platformSplit(p):
            delim = re.compile('-' in p and r"[-+]" or r"_")
            return set(delim.split(p))

        reference = os.path.normpath(os.path.join(self.basedir,
                                                  os.path.expandvars(reffile)))

        # old-style platform-specific reference name
        spec_ref = reference[:-3] + GetPlatform(self)[0:3] + reference[-3:]
        if os.path.isfile(spec_ref):
            reference = spec_ref
        else:
            # look for new-style platform-specific reference files
            dirname, basename = os.path.split(reference)
            head = basename + "."
            head_len = len(head)
            platform = platformSplit(GetPlatform(self))
            if 'do0' in platform:
                platform.add('debug')
            candidates = []
            for f in os.listdir(dirname):
                if f.startswith(head):
                    req_plat = platformSplit(f[head_len:])
                    if platform.issuperset(req_plat):
                        candidates.append((len(req_plat), f))
            if candidates:
                # take the most specific match
                candidates.sort()
                reference = os.path.join(dirname, candidates[-1][1])
        return reference
from GaudiKernel import ROOT6WorkAroundEnabled
def RationalizePath(p):
    """
    Function used to normalize the used path.
    """
    newPath = os.path.normpath(os.path.expandvars(p))
    if os.path.exists(newPath):
        p = os.path.realpath(newPath)
    return p
def which(executable):
    """
    Locates an executable in the executables path ($PATH) and returns the
    full path to it. An application is looked for with or without the '.exe'
    suffix. If the executable cannot be found, None is returned.
    """
    if os.path.isabs(executable):
        if not os.path.exists(executable):
            if executable.endswith('.exe'):
                if os.path.exists(executable[:-4]):
                    return executable[:-4]
            # fall back to searching the bare name in $PATH
            head, executable = os.path.split(executable)
        else:
            return executable
    for d in os.environ.get("PATH").split(os.pathsep):
        fullpath = os.path.join(d, executable)
        if os.path.exists(fullpath):
            return fullpath
    if executable.endswith('.exe'):
        return which(executable[:-4])
    return None
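# Usage sketch (illustrative): resolve the test executable before launching
# it, falling back to the bare name if it is not on $PATH.
#
#     prog = which('Gaudi.exe') or 'Gaudi.exe'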
    UNTESTED = 'UNTESTED'

    def __init__(self, kind=None, id=None, outcome=PASS, annotations={}):
        self.kind = kind
        self.id = id
        self.outcome = outcome
        self.annotations = annotations.copy()

    def __getitem__(self, key):
        assert type(key) in types.StringTypes
        return self.annotations[key]

    def __setitem__(self, key, value):
        assert type(key) in types.StringTypes
        assert type(value) in types.StringTypes
        self.annotations[key] = value
654 """Validate the output of the program. 655 'stdout' -- A string containing the data written to the standard output 657 'stderr' -- A string containing the data written to the standard error 659 'result' -- A 'Result' object. It may be used to annotate 660 the outcome according to the content of stderr. 661 returns -- A list of strings giving causes of failure.""" 666 causes.append(self.
cause)
672 """Compare 's1' and 's2', ignoring line endings. 675 returns -- True if 's1' and 's2' are the same, ignoring 676 differences in line endings.""" 680 to_ignore = re.compile(
681 r'Warning in <TInterpreter::ReadRootmapFile>: .* is already in .*')
683 def keep_line(l):
return not to_ignore.match(l)
684 return filter(keep_line, s1.splitlines()) == filter(keep_line, s2.splitlines())
686 return s1.splitlines() == s2.splitlines()
692 """ Base class for a callable that takes a file and returns a modified 707 if hasattr(input,
"__iter__"):
711 lines = input.splitlines()
715 output =
'\n'.join(output)
            if line.find(s) >= 0:
                return None
        if self.start in line:
            self._skipping = True
        elif self.end in line:
            self._skipping = False
class RegexpReplacer(FilePreprocessor):
    def __init__(self, orig, repl="", when=None):
        if when:
            when = re.compile(when)
        self._operations = [(when, re.compile(orig), repl)]

    def __add__(self, rhs):
        if isinstance(rhs, RegexpReplacer):
            res = RegexpReplacer("", "", None)
            res._operations = self._operations + rhs._operations
        else:
            res = FilePreprocessor.__add__(self, rhs)
        return res

    def __processLine__(self, line):
        for w, o, r in self._operations:
            if w is None or w.search(line):
                line = o.sub(r, line)
        return line
797 "[0-2]?[0-9]:[0-5][0-9]:[0-5][0-9] [0-9]{4}[-/][01][0-9][-/][0-3][0-9][ A-Z]*",
798 "00:00:00 1970-01-01")
800 normalizeEOL.__processLine__ =
lambda line: str(line).rstrip() +
'\n' 804 skipEmptyLines.__processLine__ =
lambda line: (line.strip()
and line)
or None 819 line = line[:(pos + self.
siglen)]
820 lst = line[(pos + self.
siglen):].split()
822 line +=
" ".join(lst)
class SortGroupOfLines(FilePreprocessor):
    '''
    Sort group of lines matching a regular expression
    '''

    def __init__(self, exp):
        self.exp = exp if hasattr(exp, 'match') else re.compile(exp)

    def __processFile__(self, lines):
        match = self.exp.match
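# A small composition sketch (illustrative; the sample input line is invented
# and maskPointers is assumed to be the pointer-masking RegexpReplacer defined
# in this module): preprocessors are combined with '+' and applied to text.
#
#     masked = maskPointers + normalizeDate
#     masked('created at 0x7f3a9c004010 on 12:34:56 2024-05-01')
#     # -> 'created at 0x######## on 00:00:00 1970-01-01'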
normalizeExamples = maskPointers + normalizeDate
for w, o, r in [
        # normalize the timing output
        ("TIMER.TIMER", r"\s+[+-]?[0-9]+[0-9.]*", " 0"),
        ("release all pending", r"^.*/([^/]*:.*)", r"\1"),
        ("^#.*file", r"file '.*[/\\]([^/\\]*)$", r"file '\1"),
        ("^JobOptionsSvc.*options successfully read in from",
         r"read in from .*[/\\]([^/\\]*)$", r"file \1"),
        # normalize UUIDs, except those ending in all zeros (class IDs)
        (None,
         r"[0-9A-Fa-f]{8}-[0-9A-Fa-f]{4}-[0-9A-Fa-f]{4}-[0-9A-Fa-f]{4}(?!-0{12})-[0-9A-Fa-f]{12}",
         "00000000-0000-0000-0000-000000000000"),
        # absorb a change in ServiceLocatorHelper
        ("ServiceLocatorHelper::",
         "ServiceLocatorHelper::(create|locate)Service",
         "ServiceLocatorHelper::service"),
        # remove the leading 0 in Windows' exponential format
        (None, r"e([-+])0([0-9][0-9])", r"e\1\2"),
        # output line changed in Gaudi v24
        (None, r'Service reference count check:',
         r'Looping over all active services...'),
        # ignore the count of declared properties
        (None,
         r"^(.*(DEBUG|SUCCESS) List of ALL properties of .*#properties = )\d+",
         r"\1NN"),
        ('ApplicationMgr', r'(declareMultiSvcType|addMultiSvc): ', ''),
]:
    normalizeExamples += RegexpReplacer(o, r, w)

lineSkipper = LineSkipper(
    [
        "JobOptionsSvc INFO # ",
        "JobOptionsSvc WARNING # ",
        "This machine has a speed",
        "ToolSvc.Sequenc... INFO",
        "DataListenerSvc INFO XML written to file:",
        "[INFO]", "[WARNING]",
        "DEBUG No writable file catalog found which contains FID:",
        "DEBUG Service base class initialized successfully",
        "DEBUG Incident timing:",
        "INFO 'CnvServices':[",
        "DEBUG 'CnvServices':[",
    ],
    regexps=[
        r"^JobOptionsSvc INFO *$",
        r"(Always|SUCCESS)\s*(Root f|[^ ]* F)ile version:",
        r"File '.*.xml' does not exist",
        r"INFO Refer to dataset .* by its file ID:",
        r"INFO Referring to dataset .* by its file ID:",
        r"INFO Disconnect from dataset",
        r"INFO Disconnected from dataset",
        r"INFO Disconnected data IO:",
        r"IncidentSvc\s*(DEBUG (Adding|Removing)|VERBOSE Calling)",
        r"^StatusCodeSvc.*listing all unchecked return codes:",
        r"^StatusCodeSvc\s*INFO\s*$",
        r"Num\s*\|\s*Function\s*\|\s*Source Library",
        r"ERROR Failed to modify file: .* Errno=2 No such file or directory",
        r"^ +[0-9]+ \|.*ROOT",
        r"^ +[0-9]+ \|.*\|.*Dict",
        r"StatusCodeSvc.*all StatusCode instances where checked",
        r"EventLoopMgr.*---> Loop Finished",
        r"HiveSlimEventLo.*---> Loop Finished",
        r"SUCCESS\s*Booked \d+ Histogram\(s\)",
        r"Property(.*)'Audit(Algorithm|Tool|Service)s':",
        r"Property(.*)'AuditRe(start|initialize)':",
        r"Property(.*)'IsIOBound':",
        r"Property(.*)'ErrorCount(er)?':",
        r"Property(.*)'Sequential':",
        r"Property(.*)'FilterCircularDependencies':",
        r"Property(.*)'IsClonable':",
        r"Property update for OutputLevel : new value =",
        r"EventLoopMgr\s*DEBUG Creating OutputStream",
        r'Warning in <TInterpreter::ReadRootmapFile>: .* is already in .*',
    ])

normalizeExamples = (lineSkipper + normalizeExamples + skipEmptyLines +
                     normalizeEOL + LineSorter("Services to release : "))
class ReferenceFileValidator:
    def __init__(self, reffile, cause, result_key, preproc=normalizeExamples):
        self.reffile = os.path.expandvars(reffile)
        self.cause = cause
        self.result_key = result_key
        self.preproc = preproc

    def __call__(self, stdout, result):
        causes = []
        if os.path.isfile(self.reffile):
            orig = open(self.reffile).xreadlines()
            if self.preproc:
                orig = self.preproc(orig)
                result[self.result_key + '.preproc.orig'] = \
                    result.Quote('\n'.join(map(str.strip, orig)))
        else:
            orig = []

        new = stdout.splitlines()
        if self.preproc:
            new = self.preproc(new)

        diffs = difflib.ndiff(orig, new, charjunk=difflib.IS_CHARACTER_JUNK)
        filterdiffs = map(lambda x: x.strip(), filter(
            lambda x: x[0] != " ", diffs))
        if filterdiffs:
            result[self.result_key] = result.Quote("\n".join(filterdiffs))
            result[self.result_key] += result.Quote("""
Legend:
        -) reference file
        +) standard output of the test""")
            result[self.result_key + '.preproc.new'] = \
                result.Quote('\n'.join(map(str.strip, new)))
            causes.append(self.cause)
        return causes
def findTTreeSummaries(stdout):
    """
    Scan stdout to find ROOT TTree summaries and digest them.
    """
    stars = re.compile(r"^\*+$")
    outlines = stdout.splitlines()
    nlines = len(outlines)
    trees = {}

    i = 0
    while i < nlines:
        # skip to the next block of '*'-delimited summary lines
        while i < nlines and not stars.match(outlines[i]):
            i += 1
        if i < nlines:
            tree, i = _parseTTreeSummary(outlines, i)
            if tree:
                trees[tree["Name"]] = tree
    return trees
def cmpTreesDicts(reference, to_check, ignore=None):
    """
    Check that all the keys in reference are in to_check too, with the same
    value. If the value is a dict, the function is called recursively.
    to_check can contain more keys than reference, which will not be tested.
    The function returns at the first difference found.
    """
    fail_keys = []
    # select the keys to compare, dropping the ones matching 'ignore'
    if ignore:
        ignore_re = re.compile(ignore)
        keys = [key for key in reference if not ignore_re.match(key)]
    else:
        keys = reference.keys()
    for k in keys:
        if k in to_check:
            if (type(reference[k]) is dict) and (type(to_check[k]) is dict):
                # recurse into nested dictionaries
                failed = fail_keys = cmpTreesDicts(
                    reference[k], to_check[k], ignore)
            else:
                failed = to_check[k] != reference[k]
        else:
            failed = True
        if failed:
            fail_keys.insert(0, k)
            break
    # empty list if everything matched, else the path to the first difference
    return fail_keys


def getCmpFailingValues(reference, to_check, fail_path):
    c = to_check
    r = reference
    for k in fail_path:
        c = c.get(k, None)
        r = r.get(k, None)
        if c is None or r is None:
            break
    return (fail_path, r, c)


# signature line of the histogram count print-out
h_count_re = re.compile(
    r"^(.*)SUCCESS\s+Booked (\d+) Histogram\(s\) :\s+([\s\w=-]*)")
def _parseTTreeSummary(lines, pos):
    """
    Parse the TTree summary table in lines, starting from pos.
    Returns a tuple with the dictionary of the digested information and the
    position of the first line after the summary.
    """
    result = {}
    i = pos + 1  # the first line is a sequence of '*'
    count = len(lines)

    def splitcols(l):
        return [f.strip() for f in l.strip("*\n").split(':', 2)]

    def parseblock(ll):
        r = {}
        cols = splitcols(ll[0])
        r["Name"], r["Title"] = cols[1:]

        cols = splitcols(ll[1])
        r["Entries"] = int(cols[1])

        sizes = cols[2].split()
        r["Total size"] = int(sizes[2])
        if sizes[-1] == "memory":
            r["File size"] = 0
        else:
            r["File size"] = int(sizes[-1])

        cols = splitcols(ll[2])
        sizes = cols[2].split()
        if cols[0] == "Baskets":
            r["Baskets"] = int(cols[1])
            r["Basket size"] = int(sizes[2])
        r["Compression"] = float(sizes[-1])
        return r

    if i < (count - 3) and lines[i].startswith("*Tree"):
        result = parseblock(lines[i:i + 3])
        result["Branches"] = {}
        i += 4
        while i < (count - 3) and lines[i].startswith("*Br"):
            if i < (count - 2) and lines[i].startswith("*Branch "):
                # skip the branch header lines
                i += 3
                continue
            branch = parseblock(lines[i:i + 3])
            result["Branches"][branch["Name"]] = branch
            i += 4

    return (result, i)
def parseHistosSummary(lines, pos):
    """
    Extract the histograms infos from the lines starting at pos.
    Returns the position of the first line after the summary block.
    """
    h_table_head = re.compile(
        r'SUCCESS\s+List of booked (1D|2D|3D|1D profile|2D profile) histograms in directory\s+"(\w*)"')
    h_short_summ = re.compile(r"ID=([^\"]+)\s+\"([^\"]+)\"\s+(.*)")

    nlines = len(lines)

    # decode the header line counting the booked histograms
    m = h_count_re.search(lines[pos])
    name = m.group(1).strip()
    total = int(m.group(2))
    header = {}
    for k, v in [x.split("=") for x in m.group(3).split()]:
        header[k] = int(v)
    pos += 1
    header["Total"] = total

    summ = {}
    while pos < nlines:
        m = h_table_head.search(lines[pos])
        if m:
            t, d = m.groups()  # histogram type and directory
            t = t.replace(" profile", "Prof")
            cont = {}
            if d not in summ:
                summ[d] = {}
            summ[d][t] = cont
            summ[d]["header"] = header
            pos += 1
            while pos < nlines and lines[pos].strip():
                l = lines[pos]
                if l.startswith(" | ID"):
                    # table format: use the titles row to name the columns
                    titles = [x.strip() for x in l.split("|")][1:]
                    pos += 1
                    while pos < nlines and lines[pos].startswith(" |"):
                        l = lines[pos]
                        values = [x.strip() for x in l.split("|")][1:]
                        hcont = {}
                        for i in range(len(titles)):
                            hcont[titles[i]] = values[i]
                        cont[hcont["ID"]] = hcont
                        pos += 1
                elif l.startswith(" ID="):
                    # short summary format
                    while pos < nlines and lines[pos].startswith(" ID="):
                        values = [x.strip()
                                  for x in h_short_summ.search(lines[pos]).groups()]
                        cont[values[0]] = values
                        pos += 1
                else:
                    raise RuntimeError(
                        "Cannot understand line %d: '%s'" % (pos, l))
        else:
            break
    if not summ:
        # use only the header if the full table is not present
        summ[name] = {"header": header}
    return summ, pos
def findHistosSummaries(stdout):
    """
    Scan stdout to find histogram summaries and digest them.
    """
    outlines = stdout.splitlines()
    nlines = len(outlines) - 1
    summaries = {}

    pos = 0
    while pos < nlines:
        # find the first line of the next summary block
        match = h_count_re.search(outlines[pos])
        while pos < nlines and not match:
            pos += 1
            match = h_count_re.search(outlines[pos])
        if match:
            summ, pos = parseHistosSummary(outlines, pos)
            summaries.update(summ)
    return summaries
def PlatformIsNotSupported(self, context, result):
    platform = GetPlatform(self)
    unsupported = [re.compile(x) for x in [str(y).strip()
                                           for y in self.unsupported_platforms]
                   if x]
    for p_re in unsupported:
        if p_re.search(platform):
            result.SetOutcome(result.UNTESTED)
            result[result.CAUSE] = 'Platform not supported.'
            return True
    return False


def GetPlatform(self):
    """
    Return the platform Id defined in CMTCONFIG or SCRAM_ARCH.
    """
    arch = "None"
    # check the architecture name
    if "BINARY_TAG" in os.environ:
        arch = os.environ["BINARY_TAG"]
    elif "CMTCONFIG" in os.environ:
        arch = os.environ["CMTCONFIG"]
    elif "SCRAM_ARCH" in os.environ:
        arch = os.environ["SCRAM_ARCH"]
    return arch


def isWinPlatform(self):
    """
    Return True if the current platform is Windows.

    This function was needed because of the change in the CMTCONFIG format,
    from win32_vc71_dbg to i686-winxp-vc9-dbg.
    """
    platform = GetPlatform(self)
    return "winxp" in platform or platform.startswith("win")
# Index of functions and methods defined in this module:
#   def PlatformIsNotSupported(self, context, result)
#   def __processLine__(self, line)
#   def __init__(self, start, end)
#   def __call__(self, input)
#   def validateWithReference(self, stdout=None, stderr=None, result=None, causes=None, preproc=None)
#   def __processLine__(self, line)
#   def cmpTreesDicts(reference, to_check, ignore=None)
#   def __processFile__(self, lines)
#   def ValidateOutput(self, stdout, stderr, result)
#   def read(f, regex='.*', skipevents=0)
#   def __processLine__(self, line)
#   def __processFile__(self, lines)
#   def __call__(self, out, result)
#   def findHistosSummaries(stdout)
#   def _parseTTreeSummary(lines, pos)
#   def __call__(self, stdout, result)
#   def __processLine__(self, line)
#   def __init__(self, orig, repl="", when=None)
#   def __init__(self, signature)
#   def __call__(self, input)
#   def sanitize_for_xml(data)
#   def getCmpFailingValues(reference, to_check, fail_path)
#   def __init__(self, members=[])
#   def __init__(self, strings=[], regexps=[])
#   def __setitem__(self, key, value)
#   def __init__(self, kind=None, id=None, outcome=PASS, annotations={})
#   def __processLine__(self, line)
#   def parseHistosSummary(lines, pos)
#   def _expandReferenceFileName(self, reffile)
#   def findReferenceBlock(self, reference=None, stdout=None, result=None, causes=None, signature_offset=0, signature=None, id=None)
#   def CheckHistosSummaries(self, stdout=None, result=None, causes=None, dict=None, ignore=None)
#   def __CompareText(self, s1, s2)
#   def __init__(self, reffile, cause, result_key, preproc=normalizeExamples)
#   def __getitem__(self, key)
#   def findTTreeSummaries(stdout)
#   def __init__(self, ref, cause, result_key)
#   def ROOT6WorkAroundEnabled(id=None)
#   def CheckTTreesSummaries(self, stdout=None, result=None, causes=None, trees_dict=None, ignore=r"Basket|.*size|Compression")