23 from subprocess
import PIPE, STDOUT, Popen
26 from html
import escape
as escape_for_html
28 from cgi
import escape
as escape_for_html
32 if sys.version_info < (3, 5):
35 from codecs
import backslashreplace_errors, register_error
38 if isinstance(exc, UnicodeDecodeError):
39 code =
hex(ord(exc.object[exc.start]))
40 return (
"\\" + code[1:], exc.start + 1)
42 return backslashreplace_errors(exc)
44 register_error(
"backslashreplace", _new_backslashreplace_errors)
46 del backslashreplace_errors
47 del _new_backslashreplace_errors
def sanitize_for_xml(data):
    """
    Take a string with invalid ASCII/UTF characters and quote them so that the
    string can be used in an XML text.

    >>> sanitize_for_xml('this is \x1b')
    'this is [NON-XML-CHAR-0x1B]'
    """
    # Characters not allowed in XML 1.0 text: C0 controls except TAB/LF/CR,
    # the surrogate range and the two non-characters U+FFFE/U+FFFF.
    bad_chars = re.compile("[\x00-\x08\x0b\x0c\x0e-\x1F\uD800-\uDFFF\uFFFE\uFFFF]")

    def quote(match):
        # Replace each offending character with a readable marker.
        # "%02X" zero-pads codes below 0x10; the previous "%2X" space-padded
        # them, producing e.g. "[NON-XML-CHAR-0x B]".
        return "".join("[NON-XML-CHAR-0x%02X]" % ord(c) for c in match.group())

    return bad_chars.sub(quote, data)
def dumpProcs(name):
    """helper to debug GAUDI-1084, dump the list of processes"""
    from getpass import getuser

    # Only meaningful on CI nodes where $WORKSPACE points at the job area;
    # elsewhere this is a silent no-op.
    if "WORKSPACE" in os.environ:
        ps = Popen(["ps", "-fH", "-U", getuser()], stdout=PIPE)
        listing = ps.communicate()[0]
        dump_path = os.path.join(os.environ["WORKSPACE"], name)
        with open(dump_path, "wb") as dump_file:
            dump_file.write(listing)
def kill_tree(ppid, sig):
    """
    Send a signal to a process and all its child processes (starting from the
    leaves).
    """
    log = logging.getLogger("kill_tree")
    # NOTE(review): relies on procps "ps" options (--no-headers/--ppid),
    # i.e. Linux — confirm this is never reached on other platforms.
    ps_cmd = ["ps", "--no-headers", "-o", "pid", "--ppid", str(ppid)]
    get_children = Popen(ps_cmd, stdout=PIPE, stderr=PIPE)
    children = map(int, get_children.communicate()[0].split())
    # Recurse first so the leaves of the process tree are signalled
    # before their parents.
    for child in children:
        kill_tree(child, sig)
    try:
        log.debug("killing process %d", ppid)
        os.kill(ppid, sig)
    except OSError as err:
        if err.errno != 3:  # 3 == ESRCH: "no such process" is fine, it
            raise           # may have exited already
        log.debug("no such process %d", ppid)
104 _common_tmpdir =
None
133 logging.debug(
"running test %s", self.
name)
144 "TIMEOUT_DETAIL":
None,
150 r"from\s+Gaudi.Configuration\s+import\s+\*|"
151 "from\s+Configurables\s+import",
154 suffix, lang =
".py",
"python"
156 suffix, lang =
".opts",
"c++"
157 self.
result[
"Options"] =
'<code lang="{}"><pre>{}</pre></code>'.
format(
158 lang, escape_for_html(self.
options)
160 optionFile = tempfile.NamedTemporaryFile(suffix=suffix)
161 optionFile.file.write(self.
options.encode(
"utf-8"))
168 or platform.platform()
175 if re.search(prex, platform_id)
186 workdir = tempfile.mkdtemp()
197 prog_ext = os.path.splitext(prog)[1]
198 if prog_ext
not in [
".exe",
".py",
".bat"]:
202 prog =
which(prog)
or prog
204 args = list(
map(RationalizePath, self.
args))
206 if prog_ext ==
".py":
217 logging.debug(
"executing %r in %s", params, workdir)
219 params, stdout=PIPE, stderr=PIPE, env=self.
environment
221 logging.debug(
"(pid: %d)", self.
proc.pid)
222 out, err = self.
proc.communicate()
223 self.
out = out.decode(
"utf-8", errors=
"backslashreplace")
224 self.
err = err.decode(
"utf-8", errors=
"backslashreplace")
226 thread = threading.Thread(target=target)
231 if thread.is_alive():
232 logging.debug(
"time out in test %s (pid %d)", self.
name, self.
proc.pid)
239 "--eval-command=thread apply all backtrace",
241 gdb = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=STDOUT)
243 "utf-8", errors=
"backslashreplace"
248 if thread.is_alive():
250 self.
causes.append(
"timeout")
255 f
"completed test {self.name} with returncode = {self.returnedCode}"
257 logging.debug(
"validating test...")
262 logging.debug(f
"skipped test {self.name}")
267 shutil.rmtree(workdir,
True)
271 if self.
status !=
"skipped":
273 if self.
signal is not None:
275 self.
causes.append(
"exit code")
279 self.
causes.append(
"exit code")
282 self.
causes.append(
"exit code")
292 logging.debug(
"%s: %s", self.
name, self.
status)
294 "Exit Code":
"returnedCode",
297 "Runtime Environment":
"environment",
300 "Program Name":
"program",
302 "Validator":
"validator",
303 "Output Reference File":
"reference",
304 "Error Reference File":
"error_reference",
307 "Unsupported Platforms":
"unsupported_platforms",
308 "Stack Trace":
"stack_trace",
311 (key, getattr(self, attr))
312 for key, attr
in field_mapping.items()
313 if getattr(self, attr)
322 resultDict.extend(self.
result.annotations.items())
324 resultDict = dict(resultDict)
327 if "Validator" in resultDict:
328 resultDict[
"Validator"] =
'<code lang="{}"><pre>{}</pre></code>'.
format(
329 "python", escape_for_html(resultDict[
"Validator"])
340 elif stderr.strip() != self.
stderr.strip():
341 self.
causes.append(
"standard error")
342 return result, self.
causes
355 Given a block of text, tries to find it in the output. The block had to be identified by a signature line. By default, the first line is used as signature, or the line pointed to by signature_offset. If signature_offset points outside the block, a signature line can be passed as signature argument. Note: if 'signature' is None (the default), a negative signature_offset is interpreted as index in a list (e.g. -1 means the last line), otherwise the it is interpreted as the number of lines before the first one of the block the signature must appear. The parameter 'id' allow to distinguish between different calls to this function in the same validation code.
358 if reference
is None:
367 reflines = list(filter(
None,
map(
lambda s: s.rstrip(), reference.splitlines())))
369 raise RuntimeError(
"Empty (or null) reference")
371 outlines = list(filter(
None,
map(
lambda s: s.rstrip(), stdout.splitlines())))
373 res_field =
"GaudiTest.RefBlock"
375 res_field +=
"_%s" % id
377 if signature
is None:
378 if signature_offset < 0:
379 signature_offset = len(reference) + signature_offset
380 signature = reflines[signature_offset]
383 pos = outlines.index(signature)
385 pos - signature_offset : pos + len(reflines) - signature_offset
387 if reflines != outlines:
388 msg =
"standard output"
391 if not msg
in causes:
393 result[res_field +
".observed"] = result.Quote(
"\n".join(outlines))
395 causes.append(
"missing signature")
396 result[res_field +
".signature"] = result.Quote(signature)
397 if len(reflines) > 1
or signature != reflines[0]:
398 result[res_field +
".expected"] = result.Quote(
"\n".join(reflines))
402 self, expected={"ERROR": 0,
"FATAL": 0}, stdout=
None, result=
None, causes=
None
405 Count the number of messages with required severity (by default ERROR and FATAL)
406 and check if their numbers match the expected ones (0 by default).
407 The dictionary "expected" can be used to tune the number of errors and fatals
408 allowed, or to limit the number of expected warnings etc.
423 outlines = stdout.splitlines()
424 from math
import log10
426 fmt =
"%%%dd - %%s" % (int(log10(len(outlines) + 1)))
432 if len(words) >= 2
and words[1]
in errors:
433 errors[words[1]].append(fmt % (linecount, l.rstrip()))
436 if len(errors[e]) != expected[e]:
437 causes.append(
"%s(%d)" % (e, len(errors[e])))
438 result[
"GaudiTest.lines.%s" % e] = result.Quote(
"\n".join(errors[e]))
439 result[
"GaudiTest.lines.%s.expected#" % e] = result.Quote(
451 ignore=r"Basket|.*size|Compression",
454 Compare the TTree summaries in stdout with the ones in trees_dict or in
455 the reference file. By default ignore the size, compression and basket
457 The presence of TTree summaries when none is expected is not a failure.
465 if trees_dict
is None:
468 if lreference
and os.path.isfile(lreference):
473 from pprint
import PrettyPrinter
477 result[
"GaudiTest.TTrees.expected"] = result.Quote(pp.pformat(trees_dict))
479 result[
"GaudiTest.TTrees.ignore"] = result.Quote(ignore)
484 causes.append(
"trees summaries")
486 result[
"GaudiTest.TTrees.failure_on"] = result.Quote(msg)
487 result[
"GaudiTest.TTrees.found"] = result.Quote(pp.pformat(trees))
492 self, stdout=None, result=None, causes=None, dict=None, ignore=None
495 Compare the TTree summaries in stdout with the ones in trees_dict or in
496 the reference file. By default ignore the size, compression and basket
498 The presence of TTree summaries when none is expected is not a failure.
510 if lreference
and os.path.isfile(lreference):
515 from pprint
import PrettyPrinter
519 result[
"GaudiTest.Histos.expected"] = result.Quote(pp.pformat(dict))
521 result[
"GaudiTest.Histos.ignore"] = result.Quote(ignore)
526 causes.append(
"histos summaries")
528 result[
"GaudiTest.Histos.failure_on"] = result.Quote(msg)
529 result[
"GaudiTest.Histos.found"] = result.Quote(pp.pformat(histos))
534 self, stdout=None, stderr=None, result=None, causes=None, preproc=None
537 Default validation acti*on: compare standard output and error to the
552 preproc = normalizeExamples
556 if lreference
and os.path.isfile(lreference):
558 lreference,
"standard output",
"Output Diff", preproc=preproc
561 causes += [
"missing reference file"]
565 if causes
and lreference:
568 newrefname =
".".join([lreference,
"new"])
569 while os.path.exists(newrefname):
571 newrefname =
".".join([lreference,
"~%d~" % cnt,
"new"])
572 newref = open(newrefname,
"w")
574 for l
in stdout.splitlines():
575 newref.write(l.rstrip() +
"\n")
577 result[
"New Output Reference File"] = os.path.relpath(
589 if os.path.isfile(lreference):
591 lreference,
"standard error",
"Error Diff", preproc=preproc
594 newcauses = [
"missing error reference file"]
596 if newcauses
and lreference:
598 newrefname =
".".join([lreference,
"new"])
599 while os.path.exists(newrefname):
601 newrefname =
".".join([lreference,
"~%d~" % cnt,
"new"])
602 newref = open(newrefname,
"w")
604 for l
in stderr.splitlines():
605 newref.write(l.rstrip() +
"\n")
607 result[
"New Error Reference File"] = os.path.relpath(
612 lreference,
"standard error",
"ExecTest.expected_stderr"
def platformSplit(p):
    """Split a platform id into its set of component tags.

    New-style ids (containing a '-') are split on '-' or '+'
    (e.g. ``x86_64-slc6-gcc48-opt``); old-style ids are split on '_'
    (e.g. ``win32_vc71_dbg``).
    """
    # A conditional expression replaces the fragile "cond and a or b"
    # idiom (which breaks if the first alternative is ever falsy).
    delim = re.compile(r"[-+]" if "-" in p else r"_")
    return set(delim.split(p))
628 reference = os.path.normpath(
629 os.path.join(self.
basedir, os.path.expandvars(reffile))
633 spec_ref = reference[:-3] +
GetPlatform(self)[0:3] + reference[-3:]
634 if os.path.isfile(spec_ref):
638 dirname, basename = os.path.split(reference)
641 head = basename +
"."
644 if "do0" in platform:
647 for f
in os.listdir(dirname):
648 if f.startswith(head):
649 req_plat = platformSplit(f[head_len:])
650 if platform.issuperset(req_plat):
651 candidates.append((len(req_plat), f))
656 reference = os.path.join(dirname, candidates[-1][1])
668 from GaudiKernel
import ROOT6WorkAroundEnabled
def RationalizePath(p):
    """
    Function used to normalize the used path
    """
    expanded = os.path.normpath(os.path.expandvars(p))
    # Only canonicalize (resolve symlinks) when the expanded path actually
    # exists; otherwise return the argument untouched.
    return os.path.realpath(expanded) if os.path.exists(expanded) else p
def which(executable):
    """
    Locates an executable in the executables path ($PATH) and returns the full
    path to it. An application is looked for with or without the '.exe' suffix.
    If the executable cannot be found, None is returned
    """
    if os.path.isabs(executable):
        if not os.path.isfile(executable):
            if executable.endswith(".exe"):
                if os.path.isfile(executable[:-4]):
                    return executable[:-4]
            else:
                # absolute path does not exist: retry the $PATH search with
                # the bare program name
                executable = os.path.split(executable)[1]
        else:
            return executable
    # default "" prevents AttributeError when PATH is unset (previously
    # os.environ.get("PATH") could return None)
    for d in os.environ.get("PATH", "").split(os.pathsep):
        fullpath = os.path.join(d, executable)
        if os.path.isfile(fullpath):
            return fullpath
        elif executable.endswith(".exe") and os.path.isfile(fullpath[:-4]):
            return fullpath[:-4]
    return None
724 UNTESTED =
"UNTESTED"
734 def __init__(self, kind=None, id=None, outcome=PASS, annotations={}):
738 assert isinstance(key, six.string_types)
742 assert isinstance(key, six.string_types)
743 assert isinstance(value, six.string_types),
"{!r} is not a string".
format(value)
def Quote(self, text):
    """
    Convert text to html by escaping special chars and adding <pre> tags.
    """
    escaped = escape_for_html(text)
    return "<pre>{}</pre>".format(escaped)
769 """Validate the output of the program.
770 'stdout' -- A string containing the data written to the standard output
772 'stderr' -- A string containing the data written to the standard error
774 'result' -- A 'Result' object. It may be used to annotate
775 the outcome according to the content of stderr.
776 returns -- A list of strings giving causes of failure."""
781 causes.append(self.
cause)
787 """Compare 's1' and 's2', ignoring line endings.
790 returns -- True if 's1' and 's2' are the same, ignoring
791 differences in line endings."""
795 to_ignore = re.compile(
796 r"Warning in <TInterpreter::ReadRootmapFile>: .* is already in .*"
800 return not to_ignore.match(l)
802 return list(filter(keep_line, s1.splitlines())) == list(
803 filter(keep_line, s2.splitlines())
806 return s1.splitlines() == s2.splitlines()
811 """Base class for a callable that takes a file and returns a modified
826 if not isinstance(input, six.string_types):
830 lines = input.splitlines()
834 output =
"\n".join(output)
864 if line.find(s) >= 0:
879 if self.
start in line:
882 elif self.
end in line:
892 when = re.compile(when)
896 if isinstance(rhs, RegexpReplacer):
898 res._operations = self.
_operations + rhs._operations
900 res = FilePreprocessor.__add__(self, rhs)
905 if w
is None or w.search(line):
906 line = o.sub(r, line)
913 "[0-2]?[0-9]:[0-5][0-9]:[0-5][0-9] [0-9]{4}[-/][01][0-9][-/][0-3][0-9][ A-Z]*",
914 "00:00:00 1970-01-01",
917 normalizeEOL.__processLine__ =
lambda line: str(line).rstrip() +
"\n"
921 skipEmptyLines.__processLine__ =
lambda line: (line.strip()
and line)
or None
935 line = line[: (pos + self.
siglen)]
936 lst = line[(pos + self.
siglen) :].split()
938 line +=
" ".join(lst)
944 Sort group of lines matching a regular expression
948 self.
exp = exp
if hasattr(exp,
"match")
else re.compile(exp)
951 match = self.
exp.match
967 normalizeExamples = maskPointers + normalizeDate
970 (
"TIMER.TIMER",
r"\s+[+-]?[0-9]+[0-9.]*",
" 0"),
971 (
"release all pending",
r"^.*/([^/]*:.*)",
r"\1"),
972 (
"^#.*file",
r"file '.*[/\\]([^/\\]*)$",
r"file '\1"),
974 "^JobOptionsSvc.*options successfully read in from",
975 r"read in from .*[/\\]([^/\\]*)$",
981 r"[0-9A-Fa-f]{8}-[0-9A-Fa-f]{4}-[0-9A-Fa-f]{4}-[0-9A-Fa-f]{4}(?!-0{12})-[0-9A-Fa-f]{12}",
982 "00000000-0000-0000-0000-000000000000",
986 "ServiceLocatorHelper::",
987 "ServiceLocatorHelper::(create|locate)Service",
988 "ServiceLocatorHelper::service",
991 (
None,
r"e([-+])0([0-9][0-9])",
r"e\1\2"),
993 (
None,
r"Service reference count check:",
r"Looping over all active services..."),
997 r"^(.*(DEBUG|SUCCESS) List of ALL properties of .*#properties = )\d+",
1000 (
"ApplicationMgr",
r"(declareMultiSvcType|addMultiSvc): ",
""),
1001 (
r"Property \['Name': Value\]",
r"( = '[^']+':)'(.*)'",
r"\1\2"),
1002 (
"TimelineSvc",
"to file 'TimelineFile':",
"to file "),
1003 (
"DataObjectHandleBase",
r'DataObjectHandleBase\("([^"]*)"\)',
r"'\1'"),
1010 "JobOptionsSvc INFO # ",
1011 "JobOptionsSvc WARNING # ",
1014 "This machine has a speed",
1017 "ToolSvc.Sequenc... INFO",
1018 "DataListenerSvc INFO XML written to file:",
1021 "DEBUG No writable file catalog found which contains FID:",
1022 "DEBUG Service base class initialized successfully",
1024 "DEBUG Incident timing:",
1028 "INFO 'CnvServices':[",
1030 "DEBUG 'CnvServices':[",
1035 "ServiceLocatorHelper::service: found service JobOptionsSvc",
1037 "mismatching case for property name:",
1039 "Histograms saving not required.",
1041 "Properties are dumped into",
1044 r"^JobOptionsSvc INFO *$",
1047 r"(Always|SUCCESS)\s*(Root f|[^ ]* F)ile version:",
1048 r"File '.*.xml' does not exist",
1049 r"INFO Refer to dataset .* by its file ID:",
1050 r"INFO Referring to dataset .* by its file ID:",
1051 r"INFO Disconnect from dataset",
1052 r"INFO Disconnected from dataset",
1053 r"INFO Disconnected data IO:",
1054 r"IncidentSvc\s*(DEBUG (Adding|Removing)|VERBOSE Calling)",
1056 r".*StatusCodeSvc.*",
1057 r".*StatusCodeCheck.*",
1058 r"Num\s*\|\s*Function\s*\|\s*Source Library",
1061 r"ERROR Failed to modify file: .* Errno=2 No such file or directory",
1063 r"^ +[0-9]+ \|.*ROOT",
1064 r"^ +[0-9]+ \|.*\|.*Dict",
1066 r"EventLoopMgr.*---> Loop Finished",
1067 r"HiveSlimEventLo.*---> Loop Finished",
1072 r"SUCCESS\s*Booked \d+ Histogram\(s\)",
1076 r"Property(.*)'Audit(Algorithm|Tool|Service)s':",
1077 r"Property(.*)'Audit(Begin|End)Run':",
1079 r"Property(.*)'AuditRe(start|initialize)':",
1080 r"Property(.*)'Blocking':",
1082 r"Property(.*)'ErrorCount(er)?':",
1084 r"Property(.*)'Sequential':",
1086 r"Property(.*)'FilterCircularDependencies':",
1088 r"Property(.*)'IsClonable':",
1090 r"Property update for OutputLevel : new value =",
1091 r"EventLoopMgr\s*DEBUG Creating OutputStream",
1100 r"Warning in <TInterpreter::ReadRootmapFile>: .* is already in .*",
1104 normalizeExamples = (
1117 def __init__(self, reffile, cause, result_key, preproc=normalizeExamples):
1125 if os.path.isfile(self.
reffile):
1126 orig = open(self.
reffile).readlines()
1129 result[self.
result_key +
".preproc.orig"] = result.Quote(
1130 "\n".join(
map(str.strip, orig))
1134 new = stdout.splitlines()
1138 diffs = difflib.ndiff(orig, new, charjunk=difflib.IS_CHARACTER_JUNK)
1140 map(
lambda x: x.strip(), filter(
lambda x: x[0] !=
" ", diffs))
1143 result[self.
result_key] = result.Quote(
"\n".join(filterdiffs))
1148 +) standard output of the test"""
1150 result[self.
result_key +
".preproc.new"] = result.Quote(
1151 "\n".join(
map(str.strip, new))
1153 causes.append(self.
cause)
1159 Scan stdout to find ROOT TTree summaries and digest them.
1161 stars = re.compile(
r"^\*+$")
1162 outlines = stdout.splitlines()
1163 nlines = len(outlines)
1169 while i < nlines
and not stars.match(outlines[i]):
1174 trees[tree[
"Name"]] = tree
1181 Check that all the keys in reference are in to_check too, with the same value.
1182 If the value is a dict, the function is called recursively. to_check can
1183 contain more keys than reference, that will not be tested.
1184 The function returns at the first difference found.
1189 ignore_re = re.compile(ignore)
1190 keys = [key
for key
in reference
if not ignore_re.match(key)]
1192 keys = reference.keys()
1196 if (
type(reference[k])
is dict)
and (
type(to_check[k])
is dict):
1199 failed = fail_keys =
cmpTreesDicts(reference[k], to_check[k], ignore)
1202 failed = to_check[k] != reference[k]
1207 fail_keys.insert(0, k)
1218 if c
is None or r
is None:
1220 return (fail_path, r, c)
1224 h_count_re = re.compile(
r"^(.*)SUCCESS\s+Booked (\d+) Histogram\(s\) :\s+([\s\w=-]*)")
1229 Parse the TTree summary table in lines, starting from pos.
1230 Returns a tuple with the dictionary with the digested informations and the
1231 position of the first line after the summary.
1238 return [f.strip()
for f
in l.strip(
"*\n").split(
":", 2)]
1242 cols = splitcols(ll[0])
1243 r[
"Name"], r[
"Title"] = cols[1:]
1245 cols = splitcols(ll[1])
1246 r[
"Entries"] = int(cols[1])
1248 sizes = cols[2].split()
1249 r[
"Total size"] = int(sizes[2])
1250 if sizes[-1] ==
"memory":
1253 r[
"File size"] = int(sizes[-1])
1255 cols = splitcols(ll[2])
1256 sizes = cols[2].split()
1257 if cols[0] ==
"Baskets":
1258 r[
"Baskets"] = int(cols[1])
1259 r[
"Basket size"] = int(sizes[2])
1260 r[
"Compression"] = float(sizes[-1])
1263 if i < (count - 3)
and lines[i].startswith(
"*Tree"):
1264 result = parseblock(lines[i : i + 3])
1265 result[
"Branches"] = {}
1267 while i < (count - 3)
and lines[i].startswith(
"*Br"):
1268 if i < (count - 2)
and lines[i].startswith(
"*Branch "):
1272 branch = parseblock(lines[i : i + 3])
1273 result[
"Branches"][branch[
"Name"]] = branch
1281 Extract the histograms infos from the lines starting at pos.
1282 Returns the position of the first line after the summary block.
1285 h_table_head = re.compile(
1286 r'SUCCESS\s+(1D|2D|3D|1D profile|2D profile) histograms in directory\s+"(\w*)"'
1288 h_short_summ = re.compile(
r"ID=([^\"]+)\s+\"([^\"]+)\"\s+(.*)")
1293 m = h_count_re.search(lines[pos])
1294 name = m.group(1).strip()
1295 total = int(m.group(2))
1297 for k, v
in [x.split(
"=")
for x
in m.group(3).split()]:
1300 header[
"Total"] = total
1304 m = h_table_head.search(lines[pos])
1307 t = t.replace(
" profile",
"Prof")
1314 if l.startswith(
" | ID"):
1316 titles = [x.strip()
for x
in l.split(
"|")][1:]
1318 while pos < nlines
and lines[pos].startswith(
" |"):
1320 values = [x.strip()
for x
in l.split(
"|")][1:]
1322 for i
in range(len(titles)):
1323 hcont[titles[i]] = values[i]
1324 cont[hcont[
"ID"]] = hcont
1326 elif l.startswith(
" ID="):
1327 while pos < nlines
and lines[pos].startswith(
" ID="):
1329 x.strip()
for x
in h_short_summ.search(lines[pos]).groups()
1331 cont[values[0]] = values
1334 raise RuntimeError(
"Cannot understand line %d: '%s'" % (pos, l))
1338 summ[d][
"header"] = header
1343 summ[name] = {
"header": header}
1349 Scan stdout to find ROOT TTree summaries and digest them.
1351 outlines = stdout.splitlines()
1352 nlines = len(outlines) - 1
1360 match = h_count_re.search(outlines[pos])
1361 while pos < nlines
and not match:
1363 match = h_count_re.search(outlines[pos])
1366 summaries.update(summ)
1372 Return the platform Id defined in CMTCONFIG or SCRAM_ARCH.
1376 if "BINARY_TAG" in os.environ:
1377 arch = os.environ[
"BINARY_TAG"]
1378 elif "CMTCONFIG" in os.environ:
1379 arch = os.environ[
"CMTCONFIG"]
1380 elif "SCRAM_ARCH" in os.environ:
1381 arch = os.environ[
"SCRAM_ARCH"]
1382 elif os.environ.get(
"ENV_CMAKE_BUILD_TYPE",
"")
in (
1388 elif os.environ.get(
"ENV_CMAKE_BUILD_TYPE",
"")
in (
def isWinPlatform(self):
    """
    Return True if the current platform is Windows.

    This function was needed because of the change in the CMTCONFIG format,
    from win32_vc71_dbg to i686-winxp-vc9-dbg.
    """
    platform = GetPlatform(self)
    # match both the old ("win32_...") and new ("...-winxp-...") id styles
    return "winxp" in platform or platform.startswith("win")