14 from subprocess
import Popen, PIPE, STDOUT
def sanitize_for_xml(data):
    '''
    Take a string with invalid ASCII/UTF characters and quote them so that the
    string can be used in an XML text.

    >>> sanitize_for_xml('this is \x1b')
    'this is [NON-XML-CHAR-0x1B]'
    '''
    # Characters forbidden in XML 1.0 text: C0 control characters (except
    # tab, newline and carriage return), lone surrogates and the
    # non-characters U+FFFE / U+FFFF.
    bad_chars = re.compile(
        u'[\x00-\x08\x0b\x0c\x0e-\x1F\uD800-\uDFFF\uFFFE\uFFFF]')

    def quote(match):
        '''Replace every character of the match with a readable marker.'''
        # '%02X' zero-pads the code point: the previous '%2X' space-padded
        # values below 0x10, yielding e.g. '[NON-XML-CHAR-0x B]' for '\x0b'.
        return ''.join('[NON-XML-CHAR-0x%02X]' % ord(c) for c in match.group())

    return bad_chars.sub(quote, data)
def dumpProcs(name):
    '''helper to debug GAUDI-1084, dump the list of processes'''
    from getpass import getuser
    # Only meaningful inside a CI job: without WORKSPACE there is nowhere
    # to write the dump, so bail out early.
    if 'WORKSPACE' not in os.environ:
        return
    ps = Popen(['ps', '-fH', '-U', getuser()], stdout=PIPE)
    dump_path = os.path.join(os.environ['WORKSPACE'], name)
    with open(dump_path, 'w') as dump_file:
        dump_file.write(ps.communicate()[0])
45 Send a signal to a process and all its child processes (starting from the 48 log = logging.getLogger(
'kill_tree')
49 ps_cmd = [
'ps',
'--no-headers',
'-o',
'pid',
'--ppid', str(ppid)]
50 get_children = Popen(ps_cmd, stdout=PIPE, stderr=PIPE)
51 children =
map(int, get_children.communicate()[0].split())
52 for child
in children:
55 log.debug(
'killing process %d', ppid)
60 log.debug(
'no such process %d', ppid)
96 logging.debug(
'running test %s', self.
name)
99 if re.search(
r'from\s+Gaudi.Configuration\s+import\s+\*|' 100 'from\s+Configurables\s+import', self.
options):
101 optionFile = tempfile.NamedTemporaryFile(suffix=
'.py')
103 optionFile = tempfile.NamedTemporaryFile(suffix=
'.opts')
104 optionFile.file.write(self.
options)
113 self.environment.items() + os.environ.items())
115 platform_id = (os.environ.get(
'BINARY_TAG')
or 116 os.environ.get(
'CMTCONFIG')
or 119 skip_test = bool([
None 121 if re.search(prex, platform_id)])
130 workdir = tempfile.mkdtemp()
136 elif "GAUDIEXE" in os.environ:
137 prog = os.environ[
"GAUDIEXE"]
141 dummy, prog_ext = os.path.splitext(prog)
142 if prog_ext
not in [
".exe",
".py",
".bat"]:
146 prog =
which(prog)
or prog
148 args =
map(RationalizePath, self.
args)
150 if prog_ext ==
".py":
155 validatorRes =
Result({
'CAUSE':
None,
'EXCEPTION':
None,
156 'RESOURCE':
None,
'TARGET':
None,
157 'TRACEBACK':
None,
'START_TIME':
None,
158 'END_TIME':
None,
'TIMEOUT_DETAIL':
None})
159 self.
result = validatorRes
167 logging.debug(
'executing %r in %s',
169 self.
proc = Popen(params, stdout=PIPE, stderr=PIPE,
171 logging.debug(
'(pid: %d)', self.proc.pid)
172 self.
out, self.
err = self.proc.communicate()
174 thread = threading.Thread(target=target)
179 if thread.is_alive():
180 logging.debug(
'time out in test %s (pid %d)',
181 self.
name, self.proc.pid)
183 cmd = [
'gdb',
'--pid', str(self.proc.pid),
'--batch',
184 '--eval-command=thread apply all backtrace']
185 gdb = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=STDOUT)
190 if thread.is_alive():
192 self.causes.append(
'timeout')
194 logging.debug(
'completed test %s', self.
name)
197 logging.debug(
'returnedCode = %s', self.proc.returncode)
200 logging.debug(
'validating test...')
207 shutil.rmtree(workdir,
True)
212 if self.
signal is not None:
214 self.causes.append(
'exit code')
218 self.causes.append(
'exit code')
221 self.causes.append(
"exit code")
231 logging.debug(
'%s: %s', self.
name, self.
status)
232 field_mapping = {
'Exit Code':
'returnedCode',
235 'Environment':
'environment',
238 'Program Name':
'program',
240 'Validator':
'validator',
241 'Output Reference File':
'reference',
242 'Error Reference File':
'error_reference',
245 'Unsupported Platforms':
'unsupported_platforms',
246 'Stack Trace':
'stack_trace'}
247 resultDict = [(key, getattr(self, attr))
248 for key, attr
in field_mapping.iteritems()
249 if getattr(self, attr)]
250 resultDict.append((
'Working Directory',
254 resultDict.extend(self.result.annotations.iteritems())
256 return dict(resultDict)
265 elif stderr.strip() != self.stderr.strip():
266 self.causes.append(
'standard error')
267 return result, self.
causes 269 def findReferenceBlock(self, reference=None, stdout=None, result=None, causes=None, signature_offset=0, signature=None, id=None):
271 Given a block of text, tries to find it in the output. The block had to be identified by a signature line. By default, the first line is used as signature, or the line pointed to by signature_offset. If signature_offset points outside the block, a signature line can be passed as signature argument. Note: if 'signature' is None (the default), a negative signature_offset is interpreted as index in a list (e.g. -1 means the last line), otherwise the it is interpreted as the number of lines before the first one of the block the signature must appear. The parameter 'id' allow to distinguish between different calls to this function in the same validation code. 274 if reference
is None:
284 None,
map(
lambda s: s.rstrip(), reference.splitlines()))
286 raise RuntimeError(
"Empty (or null) reference")
288 outlines = filter(
None,
map(
lambda s: s.rstrip(), stdout.splitlines()))
290 res_field =
"GaudiTest.RefBlock" 292 res_field +=
"_%s" % id
294 if signature
is None:
295 if signature_offset < 0:
296 signature_offset = len(reference) + signature_offset
297 signature = reflines[signature_offset]
300 pos = outlines.index(signature)
301 outlines = outlines[pos - signature_offset:pos +
302 len(reflines) - signature_offset]
303 if reflines != outlines:
304 msg =
"standard output" 306 if not msg
in causes:
309 ".observed"] = result.Quote(
"\n".join(outlines))
311 causes.append(
"missing signature")
312 result[res_field +
".signature"] = result.Quote(signature)
313 if len(reflines) > 1
or signature != reflines[0]:
314 result[res_field +
".expected"] = result.Quote(
"\n".join(reflines))
317 def countErrorLines(self, expected={'ERROR': 0,
'FATAL': 0}, stdout=
None, result=
None, causes=
None):
319 Count the number of messages with required severity (by default ERROR and FATAL) 320 and check if their numbers match the expected ones (0 by default). 321 The dictionary "expected" can be used to tune the number of errors and fatals 322 allowed, or to limit the number of expected warnings etc. 337 outlines = stdout.splitlines()
338 from math
import log10
339 fmt =
"%%%dd - %%s" % (int(log10(len(outlines) + 1)))
345 if len(words) >= 2
and words[1]
in errors:
346 errors[words[1]].append(fmt % (linecount, l.rstrip()))
349 if len(errors[e]) != expected[e]:
350 causes.append(
'%s(%d)' % (e, len(errors[e])))
351 result[
"GaudiTest.lines.%s" %
352 e] = result.Quote(
'\n'.join(errors[e]))
353 result[
"GaudiTest.lines.%s.expected#" %
354 e] = result.Quote(str(expected[e]))
360 ignore=
r"Basket|.*size|Compression"):
362 Compare the TTree summaries in stdout with the ones in trees_dict or in 363 the reference file. By default ignore the size, compression and basket 365 The presence of TTree summaries when none is expected is not a failure. 373 if trees_dict
is None:
376 if lreference
and os.path.isfile(lreference):
381 from pprint
import PrettyPrinter
384 result[
"GaudiTest.TTrees.expected"] = result.Quote(
385 pp.pformat(trees_dict))
387 result[
"GaudiTest.TTrees.ignore"] = result.Quote(ignore)
392 causes.append(
"trees summaries")
394 trees_dict, trees, failed)
395 result[
"GaudiTest.TTrees.failure_on"] = result.Quote(msg)
396 result[
"GaudiTest.TTrees.found"] = result.Quote(pp.pformat(trees))
404 Compare the TTree summaries in stdout with the ones in trees_dict or in 405 the reference file. By default ignore the size, compression and basket 407 The presence of TTree summaries when none is expected is not a failure. 419 if lreference
and os.path.isfile(lreference):
424 from pprint
import PrettyPrinter
427 result[
"GaudiTest.Histos.expected"] = result.Quote(
430 result[
"GaudiTest.Histos.ignore"] = result.Quote(ignore)
435 causes.append(
"histos summaries")
437 result[
"GaudiTest.Histos.failure_on"] = result.Quote(msg)
438 result[
"GaudiTest.Histos.found"] = result.Quote(pp.pformat(histos))
443 causes=
None, preproc=
None):
445 Default validation acti*on: compare standard output and error to the 460 preproc = normalizeExamples
464 if lreference
and os.path.isfile(lreference):
468 preproc=preproc)(stdout, result)
474 newref = open(lreference +
".new",
"w")
476 for l
in stdout.splitlines():
477 newref.write(l.rstrip() +
'\n')
487 if lreference
and os.path.isfile(lreference):
491 preproc=preproc)(stderr, result)
494 newref = open(lreference +
".new",
"w")
496 for l
in stderr.splitlines():
497 newref.write(l.rstrip() +
'\n')
501 "ExecTest.expected_stderr")(stderr, result)
def platformSplit(p):
    '''Split a platform id into its component tags.

    Old-style ids use '_' as separator (e.g. win32_vc71_dbg) while
    new-style ones use '-' (e.g. x86_64-slc6-gcc48-opt); use whichever
    separator actually appears in the id.
    '''
    # Conditional expression replaces the fragile "cond and a or b" idiom
    # (which silently breaks whenever the "a" operand is falsy).
    return set(p.split('-' if '-' in p else '_'))
512 reference = os.path.normpath(os.path.join(self.
basedir,
513 os.path.expandvars(reffile)))
516 spec_ref = reference[:-3] +
GetPlatform(self)[0:3] + reference[-3:]
517 if os.path.isfile(spec_ref):
521 dirname, basename = os.path.split(reference)
524 head = basename +
"." 527 if 'do0' in platform:
530 for f
in os.listdir(dirname):
531 if f.startswith(head):
532 req_plat = platformSplit(f[head_len:])
533 if platform.issuperset(req_plat):
534 candidates.append((len(req_plat), f))
539 reference = os.path.join(dirname, candidates[-1][1])
551 from GaudiKernel
import ROOT6WorkAroundEnabled
562 Function used to normalize the used path 564 newPath = os.path.normpath(os.path.expandvars(p))
565 if os.path.exists(newPath):
566 p = os.path.realpath(newPath)
572 Locates an executable in the executables path ($PATH) and returns the full 573 path to it. An application is looked for with or without the '.exe' suffix. 574 If the executable cannot be found, None is returned 576 if os.path.isabs(executable):
577 if not os.path.exists(executable):
578 if executable.endswith(
'.exe'):
579 if os.path.exists(executable[:-4]):
580 return executable[:-4]
582 head, executable = os.path.split(executable)
585 for d
in os.environ.get(
"PATH").split(os.pathsep):
586 fullpath = os.path.join(d, executable)
587 if os.path.exists(fullpath):
589 if executable.endswith(
'.exe'):
590 return which(executable[:-4])
606 UNTESTED =
'UNTESTED' 616 def __init__(self, kind=None, id=None, outcome=PASS, annotations={}):
620 assert type(key)
in types.StringTypes
624 assert type(key)
in types.StringTypes
625 assert type(value)
in types.StringTypes
647 """Validate the output of the program. 648 'stdout' -- A string containing the data written to the standard output 650 'stderr' -- A string containing the data written to the standard error 652 'result' -- A 'Result' object. It may be used to annotate 653 the outcome according to the content of stderr. 654 returns -- A list of strings giving causes of failure.""" 659 causes.append(self.
cause)
665 """Compare 's1' and 's2', ignoring line endings. 668 returns -- True if 's1' and 's2' are the same, ignoring 669 differences in line endings.""" 672 to_ignore = re.compile(
673 r'Warning in <TInterpreter::ReadRootmapFile>: .* is already in .*')
675 def keep_line(l):
return not to_ignore.match(l)
676 return filter(keep_line, s1.splitlines()) == filter(keep_line, s2.splitlines())
678 return s1.splitlines() == s2.splitlines()
683 """ Base class for a callable that takes a file and returns a modified 698 if hasattr(input,
"__iter__"):
702 lines = input.splitlines()
706 output =
'\n'.join(output)
735 if line.find(s) >= 0:
750 if self.
start in line:
753 elif self.
end in line:
763 when = re.compile(when)
767 if isinstance(rhs, RegexpReplacer):
769 res._operations = self.
_operations + rhs._operations
771 res = FilePreprocessor.__add__(self, rhs)
776 if w
is None or w.search(line):
777 line = o.sub(r, line)
783 normalizeDate =
RegexpReplacer(
"[0-2]?[0-9]:[0-5][0-9]:[0-5][0-9] [0-9]{4}[-/][01][0-9][-/][0-3][0-9][ A-Z]*",
784 "00:00:00 1970-01-01")
786 normalizeEOL.__processLine__ =
lambda line: str(line).rstrip() +
'\n' 790 skipEmptyLines.__processLine__ =
lambda line: (line.strip()
and line)
or None 804 line = line[:(pos + self.
siglen)]
805 lst = line[(pos + self.
siglen):].split()
807 line +=
" ".join(lst)
813 Sort group of lines matching a regular expression 817 self.
exp = exp
if hasattr(exp,
'match')
else re.compile(exp)
820 match = self.exp.match
836 normalizeExamples = maskPointers + normalizeDate
839 (
"TIMER.TIMER",
r"\s+[+-]?[0-9]+[0-9.]*",
" 0"),
840 (
"release all pending",
r"^.*/([^/]*:.*)",
r"\1"),
841 (
"^#.*file",
r"file '.*[/\\]([^/\\]*)$",
r"file '\1"),
842 (
"^JobOptionsSvc.*options successfully read in from",
843 r"read in from .*[/\\]([^/\\]*)$",
r"file \1"),
845 (
None,
r"[0-9A-Fa-f]{8}-[0-9A-Fa-f]{4}-[0-9A-Fa-f]{4}-[0-9A-Fa-f]{4}(?!-0{12})-[0-9A-Fa-f]{12}",
846 "00000000-0000-0000-0000-000000000000"),
848 (
"ServiceLocatorHelper::",
"ServiceLocatorHelper::(create|locate)Service",
849 "ServiceLocatorHelper::service"),
851 (
None,
r"e([-+])0([0-9][0-9])",
r"e\1\2"),
853 (
None,
r'Service reference count check:',
854 r'Looping over all active services...'),
856 (
None,
r"^(.*(DEBUG|SUCCESS) List of ALL properties of .*#properties = )\d+",
r"\1NN"),
861 "JobOptionsSvc INFO # ",
862 "JobOptionsSvc WARNING # ",
865 "This machine has a speed",
868 "ToolSvc.Sequenc... INFO",
869 "DataListenerSvc INFO XML written to file:",
870 "[INFO]",
"[WARNING]",
871 "DEBUG No writable file catalog found which contains FID:",
872 "DEBUG Service base class initialized successfully",
873 "DEBUG Incident timing:",
875 "INFO 'CnvServices':[",
879 r"^JobOptionsSvc INFO *$",
882 r"(Always|SUCCESS)\s*(Root f|[^ ]* F)ile version:",
883 r"File '.*.xml' does not exist",
884 r"INFO Refer to dataset .* by its file ID:",
885 r"INFO Referring to dataset .* by its file ID:",
886 r"INFO Disconnect from dataset",
887 r"INFO Disconnected from dataset",
888 r"INFO Disconnected data IO:",
889 r"IncidentSvc\s*(DEBUG (Adding|Removing)|VERBOSE Calling)",
891 r"^StatusCodeSvc.*listing all unchecked return codes:",
892 r"^StatusCodeSvc\s*INFO\s*$",
893 r"Num\s*\|\s*Function\s*\|\s*Source Library",
896 r"ERROR Failed to modify file: .* Errno=2 No such file or directory",
898 r"^ +[0-9]+ \|.*ROOT",
899 r"^ +[0-9]+ \|.*\|.*Dict",
901 r"StatusCodeSvc.*all StatusCode instances where checked",
903 r"EventLoopMgr.*---> Loop Finished",
907 r"SUCCESS\s*Booked \d+ Histogram\(s\)",
911 r"Property(.*)'Audit(Algorithm|Tool|Service)s':",
913 r"Property(.*)'AuditRe(start|initialize)':",
914 r"Property(.*)'IsIOBound':",
916 r"Property(.*)'ErrorCount(er)?':",
918 r"Property(.*)'Sequential':",
920 r"Property(.*)'FilterCircularDependencies':",
922 r"Property(.*)'IsClonable':",
924 r"Property update for OutputLevel : new value =",
925 r"EventLoopMgr\s*DEBUG Creating OutputStream",
931 r'Warning in <TInterpreter::ReadRootmapFile>: .* is already in .*',
934 normalizeExamples = (lineSkipper + normalizeExamples + skipEmptyLines +
935 normalizeEOL +
LineSorter(
"Services to release : ") +
942 def __init__(self, reffile, cause, result_key, preproc=normalizeExamples):
950 if os.path.isfile(self.
reffile):
951 orig = open(self.
reffile).xreadlines()
955 result.Quote(
'\n'.join(
map(str.strip, orig)))
958 new = stdout.splitlines()
962 diffs = difflib.ndiff(orig, new, charjunk=difflib.IS_CHARACTER_JUNK)
963 filterdiffs =
map(
lambda x: x.strip(), filter(
964 lambda x: x[0] !=
" ", diffs))
966 result[self.
result_key] = result.Quote(
"\n".join(filterdiffs))
970 +) standard output of the test""")
972 result.Quote(
'\n'.join(
map(str.strip, new)))
973 causes.append(self.
cause)
979 Scan stdout to find ROOT TTree summaries and digest them. 981 stars = re.compile(
r"^\*+$")
982 outlines = stdout.splitlines()
983 nlines = len(outlines)
989 while i < nlines
and not stars.match(outlines[i]):
994 trees[tree[
"Name"]] = tree
1001 Check that all the keys in reference are in to_check too, with the same value. 1002 If the value is a dict, the function is called recursively. to_check can 1003 contain more keys than reference, that will not be tested. 1004 The function returns at the first difference found. 1009 ignore_re = re.compile(ignore)
1010 keys = [key
for key
in reference
if not ignore_re.match(key)]
1012 keys = reference.keys()
1016 if (
type(reference[k])
is dict)
and (
type(to_check[k])
is dict):
1019 reference[k], to_check[k], ignore)
1022 failed = to_check[k] != reference[k]
1027 fail_keys.insert(0, k)
1038 if c
is None or r
is None:
1040 return (fail_path, r, c)
1044 h_count_re = re.compile(
1045 r"^(.*)SUCCESS\s+Booked (\d+) Histogram\(s\) :\s+([\s\w=-]*)")
1050 Parse the TTree summary table in lines, starting from pos. 1051 Returns a tuple with the dictionary with the digested informations and the 1052 position of the first line after the summary. 1058 def splitcols(l):
return [f.strip()
for f
in l.strip(
"*\n").split(
':', 2)]
1062 cols = splitcols(ll[0])
1063 r[
"Name"], r[
"Title"] = cols[1:]
1065 cols = splitcols(ll[1])
1066 r[
"Entries"] = int(cols[1])
1068 sizes = cols[2].split()
1069 r[
"Total size"] = int(sizes[2])
1070 if sizes[-1] ==
"memory":
1073 r[
"File size"] = int(sizes[-1])
1075 cols = splitcols(ll[2])
1076 sizes = cols[2].split()
1077 if cols[0] ==
"Baskets":
1078 r[
"Baskets"] = int(cols[1])
1079 r[
"Basket size"] = int(sizes[2])
1080 r[
"Compression"] = float(sizes[-1])
1083 if i < (count - 3)
and lines[i].startswith(
"*Tree"):
1084 result = parseblock(lines[i:i + 3])
1085 result[
"Branches"] = {}
1087 while i < (count - 3)
and lines[i].startswith(
"*Br"):
1088 if i < (count - 2)
and lines[i].startswith(
"*Branch "):
1092 branch = parseblock(lines[i:i + 3])
1093 result[
"Branches"][branch[
"Name"]] = branch
1101 Extract the histograms infos from the lines starting at pos. 1102 Returns the position of the first line after the summary block. 1105 h_table_head = re.compile(
1106 r'SUCCESS\s+List of booked (1D|2D|3D|1D profile|2D profile) histograms in directory\s+"(\w*)"')
1107 h_short_summ = re.compile(
r"ID=([^\"]+)\s+\"([^\"]+)\"\s+(.*)")
1112 m = h_count_re.search(lines[pos])
1113 name = m.group(1).strip()
1114 total = int(m.group(2))
1116 for k, v
in [x.split(
"=")
for x
in m.group(3).split()]:
1119 header[
"Total"] = total
1123 m = h_table_head.search(lines[pos])
1126 t = t.replace(
" profile",
"Prof")
1133 if l.startswith(
" | ID"):
1135 titles = [x.strip()
for x
in l.split(
"|")][1:]
1137 while pos < nlines
and lines[pos].startswith(
" |"):
1139 values = [x.strip()
for x
in l.split(
"|")][1:]
1141 for i
in range(len(titles)):
1142 hcont[titles[i]] = values[i]
1143 cont[hcont[
"ID"]] = hcont
1145 elif l.startswith(
" ID="):
1146 while pos < nlines
and lines[pos].startswith(
" ID="):
1148 for x
in h_short_summ.search(lines[pos]).groups()]
1149 cont[values[0]] = values
1153 "Cannot understand line %d: '%s'" % (pos, l))
1157 summ[d][
"header"] = header
1162 summ[name] = {
"header": header}
1168 Scan stdout to find ROOT TTree summaries and digest them. 1170 outlines = stdout.splitlines()
1171 nlines = len(outlines) - 1
1179 match = h_count_re.search(outlines[pos])
1180 while pos < nlines
and not match:
1182 match = h_count_re.search(outlines[pos])
1185 summaries.update(summ)
1191 unsupported = [re.compile(x)
for x
in [str(y).strip()
1192 for y
in unsupported_platforms]
if x]
1193 for p_re
in unsupported:
1194 if p_re.search(platform):
1195 result.SetOutcome(result.UNTESTED)
1196 result[result.CAUSE] =
'Platform not supported.' 1203 Return the platform Id defined in CMTCONFIG or SCRAM_ARCH. 1207 if "BINARY_TAG" in os.environ:
1208 arch = os.environ[
"BINARY_TAG"]
1209 elif "CMTCONFIG" in os.environ:
1210 arch = os.environ[
"CMTCONFIG"]
1211 elif "SCRAM_ARCH" in os.environ:
1212 arch = os.environ[
"SCRAM_ARCH"]
1218 Return True if the current platform is Windows. 1220 This function was needed because of the change in the CMTCONFIG format, 1221 from win32_vc71_dbg to i686-winxp-vc9-dbg. 1224 return "winxp" in platform
or platform.startswith(
"win")
def PlatformIsNotSupported(self, context, result)
def __processLine__(self, line)
def __init__(self, start, end)
def __call__(self, input)
def validateWithReference(self, stdout=None, stderr=None, result=None, causes=None, preproc=None)
def __processLine__(self, line)
def cmpTreesDicts(reference, to_check, ignore=None)
def __processFile__(self, lines)
def ValidateOutput(self, stdout, stderr, result)
def read(f, regex='.*', skipevents=0)
def __processLine__(self, line)
def __processFile__(self, lines)
def __call__(self, out, result)
def findHistosSummaries(stdout)
def _parseTTreeSummary(lines, pos)
struct GAUDI_API map
Parametrisation class for map-like implementation.
def __call__(self, stdout, result)
def __processLine__(self, line)
def __init__(self, orig, repl="", when=None)
decltype(auto) range(Args &&...args)
Zips multiple containers together to form a single range.
def __init__(self, signature)
def __call__(self, input)
def sanitize_for_xml(data)
def getCmpFailingValues(reference, to_check, fail_path)
def __init__(self, members=[])
def __init__(self, strings=[], regexps=[])
def __setitem__(self, key, value)
def __init__(self, kind=None, id=None, outcome=PASS, annotations={})
def __processLine__(self, line)
def parseHistosSummary(lines, pos)
def _expandReferenceFileName(self, reffile)
def findReferenceBlock(self, reference=None, stdout=None, result=None, causes=None, signature_offset=0, signature=None, id=None)
def CheckHistosSummaries(self, stdout=None, result=None, causes=None, dict=None, ignore=None)
def __CompareText(self, s1, s2)
def __init__(self, reffile, cause, result_key, preproc=normalizeExamples)
def __getitem__(self, key)
def findTTreeSummaries(stdout)
def __init__(self, ref, cause, result_key)
def ROOT6WorkAroundEnabled(id=None)
def CheckTTreesSummaries(self, stdout=None, result=None, causes=None, trees_dict=None, ignore=r"Basket|.*size|Compression")