ProcessJobOptions.py
import os, sys, re

import logging
_log = logging.getLogger(__name__)

class LogFormatter(logging.Formatter):
    def __init__(self, fmt=None, datefmt=None, prefix="# "):
        logging.Formatter.__init__(self, fmt, datefmt)
        self.prefix = prefix
    def format(self, record):
        fmsg = logging.Formatter.format(self, record)
        prefix = self.prefix
        if record.levelno >= logging.WARNING:
            prefix += record.levelname + ": "
        s = "\n".join([prefix + line
                       for line in fmsg.splitlines()])
        return s

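# Illustrative note (not part of the original file): LogFormatter prepends the
# prefix to every line of the formatted message and adds the level name for
# WARNING and above. For example, a warning logged with the default "# "
# prefix would be printed as:
#
#   # WARNING: something unexpected happened
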
class LogFilter(logging.Filter):
    def __init__(self, name=""):
        logging.Filter.__init__(self, name)
        self.printing_level = 0
        self.enabled = True
        self.threshold = logging.WARNING
    def filter(self, record):
        return record.levelno >= self.threshold or (self.enabled and self.printing_level <= 0)
    def printOn(self, step=1, force=False):
        """
        Decrease the printing_level by 'step' units. (a level > 0 means no printing)
        The level cannot go below 0, unless the force flag is set to True.
        A negative value of the level absorbs subsequent printOff()s.
        """
        if force:
            self.printing_level -= step
        else:
            if self.printing_level > step:
                self.printing_level -= step
            else:
                self.printing_level = 0
    def printOff(self, step=1):
        """
        Increase the printing_level by 'step' units. (a level > 0 means no printing)
        """
        self.printing_level += step
    def disable(self, allowed=logging.WARNING):
        self.enabled = False
        self.threshold = allowed
    def enable(self, allowed=logging.WARNING):
        self.enabled = True
        self.threshold = allowed

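# Illustrative sketch (not part of the original file): printOff()/printOn()
# behave like a nesting counter, and a record below the threshold only passes
# while the counter is at 0 or less:
#
#   flt = LogFilter()
#   flt.printOff()   # printing_level == 1 -> INFO records are filtered out
#   flt.printOff()   # printing_level == 2
#   flt.printOn()    # printing_level == 1 -> still filtered
#   flt.printOn()    # printing_level == 0 -> INFO records pass again
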
class ConsoleHandler(logging.StreamHandler):
    def __init__(self, stream=None, prefix=None):
        if stream is None:
            stream = sys.stdout
        logging.StreamHandler.__init__(self, stream)
        if prefix is None:
            prefix = "# "
        self._filter = LogFilter(_log.name)
        self._formatter = LogFormatter(prefix=prefix)
        self.setFormatter(self._formatter)
        self.addFilter(self._filter)
    def setPrefix(self, prefix):
        self._formatter.prefix = prefix
    def printOn(self, step=1, force=False):
        """
        Decrease the printing_level by 'step' units. (a level > 0 means no printing)
        The level cannot go below 0, unless the force flag is set to True.
        A negative value of the level absorbs subsequent printOff()s.
        """
        self._filter.printOn(step, force)
    def printOff(self, step=1):
        """
        Increase the printing_level by 'step' units. (a level > 0 means no printing)
        """
        self._filter.printOff(step)
    def disable(self, allowed=logging.WARNING):
        self._filter.disable(allowed)
    def enable(self, allowed=logging.WARNING):
        self._filter.enable(allowed)

_consoleHandler = None
def GetConsoleHandler(prefix=None, stream=None):
    global _consoleHandler
    if _consoleHandler is None:
        _consoleHandler = ConsoleHandler(prefix=prefix, stream=stream)
    elif prefix is not None:
        _consoleHandler.setPrefix(prefix)
    return _consoleHandler

def InstallRootLoggingHandler(prefix=None, level=None, stream=None):
    root_logger = logging.getLogger()
    if not root_logger.handlers:
        root_logger.addHandler(GetConsoleHandler(prefix, stream))
        root_logger.setLevel(logging.WARNING)
    if level is not None:
        root_logger.setLevel(level)

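# Example usage (a minimal sketch, assuming the module is imported as
# GaudiKernel.ProcessJobOptions; the logger name is only illustrative):
#
#   import logging
#   from GaudiKernel.ProcessJobOptions import InstallRootLoggingHandler
#
#   InstallRootLoggingHandler(prefix="// ", level=logging.INFO)
#   logging.getLogger("MyScript").info("configured")   # printed as "// configured"
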
def PrintOn(step=1, force=False):
    GetConsoleHandler().printOn(step, force)
def PrintOff(step=1):
    GetConsoleHandler().printOff(step)

class ParserError(RuntimeError):
    pass

def _find_file(f):
    # expand environment variables in the filename
    f = os.path.expandvars(f)
    if os.path.isfile(f):
        return os.path.realpath(f)

    path = os.environ.get('JOBOPTSEARCHPATH', '').split(os.pathsep)
    # find the full path to the options file
    candidates = [d for d in path if os.path.isfile(os.path.join(d, f))]
    if not candidates:
        raise ParserError("Cannot find '%s' in %s" % (f, path))
    return os.path.realpath(os.path.join(candidates[0], f))

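# Illustrative example (hypothetical paths, Unix-style os.pathsep):
#
#   os.environ['JOBOPTSEARCHPATH'] = '/opt/myproject/options:/opt/shared/options'
#   _find_file('MyJob.opts')
#
# returns the real path of the first '<dir>/MyJob.opts' found in those
# directories, or raises ParserError if none of them contains the file.
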
_included_files = set()
def _to_be_included(f):
    if f in _included_files:
        _log.warning("file '%s' already included, ignored.", f)
        return False
    _included_files.add(f)
    return True

class JobOptsParser:
    comment = re.compile(r'(//.*)$')
    # imperfect regular expression to check if '//' is inside a string
    # (a tokenizer would be better)
    comment_in_string = re.compile(r'(["\']).*//.*\1')
    directive = re.compile(r'^\s*#\s*([\w!]+)\s*(.*)\s*$')
    comment_ml = (re.compile(r'/\*'), re.compile(r'\*/'))
    statement_sep = ";"
    reference = re.compile(r'^@([\w.]*)$')

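    # Illustrative note (not part of the original file): the 'directive'
    # expression captures the directive name and its argument, e.g. for the
    # line '#include "Common.opts"' group(1) is 'include' and group(2) is
    # '"Common.opts"' (the quotes are stripped later by parse()).
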
    def __init__(self):
        # parser level states
        self.units = {}
        self.defines = {}
        if sys.platform != 'win32':
            self.defines["WIN32"] = True

    def _include(self, file, function):
        file = _find_file(file)
        if _to_be_included(file):
            _log.info("--> Including file '%s'", file)
            function(file)
            _log.info("<-- End of file '%s'", file)

    def parse(self, file):
        # states for the "translation unit"
        statement = ""

        ifdef_level = 0
        ifdef_skipping = False
        ifdef_skipping_level = 0

        f = open(_find_file(file))
        l = f.readline()
        if l.startswith("#!"):
            # Skip the first line if it starts with "#!".
            # It allows options files to be used as scripts.
            l = f.readline()

        while l:
            l = l.rstrip() + '\n'  # normalize EOL chars (to avoid problems with DOS new-lines on Unix)

            # single-line comment
            m = self.comment.search(l)
            if m:
                # check if the '//' is part of a string
                m2 = self.comment_in_string.search(l)
                # the '//' is part of a string if we find the quotes around it
                # and they are not part of the comment itself
                if not (m2 and m2.start() < m.start()):
                    # if that is not the case, we can remove the comment from
                    # the statement
                    l = l[:m.start()] + l[m.end():]
            # process directives
            m = self.directive.search(l)
            if m:
                directive_name = m.group(1)
                directive_arg = m.group(2).strip()
                if directive_name == "include":
                    included_file = directive_arg.strip("'\"")
                    importOptions(included_file)
                elif directive_name == "units":
                    units_file = directive_arg.strip("'\"")
                    self._include(units_file, self._parse_units)
                elif directive_name in ["ifdef", "ifndef"]:
                    ifdef_skipping_level = ifdef_level
                    ifdef_level += 1
                    if directive_arg in self.defines:
                        ifdef_skipping = directive_name == "ifndef"
                    else:
                        ifdef_skipping = directive_name == "ifdef"
                elif directive_name == "else":
                    ifdef_skipping = not ifdef_skipping
                elif directive_name == "endif":
                    ifdef_level -= 1
                    if ifdef_skipping and ifdef_skipping_level == ifdef_level:
                        ifdef_skipping = False
                elif directive_name == "pragma":
                    if not directive_arg:
                        l = f.readline()
                        continue
                    pragma = directive_arg.split()
                    if pragma[0] == "print":
                        if len(pragma) > 1:
                            if pragma[1].upper() in ["ON", "TRUE", "1"]:
                                PrintOn()
                            else:
                                PrintOff()
                else:
                    _log.warning("unknown directive '%s'", directive_name)
                l = f.readline()
                continue

            if ifdef_skipping:
                l = f.readline()
                continue

            # multi-line comment
            m = self.comment_ml[0].search(l)
            if m:
                l, l1 = l[:m.start()], l[m.end():]
                m = self.comment_ml[1].search(l1)
                while not m:
                    l1 = f.readline()
                    if not l1:
                        break  # EOF
                    m = self.comment_ml[1].search(l1)
                if not l1 and not m:
                    raise ParserError("End Of File reached before the end of a multi-line comment")
                l += l1[m.end():]

            if self.statement_sep in l:
                i = l.index(self.statement_sep)
                statement += l[:i]
                self._eval_statement(statement.replace("\n", "").strip())
                statement = l[i + 1:]
                # it may happen (bug #37479) that the rest of the statement
                # contains a comment
                if statement.lstrip().startswith("//"):
                    statement = ""
            else:
                statement += l

            l = f.readline()

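    # Illustrative example (a hypothetical job options file, not part of the
    # original sources) showing the constructs handled by parse():
    #
    #   // MyJob.opts
    #   #include "Common.opts"
    #   #units "units.opts"
    #   #ifdef WIN32
    #   MyAlg.Option = "win";
    #   #else
    #   MyAlg.Option = "unix";
    #   #endif
    #   #pragma print on
    #   ApplicationMgr.TopAlg += { "MyAlg" };
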
    def _parse_units(self, file):
        for line in open(file):
            if '//' in line:
                line = line[:line.index('//')]
            line = line.strip()
            if not line:
                continue
            nunit, value = line.split('=')
            factor, unit = nunit.split()
            value = eval(value) / eval(factor)
            self.units[unit] = value

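    # Illustrative example (hypothetical units file): every non-empty line has
    # the form '<factor> <unit> = <value>' and stores value/factor, e.g.
    #
    #   1 mm = 1
    #   1 cm = 10
    #   1 m  = 1000
    #
    # so that an options value such as '5 * cm' evaluates to 50.
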
    def _eval_statement(self, statement):
        from GaudiKernel.Proxy.Configurable import (ConfigurableGeneric,
                                                    Configurable,
                                                    PropertyReference)
        #statement = statement.replace("\n","").strip()
        _log.info("%s%s", statement, self.statement_sep)

        property, value = statement.split("=", 1)

        inc = None
        if property[-1] in ["+", "-"]:
            inc = property[-1]
            property = property[:-1]

        property = property.strip()
        value = value.strip()

        ## find the configurable to apply the property to
        #parent_cfg = None
        #while '.' in property:
        #    component, property = property.split('.', 1)
        #    if parent_cfg:
        #        if hasattr(parent_cfg, component):
        #            cfg = getattr(parent_cfg, component)
        #        else:
        #            cfg = ConfigurableGeneric(component)
        #            setattr(parent_cfg, component, cfg)
        #    else:
        #        cfg = ConfigurableGeneric(component)
        #    parent_cfg = cfg

        # remove spaces around dots
        property = '.'.join([w.strip() for w in property.split('.')])
        component, property = property.rsplit('.', 1)
        if component in Configurable.allConfigurables:
            cfg = Configurable.allConfigurables[component]
        else:
            cfg = ConfigurableGeneric(component)

        #value = os.path.expandvars(value)
        value = value.replace('true', 'True').replace('false', 'False')
        if value[0] == '{':
            # Try to guess if the value looks like a dictionary
            if ':' in value and not (value[:value.index(':')].count('"') % 2 or value[:value.index(':')].count("'") % 2):
                # for dictionaries, keep the surrounding {}
                value = '{' + value[1:-1].replace('{', '[').replace('}', ']') + '}'
            else:  # otherwise replace all {} with []
                value = value.replace('{', '[').replace('}', ']')

        # We must escape '\' because eval tends to interpret them
        value = value.replace('\\', '\\\\')
        # Replace literal '\n' and '\t' with spaces (bug #47258)
        value = value.replace("\\n", " ").replace("\\t", " ")

        # interpret the @ operator
        m = self.reference.match(value)
        if m:
            # this allows late binding of references
            value = PropertyReference(m.group(1))
        else:
            value = eval(value, self.units)

        #if type(value) is str: value = os.path.expandvars(value)
        #elif type(value) is list: value = [type(item) is str and os.path.expandvars(item) or item for item in value]

        if property not in cfg.__slots__ and not hasattr(cfg, property):
            # check if the case of the property is wrong (old options are case insensitive)
            lprop = property.lower()
            for p in cfg.__slots__:
                if lprop == p.lower():
                    _log.warning("property '%s' was requested for %s, but the correct spelling is '%s'",
                                 property, cfg.name(), p)
                    property = p
                    break

        # handle the += and -= operators
        if inc == "+":
            if hasattr(cfg, property):
                prop = getattr(cfg, property)
                if type(prop) == dict:
                    for k in value:
                        prop[k] = value[k]
                else:
                    prop += value
            else:
                setattr(cfg, property, value)
        elif inc == "-":
            if hasattr(cfg, property):
                prop = getattr(cfg, property)
                if type(prop) is dict:
                    for k in value:
                        if k in prop:
                            del prop[k]
                        else:
                            _log.warning("key '%s' not in %s.%s", k, cfg.name(), property)
                else:
                    for k in value:
                        if k in prop:
                            prop.remove(k)
                        else:
                            _log.warning("value '%s' not in %s.%s", k, cfg.name(), property)
        else:
            setattr(cfg, property, value)

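# Illustrative example (a hypothetical statement, not part of the original
# file): for the options line
#
#   ToolSvc.MyTool.OutputLevel = 2;
#
# _eval_statement() splits it into the component 'ToolSvc.MyTool' and the
# property 'OutputLevel', looks the component up in
# Configurable.allConfigurables (creating a ConfigurableGeneric if needed),
# evaluates the value with the units dictionary as namespace, and then
# assigns, appends (+=) or removes (-=) it.
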
class _TempSysPath:
    # helper that temporarily replaces sys.path and restores it on deletion
    def __init__(self, new_path):
        self.old_path = sys.path
        sys.path = new_path
    def __del__(self):
        sys.path = self.old_path

_parser = JobOptsParser()

def _import_python(file):
    execfile(file, {})

def _import_pickle(file):
    import pickle
    input = open(file, 'rb')
    catalog = pickle.load(input)
    _log.info('Unpickled %d configurables', len(catalog))

def _import_opts(file):
    _parser.parse(file)

_import_function_mapping = {
    ".py":   _import_python,
    ".pkl":  _import_pickle,
    ".opts": _import_opts,
    }

def importOptions(optsfile):
    # expand environment variables before checking the extension
    optsfile = os.path.expandvars(optsfile)
    # check the file type (extension)
    dummy, ext = os.path.splitext(optsfile)
    if ext in _import_function_mapping:
        # check if the file has already been included
        optsfile = _find_file(optsfile)
        if _to_be_included(optsfile):
            _log.info("--> Including file '%s'", optsfile)
            # include the file
            _import_function_mapping[ext](optsfile)
            _log.info("<-- End of file '%s'", optsfile)
    else:
        raise ParserError("Unknown file type '%s' ('%s')" % (ext, optsfile))

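# Example usage (a minimal sketch; the file names are hypothetical):
#
#   from GaudiKernel.ProcessJobOptions import importOptions
#
#   importOptions("Common.opts")    # parsed with JobOptsParser
#   importOptions("MyJob.py")       # executed as a Python script
#   importOptions("catalog.pkl")    # unpickled catalog of configurables
#
# A file that was already imported is skipped with a warning, and an unknown
# extension raises ParserError.
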
## Import a file containing declarations of units.
#  It is equivalent to the job options directive:
#
#    #units "unitsfile.opts"
#
def importUnits(unitsfile):
    # expand environment variables
    unitsfile = os.path.expandvars(unitsfile)
    # we do not need to check the file type (extension) because it must be a
    # units file
    _parser._include(unitsfile, _parser._parse_units)
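# Example usage (a minimal sketch; the file name is hypothetical):
#
#   from GaudiKernel.ProcessJobOptions import importUnits
#
#   importUnits("units.opts")   # same effect as the '#units "units.opts"' directive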