Gaudi Framework, version v23r5

cmt2cmake.py
#!/usr/bin/env python
"""
Script to convert CMT projects/packages to CMake Gaudi-based configuration.
"""
import os
import sys
import re
import logging
import shelve
import json
import operator

def makeParser(patterns=None):
    from pyparsing import (Word, QuotedString, Keyword, Literal, SkipTo, StringEnd,
                           ZeroOrMore, Optional, Combine,
                           alphas, alphanums, printables)
    dblQuotedString = QuotedString(quoteChar='"', escChar='\\', unquoteResults=False)
    sglQuotedString = QuotedString(quoteChar="'", escChar='\\', unquoteResults=False)
    value = dblQuotedString | sglQuotedString | Word(printables)

    tag_name = Word(alphas + "_", alphanums + "_-")
    tag_expression = Combine(tag_name + ZeroOrMore('&' + tag_name))
    values = value + ZeroOrMore(tag_expression + value)

    identifier = Word(alphas + "_", alphanums + "_")
    variable = Combine(identifier + '=' + value)

    constituent_option = (Keyword('-no_share')
                          | Keyword('-no_static')
                          | Keyword('-prototypes')
                          | Keyword('-no_prototypes')
                          | Keyword('-check')
                          | Keyword('-target_tag')
                          | Combine('-group=' + value)
                          | Combine('-suffix=' + value)
                          | Combine('-import=' + value)
                          | variable
                          | Keyword('-OS9')
                          | Keyword('-windows'))
    source = (Word(alphanums + "_*./$()")
              | Combine('-s=' + value)
              | Combine('-k=' + value)
              | Combine('-x=' + value))

    # statements
    comment = (Literal("#") + SkipTo(StringEnd())).suppress()

    package = Keyword('package') + Word(printables)
    version = Keyword("version") + Word(printables)
    use = Keyword("use") + identifier + Word(printables) + Optional(identifier) + Optional(Keyword("-no_auto_imports"))

    constituent = ((Keyword('library') | Keyword('application') | Keyword('document'))
                   + identifier + ZeroOrMore(constituent_option | source))
    macro = (Keyword('macro') | Keyword('macro_append')) + identifier + values
    setenv = (Keyword('set') | Keyword('path_append') | Keyword('path_prepend')) + identifier + values

    apply_pattern = Keyword("apply_pattern") + identifier + ZeroOrMore(variable)
    if patterns:
        direct_patterns = reduce(operator.or_, map(Keyword, set(patterns)))
        # add the implied 'apply_pattern' to the list of tokens
        direct_patterns.addParseAction(lambda toks: toks.insert(0, 'apply_pattern'))
        apply_pattern = apply_pattern | (direct_patterns + ZeroOrMore(variable))

    statement = (package | version | use | constituent | macro | setenv | apply_pattern)

    return Optional(statement) + Optional(comment) + StringEnd()

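# Illustration only (not part of the original script): the parser returned by
# makeParser() tokenizes single CMT requirements statements, e.g., assuming
# pyparsing is available:
#
#   parser = makeParser()
#   list(parser.parseString('use GaudiKernel v* -no_auto_imports'))
#   # -> ['use', 'GaudiKernel', 'v*', '-no_auto_imports']
#   list(parser.parseString('apply_pattern install_more_includes more=GaudiKernel'))
#   # -> ['apply_pattern', 'install_more_includes', 'more=GaudiKernel']
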
cache = None
def open_cache():
    global cache
    # record of known subdirs with their libraries
    # {'<subdir>': {'libraries': [...]}}
    # it also contains some info about the projects, under keys like repr(('<project>', '<version>'))
    try:
        # First we try the environment variable CMT2CMAKECACHE and the directory
        # containing this file...
        _shelve_file = os.environ.get('CMT2CMAKECACHE',
                                      os.path.join(os.path.dirname(__file__),
                                                   '.cmt2cmake.cache'))
        cache = shelve.open(_shelve_file)
    except:
        # ... otherwise we use the user home directory
        _shelve_file = os.path.join(os.path.expanduser('~'), '.cmt2cmake.cache')
        #logging.info("Using cache file %s", _shelve_file)
        cache = shelve.open(_shelve_file)

def close_cache():
    global cache
    if cache:
        cache.close()
        cache = None

config = {}
for k in ['ignored_packages', 'data_packages', 'needing_python', 'no_pedantic',
          'ignore_env']:
    config[k] = set()

# mappings
ignored_packages = config['ignored_packages']
data_packages = config['data_packages']

# List of packages known to actually need Python to build
needing_python = config['needing_python']

# packages that must have the pedantic option disabled
no_pedantic = config['no_pedantic']

ignore_env = config['ignore_env']

def loadConfig(config_file):
    '''
    Merge the content of the JSON file with the configuration dictionary.
    '''
    global config
    if os.path.exists(config_file):
        data = json.load(open(config_file))
        for k in data:
            if k not in config:
                config[k] = set()
            config[k].update(map(str, data[k]))
    # print config

loadConfig(os.path.join(os.path.dirname(__file__), 'cmt2cmake.cfg'))

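# Illustration only: cmt2cmake.cfg is a JSON file whose lists extend the sets
# above; the keys and entries below are hypothetical examples:
#
#   {
#       "ignored_packages": ["Tools/ObsoletePkg"],
#       "data_packages": ["Det/SQLDDDB"],
#       "needing_python": ["GaudiPython"]
#   }
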
def extName(n):
    '''
    Mapping between the LCG_Interfaces package name and the Find*.cmake name
    (if non-trivial).
    '''
    mapping = {'Reflex': 'ROOT',
               'Python': 'PythonLibs',
               'neurobayes_expert': 'NeuroBayesExpert',
               'mysql': 'MySQL',
               'oracle': 'Oracle',
               'sqlite': 'SQLite',
               'lfc': 'LFC',
               'fftw': 'FFTW',
               'uuid': 'UUID',
               }
    return mapping.get(n, n)

def isPackage(path):
    return os.path.isfile(os.path.join(path, "cmt", "requirements"))

def isProject(path):
    return os.path.isfile(os.path.join(path, "cmt", "project.cmt"))

def projectCase(name):
    return {'DAVINCI': 'DaVinci',
            'LHCB': 'LHCb'}.get(name.upper(), name.capitalize())

def callStringWithIndent(cmd, arglines):
    '''
    Produce a string for a call of a command with indented arguments.

    >>> print callStringWithIndent('example_command', ['arg1', 'arg2', 'arg3'])
    example_command(arg1
                    arg2
                    arg3)
    >>> print callStringWithIndent('example_command', ['', 'arg2', 'arg3'])
    example_command(arg2
                    arg3)
    '''
    indent = '\n' + ' ' * (len(cmd) + 1)
    return cmd + '(' + indent.join(filter(None, arglines)) + ')'

def writeToFile(filename, data, log=None):
    '''
    Write the generated CMakeLists.txt.
    '''
    if log and os.path.exists(filename):
        log.info('overwriting %s', filename)
    f = open(filename, "w")
    f.write(data)
    f.close()

class Package(object):
    def __init__(self, path, project=None):
        self.path = os.path.realpath(path)
        if not isPackage(self.path):
            raise ValueError("%s is not a package" % self.path)

        self.name = os.path.basename(self.path)
        self.requirements = os.path.join(self.path, "cmt", "requirements")
        self.project = project

        # prepare attributes filled during parsing of requirements
        self.uses = {}
        self.version = None
        self.libraries = []
        self.applications = []
        self.documents = []
        self.macros = {}
        self.sets = {}
        self.paths = {}

        # These are patterns that can appear only once per package.
        # The corresponding dictionary will contain the arguments passed to the
        # pattern.
        self.singleton_patterns = set(["QMTest", "install_python_modules", "install_scripts",
                                       "install_more_includes", "god_headers", "god_dictionary",
                                       "PyQtResource", "PyQtUIC"])
        self.install_more_includes = {}
        self.install_python_modules = self.install_scripts = self.QMTest = False
        self.god_headers = {}
        self.god_dictionary = {}
        self.PyQtResource = {}
        self.PyQtUIC = {}

        # These are patterns that can be repeated in the requirements.
        # The corresponding data members will contain the list of dictionaries
        # corresponding to the various calls.
        self.multi_patterns = set(['reflex_dictionary', 'component_library', 'linker_library',
                                   'copy_relax_rootmap'])
        self.reflex_dictionary = []
        self.component_library = []
        self.linker_library = []
        self.copy_relax_rootmap = []

        self.component_libraries = set()
        self.linker_libraries = set()

        self.log = logging.getLogger('Package(%s)' % self.name)
        # NOTE: the published listing drops a line here; _parseRequirements()
        # uses self.CMTParser, so a parser built with the default grammar is
        # assumed below.
        self.CMTParser = makeParser()
        try:
            self._parseRequirements()
        except:
            print "Processing %s" % self.requirements
            raise
        # update the known subdirs
        cache[self.name] = {# list of linker libraries provided by the package
                            'libraries': list(self.linker_libraries),
                            # true if it's a headers-only package
                            'includes': bool(self.install_more_includes and
                                             not self.linker_libraries)}

    def generate(self):
        # header
        data = ["#" * 80,
                "# Package: %s" % self.name,
                "#" * 80,
                "gaudi_subdir(%s %s)" % (self.name, self.version),
                ""]
        # dependencies
        # subdirectories (excluding specials)
        subdirs = [n for n in sorted(self.uses)
                   if not n.startswith("LCG_Interfaces/")
                   and n not in ignored_packages
                   and n not in data_packages]

        inc_dirs = []
        if subdirs:
            # check if we are missing info for a subdir
            missing_subdirs = set([s.rsplit('/')[-1] for s in subdirs]) - set(cache)
            if missing_subdirs:
                self.log.warning('Missing info cache for subdirs %s', ' '.join(sorted(missing_subdirs)))
            # declare inclusion order
            data.append(callStringWithIndent('gaudi_depends_on_subdirs', subdirs))
            data.append('')
            # consider header-only subdirs:
            # for each required subdir that comes with only headers, add its
            # location to the call to 'include_directories'
            inc_only = lambda s: cache.get(s.rsplit('/')[-1], {}).get('includes')
            inc_dirs = filter(inc_only, subdirs)

        # externals (excluding specials)
        # - Python needs to be treated in a special way
        find_packages = {}
        for n in sorted(self.uses):
            if n.startswith("LCG_Interfaces/"):
                n = extName(n[15:])
                # FIXME: find a general way to treat these special cases
                if n == "PythonLibs":
                    if self.name not in needing_python:  # only these packages actually link against Python
                        continue
                # get custom link options
                linkopts = self.macros.get(n + '_linkopts', '')
                components = [m.group(1) or m.group(2)
                              for m in re.finditer(r'(?:\$\(%s_linkopts_([^)]*)\))|(?:-l(\w*))' % n,
                                                   linkopts)]
                # FIXME: find a general way to treat the special cases
                if n == 'COOL':
                    components = ['CoolKernel', 'CoolApplication']
                elif n == 'CORAL':
                    components = ['CoralBase', 'CoralKernel', 'RelationalAccess']
                elif n == 'RELAX' and self.copy_relax_rootmap:
                    components = [d['dict'] for d in self.copy_relax_rootmap if 'dict' in d]

                find_packages[n] = find_packages.get(n, []) + components

        # this second loop avoids double entries due to converging results of extName()
        for n in sorted(find_packages):
            args = [n]
            components = find_packages[n]
            if components:
                if n == 'RELAX':  # FIXME: probably we should set 'REQUIRED' for all the externals
                    args.append('REQUIRED')
                args.append('COMPONENTS')
                args.extend(components)
            data.append('find_package(%s)' % ' '.join(args))
        if find_packages:
            data.append("")

        if self.name in no_pedantic:
            data.append('string(REPLACE "-pedantic" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")\n')

        # the headers can be installed via "PUBLIC_HEADERS" or by hand
        if self.install_more_includes:
            headers = [d for d in self.install_more_includes.values()
                       if os.path.isdir(os.path.join(self.path, d))]
        else:
            headers = []

        if self.god_headers or self.god_dictionary:
            data.append("include(GaudiObjDesc)")
            data.append("")

        god_headers_dest = None
        if self.god_headers:
            godargs = [self.god_headers["files"].replace("../", "")]

            godflags = self.macros.get('%sObj2Doth_GODflags' % self.name, "")
            godflags = re.search(r'-s\s*(\S+)', godflags)
            if godflags:
                god_headers_dest = os.path.normpath('Event/' + godflags.group(1))
                if god_headers_dest == 'src':
                    # special case
                    godargs.append('PRIVATE')
                else:
                    godargs.append('DESTINATION ' + god_headers_dest)

            data.append(callStringWithIndent('god_build_headers', godargs))
            data.append("")

        god_dict = []
        if self.god_dictionary:
            god_dict = [('--GOD--',
                         [self.god_dictionary["files"].replace("../", "")],
                         None, [])]

        rflx_dict = []
        for d in self.reflex_dictionary:
            for k in d:
                v = d[k]
                v = v.replace("$(%sROOT)/" % self.name.upper(), "")
                v = v.replace("../", "")
                d[k] = v
            imports = [i.strip('"').replace('-import=', '') for i in d.get('imports', '').strip().split()]
            rflx_dict.append((d['dictionary'] + 'Dict',
                              [d['headerfiles'], d['selectionfile']],
                              None,
                              imports))

        # libraries
        global_imports = [extName(name[15:])
                          for name in self.uses
                          if name.startswith('LCG_Interfaces/') and self.uses[name][1]]  # list of imported externals
        if 'PythonLibs' in global_imports and self.name not in needing_python:
            global_imports.remove('PythonLibs')

        subdir_imports = [s.rsplit('/')[-1] for s in subdirs if self.uses[s][1]]
        local_links = []  # keep track of linker libraries found so far
        applications_names = set([a[0] for a in self.applications])
        # Note: a god_dictionary, a reflex_dictionary or an application is like a module
        for name, sources, group, imports in self.libraries + god_dict + rflx_dict + self.applications:
            isGODDict = isRflxDict = isComp = isApp = isLinker = False
            if name == '--GOD--':
                isGODDict = True
                name = ''  # no library name for GOD dictionaries
            elif name.endswith('Dict') and name[:-4] in self.reflex_dictionaries:
                isRflxDict = True
                name = name[:-4]
            elif name in self.component_libraries:
                isComp = True
            elif name in applications_names:
                isApp = True
            else:
                if name not in self.linker_libraries:
                    self.log.warning('library %s not declared as component or linker, assume linker', name)
                isLinker = True

            # prepare the bits of the command: cmd, name, sources, args
            if isComp:
                cmd = 'gaudi_add_module'
            elif isGODDict:
                cmd = 'god_build_dictionary'
            elif isRflxDict:
                cmd = 'gaudi_add_dictionary'
            elif isApp:
                cmd = 'gaudi_add_executable'
            else:  # i.e. isLinker (a fallback)
                cmd = 'gaudi_add_library'

            if not sources:
                self.log.warning("Missing sources for target %s", name)

            args = []
            if isLinker:
                if headers:
                    args.append('PUBLIC_HEADERS ' + ' '.join(headers))
                else:
                    args.append('NO_PUBLIC_HEADERS')
            elif isGODDict:
                if god_headers_dest:
                    args.append('HEADERS_DESTINATION ' + god_headers_dest)
                # check if we have a customdict in the documents
                for docname, _, docsources in self.documents:
                    if docname == 'customdict':
                        args.append('EXTEND ' + docsources[0].replace('../', ''))
                        break

            # collection of link libraries.
            # Externals and subdirs are treated differently:
            #  - externals: just use the package name
            #  - subdirs: find the exported libraries in the global var cache
            # We also have to add the local linker libraries.

            # separate external and subdir explicit imports
            subdirsnames = [s.rsplit('/')[-1] for s in subdirs]
            subdir_local_imports = [i for i in imports if i in subdirsnames]
            ext_local_imports = [extName(i) for i in imports if i not in subdir_local_imports]

            # prepare the link list with the externals
            links = global_imports + ext_local_imports
            if links or inc_dirs:
                # external links need the include dirs
                args.append('INCLUDE_DIRS ' + ' '.join(links + inc_dirs))

            if links:
                not_included = set(links).difference(find_packages, set([s.rsplit('/')[-1] for s in subdirs]))
                if not_included:
                    self.log.warning('imports without use: %s', ', '.join(sorted(not_included)))

            # add subdirs...
            for s in subdir_imports + subdir_local_imports:
                if s in cache:
                    links.extend(cache[s]['libraries'])
            # ... and local libraries
            links.extend(local_links)
            if 'AIDA' in links:
                links.remove('AIDA')  # FIXME: AIDA does not have a library

            if links:
                # note: in some cases we get quoted library names
                args.append('LINK_LIBRARIES ' + ' '.join([l.strip('"') for l in links]))

            if isRflxDict and self.reflex_dictionaries[name]:
                args.append('OPTIONS ' + self.reflex_dictionaries[name])

            if isLinker:
                local_links.append(name)

            # FIXME: very very special case :(
            if name == 'garbage' and self.name == 'FileStager':
                data.append('# only for the applications\nfind_package(Boost COMPONENTS program_options)\n')

            # write command
            if not (isGODDict or isRflxDict):
                # dictionaries do not need to have the paths fixed
                sources = [os.path.normpath('src/' + s) for s in sources]
                # FIXME: special case
                sources = [s.replace('src/$(GAUDICONFROOT)', '${CMAKE_SOURCE_DIR}/GaudiConf') for s in sources]
            libdata = callStringWithIndent(cmd, [name] + sources + args)

            # FIXME: wrap the test libraries in one if block (instead of several)
            if group in ('tests', 'test'):
                # increase indentation
                libdata = [' ' + l for l in libdata.splitlines()]
                # and wrap
                libdata.insert(0, 'if(GAUDI_BUILD_TESTS)')
                libdata.append('endif()')
                libdata = '\n'.join(libdata)
            data.append(libdata)
            data.append('')  # empty line

        # PyQt resources and UIs
        if self.PyQtResource or self.PyQtUIC:
            data.append("# gen_pyqt_* functions are provided by 'pygraphics'")
        if self.PyQtResource:
            qrc_files = self.PyQtResource["qrc_files"].replace("../", "")
            qrc_dest = self.PyQtResource["outputdir"].replace("../python/", "")
            qrc_target = qrc_dest.replace('/', '.') + '.Resources'
            data.append('gen_pyqt_resource(%s %s %s)' % (qrc_target, qrc_dest, qrc_files))
        if self.PyQtUIC:
            ui_files = self.PyQtUIC["ui_files"].replace("../", "")
            ui_dest = self.PyQtUIC["outputdir"].replace("../python/", "")
            ui_target = ui_dest.replace('/', '.') + '.UI'
            data.append('gen_pyqt_uic(%s %s %s)' % (ui_target, ui_dest, ui_files))
        if self.PyQtResource or self.PyQtUIC:
            data.append('')  # empty line

        if self.copy_relax_rootmap:
            data.extend(['# Merge the RELAX rootmaps',
                         'set(rootmapfile ${CMAKE_LIBRARY_OUTPUT_DIRECTORY}/relax.rootmap)',
                         callStringWithIndent('add_custom_command',
                                              ['OUTPUT ${rootmapfile}',
                                               'COMMAND ${merge_cmd} ${RELAX_ROOTMAPS} ${rootmapfile}',
                                               'DEPENDS ${RELAX_ROOTMAPS}']),
                         'add_custom_target(RelaxRootmap ALL DEPENDS ${rootmapfile})',
                         '\n# Install the merged file',
                         'install(FILES ${rootmapfile} DESTINATION lib)\n'])

        # installation
        installs = []
        if headers and not self.linker_libraries:  # not installed yet
            installs.append("gaudi_install_headers(%s)" % (" ".join(headers)))
        if self.install_python_modules:
            # if we install Python modules, we need to check if we have special
            # names for the ConfUser modules
            if (self.name + 'ConfUserModules') in self.macros:
                installs.append('set_property(DIRECTORY PROPERTY CONFIGURABLE_USER_MODULES %s)'
                                % self.macros[self.name + 'ConfUserModules'])
            installs.append("gaudi_install_python_modules()")
        if self.install_scripts:
            installs.append("gaudi_install_scripts()")
        if installs:
            data.extend(installs)
            data.append('')  # empty line

        # environment
        def fixSetValue(s):
            '''
            Convert environment variable values from CMT to CMake.
            '''
            # escape '$' if not done already
            s = re.sub(r'(?<!\\)\$', '\\$', s)
            # replace parentheses with curly braces
            s = re.sub(r'\$\(([^()]*)\)', r'${\1}', s)
            # replace variables like Package_root with PACKAGEROOT
            v = re.compile(r'\$\{(\w*)_root\}')
            m = v.search(s)
            while m:
                s = s[:m.start()] + ('${%sROOT}' % m.group(1).upper()) + s[m.end():]
                m = v.search(s)
            return s

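        # Illustration only: with the substitutions above, a CMT value such as
        #   '$(MYPKGROOT)/scripts'  becomes  '\${MYPKGROOT}/scripts'
        #   '$(Gaudi_root)/python'  becomes  '\${GAUDIROOT}/python'
        # (the backslash escapes the '$' so CMake does not expand the
        # reference while parsing the gaudi_env() call).
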
        if self.sets:
            data.append(callStringWithIndent('gaudi_env',
                                             ['SET %s %s' % (v, fixSetValue(self.sets[v]))
                                              for v in sorted(self.sets)]))
            data.append('')  # empty line

        # tests
        if self.QMTest:
            data.append("\ngaudi_add_test(QMTest QMTEST)")

        return "\n".join(data) + "\n"

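    # Illustration only: for a hypothetical component-library package, the
    # method above emits a CMakeLists.txt roughly of this shape (names are
    # made up):
    #
    #   ########################...########################
    #   # Package: MyPkg
    #   ########################...########################
    #   gaudi_subdir(MyPkg v1r0)
    #
    #   gaudi_depends_on_subdirs(GaudiKernel)
    #
    #   find_package(Boost)
    #
    #   gaudi_add_module(MyPkg
    #                    src/*.cpp
    #                    INCLUDE_DIRS Boost
    #                    LINK_LIBRARIES Boost GaudiKernel)
    #
    #   gaudi_install_python_modules()
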
    @property
    def data_packages(self):
        '''
        Return the list of data packages used by this package in the form of a
        dictionary {name: version_pattern}.
        '''
        return dict([(n, self.uses[n][0]) for n in self.uses if n in data_packages])

    def process(self, overwrite=None):
        cml = os.path.join(self.path, "CMakeLists.txt")
        if ((overwrite == 'force')
            or (not os.path.exists(cml))
            or ((overwrite == 'update')
                and (os.path.getmtime(cml) < os.path.getmtime(self.requirements)))):
            # write the file
            data = self.generate()
            writeToFile(cml, data, self.log)
        else:
            self.log.warning("file %s already exists", cml)

    def _parseRequirements(self):
        def requirements():
            statement = ""
            for l in open(self.requirements):
                if '#' in l:
                    l = l[:l.find('#')]
                l = l.strip()
                # if we have something in the line, extend the statement
                if l:
                    statement += l
                    if statement.endswith('\\'):
                        # if the statement requires another line, get the next
                        statement = statement[:-1] + ' '
                        continue
                # whether we got something more in the statement or not, an
                # empty line after a '\' ends the statement
                if statement:
                    try:
                        yield list(self.CMTParser.parseString(statement))
                    except:
                        # ignore unknown statements
                        self.log.debug("Failed to parse statement: %r", statement)
                statement = ""

        for args in requirements():
            cmd = args.pop(0)
            if cmd == 'version':
                self.version = args[0]
            elif cmd == "use":
                if "-no_auto_imports" in args:
                    imp = False
                    args.remove("-no_auto_imports")
                else:
                    imp = True
                if len(args) > 1:  # only one argument usually means a conditional use
                    if len(args) > 2:
                        name = "%s/%s" % (args[2], args[0])
                    else:
                        name = args[0]
                    self.uses[name] = (args[1], imp)

            elif cmd == "apply_pattern":
                pattern = args.pop(0)
                args = dict([x.split('=', 1) for x in args])
                if pattern in self.singleton_patterns:
                    setattr(self, pattern, args or True)
                elif pattern in self.multi_patterns:
                    getattr(self, pattern).append(args)

            elif cmd == 'library':
                name = args.pop(0)
                # digest arguments (options, variables, sources)
                imports = []
                group = None
                sources = []
                for a in args:
                    if a.startswith('-'):  # options
                        if a.startswith('-import='):
                            imports.append(a[8:])
                        elif a.startswith('-group='):
                            group = a[7:]
                    elif '=' in a:  # variable
                        pass
                    else:  # source
                        sources.append(a)
                self.libraries.append((name, sources, group, imports))

            elif cmd == 'application':
                name = args.pop(0)
                # digest arguments (options, variables, sources)
                imports = []
                group = None
                sources = []
                for a in args:
                    if a.startswith('-'):  # options
                        if a.startswith('-import='):
                            imports.append(a[8:])
                        elif a.startswith('-group='):
                            group = a[7:]
                        elif a == '-check':  # used for test applications
                            group = 'tests'
                    elif '=' in a:  # variable
                        pass
                    else:  # source
                        sources.append(a)
                if 'test' in name.lower() or [s for s in sources if 'test' in s.lower()]:
                    # usually, developers do not put tests in the right group
                    group = 'tests'
                self.applications.append((name, sources, group, imports))

            elif cmd == 'document':
                name = args.pop(0)
                constituent = args.pop(0)
                sources = args
                self.documents.append((name, constituent, sources))

            elif cmd == 'macro':
                # FIXME: should handle macro tags
                name = args.pop(0)
                value = args[0].strip('"').strip("'")
                self.macros[name] = value

            elif cmd == 'macro_append':
                # FIXME: should handle macro tags
                name = args.pop(0)
                value = args[0].strip('"').strip("'")
                self.macros[name] = self.macros.get(name, "") + value

            elif cmd == 'set':
                name = args.pop(0)
                if name not in ignore_env:
                    value = args[0].strip('"').strip("'")
                    self.sets[name] = value

        # classification of libraries in the package
        unquote = lambda x: x.strip('"').strip("'")
        self.component_libraries = set([unquote(l['library']) for l in self.component_library])
        self.linker_libraries = set([unquote(l['library']) for l in self.linker_library])
        self.reflex_dictionaries = dict([(unquote(l['dictionary']), l.get('options', ''))
                                         for l in self.reflex_dictionary])

toolchain_template = '''# Special wrapper to load the declared version of the heptools toolchain.
set(heptools_version {0})

# Remove the reference to this file from the cache.
unset(CMAKE_TOOLCHAIN_FILE CACHE)

# Find the actual toolchain file.
find_file(CMAKE_TOOLCHAIN_FILE
          NAMES heptools-${{heptools_version}}.cmake
          HINTS ENV CMTPROJECTPATH
          PATHS ${{CMAKE_CURRENT_LIST_DIR}}/cmake/toolchain
          PATH_SUFFIXES toolchain)

if(NOT CMAKE_TOOLCHAIN_FILE)
  message(FATAL_ERROR "Cannot find heptools-${{heptools_version}}.cmake.")
endif()

# Reset the cache variable to have proper documentation.
set(CMAKE_TOOLCHAIN_FILE ${{CMAKE_TOOLCHAIN_FILE}}
    CACHE FILEPATH "The CMake toolchain file" FORCE)

include(${{CMAKE_TOOLCHAIN_FILE}})
'''

class Project(object):
    def __init__(self, path):
        """
        Create a project instance from the root directory of the project.
        """
        self.path = os.path.realpath(path)
        if not isProject(self.path):
            raise ValueError("%s is not a project" % self.path)
        self.requirements = os.path.join(self.path, "cmt", "project.cmt")
        # Private variables for cached properties
        self._packages = None
        self._container = None

    @property
    def packages(self):
        """
        Dictionary of packages contained in the project.
        """
        if self._packages is None:
            self._packages = {}
            for root, dirs, _files in os.walk(self.path):
                if isPackage(root):
                    p = Package(root, self)
                    name = os.path.relpath(p.path, self.path)
                    self._packages[name] = p
                    dirs[:] = []
        return self._packages

    @property
    def container(self):
        """
        Name of the container package of the project.

        The name of the container is deduced using the usual LHCb convention
        (instead of the content of project.cmt).
        """
        if self._container is None:
            for suffix in ["Release", "Sys"]:
                try:
                    # get the first package that ends with the suffix and does
                    # not have a hat, or raise StopIteration
                    c = (p for p in self.packages
                         if p.endswith(suffix) and "/" not in p).next()
                    self._container = self.packages[c]
                    break
                except StopIteration:
                    pass
        return self._container

    @property
    def name(self):
        # The name of the project is the same as the container's, without
        # the 'Release' or 'Sys' suffix.
        return self.container.name.replace("Release", "").replace("Sys", "")

    @property
    def version(self):
        return self.container.version

    def uses(self):
        for l in open(self.requirements):
            l = l.split()
            if l and l[0] == "use" and l[1] != "LCGCMT" and len(l) == 3:
                yield (projectCase(l[1]), l[2].rsplit('_', 1)[-1])

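    # Illustration only: a project.cmt line such as
    #   use GAUDI GAUDI_v23r5
    # is yielded by uses() as ('Gaudi', 'v23r5').
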
    def heptools(self):
        '''
        Return the version of heptools (LCGCMT) used by this project.
        '''

        def updateCache(value):
            '''
            Helper function to update the cache and return the value.
            '''
            k = repr((self.name, self.version))
            d = cache.get(k, {})
            d['heptools'] = value
            cache[k] = d
            return value

        # check for a direct dependency
        exp = re.compile(r'^\s*use\s+LCGCMT\s+LCGCMT[_-](\S+)')
        for l in open(self.requirements):
            m = exp.match(l)
            if m:
                return updateCache(m.group(1))

        # try with the projects we use (in the cache),
        # including ourselves (we may already be there)
        for u in list(self.uses()) + [(self.name, self.version)]:
            u = repr(u)
            if u in cache and 'heptools' in cache[u]:
                return updateCache(cache[u]['heptools'])

        # we cannot guess the version of heptools
        return None

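    # Illustration only: a project.cmt containing a line like
    #   use LCGCMT LCGCMT_64d
    # makes heptools() return '64d' (and record it in the cache).
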
    @property
    def data_packages(self):
        '''
        Return the list of data packages used by this project (i.e. by all the
        packages in this project) in the form of a dictionary
        {name: version_pattern}.
        '''
        # for debugging we map each data package to the packages that use it
        def appendDict(d, kv):
            '''
            Helper function to extend a dictionary of lists.
            '''
            k, v = kv
            if k in d:
                d[k].append(v)
            else:
                d[k] = [v]
            return d
        # dictionary {"data_package": [("user_package", "data_pkg_version"), ...]}
        dp2pkg = {}
        for pkgname, pkg in self.packages.items():
            for dpname, dpversion in pkg.data_packages.items():
                appendDict(dp2pkg, (dpname, (pkgname, dpversion)))

        # check and collect the data packages
        result = {}
        for dp in sorted(dp2pkg):
            versions = set([v for _, v in dp2pkg[dp]])
            if versions:
                version = sorted(versions)[-1]
            else:
                version = '*'
            if len(versions) != 1:
                logging.warning('Different versions for data package %s, using %s from %s', dp, version, dp2pkg[dp])
            result[dp] = version

        return result

    def generate(self):
        # list containing the lines to write to the file
        data = ["CMAKE_MINIMUM_REQUIRED(VERSION 2.8.5)",
                "",
                "#---------------------------------------------------------------",
                "# Load macros and functions for Gaudi-based projects",
                "find_package(GaudiProject)",
                "#---------------------------------------------------------------",
                "",
                "# Declare project name and version"]
        l = "gaudi_project(%s %s" % (self.name, self.version)
        use = "\n ".join(["%s %s" % u for u in self.uses()])
        if use:
            l += "\n USE " + use
        # collect data packages
        data_pkgs = []
        for p, v in sorted(self.data_packages.items()):
            if v in ('v*', '*'):
                data_pkgs.append(p)
            else:
                data_pkgs.append("%s VERSION %s" % (p, v))
        if data_pkgs:
            l += ("\n DATA " +
                  "\n ".join(data_pkgs))
        l += ")"
        data.append(l)
        return "\n".join(data) + "\n"

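    # Illustration only: for a hypothetical project the method above returns
    # something like
    #
    #   CMAKE_MINIMUM_REQUIRED(VERSION 2.8.5)
    #   ...
    #   gaudi_project(MyProject v1r0
    #    USE Gaudi v23r5
    #    DATA Det/SQLDDDB VERSION v7r*)
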
    def generateToolchain(self):
        heptools_version = self.heptools()
        if heptools_version:
            return toolchain_template.format(heptools_version)
        return None

    def process(self, overwrite=None):
        # Prepare the project configuration
        def produceFile(name, generator):
            cml = os.path.join(self.path, name)
            if ((overwrite == 'force')
                or (not os.path.exists(cml))
                or ((overwrite == 'update')
                    and (os.path.getmtime(cml) < os.path.getmtime(self.requirements)))):
                # write the file
                data = generator()
                if data:
                    writeToFile(cml, data, logging)
                else:
                    logging.info("file %s not generated (empty)", cml)
            else:
                logging.warning("file %s already exists", cml)

        produceFile("CMakeLists.txt", self.generate)
        produceFile("toolchain.cmake", self.generateToolchain)

        # Recurse into the packages
        for p in sorted(self.packages):
            self.packages[p].process(overwrite)


def main(args=None):
    from optparse import OptionParser
    parser = OptionParser(usage="%prog [options] [path to project or package]",
                          description="Convert CMT-based projects/packages to CMake (Gaudi project)")
    parser.add_option("-f", "--force", action="store_const",
                      dest='overwrite', const='force',
                      help="overwrite existing files")
    parser.add_option('--cache-only', action='store_true',
                      help='just update the cache without creating the CMakeLists.txt files.')
    parser.add_option('-u', '--update', action='store_const',
                      dest='overwrite', const='update',
                      help='modify the CMakeLists.txt files if they are older than '
                           'the corresponding requirements.')
    #parser.add_option('--cache-file', action='store',
    #                  help='file to be used for the cache')

    opts, args = parser.parse_args(args=args)

    logging.basicConfig(level=logging.INFO)

    top_dir = os.getcwd()
    if args:
        top_dir = args[0]
    if not os.path.isdir(top_dir):
        parser.error("%s is not a directory" % top_dir)

    loadConfig(os.path.join(top_dir, 'cmt2cmake.cfg'))

    open_cache()
    if isProject(top_dir):
        root = Project(top_dir)
    elif isPackage(top_dir):
        root = Package(top_dir)
        if opts.cache_only:
            return  # the cache is updated by instantiating the package
    else:
        raise ValueError("%s is neither a project nor a package" % top_dir)

    if opts.cache_only:
        root.packages  # the cache is updated by instantiating the packages
        root.heptools()  # this triggers the caching of the heptools version
        # note that we can get here only if root is a project
    else:
        root.process(opts.overwrite)
    close_cache()

if __name__ == '__main__':
    main()
    sys.exit(0)
