Gaudi Framework, version v24r2

cmt2cmake.py
#!/usr/bin/env python
"""
Script to convert CMT projects/packages to CMake Gaudi-based configuration.
"""
import os
import sys
import re
import logging
import shelve
import json
import operator

def makeParser(patterns=None):
    from pyparsing import (Word, QuotedString, Keyword, Literal, SkipTo, StringEnd,
                           ZeroOrMore, Optional, Combine,
                           alphas, alphanums, printables)
    dblQuotedString = QuotedString(quoteChar='"', escChar='\\', unquoteResults=False)
    sglQuotedString = QuotedString(quoteChar="'", escChar='\\', unquoteResults=False)
    value = dblQuotedString | sglQuotedString | Word(printables)

    tag_name = Word(alphas + "_", alphanums + "_-")
    tag_expression = Combine(tag_name + ZeroOrMore('&' + tag_name))
    values = value + ZeroOrMore(tag_expression + value)

    identifier = Word(alphas + "_", alphanums + "_")
    variable = Combine(identifier + '=' + value)

    constituent_option = (Keyword('-no_share')
                          | Keyword('-no_static')
                          | Keyword('-prototypes')
                          | Keyword('-no_prototypes')
                          | Keyword('-check')
                          | Keyword('-target_tag')
                          | Combine('-group=' + value)
                          | Combine('-suffix=' + value)
                          | Combine('-import=' + value)
                          | variable
                          | Keyword('-OS9')
                          | Keyword('-windows'))
    source = (Word(alphanums + "_*./$()")
              | Combine('-s=' + value)
              | Combine('-k=' + value)
              | Combine('-x=' + value))

    # statements
    comment = (Literal("#") + SkipTo(StringEnd())).suppress()

    package = Keyword('package') + Word(printables)
    version = Keyword("version") + Word(printables)
    use = Keyword("use") + identifier + Word(printables) + Optional(identifier) + Optional(Keyword("-no_auto_imports"))

    constituent = ((Keyword('library') | Keyword('application') | Keyword('document'))
                   + identifier + ZeroOrMore(constituent_option | source))
    macro = (Keyword('macro') | Keyword('macro_append')) + identifier + values
    setenv = (Keyword('set') | Keyword('path_append') | Keyword('path_prepend')) + identifier + values
    alias = Keyword('alias') + identifier + values

    apply_pattern = Keyword("apply_pattern") + identifier + ZeroOrMore(variable)
    if patterns:
        direct_patterns = reduce(operator.or_, map(Keyword, set(patterns)))
        # add the implied 'apply_pattern' to the list of tokens
        direct_patterns.addParseAction(lambda toks: toks.insert(0, 'apply_pattern'))
        apply_pattern = apply_pattern | (direct_patterns + ZeroOrMore(variable))

    statement = (package | version | use | constituent | macro | setenv | alias | apply_pattern)

    return Optional(statement) + Optional(comment) + StringEnd()

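# Illustrative only (not part of the original file): a minimal sketch of how the
# grammar returned by makeParser() can be exercised on a single CMT statement,
# assuming pyparsing is available; the package name is a placeholder.
#
#   parser = makeParser()
#   tokens = list(parser.parseString('use GaudiKernel v* -no_auto_imports'))
#   # tokens -> ['use', 'GaudiKernel', 'v*', '-no_auto_imports']
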
cache = None
def open_cache():
    global cache
    # record of known subdirs with their libraries
    # {'<subdir>': {'libraries': [...]}}
    # it also contains some info about the projects, under keys like repr(('<project>', '<version>'))
    try:
        # First we try the environment variable CMT2CMAKECACHE and the directory
        # containing this file...
        _shelve_file = os.environ.get('CMT2CMAKECACHE',
                                      os.path.join(os.path.dirname(__file__),
                                                   '.cmt2cmake.cache'))
        cache = shelve.open(_shelve_file)
    except:
        # ... otherwise we use the user home directory
        _shelve_file = os.path.join(os.path.expanduser('~'), '.cmt2cmake.cache')
        #logging.info("Using cache file %s", _shelve_file)
        cache = shelve.open(_shelve_file)

def close_cache():
    global cache
    if cache:
        cache.close()
        cache = None

config = {}
for k in ['ignored_packages', 'data_packages', 'needing_python', 'no_pedantic',
          'ignore_env']:
    config[k] = set()

# mappings
ignored_packages = config['ignored_packages']
data_packages = config['data_packages']

# List of packages known to actually need Python to build
needing_python = config['needing_python']

# packages that must have the pedantic option disabled
no_pedantic = config['no_pedantic']

ignore_env = config['ignore_env']

def loadConfig(config_file):
    '''
    Merge the content of the JSON file with the configuration dictionary.
    '''
    global config
    if os.path.exists(config_file):
        data = json.load(open(config_file))
        for k in data:
            if k not in config:
                config[k] = set()
            config[k].update(map(str, data[k]))
    # print config

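# Illustrative only (not part of the original file): a minimal sketch of what a
# cmt2cmake.cfg file could contain, matching the keys read above; the package
# names below are placeholders, not taken from any real configuration:
#
#   {
#       "ignored_packages": ["SomeObsoletePackage"],
#       "data_packages": ["SomeDataPackage"],
#       "needing_python": ["SomePythonBindingPackage"],
#       "no_pedantic": [],
#       "ignore_env": ["SOME_LEGACY_VARIABLE"]
#   }
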
loadConfig(os.path.join(os.path.dirname(__file__), 'cmt2cmake.cfg'))

def extName(n):
    '''
    Mapping between the LCG_Interfaces name and the Find*.cmake name
    (if non-trivial).
    '''
    mapping = {'Reflex': 'ROOT',
               'Python': 'PythonLibs',
               'neurobayes_expert': 'NeuroBayesExpert',
               'mysql': 'MySQL',
               'oracle': 'Oracle',
               'sqlite': 'SQLite',
               'lfc': 'LFC',
               'fftw': 'FFTW',
               'uuid': 'UUID',
               'fastjet': 'FastJet',
               'lapack': 'LAPACK',
               'bz2lib': 'BZip2',
               }
    return mapping.get(n, n)

def isPackage(path):
    return os.path.isfile(os.path.join(path, "cmt", "requirements"))

def isProject(path):
    return os.path.isfile(os.path.join(path, "cmt", "project.cmt"))

def projectCase(name):
    return {'DAVINCI': 'DaVinci',
            'LHCB': 'LHCb'}.get(name.upper(), name.capitalize())

def callStringWithIndent(cmd, arglines):
    '''
    Produce a string for a call of a command with indented arguments.

    >>> print callStringWithIndent('example_command', ['arg1', 'arg2', 'arg3'])
    example_command(arg1
                    arg2
                    arg3)
    >>> print callStringWithIndent('example_command', ['', 'arg2', 'arg3'])
    example_command(arg2
                    arg3)
    '''
    indent = '\n' + ' ' * (len(cmd) + 1)
    return cmd + '(' + indent.join(filter(None, arglines)) + ')'

def writeToFile(filename, data, log=None):
    '''
    Write the generated CMakeLists.txt.
    '''
    if log and os.path.exists(filename):
        log.info('overwriting %s', filename)
    f = open(filename, "w")
    f.write(data)
    f.close()

class Package(object):
    def __init__(self, path, project=None):
        self.path = os.path.realpath(path)
        if not isPackage(self.path):
            raise ValueError("%s is not a package" % self.path)

        self.name = os.path.basename(self.path)
        self.requirements = os.path.join(self.path, "cmt", "requirements")
        self.project = project

        # prepare attributes filled during parsing of requirements
        self.uses = {}
        self.version = None
        self.libraries = []
        self.applications = []
        self.documents = []
        self.macros = {}
        self.sets = {}
        self.paths = {}
        self.aliases = {}

        # These are patterns that can appear only once per package.
        # The corresponding dictionary will contain the arguments passed to the
        # pattern.
        self.singleton_patterns = set(["QMTest", "install_python_modules", "install_scripts",
                                       "install_more_includes", "god_headers", "god_dictionary",
                                       "PyQtResource", "PyQtUIC"])
        self.install_more_includes = {}
        self.install_python_modules = self.install_scripts = self.QMTest = False
        self.god_headers = {}
        self.god_dictionary = {}
        self.PyQtResource = {}
        self.PyQtUIC = {}

        # These are patterns that can be repeated in the requirements.
        # The corresponding data members will contain the list of dictionaries
        # corresponding to the various calls.
        self.multi_patterns = set(['reflex_dictionary', 'component_library', 'linker_library',
                                   'copy_relax_rootmap'])
        self.reflex_dictionary = []
        self.component_library = []
        self.linker_library = []
        self.copy_relax_rootmap = []

        self.component_libraries = set()
        self.linker_libraries = set()

        self.log = logging.getLogger('Package(%s)' % self.name)
        self.CMTParser = makeParser(self.singleton_patterns | self.multi_patterns)
        try:
            self._parseRequirements()
        except:
            print "Processing %s" % self.requirements
            raise
        # update the known subdirs
        cache[self.name] = {# list of linker libraries provided by the package
                            'libraries': list(self.linker_libraries),
                            # true if it's a headers-only package
                            'includes': bool(self.install_more_includes and
                                             not self.linker_libraries)}

    def generate(self):
        # header
        data = ["#" * 80,
                "# Package: %s" % self.name,
                "#" * 80,
                "gaudi_subdir(%s %s)" % (self.name, self.version),
                ""]
        # dependencies
        # subdirectories (excluding specials)
        subdirs = [n for n in sorted(self.uses)
                   if not n.startswith("LCG_Interfaces/")
                   and n not in ignored_packages
                   and n not in data_packages]

        inc_dirs = []
        if subdirs:
            # check if we are missing info for a subdir
            missing_subdirs = set([s.rsplit('/')[-1] for s in subdirs]) - set(cache)
            if missing_subdirs:
                self.log.warning('Missing info cache for subdirs %s', ' '.join(sorted(missing_subdirs)))
            # declare inclusion order
            data.append(callStringWithIndent('gaudi_depends_on_subdirs', subdirs))
            data.append('')
            # consider header-only subdirs:
            # for each required subdir that comes with only headers, add its
            # location to the call to 'include_directories'
            inc_only = lambda s: cache.get(s.rsplit('/')[-1], {}).get('includes')
            inc_dirs = filter(inc_only, subdirs)


        # externals (excluding specials)
        # - Python needs to be treated in a special way
        find_packages = {}
        for n in sorted(self.uses):
            if n.startswith("LCG_Interfaces/"):
                n = extName(n[15:])
                # FIXME: find a general way to treat these special cases
                if n == "PythonLibs":
                    if self.name not in needing_python:  # only these packages actually link against Python
                        continue
                # get custom link options
                linkopts = self.macros.get(n + '_linkopts', '')
                components = [m.group(1) or m.group(2)
                              for m in re.finditer(r'(?:\$\(%s_linkopts_([^)]*)\))|(?:-l(\w*))' % n,
                                                   linkopts)]
                # FIXME: find a general way to treat the special cases
                if n == 'COOL':
                    components = ['CoolKernel', 'CoolApplication']
                elif n == 'CORAL':
                    components = ['CoralBase', 'CoralKernel', 'RelationalAccess']
                elif n == 'RELAX' and self.copy_relax_rootmap:
                    components = [d['dict'] for d in self.copy_relax_rootmap if 'dict' in d]

                find_packages[n] = find_packages.get(n, []) + components

        # this second loop avoids double entries due to converging results of extName()
        for n in sorted(find_packages):
            args = [n]
            components = find_packages[n]
            if components:
                if n == 'RELAX':  # FIXME: probably we should set 'REQUIRED' for all the externals
                    args.append('REQUIRED')
                args.append('COMPONENTS')
                args.extend(components)
            data.append('find_package(%s)' % ' '.join(args))
        if find_packages:
            data.append("")

        if self.name in no_pedantic:
            data.append('string(REPLACE "-pedantic" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")\n')

        # the headers can be installed via "PUBLIC_HEADERS" or by hand
        if self.install_more_includes:
            headers = [d for d in self.install_more_includes.values()
                       if os.path.isdir(os.path.join(self.path, d))]
        else:
            headers = []

        if self.god_headers or self.god_dictionary:
            data.append("include(GaudiObjDesc)")
            data.append("")

        god_headers_dest = None
        if self.god_headers:
            godargs = [self.god_headers["files"].replace("../", "")]

            godflags = self.macros.get('%sObj2Doth_GODflags' % self.name, "")
            godflags = re.search(r'-s\s*(\S+)', godflags)
            if godflags:
                god_headers_dest = os.path.normpath('Event/' + godflags.group(1))
                if god_headers_dest == 'src':
                    # special case
                    godargs.append('PRIVATE')
                else:
                    godargs.append('DESTINATION ' + god_headers_dest)

            data.append(callStringWithIndent('god_build_headers', godargs))
            data.append("")

        god_dict = []
        if self.god_dictionary:
            god_dict = [('--GOD--',
                         [self.god_dictionary["files"].replace("../", "")],
                         None, [])]

        rflx_dict = []
        for d in self.reflex_dictionary:
            for k in d:
                v = d[k]
                v = v.replace("$(%sROOT)/" % self.name.upper(), "")
                v = v.replace("../", "")
                d[k] = v
            imports = [i.strip('"').replace('-import=', '') for i in d.get('imports', '').strip().split()]
            rflx_dict.append((d['dictionary'] + 'Dict',
                              [d['headerfiles'], d['selectionfile']],
                              None,
                              imports))

        # libraries
        global_imports = [extName(name[15:])
                          for name in self.uses
                          if name.startswith('LCG_Interfaces/') and self.uses[name][1]]  # list of imported ext
        if 'PythonLibs' in global_imports and self.name not in needing_python:
            global_imports.remove('PythonLibs')

        subdir_imports = [s.rsplit('/')[-1] for s in subdirs if self.uses[s][1]]
        local_links = []  # keep track of linker libraries found so far
        applications_names = set([a[0] for a in self.applications])
        # Note: a god_dictionary, a reflex_dictionary or an application is like a module
        for name, sources, group, imports in self.libraries + god_dict + rflx_dict + self.applications:
            isGODDict = isRflxDict = isComp = isApp = isLinker = False
            if name == '--GOD--':
                isGODDict = True
                name = ''  # no library name for GOD dictionaries
            elif name.endswith('Dict') and name[:-4] in self.reflex_dictionaries:
                isRflxDict = True
                name = name[:-4]
            elif name in self.component_libraries:
                isComp = True
            elif name in applications_names:
                isApp = True
            else:
                if name not in self.linker_libraries:
                    self.log.warning('library %s not declared as component or linker, assume linker', name)
                isLinker = True

            # prepare the bits of the command: cmd, name, sources, args
            if isComp:
                cmd = 'gaudi_add_module'
            elif isGODDict:
                cmd = 'god_build_dictionary'
            elif isRflxDict:
                cmd = 'gaudi_add_dictionary'
            elif isApp:
                cmd = 'gaudi_add_executable'
            else:  # i.e. isLinker (a fallback)
                cmd = 'gaudi_add_library'

            if not sources:
                self.log.warning("Missing sources for target %s", name)

            args = []
            if isLinker:
                if headers:
                    args.append('PUBLIC_HEADERS ' + ' '.join(headers))
                else:
                    args.append('NO_PUBLIC_HEADERS')
            elif isGODDict:
                if god_headers_dest:
                    args.append('HEADERS_DESTINATION ' + god_headers_dest)
                # check if we have a customdict in the documents
                for docname, _, docsources in self.documents:
                    if docname == 'customdict':
                        args.append('EXTEND ' + docsources[0].replace('../', ''))
                        break

            # Collection of link libraries.
            # Externals and subdirs are treated differently:
            #  - externals: just use the package name
            #  - subdirs: find the exported libraries in the global var 'cache'
            # We also have to add the local linker libraries.

            # separate external and subdir explicit imports
            subdirsnames = [s.rsplit('/')[-1] for s in subdirs]
            subdir_local_imports = [i for i in imports if i in subdirsnames]
            ext_local_imports = [extName(i) for i in imports if i not in subdir_local_imports]

            # prepare the link list with the externals
            links = global_imports + ext_local_imports
            if links or inc_dirs:
                # external links need the include dirs
                args.append('INCLUDE_DIRS ' + ' '.join(links + inc_dirs))

            if links:
                not_included = set(links).difference(find_packages, set([s.rsplit('/')[-1] for s in subdirs]))
                if not_included:
                    self.log.warning('imports without use: %s', ', '.join(sorted(not_included)))

            # add subdirs...
            for s in subdir_imports + subdir_local_imports:
                if s in cache:
                    links.extend(cache[s]['libraries'])
            # ... and local libraries
            links.extend(local_links)
            if 'AIDA' in links:
                links.remove('AIDA')  # FIXME: AIDA does not have a library

            if links:
                # note: in some cases we get quoted library names
                args.append('LINK_LIBRARIES ' + ' '.join([l.strip('"') for l in links]))

            if isRflxDict and self.reflex_dictionaries[name]:
                args.append('OPTIONS ' + self.reflex_dictionaries[name])

            if isLinker:
                local_links.append(name)

            # FIXME: very very special case :(
            if name == 'garbage' and self.name == 'FileStager':
                data.append('# only for the applications\nfind_package(Boost COMPONENTS program_options)\n')

            # write command
            if not (isGODDict or isRflxDict):
                # dictionaries do not need to have their paths fixed
                sources = [os.path.normpath('src/' + s) for s in sources]
                # FIXME: special case
                sources = [s.replace('src/$(GAUDICONFROOT)', '${CMAKE_SOURCE_DIR}/GaudiConf') for s in sources]
            libdata = callStringWithIndent(cmd, [name] + sources + args)

            # FIXME: wrap the test libraries in one if block (instead of several)
            if group in ('tests', 'test'):
                # increase indentation
                libdata = [' ' + l for l in libdata.splitlines()]
                # and wrap
                libdata.insert(0, 'if(GAUDI_BUILD_TESTS)')
                libdata.append('endif()')
                libdata = '\n'.join(libdata)
            data.append(libdata)
            data.append('')  # empty line

        # PyQt resources and UIs
        if self.PyQtResource or self.PyQtUIC:
            data.append("# gen_pyqt_* functions are provided by 'pygraphics'")
        if self.PyQtResource:
            qrc_files = self.PyQtResource["qrc_files"].replace("../", "")
            qrc_dest = self.PyQtResource["outputdir"].replace("../python/", "")
            qrc_target = qrc_dest.replace('/', '.') + '.Resources'
            data.append('gen_pyqt_resource(%s %s %s)' % (qrc_target, qrc_dest, qrc_files))
        if self.PyQtUIC:
            ui_files = self.PyQtUIC["ui_files"].replace("../", "")
            ui_dest = self.PyQtUIC["outputdir"].replace("../python/", "")
            ui_target = ui_dest.replace('/', '.') + '.UI'
            data.append('gen_pyqt_uic(%s %s %s)' % (ui_target, ui_dest, ui_files))
        if self.PyQtResource or self.PyQtUIC:
            data.append('')  # empty line

        if self.copy_relax_rootmap:
            data.extend(['# Merge the RELAX rootmaps',
                         'set(rootmapfile ${CMAKE_LIBRARY_OUTPUT_DIRECTORY}/relax.rootmap)',
                         callStringWithIndent('add_custom_command',
                                              ['OUTPUT ${rootmapfile}',
                                               'COMMAND ${merge_cmd} ${RELAX_ROOTMAPS} ${rootmapfile}',
                                               'DEPENDS ${RELAX_ROOTMAPS}']),
                         'add_custom_target(RelaxRootmap ALL DEPENDS ${rootmapfile})',
                         '\n# Install the merged file',
                         'install(FILES ${rootmapfile} DESTINATION lib)\n'])

        # installation
        installs = []
        if headers and not self.linker_libraries:  # not installed yet
            installs.append("gaudi_install_headers(%s)" % (" ".join(headers)))
        if self.install_python_modules:
            # if we install Python modules, we need to check if we have special
            # names for the ConfUser modules
            if (self.name + 'ConfUserModules') in self.macros:
                installs.append('set_property(DIRECTORY PROPERTY CONFIGURABLE_USER_MODULES %s)'
                                % self.macros[self.name + 'ConfUserModules'])
            installs.append("gaudi_install_python_modules()")
        if self.install_scripts:
            installs.append("gaudi_install_scripts()")
        if installs:
            data.extend(installs)
            data.append('')  # empty line

        if self.aliases:
            data.extend(['gaudi_alias({0}\n {1})'.format(name, ' '.join(alias))
                         for name, alias in self.aliases.iteritems()])
            data.append('')  # empty line

        # environment
        def fixSetValue(s):
            '''
            Convert environment variable values from CMT to CMake.
            '''
            # escape '$' if not done already
            s = re.sub(r'(?<!\\)\$', '\\$', s)
            # replace parentheses with curly braces
            s = re.sub(r'\$\(([^()]*)\)', r'${\1}', s)
            # replace variables like Package_root with PACKAGEROOT
            v = re.compile(r'\$\{(\w*)_root\}')
            m = v.search(s)
            while m:
                s = s[:m.start()] + ('${%sROOT}' % m.group(1).upper()) + s[m.end():]
                m = v.search(s)
            return s
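        # Illustrative only (not part of the original file): a rough sketch of what
        # fixSetValue() does to a made-up CMT-style value; '$' gets escaped for
        # CMake and $(Pkg_root) references become ${PKGROOT}:
        #   fixSetValue('$(MYPKG_root)/scripts')  ->  '\${MYPKGROOT}/scripts'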

        if self.sets:
            data.append(callStringWithIndent('gaudi_env',
                                             ['SET %s %s' % (v, fixSetValue(self.sets[v]))
                                              for v in sorted(self.sets)]))
            data.append('')  # empty line

        # tests
        if self.QMTest:
            data.append("\ngaudi_add_test(QMTest QMTEST)")

        return "\n".join(data) + "\n"

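    # Illustrative only (not part of the original file): generate() returns text
    # roughly of this shape for a hypothetical package 'MyPkg' with one linker
    # library (the exact content depends on the requirements actually parsed):
    #
    #   ############...
    #   # Package: MyPkg
    #   ############...
    #   gaudi_subdir(MyPkg v1r0)
    #
    #   gaudi_add_library(MyPkgLib src/... PUBLIC_HEADERS ... LINK_LIBRARIES ...)
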
    @property
    def data_packages(self):
        '''
        Return the list of data packages used by this package in the form of a
        dictionary {name: version_pattern}.
        '''
        return dict([(n, self.uses[n][0]) for n in self.uses if n in data_packages])

    def process(self, overwrite=None):
        cml = os.path.join(self.path, "CMakeLists.txt")
        if ((overwrite == 'force')
            or (not os.path.exists(cml))
            or ((overwrite == 'update')
                and (os.path.getmtime(cml) < os.path.getmtime(self.requirements)))):
            # write the file
            data = self.generate()
            writeToFile(cml, data, self.log)
        else:
            self.log.warning("file %s already exists", cml)

    def _parseRequirements(self):
        def requirements():
            statement = ""
            for l in open(self.requirements):
                if '#' in l:
                    l = l[:l.find('#')]
                l = l.strip()
                # if we have something in the line, extend the statement
                if l:
                    statement += l
                    if statement.endswith('\\'):
                        # if the statement requires another line, get the next
                        statement = statement[:-1] + ' '
                        continue
                # either we got something more in the statement or not, but
                # an empty line after a '\' means ending the statement
                if statement:
                    try:
                        yield list(self.CMTParser.parseString(statement))
                    except:
                        # ignore unknown statements
                        self.log.debug("Failed to parse statement: %r", statement)
                statement = ""

        for args in requirements():
            cmd = args.pop(0)
            if cmd == 'version':
                self.version = args[0]
            elif cmd == "use":
                if "-no_auto_imports" in args:
                    imp = False
                    args.remove("-no_auto_imports")
                else:
                    imp = True
                if len(args) > 1:  # only one argument usually means a conditional use
                    if len(args) > 2:
                        name = "%s/%s" % (args[2], args[0])
                    else:
                        name = args[0]
                    self.uses[name] = (args[1], imp)

            elif cmd == "apply_pattern":
                pattern = args.pop(0)
                args = dict([x.split('=', 1) for x in args])
                if pattern in self.singleton_patterns:
                    setattr(self, pattern, args or True)
                elif pattern in self.multi_patterns:
                    getattr(self, pattern).append(args)

            elif cmd == 'library':
                name = args.pop(0)
                # digest arguments (options, variables, sources)
                imports = []
                group = None
                sources = []
                for a in args:
                    if a.startswith('-'):  # options
                        if a.startswith('-import='):
                            imports.append(a[8:])
                        elif a.startswith('-group='):
                            group = a[7:]
                    elif '=' in a:  # variable
                        pass
                    else:  # source
                        sources.append(a)
                self.libraries.append((name, sources, group, imports))

            elif cmd == 'application':
                name = args.pop(0)
                # digest arguments (options, variables, sources)
                imports = []
                group = None
                sources = []
                for a in args:
                    if a.startswith('-'):  # options
                        if a.startswith('-import='):
                            imports.append(a[8:])
                        elif a.startswith('-group='):
                            group = a[7:]
                        elif a == '-check':  # used for test applications
                            group = 'tests'
                    elif '=' in a:  # variable
                        pass
                    else:  # source
                        sources.append(a)
                if 'test' in name.lower() or [s for s in sources if 'test' in s.lower()]:
                    # usually, developers do not put tests in the right group
                    group = 'tests'
                self.applications.append((name, sources, group, imports))

            elif cmd == 'document':
                name = args.pop(0)
                constituent = args.pop(0)
                sources = args
                self.documents.append((name, constituent, sources))

            elif cmd == 'macro':
                # FIXME: should handle macro tags
                name = args.pop(0)
                value = args[0].strip('"').strip("'")
                self.macros[name] = value

            elif cmd == 'macro_append':
                # FIXME: should handle macro tags
                name = args.pop(0)
                value = args[0].strip('"').strip("'")
                self.macros[name] = self.macros.get(name, "") + value

            elif cmd == 'set':
                name = args.pop(0)
                if name not in ignore_env:
                    value = args[0].strip('"').strip("'")
                    self.sets[name] = value

            elif cmd == 'alias':
                # FIXME: should handle macro tags
                name = args.pop(0)
                value = args[0].strip('"').strip("'").split()
                self.aliases[name] = value

        # classification of libraries in the package
        unquote = lambda x: x.strip('"').strip("'")
        self.component_libraries = set([unquote(l['library']) for l in self.component_library])
        self.linker_libraries = set([unquote(l['library']) for l in self.linker_library])
        self.reflex_dictionaries = dict([(unquote(l['dictionary']), l.get('options', ''))
                                         for l in self.reflex_dictionary])

toolchain_template = '''# Special wrapper to load the declared version of the heptools toolchain.
set(heptools_version {0})

# Remove the reference to this file from the cache.
unset(CMAKE_TOOLCHAIN_FILE CACHE)

# Find the actual toolchain file.
find_file(CMAKE_TOOLCHAIN_FILE
          NAMES heptools-${{heptools_version}}.cmake
          HINTS ENV CMTPROJECTPATH
          PATHS ${{CMAKE_CURRENT_LIST_DIR}}/cmake/toolchain
          PATH_SUFFIXES toolchain)

if(NOT CMAKE_TOOLCHAIN_FILE)
  message(FATAL_ERROR "Cannot find heptools-${{heptools_version}}.cmake.")
endif()

# Reset the cache variable to have proper documentation.
set(CMAKE_TOOLCHAIN_FILE ${{CMAKE_TOOLCHAIN_FILE}}
    CACHE FILEPATH "The CMake toolchain file" FORCE)

include(${{CMAKE_TOOLCHAIN_FILE}})
'''
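# Illustrative only (not part of the original file): Project.generateToolchain()
# below fills the single placeholder of toolchain_template, e.g.
#   toolchain_template.format('64b')
# would produce a toolchain.cmake wrapper looking for heptools-64b.cmake
# ('64b' is just a made-up heptools version string).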

class Project(object):
    def __init__(self, path):
        """
        Create a project instance from the root directory of the project.
        """
        self.path = os.path.realpath(path)
        if not isProject(self.path):
            raise ValueError("%s is not a project" % self.path)
        self.requirements = os.path.join(self.path, "cmt", "project.cmt")
        # Private variables for cached properties
        self._packages = None
        self._container = None

    @property
    def packages(self):
        """
        Dictionary of packages contained in the project.
        """
        if self._packages is None:
            self._packages = {}
            for root, dirs, _files in os.walk(self.path):
                if isPackage(root):
                    p = Package(root, self)
                    name = os.path.relpath(p.path, self.path)
                    self._packages[name] = p
                    dirs[:] = []
        return self._packages

    @property
    def container(self):
        """
        Name of the container package of the project.

        The name of the container is deduced using the usual LHCb convention
        (instead of the content of project.cmt).
        """
        if self._container is None:
            for suffix in ["Release", "Sys"]:
                try:
                    # get the first package that ends with the suffix and does
                    # not have a hat, or raise StopIteration
                    c = (p for p in self.packages
                         if p.endswith(suffix) and "/" not in p).next()
                    self._container = self.packages[c]
                    break
                except StopIteration:
                    pass
        return self._container

    @property
    def name(self):
        # The name of the project is the same as that of the container without
        # the 'Release' or 'Sys' suffix.
        return self.container.name.replace("Release", "").replace("Sys", "")

    @property
    def version(self):
        return self.container.version

    def uses(self):
        for l in open(self.requirements):
            l = l.split()
            if l and l[0] == "use" and l[1] != "LCGCMT" and len(l) == 3:
                yield (projectCase(l[1]), l[2].rsplit('_', 1)[-1])
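
    # Illustrative only (not part of the original file): for a project.cmt
    # containing a line such as
    #     use GAUDI GAUDI_v23r0
    # uses() yields ('Gaudi', 'v23r0'); the project name and version here are
    # made-up values.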

    def heptools(self):
        '''
        Return the version of heptools (LCGCMT) used by this project.
        '''

        def updateCache(value):
            '''
            Helper function to update the cache and return the value.
            '''
            k = repr((self.name, self.version))
            d = cache.get(k, {})
            d['heptools'] = value
            cache[k] = d
            return value

        # check for a direct dependency
        exp = re.compile(r'^\s*use\s+LCGCMT\s+LCGCMT[_-](\S+)')
        for l in open(self.requirements):
            m = exp.match(l)
            if m:
                return updateCache(m.group(1))

        # try with the projects we use (in the cache),
        # including ourselves (we may already be there)
        for u in list(self.uses()) + [(self.name, self.version)]:
            u = repr(u)
            if u in cache and 'heptools' in cache[u]:
                return updateCache(cache[u]['heptools'])

        # we cannot guess the version of heptools
        return None

    @property
    def data_packages(self):
        '''
        Return the list of data packages used by this project (i.e. by all the
        packages in this project) in the form of a dictionary
        {name: version_pattern}.
        '''
        # for reporting, map each data package to the packages that use it
        def appendDict(d, kv):
            '''
            Helper function to extend a dictionary of lists.
            '''
            k, v = kv
            if k in d:
                d[k].append(v)
            else:
                d[k] = [v]
            return d
        # dictionary {"data_package": [("user_package", "data_pkg_version"), ...]}
        dp2pkg = {}
        for pkgname, pkg in self.packages.items():
            for dpname, dpversion in pkg.data_packages.items():
                appendDict(dp2pkg, (dpname, (pkgname, dpversion)))

        # check and collect the data packages
        result = {}
        for dp in sorted(dp2pkg):
            versions = set([v for _, v in dp2pkg[dp]])
            if versions:
                version = sorted(versions)[-1]
            else:
                version = '*'
            if len(versions) != 1:
                logging.warning('Different versions for data package %s, using %s from %s', dp, version, dp2pkg[dp])
            result[dp] = version

        return result

    def generate(self):
        # list containing the lines to write to the file
        data = ["CMAKE_MINIMUM_REQUIRED(VERSION 2.8.5)",
                "",
                "#---------------------------------------------------------------",
                "# Load macros and functions for Gaudi-based projects",
                "find_package(GaudiProject)",
                "#---------------------------------------------------------------",
                "",
                "# Declare project name and version"]
        l = "gaudi_project(%s %s" % (self.name, self.version)
        use = "\n ".join(["%s %s" % u for u in self.uses()])
        if use:
            l += "\n USE " + use
        # collect data packages
        data_pkgs = []
        for p, v in sorted(self.data_packages.items()):
            if v in ('v*', '*'):
                data_pkgs.append(p)
            else:
                data_pkgs.append("%s VERSION %s" % (p, v))
        if data_pkgs:
            l += ("\n DATA " +
                  "\n ".join(data_pkgs))
        l += ")"
        data.append(l)
        return "\n".join(data) + "\n"

    def generateToolchain(self):
        heptools_version = self.heptools()
        if heptools_version:
            return toolchain_template.format(heptools_version)
        return None

    def process(self, overwrite=None):
        # Prepare the project configuration
        def produceFile(name, generator):
            cml = os.path.join(self.path, name)
            if ((overwrite == 'force')
                or (not os.path.exists(cml))
                or ((overwrite == 'update')
                    and (os.path.getmtime(cml) < os.path.getmtime(self.requirements)))):
                # write the file
                data = generator()
                if data:
                    writeToFile(cml, data, logging)
                else:
                    logging.info("file %s not generated (empty)", cml)
            else:
                logging.warning("file %s already exists", cml)

        produceFile("CMakeLists.txt", self.generate)
        produceFile("toolchain.cmake", self.generateToolchain)

        # Recurse into the packages
        for p in sorted(self.packages):
            self.packages[p].process(overwrite)


def main(args=None):
    from optparse import OptionParser
    parser = OptionParser(usage="%prog [options] [path to project or package]",
                          description="Convert CMT-based projects/packages to CMake (Gaudi project)")
    parser.add_option("-f", "--force", action="store_const",
                      dest='overwrite', const='force',
                      help="overwrite existing files")
    parser.add_option('--cache-only', action='store_true',
                      help='just update the cache without creating the CMakeLists.txt files.')
    parser.add_option('-u', '--update', action='store_const',
                      dest='overwrite', const='update',
                      help='modify the CMakeLists.txt files if they are older than '
                           'the corresponding requirements.')
    #parser.add_option('--cache-file', action='store',
    #                  help='file to be used for the cache')

    opts, args = parser.parse_args(args=args)

    logging.basicConfig(level=logging.INFO)

    top_dir = os.getcwd()
    if args:
        top_dir = args[0]
    if not os.path.isdir(top_dir):
        parser.error("%s is not a directory" % top_dir)

    loadConfig(os.path.join(top_dir, 'cmt2cmake.cfg'))

    open_cache()
    if isProject(top_dir):
        root = Project(top_dir)
    elif isPackage(top_dir):
        root = Package(top_dir)
        if opts.cache_only:
            return  # the cache is updated when instantiating the package
    else:
        raise ValueError("%s is neither a project nor a package" % top_dir)

    if opts.cache_only:
        root.packages  # the cache is updated by instantiating the packages
        root.heptools()  # this triggers the caching of the heptools version
        # note that we can get here only if root is a project
    else:
        root.process(opts.overwrite)
    close_cache()

if __name__ == '__main__':
    main()
    sys.exit(0)
