Gaudi Framework, version v23r6

cmt2cmake.py
1 #!/usr/bin/env python
2 """
3 Script to convert CMT projects/packages to CMake Gaudi-based configuration.
4 """
5 import os
6 import sys
7 import re
8 import logging
9 import shelve
10 import json
11 import operator
12 
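# Typical invocations (a sketch based on the options defined in main() below;
# the paths and package names are hypothetical):
#
#   python cmt2cmake.py /path/to/MyProject       # convert a whole project
#   python cmt2cmake.py -u Kernel/MyPackage      # update a single package if older
#   python cmt2cmake.py --cache-only MyProject   # only refresh the cache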
13 def makeParser(patterns=None):
14  from pyparsing import ( Word, QuotedString, Keyword, Literal, SkipTo, StringEnd,
15  ZeroOrMore, Optional, Combine,
16  alphas, alphanums, printables )
17  dblQuotedString = QuotedString(quoteChar='"', escChar='\\', unquoteResults=False)
18  sglQuotedString = QuotedString(quoteChar="'", escChar='\\', unquoteResults=False)
19  value = dblQuotedString | sglQuotedString | Word(printables)
20 
21  tag_name = Word(alphas + "_", alphanums + "_-")
22  tag_expression = Combine(tag_name + ZeroOrMore('&' + tag_name))
23  values = value + ZeroOrMore(tag_expression + value)
24 
25  identifier = Word(alphas + "_", alphanums + "_")
26  variable = Combine(identifier + '=' + value)
27 
28  constituent_option = (Keyword('-no_share')
29  | Keyword('-no_static')
30  | Keyword('-prototypes')
31  | Keyword('-no_prototypes')
32  | Keyword('-check')
33  | Keyword('-target_tag')
34  | Combine('-group=' + value)
35  | Combine('-suffix=' + value)
36  | Combine('-import=' + value)
37  | variable
38  | Keyword('-OS9')
39  | Keyword('-windows'))
40  source = (Word(alphanums + "_*./$()")
41  | Combine('-s=' + value)
42  | Combine('-k=' + value)
43  | Combine('-x=' + value))
44 
45  # statements
46  comment = (Literal("#") + SkipTo(StringEnd())).suppress()
47 
48  package = Keyword('package') + Word(printables)
49  version = Keyword("version") + Word(printables)
50  use = Keyword("use") + identifier + Word(printables) + Optional(identifier) + Optional(Keyword("-no_auto_imports"))
51 
52  constituent = ((Keyword('library') | Keyword('application') | Keyword('document'))
53  + identifier + ZeroOrMore(constituent_option | source))
54  macro = (Keyword('macro') | Keyword('macro_append')) + identifier + values
55  setenv = (Keyword('set') | Keyword('path_append') | Keyword('path_prepend')) + identifier + values
56  alias = Keyword('alias') + identifier + values
57 
58  apply_pattern = Keyword("apply_pattern") + identifier + ZeroOrMore(variable)
59  if patterns:
60  direct_patterns = reduce(operator.or_, map(Keyword, set(patterns)))
61  # add the implied 'apply_pattern' to the list of tokens
62  direct_patterns.addParseAction(lambda toks: toks.insert(0, 'apply_pattern'))
63  apply_pattern = apply_pattern | (direct_patterns + ZeroOrMore(variable))
64 
65  statement = (package | version | use | constituent | macro | setenv | alias | apply_pattern)
66 
67  return Optional(statement) + Optional(comment) + StringEnd()
68 
69 
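# A minimal sketch of how the grammar returned by makeParser() tokenizes one
# CMT requirements statement (assumes pyparsing is available; the package and
# library names are hypothetical):
#
#   >>> parser = makeParser()
#   >>> list(parser.parseString('use GaudiKernel v* -no_auto_imports'))
#   ['use', 'GaudiKernel', 'v*', '-no_auto_imports']
#   >>> list(parser.parseString('library MyLib *.cpp -import=AIDA'))
#   ['library', 'MyLib', '*.cpp', '-import=AIDA']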
70 cache = None
71 def open_cache():
72  global cache
73  # record of known subdirs with their libraries
74  # {'<subdir>': {'libraries': [...]}}
75  # it also contains some info about the projects, under keys like repr(('<project>', '<version>'))
76  try:
77  # First we try the environment variable CMT2CMAKECACHE and the directory
78  # containing this file...
79  _shelve_file = os.environ.get('CMT2CMAKECACHE',
80  os.path.join(os.path.dirname(__file__),
81  '.cmt2cmake.cache'))
82  cache = shelve.open(_shelve_file)
83  except:
84  # ... otherwise we use the user home directory
85  _shelve_file = os.path.join(os.path.expanduser('~'), '.cmt2cmake.cache')
86  #logging.info("Using cache file %s", _shelve_file)
87  cache = shelve.open(_shelve_file)
88 
89 def close_cache():
90  global cache
91  if cache:
92  cache.close()
93  cache = None
94 
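# For illustration, a cache entry for a subdirectory looks roughly like this
# (see Package.__init__ below; the name and values are hypothetical):
#
#   cache['GaudiKernel'] = {'libraries': ['GaudiKernel'], 'includes': False}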
95 config = {}
96 for k in ['ignored_packages', 'data_packages', 'needing_python', 'no_pedantic',
97  'ignore_env']:
98  config[k] = set()
99 
100 # mappings
101 ignored_packages = config['ignored_packages']
102 data_packages = config['data_packages']
103 
104 # List of packages known to actually need Python to build
105 needing_python = config['needing_python']
106 
107 # packages that must have the pedantic option disabled
108 no_pedantic = config['no_pedantic']
109 
110 ignore_env = config['ignore_env']
111 
112 def loadConfig(config_file):
113  '''
114  Merge the content of the JSON file with the configuration dictionary.
115  '''
116  global config
117  if os.path.exists(config_file):
118  data = json.load(open(config_file))
119  for k in data:
120  if k not in config:
121  config[k] = set()
122  config[k].update(map(str, data[k]))
123  # print config
124 
125 loadConfig(os.path.join(os.path.dirname(__file__), 'cmt2cmake.cfg'))
126 
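# For illustration, cmt2cmake.cfg is a JSON file whose keys extend the sets
# defined above, e.g. (hypothetical content):
#
#   {
#     "ignored_packages": ["GaudiObjDesc"],
#     "data_packages": ["SQLDDDB"],
#     "needing_python": ["GaudiPython"]
#   }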
127 def extName(n):
128  '''
129  Mapping between the LCG_Interfaces package name and the Find*.cmake name
130  (if non-trivial).
131  '''
132  mapping = {'Reflex': 'ROOT',
133  'Python': 'PythonLibs',
134  'neurobayes_expert': 'NeuroBayesExpert',
135  'mysql': 'MySQL',
136  'oracle': 'Oracle',
137  'sqlite': 'SQLite',
138  'lfc': 'LFC',
139  'fftw': 'FFTW',
140  'uuid': 'UUID',
141  'fastjet': 'FastJet',
142  'lapack': 'LAPACK',
143  }
144  return mapping.get(n, n)
145 
146 def isPackage(path):
147  return os.path.isfile(os.path.join(path, "cmt", "requirements"))
148 
149 def isProject(path):
150  return os.path.isfile(os.path.join(path, "cmt", "project.cmt"))
151 
152 def projectCase(name):
153  return {'DAVINCI': 'DaVinci',
154  'LHCB': 'LHCb'}.get(name.upper(), name.capitalize())
155 
156 def callStringWithIndent(cmd, arglines):
157  '''
158  Produce a string for a call of a command with indented arguments.
159 
160  >>> print callStringWithIndent('example_command', ['arg1', 'arg2', 'arg3'])
161  example_command(arg1
162  arg2
163  arg3)
164  >>> print callStringWithIndent('example_command', ['', 'arg2', 'arg3'])
165  example_command(arg2
166  arg3)
167  '''
168  indent = '\n' + ' ' * (len(cmd) + 1)
169  return cmd + '(' + indent.join(filter(None, arglines)) + ')'
170 
171 def writeToFile(filename, data, log=None):
172  '''
173  Write the generated CMakeLists.txt.
174  '''
175  if log and os.path.exists(filename):
176  log.info('overwriting %s', filename)
177  f = open(filename, "w")
178  f.write(data)
179  f.close()
180 
181 class Package(object):
182  def __init__(self, path, project=None):
183  self.path = os.path.realpath(path)
184  if not isPackage(self.path):
185  raise ValueError("%s is not a package" % self.path)
186 
187  self.name = os.path.basename(self.path)
188  self.requirements = os.path.join(self.path, "cmt", "requirements")
189  self.project = project
190 
191  # prepare attributes filled during parsing of requirements
192  self.uses = {}
193  self.version = None
194  self.libraries = []
195  self.applications = []
196  self.documents = []
197  self.macros = {}
198  self.sets = {}
199  self.paths = {}
200  self.aliases = {}
201 
202  # These are patterns that can appear only once per package.
203  # The corresponding dictionary will contain the arguments passed to the
204  # pattern.
205  self.singleton_patterns = set(["QMTest", "install_python_modules", "install_scripts",
206  "install_more_includes", "god_headers", "god_dictionary",
207  "PyQtResource", "PyQtUIC"])
208  self.install_more_includes = {}
209  self.install_python_modules = self.install_scripts = self.QMTest = False
210  self.god_headers = {}
211  self.god_dictionary = {}
212  self.PyQtResource = {}
213  self.PyQtUIC = {}
214 
215  # These are patterns that can be repeated in the requirements.
216  # The corresponding data members will contain the list of dictionaries
217  # corresponding to the various calls.
218  self.multi_patterns = set(['reflex_dictionary', 'component_library', 'linker_library',
219  'copy_relax_rootmap'])
220  self.reflex_dictionary = []
221  self.component_library = []
222  self.linker_library = []
223  self.copy_relax_rootmap = []
224 
226  self.component_libraries = set()
227  self.linker_libraries = set()
228 
229  self.log = logging.getLogger('Package(%s)' % self.name)
230  self.CMTParser = makeParser(self.singleton_patterns | self.multi_patterns)
231  try:
232  self._parseRequirements()
233  except:
234  print "Processing %s" % self.requirements
235  raise
236  # update the known subdirs
237  cache[self.name] = {# list of linker libraries provided by the package
238  'libraries': list(self.linker_libraries),
239  # true if it's a headers-only package
240  'includes': bool(self.install_more_includes and
241  not self.linker_libraries)}
242 
243  def generate(self):
244  # header
245  data = ["#" * 80,
246  "# Package: %s" % self.name,
247  "#" * 80,
248  "gaudi_subdir(%s %s)" % (self.name, self.version),
249  ""]
250  # dependencies
251  # subdirectories (excluding specials)
252  subdirs = [n for n in sorted(self.uses)
253  if not n.startswith("LCG_Interfaces/")
254  and n not in ignored_packages
255  and n not in data_packages]
256 
257  inc_dirs = []
258  if subdirs:
259  # check if we are missing info for a subdir
260  missing_subdirs = set([s.rsplit('/')[-1] for s in subdirs]) - set(cache)
261  if missing_subdirs:
262  self.log.warning('Missing info cache for subdirs %s', ' '.join(sorted(missing_subdirs)))
263  # declare inclusion order
264  data.append(callStringWithIndent('gaudi_depends_on_subdirs', subdirs))
265  data.append('')
266  # consider header-only subdirs
267  # for each required subdir that comes with only headers, add its
268  # location to the call to 'include_directories'
269  inc_only = lambda s: cache.get(s.rsplit('/')[-1], {}).get('includes')
270  inc_dirs = filter(inc_only, subdirs)
271 
272 
273  # externals (excluding specials)
274  # - Python needs to be treated in a special way
275  find_packages = {}
276  for n in sorted(self.uses):
277  if n.startswith("LCG_Interfaces/"):
278  n = extName(n[15:])
279  # FIXME: find a general way to treat these special cases
280  if n == "PythonLibs":
281  if self.name not in needing_python: # only these packages actually link against Python
282  continue
283  # get custom link options
284  linkopts = self.macros.get(n + '_linkopts', '')
285  components = [m.group(1) or m.group(2)
286  for m in re.finditer(r'(?:\$\(%s_linkopts_([^)]*)\))|(?:-l(\w*))' % n,
287  linkopts)]
288  # FIXME: find a general way to treat the special cases
289  if n == 'COOL':
290  components = ['CoolKernel', 'CoolApplication']
291  elif n == 'CORAL':
292  components = ['CoralBase', 'CoralKernel', 'RelationalAccess']
293  elif n == 'RELAX' and self.copy_relax_rootmap:
294  components = [d['dict'] for d in self.copy_relax_rootmap if 'dict' in d]
295 
296  find_packages[n] = find_packages.get(n, []) + components
297 
298  # this second loop avoids double entries due to converging results of extName()
299  for n in sorted(find_packages):
300  args = [n]
301  components = find_packages[n]
302  if components:
303  if n == 'RELAX': # FIXME: probably we should set 'REQUIRED' for all the externals
304  args.append('REQUIRED')
305  args.append('COMPONENTS')
306  args.extend(components)
307  data.append('find_package(%s)' % ' '.join(args))
308  if find_packages:
309  data.append("")
310 
311  if self.name in no_pedantic:
312  data.append('string(REPLACE "-pedantic" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")\n')
313 
314  # the headers can be installed via "PUBLIC_HEADERS" or by hand
315  if self.install_more_includes:
316  headers = [d for d in self.install_more_includes.values()
317  if os.path.isdir(os.path.join(self.path, d))]
318  else:
319  headers = []
320 
321  if self.god_headers or self.god_dictionary:
322  data.append("include(GaudiObjDesc)")
323  data.append("")
324 
325  god_headers_dest = None
326  if self.god_headers:
327  godargs = [self.god_headers["files"].replace("../", "")]
328 
329  godflags = self.macros.get('%sObj2Doth_GODflags' % self.name, "")
330  godflags = re.search(r'-s\s*(\S+)', godflags)
331  if godflags:
332  god_headers_dest = os.path.normpath('Event/' + godflags.group(1))
333  if god_headers_dest == 'src':
334  # special case
335  godargs.append('PRIVATE')
336  else:
337  godargs.append('DESTINATION ' + god_headers_dest)
338 
339  data.append(callStringWithIndent('god_build_headers', godargs))
340  data.append("")
341 
342  god_dict = []
343  if self.god_dictionary:
344  god_dict = [('--GOD--',
345  [self.god_dictionary["files"].replace("../", "")],
346  None, [])]
347 
348  rflx_dict = []
349  for d in self.reflex_dictionary:
350  for k in d:
351  v = d[k]
352  v = v.replace("$(%sROOT)/" % self.name.upper(), "")
353  v = v.replace("../", "")
354  d[k] = v
355  imports = [i.strip('"').replace('-import=', '') for i in d.get('imports', '').strip().split()]
356  rflx_dict.append((d['dictionary'] + 'Dict',
357  [d['headerfiles'], d['selectionfile']],
358  None,
359  imports))
360 
361  # libraries
362  global_imports = [extName(name[15:])
363  for name in self.uses
364  if name.startswith('LCG_Interfaces/') and self.uses[name][1]] # list of imported ext
365  if 'PythonLibs' in global_imports and self.name not in needing_python:
366  global_imports.remove('PythonLibs')
367 
368  subdir_imports = [s.rsplit('/')[-1] for s in subdirs if self.uses[s][1]]
369  local_links = [] # keep track of linker libraries found so far
370  applications_names = set([a[0] for a in self.applications])
371  # Note: a god_dictionary, a reflex_dictionary or an application is like a module
372  for name, sources, group, imports in self.libraries + god_dict + rflx_dict + self.applications:
373  isGODDict = isRflxDict = isComp = isApp = isLinker = False
374  if name == '--GOD--':
375  isGODDict = True
376  name = '' # no library name for GOD dictionaries
377  elif name.endswith('Dict') and name[:-4] in self.reflex_dictionaries:
378  isRflxDict = True
379  name = name[:-4]
380  elif name in self.component_libraries:
381  isComp = True
382  elif name in applications_names:
383  isApp = True
384  else:
385  if name not in self.linker_libraries:
386  self.log.warning('library %s not declared as component or linker, assume linker', name)
387  isLinker = True
388 
389  # prepare the bits of the command: cmd, name, sources, args
390  if isComp:
391  cmd = 'gaudi_add_module'
392  elif isGODDict:
393  cmd = 'god_build_dictionary'
394  elif isRflxDict:
395  cmd = 'gaudi_add_dictionary'
396  elif isApp:
397  cmd = 'gaudi_add_executable'
398  else: # i.e. isLinker (a fallback)
399  cmd = 'gaudi_add_library'
400 
401  if not sources:
402  self.log.warning("Missing sources for target %s", name)
403 
404  args = []
405  if isLinker:
406  if headers:
407  args.append('PUBLIC_HEADERS ' + ' '.join(headers))
408  else:
409  args.append('NO_PUBLIC_HEADERS')
410  elif isGODDict:
411  if god_headers_dest:
412  args.append('HEADERS_DESTINATION ' + god_headers_dest)
413  # check if we have a customdict in the documents
414  for docname, _, docsources in self.documents:
415  if docname == 'customdict':
416  args.append('EXTEND ' + docsources[0].replace('../', ''))
417  break
418 
419 
420  # Collection of link libraries:
421  # Externals and subdirs are treated differently:
422  # - externals: just use the package name
423  # - subdirs: find the exported libraries in the global var cache
424  # We also have to add the local linker libraries.
425 
426  # separate external and subdir explicit imports
427  subdirsnames = [s.rsplit('/')[-1] for s in subdirs]
428  subdir_local_imports = [i for i in imports if i in subdirsnames]
429  ext_local_imports = [extName(i) for i in imports if i not in subdir_local_imports]
430 
431  # prepare the link list with the externals
432  links = global_imports + ext_local_imports
433  if links or inc_dirs:
434  # external links need the include dirs
435  args.append('INCLUDE_DIRS ' + ' '.join(links + inc_dirs))
436 
437  if links:
438  not_included = set(links).difference(find_packages, set([s.rsplit('/')[-1] for s in subdirs]))
439  if not_included:
440  self.log.warning('imports without use: %s', ', '.join(sorted(not_included)))
441 
442  # add subdirs...
443  for s in subdir_imports + subdir_local_imports:
444  if s in cache:
445  links.extend(cache[s]['libraries'])
446  # ... and local libraries
447  links.extend(local_links)
448  if 'AIDA' in links:
449  links.remove('AIDA') # FIXME: AIDA does not have a library
450 
451  if links:
452  # note: in some cases we get quoted library names
453  args.append('LINK_LIBRARIES ' + ' '.join([l.strip('"') for l in links]))
454 
455  if isRflxDict and self.reflex_dictionaries[name]:
456  args.append('OPTIONS ' + self.reflex_dictionaries[name])
457 
458  if isLinker:
459  local_links.append(name)
460 
461  # FIXME: very very special case :(
462  if name == 'garbage' and self.name == 'FileStager':
463  data.append('# only for the applications\nfind_package(Boost COMPONENTS program_options)\n')
464 
465  # write command
466  if not (isGODDict or isRflxDict):
467  # dictionaries do not need to have the paths fixed
468  sources = [os.path.normpath('src/' + s) for s in sources]
469  # FIXME: special case
470  sources = [s.replace('src/$(GAUDICONFROOT)', '${CMAKE_SOURCE_DIR}/GaudiConf') for s in sources]
471  libdata = callStringWithIndent(cmd, [name] + sources + args)
472 
473  # FIXME: wrap the test libraries in one if block (instead of several)
474  if group in ('tests', 'test'):
475  # increase indentation
476  libdata = [' ' + l for l in libdata.splitlines()]
477  # and wrap
478  libdata.insert(0, 'if(GAUDI_BUILD_TESTS)')
479  libdata.append('endif()')
480  libdata = '\n'.join(libdata)
481  data.append(libdata)
482  data.append('') # empty line
483 
484  # PyQt resources and UIs
485  if self.PyQtResource or self.PyQtUIC:
486  data.append("# gen_pyqt_* functions are provided by 'pygraphics'")
487  if self.PyQtResource:
488  qrc_files = self.PyQtResource["qrc_files"].replace("../", "")
489  qrc_dest = self.PyQtResource["outputdir"].replace("../python/", "")
490  qrc_target = qrc_dest.replace('/', '.') + '.Resources'
491  data.append('gen_pyqt_resource(%s %s %s)' % (qrc_target, qrc_dest, qrc_files))
492  if self.PyQtUIC:
493  ui_files = self.PyQtUIC["ui_files"].replace("../", "")
494  ui_dest = self.PyQtUIC["outputdir"].replace("../python/", "")
495  ui_target = ui_dest.replace('/', '.') + '.UI'
496  data.append('gen_pyqt_uic(%s %s %s)' % (ui_target, ui_dest, ui_files))
497  if self.PyQtResource or self.PyQtUIC:
498  data.append('') # empty line
499 
500  if self.copy_relax_rootmap:
501  data.extend(['# Merge the RELAX rootmaps',
502  'set(rootmapfile ${CMAKE_LIBRARY_OUTPUT_DIRECTORY}/relax.rootmap)',
503  callStringWithIndent('add_custom_command',
504  ['OUTPUT ${rootmapfile}',
505  'COMMAND ${merge_cmd} ${RELAX_ROOTMAPS} ${rootmapfile}',
506  'DEPENDS ${RELAX_ROOTMAPS}']),
507  'add_custom_target(RelaxRootmap ALL DEPENDS ${rootmapfile})',
508  '\n# Install the merged file',
509  'install(FILES ${rootmapfile} DESTINATION lib)\n'])
510 
511  # installation
512  installs = []
513  if headers and not self.linker_libraries: # not installed yet
514  installs.append("gaudi_install_headers(%s)" % (" ".join(headers)))
515  if self.install_python_modules:
516  # if we install Python modules, we need to check if we have special
517  # names for the ConfUser modules
518  if (self.name + 'ConfUserModules') in self.macros:
519  installs.append('set_property(DIRECTORY PROPERTY CONFIGURABLE_USER_MODULES %s)'
520  % self.macros[self.name + 'ConfUserModules'])
521  installs.append("gaudi_install_python_modules()")
522  if self.install_scripts:
523  installs.append("gaudi_install_scripts()")
524  if installs:
525  data.extend(installs)
526  data.append('') # empty line
527 
528  if self.aliases:
529  data.extend(['gaudi_alias({0}\n {1})'.format(name, ' '.join(alias))
530  for name, alias in self.aliases.iteritems()])
531  data.append('') # empty line
532 
533  # environment
534  def fixSetValue(s):
535  '''
536  Convert environment variable values from CMT to CMake.
537  '''
538  # escape '$' if not done already
539  s = re.sub(r'(?<!\\)\$', '\\$', s)
540  # replace parenthesis with curly braces
541  s = re.sub(r'\$\(([^()]*)\)', r'${\1}', s)
542  # replace variables like Package_root with PACKAGEROOT
543  v = re.compile(r'\$\{(\w*)_root\}')
544  m = v.search(s)
545  while m:
546  s = s[:m.start()] + ('${%sROOT}' % m.group(1).upper()) + s[m.end():]
547  m = v.search(s)
548  return s
549 
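# For example (a sketch; MYPKG is a hypothetical package name):
#   fixSetValue('$(MYPKGROOT)/scripts')  ->  '\${MYPKGROOT}/scripts'
#   fixSetValue('$(MyPkg_root)/python')  ->  '\${MYPKGROOT}/python'
# i.e. CMT's $(...) becomes an escaped CMake ${...} reference, and
# '<Package>_root' is rewritten as '<PACKAGE>ROOT'.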
550  if self.sets:
551  data.append(callStringWithIndent('gaudi_env',
552  ['SET %s %s' % (v, fixSetValue(self.sets[v]))
553  for v in sorted(self.sets)]))
554  data.append('') # empty line
555 
556  # tests
557  if self.QMTest:
558  data.append("\ngaudi_add_test(QMTest QMTEST)")
559 
560  return "\n".join(data) + "\n"
561 
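# For illustration, the CMakeLists.txt generated for a simple component
# package looks roughly like this (package, subdir and library names are
# hypothetical):
#
#   ################################################################################
#   # Package: MyPackage
#   ################################################################################
#   gaudi_subdir(MyPackage v1r0)
#
#   gaudi_depends_on_subdirs(GaudiKernel)
#
#   find_package(Boost)
#
#   gaudi_add_module(MyPackage
#                    src/*.cpp
#                    INCLUDE_DIRS Boost
#                    LINK_LIBRARIES Boost GaudiKernel)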
562  @property
563  def data_packages(self):
564  '''
565  Return the list of data packages used by this package in the form of a
566  dictionary {name: version_pattern}.
567  '''
568  return dict([ (n, self.uses[n][0]) for n in self.uses if n in data_packages ])
569 
570  def process(self, overwrite=None):
571  cml = os.path.join(self.path, "CMakeLists.txt")
572  if ((overwrite == 'force')
573  or (not os.path.exists(cml))
574  or ((overwrite == 'update')
575  and (os.path.getmtime(cml) < os.path.getmtime(self.requirements)))):
576  # write the file
577  data = self.generate()
578  writeToFile(cml, data, self.log)
579  else:
580  self.log.warning("file %s already exists", cml)
581 
582  def _parseRequirements(self):
583  def requirements():
584  statement = ""
585  for l in open(self.requirements):
586  if '#' in l:
587  l = l[:l.find('#')]
588  l = l.strip()
589  # if we have something in the line, extend the statement
590  if l:
591  statement += l
592  if statement.endswith('\\'):
593  # if the statement requires another line, get the next
594  statement = statement[:-1] + ' '
595  continue
596  # whether or not the line added anything, a line that does not
597  # continue the statement (even an empty one after a '\') ends it
598  if statement:
599  try:
600  yield list(self.CMTParser.parseString(statement))
601  except:
602  # ignore statements we cannot parse
603  self.log.debug("Failed to parse statement: %r", statement)
604  statement = ""
605 
606  for args in requirements():
607  cmd = args.pop(0)
608  if cmd == 'version':
609  self.version = args[0]
610  elif cmd == "use":
611  if "-no_auto_imports" in args:
612  imp = False
613  args.remove("-no_auto_imports")
614  else:
615  imp = True
616  if len(args) > 1: # a single argument usually means a conditional use
617  if len(args) > 2:
618  name = "%s/%s" % (args[2], args[0])
619  else:
620  name = args[0]
621  self.uses[name] = (args[1], imp)
622 
623  elif cmd == "apply_pattern":
624  pattern = args.pop(0)
625  args = dict([x.split('=', 1) for x in args])
626  if pattern in self.singleton_patterns:
627  setattr(self, pattern, args or True)
628  elif pattern in self.multi_patterns:
629  getattr(self, pattern).append(args)
630 
631  elif cmd == 'library':
632  name = args.pop(0)
633  # digest arguments (options, variables, sources)
634  imports = []
635  group = None
636  sources = []
637  for a in args:
638  if a.startswith('-'): # options
639  if a.startswith('-import='):
640  imports.append(a[8:])
641  elif a.startswith('-group='):
642  group = a[7:]
643  elif '=' in a: # variable
644  pass
645  else: # source
646  sources.append(a)
647  self.libraries.append((name, sources, group, imports))
648 
649  elif cmd == 'application':
650  name = args.pop(0)
651  # digest arguments (options, variables, sources)
652  imports = []
653  group = None
654  sources = []
655  for a in args:
656  if a.startswith('-'): # options
657  if a.startswith('-import='):
658  imports.append(a[8:])
659  elif a.startswith('-group='):
660  group = a[7:]
661  elif a == '-check': # used for test applications
662  group = 'tests'
663  elif '=' in a: # variable
664  pass
665  else: # source
666  sources.append(a)
667  if 'test' in name.lower() or [s for s in sources if 'test' in s.lower()]:
668  # usually, developers do not put tests in the right group
669  group = 'tests'
670  self.applications.append((name, sources, group, imports))
671 
672  elif cmd == 'document':
673  name = args.pop(0)
674  constituent = args.pop(0)
675  sources = args
676  self.documents.append((name, constituent, sources))
677 
678  elif cmd == 'macro':
679  # FIXME: should handle macro tags
680  name = args.pop(0)
681  value = args[0].strip('"').strip("'")
682  self.macros[name] = value
683 
684  elif cmd == 'macro_append':
685  # FIXME: should handle macro tags
686  name = args.pop(0)
687  value = args[0].strip('"').strip("'")
688  self.macros[name] = self.macros.get(name, "") + value
689 
690  elif cmd == 'set':
691  name = args.pop(0)
692  if name not in ignore_env:
693  value = args[0].strip('"').strip("'")
694  self.sets[name] = value
695 
696  elif cmd == 'alias':
697  # FIXME: should handle macro tags
698  name = args.pop(0)
699  value = args[0].strip('"').strip("'").split()
700  self.aliases[name] = value
701 
702  # classification of libraries in the package
703  unquote = lambda x: x.strip('"').strip("'")
704  self.component_libraries = set([unquote(l['library']) for l in self.component_library])
705  self.linker_libraries = set([unquote(l['library']) for l in self.linker_library])
706  self.reflex_dictionaries = dict([(unquote(l['dictionary']), l.get('options', ''))
707  for l in self.reflex_dictionary])
708 
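# For illustration (a sketch with hypothetical names), _parseRequirements()
# turns statements such as
#
#   use GaudiKernel v*
#   library MyPkgLib -import=AIDA *.cpp
#   apply_pattern linker_library library=MyPkgLib
#
# into  self.uses == {'GaudiKernel': ('v*', True)},
#       self.libraries == [('MyPkgLib', ['*.cpp'], None, ['AIDA'])]  and
#       self.linker_libraries == set(['MyPkgLib']).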
709 toolchain_template = '''# Special wrapper to load the declared version of the heptools toolchain.
710 set(heptools_version {0})
711 
712 # Remove the reference to this file from the cache.
713 unset(CMAKE_TOOLCHAIN_FILE CACHE)
714 
715 # Find the actual toolchain file.
716 find_file(CMAKE_TOOLCHAIN_FILE
717  NAMES heptools-${{heptools_version}}.cmake
718  HINTS ENV CMTPROJECTPATH
719  PATHS ${{CMAKE_CURRENT_LIST_DIR}}/cmake/toolchain
720  PATH_SUFFIXES toolchain)
721 
722 if(NOT CMAKE_TOOLCHAIN_FILE)
723  message(FATAL_ERROR "Cannot find heptools-${{heptools_version}}.cmake.")
724 endif()
725 
726 # Reset the cache variable to have proper documentation.
727 set(CMAKE_TOOLCHAIN_FILE ${{CMAKE_TOOLCHAIN_FILE}}
728  CACHE FILEPATH "The CMake toolchain file" FORCE)
729 
730 include(${{CMAKE_TOOLCHAIN_FILE}})
731 '''
732 
733 class Project(object):
734  def __init__(self, path):
735  """
736  Create a project instance from the root directory of the project.
737  """
738  self.path = os.path.realpath(path)
739  if not isProject(self.path):
740  raise ValueError("%s is not a project" % self.path)
741  self.requirements = os.path.join(self.path, "cmt", "project.cmt")
742  # Private variables for cached properties
743  self._packages = None
744  self._container = None
745 
746  @property
747  def packages(self):
748  """
749  Dictionary of packages contained in the project.
750  """
751  if self._packages is None:
752  self._packages = {}
753  for root, dirs, _files in os.walk(self.path):
754  if isPackage(root):
755  p = Package(root, self)
756  name = os.path.relpath(p.path, self.path)
757  self._packages[name] = p
758  dirs[:] = []
759  return self._packages
760 
761  @property
762  def container(self):
763  """
764  Name of the container package of the project.
765 
766  The name of the container is deduced using the usual LHCb convention
767  (instead of the content of project.cmt).
768  """
769  if self._container is None:
770  for suffix in ["Release", "Sys"]:
771  try:
772  # get the first package that ends with the suffix and does
773  # not have a hat, or raise StopIteration
774  c = (p for p in self.packages
775  if p.endswith(suffix) and "/" not in p).next()
776  self._container = self.packages[c]
777  break
778  except StopIteration:
779  pass
780  return self._container
781 
782  @property
783  def name(self):
784  # The name of the project is the same as the container's, without
785  # the 'Release' or 'Sys' suffix.
786  return self.container.name.replace("Release", "").replace("Sys", "")
787 
788  @property
789  def version(self):
790  return self.container.version
791 
792  def uses(self):
793  for l in open(self.requirements):
794  l = l.split()
795  if l and l[0] == "use" and l[1] != "LCGCMT" and len(l) == 3:
796  yield (projectCase(l[1]), l[2].rsplit('_', 1)[-1])
797 
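# For illustration: a project.cmt line such as "use GAUDI GAUDI_v23r6"
# yields the pair ('Gaudi', 'v23r6').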
798  def heptools(self):
799  '''
800  Return the version of heptools (LCGCMT) used by this project.
801  '''
802 
803  def updateCache(value):
804  '''
805  helper function to update the cache and return the value
806  '''
807  k = repr((self.name, self.version))
808  d = cache.get(k, {})
809  d['heptools'] = value
810  cache[k] = d
811  return value
812 
813  # check for a direct dependency
814  exp = re.compile(r'^\s*use\s+LCGCMT\s+LCGCMT[_-](\S+)')
815  for l in open(self.requirements):
816  m = exp.match(l)
817  if m:
818  return updateCache(m.group(1))
819 
820  # try with the projects we use (in the cache),
821  # including ourselves (we may already be there)
822  for u in list(self.uses()) + [(self.name, self.version)]:
823  u = repr(u)
824  if u in cache and 'heptools' in cache[u]:
825  return updateCache(cache[u]['heptools'])
826 
827  # we cannot guess the version of heptools
828  return None
829 
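# For illustration: a project.cmt line "use LCGCMT LCGCMT_64a" makes
# heptools() return '64a' (the version string is hypothetical).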
830  @property
831  def data_packages(self):
832  '''
833  Return the list of data packages used by this project (i.e. by all the
834  packages in this project) in the form of a dictionary
835  {name: version_pattern}.
836  '''
837  # for debugging we map each data package to the (package, version) pairs using it
838  def appendDict(d, kv):
839  '''
840  helper function to extend a dictionary of lists
841  '''
842  k, v = kv
843  if k in d:
844  d[k].append(v)
845  else:
846  d[k] = [v]
847  return d
848  # dictionary {"data_package": [("user_package", "data_pkg_version"), ...]}
849  dp2pkg = {}
850  for pkgname, pkg in self.packages.items():
851  for dpname, dpversion in pkg.data_packages.items():
852  appendDict(dp2pkg, (dpname, (pkgname, dpversion)))
853 
854  # check and collect the data packages
855  result = {}
856  for dp in sorted(dp2pkg):
857  versions = set([v for _, v in dp2pkg[dp]])
858  if versions:
859  version = sorted(versions)[-1]
860  else:
861  version = '*'
862  if len(versions) != 1:
863  logging.warning('Different versions for data package %s, using %s from %s', dp, version, dp2pkg[dp])
864  result[dp] = version
865 
866  return result
867 
868  def generate(self):
869  # list containing the lines to write to the file
870  data = ["CMAKE_MINIMUM_REQUIRED(VERSION 2.8.5)",
871  "",
872  "#---------------------------------------------------------------",
873  "# Load macros and functions for Gaudi-based projects",
874  "find_package(GaudiProject)",
875  "#---------------------------------------------------------------",
876  "",
877  "# Declare project name and version"]
878  l = "gaudi_project(%s %s" % (self.name, self.version)
879  use = "\n ".join(["%s %s" % u for u in self.uses()])
880  if use:
881  l += "\n USE " + use
882  # collect data packages
883  data_pkgs = []
884  for p, v in sorted(self.data_packages.items()):
885  if v in ('v*', '*'):
886  data_pkgs.append(p)
887  else:
888  data_pkgs.append("%s VERSION %s" % (p, v))
889  if data_pkgs:
890  l += ("\n DATA " +
891  "\n ".join(data_pkgs))
892  l += ")"
893  data.append(l)
894  return "\n".join(data) + "\n"
895 
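# For illustration, the generated top-level CMakeLists.txt looks roughly like
# this (project name, version and dependencies are hypothetical):
#
#   CMAKE_MINIMUM_REQUIRED(VERSION 2.8.5)
#
#   #---------------------------------------------------------------
#   # Load macros and functions for Gaudi-based projects
#   find_package(GaudiProject)
#   #---------------------------------------------------------------
#
#   # Declare project name and version
#   gaudi_project(MyProject v1r0
#                 USE Gaudi v23r6
#                 DATA SQLDDDB)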
896  def generateToolchain(self):
897  heptools_version = self.heptools()
898  if heptools_version:
899  return toolchain_template.format(heptools_version)
900  return None
901 
902  def process(self, overwrite=None):
903  # Prepare the project configuration
904  def produceFile(name, generator):
905  cml = os.path.join(self.path, name)
906  if ((overwrite == 'force')
907  or (not os.path.exists(cml))
908  or ((overwrite == 'update')
909  and (os.path.getmtime(cml) < os.path.getmtime(self.requirements)))):
910  # write the file
911  data = generator()
912  if data:
913  writeToFile(cml, data, logging)
914  else:
915  logging.info("file %s not generated (empty)", cml)
916  else:
917  logging.warning("file %s already exists", cml)
918 
919  produceFile("CMakeLists.txt", self.generate)
920  produceFile("toolchain.cmake", self.generateToolchain)
921 
922  # Recurse in the packages
923  for p in sorted(self.packages):
924  self.packages[p].process(overwrite)
925 
926 
927 def main(args=None):
928  from optparse import OptionParser
929  parser = OptionParser(usage="%prog [options] [path to project or package]",
930  description="Convert CMT-based projects/packages to CMake (Gaudi project)")
931  parser.add_option("-f", "--force", action="store_const",
932  dest='overwrite', const='force',
933  help="overwrite existing files")
934  parser.add_option('--cache-only', action='store_true',
935  help='just update the cache without creating the CMakeLists.txt files.')
936  parser.add_option('-u' ,'--update', action='store_const',
937  dest='overwrite', const='update',
938  help='modify the CMakeLists.txt files if they are older than '
939  'the corresponding requirements.')
940  #parser.add_option('--cache-file', action='store',
941  # help='file to be used for the cache')
942 
943  opts, args = parser.parse_args(args=args)
944 
945  logging.basicConfig(level=logging.INFO)
946 
947  top_dir = os.getcwd()
948  if args:
949  top_dir = args[0]
950  if not os.path.isdir(top_dir):
951  parser.error("%s is not a directory" % top_dir)
952 
953  loadConfig(os.path.join(top_dir, 'cmt2cmake.cfg'))
954 
955  open_cache()
956  if isProject(top_dir):
957  root = Project(top_dir)
958  elif isPackage(top_dir):
959  root = Package(top_dir)
960  if opts.cache_only:
961  return # the cache is updated by instantiating the package
962  else:
963  raise ValueError("%s is neither a project nor a package" % top_dir)
964 
965  if opts.cache_only:
966  root.packages # the cache is updated by instantiating the packages
967  root.heptools() # this triggers the caching of the heptools_version
968  # note that we can get here only if root is a project
969  else:
970  root.process(opts.overwrite)
971  close_cache()
972 
973 if __name__ == '__main__':
974  main()
975  sys.exit(0)
